+ ./ya make -T --test-size=small --test-size=medium --stat --test-threads 52 --link-threads 12 -DUSE_EAT_MY_DATA --add-peerdirs-tests all --build release --sanitize=address -DDEBUGINFO_LINES_ONLY --stat -DCONSISTENT_DEBUG --no-dir-outputs --test-failure-code 0 --build-all --cache-size 2TB --force-build-depends -A --bazel-remote-store --bazel-remote-base-uri http://cachesrv.internal:8081 --bazel-remote-username cache_user --bazel-remote-password-file /tmp/tmp.b5XDZ2hLrZ --bazel-remote-put --dist-cache-max-file-size=209715200 -A --retest ydb --build-custom-json=/home/runner/actions_runner/_work/ydb/ydb/graph.json --custom-context=/home/runner/actions_runner/_work/ydb/ydb/context.json --log-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/ya_log.txt --evlog-file /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/ya_evlog.jsonl --build-results-report /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json --output /home/runner/actions_runner/_work/ydb/ydb/tmp/out
Output root is subdirectory of Arcadia root, this may cause non-idempotent build
Configuring local and dist store caches
Configuration done. Preparing for execution
|33.3%| CLEANING SYMRES
[... successful [AR]/[LD] {BAZEL_DOWNLOAD} library archive steps, interleaved throughout, omitted ...]
|47.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log}
|31.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... results_accumulator.log}
|32.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut_sequence/unittest
|32.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log}
|32.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/transfer/ut/column_table/unittest
|18.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut
|18.8%| PREPARE $(VCS)
|21.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts
|24.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest
|30.9%| PREPARE $(YMAKE_PYTHON3-212672652) - 3.24 MB
|32.3%| PREPARE $(TEST_TOOL_HOST-sbr:10669990147) - 25.18 MB
|44.9%| PREPARE $(CLANG16-1380963495) - 305.67 MB
|46.1%| PREPARE $(CLANG-2403293607) - 293.62 MB
|47.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats
|47.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut
|47.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer
|47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan
|47.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut
|48.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant
{BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/levenshtein_diff/libcpp-string_utils-levenshtein_diff.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/libfq-libs-control_plane_proxy.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/ztstrbuf/libcpp-string_utils-ztstrbuf.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/indent_text/libcpp-string_utils-indent_text.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/query/libblobstorage-vdisk-query.a |48.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/relaxed_escaper/libcpp-string_utils-relaxed_escaper.a |48.2%| [AR] {BAZEL_DOWNLOAD} $(B)/util/libyutil.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/quote/libcpp-string_utils-quote.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base64/libcpp-string_utils-base64.a |48.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/bzip2/libcpp-streams-bzip2.a |48.4%| PREPARE $(CLANG20-882982600) |48.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.a |48.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/proto/libprotobuf-util-proto.a |48.6%| [BI] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/buildinfo_data.h |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/thread_local/libcpp-threading-thread_local.a |48.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/random_provider/liblibrary-cpp-random_provider.a |48.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/util/libcpp-protobuf-util.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/proto/libprotobuf-json-proto.a |48.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/csv/libcpp-string_utils-csv.a |48.9%| [CP] {default-linux-x86_64, release, asan} $(B)/library/cpp/sanitizer/plugin/sanitizer.py.pyplugin |48.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/json/libcpp-protobuf-json.a |48.9%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |49.0%| [CF] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/base32/libcpp-string_utils-base32.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/interop/libcpp-protobuf-interop.a |49.0%| PREPARE $(YMAKE_PYTHON3) |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/skiff/liblibrary-cpp-skiff.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pire/libcpp-regex-pire.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/brotli/libcpp-streams-brotli.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/regex/pcre/libcpp-regex-pcre.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tdigest/liblibrary-cpp-tdigest.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/pg/libessentials-sql-pg.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.global.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/liblibrary-cpp-retry.a |49.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/retry/protos/libcpp-retry-protos.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/resource/liblibrary-cpp-resource.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sliding_window/liblibrary-cpp-sliding_window.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/scheme/liblibrary-cpp-scheme.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zstd/libcpp-streams-zstd.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/zc_memory_input/libcpp-streams-zc_memory_input.a |49.1%| [AR] 
{BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lzma/libcpp-streams-lzma.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/scan/libcpp-string_utils-scan.a |49.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/unittest/libcpp-testing-unittest.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/blocking_queue/libcpp-threading-blocking_queue.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/hook/libcpp-testing-hook.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/sse/liblibrary-cpp-sse.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_extensions/libcpp-testing-gtest_extensions.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/rbo/libkqp-opt-rbo.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/init/libcpp-xml-init.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/xml/document/libcpp-xml-document.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/future/libcpp-threading-future.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/libkqp-opt-physical.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/init/libcpp-openssl-init.global.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/equeue/libcpp-threading-equeue.a |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/time_provider/liblibrary-cpp-time_provider.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/task_scheduler/libcpp-threading-task_scheduler.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/poor_man_openmp/libcpp-threading-poor_man_openmp.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/libblobstorage-vdisk-skeleton.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/light_rw_lock/libcpp-threading-light_rw_lock.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/queue/libcpp-threading-queue.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/parse_size/libcpp-string_utils-parse_size.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/method/libcpp-openssl-method.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson/json/libcpp-yson-json.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/io/libcpp-openssl-io.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/cron/libcpp-threading-cron.a |49.3%| PREPARE $(CLANG_FORMAT-3815817643) |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/prometheus/libmonlib-encode-prometheus.a |49.4%| [CP] {default-linux-x86_64, release, asan} $(B)/common_test.context |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packedtypes/liblibrary-cpp-packedtypes.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kafka_proxy/libydb-core-kafka_proxy.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/libcore-blobstorage-pdisk.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/holders/libcpp-openssl-holders.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/on_disk/chunks/libcpp-on_disk-chunks.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/tablesorter/libservice-pages-tablesorter.global.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/messagebus/libcpp-monlib-messagebus.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/resources/libservice-pages-resources.global.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/libcpp-monlib-service.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/exception/libcpp-monlib-exception.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/service/pages/libmonlib-service-pages.a |49.5%| 
[AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/json/libmonlib-encode-json.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/dynamic_counters/libcpp-monlib-dynamic_counters.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/deprecated/json/libmonlib-deprecated-json.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/buffered/libmonlib-encode-buffered.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/system/libsystem_allocator.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/text/libmonlib-encode-text.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/libcpp-messagebus-monitoring.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/metrics/libcpp-monlib-metrics.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/oldmodule/libcpp-messagebus-oldmodule.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/global/libcpp-logger-global.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.global.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/spack/libmonlib-encode-spack.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/alloc_profiler/libcpp-lfalloc-alloc_profiler.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/fast_sax/libcpp-json-fast_sax.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libapi-protos.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lcs/liblibrary-cpp-lcs.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/common/libcpp-json-common.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/iterator/liblibrary-cpp-iterator.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/heartbeat_actor/libcore-audit-heartbeat_actor.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/int128/liblibrary-cpp-int128.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/hdr/libcpp-histogram-hdr.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/ipmath/liblibrary-cpp-ipmath.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dwarf_backtrace/liblibrary-cpp-dwarf_backtrace.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/disjoint_sets/liblibrary-cpp-disjoint_sets.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/crc32c/libcpp-digest-crc32c.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/libcpp-monlib-encode.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/md5/libcpp-digest-md5.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/libcpp-lwtrace-protos.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dot_product/liblibrary-cpp-dot_product.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/old_crc/libcpp-digest-old_crc.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/system/libcpp-yt-system.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/enum_codegen/libcpp-deprecated-enum_codegen.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/big_integer/libcpp-openssl-big_integer.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gmock_in_unittest/libcpp-testing-gmock_in_unittest.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/openssl/crypto/libcpp-openssl-crypto.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/server/libcpp-http-server.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/timezone_conversion/liblibrary-cpp-timezone_conversion.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/libencode-legacy_protobuf-protos.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/string_utils/url/libcpp-string_utils-url.a |49.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/diff/liblibrary-cpp-diff.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/liblibrary-cpp-unified_agent_client.global.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/cpu_clock/libcpp-yt-cpu_clock.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/libcpp-yt-backtrace.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/normalization/libcpp-unicode-normalization.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/set/libcpp-unicode-set.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/assert/libcpp-yt-assert.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/plain_text_formatter/libyt-logging-plain_text_formatter.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unified_agent_client/proto/libcpp-unified_agent_client-proto.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/backtrace/cursors/libunwind/libbacktrace-cursors-libunwind.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/exception/libcpp-yt-exception.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/uri/liblibrary-cpp-uri.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/audit_config/libcore-audit-audit_config.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/tools/enum_parser/enum_serialization_runtime/libtools-enum_parser-enum_serialization_runtime.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/physical/effects/libopt-physical-effects.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/writer/libcpp-json-writer.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/misc/libcpp-yt-misc.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/string/libcpp-yt-string.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/fetch/libcpp-http-fetch.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yson_pull/libyson_pull.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/threading/libcpp-yt-threading.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/memory/libcpp-yt-memory.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ref/libinternal-proxies-ref.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson/libcpp-yt-yson.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/yson_string/libcpp-yt-yson_string.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/util/charset/libutil-charset.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/audit/libydb-core-audit.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/other/libcore-blobstorage-other.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/libcore-base-generated.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/common/libcore-blobstorage-common.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/chacha_512/libblobstorage-crypto-chacha_512.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/metadata/libblobstorage-pdisk-metadata.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse41/libinternal-proxies-sse41.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/base/libcore-blobstorage-base.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/crypto/libcore-blobstorage-crypto.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/bridge/syncer/libblobstorage-bridge-syncer.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/lwtrace_probes/libcore-blobstorage-lwtrace_probes.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/mock/libblobstorage-dsproxy-mock.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/base/libcontrol-lib-base.a |50.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/groupinfo/libcore-blobstorage-groupinfo.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/mon/libcpp-lwtrace-mon.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/recovery/libvdisk-hulldb-recovery.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/base/libvdisk-hulldb-base.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/balance/libblobstorage-vdisk-balance.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/bridge/proxy/libblobstorage-bridge-proxy.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/incrhuge/libcore-blobstorage-incrhuge.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/barriers/libvdisk-hulldb-barriers.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/cache_block/libvdisk-hulldb-cache_block.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/backpressure/libcore-blobstorage-backpressure.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/libcore-blobstorage-vdisk.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/protos/libblobstorage-vdisk-protos.a |50.3%| PREPARE $(LLD_ROOT-3107549726) - 37.80 MB |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/huge/libblobstorage-vdisk-huge.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/ingress/libblobstorage-vdisk-ingress.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/compstrat/libvdisk-hulldb-compstrat.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/metadata/libcore-client-metadata.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/fresh/libvdisk-hulldb-fresh.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blockstore/core/libcore-blockstore-core.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/localrecovery/libblobstorage-vdisk-localrecovery.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/generic/libvdisk-hulldb-generic.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/synclog/phantom_flag_storage/libvdisk-synclog-phantom_flag_storage.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/agent/libcore-blob_depot-agent.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/util/libcms-console-util.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/libydb-core-base.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/minikql_compile/libcore-client-minikql_compile.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/change_exchange/libydb-core-change_exchange.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hulldb/bulksst_add/libvdisk-hulldb-bulksst_add.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/metadata/libblobstorage-vdisk-metadata.a |50.3%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/libpqtablet-partition-mlp.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/validation/libcore-config-validation.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libcore-config-protos.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/debug/libydb-core-debug.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/hullcompdelete/libvdisk-hullop-hullcompdelete.a |50.5%| PREPARE $(PYTHON) - 33.59 MB |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_config_base/libcore-driver_lib-cli_config_base.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/version/libversion.a |50.5%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/blobstorage/vdisk/synclog/libblobstorage-vdisk-synclog.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.global.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/plain/libarrow-accessor-plain.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/init/libcore-config-init.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sparsed/libarrow-accessor-sparsed.global.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/proto/liblibs-quota_manager-proto.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.global.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/minikql/libcore-engine-minikql.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/dictionary/libarrow-accessor-dictionary.global.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/mock/libfq-libs-mock.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/events/liblibs-control_plane_storage-events.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/events/liblibs-quota_manager-events.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/graph/shard/protos/libgraph-shard-protos.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/liblibs-row_dispatcher-format_handler.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_proxy/utils/liblibs-control_plane_proxy-utils.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/libblobstorage-vdisk-syncer.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/common/liblibs-row_dispatcher-common.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/events/liblibs-row_dispatcher-events.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/logs/libfq-libs-logs.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/engine/libydb-core-engine.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/common/librow_dispatcher-format_handler-common.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/protos/liblibs-row_dispatcher-protos.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/rate_limiter/quoter_service/liblibs-rate_limiter-quoter_service.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/ydb_dump/libcore-io_formats-ydb_dump.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/shared_resources/libfq-libs-shared_resources.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/events/liblibs-test_connection-events.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/libydb-core-cms.global.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/read_rule/libfq-libs-read_rule.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/libcore-grpc_services-cancelation.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/libydb-core-grpc_streaming.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_caching/libydb-core-grpc_caching.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/cancelation/protos/libgrpc_services-cancelation-protos.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/cell_maker/libcore-io_formats-cell_maker.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/io_formats/arrow/scheme/libio_formats-arrow-scheme.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/filters/librow_dispatcher-format_handler-filters.a |50.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/kqp/expr_nodes/libcore-kqp-expr_nodes.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/protos/libfq-libs-protos.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/vdisk/hullop/libblobstorage-vdisk-hullop.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/tablet/libcore-grpc_services-tablet.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/helpers/libkqp-compile_service-helpers.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/shutdown/libkqp-common-shutdown.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/common/simple/libkqp-common-simple.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/counters/libcore-grpc_services-counters.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/test_connection/libfq-libs-test_connection.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.global.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.global.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/actors/libkqp-gateway-actors.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.global.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/local_rpc/libkqp-gateway-local_rpc.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/private_client/libfq-libs-private_client.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/counters/libcore-kqp-counters.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/parsers/librow_dispatcher-format_handler-parsers.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/external_data_source/libgateway-behaviour-external_data_source.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/finalize_script_service/libcore-kqp-finalize_script_service.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/table/libgateway-behaviour-table.global.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/shards_resolver/libkqp-executer_actor-shards_resolver.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.global.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/libgateway-behaviour-tablestore.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/resource_pool/libgateway-behaviour-resource_pool.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/metering/libydb-core-metering.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/sub_columns/libarrow-accessor-sub_columns.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/libcore-kqp-federated_query.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/libydb-core-kqp.global.a |51.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/view/libgateway-behaviour-view.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compile_service/libcore-kqp-compile_service.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/behaviour/tablestore/operations/libbehaviour-tablestore-operations.global.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/topics/libcore-kqp-topics.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/proxy_service/script_executions_utils/libkqp-proxy_service-script_executions_utils.a |49.4%| [CC] {BAZEL_DOWNLOAD, FAILED} 
$(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer.cpp |49.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/health_check/libydb-core-health_check.a |49.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/run/librun.a |49.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/libcore-kqp-workload_service.a |49.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/common/libkqp-workload_service-common.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/driver_lib/cli_utils/libcli_utils.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/run_script_actor/libcore-kqp-run_script_actor.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/rm_service/libcore-kqp-rm_service.a |49.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/tables/libkqp-workload_service-tables.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_data/libcore-kqp-query_data.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/nodewarden/libcore-blobstorage-nodewarden.a |49.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/address_classification/libcore-mind-address_classification.a |49.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/memory_controller/libydb-core-memory_controller.a |49.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/log_backend/libydb-core-log_backend.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/audit/libcore-mon-audit.a |50.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/common/proxy/libpersqueue-common-proxy.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/actors/libkqp-workload_service-actors.a |50.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/dsproxy/libcore-blobstorage-dsproxy.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/node_service/libcore-kqp-node_service.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/query_compiler/libcore-kqp-query_compiler.a |50.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jaraco.text/libpy3contrib-python-jaraco.text.global.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon_alloc/libydb-core-mon_alloc.a |50.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/local_pgwire/libydb-core-local_pgwire.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/gateway/libcore-kqp-gateway.a |50.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/compute_actor/libcore-kqp-compute_actor.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/cluster_tracker/libpersqueue-public-cluster_tracker.a |50.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blob_depot/libydb-core-blob_depot.a |50.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/readproxy/libpersqueue-pqtablet-readproxy.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/libydb-core-mon.a |50.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/codecs/libpersqueue-public-codecs.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kesus/tablet/libcore-kesus-tablet.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/describer/libpersqueue-public-describer.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/partition_key_range/libpersqueue-public-partition_key_range.a |50.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/list_topics/libpersqueue-public-list_topics.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/peephole/libkqp-opt-peephole.a |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/mirrorer/libpqtablet-partition-mirrorer.a |50.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_common.cpp |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/partition_index_generator/libpersqueue-public-partition_index_generator.a |51.0%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/libcore-persqueue-public.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/quota/libpersqueue-pqtablet-quota.a |50.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer_app.cpp |50.9%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.h_serialized.cpp |50.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/session_actor/libcore-kqp-session_actor.a |51.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/mlp/libpersqueue-public-mlp.a |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage.cpp |51.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage__serialization.cpp |51.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_message_enricher.cpp |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyHamcrest/py3/libpy3python-PyHamcrest-py3.global.a |51.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_dlq_mover.cpp |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/libcore-protos-nbs.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/out/libcore-protos-out.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libcore-protos-schemeshard.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.global.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/client/server/libcore-client-server.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/load_test/libydb-core-load_test.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohappyeyeballs/libpy3contrib-python-aiohappyeyeballs.global.a |51.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/pgproxy/libydb-core-pgproxy.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml/libcontrib-libs-yaml.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/protos/libcore-public_http-protos.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/counters/libpersqueue-public-counters.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/importlib-resources/libpy3contrib-python-importlib-resources.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/stack-data/libpy3contrib-python-stack-data.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/iniconfig/libpy3contrib-python-iniconfig.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipdb/py3/libpy3python-ipdb-py3.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/platformdirs/libpy3contrib-python-platformdirs.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/idna/py3/libpy3python-idna-py3.global.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/public_http/libydb-core-public_http.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/PyYAML/py3/libpy3python-PyYAML-py3.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/frozenlist/libpy3contrib-python-frozenlist.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/more-itertools/py3/libpy3python-more-itertools-py3.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pure-eval/libpy3contrib-python-pure-eval.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/aiohttp/libpy3contrib-python-aiohttp.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pexpect/py3/libpy3python-pexpect-py3.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pycparser/py3/libpy3python-pycparser-py3.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/typeguard/libpy3contrib-python-typeguard.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ptyprocess/py3/libpy3python-ptyprocess-py3.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/libffi/libcontrib-restricted-libffi.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/requests/py3/libpy3python-requests-py3.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/python-dateutil/py3/libpy3python-python-dateutil-py3.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/typing-extensions/py3/libpy3python-typing-extensions-py3.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/py/py3/libpy3python-py-py3.global.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/sasl/libcore-security-sasl.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/grpcio/py3/libpy3python-grpcio-py3.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/traitlets/py3/libpy3python-traitlets-py3.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/multidict/libpy3contrib-python-multidict.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ipython/py3/libpy3python-ipython-py3.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/compile_cache/libcore-sys_view-compile_cache.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/writer/libcore-persqueue-writer.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/certificate_check/libcore-security-certificate_check.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/protobuf/py3/libpy3python-protobuf-py3.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/ldap_auth_provider/libcore-security-ldap_auth_provider.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme_types/libydb-core-scheme_types.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/audit_helpers/libcore-testlib-audit_helpers.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/prompt-toolkit/py3/libpy3python-prompt-toolkit-py3.global.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/sessions/libcore-sys_view-sessions.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pool_classifiers/libcore-sys_view-resource_pool_classifiers.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/setuptools/py3/libpy3python-setuptools-py3.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/resource_pools/libcore-sys_view-resource_pools.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/tablets/libcore-sys_view-tablets.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/libydb-core-http_proxy.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/protos/libcore-tablet_flat-protos.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/service/libcore-sys_view-service.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/actors/libcore-testlib-actors.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/balance_coverage/libcore-tx-balance_coverage.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/database/libcore-statistics-database.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/protos/libcolumnshard-bg_tasks-protos.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/show_create/libcore-sys_view-show_create.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/streaming_queries/libcore-sys_view-streaming_queries.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/query_stats/libcore-sys_view-query_stats.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/security/libydb-core-security.a |51.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/bg_tasks/events/libcolumnshard-bg_tasks-events.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Pygments/py3/libpy3python-Pygments-py3.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/cms/console/libcore-cms-console.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/abstract/libcolumnshard-blobs_action-abstract.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/libydb-core-sys_view.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/events/libcolumnshard-blobs_action-events.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/bg_tasks/abstract/libcolumnshard-bg_tasks-abstract.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/sys_view/storage/libcore-sys_view-storage.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/common/libcolumnshard-blobs_action-common.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/testlib/basics/libcore-testlib-basics.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/backup/iscan/libcolumnshard-backup-iscan.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/runtime/libcore-kqp-runtime.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/counters/libcolumnshard-blobs_action-counters.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_reader/libtx-columnshard-blobs_reader.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/protos/libcolumnshard-blobs_action-protos.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/sqlite3/libcontrib-libs-sqlite3.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/executer_actor/libcore-kqp-executer_actor.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/storages_manager/libcolumnshard-blobs_action-storages_manager.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/column_fetching/libtx-columnshard-column_fetching.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/logical/libkqp-opt-logical.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/libtx-columnshard-common.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/transaction/libcolumnshard-blobs_action-transaction.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/backup/async_jobs/libcolumnshard-backup-async_jobs.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/clickhouse/client/libclickhouse_client_udf.global.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libcolumnshard-common-protos.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/test_tablet/libydb-core-test_tablet.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/ydb_sdk_import/libpy3tests-oss-ydb_sdk_import.global.a |52.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/oss/canonical/libpy3tests-oss-canonical.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/blobs_action/tier/libcolumnshard-blobs_action-tier.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/libpy3api-grpc-draft.global.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/mkql_helpers/libyt-lib-mkql_helpers.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/init_yt_api/libyt-lib-init_yt_api.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/common/libkqp-ut-common.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/hash/libyt-lib-hash.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/full_capture/libyt-lib-full_capture.a |52.1%| PREPARE $(CLANG-1922233694) - 209.73 MB |52.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/data_accessor/cache_policy/libcolumnshard-data_accessor-cache_policy.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/expr_traits/libyt-lib-expr_traits.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/libtx-columnshard-data_accessor.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/in_mem/libcolumnshard-data_accessor-in_mem.global.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/locks/libcolumnshard-data_locks-locks.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.global.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_accessor/local_db/libcolumnshard-data_accessor-local_db.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/graph_reorder/libyt-lib-graph_reorder.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_locks/manager/libcolumnshard-data_locks-manager.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/context/libdata_sharing-common-context.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/result/libsrc-client-result.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/resources/libsrc-client-resources.global.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/events/libdata_sharing-destination-events.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/common/session/libdata_sharing-common-session.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/session/libdata_sharing-destination-session.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/events/libdata_sharing-source-events.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/initiator/controller/libdata_sharing-initiator-controller.global.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/ss_tasks/libsrc-client-ss_tasks.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/query/libsrc-client-query.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/jwt/libsrc-library-jwt.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/modification/transactions/libdata_sharing-modification-transactions.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/destination/transactions/libdata_sharing-destination-transactions.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/diagnostics/libtx-columnshard-diagnostics.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/counters/libengines-changes-counters.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/plain/libchanges-compaction-plain.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/opt/libcore-kqp-opt.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/rate_limiter/libsrc-client-rate_limiter.a |52.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/data_sharing/source/session/libdata_sharing-source-session.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/lambda_builder/libyt-lib-lambda_builder.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libcolumnshard-engines-protos.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/log/libyt-lib-log.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/key_filter/libyt-lib-key_filter.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/codecs/libclient-topic-codecs.global.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_reader/libtx-columnshard-data_reader.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.global.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/scheme/libsrc-client-scheme.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/query_stats/libclient-table-query_stats.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sparsed/libchanges-compaction-sparsed.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/libengines-changes-compaction.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/infer_schema/libyt-lib-infer_schema.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/protos/libcolumnshard-data_sharing-protos.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/sub_columns/libchanges-compaction-sub_columns.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/predicate/libcolumnshard-engines-predicate.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common/libengines-reader-common.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/common/libclient-topic-common.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/abstract/libengines-reader-abstract.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/data_sharing/source/transactions/libdata_sharing-source-transactions.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/actor/libengines-reader-actor.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/constructor/libreader-plain_reader-constructor.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/libpy3core-protos-nbs.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/abstract/libstorage-actualizer-abstract.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/libclient-types-credentials.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/constructor/libreader-simple_reader-constructor.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/constructor/libreader-common_reader-constructor.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet/libydb-core-tablet.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/libsrc-client-topic.a |52.6%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/loading/libcolumnshard-engines-loading.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/counters/libtx-columnshard-counters.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/common/libreader-common_reader-common.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/abstract/libiterator-sys_view-abstract.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/libessentials-parser-pg_wrapper.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/login/libtypes-credentials-login.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/exceptions/libclient-types-exceptions.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/libcolumnshard-engines-changes.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/collections/libsimple_reader-iterator-collections.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/common_reader/iterator/libreader-common_reader-iterator.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/chunks/libiterator-sys_view-chunks.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.global.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/granules/libiterator-sys_view-granules.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/libsrc-client-table.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/libpy3columnshard-common-protos.global.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sync_points/libsimple_reader-iterator-sync_points.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/credentials/oauth2_token_exchange/libtypes-credentials-oauth2_token_exchange.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/libpy3yql-dq-proto.global.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.global.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/libpy3library-folder_service-proto.global.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/libpy3core-scheme-protos.global.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/fatal_error_handlers/libclient-types-fatal_error_handlers.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/tracing/libengines-reader-tracing.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/executor/libclient-types-executor.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/schemas/libiterator-sys_view-schemas.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/duplicates/libreader-simple_reader-duplicates.a |52.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/portions/libiterator-sys_view-portions.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/operation/libclient-types-operation.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/common/libengines-scheme-common.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/abstract/libengines-scheme-abstract.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/plain_reader/iterator/libreader-plain_reader-iterator.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/common/libscheme-defaults-common.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/libscheme-defaults-protos.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/libsrc-client-types.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/libpy3core-protos-schemeshard.global.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/libreader-simple_reader-iterator.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/indexes/abstract/libscheme-indexes-abstract.a |52.9%| PREPARE $(CLANG18-1866954364) - 318.78 MB |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/tiering/libengines-scheme-tiering.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/provider/libcore-kqp-provider.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/types/status/libclient-types-status.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/value/libsrc-client-value.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/scheme/libstorage-actualizer-scheme.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/libpy3library-actors-protos.global.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/common/libpersqueue-ut-common.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/decimal/libsrc-library-decimal.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/index/libstorage-actualizer-index.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/issue/libsrc-library-issue.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/table/impl/libclient-table-impl.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/versions/libengines-scheme-versions.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/libcolumnshard-engines-scheme.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/chunks/libengines-storage-chunks.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.a |53.0%| PREPARE $(JDK17-2548586558) - 188.76 MB |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/tiering/libstorage-actualizer-tiering.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/grpc/client/libsdk-library-grpc-client-v3.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/hive/libcore-mind-hive.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bits_storage/libstorage-indexes-bits_storage.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/obfuscate/libsdk-library-persqueue-obfuscate-v3.a |53.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/sdk/cpp/src/library/operation_id/libsrc-library-operation_id.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom/libstorage-indexes-bloom.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/granule/libengines-storage-granule.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/reader/simple_reader/iterator/sys_view/optimizer/libiterator-sys_view-optimizer.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/operation_id/protos/liblibrary-operation_id-protos.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/libpy3library-ydb_issue-proto.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/libpy3dq-actors-protos.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/libpy3library-formats-arrow-protos.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/libpy3library-login-protos.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mind/bscontroller/libcore-mind-bscontroller.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/count_min_sketch/libstorage-indexes-count_min_sketch.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/max/libstorage-indexes-max.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/categories_bloom/libstorage-indexes-categories_bloom.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/persqueue/topic_parser_public/libsdk-library-persqueue-topic_parser_public-v3.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/libpy3library-mkql_proto-protos.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/libstorage-indexes-portions.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/libydb-core-tablet_flat.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/portions/extractor/libindexes-portions-extractor.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/string_utils/helpers/liblibrary-string_utils-helpers.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/planner/liboptimizer-lbuckets-planner.global.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/skip_index/libstorage-indexes-skip_index.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/actualizer/common/libstorage-actualizer-common.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/uuid/libsrc-library-uuid.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/abstract/libservices-bg_tasks-abstract.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bg_tasks/protos/libservices-bg_tasks-protos.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.a |53.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/level/liblcbuckets-constructor-level.global.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/impl/libclient-topic-impl.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/auth/libydb-services-auth.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/selector/liblcbuckets-constructor-selector.global.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/constructor/liboptimizer-lcbuckets-constructor.global.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/libpy3utils-fetch-proto.global.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/selector/liblcbuckets-planner-selector.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/buffer/libengines-writer-buffer.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/libpy3api-protos.global.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/libpy3api-grpc.global.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/liboptimizer-lcbuckets-planner.global.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/common/libcolumnshard-export-common.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lcbuckets/planner/level/liblcbuckets-planner-level.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/tiling/libstorage-optimizer-tiling.global.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/writer/libcolumnshard-engines-writer.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/protos/libcolumnshard-export-protos.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/bridge/libydb-services-bridge.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/actor/libcolumnshard-export-actor.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/libpy3api-protos-annotations.global.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/abstract/libcolumnshard-hooks-abstract.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/protos/libapi-protos-persqueue-deprecated.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/api/grpc/libapi-grpc-persqueue-deprecated.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/extractor/libext_index-metadata-extractor.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/export/session/libcolumnshard-export-session.global.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/common/libservices-ext_index-common.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/common/libcolumnshard-operations-common.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/backup/libydb-services-backup.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.a |53.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/loading/libtx-columnshard-loading.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/schema_version/libcolumnshard-normalizer-schema_version.global.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tables/libcolumnshard-normalizer-tables.global.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/tablet/libcolumnshard-normalizer-tablet.global.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/granule/libcolumnshard-normalizer-granule.global.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.global.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/discovery/libydb-services-discovery.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/optimizer/lbuckets/constructor/liboptimizer-lbuckets-constructor.global.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/libpy3essentials-public-types.global.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/libpy3public-issue-protos.global.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resources/libtx-columnshard-resources.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/cms/libydb-services-cms.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/libpy3providers-common-proto.global.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/overload_manager/libtx-columnshard-overload_manager.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/events/libsubscriber-abstract-events.a |53.5%| PREPARE $(WITH_JDK17-sbr:9470949154) |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/protos/libtx-coordinator-protos.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/batch_builder/libcolumnshard-operations-batch_builder.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/abstract/subscriber/libsubscriber-abstract-subscriber.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tables_erased/libsubscriber-events-tables_erased.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/resource_subscriber/libtx-columnshard-resource_subscriber.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/slice_builder/libcolumnshard-operations-slice_builder.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/storage/indexes/bloom_ngramm/libstorage-indexes-bloom_ngramm.global.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/subscriber/events/tx_completed/libsubscriber-events-tx_completed.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tracing/libtx-columnshard-tracing.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/sharding/libservices-lib-sharding.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.global.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/auth/libservices-lib-auth.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/libtx-columnshard-splitter.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/operations/libtx-columnshard-operations.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/deprecated/persqueue_v0/libservices-deprecated-persqueue_v0.a |51.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.a |51.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/locks/libcolumnshard-transactions-locks.a |51.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |51.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/metadata/libservices-ext_index-metadata.a |51.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/lib/actors/libservices-lib-actors.a |51.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/protos/libcolumnshard-transactions-protos.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/libtx-columnshard-transactions.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/tx_reader/libtx-columnshard-tx_reader.a |51.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/library/time/libsrc-library-time.a |52.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/kesus/libydb-services-kesus.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/tracing/libtx-conveyor-tracing.a |52.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor/usage/libtx-conveyor-usage.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/ev_write/libtransactions-operators-ev_write.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/wardens/libpy3tests-library-wardens.global.a |52.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/python/enable_v3_new_behavior/libpy3sdk-python-enable_v3_new_behavior.global.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/dynamic_config/libydb-services-dynamic_config.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/libpy3ydb-tests-library.global.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/operators/libcolumnshard-transactions-operators.global.a |52.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/tracing/libtx-conveyor_composite-tracing.a |52.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/splitter/abstract/libcolumnshard-splitter-abstract.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/transactions/transactions/libcolumnshard-transactions-transactions.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/usage/libtx-conveyor_composite-usage.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/common/libservices-metadata-common.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/abstract/libservices-metadata-abstract.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ext_index/service/libservices-ext_index-service.a |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/public/libtx-coordinator-public.a |52.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/conveyor_composite/service/libtx-conveyor_composite-service.a |52.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/common/libtx-data_events-common.a |52.7%| PREPARE $(JDK_DEFAULT-2548586558) |52.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/local_discovery/libydb-services-local_discovery.a |52.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/clients/libpy3tests-library-clients.global.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/data_events/libcore-tx-data_events.a |53.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.global.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/usage/libtx-general_cache-usage.a |53.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/general_cache/service/libtx-general_cache-service.a |53.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/tx/general_cache/source/libtx-general_cache-source.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/optimization/libservices-metadata-optimization.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/keyvalue/libydb-services-keyvalue.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/libydb-services-metadata.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/maintenance/libydb-services-maintenance.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/libydb-core-tx.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/usage/liblimiter-grouped_memory-usage.a |53.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/tracing/liblimiter-grouped_memory-tracing.a |53.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libpy3yql-essentials-protos.global.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libydb-core-protos.a |53.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/accessor/libmetadata-secret-accessor.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/manager/libservices-metadata-manager.a |53.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/public/libtx-long_tx_service-public.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/request/libservices-metadata-request.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/ds_table/libservices-metadata-ds_table.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/locks/libcore-tx-locks.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/limiter/grouped_memory/service/liblimiter-grouped_memory-service.a |53.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/common/libtx-replication-common.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/long_tx_service/libcore-tx-long_tx_service.a |53.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/service/libtx-priorities-service.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/initializer/libservices-metadata-initializer.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/cluster_ordering/libservices-persqueue_cluster_discovery-cluster_ordering.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/priorities/usage/libtx-priorities-usage.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/common/libtx-schemeshard-common.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/test_helper/libtx-columnshard-test_helper.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/coordinator/libcore-tx-coordinator.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/program/libcore-tx-program.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/libpy3core-issue-protos.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libpy3core-file_storage-proto.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/protos/libpy3columnshard-engines-protos.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libpy3ydb-library-services.global.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/events/libolap-bg_tasks-events.a |53.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/adapter/libolap-bg_tasks-adapter.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/protos/libolap-bg_tasks-protos.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/transactions/libolap-bg_tasks-transactions.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_cluster_discovery/libydb-services-persqueue_cluster_discovery.a |53.9%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/column_families/libschemeshard-olap-column_families.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/monitoring/libydb-services-monitoring.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/bg_tasks/tx_chain/libolap-bg_tasks-tx_chain.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/queue_url/holder/libsqs_topic-queue_url-holder.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/protos/receipt/libsqs_topic-protos-receipt.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/queue_url/libservices-sqs_topic-queue_url.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/mediator/libcore-tx-mediator.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/common/libschemeshard-olap-common.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/layout/libschemeshard-olap-layout.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/abstract/liboperations-alter-abstract.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/manager/libschemeshard-olap-manager.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/common/libalter-in_store-common.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/common/liboperations-alter-common.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/ast/serialize/libessentials-ast-serialize.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/config_shards/libalter-in_store-config_shards.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/columns/libschemeshard-olap-columns.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/rate_limiter/libydb-services-rate_limiter.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/liboperations-alter-in_store.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/schema/libalter-in_store-schema.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/fq/libydb-services-fq.a |53.9%| PREPARE $(WITH_JDK-sbr:9470949154) |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/schema/libschemeshard-olap-schema.a |53.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/normalizer/portion/libcolumnshard-normalizer-portion.global.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/view/libydb-services-view.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/in_store/transfer/libalter-in_store-transfer.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/test_shard/libydb-services-test_shard.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/libessentials-core-dq_integration.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/defs/libcore-file_storage-defs.a |54.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/tablet/libydb-services-tablet.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/options/libschemeshard-olap-options.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/replication/libydb-services-replication.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/arrow_kernels/registry/libcore-arrow_kernels-registry.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/alter/standalone/liboperations-alter-standalone.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/store/libschemeshard-olap-store.a |54.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/core/arrow_kernels/request/libcore-arrow_kernels-request.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/table/libschemeshard-olap-table.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/abstract/libtx-tiering-abstract.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/metadata/secret/libservices-metadata-secret.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/credentials/libessentials-core-credentials.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/libcore-tx-columnshard.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/time_cast/libcore-tx-time_cast.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/cbo/libessentials-core-cbo.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/json/libcore-viewer-json.a |54.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes/libessentials-core-expr_nodes.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/expr_nodes_gen/libessentials-core-expr_nodes_gen.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/evlog/libcore-util-evlog.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/dq_integration/transform/libcore-dq_integration-transform.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sharding/libcore-tx-sharding.global.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/usage/libtx-tracing-usage.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tracing/service/libtx-tracing-service.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/tier/libtx-tiering-tier.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/indexes/libschemeshard-olap-indexes.a |54.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/operation_queue_timer.h_serialized.cpp |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/sqs_topic/libydb-services-sqs_topic.a |54.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/tiering/libcore-tx-tiering.a |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__clean_pathes.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_cleaning.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/ttl/libschemeshard-olap-ttl.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/libydb-core-util.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/facade/libessentials-core-facade.a |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__get.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__list.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__forget.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_root.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__cancel.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/olap/operations/libschemeshard-olap-operations.a |54.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/scheme_board/libcore-tx-scheme_board.a |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export__create.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_continuous_backup_cleaner.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_billing_helpers.cpp |54.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index.cpp |54.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/ast/libyql-essentials-ast.a |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_bg_tasks__list.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_scan.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__progress.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/libydb-services-persqueue_v1.a |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__pq_stats.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_tables.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table_index.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/controller/libtx-replication-controller.a |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__root_shred_manager.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_data_source.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_part.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_external_table.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp |54.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ymq/libydb-services-ymq.a |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup_collection.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_data_source.cpp |54.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_services/libydb-core-grpc_services.a |54.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/persqueue_v1/actors/libservices-persqueue_v1-actors.a |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_secret.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__login_finalize.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__make_access_database_no_inheritable.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_consistent_copy_tables.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_sequence.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_copy_table.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__list_users.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_populator.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__login.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__find_subdomain_path_id.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__background_compaction.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__delete_tablet_reply.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__borrowed_compaction.cpp |54.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__describe_scheme.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__backup_collection_common.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__conditional_erase.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_sequence.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_kesus.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__op_traits.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_continuous_backup.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_bsv.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__notify.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_cdc_stream.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_index.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_data_source.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_login.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_solomon.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_external_table.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_extsubdomain.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_change_path_state.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__monitoring.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_replication.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_apply_build_index.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_assign_bsv.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init_schema.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_streaming_query.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp |54.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_backup_collection.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_resource_pool.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_blob_depot.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_external_table.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_backup_incremental_backup_collection.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_bsv.cpp |54.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__fix_bad_paths.cpp |54.8%| 
[CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_pq.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_subdomain.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_bsv.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_lock.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_continuous_backup.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_build_index.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_index.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_common_cdc_stream.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_fs.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_check.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation.cpp |54.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sysview.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_rtmr.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_cdc_stream.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_finalize.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore_incremental_backup.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_restore.cpp |55.0%| PREPARE $(OS_SDK_ROOT-sbr:243881345) - 8.40 MB |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_streaming_query.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_set_constraint_lock.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_continuous_backup.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_view.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp |54.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_bsv.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_cancel_tx.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_kesus.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_cdc_stream.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_db_changes.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_indexed_table.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_unsafe.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_resource_pool.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_backup_collection.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_fs.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_index.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_replication.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_indexed_table.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_lock.cpp |55.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_pq.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__init.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_streaming_query.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_table.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_secret.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_subdomain.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_sysview.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_mkdir.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_incremental_restore_finalize.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_drop_view.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_initiate_build_index.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_just_reject.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_table.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_sequence.cpp |55.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_restore_backup_collection.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rotate_cdc_stream.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_side_effects.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__shred_manager.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard__operation_split_merge.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_schema.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__state_changed_reply.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__serverless_storage_billing.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__sync_update_tenants.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__publish_to_scheme_board.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_memory_changes.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__unmark_restore_tables.cpp |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__list.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log_fragment.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__upgrade_access_database.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__forget.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__table_stats_histogram.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_backup_incremental__get.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_audit_log.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__get.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__forget.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__cancel.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__tenant_shred_manager.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_helpers.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__create.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_cdc_stream_common.cpp |55.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__list.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_effective_acl.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index_tx_base.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_subop_state_types.h_serialized.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_domain_links.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_alter_fs.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__forget.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_svp_migration.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_create_backup.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_helpers.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_flow_proposals.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_incremental_restore_scan.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/schemeshard_import__create.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_export_uploaders.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_build_index__progress.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__cancel.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_flow_proposals.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_scheme_query_executor.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_helpers.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__list.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_login_helper.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__get.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import__forget.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_info_types.h_serialized.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_index_build_info.h_serialized.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_index_build_info.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_element.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_self_pinger.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_subop_state_types.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_scheme_builders.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_index_utils.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__get.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_tx_infly.cpp |55.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/user_attributes.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_import_getters.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_identificators.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_shard_deleter.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_info_types.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_xxport__helpers.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_system_names.cpp |55.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_types.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_subop_types.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/schemeshard_types.h_serialized.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_sysviews_update.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_restore_incremental__list.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_validate_ttl.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard__operation_move_index.cpp |55.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_path_describer.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_lister/interface/libcore-url_lister-interface.a |55.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/core/histogram/libessentials-core-histogram.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/yaml/libcore-viewer-yaml.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/events/libcore-wrappers-events.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/download/libcore-file_storage-download.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cfg/libymq-actor-cfg.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/http_download/libcore-file_storage-http_download.a |54.6%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/build_info.cpp |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/libessentials-core-file_storage.a |54.7%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svn_interface.c |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/libcore-file_storage-proto.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceproxy/public/libtx-sequenceproxy-public.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cloud_events/libymq-actor-cloud_events.a |54.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |54.5%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/build_info/build_info_static.cpp |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/attributes/libcore-ymq-attributes.a |54.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/cloud_events/proto/libactor-cloud_events-proto.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/minsketch/libessentials-core-minsketch.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/error/libcore-ymq-error.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/langver/libessentials-core-langver.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/proto/libcore-ymq-proto.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/common/libymq-queues-common.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/layers/libessentials-core-layers.a |54.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/base/libcore-ymq-base.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/common_opt/libessentials-core-common_opt.a |54.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/libydb-library-aclib.a |54.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/std/libymq-queues-std.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/actor_type/liblibrary-actors-actor_type.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/accessor/libydb-library-accessor.a |55.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/queues/fifo/libymq-queues-fifo.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ydb_convert/libydb-core-ydb_convert.a |55.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/liblibrary-aclib-protos.a |55.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/schemeshard_impl.cpp |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/dnscachelib/liblibrary-actors-dnscachelib.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/libydb-core-wrappers.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/http/libcore-ymq-http.a |55.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/async/liblibrary-actors-async.a |55.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/harmonizer/libactors-core-harmonizer.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/protos/libcore-viewer-protos.a |55.3%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/core/pg_settings/libessentials-core-pg_settings.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/address/libactors-interconnect-address.a |55.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/storage/interface/libqplayer-storage-interface.a |55.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/ydb/libydb-services-ydb.a |55.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/mock/libactors-interconnect-mock.a |55.5%| [CC] {default-linux-x86_64, release, asan} $(S)/library/cpp/svnversion/svnversion.cpp |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/url_lister/libcore-qplayer-url_lister.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/qplayer/udf_resolver/libcore-qplayer-udf_resolver.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/extract_predicate/libessentials-core-extract_predicate.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/poller/libactors-interconnect-poller.a |55.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/prof/liblibrary-actors-prof.a |55.7%| [CC] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/sandbox.cpp |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/rdma/cq_actor/libinterconnect-rdma-cq_actor.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/rdma/libactors-interconnect-rdma.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/log_backend/liblibrary-actors-log_backend.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/memory_log/liblibrary-actors-memory_log.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/mounts/libcore-services-mounts.global.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/sql_types/libessentials-core-sql_types.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/http/liblibrary-actors-http.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/common/libactors-testlib-common.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/core/liblibrary-actors-core.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/util/liblibrary-actors-util.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/gateway/libproviders-common-gateway.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr4/libparser-proto_ast-antlr4.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/url_preprocessing/interface/libcore-url_preprocessing-interface.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Columns/liblibrary-arrow_clickhouse-Columns.a |55.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/services/libessentials-core-services.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/testlib/liblibrary-actors-testlib.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/arrow/libessentials-minikql-arrow.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/wilson/liblibrary-actors-wilson.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/user_data/libessentials-core-user_data.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.global.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/liblibrary-actors-protos.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/Common/liblibrary-arrow_clickhouse-Common.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/DataStreams/liblibrary-arrow_clickhouse-DataStreams.a |55.9%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/library/conclusion/libydb-library-conclusion.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/libessentials-core-issue.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/peephole_opt/libessentials-core-peephole_opt.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/chunks_limiter/libydb-library-chunks_limiter.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_parquet/libydb-library-arrow_parquet.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/mock/liblibrary-folder_service-mock.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/protos/liblibrary-db_pool-protos.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/libydb-library-folder_service.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/codegen/llvm16/libminikql-codegen-llvm16.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/db_pool/libydb-library-db_pool.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_kernels/libydb-library-arrow_kernels.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/datetime/libessentials-minikql-datetime.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/liblibrary-folder_service-proto.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/hash/liblibrary-formats-arrow-hash.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/llvm16/libminikql-computation-llvm16.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/scalar/liblibrary-formats-arrow-scalar.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/converter/libarrow-csv-converter.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/interconnect/liblibrary-actors-interconnect.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/modifier/liblibrary-formats-arrow-modifier.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/hyperscan/libjsonpath-rewrapper-hyperscan.global.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/re2/libjsonpath-rewrapper-re2.global.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/lexer_common/libessentials-parser-lexer_common.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/simple_builder/liblibrary-formats-arrow-simple_builder.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/liblibrary-formats-arrow-protos.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/splitter/liblibrary-formats-arrow-splitter.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/switch/liblibrary-formats-arrow-switch.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/transformer/liblibrary-formats-arrow-transformer.a |55.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/libminikql-jsonpath-rewrapper.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/fyamlcpp/libydb-library-fyamlcpp.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/rewrapper/proto/libjsonpath-rewrapper-proto.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/global_plugins/libydb-library-global_plugins.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/validation/liblibrary-formats-arrow-validation.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/common/antlr4/libparser-common-antlr4.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/liblibrary-formats-arrow.a |56.0%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/parser/common/libessentials-parser-common.a |56.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/keys/libydb-library-keys.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/authorization/liblibrary-http_proxy-authorization.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/libessentials-minikql-jsonpath.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/logger/libydb-library-logger.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/grpc/server/liblibrary-grpc-server.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/cache/liblibrary-login-cache.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/http_proxy/error/liblibrary-http_proxy-error.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/account_lockout/liblibrary-login-account_lockout.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/sasl/liblibrary-login-sasl.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/password_checker/liblibrary-login-password_checker.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/jsonpath/parser/libminikql-jsonpath-parser.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/antlr3/libparser-proto_ast-antlr3.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/dq/libproviders-common-dq.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/liblibrary-login-protos.a |56.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_wrapper/interface/libparser-pg_wrapper-interface.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/hashes_checker/liblibrary-login-hashes_checker.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/libessentials-parser-pg_catalog.global.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/naming_conventions/libydb-library-naming_conventions.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/pg_catalog/proto/libparser-pg_catalog-proto.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_clickhouse/libydb-library-arrow_clickhouse.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/libydb-library-login.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/protos/liblibrary-pdisk_io-protos.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/liblibrary-mkql_proto-protos.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/libydb-library-mkql_proto.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pretty_types_print/protobuf/liblibrary-pretty_types_print-protobuf.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/deprecated/read_batch_converter/libpersqueue-deprecated-read_batch_converter.a |56.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ncloud/impl/liblibrary-ncloud-impl.a |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_addmember.cpp |56.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_append.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_factory.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/topic_parser/liblibrary-persqueue-topic_parser.a |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_exists.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_collect.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/plan2svg/libydb-library-plan2svg.a |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/pdisk_io/libydb-library-pdisk_io.a |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_hopping.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_zip.cpp |56.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/type_ann/libessentials-core-type_ann.a |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_while.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_withcontext.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_visitall.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_chain_map.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_way.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_size.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_iterator.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_join_dict.cpp |56.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/libyql-essentials-minikql.a |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_iterable.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_container.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_contains.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_flow.cpp |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fold.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_aggrcount.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_apply.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_getelem.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chopper.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_condense1.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_func.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_scalar_apply.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_skiptake.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_count.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_logical.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_decimal.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_just.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_check_args.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chain1_map.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_callable.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_chain_map.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_compress.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_coalesce.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_enumerate.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_some.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_if.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_element.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_sum.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_dynamic_variant.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_discard.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_div.cpp |56.4%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_top.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_dictitems.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_flatmap.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_exists.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_map_join.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_ensure.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_expand_map.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_mod.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_guess.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_condense.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_extend.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_hasitems.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/libydb-core-viewer.a |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_decimal_mul.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fromyson.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_factory.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_frombytes.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_blocks.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/computation/libessentials-minikql-computation.a |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_filter.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg_minmax.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_combine.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fromstring.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_group.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_fold1.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_invoke.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_grace_join.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_grace_join_imp.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_ifpresent.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_if.cpp |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/libyql-essentials-core.a |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_coalesce.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_join.cpp |56.7%| [CC] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_removemember.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_reduce.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_narrow_map.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_length.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_lazy_list.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_linear.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_map.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_mutdict.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_logical.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_lookup.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_list.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_rows_formatter.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_listfromrange.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize_measure_arg.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_mapnext.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_prepend.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_now.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_null.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_nop.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_next_value.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_multimap.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_multihopping.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_pickle.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_safe_circular_buffer.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_random.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_seq.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_replicate.cpp |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_reverse.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_rh_hash.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_match_recognize.cpp |56.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_antlr4/libproto_ast-gen-v1_antlr4.a |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_squeeze_state.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_map_join.cpp |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_queue.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_timezone.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_squeeze_to_list.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_source.cpp |56.6%| [CC] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_range.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_skip.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_round.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_udf.cpp |56.6%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_time_order_recover.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_take.cpp |56.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |56.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/result_set_format/tests/ydb-tests-stress-result_set_format-tests |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tostring.cpp |56.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |56.3%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_sort.cpp |56.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |56.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tooptional.cpp |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_toindexdict.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_switch.cpp |56.7%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_varitem.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_tobytes.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_unwrap.cpp |56.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |57.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |56.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_chopper.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_filter.cpp |56.8%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_condense.cpp |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |56.9%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_heap.cpp |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_map.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_top_sort.cpp |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_weakmember.cpp |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |56.9%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.a |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.0%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_wide_combine.cpp |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/protobuf_printer/libydb-library-protobuf_printer.a |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/dom/libessentials-minikql-dom.a |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/query_actor/libydb-library-query_actor.a |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schine/liblibrary-schlab-schine.a |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/protos/liblibrary-schlab-protos.a |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_todict.cpp |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/mon/liblibrary-schlab-mon.global.a |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest 
|57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/probes/liblibrary-schlab-probes.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/security/libydb-library-security.a |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/schlab/schoot/liblibrary-schlab-schoot.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/activation/libproviders-common-activation.a |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/arrow/libcommon-codec-arrow.a |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signal_backtrace/libydb-library-signal_backtrace.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/jsonpath/libproto_ast-gen-jsonpath.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/libydb-library-services.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/config/libproviders-common-config.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/stock/liblibrary-workload-stock.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/arrow_resolve/libproviders-common-arrow_resolve.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/slide_limiter/service/liblibrary-slide_limiter-service.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/signals/libydb-library-signals.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/slide_limiter/usage/liblibrary-slide_limiter-usage.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/abstract/liblibrary-workload-abstract.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/table_creator/libydb-library-table_creator.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/kv/liblibrary-workload-kv.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/codec/libproviders-common-codec.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/common/libdq-actors-common.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_json/libydb-library-yaml_json.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/protos/libyaml-config-protos.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/liblibrary-ydb_issue-proto.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/libydb-library-ydb_issue.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/main/libpython-runtime_py3-main.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/libyql-essentials-protos.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/registry/libpython-symbols-registry.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/public/liblibrary-yaml_config-public.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/comp_nodes/libproviders-common-comp_nodes.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/mkql_dq/libproviders-yt-mkql_dq.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_common/libpy3python-testing-yatest_common.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.global.a 
|57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/libc/libpython-symbols-libc.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/module/libpy3python-symbols-module.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/strings/libpy3library-python-strings.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/ymq/actor/libcore-ymq-actor.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/svn_version/libpy3library-python-svn_version.global.a |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/temp_files/libyt-lib-temp_files.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/datashard/libcore-tx-datashard.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/symbols/python/libpy3cpython-symbols-python.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yaml_config/libydb-library-yaml_config.a |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/libfq-libs-checkpointing.a |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |56.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |56.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/cloud_audit/libfq-libs-cloud_audit.a |56.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydbd/ydbd |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0/libproto_ast-gen-v0.a |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/ut_service/fast/ydb-core-kqp-federated_query-ut_service-fast |57.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/filter/libpy3python-testing-filter.global.a |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.4%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yt/yt/library/tz_types/libyt-library-tz_types.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/erasure/libyt-library-erasure.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/switch/libformats-arrow-switch.a |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/ycloud/impl/liblibrary-ycloud-impl.a |57.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/libyt-library-profiling.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/comp_nodes/llvm16/mkql_block_agg.cpp |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/kqprun |57.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/ctas/ctas |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/result_set_format/result_set_format |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.a |57.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/common/libpy3tests-stress-common.global.a |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/workload/type/libpy3result_set_format-workload-type.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/responses/py3/libpy3python-responses-py3.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/stress/result_set_format/workload/libpy3stress-result_set_format-workload.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.global.a |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |57.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/boto3/py3/libpy3python-boto3-py3.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/jmespath/py3/libpy3python-jmespath-py3.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/MarkupSafe/py3/libpy3python-MarkupSafe-py3.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/libf2c/libcontrib-libs-libf2c.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v0_proto_split/libproto_ast-gen-v0_proto_split.a |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/objcopy_1de592266ca9bc1d10b20d8e9a.o |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/codes/libservices-datastreams-codes.a |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/objcopy_e25036fa51e72ace049084c308.o |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Jinja2/py3/libpy3python-Jinja2-py3.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Werkzeug/py3/libpy3python-Werkzeug-py3.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_ansi_antlr4/libproto_ast-gen-v1_ansi_antlr4.a |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/tests/objcopy_c4b248e24a6215fa53b9e5552d.o |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/psutil/py3/libpy3python-psutil-py3.a |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_keys.cpp |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/tests/objcopy_51c4be7ece033a38cc2a2dfb64.o |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/tests/objcopy_ab542bc0d25a481768cec1ad31.o |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/test_meta/libpy3tests-library-test_meta.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/recipes/common/libpy3library-recipes-common.global.a |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/view/view_ut.cpp |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_column_stats.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/xmltodict/py3/libpy3python-xmltodict-py3.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/recipe/libpy3python-testing-recipe.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/s3transfer/py3/libpy3python-s3transfer-py3.global.a |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_5294a064c14cf5a49516321590.o |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/stress/libpy3tests-library-stress.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/lib/libpy3tests-datashard-lib.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cblas/libcontrib-libs-cblas.a |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/tests/objcopy_2650fc91e1b7439f48496ea51e.o |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/lib/libpy3tests-sql-lib.global.a |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/tests/objcopy_c7c229be41e9b028572ad1aab3.o |57.9%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/stress/kv/tests/objcopy_08f7acdb6eb761b28bf6990862.o |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/test_events_writer.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.global.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/undumpable/libyt-library-undumpable.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/apache/arrow/cpp/src/arrow/python/libpy3src-arrow-python.a |57.9%| PREPARE $(GDB) - 16.79 MB |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/ytprof/api/liblibrary-ytprof-api.a |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/url_mapper/libyt-lib-url_mapper.a |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/yc_search_ut/index_events_processor_ut.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/dump_helpers/libyt-lib-dump_helpers.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/resource/libpy3library-python-resource.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/core/libyt_proto-yt-core.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/numpy/random/libpy3py3-numpy-random.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part1/liblibs-clapack-part1.a |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/gc_ut.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/yaml-cpp/libcontrib-libs-yaml-cpp.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/packers/liblibrary-cpp-packers.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/reservoir_sampling/libpy3library-python-reservoir_sampling.global.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tvm/libyt-library-tvm.a |56.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/formats/libyt_proto-yt-formats.a |56.6%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/client/libyt-yt-client.a |56.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |56.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/py3/libpy3python-moto-py3.global.a |56.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |56.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut |56.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wheel/libpy3contrib-python-wheel.global.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/clapack/part2/liblibs-clapack-part2.a |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/yt/logging/libcpp-yt-logging.a |56.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/config/libydb-services-config.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/mime/types/libcpp-mime-types.a |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/func/libpy3library-python-func.global.a |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |56.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/scheduler/libcpp-messagebus-scheduler.a |56.9%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/restarts/objcopy_277b7e8f79021687bec95be8db.o |56.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |57.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/common/liblibrary-testlib-common.a |57.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/batch_operations/unittest |57.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_checkpoint_storage_ut.cpp |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCDisassembler/liblib-MC-MCDisassembler.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/lib/cmds/libpy3tools-lib-cmds.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/libpy3library-python-pytest.global.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/libpy3ydb_recipe.global.a |57.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/tools/ydb_recipe/objcopy_c55121179eeb3b5753498290c4.o |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/libgeneric-connector-libcpp.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/connector/libcpp/ut_helpers/libconnector-libcpp-ut_helpers.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/federated_query/common/libut-federated_query-common.a |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/static/libcommon-stat-static.a |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |57.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |57.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/client/linux/libsrc-client-linux.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json/libjson_udf.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/lib/libcommon-ip_base-lib.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/ip_base/libip_udf.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/croaring/libcontrib-libs-croaring.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/lib/libcommon-compress_base-lib.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydbd/main.cpp |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/proto_ast/gen/v1_proto_split_antlr4/libproto_ast-gen-v1_proto_split_antlr4.a |57.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/generic/actors/ut/yql_generic_lookup_actor_ut.cpp |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/compress_base/libcompress_udf.global.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydbd/export/libapps-ydbd-export.a |57.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.global.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/federated_query/ut_service/common/libfederated_query-ut_service-common.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/unicode/punycode/libcpp-unicode-punycode.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/sfh/libcpp-digest-sfh.a |57.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/xz/libcpp-streams-xz.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/hyperloglog/liblibrary-cpp-hyperloglog.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/tld/liblibrary-cpp-tld.a |57.4%| 
[AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/top_keeper/libcpp-containers-top_keeper.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/breakpad/libydb-library-breakpad.global.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/federated_query/ut_service/fast/kqp_federated_query_actors_ut.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/sequenceshard/libcore-tx-sequenceshard.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/statistics_internal/libstatistics_internal_udf.global.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/libcpp-histogram-adaptive.a |57.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/roaring/libroaring.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.global.a |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.global.a |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/objcopy_4e3ea6c3c5a0438f05942dbc81.o |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/program/libformats-arrow-program.a |57.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_replication.cpp |57.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.global.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/proto/libkqprun-src-proto.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/mkql_simple_file/libproviders-common-mkql_simple_file.a |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/pq/gateway/dummy/libpq-gateway-dummy.a |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/objcopy_28c396580e7e319c4a82e15fc9.o |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/tests/objcopy_75e82e9b2ff2024ae902b7d5e4.o |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/workload/libpy3stress-ctas-workload.global.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/libpy3ctas.global.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest_main/libcpp-testing-gtest_main.a |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_5923b362516b6632b9769a5db2.o |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/botocore/py3/libpy3python-botocore-py3.global.a |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_6b37760fb6a28054d0feafd61d.o |57.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/ctas/objcopy_3cb499a0fcc9aa014af2855233.o |57.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/tests/objcopy_278b1a63a14648a80c4b930adb.o |57.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/udf/service/stub/libudf-service-stub.global.a |57.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/gateway/ut/metadata_conversion.cpp |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/libpy3result_set_format.global.a |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/result_set_format/objcopy_ab0068aecc73dfe57505e1cc57.o |57.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/tests/objcopy_d0e1cde98d2ab34e72d18aae9c.o |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gtest/libcpp-testing-gtest.a |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/workload/libpy3stress-node_broker-workload.global.a |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/tests/objcopy_953328e5c3275a286b65dc3b1d.o |57.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/export_s3_buffer_ut.cpp |57.8%| [AR] {BAZEL_DOWNLOAD} 
$(B)/contrib/python/pyarrow/libpy3contrib-python-pyarrow.a |57.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/tests/objcopy_d2d4e3343da9b011ee6a983244.o |57.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/kqprun/kqprun.cpp |57.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/histogram/adaptive/protos/libhistogram-adaptive-protos.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/runlib/libtools-kqprun-runlib.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/udfs/common/knn/libknn_udf.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/lib/libcommon-url_base-lib.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/pytest/plugins/libpy3python-pytest-plugins.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.global.a |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/libpy3core-config-protos.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/config/libcpp-messagebus-config.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/generic/expr_nodes/libproviders-generic-expr_nodes.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/actor/libmessagebus_actor.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/protobuf/libmessagebus_protobuf.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/base/libpublic-lib-base.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/src/libtools-kqprun-src.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/archive/liblibrary-cpp-archive.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/runtime_py3/libpy3library-python-runtime_py3.global.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/breakpad/src/liblibs-breakpad-src.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.global.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/storage_service_ydb_ut.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/construction/libchanges-actualization-construction.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/abstract/libengines-changes-abstract.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/gateway/file/libyt-gateway-file.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/health/libfq-libs-health.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/static/libcommon-topfreq-static.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/replication/ut_helpers/libtx-replication-ut_helpers.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/liblibrary-cpp-lwtrace.a |58.2%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_actors_ut.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/dictionary/libchanges-compaction-dictionary.global.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_partition_stats/ut_top_cpu_usage.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/deprecated/client/liblib-deprecated-client.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_cache.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/chaos_lease_base.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_transaction.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/composite_compare.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/workload_service/ut/common/libworkload_service-ut-common.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Util/liblibs-poco-Util.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/comparator.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/internal_client.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_base.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_sort_schema.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/merge_table_schemas.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/validate_logical_type.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound_compressor.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/adapters.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/column_rename_descriptor.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/chunk_stripe_statistics.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/blob_reader.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/validator.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/signature.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/provided.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/generator.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/check_schema_compatibility.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_builder.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/spec_patch.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/config.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/public.cpp |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt_proto/yt/client/libyt_proto-yt-client.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/config.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/public.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/object_client/helpers.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/helpers.cpp |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/numpy/py3/libpy3python-numpy-py3.a |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/helpers.cpp |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.global.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/workload.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/config.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_client.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/skynet.cpp 
|58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_partition_reader.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/transaction.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/bundle_controller_client/bundle_controller_settings.cpp |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openssl/libcontrib-libs-openssl.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/public.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/public.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/cluster_directory.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/address_helpers.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_reader.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rowset.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_reader.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/helpers.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/queue_transaction_mixin.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/chaos_lease.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_file_client.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/private.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/query_tracker_client.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/journal_client.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/options.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/etc_client.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/dynamic_table_transaction_mixin.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/helpers.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_client.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_reader.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/file_writer.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_table_session.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/target_cluster_injecting_channel.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/delegating_client.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/cypress_client/public.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_writer.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_stream.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/security_client.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/row_batch_writer.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/ready_event_reader_base.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_writer.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/timestamp_provider.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/sticky_transaction_pool.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/connection_impl.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/journal_reader.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_cache.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/wire_row_stream.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/table_client.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/public.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/complex_types/check_type_compatibility.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/shuffle_client.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/election/public.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/helpers.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/client_common.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/persistent_queue.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/packet.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/read_limit.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/infinite_entity.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/table_mount_cache.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/data_statistics.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/config.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/check_yson_token.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/public.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/time_text.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/helpers.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/transaction_impl.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/chunk_replica.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/operation_client.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/distributed_file_session.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/helpers.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/merge_complex_types.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/protocol.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/job_tracker_client/public.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/method_helpers.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/io_tags.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hydra/version.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/hive/timestamp_map.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/invoke_builtins/llvm16/libminikql-invoke_builtins-llvm16.a |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/uuid_text.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/file_client/config.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/journal_client/config.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/config.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/misc/config.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/complex_types/yson_format_conversion.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_id_or_alias.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/services/datastreams/libydb-services-datastreams.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/common.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/query_client/query_statistics.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/partition_reader.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/producer_client.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/node_tracker_client/node_directory.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chunk_client/config.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/public.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/signature/dynamic.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/client/queue_client/queue_rowset.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/queue_client/consumer_client.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/kafka/requests.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/access_control.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/scheduler/operation_cache.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/acl.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/security_client/helpers.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/constrained_schema.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key_bound.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/key.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/pipe.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_codegen_cpp.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/public.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/name_table.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/record_helpers.cpp |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_value.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_batch.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/helpers.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/row_buffer.cpp |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/backup/backup_stress |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_base.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_upload_options.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_row_reorderer.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/logical_type.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/serialize.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/timestamped_schema_helpers.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schemaless_dynamic_table_writer.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_output.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/schema_serialization_helpers.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/value_consumer.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/table_consumer.cpp |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/columnar_statistics.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut 
|58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unordered_schemaful_reader.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/remote_timestamp_provider.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/unversioned_row.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_reader.cpp |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/wire_protocol.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_io_options.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/helpers.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/public.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/table_client/versioned_row.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/noop_timestamp_provider.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/timestamp_provider_base.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/config.cpp |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/batching_timestamp_provider.cpp |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/watermark_runtime_data.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/config.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/transaction_client/helpers.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/parser_detail.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/datastreams_helpers/libpy3tests-tools-datastreams_helpers.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/retry/libpy3library-python-retry.global.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/chunk_queue/libcpp-threading-chunk_queue.a |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/tablet_client/table_mount_cache_detail.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/discovery/kqp_discovery_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/ut_sequence/dsproxy_config_retrieval.cpp |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/objcopy_e8c94c485e81b4b2899f52f594.o |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/objcopy_3bb523a1011c0a7019f2684a90.o |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_0035b673555f394234ae284e25.o |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_compaction.cpp |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_45b6981aed17dda33d43217f52.o |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/workload/libpy3stress-s3_backups-workload.global.a |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/test/objcopy_9818d2b70aad7db98a0f9c044c.o |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_ut.cpp |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/ydb/ydb |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/tests/objcopy_cd57da3671b96739ee73293fb1.o |58.4%| 
[PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/objcopy_dcbdf62672440a626e79a64e14.o |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fq_runner/libpy3tests-tools-fq_runner.global.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ut_utils/libpersqueue_public-ut-ut_utils.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer_ut.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_dlq_mover_ut.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/kqprun/recipe/libpy3kqprun_recipe.global.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/ypath/rich.cpp |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_counters_ut.cpp |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/topic/utils/libintegration-topic-utils.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/yarl/libpy3contrib-python-yarl.global.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/public/mlp/ut/common/libmlp-ut-common.a |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/hmac/libfq-libs-hmac.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/llhttp/libcontrib-restricted-llhttp.a |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_disk_quotas.cpp |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/malloc/api/libcpp-malloc-api.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/windows/libpy3library-python-windows.global.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/chaos_client/replication_card_serialization.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/logger/liblibrary-cpp-logger.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/yson/libcpp-json-yson.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp2/libcontrib-libs-nghttp2.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/lib/libpy3python-import_tracing-lib.global.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_schema/libfq-libs-db_schema.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpoint_storage/ut/ydb_state_storage_ut.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/l2_distance/liblibrary-cpp-l2_distance.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/www/libcpp-messagebus-www.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/urllib3/py3/libpy3python-urllib3-py3.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/hash/libformats-arrow-hash.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/table_writer_ut.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_labeled.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/pcdata/libcpp-html-pcdata.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/json/easy_parse/libcpp-json-easy_parse.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/CFGuard/liblib-Transforms-CFGuard.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_registry.cpp |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/wcwidth/py3/libpy3python-wcwidth-py3.global.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/db_id_async_resolver_impl/libfq-libs-db_id_async_resolver_impl.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/html/escape/libcpp-html-escape.a |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/rate_limiter/rate_limiter_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/nghttp3/libcontrib-libs-nghttp3.a |58.9%| 
[AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/misc/libcpp-http-misc.a |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/partition/ut/ydb-core-persqueue-pqtablet-partition-ut |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_scan.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/sse2/libinternal-proxies-sse2.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/raw_socket/libydb-core-raw_socket.a |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/config/ut/ydb-services-config-ut |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/build/libyt-yt-build.a |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/enumbitset/liblibrary-cpp-enumbitset.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/murmur/libcpp-digest-murmur.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Demangle/libllvm16-lib-Demangle.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/proto/liblibs-control_plane_storage-proto.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ut_utils/libtopic-ut-ut_utils.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/execprofile/liblibrary-cpp-execprofile.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/ngtcp2/libcontrib-libs-ngtcp2.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/actualization/controller/libchanges-actualization-controller.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/lib2/py/libpy3python3-lib2-py.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/Orc/TargetProcess/libExecutionEngine-Orc-TargetProcess.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/events/libfq-libs-events.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/dns/liblibrary-cpp-dns.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/re2/libyt-library-re2.a |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/transfer/transfer |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/view/show_create_view |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/liblibrary-cpp-messagebus.a |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/getopt/small/libcpp-getopt-small.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/public/libcore-quoter-public.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libraries/liblber/libopenldap-libraries-liblber.a |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/tests/objcopy_2983697b65fe3fbc437e0acf1f.o |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/tests/objcopy_aac602e1ee9075b2856f1975ca.o |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/tests/objcopy_771af70678639da3d434ea1416.o |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/gateway/libfq-libs-gateway.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/workload/libpy3stress-backup-workload.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/internal/liblibs-control_plane_storage-internal.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_tables_ut.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/quota_manager/ut_helpers/liblibs-quota_manager-ut_helpers.a |59.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/persqueue/common/libcore-persqueue-common.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/libpy3backup_stress.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/deprecated/split/libcpp-deprecated-split.a |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/backup/objcopy_4f24d349611ecfcf19e3001adb.o |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/DebugInfo/DWARF/liblib-DebugInfo-DWARF.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protoc/libpy3protobuf-builtin_proto-protos_from_protoc.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.global.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_kqp.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/abstract/libchanges-compaction-abstract.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ExecutionEngine/RuntimeDyld/liblib-ExecutionEngine-RuntimeDyld.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/grpc/libfq-libs-grpc.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_vacuum.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/import_tracing/constructor/libpy3python-import_tracing-constructor.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/graph_params/proto/liblibs-graph_params-proto.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/changes/compaction/common/libchanges-compaction-common.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/builtin_proto/protos_from_protobuf/libpy3protobuf-builtin_proto-protos_from_protobuf.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/ssse3/libinternal-proxies-ssse3.a |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/tests/objcopy_e508a8abac843a0a0f92fc62eb.o |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.global.a |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/objcopy_7f02665786b7523f76c02ad1dd.o |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/public_http/http_router_ut.cpp |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Object/libllvm16-lib-Object.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/workload/libpy3stress-cdc-workload.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/digest/argonish/internal/proxies/avx2/libinternal-proxies-avx2.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Frontend/OpenMP/liblib-Frontend-OpenMP.a |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/executer_actor/kqp_executer_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/client/api/rpc_proxy/client_impl.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_storage_ut.cpp |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/objcopy_7c0098f27edc25092453a8033c.o |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/tests/objcopy_b9fcf9641e3e569e88014f85ff.o |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Lib/libpy3tools-python3-Lib.global.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/quoter/libydb-core-quoter.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/schemeshard/ut_helpers/libtx-schemeshard-ut_helpers.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/workload/libpy3stress-viewer-workload.global.a |59.3%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/stress/viewer/tests/objcopy_e55498abceca534315a6428452.o |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/tests/objcopy_8e0f2cf91b35e6051ad82535a4.o |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre16/liblibs-pcre-pcre16.a |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_0359848ae21601186c5b0d9873.o |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/init/libfq-libs-init.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/libpy3workload_topic_kafka.global.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_reassign.cpp |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/objcopy_33061e6ec0580baa7f93f1ce23.o |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/ut/graph_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/lfalloc/dbg_info/libcpp-lfalloc-dbg_info.a |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/restarts/objcopy_afdf6d60c4f76ae91a235d460b.o |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/Modules/_sqlite/libpy3python3-Modules-_sqlite.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/printer/libformats-arrow-printer.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/control_plane_proxy/ut/control_plane_proxy_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/t1ha/libcontrib-libs-t1ha.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/libcpp-streams-lz.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/keyvalue/grpc_service_ut.cpp |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memory_controller_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Linker/libllvm16-lib-Linker.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/antlr4-c3/libcontrib-libs-antlr4-c3.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/memory_controller/memtable_collection_ut.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/highlight/libsql-v1-highlight.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-io/librestricted-aws-aws-c-io.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure_ansi/libv1-lexer-antlr4_pure_ansi.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/antlr4_pure/libv1-lexer-antlr4_pure.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/tbb/libcontrib-libs-tbb.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/highlight/libsql-v1-highlight.a |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/backup/libkikimr_backup.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/core/libv1-complete-core.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/union/libname-service-union.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/dictionary/libformats-arrow-dictionary.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Foundation/liblibs-poco-Foundation.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/topic/libtopic.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/libcore-formats-arrow.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cache/local/libname-cache-local.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/libcomplete-name-object.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cache/libcomplete-name-cache.a |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/objcopy_774cbd1f10ee287899289ecb3f.o |59.5%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.global.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sqs_workload/sqs_json/libsqs_json.a |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/apps/ydb/main.cpp |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/cms/libsrc-client-cms.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/transfer_workload/libtransfer_workload.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpch/libbenchmarks-queries-tpch.global.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/queries/tpcds/libbenchmarks-queries-tpcds.global.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/arrow_inference/libydb-library-arrow_inference.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/clickbench/liblibrary-workload-clickbench.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/benchmark_base/liblibrary-workload-benchmark_base.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/monitoring/libsrc-client-monitoring.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/ydb/commands/libcommands.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.global.a |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/dread_cache_service/ut/unittest |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_common/libstreams-factory-open_common.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpcds-dbgen/libbenchmarks-gen-tpcds-dbgen.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/lz4/libstreams-lz-lz4.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/bucket_quoter/liblibrary-cpp-bucket_quoter.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/factory/open_by_signature/libstreams-factory-open_by_signature.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/topic_workload/libtopic_workload.a |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/streams/lz/snappy/libstreams-lz-snappy.a |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.global.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/libsql-v1-complete.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/import/liblib-ydb_cli-import.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/threading/local_executor/libcpp-threading-local_executor.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/antlr4/libv1-complete-antlr4.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/ut/consumer_offset_tracker_ut.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/sqs_workload/libsqs_workload.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/binding/libname-service-binding.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/libcomplete-name-service.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/config/bsconfig_ut.cpp |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/batch_operations/unittest |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/libname-object-simple.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/control_plane_storage/libfq-libs-control_plane_storage.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/impatient/libname-service-impatient.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/cached/libobject-simple-cached.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/syntax/libv1-complete-syntax.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.global.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/object/simple/static/libobject-simple-static.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/unistat/libmonlib-encode-unistat.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.global.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/column/libname-service-column.a |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/libpy3show_create_view.global.a |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_445797246443360525d31550d1.o |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_cd9abca883cad9b25e20bf2f08.o |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/debug/libsrc-client-debug.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/dump/liblib-ydb_cli-dump.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_range_ops.cpp |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/workload/libpy3show_create-view-workload.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/ydb_serializable/lib/libpy3tools-ydb_serializable-lib.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/workload/libpy3stress-transfer-workload.global.a |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/objcopy_9ccdc4f01b578a43bc35d4d519.o |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |59.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/objcopy_b632f28ee823f938d14c0e85f9.o |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/libpy3transfer.global.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/serializability/libpy3tests-library-serializability.global.a |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/ranking/libname-service-ranking.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/global/libcomplete-analysis-global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/schema/libname-service-schema.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/text/libv1-complete-text.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/reflect/libsql-v1-reflect.a |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/cdc/cdc |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/aws-sdk-cpp/aws-cpp-sdk-sqs/liblibs-aws-sdk-cpp-aws-cpp-sdk-sqs.a |59.8%| [AR] 
{BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/static/libname-service-static.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/local/libcomplete-analysis-local.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpc_base/liblibrary-workload-tpc_base.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/regex/libv1-lexer-regex.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/libcommands-interactive-highlight.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/monlib/libpy3library-python-monlib.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/session/libcommands-interactive-session.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/libydb_cli-commands-interactive.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.global.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/highlight/color/libinteractive-highlight-color.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/csv/table/libarrow-csv-table.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/query/liblibrary-workload-query.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/benchmarks/gen/tpch-dbgen/libbenchmarks-gen-tpch-dbgen.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.global.a |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/effects/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/ut/message_id_deduplicator_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/log/liblibrary-workload-log.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/stat_visualization/libpublic-lib-stat_visualization.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/mixed/liblibrary-workload-mixed.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/ai/tools/libinteractive-ai-tools.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/common/libcommands-interactive-common.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/query/liblibrary-workload-query.global.a |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.a |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_antlr4/libantlr_ast-gen-v1_antlr4.a |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_6508d12aaafde6f0a60fe8fff3.o |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/ai/models/libinteractive-ai-models.a |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/s3_backups/s3_backups |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_3ea8aa67e7c24c4f0e3b0406b9.o |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/ai/libcommands-interactive-ai.a |59.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serializable/objcopy_51b071d7746089933668451b33.o |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpch/liblibrary-workload-tpch.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/tracing/libyt-library-tracing.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/interactive/complete/libcommands-interactive-complete.a |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} 
$(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/parser/antlr_ast/gen/v1_ansi_antlr4/libantlr_ast-gen-v1_ansi_antlr4.a |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/ut/objcopy_bd84885c5c24478d181ba9e493.o |59.2%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |57.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/signals/libyt-library-signals.a |57.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite/liblibrary-formats-arrow-accessor-composite.a |57.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/mixedpy/workload_mixed |57.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/decimal/libyt-library-decimal.a |57.9%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/vector/liblibrary-workload-vector.a |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/tools/python3/libcontrib-tools-python3.a |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/Crypto/liblibs-poco-Crypto.a |57.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/common/liblibrary-formats-arrow-accessor-common.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.global.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/quantile_digest/libyt-library-quantile_digest.a |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcds/liblibrary-workload-tpcds.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/comptable/liblibrary-cpp-comptable.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/save_load/libformats-arrow-save_load.a |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/synchronization_service/libcompute-ydb-synchronization_service.a |58.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/farmhash/arch/sse41/libfarmhash-arch-sse41.a |58.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TargetParser/libllvm16-lib-TargetParser.a |58.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.a |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/providers/solomon/actors/ut/unittest |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/proto/libytflow-integration-proto.a |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/skiff/libyt-lib-skiff.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/serializer/libformats-arrow-serializer.global.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-http/librestricted-aws-aws-c-http.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/boost/container/librestricted-boost-container.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Remarks/libllvm16-lib-Remarks.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/splitter/libformats-arrow-splitter.a |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_counters.cpp |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/rows/libformats-arrow-rows.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/AsmParser/libTarget-X86-AsmParser.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/reader/libformats-arrow-reader.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/events/liblibs-audit-events.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/sorted_vector/libcpp-containers-sorted_vector.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_download/libyt-lib-yt_download.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_file_download/libyt-lib-yt_file_download.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/libllvm16-lib-Target.a |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/time_cast/time_cast_ut.cpp |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yt_token_resolver/libyt-lib-yt_token_resolver.a |58.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/transformer/libformats-arrow-transformer.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/schema/libyt-lib-schema.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/row_spec/libyt-lib-row_spec.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/ProfileData/libllvm16-lib-ProfileData.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/libydb-core-formats.a |58.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/expr_nodes/libproviders-ytflow-expr_nodes.a |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_2900a0c4957bb4f1bc1729508c.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_dac3ec236f3cba753ea226bb96.o |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/workload/tpcc/liblibrary-workload-tpcc.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/libyt-yt-core.global.a |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/tests/objcopy_c386e2211742a44d16094d73d0.o |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/tests/objcopy_6bdc69403f0fa7c03edbf19c28.o |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/simdjson/libcontrib-libs-simdjson.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/Disassembler/libTarget-X86-Disassembler.a |58.9%| 
[PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/tests/objcopy_ee633aebbed65e39e09fbf4271.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/tests/objcopy_d3749b17b0bc2057695c3e736a.o |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/containers/stack_array/libcpp-containers-stack_array.a |58.8%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/proto/libproviders-yt-proto.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/yson_helpers/libyt-lib-yson_helpers.a |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_e31620202d3ba8df14ff2a18e1.o |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_388aef0b6ac03d4f661ae7a30e.o |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/events/liblibs-checkpoint_storage-events.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/lib/res_pull/libyt-lib-res_pull.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/audit/libfq-libs-audit.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/proto/liblibs-checkpoint_storage-proto.a |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/query_cache/objcopy_f8b2cbafb1fed0e25bf9683c2d.o |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/formats/arrow/accessor/composite_serial/libarrow-accessor-composite_serial.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/libpy3libs-config-protos.global.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/pcre/pcre32/liblibs-pcre-pcre32.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/TextAPI/libllvm16-lib-TextAPI.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/ytflow/integration/interface/libytflow-integration-interface.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/minikql_compile/yql_expr_minikql_compile_ut.cpp |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ydb_cli/commands/libclicommands.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing/events/liblibs-checkpointing-events.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/opt/libproviders-yt-opt.a |58.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/common/libfq-libs-common.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-auth/librestricted-aws-aws-c-auth.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/lzmasdk/libcontrib-libs-lzmasdk.a |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/query_actor/query_actor_ut.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/http/io/libcpp-http-io.a |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/objcopy_7d7339f4588397fc771e31030c.o |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/cdc/libpy3cdc.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-mqtt/librestricted-aws-aws-c-mqtt.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpointing_common/libfq-libs-checkpointing_common.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/openldap/libcontrib-libs-openldap.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/MC/MCParser/liblib-MC-MCParser.a |59.0%| [AR] {BAZEL_DOWNLOAD} $(B)/library/python/testing/yatest_lib/libpy3python-testing-yatest_lib.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/libpy3ydb-core-protos.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/misc/isa_crc64/libisa-l_crc_yt_patch.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/https/libyt-core-https.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/compproto/liblibrary-cpp-compproto.a |59.2%| [AR] {BAZEL_DOWNLOAD} 
$(B)/library/cpp/codecs/liblibrary-cpp-codecs.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/numeric/libyt-library-numeric.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/AggressiveInstCombine/liblib-Transforms-AggressiveInstCombine.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/aws-c-compression/librestricted-aws-aws-c-compression.a |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/procfs/libyt-library-procfs.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/ydb/py3/libpy3python-ydb-py3.global.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/client/query_tracker_client/libyt-client-query_tracker_client.a |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/cxxsupp/libcxx/liblibs-cxxsupp-libcxx.a |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_auth.cpp |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/auth/libyt-library-auth.a |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/objcopy_59eb97971e5f83d3296e6c33b5.o |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/objcopy_5acd2383ed2cd599cfd64f7c8a.o |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/common/liblibs-compute-common.a |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/view/tests/objcopy_60e08504076128d310212c6460.o |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/libpy3s3_backups.global.a |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/s3_backups/objcopy_4508aef343f36758ea760320db.o |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/JSON/liblibs-poco-JSON.a |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_2492aafb6862566a2398c9f27e.o |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_3df021aac8504049c53286aea0.o |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Target/X86/MCTargetDesc/libTarget-X86-MCTargetDesc.a |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/tests/objcopy_e66920085df69f6f7e41547063.o |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/library/profiling/resource_tracker/liblibrary-profiling-resource_tracker.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/checkpoint_storage/libfq-libs-checkpoint_storage.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_gateway.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/poco/XML/liblibs-poco-XML.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_forwarding_gateway.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/Coroutines/liblib-Transforms-Coroutines.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/actors/libfq-libs-actors.a |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/control_plane/libcompute-ydb-control_plane.a |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_create.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_epoch.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_optimize.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_output.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_type_ann.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/aws/s2n/librestricted-aws-s2n.a 
|59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_join.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yt/yt/core/http/libyt-core-http.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_integration.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_sort.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/compute/ydb/liblibs-compute-ydb.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_dq_hybrid.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/overload_controlling_service_base.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/Transforms/IPO/liblib-Transforms-IPO.a |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/overload_controller.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |59.6%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/protobuf/libcontrib-libs-protobuf.a |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |59.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/column_table/ydb-core-transfer-ut-column_table |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_manager.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |59.7%| 
[CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/trie.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_helpers.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_key_range.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_map.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_write.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_options.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_alarm.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |59.8%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_push.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_merge.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_content.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_field_subset.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_fuse.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/llvm16/lib/CodeGen/libllvm16-lib-CodeGen.a |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_misc.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |60.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/size.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_io_utils.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_update.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_io_filter.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_block_input.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_finalize.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_ytql.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_constraints.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_cbo_helpers.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_trackable.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_exec.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasource_constraints.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_hash.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_type_ann.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery_partitions.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_datasink_exec.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_mkql_compiler.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_key.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_intent_determination.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_load_columnar_stats.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery_walk_folders.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_layers_integration.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(B)/yt/yql/providers/yt/provider/yql_yt_op_settings.h_serialized.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_ytflow_optimize.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_load_table_meta.cpp |60.2%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_reorder.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_optimize.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_op_settings.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_impl.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_io_discovery.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_optimize.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_provider_context.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_peephole.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table_desc.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_ytflow_integration.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/local_bypass.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adaptive_hedging_manager.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_wide_flow.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_horizontal_join.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_log_codec.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_helpers.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_weak_fields.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_lambda.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fair_share_hierarchical_queue.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/configurable_singleton_def.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/actions/future.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_helper.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/propagating_storage.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_helpers.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/phy_opt/yql_yt_phy_opt_partition.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/appendable_compressed_file.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_join_impl.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_table.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yql/providers/yt/provider/yql_yt_logical_optimize.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/file_log_writer.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backtrace.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yql/providers/yt/provider/yql_yt_physical_finalizing.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |60.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |60.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/memory_usage_tracker.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |60.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |60.6%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |60.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |60.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |60.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |60.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |60.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |60.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/inotify.cpp |60.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |60.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |60.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |60.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |60.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/misc/public.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_streaming/ut/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |60.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |60.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ydb_convert/ut/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize_dump.cpp |59.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |59.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |59.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |59.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data_integrity/unittest |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/build_index/ut/unittest |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |60.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |60.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |60.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |60.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |60.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/raw_socket/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/kqp/kqp_indexes/unittest |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |60.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |60.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... 
results_accumulator.log} |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |59.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |59.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/apps/dstool/ydb-dstool |59.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/result_set_format/tests/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot_isolation/unittest |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |59.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/public_http/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/config/ut/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay/ydb_query_replay |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |59.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/graph/shard/ut/unittest |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |59.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/streaming/tests/py3test |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |59.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/ut/ydb-core-mon-ut |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |59.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/pqtablet/partition/ut/gtest |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |59.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kafka/kafka_streams_test |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_93dc3386250916dfae1ecb9b13.o |59.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_64cecb639c5f85fbf868097a08.o |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_4826ee2207124da1bc398e3bd8.o |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/encryption/objcopy_3d6916930a438b51675ef6dda7.o |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_5b5c3367c789898aa5a6cae866.o |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_tests/objcopy_8e57113197bb359e3999b04aab.o |59.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |59.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/objcopy_c0a0299090f532c29f2ee6dc87.o |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} |59.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/libpy3workload_mixed.global.a |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/yt/kqp_yt_import/py3test |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/solomon/py3test |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/simple_queue/workload/libpy3stress-simple_queue-workload.global.a |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |59.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/kqp/kqp_query_session/unittest |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_helpers.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_rs/unittest |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceproxy/sequenceproxy_ut.cpp |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |59.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/list_topics/list_all_topics_ut.cpp |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/tests/objcopy_f022524c80d73c5ff5d59211dd.o |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sequenceshard/ut_sequenceshard.cpp |58.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/oltp_workload/tests/py3test |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/partition_stats/partition_stats_ut.cpp |58.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_vacuum/unittest |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/tests/objcopy_6dc907e08e2d6bd66a6c998081.o |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_init/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/tests/objcopy_4d329553dae503930cad7a3eec.o |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_stats.cpp |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/ut/ut_utils.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/workload/libpy3stress-streaming-workload.global.a |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/mon/ut_utils/libcore-mon-ut_utils.a |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_compaction/ut_compaction.cpp |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/topic_data_ut.cpp |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/wrappers/ut_helpers/libcore-wrappers-ut_helpers.a |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_transfer/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |59.1%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/clickhouse.pb.{h, cc} |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query_stats.pb.{h, cc} |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/nodes_manager.pb.{h, cc} |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/types/yql_types.pb.{h, cc} |59.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_query.pb.{h, cc} 
|59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/viewer/viewer_ut.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/quota_requester.cpp |59.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/providers/common/proto/gateways_config.pb.{h, cc} |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/server.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/run_ut.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_bandwidth_test/main.cpp |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/conveyor_composite/ut/ut_simple.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/ut_continuous_backup_reboots.cpp |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl_utility.cpp |59.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compile_service_config.{pb.h ... grpc.pb.h} |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/hooks/testing/libcolumnshard-hooks-testing.a |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/libpy3ydb-dstool.global.a |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/objcopy_fca89909cedb628068681e1038.o |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore_fs.cpp |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/objcopy_e0d6c3883613601e455029f31f.o |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/statistics/ut_common/libcore-statistics-ut_common.a |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ttl/ut_ttl.cpp |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ut_split_merge_reboots.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/tests/objcopy_f0d8fb718a757998dc9403df32.o |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_f4b44a5d280d0f27f5ffd278e8.o |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/hive.{pb.h ... 
grpc.pb.h} |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/workload/libpy3show_create-table-workload.global.a |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/libpy3kafka_streams_test.global.a |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source/ut_external_data_source.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |59.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_disk_states.{pb.h ... grpc.pb.h} |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_upload_rows.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/persqueue/tests/liblibrary-persqueue-tests.a |60.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_8685c3ae88e5169a5acffc7bc4.o |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_ut.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |59.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/objcopy_5fddfa8f171a3216cad65e02ab.o |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/json_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/dictionary_ut.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/sqs/libpy3tests-library-sqs.global.a |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_execute_mkql_ut.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/mon_ut.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_change_schema_ut.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/delete_ut.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/libpy3stress-oltp_workload-workload.global.a |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_private/oauth/libclient-yc_private-oauth.a |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_upload_rows/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_stats_ut.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |60.0%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |60.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/util/actorsys_test/libcore-util-actorsys_test.a |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/ut_incremental_restore_reboots.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/http_ut.cpp |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/ydb_backup_ut.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/secret/ut/ut_secret.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |60.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/datastreams_fixture/libhttp_proxy-ut-datastreams_fixture.a |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_blobstorage/read_only_vdisk.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/lib/libblobstorage-ut_blobstorage-lib.a |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/json_proto_conversion_ut.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |60.3%| PREPARE $(BLACK-1616416287) - 8.40 MB |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.4%| PREPARE $(RUFF-3583455953) - 3.24 MB |60.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/pdisk/mock/libblobstorage-pdisk-mock.a |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_continuous_backup/ut_continuous_backup.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |60.4%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |60.4%| PREPARE $(FLAKE8_PY3-3596799299) - 16.79 MB |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_replica/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/topic_timestamp_ut.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_proccessor.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/main.cpp |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_ut.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base_reboots/ut_base_reboots.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/fs_backup_validation_ut.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/mirrorer_autoscaling_ut.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_restore/ut_restore.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/backup_path_ut.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.6%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/list_objects_in_s3_export_ut.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/workload/libpy3stress-topic_kafka-workload.global.a |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/backup_ut/encrypted_backup_ut.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test 
|60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/provider/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/dq/runtime/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_upload_rows/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_script_executions_ut.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/proxy_service/kqp_proxy_ut.cpp |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/ut_backup_collection_reboots.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/balancing_ut.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_upload_rows/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/cost/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kafka/workload/libpy3stress-kafka-workload.global.a |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/actors/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_scan/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_scan/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/cost/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_table/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/table_creator/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/cms/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.3%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/proxy_service/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_replication/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_scan/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_replication/unittest |61.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/apps/dstool/lib/libpy3dstool_lib.global.a |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/nodewarden/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_upload_rows/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/cms/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/tablet/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/restarts/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/restarts/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_upload_rows/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_transfer/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/perf/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/ttl/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/script_execution/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |60.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... results_accumulator.log} |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/describe_ut.cpp |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/pqtablet/quota/ut/ydb-core-persqueue-pqtablet-quota-ut |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/ut/ydb-core-client-ut |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |61.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} |61.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/minikql_compile/ut/unittest |60.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/server_restart/gtest |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_6e0da74b1512d0ffe19c5dc500.o |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |61.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |61.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/s3_backups/tests/py3test |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/commitoffset_ut.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/ut_with_sdk/autoscaling_ut.cpp |60.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |60.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} |60.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/bool_test_enums.h_serialized.cpp |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_compiler.cpp |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} |60.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/combinatory/libut-olap-combinatory.a |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/datatime64_ut.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/blobs_sharing_ut.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compression_ut.cpp |60.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/compaction_ut.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ext_tenant_ut.cpp |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/decimal_ut.cpp |60.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/kqp/plan2svg/py3test |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |60.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/tests/objcopy_1815f02732d96389c328f04d90.o |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |60.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/clickbench_ut.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/tablet/ut/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/aggregations_ut.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_aggregation/ut_aggregate_statistics.cpp |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |60.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_49a1ca9559288648fba9cf7b65.o |60.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_0446f521b26a2e8128f94ac50f.o |60.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/tests/objcopy_367e2bc5d83faa0907a06d2976.o |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |60.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_d191482d8b66f1c03ea8df56d3.o |60.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/s3/objcopy_ff581f3cff717ab223922f0cd8.o |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/bool_ut.cpp |60.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/oltp_workload/workload/type/libpy3oltp_workload-workload-type.global.a |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_8491a772a9425d10f304e6f0e9.o |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/cloud/objcopy_fd8d9957a06c9923c501e36fd9.o |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_08a4b5d38a76e21591db0c3424.o |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/async_replication/objcopy_e2637cea0f2e4db109b364a246.o |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/simple_reader_ut.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ncloud/impl/access_service_ut.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |60.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/tests/objcopy_df04396057094f2483296b9dbe.o |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/statistics_ut.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sparsed_ut.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/tablet/rpc_restart_tablet_ut.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/optimizer_ut.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/columnshard/engines/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/raw_socket/ut/buffered_writer_ut.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/locks_ut.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/write_ut.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay/query_replay.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/long_tx_service/long_tx_service_ut.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_background_compaction.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build/ut_column_build.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/table_writer_ut.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/sys_view_ut.cpp |61.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/olap/helpers/libut-olap-helpers.a |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/slow/txusage_slow_ut.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/tiering_ut.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_object_storage_listing.cpp |61.1%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/control/lib/generated/control_board_proto.h |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/cms/cms_ut.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/driver_lib/run/auto_config_initializer_ut.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup_collection/ut_backup_collection.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_operation.pb.{h, cc} |61.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_value.pb.{h, cc} |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |61.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/token_accessor.pb.{h, cc} |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pending_fetcher.pb.{h, cc} |61.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/activation.pb.{h, cc} |61.2%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_proxy.pb.{h, cc} |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/db_pool.pb.{h, cc} |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/health_config.pb.{h, cc} |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/lwtrace/protos/lwtrace.pb.{h, cc} |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/kqp_olap_ut.cpp |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_stats.pb.{h, cc} |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_status_codes.pb.{h, cc} |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_transport.pb.{h, cc} |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_bridge_common.pb.{h, cc} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_federation_discovery.pb.{h, cc} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_coordination.pb.{h, cc} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_metrics.{pb.h ... grpc.pb.h} |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.5%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/test_import/libtest_import_udf.so |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/netclassifier.{pb.h ... grpc.pb.h} |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/follower_group.{pb.h ... grpc.pb.h} |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk.{pb.h ... grpc.pb.h} |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_pdisk_config.{pb.h ... grpc.pb.h} |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_scheme.{pb.h ... grpc.pb.h} |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/object_storage_listing_ut.cpp |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/fs_settings.{pb.h ... grpc.pb.h} |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blockstore_config.{pb.h ... 
grpc.pb.h} |61.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/helpers/libpy3olap-scenario-helpers.global.a |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/resource_broker.{pb.h ... grpc.pb.h} |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_table.pb.{h, cc} |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/auth.{pb.h ... grpc.pb.h} |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bridge.{pb.h ... grpc.pb.h} |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/objcopy_ec9bc627b6d56d1a941c2b7e4f.o |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_base.{pb.h ... grpc.pb.h} |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_compile_settings.{pb.h ... grpc.pb.h} |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/local.{pb.h ... grpc.pb.h} |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_physical.{pb.h ... grpc.pb.h} |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/result_formatter/result_formatter_ut.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |61.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/tests/objcopy_df4191b43fee1a7d77acb3207f.o |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/recoveryshard_config.{pb.h ... grpc.pb.h} |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_limits.{pb.h ... grpc.pb.h} |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/tests/objcopy_acf74a4313fbcafa6df239e3ec.o |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/nbs/blockstore.{pb.h ... 
grpc.pb.h} |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_slot_broker.{pb.h ... grpc.pb.h} |61.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/tests/objcopy_7479409fb33baf855b74c3e835.o |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/subdomains.{pb.h ... grpc.pb.h} |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sqs.{pb.h ... 
grpc.pb.h} |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ycloud/impl/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_object_storage_listing/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_gateway_ut.cpp |61.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/iam/libpy3client-yc_public-iam.global.a |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_table_writer/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/query_stats/ut/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |61.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-python-commons/libpy3contrib-python-allure-python-commons.global.a |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |61.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/allure-pytest/libpy3contrib-python-allure-pytest.global.a |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/schemeshard/ut_compaction/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |61.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/pytz/py3/libpy3python-pytz-py3.global.a |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/oom/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |61.8%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/libs/googleapis-common-protos/libpy3contrib-libs-googleapis-common-protos.global.a |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/syncer/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_compaction/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |61.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/initializer/ut/unittest |61.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/scheme_board/ut_cache/unittest |61.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |61.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/arrow/unittest |61.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/ydb/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_cache/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/workload_service/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_info_types.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_pg_types.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/schemeshard/ut_base/ut_counters.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_table_decimal_types.cpp |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export_fs.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_commit_redo_limit.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/common/libpy3tests-olap-common.global.a |62.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_3bdea7737a87c43bfaa0aaf4c3.o |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/engines/ut/unittest |62.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_1ab2a5a6dd84a6c9ff5d5c50b0.o |62.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/column_family/compression/objcopy_6887bde1dc99f5c5c2f0922842.o |62.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/client/yc_public/common/libpy3client-yc_public-common.global.a |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |62.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/lib/libpy3tests-olap-lib.global.a |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_index_build/unittest |62.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/objcopy_d709b1895f91108d9f51b703ea.o |62.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/select/objcopy_dfbd751fc64901b06ded4354c8.o |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/encrypted_storage_ut.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |62.1%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/datashard/parametrized_queries/objcopy_6d8369510b03c08a300f2e2657.o |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/table_creator/table_creator_ut.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |62.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/objcopy_e1e64d508ce59834ec0a40f731.o |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/read_attributes_utils_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/fq/ut_integration/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sysview/unittest |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/memory_controller/ut/unittest |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/provider/yql_kikimr_provider_ut.cpp |61.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/parametrized_queries/objcopy_7d0deb4120fbddf720c11b5358.o |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |61.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_col_ut.cpp |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |61.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compression_ut.cpp |61.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/wardens/py3test |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... results_accumulator.log} |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_compaction/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/transfer/tests/py3test |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/http_api/py3test |61.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/pqtablet/partition/mlp/ut/unittest |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_backup/ut_backup.cpp |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/sysview/kqp_sys_view_ut.cpp |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |60.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... results_accumulator.log} |60.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} |60.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/recipe/solomon_recipe |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_reassign/unittest |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |60.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |60.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/mediator/ut/unittest |60.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_range_ops/unittest |60.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |60.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |60.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... results_accumulator.log} |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest |60.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |60.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/olap_workload/tests/py3test |60.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |60.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |60.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/mixedpy/tests/py3test |60.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |60.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/rows/libtest-libs-rows.a |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/transfer/ut/row_table/unittest |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |59.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/query_cache/py3test |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/federated_query/ut_service/fast/unittest |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names |59.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} |59.9%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/s3_recipe_helper/liblibrary-testlib-s3_recipe_helper.a |59.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/solomon_helpers/liblibrary-testlib-solomon_helpers.a |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |59.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |59.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... results_accumulator.log} |59.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move/ut_move.cpp |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |59.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |59.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |59.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... 
results_accumulator.log} |59.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/pq_helpers/liblibrary-testlib-pq_helpers.a |59.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/olap/indexes/ydb-core-kqp-ut-olap-indexes |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_e4166f3d104a6751b45e7e712f.o |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_816e2dba53f55d924139cdb3c5.o |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/quota/write_quoter_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/pqtablet/quota/quota_tracker_ut.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export/ut_export.cpp |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_base/ut_base.cpp |59.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/objcopy_b9aaa278b10ed44e5645b3ef2f.o |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/cancel_tx_ut.cpp |59.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/postgresql/common/libpy3functional-postgresql-common.global.a |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/locks_ut.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sequence/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_sequence/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_errors.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/replica_ut.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |60.0%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/datastreams/datastreams_ut.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/flat_ut.cpp |60.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/libpy3functional-sqs-merge_split_common_table.global.a |60.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_2efdf95387a81f55cf9c81071a.o |60.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_242486256e1af973cd1d5376d1.o |60.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/objcopy_5d73baff4bb68923ddbe5f4fcd.o |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/json_change_record_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/describer/describer_ut.cpp |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_monitoring.pb.{h, cc} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/config/protos/marker.pb.{h, cc} |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/ssa.pb.{h, cc} |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_data_ut.cpp |60.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/pinger.pb.{h, cc} |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data_integrity/kqp_data_integrity_trails_ut.cpp |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minstep.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_localwriter_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/backup/impl/local_partition_reader_ut.cpp |60.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_helpers.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |60.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_5992d4831c5055a481712a2a80.o |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_common_pq.cpp |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/dq_channel_service_ut.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_sample_k.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/ydb_issue/proto/issue_id.{pb.h ... grpc.pb.h} |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/sharding/ut/ut_sharding.cpp |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_rs.cpp |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/dq_arrow_helpers_ut.cpp |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |60.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_split_merge.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_disk_color.{pb.h ... grpc.pb.h} |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/c4711c742b4f72331dccea9c2a_raw.auxcpp |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/index_builder.{pb.h ... grpc.pb.h} |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_94f66830f5c535f3f015b42e43.o |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_topology_ut.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bootstrap.{pb.h ... grpc.pb.h} |60.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_8cba80b2275265b72407436cdf.o |60.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_3382de65b417782bf648c475b1.o |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_9ea5b1fb7a4f8e1b0b8d7cf345.o |60.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/bind_channel_storage_pool.{pb.h ... 
grpc.pb.h} |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_system_names/ut_system_names.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_7f9e816a97aaeee837ac316091.o |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/in_memory_control_plane_storage_ut.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/common_ut.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_f928a40774b17a9d6cd7cabd2c.o |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/849c58233edc33539cbeb93a31_raw.auxcpp |60.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_bf578b7161cc94bf18488d04ca.o |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_flip_join_ut.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/scheduler/kqp_compute_scheduler_ut.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_benches_ut.cpp |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_complex_join_query_ut.cpp |60.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_b8d63b589074145793d63c27a3.o |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/retry_policy_ut.cpp |60.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/restarts/objcopy_e7477203b27fa0321cf18fd7ee.o |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_block_hash_join_ut.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.5%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_order_ut.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_permissions_ut.cpp |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/ut_helpers.cpp |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/downtime_ut.cpp |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_3db6af291678d4ac330517956a.o |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_exchange/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |58.9%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_exchange/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_fulltext_index_build_reboots.cpp |58.9%| COMPACTING CACHE 112.0KiB |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_488333b1ebd4c1d6d8ec5bcb8f.o |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/read_session_ut.cpp |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/wardens/objcopy_1555e67a3dd43a3e7f09bf8eee.o |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build_reboots/ut_index_build_reboots.cpp |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_36807918bd7a86c1ea37310c9c.o |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_normalizer.cpp |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |59.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_0ab925f82bbba07bf3b749dc3c.o |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest 
|59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/click/py3/libpy3python-click-py3.global.a |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_backup.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |59.2%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/709f125727d9ea4165df516509_raw.auxcpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/itsdangerous/py3/libpy3python-itsdangerous-py3.global.a |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |59.1%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/colorama/py3/libpy3python-colorama-py3.global.a |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login/ut_login.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask/py3/libpy3python-Flask-py3.global.a |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/mixedpy/workload/libpy3stress-mixedpy-workload.global.a |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/Flask-Cors/py3/libpy3python-Flask-Cors-py3.global.a |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/objcopy_5233daad98e22a16d60b4b7b86.o |59.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/objcopy_38c6001204b7ada03b8b3e421d.o |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_fulltext_build.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_rw/ut_columnshard_read_write.cpp |59.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/solomon/reading/objcopy_1c931ae3496b883d009f58ef8e.o |59.5%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/retry/py3/libpy3python-retry-py3.global.a |59.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/bulk_upsert/ydb-public-sdk-cpp-tests-integration-bulk_upsert |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/datashard/s3/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |59.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/client/libpy3tools-solomon_emulator-client.global.a |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |59.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/audit/audit_ut.cpp |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_filter_kmeans.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, 
FAILED} ydb/tests/datashard/async_replication/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_vector_index_build.cpp |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |59.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext_dict.cpp |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_fulltext.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |60.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/dynamic_config/dynamic_config_ut.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/datashard/async_replication/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/slow/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tiering/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncer_broker_ut.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/http_proxy/ut/inside_ydb_ut/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_indexes/ydb-tests-functional-kqp-kqp_indexes |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.4%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/dsproxy/ut_fat/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/kesus_quoter_ut.cpp |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/persqueue/ut/unittest |60.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut.cpp |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/tx_allocator_client/ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_move/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup_collection/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_cdc_stream/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup_collection/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/quoter_service_ut.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator_client/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_cdc_stream/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_resource_pool/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ext_index/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_exchange/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_stats/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_exchange/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_fat/dsproxy_ut.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_write/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_tenants_ut.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/ydb/table_split_ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_maintenance_api_ut.cpp |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/lib/ydb_cli/topic/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/s3/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/load_test/ut_ycsb/unittest |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/quoter/ut_helpers.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_allocator/ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/async_replication/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/metadata/secret/ut/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... 
results_accumulator.log} |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |58.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |59.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/kv/tests/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/testshard_workload/tests/py3test |59.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |59.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/high_load/unittest |58.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |58.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |58.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} |58.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |58.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sequenceproxy/ut/unittest |58.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/ydb-core-control-ut |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/graph/ut/unittest |58.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |58.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |58.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |58.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_index_lookup_join_ut.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |58.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cluster_info_ut.cpp |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/query_actor/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/merge_split_common_table/std/py3test |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |58.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} |58.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... results_accumulator.log} |58.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/row_dispatcher/ut/unittest |58.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_a38b1580810a6e4b419da99dcf.o |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/sql/ydb-tests-sql |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/idx_test/ydb_index_ut.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_topology_generator.cpp |58.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables |58.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/partitioning/test-results/py3test/{meta.json ... 
results_accumulator.log} |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_column_stats/unittest |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_a5874452d3dbd6f6e49cd08be6.o |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/cms/objcopy_b9fd5c62781ec3b78d111a0ba7.o |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool |58.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/statistics/objcopy_2a98e2f0e66f286cb125620511.o |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_fetcher_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/runtime/kqp_scan_data_ut.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/compress_executor_ut.cpp |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/library/fixtures/libpy3tests-library-fixtures.global.a |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_quotas_ut.cpp |58.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk_io/ydb-core-blobstorage-ut_vdisk_io |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ut_cdc_stream_reboots.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/testshard_workload/workload_testshard |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_4f92526e13553482736b942b2c.o |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_ut.cpp |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_7bfd03a31f5e230607792f10cc.o |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/olap/indexes/indexes_ut.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/example/ydb-tests-example |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_split_merge/ut_find_split_key.cpp |58.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |58.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_1a1e300767b552f4c13c3295d0.o |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/node_broker/tests/py3test |58.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_7eade8c49389813f8c36b72b5b.o |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/basic_usage_ut.cpp |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |58.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |58.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_ut.cpp |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/http_api_client/libpy3fq-libs-http_api_client.global.a |58.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mon/audit/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/http_api/objcopy_3209cda00462f2963f3cbbc912.o |58.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} |58.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/scenario/objcopy_656baae3c1e24959f5bcc457d7.o |58.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |58.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/testlib/service_mocks/ldap_mock/libtestlib-service_mocks-ldap_mock.a |58.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/common/py3test |58.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_permissions_ut.cpp |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes/ut_user_attributes.cpp |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries |58.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/fqrun/src/libtools-fqrun-src.a |58.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... results_accumulator.log} |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |58.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |58.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/solomon/ydb-tests-fq-solomon |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |58.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |58.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_schemeshard_build_index_helpers.cpp |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/join/kqp_join_ut.cpp |58.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/idx_test/libpublic-lib-idx_test.a |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/ut/ut_helper.cpp |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_connections_ut.cpp |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_failure_injection/ut_failure_injection.cpp |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mon/audit/url_matcher_ut.cpp |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/ut_fat/blobstorage_node_warden_ut_fat.cpp |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/statistics/aggregator/ut/unittest |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/syncer/blobstorage_syncquorum_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_bindings_ut.cpp |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_volatile/unittest |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_prefix_kmeans.cpp |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_volatile/unittest |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_secondary_index.cpp |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |58.2%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_session/ydb-tests-functional-kqp-kqp_query_session |58.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut_trace.cpp |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |58.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_reshuffle_kmeans.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_queries_permissions_ut.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_local_kmeans.cpp |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |58.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/tools/solomon_emulator/bin/solomon_emulator |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_recompute_kmeans.cpp |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/fetcher/fetch_request_ut.cpp |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |58.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/build_index/ut/ut_unique_index.cpp |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index_build/ut_index_build.cpp |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_order.cpp |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/dq/runtime/dq_output_channel_ut.cpp |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |58.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/control_plane_storage/ydb_control_plane_storage_internal_ut.cpp |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |58.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/keyvalue/protos/events.pb.{h, cc} |58.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_minikql.cpp |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_collector.cpp |58.5%| [PB] 
{BAZEL_DOWNLOAD} $(B)/ydb/library/folder_service/proto/config.pb.{h, cc} |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/interconnect.pb.{h, cc} |58.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |58.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/arrow/libsrc-client-arrow.a |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/sensitive.pb.{h, cc} |58.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_scheme.pb.{h, cc} |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/libpy3pile_promotion_workload.global.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/proto/libtools-stress_tool-proto.a |58.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/objcopy_e99c9b04005e36c324dfb9fd3b.o |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tools/stress_tool/lib/libydb_device_test.a |58.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/workload/libpy3scheme_board-pile_promotion-workload.global.a |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |58.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_pq_reboots/ut_pq_reboots.cpp |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |58.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ut_extsubdomain_reboots.cpp |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |58.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/plan2svg/ydb-tests-functional-kqp-plan2svg |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_pipe.{pb.h ... grpc.pb.h} |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |58.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_e7dfca3c87220ea0ed36a65f9f.o |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |58.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/replication.{pb.h ... grpc.pb.h} |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |58.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/objcopy_06a563d04a0e897145ef1061d2.o |58.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/compaction.{pb.h ... 
grpc.pb.h} |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |58.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |58.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/daf02fd86bb7e2296f1437ae1f_raw.auxcpp |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_resource_pool/ut_resource_pool.cpp |58.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/whiteboard_flags.{pb.h ... grpc.pb.h} |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice.cpp |58.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/cloud_events_ut.cpp |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_2b682e146a665bfa19210b0fd9.o |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_vdisk_config.{pb.h ... grpc.pb.h} |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |58.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |58.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_22edc4e58ff43cb5e83c9bbe2c.o |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/stream.{pb.h ... grpc.pb.h} |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/base.{pb.h ... grpc.pb.h} |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_volatile.cpp |58.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/s3_recipe/s3_recipe |58.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_self.cpp |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_cdc_stream/ut_cdc_stream.cpp |58.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_whiteboard.{pb.h ... 
grpc.pb.h} |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |58.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |58.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_versions.cpp |58.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_40779f0570229cef213050a4fa.o |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_vacuum.cpp |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_screen.cpp |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |58.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_handle_ut.cpp |59.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_row_versions_ut.cpp |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |59.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/ut/objcopy_9f29b589555ed64086e5eadccf.o |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_proto.cpp |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |59.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/columnshard/ut_schema/unittest |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_memtable.cpp |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_cxx_database_ut.cpp |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_tiered_ut.cpp |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_forward.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_pages.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.2%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/tablet_flat/test/libs/table/libtest-libs-table.a |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_bloom.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_nodes.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_other.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/shared_cache_s3fifo_ut.cpp |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_comp_gen.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_iterator.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_backup.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_btree_index_iter_charge.cpp |59.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.2%| [TM] {BAZEL_DOWNLOAD, 
FAILED} ydb/core/kqp/ut/tx/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_program.cpp |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_iface.cpp |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/statistics/aggregator/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/audit/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_ut.cpp |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/kqp_pg_ut.cpp |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |59.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/server_restart/public-sdk-cpp-tests-integration-server_restart |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/yt/kqp_yt_import/ydb-tests-fq-yt-kqp_yt_import |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test 
|60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [LD] {BAZEL_DOWNLOAD} $(B)/contrib/python/moto/bin/moto_server |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/audit/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test 
|61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/helper.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test 
|61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/pg/pg_catalog_ut.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_local_partition_reader/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_db_scheme.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mon/audit/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_topic_reader/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, 
FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_secret/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/partitioning/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |61.7%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/row_table/ydb-core-transfer-ut-row_table |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_console/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_base/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_schema/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/rm_service/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut_trace/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_range_cache_ut.cpp |61.8%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/engines/ut/ut_script.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/tx/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/certificate_check/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/address_classification/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_363b5875cc5c5e5745458b16b8.o |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serverless/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/aggregator/ut/unittest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_gclogic_ut.cpp |61.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/limits/objcopy_14c03c6aecffbe39cb01ddf2ed.o |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.8%| [PY] {BAZEL_DOWNLOAD} 
$(B)/ydb/tests/functional/limits/objcopy_d52256d4fa9895f38df6030445.o |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/audit/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_leases_ut.cpp |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_decimal.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_auditsettings/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_sausage.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |61.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_c623700776b43ee95ec93c56f9.o |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_trace/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/sql/py3test |61.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} |61.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |61.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/ymq/actor/yc_search_ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/viewer/tests/py3test |61.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_basic_statistics.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/executer_actor/ut/unittest |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/public/fetcher/ut/unittest |61.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |61.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |61.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_253d734e8c901d319d84fcc6e9.o |61.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |61.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.a |61.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_2f0e0ac8198858b9ec9901778e.o |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_table_part_ut.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/flat_executor_database_ut.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rename_table_column.cpp |61.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part_multi.cpp |61.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_303f7409bfab4277e367bbd11a.o |61.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/api/objcopy_e2a089b95d9316f6e26025d3e3.o |61.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/providers/common/ut_helpers/libproviders-common-ut_helpers.a |61.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/libpy3workload_testshard.global.a |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_charge.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_redo.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_compaction_multi.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/ut_helpers.cpp |61.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/audit/objcopy_643fa2679e88d9b2d33558b050.o |61.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |61.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/example/objcopy_e0aef87c4bf15cfdc957f4bdd1.o |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_column_statistics.cpp |61.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/python/tornado/tornado-4/libpy3python-tornado-tornado-4.global.a |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/flat_test_db.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_datetime.cpp |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/service/ut/ut_http_request.cpp |61.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_f738234258cd034cd5383f92ad.o |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ut_topic_set_boundaries.cpp |61.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/gateway/ut/gtest |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/data/kqp_read_null_ut.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/yql/providers/solomon/actors/ut/dq_solomon_write_actor_ut.cpp |61.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/public/describer/ut/unittest |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |61.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/objcopy_484246668d943fbae3b476ec7d.o |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/ut_vdisk_io/vdisk_io.cpp |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ut_consistent_copy_tables.cpp |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/sql/objcopy_83efacabe56767ae4f106a6d27.o |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/inside_ydb_ut/inside_ydb_ut.cpp |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/node_broker/node_broker |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_rowlocks.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_table.cpp |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} |61.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/library/yql/providers/generic/actors/ut/unittest |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/pq_async_io/ut/unittest |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |61.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |61.6%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/yql_testlib/libydb-core-yql_testlib.a |61.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |61.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... results_accumulator.log} |61.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |61.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_slice_loader.cpp |61.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/public/lib/ut_helpers/libpublic-lib-ut_helpers.a |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/control/immediate_control_board_actor_ut.cpp |61.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/objcopy_4f055c289b3de8f2a1e827ae5c.o |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/view/unittest |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... results_accumulator.log} |61.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless/ut_serverless.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/direct_read_ut.cpp |61.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |61.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |61.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |61.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/pq_read/test/py3test |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |60.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |60.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |60.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_followers/unittest |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_part.cpp |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/ydb-core-security-ut |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/ymq_ut.cpp |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... 
results_accumulator.log} |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |60.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_stat.cpp |60.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} |60.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |60.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |60.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/knn/kqp_knn_ut.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |60.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |60.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_48a08121f0a68da2f2666b0341.o |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |60.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_7211c23d9494c46f0f60063e9e.o |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/messaging/objcopy_791e2f78c18891d943ecce5e41.o |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_48884f6b745ced4d3e78997cb1.o |60.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_5333c1912ecbac0f64ff97551f.o |60.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/hive/objcopy_aebf7c73fcaf6a54715cc177c8.o |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_shred/ut_shred.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |60.9%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/datashard_ut_incremental_backup.cpp |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/kinesis_ut.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_arrow_in_channels_ut.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_flowcontrol_ut.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_types_arrow_ut.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool_ut.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |60.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_point_consolidation_ut.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |61.0%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/monlib/encode/legacy_protobuf/protos/metric_meta.pb.{h, cc} |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_split_ut.cpp |61.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/task_controller.pb.{h, cc} |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |61.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/row_dispatcher.pb.{h, cc} |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/ut/ut_shared_sausagecache_actor.cpp |61.1%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_e68ca1a2fa9943132c020ae028.o |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/gateways.pb.{h, cc} |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/objcopy_4ffdb694eb351ca96de1930bf2.o |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scan/kqp_scan_ut.cpp |61.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/objcopy_3310cbcd39c3373557308c8e76.o |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/schemeshard/ut_ttl/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/bind_queue_ut.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/distconf_ut.cpp |61.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/resource_manager.pb.{h, cc} |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ut_resource_pool_reboots.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |61.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_status_codes.pb.{h, cc} |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_tasks.pb.{h, cc} |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/arrow/kqp_result_set_formats_ut.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/ut_sequence/datashard_ut_sequence.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |61.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/objcopy_2a9fba044b5f98d2ff5f5c7f44.o |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut_ycsb.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/partition_end_watcher_ut.cpp |61.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/ut/helpers/libmkql_proto-ut-helpers.a |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/compression_ut.cpp |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/nodewarden/blobstorage_node_warden_ut.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/export.{pb.h ... 
grpc.pb.h} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters.{pb.h ... grpc.pb.h} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_proto_ut.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_workload_ut.cpp |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_pq.{pb.h ... grpc.pb.h} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_config.{pb.h ... grpc.pb.h} |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/table_description_ut.cpp |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_state_ut.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/ydb_convert/ydb_convert_ut.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_performance_test/main.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/ydb_proxy/ydb_proxy_ut.cpp |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_config.{pb.h ... 
grpc.pb.h} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_resolver_ut.cpp |61.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/node_broker/libpy3node_broker.global.a |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/ut_bscontroller/main.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication/ut_replication.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/perf/kqp_query_perf_ut.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_read_actor_ut.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/sqs/with_quotas/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_rd_read_actor_ut.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/fq/pq_async_io/ut/dq_pq_write_actor_ut.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.7%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ticket_parser_ut.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |61.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr/ut_rtmr.cpp |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/grpc_services/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_node_enumeration_ut.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |61.9%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table_reboots/ut_external_table_reboots.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/datastreams/datastreams_ut.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kesus/proxy/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_auditsettings/ut_auditsettings.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/populator_ut.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_v1/ut/describes_ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_table/ut_external_table.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_cluster_discovery/cluster_discovery_service_ut.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tablet/pipe_tracker_ut.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/bootstrapper_ut.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_pool.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_aggregator_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_serverless/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/kikimr_program_builder_ut.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_counters_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipecache_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_metrics_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_pipe_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/tablet_req_blockbs_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, 
FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/ut_aggregation/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet/resource_broker_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/node_broker_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_schemereq/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/backup/impl/ut_table_writer/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_host_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/tenant_ut_local.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.3%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl_qs.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_e0331f455507fe5ac3b71d0537.o |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/engine/mkql_engine_flat_ut.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_a0543c2dc30365e9b2ad3d0ca6.o |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_backup/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/cloud/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/with_quotas/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/olap/oom/objcopy_df0cb3f315162a3110ee243ecd.o |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [AR] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/pq_async_io/libtests-fq-pq_async_io.a |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/checkpointing/ut/checkpoint_coordinator_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/api/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/blobstorage/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ttl/unittest |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/splitter/ut/unittest |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/ut_sentinel_unstable/unittest |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/encryption/py3test |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/basic_example/gtest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/discovery/unittest |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |62.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_tests/py3test |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |61.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |61.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |61.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/statistics/test-results/py3test/{meta.json ... results_accumulator.log} |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/kqprun/tests/py3test |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |61.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... results_accumulator.log} |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} |61.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic_kafka/tests/objcopy_e91d43b449a687b2b36f1f5526.o |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/tests/kikimr_tpch/unittest |61.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/tests/library/ut/py3test |61.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/test-results/unittest/{meta.json ... results_accumulator.log} |61.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |61.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |61.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_9be8b6745d0fa150928bab4206.o |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_sequence/unittest |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |61.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |61.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |61.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/close_with_load/kqp_cwl.cpp |60.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sdk/cpp/sdk_credprovider/ydb-tests-functional-sdk-cpp-sdk_credprovider |60.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |60.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/tests/objcopy_8e19d47784789c55156c57f816.o |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |60.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/show_create/view/tests/py3test |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/postgresql/py3test |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... results_accumulator.log} |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_utils_ut.cpp |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/rate_limiter/ut/unittest |61.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/streaming/streaming |61.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |60.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/ut_streaming_query_reboots.cpp |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |61.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/ldap_auth_provider/ldap_auth_provider_ut.cpp |61.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_init.cpp |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/format_handler_ut.cpp |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_filter_ut.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/topic_parser_ut.cpp |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |61.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/common/libformat_handler-ut-common.a |61.1%| [AR] {BAZEL_DOWNLOAD} 
$(B)/yql/essentials/utils/network/libessentials-utils-network.a |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |61.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/ut_utils.cpp |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |61.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |61.2%| [PR] {BAZEL_DOWNLOAD} $(B)/ydb/core/base/generated/runtime_feature_flags.h |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |61.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/fq/ut_integration/fq_ut.cpp |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |61.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/persqueue_common.pb.{h, cc} |61.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/fmr.pb.{h, cc} |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_dynamic_config.pb.{h, cc} |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/actors/protos/dq_events.pb.{h, cc} |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/fq_config.pb.{h, cc} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/olap/high_load/read_update_write.cpp |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/read_actors_factory.pb.{h, cc} |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/rate_limiter.pb.{h, cc} |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/services_common.pb.{h, cc} |61.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |61.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/fq.pb.{h, cc} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/common/protos/snapshot.pb.{h, cc} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |61.4%| [PB] {BAZEL_DOWNLOAD} 
$(B)/ydb/public/api/protos/draft/persqueue_error_codes.pb.{h, cc} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/persqueue_error_codes_v1.pb.{h, cc} |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/login/protos/login.pb.{h, cc} |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/storage.pb.{h, cc} |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/protos/common.pb.{h, cc} |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/stress_tool/device_test_tool.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_b06d27009e49b9ba3df883a226.o |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_f3c323ef80ada193284f036d44.o |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |61.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/test_connection.pb.{h, cc} |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |61.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/objcopy_a52eb3c900a84eaad86a211549.o |61.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_ut.cpp |61.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/storagepoolmon/ut/storagepoolmon_ut.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/objcopy_b1ab101896e634020e0c6ffeaf.o |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_vector_index.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/topic_reader_ut.cpp |61.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/add_column/objcopy_b9596990f3fd41de0fa350fc68.o |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/add_column/objcopy_d54fb5ab35d376fe3311e9feea.o |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/topic_service_ut.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_trace.cpp |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_controller_config.{pb.h ... 
grpc.pb.h} |61.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/long_tx_service.{pb.h ... grpc.pb.h} |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/resource/v1/resource.{pb.h ... grpc.pb.h} |61.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/datashard_backup.{pb.h ... grpc.pb.h} |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write_ut.cpp |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |61.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config_units.{pb.h ... grpc.pb.h} |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |61.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/key_range.{pb.h ... grpc.pb.h} |61.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/shared_cache.{pb.h ... grpc.pb.h} |61.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters_aggregator.{pb.h ... grpc.pb.h} |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |61.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_proxy.{pb.h ... grpc.pb.h} |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/s3/ut/s3_aws_credentials_ut.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |61.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/scheme_log.{pb.h ... grpc.pb.h} |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |61.6%| RESOURCE $(sbr:4966407557) |61.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |61.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/cms.{pb.h ... grpc.pb.h} |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |61.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_cca8dcd66462c9ca3c57fcb78e.o |61.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/schemeshard/operations.{pb.h ... grpc.pb.h} |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |61.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |61.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/labeled_counters.{pb.h ... 
grpc.pb.h} |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_impl_ut.cpp |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_yql_ut.cpp |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ut_subdomain_reboots.cpp |61.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_ext_blobs_multiple_channels.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/pq_ut.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |61.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_pragma_ut.cpp |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |61.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/flat_scheme_op.{pb.h ... 
grpc.pb.h} |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |61.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |61.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/cost/kqp_cost_ut.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |61.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/config.{pb.h ... grpc.pb.h} |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/database/ut/ut_database.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/idx_test/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/datashard/ut_erase_rows/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |61.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |61.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/add_column/objcopy_04ccb9b757b207bc74705e9bb1.o |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minikql/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/cms/console/ut_configs_dispatcher/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_erase_rows/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/hive_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |62.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest 
|62.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_ut.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_async_index.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |62.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/pire/libpire_udf.so |62.3%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/objcopy_82d6d29ac7be3798b7e748facc.o |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/base/ut_board_subscriber/unittest 
|62.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/re2/libre2_udf.so |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |62.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |62.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |62.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |62.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/ttl/objcopy_589d529f9477963cf67237781c.o |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |62.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_fulltext_index.cpp |62.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/ut/rbo/objcopy_8a4839ce59e28719331c9dfe7e.o |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |62.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/testshard_workload/workload/libpy3stress-testshard_workload-workload.global.a |62.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/library/libpy3tools-nemesis-library.global.a |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |62.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |62.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |62.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |62.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |62.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |62.5%| [TS] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/wrappers/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |62.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |62.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |62.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |62.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |62.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/kqp_mock.cpp |62.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |62.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_index/ut_unique_index.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |62.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5865a174a6c25ca1a2d6386702.o |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/supp/ydb_supp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/federated_query/generic_ut/unittest |62.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_af18efc2f04dd1af5ca802c329.o |62.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/viewer/tests/objcopy_87b299e07b15c86f4f50f458ef.o |62.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |62.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ldap_auth_provider/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/functions_executor_wrapper.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/statistics/py3test |62.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/knn/unittest |62.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_yql_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |62.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/partition_writer_cache_actor_fixture.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_compat_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/demo_tx.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |62.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, 
FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/health_check/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sharding/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_00c87b13e2f685811a9825079d.o |62.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sharding/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sharding/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_bsvolume/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sharding/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/dc048c91e67372877fc6ad2dfc_raw.auxcpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_common.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_stream_creator/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/sessions/gtest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_external_blobs/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/replication/ydb_proxy/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/runtime/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/sys_view/partition_stats/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/first_class_src_ids_ut.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sharding/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/scheme_shard/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_worker/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp/unittest |62.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/wrappers/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/rbo/kqp_rbo_pg_ut.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/scale_recommender_policy_ut.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.9%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_ut.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/object_distribution_ut.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/actorlib_impl/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/hive/py3test |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_prefixed_vector_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/topic_yql_ut.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/ydb_proxy/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_view/unittest |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.0%| [TS] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/limits/py3test |62.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_b34c6a8a5501db208eebc5d8e4.o |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_9a3dabea847c21e0b4fa4cda26.o |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/service/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/common/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |63.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.0%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/aba998449c2518e3272d8e87fb_raw.auxcpp |63.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/kqp_generic_provider_ut.cpp |63.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/storage_pool_info_ut.cpp |63.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/common/objcopy_e32003454342267c2263935765.o |63.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |63.0%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/knn/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/hive/sequencer_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_external_data_source/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/dynamic_config/ut/unittest |62.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/objcopy_1ac33161075322a9a0efb211c2.o |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet_flat/ut/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_stream_indexes_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_service_ut.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume/ut_bsvolume.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/balance_coverage/balance_coverage_builder_ut.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |62.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/streaming/libpy3streaming.global.a |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/generic_ut/iceberg_ut_data.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |62.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_partition_stats/unittest |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_object.cpp |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |62.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_write.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/backup/tests/py3test |62.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |62.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/lib/ydb_cli/topic/topic_read_ut.cpp |62.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_bcbbd2d8f2367d5f3ed5199234.o |62.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_f05ead59375a9db120b95dd730.o |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/fqrun/fqrun |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_move_reboots/ut_move_reboots.cpp |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/yql/essentials/tools/sql2yql/sql2yql |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |62.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/script_execution/objcopy_1aeeb50f676472f975830c135d.o |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_fulltext_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sysview_reboots/ut_sysview_reboots.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tiering/ut/ut_tiers.cpp |62.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ut_export_reboots_s3.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_ru_calculator/ut_ru_calculator.cpp |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/olap_workload/olap_workload |62.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... results_accumulator.log} |62.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/objcopy_40226ff8497733c6e798ee3940.o |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_vector_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_disk_quotas/unittest |62.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/datashard/partitioning/objcopy_265d7fd505d52534f38ea6fb7f.o |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |62.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_c98e5b95c64b8486a12f10d408.o |62.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/nemesis/ut/objcopy_927a1f7611cf94fb1cd21ef8cf.o |62.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_5db899a01c2ec6f53648af6840.o |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |62.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_bfa810e70cd1de18c5d4a18a62.o |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/ut_large.cpp |62.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/rename/objcopy_c02c3d9f840d02af9fad858a55.o |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/large_results/kqp_scriptexec_results_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/pqtablet_mock.cpp |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |62.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_shred_reboots/ut_shred_reboots.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/ut_vector_index_build_reboots.cpp |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/topic_kafka/tests/py3test |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console |62.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |62.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap/ut_olap.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_followers.cpp |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/yql/kqp_scripting_ut.cpp |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/checkpointing/ut/unittest |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/tests/integration/auth/gtest |62.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/indexes/kqp_indexes_multishard_ut.cpp |62.3%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/sequenceshard/ut/unittest |62.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |62.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |62.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_read_iterator_ext_blobs.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/coordinator/ut/unittest |62.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |62.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/close_with_load/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/backup/unittest |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/select/test-results/py3test/{meta.json ... results_accumulator.log} |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/quoter/ut/unittest |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/federated_query/ut_service/slow/ydb-core-kqp-federated_query-ut_service-slow |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/show_create/table/tests/py3test |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... results_accumulator.log} |61.9%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/ymq/actor/cloud_events/cloud_events_ut/unittest |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |61.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |61.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |61.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |61.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |61.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/slow/autopartitioning_ut.cpp |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |61.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_document_api_ut.cpp |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_snapshot_readonly.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_scripts_ut.cpp |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_clickhouse_internal.pb.{h, cc} |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/quotas_manager.pb.{h, cc} |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |62.2%| 
[PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/public/issue/protos/issue_severity.pb.{h, cc} |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |62.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_c19b3eb5266bf8e49b2b628bc2.o |62.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_4e45fac9e6e2cbc502659b10eb.o |62.2%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/blobstorage/objcopy_790c6ea4aad5e761d21421b25d.o |62.2%| [SB] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/yql/dq/proto/dq_state_load_plan.pb.{h, cc} |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/ut_user_attributes_reboots.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.3%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/pg_ext/libessentials-core-pg_ext.a |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/workload/type/libpy3olap_workload-workload-type.global.a |62.2%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_common.pb.{h, cc} |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ut_rtmr_reboots.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yql/essentials/tools/sql2yql/sql2yql.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ut_bsvolume_reboots.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_request_reporting_ut.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/private_api.pb.{h, cc} |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_streaming_query/unittest |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_topic.pb.{h, cc} |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_analyze_ut.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_sequence_ut.cpp |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/mkql_proto/protos/minikql.{pb.h ... grpc.pb.h} |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/alloc.{pb.h ... grpc.pb.h} |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_cache_ut.cpp |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/configs_dispatcher_ut.cpp |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base.{pb.h ... 
grpc.pb.h} |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_reboots/ut_reboots.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/common/v1/common.{pb.h ... grpc.pb.h} |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/workload_manager_config.{pb.h ... grpc.pb.h} |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/filestore_config.{pb.h ... grpc.pb.h} |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence/ut_sequence.cpp |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_tablemetadata.{pb.h ... grpc.pb.h} |62.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/memory_stats.{pb.h ... grpc.pb.h} |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/grpc_pq_old.{pb.h ... grpc.pb.h} |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus.{pb.h ... grpc.pb.h} |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blob_depot_config.{pb.h ... grpc.pb.h} |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/query_stats.{pb.h ... grpc.pb.h} |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/yql_translation_settings.{pb.h ... grpc.pb.h} |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tenant_pool.{pb.h ... grpc.pb.h} |62.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_database.{pb.h ... grpc.pb.h} |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/client/server/msgbus_server_pq_metarequest_ut.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/assign_tx_id_ut.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_params_ut.cpp |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/channel_purpose.{pb.h ... grpc.pb.h} |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_filestore_reboots/ut_filestore_reboots.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_kv.{pb.h ... 
grpc.pb.h} |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_explain_ut.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_locks.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_user_management_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage.{pb.h ... grpc.pb.h} |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/service/kqp_qs_queries_ut.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_types_ut.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_quorum_tracker_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |62.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_965640ca94893d27c182c611e2.o |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/pqconfig.{pb.h ... 
grpc.pb.h} |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_limits_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_block_ut.cpp |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_query_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_discover_ut.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_patch_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/health_check/health_check_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_datashard.{pb.h ... grpc.pb.h} |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_put_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_get_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/query/kqp_stats_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/tests/functional/sqs/messaging/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/quoter_resource_tree_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/blobstorage/ut_vdisk/lib/libblobstorage-ut_vdisk-lib.a |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tests/tools/fqrun/fqrun.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |62.9%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/rows_proto_splitter_ut.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/util_ut.cpp |62.9%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/service/cluster/libname-service-cluster.a |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/immediate_controls_configurator_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/feature_flags_configurator_ut.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/library/ncloud/impl/ut/unittest |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/string/libstring_udf.so |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/net_classifier_updater_ut.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/tablet_ut.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_secrets_ut.cpp |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/workload/libpy3stress-olap_workload-workload.global.a |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |63.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/check/libv1-complete-check.a |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/objcopy_9de271b22d7bcc64ef77cc3cde.o |63.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/olap_workload/libpy3olap_workload.global.a |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.0%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/analysis/yql/libcomplete-analysis-yql.a |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/cache_ut.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_fulltext_ut.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/yql/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_constraints_ut.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut/dsproxy_counters_ut.cpp |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/iceberg_processor_ut.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_acl_ut.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/log_settings_configurator_ut.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/complete/name/cluster/static/libname-cluster-static.a |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/data/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/fq/libs/common/entity_id_ut.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/jaeger_tracing_configurator_ut.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/datastreams/ut/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/modifications_validator_ut.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.1%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/lexer/check/libv1-lexer-check.a |63.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/tools/pq_read/pq_read |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/common/cache_ut.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/close_with_load/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_tenants.cpp |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/yql/unittest |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/tools/nemesis/ut/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/sql/v1/format/check/libv1-format-check.a |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/blobstorage/storagepoolmon/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/select/py3test |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/console/console_ut_configs.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.2%| [UN] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/psql/psql |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/client/server/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/schemeshard/ut_extsubdomain/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_snapshot/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/pg/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/sqs/messaging/py3test |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/columnshard/ut_rw/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/persqueue_cluster_discovery/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/storagepoolmon/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_monitoring/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/close_with_load/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/statistics/database/ut/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_collector_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_subscriber/unittest |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} |62.7%| [AR] {BAZEL_DOWNLOAD, FAILED} $(B)/yt/yt/core/libyt-yt-core.a |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |62.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_0aefef587c181350d3a25f70e0.o |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |62.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/ttl/objcopy_c068ee86eb127df13256bfbe45.o |62.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/keyvalue/keyvalue_storage_read_request_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_keys/unittest |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut |62.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |62.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |62.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_export/unittest |62.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} |62.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} |62.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} |62.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |62.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/topic/tests/py3test |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/serializable/py3test |62.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |62.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |62.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |62.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |62.5%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/scenario/test-results/py3test/{meta.json ... results_accumulator.log} |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tools/query_replay_yt/query_replay_yt |62.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |62.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} |62.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/olap/column_family/compression/test-results/py3test/{meta.json ... results_accumulator.log} |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |62.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} |62.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/batch_slice.cpp |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/dstool/ydb-tests-functional-dstool |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/dstool/py3test |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |62.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/topic/workload_topic |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/viewer/tests/py3test |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |62.0%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |62.0%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} |62.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/ctas/tests/py3test |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/simple_queue/tests/py3test |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/control_plane_proxy/ut/unittest |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/example/py3test |62.1%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/long_tx_service/ut/unittest |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |62.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... 
results_accumulator.log} |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |62.2%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut |62.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/tablet/ut_helpers.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |62.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_secret_reboots/ut_secret_reboots.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/federated_query/ut_service/slow/kqp_federated_query_actors_ut.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/monitoring_ut.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/splitter/ut/ut_splitter.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_streaming_query/ut_streaming_query.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/rm_service/kqp_rm_ut.cpp |62.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/dread_cache_service/ut/caching_proxy_ut.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_check_ut.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_incremental_restore_scan.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/wrappers/s3_wrapper_ut.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/security/certificate_check/cert_utils_ut.cpp |62.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/dst_creator_ut.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/dsproxy/ut_ftol/dsproxy_fault_tolerance_ut.cpp |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/file_storage/proto/file_storage.pb.{h, cc} |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut.cpp |62.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/schemereq_ut.cpp |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |62.3%| [PB] {BAZEL_DOWNLOAD} 
$(B)/ydb/core/fq/libs/config/protos/audit.pb.{h, cc} |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |62.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/utils/fetch/proto/fetch_config.pb.{h, cc} |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_cms.pb.{h, cc} |62.3%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/aclib/protos/aclib.pb.{h, cc} |62.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_datashard.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_columnshard.cpp |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_analyze_columnshard.cpp |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/compute.pb.{h, cc} |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/process_exit_profiler.cpp |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/draft/ydb_object_storage.pb.{h, cc} |62.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_pool.cpp |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/common.pb.{h, cc} |62.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_v1.pb.{h, cc} |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_locks/unittest |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/local_bypass.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |62.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_issue_message.pb.{h, cc} |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_formats.pb.{h, cc} |62.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_persqueue_cluster_discovery.pb.{h, cc} |62.4%| [PB] {BAZEL_DOWNLOAD} $(B)/library/cpp/messagebus/monitoring/mon_proto.pb.{h, cc} |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |62.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_integrity_trails.{pb.h ... grpc.pb.h} |62.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet.{pb.h ... grpc.pb.h} |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_looper.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduler_thread.cpp |62.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp_stats.{pb.h ... grpc.pb.h} |62.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/objcopy_f152d89e868e3e70c582478d88.o |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/metering_sink_ut.cpp |62.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/objcopy_a457e57e9ccca716aa1224bf90.o |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |62.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_write.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelable_context.cpp |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_change_exchange.cpp |62.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |62.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |62.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/key.{pb.h ... 
grpc.pb.h} |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/serialize.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_locks/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_locks/unittest |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/data_events.{pb.h ... grpc.pb.h} |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/codicil_guarded_invoker.cpp |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.5%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/libpy3workload_topic.global.a |62.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |62.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/ut_helpers.cpp |62.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx.{pb.h ... grpc.pb.h} |62.5%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console_tenant.{pb.h ... grpc.pb.h} |62.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_f93c60b04a0499f2ec6880591a.o |62.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_d3af02c7d57ea2cbbe5d381baa.o |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_union_ut.cpp |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/services/services.{pb.h ... grpc.pb.h} |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/sqs_topic_ut/inside_ydb_ut.cpp |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_stats.{pb.h ... grpc.pb.h} |62.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/objcopy_e9d954952def1b899e1fb63731.o |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/feature_flags.{pb.h ... grpc.pb.h} |62.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/scheme_shard/objcopy_8120ef49e7e653ed0601604313.o |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/service_account_service_ut.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/contrib/libs/opentelemetry-proto/opentelemetry/proto/trace/v1/trace.{pb.h ... grpc.pb.h} |62.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.global.a |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/counters_schemeshard.{pb.h ... grpc.pb.h} |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator/txallocator_ut_helpers.cpp |62.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/metrics.{pb.h ... grpc.pb.h} |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/common.cpp |62.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |62.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/kqp.{pb.h ... grpc.pb.h} |62.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/drivemodel.{pb.h ... grpc.pb.h} |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/stream_creator_ut.cpp |62.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/ut_helpers.cpp |62.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view.{pb.h ... 
grpc.pb.h} |62.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tx_columnshard.{pb.h ... grpc.pb.h} |62.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/scheme_board/subscriber_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |62.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_config.{pb.h ... grpc.pb.h} |62.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |62.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |62.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |62.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |62.7%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/rate_limiter_test_setup.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |62.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/http_proxy/ut/sqs_topic_ut.cpp |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/local_partition_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_split_ut.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |62.8%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/libcore-external_sources-hive_metastore.a |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/basic_usage_ut.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_connection_ut.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |62.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_activity_ut.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |62.8%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/dynamic_prototype/libcpp-protobuf-dynamic_prototype.a |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_serverless_reboots/ut_serverless_reboots.cpp |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |62.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_login/unittest |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_common_new_schemecache_ut.cpp |62.8%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |62.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/basic_example/public-sdk-cpp-tests-integration-basic_example |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_fetcher_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/main.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_re2_ut.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/persqueue_new_schemecache_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/topic_to_table_ut.cpp |62.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_interconnect_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_helpers.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_merger.cpp |62.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_secret/ut_secret.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/external_sources/hive_metastore/ut/hive_metastore_client_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/scheme/kqp_scheme_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_spilling_ut.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attributes_stripper.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_node_factory.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/make_config.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_designated_consumer.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attributes.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ephemeral_attribute_owner.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/writer.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_consumer.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/exception_helpers.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/attribute_filter.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/helpers.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_scan_logging_ut.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/node_detail.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limiter.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/runtime/kqp_hash_shuffle_ut.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/statistics_producer.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/ypath_filtering_consumer.cpp |62.9%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/token.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/tokenizer.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/bindings.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token_writer.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/service_discovery/service_discovery.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_consumer.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/syntax_checker.cpp |63.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/kqp/tests/kikimr_tpch/ydb-core-kqp-tests-kikimr_tpch |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/token.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_filter.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/stream.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string_builder_stream.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/yson/protobuf_interop_options.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop_unknown_fields.cpp |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |63.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/stream.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/forwarding_consumer.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/depth_limiting_yson_consumer.cpp |63.0%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/lexer.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/attribute_consumer.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/consumer.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/trie.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/config.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/protobuf_interop.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/throttling_channel.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/convert.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/serialized_channel.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/helpers.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/spin_wait_slow_path_logger.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/static_channel_factory.cpp |63.1%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/request_queue_provider.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/socket.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/retrying_channel.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/public.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/public.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dispatcher.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/response_keeper.cpp |63.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/datetime2/libdatetime2_udf.so |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/async_writer.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/peer_discovery.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/descriptors.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/listener.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message_format.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/overload_controller.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_server.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/hedging_channel.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/roaming_channel.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/service_detail.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/access_service_ut.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/address.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_def.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/dynamic_channel_pool.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/public.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize_dump.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/relaxed_mpsc_queue.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/serialize.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_statistics_producer.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/random.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packed_unsigned_vector.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker.cpp |63.2%| [CC] 
{BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/public.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/proc.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pattern_formatter.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/protobuf_helpers.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/memory_usage_tracker.cpp |63.2%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/blob_output.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bitmap.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fs.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_output.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backoff_strategy.cpp |63.2%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bit_packing.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/stream_log_writer.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/log_writer_detail.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/parser_helpers.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/formatter.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/helpers.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/dns_resolver.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/serializable_logger.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/config.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/crypto.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_rw_lock.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_affinity.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/dictionary_codec.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/future.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/two_level_fair_share_thread_pool.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/codec.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/profiling_helpers.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/suspendable_action_queue.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/single_queue_scheduler_thread.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_yielder.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher_impl.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/pollable_detail.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_queue_scheduler_thread.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/notify_manager.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_poller.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/throughput_throttler.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_queue.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zlib.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_semaphore.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_invoker_pool.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_parser.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_pipe.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/retrying_periodic_executor.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_writer.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/coroutine.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/concurrency/propagating_storage.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lz.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/bzip2.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/action_queue.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/lzma.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_manager.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/delayed_executor.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_action_queue.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/cancelation_token.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/system_log_event_provider.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/quantized_executor.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fls.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/connection.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_util.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/invoker_detail.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/public.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fiber_scheduler_thread.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/periodic_executor.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/config.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/execution_stack.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/actions/current_invoker.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/dispatcher.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/thread_pool_detail.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/client.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_stream_helpers.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/brotli.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/random_access_gzip.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/zstd.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/snappy.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/scheduled_executor.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/zstd_log_codec.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_throttler.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/id_generator.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/digest.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/json_callbacks.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/server.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adaptive_hedging_manager.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/adjusted_exponential_moving_average.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/dns/ares_dns_resolver.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/appendable_compressed_file.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/codicil.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/logger_owner.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/coro_pipe.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/json/config.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/config.cpp |63.4%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/external_sources/hive_metastore/hive_metastore_native/libexternal_sources-hive_metastore-hive_metastore_native.a |63.4%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/logging/file_log_writer.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/configurable_singleton_def.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/linear_probe.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/public.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/bloom_filter.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/fair_share_hierarchical_queue.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/checksum.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/packet.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/backtrace.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/pool_allocator.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/async_barrier.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/crypto/tls.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/fluent_log.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/inotify.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/hazard_ptr.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/arithmetic_formula.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/error.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/helpers.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/histogram.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/utf8_decoder.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/bus/tcp/config.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/zerocopy_output_writer.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistic_path.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/shutdown.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/null_channel.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/slab_allocator.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/config.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/cache_config.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/statistics.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/load.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/context.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/config.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/connection.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/type_registry.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/folder_service_ut.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/bscontroller/ut_bscontroller/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/logging/config.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/profiling/timing.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/phoenix/schemas.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/caching_channel_factory.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/kqp/ut/sysview/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authenticator.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_errors/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/authentication_identity.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/channel_detail.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/channel.cpp |63.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/misc/ref_counted_tracker_profiler.cpp |63.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |63.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/bus/server.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/balancing_channel.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |63.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/threading/thread.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/client.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/tracing/allocation_tags.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/message.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/overload_controlling_service_base.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/protocol_version.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/local_channel.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/dialer.cpp |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/net/local_address.cpp |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/helpers.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/stack.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ypath/tokenizer.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/per_key_request_queue_provider.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/null_consumer.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/list_verb_lazy_yson_consumer.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/schemeshard/ut_subdomain/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/producer.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/config.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_errors/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser_deserialize.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_kqp_errors/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/yson_builder.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/string.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |63.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/service/ut_json_change_record/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/request_complexity_limits.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |63.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.5%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |63.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |63.6%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytalloc/config.cpp |63.5%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dstool/objcopy_88c3bdc430d7cb7f217b6a7f5c.o |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/parser.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/statistics/aggregator/ut/ut_traverse_datashard.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/yson/pull_parser.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |63.6%| [CC] 
{BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dstool/1bbc8f31f8eb46680aacf37392_raw.auxcpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/internals_ut.cpp |63.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dstool/objcopy_281dee3582d4ce59ddf75426c6.o |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_write_ut.cpp |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_verify_failure_regression.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/service_combiner.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/server_detail.cpp |63.6%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/dstool/objcopy_8d4997dfd93843149bbf2f76f1.o |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_incremental_restore/ut_incremental_restore.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/utilex/random.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_column_build/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/counters_ut.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/sysview/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/rpc/viable_peer_registry.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |63.6%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/protobuf/yql/libcpp-protobuf-yql.a |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_bootstrapped_ut.cpp |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/actor_tracker_ut.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_replay.cpp |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/balance_coverage/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/library/ycloud/impl/user_account_service_ut.cpp |63.7%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.global.a |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/permission.cpp |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sysview/ut_sysview.cpp |63.6%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_extract_predicate_unpack_ut.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/interned_attributes.cpp |63.6%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.global.a |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_change_collector/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_streaming/grpc_streaming_ut.cpp |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_kv_ut.cpp |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_returning_ut.cpp |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/size.cpp |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/ic_cache_ut.cpp |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_export/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_move_table.cpp |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_backup/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/hive/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_backup/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sqlin_ut.cpp |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_sort_ut.cpp |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_backup/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/scenario/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_named_expressions_ut.cpp |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_incremental_backup/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_query_ut.cpp |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/tx/datashard/datashard_ut_snapshot_isolation.cpp |63.7%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/objcopy_a14abb13ecebd457a15fc48470.o |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_rtmr/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_not_null_ut.cpp |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/actorlib_impl/test_protocols_ut.cpp |63.7%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/core/grpc_streaming/ut/grpc/libgrpc_streaming-ut-grpc.a |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_subdomain/ut_subdomain.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_subdomain/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/metadata/initializer/ut/ut_init.cpp |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/olap/column_family/compression/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/columnshard/ut_schema/ut_columnshard_schema.cpp |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_overload_ut.cpp |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/controller/target_discoverer_ut.cpp |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/compression/stream.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/streaming_optimize/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |63.8%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/driver_lib/run/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_system_names/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/system_invokers.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_dst_creator/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} 
$(S)/yt/yt/core/concurrency/invoker_alarm.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |63.8%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/public-sdk-cpp-tests-integration-sessions_pool |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/lease_manager.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/fair_share_thread_pool.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/engine/ut/unittest |63.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |63.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/invoker_queue.cpp |63.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} |63.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |63.4%| [TS] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_ru_calculator/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/time_cast/ut/unittest |63.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |63.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |63.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |63.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |63.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_immediate_effects_ut.cpp |63.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |63.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... results_accumulator.log} |63.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |63.5%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |63.2%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/kafka/tests/py3test |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partitiongraph_ut.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/cdc/tests/py3test |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/compile_service/ut/kqp_compile_fallback_ut.cpp |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/add_column/py3test |63.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/datashard/parametrized_queries/test-results/py3test/{meta.json ... results_accumulator.log} |63.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ext_index/ut/ut_ext_index.cpp |63.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |63.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |63.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_shred/test-results/unittest/{meta.json ... results_accumulator.log} |63.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |63.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/solomon/reading/test-results/py3test/{meta.json ... results_accumulator.log} |63.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/ut_external_data_source_reboots.cpp |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |63.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |63.4%| [AR] {BAZEL_DOWNLOAD} $(B)/yql/essentials/minikql/protobuf_udf/libessentials-minikql-protobuf_udf.a |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/stress/scheme_board/pile_promotion/tests/py3test |63.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/services/keyvalue/ut/unittest |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_hash_combine_ut.cpp |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_agg_ut.cpp |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/tools/query_replay_yt/query_compiler.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_merge_ut.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/load_test/ut/group_test_ut.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/base/board_subscriber_ut.cpp |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/public/mlp/ut/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_scale_manager_graph_cmp_ut.cpp |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |63.3%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/persqueue_v1/ut/describes_ut/describe_topic_ut.cpp |63.3%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |63.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/core/persqueue/ut/objcopy_11720a94357f4e62a79e34a420.o |63.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |63.4%| [UN] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/common-test_framework-udfs_deps.pkg.fake |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_replication/unittest |63.4%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |63.4%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... 
results_accumulator.log} |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/grpc_request_check_actor_ut/grpc_request_check_actor_ut.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |63.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |63.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/user_info_ut.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_mock.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/utils_ut.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/s3_recipe_ut_helpers.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ranges_ut.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/system_attribute_provider.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_s3_plan_ut.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/federated_query/s3/kqp_federated_scheme_ut.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/static_service_dispatcher.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_builder.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/tree_visitor.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqrb_describes_ut.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/sourceid_ut.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_update.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_resolver.cpp |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_detail.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_service.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/yson_struct_detail.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_chooser_ut.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_oos_logic_ut.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_reattach_ut.cpp |63.4%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |63.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/objcopy_1406195445f45d950dda89fcd8.o |63.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/partition_ut.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/concurrency/nonblocking_batcher.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kesus/proxy/proxy_actor_ut.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_delete_ut.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/batch_operations/kqp_batch_update_ut.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pqtablet_ut.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/ypath_client.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_inplace_update_ut.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/yt/yt/core/ytree/virtual.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/topic/ut/describe_topic_ut.cpp |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/blobstorage/vdisk/skeleton/skeleton_vpatch_actor_ut.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/effects/kqp_effects_ut.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/opt/kqp_ne_ut.cpp |63.5%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |63.6%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/actors/protos/actors.pb.{h, cc} |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/ut/pq_ut.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minstep/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_minstep/unittest |63.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/viewer/viewer |63.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_erase_rows.cpp |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |63.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/checkpoint_coordinator.pb.{h, cc} |63.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_proxy.pb.{h, cc} |63.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/annotations/validation.pb.{h, cc} 
|63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/tx/columnshard/engines/scheme/defaults/protos/data.pb.{h, cc} |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/fq/libs/config/protos/control_plane_storage.pb.{h, cc} |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_sequence_reboots/ut_sequence_reboots.cpp |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_replication_reboots/ut_replication_reboots.cpp |63.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/table_service_config.{pb.h ... grpc.pb.h} |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/type_info.{pb.h ... grpc.pb.h} |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/node_broker.{pb.h ... grpc.pb.h} |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/scheme/protos/pathid.{pb.h ... 
grpc.pb.h} |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/grouper_ut.cpp |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/mv_object_map_ut.cpp |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_login_large/ut_login_large.cpp |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperloglog/libhyperloglog_udf.so |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_snapshot.cpp |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/cms_ut_common.cpp |63.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/test/simple/libsimple_udf.so |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/bscontroller/group_mapper_ut.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/math/libmath_udf.so |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |63.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_31d605682329607481eb568ed0.o |63.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dummylog/libdummylog.so |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/unittest |63.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/lists/liblists_udf.so |63.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/callables/libcallables_udf.so |63.7%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.7%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/http_config.{pb.h ... grpc.pb.h} |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/sys_view_types.{pb.h ... grpc.pb.h} |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/ttl/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |63.8%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/blobstorage_base3.{pb.h ... 
grpc.pb.h} |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |63.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/vector/libvector_udf.so |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/rename/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/mind/ut_fat/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |63.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/reservoir_sampling/libreservoir_sampling_udf.so |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/topfreq/libtopfreq_udf.so |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/rbo/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} 
ydb/core/tablet/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |63.9%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/auth/ydb-public-sdk-cpp-tests-integration-auth |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/sys_view/query_stats/query_stats_ut.cpp |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_kafka_functions.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_reader_ut.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |63.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/stat/libstat_udf.so |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/solomon/reading/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |63.9%| 
[TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |63.9%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/digest/libdigest_udf.so |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/top/libtop_udf.so |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/security/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_olap/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_shred/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [CC] 
{BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_changer_ut.cpp |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/persqueue/public/mlp/mlp_writer_ut.cpp |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/datashard/parametrized_queries/py3test |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/actors_ut.cpp |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/fq/libs/result_formatter/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/metarequest_ut.cpp |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/pqtablet/quota/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_actor.cpp |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_245adf3e28f56e6467e034d9f2.o |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/objcopy_7648c2519d02b8456f762efc4b.o |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/kafka_test_client.cpp |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_produce_actor.cpp |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/ut/query/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tablet/ut/unittest |64.1%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |64.1%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... 
results_accumulator.log} |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_olap_reboots/ut_olap_reboots.cpp |63.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/kv/workload_kv |63.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_cf3971576aced18377e99f5367.o |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/backup/s3_path_style/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_stats/unittest |63.9%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_e2acb41e7099c0db4fe54a1587.o |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/transfer/ut/functional/unittest |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/actors/ut/database_resolver_ut.cpp |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_background_compaction/unittest |63.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/compile_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/persqueue/public/list_topics/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/replication/unittest |63.9%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |63.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.9%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} |63.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/tests/stress/show_create/table/show_create_table |63.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |63.6%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/functional/kqp/kqp_query_svc/unittest |63.6%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/keyvalue/ut/unittest |63.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} |63.6%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} |63.7%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/blobstorage/ut_vdisk_io/test-results/unittest/{meta.json ... 
results_accumulator.log} |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/mind/address_classification/net_classifier_ut.cpp |63.7%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_stats/ut_stats.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/tests/fq/restarts/py3test |63.8%| [TA] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} |63.8%| [LD] {BAZEL_DOWNLOAD, FAILED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |63.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/cms/sentinel_ut_unstable.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp.cpp |63.8%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/objcopy_7c81cbfa6b5ce112674cb0a849.o |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/datashard/datashard_ut_kqp_stream_lookup.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/serverless/13360e4ecdf34efe6c3a817a44_raw.auxcpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_read_table/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_background_cleaning/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_view/ut_view.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_background_cleaning/unittest |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_background_cleaning/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_background_cleaning/ut_background_cleaning.cpp |63.8%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/ydb/ut/ydb_ut.cpp |63.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_extsubdomain/ut_extsubdomain.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_serialization.cpp |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_transfer/ut_transfer.cpp |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/proxy_ut_helpers.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_transaction_coordinator.cpp |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_proxy/storage_tenant_ut.cpp |63.9%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |63.9%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kafka_proxy/ut/ut_protocol.cpp |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |63.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/fields.pb.{h, cc} |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/library/formats/arrow/protos/accessor.pb.{h, cc} |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |63.9%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_export.pb.{h, cc} |63.9%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |64.0%| [PB] {BAZEL_DOWNLOAD} $(B)/yql/essentials/core/issue/protos/issue_id.pb.{h, cc} |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |64.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/protos/ydb_discovery.pb.{h, cc} |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/mediator/mediator_ut.cpp |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |64.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/objcopy_691607a9cbabb8d8c0161d1a6d.o |64.0%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/libpy3workload_kv.global.a |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/graph/shard/ut/shard_ut.cpp |64.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/objcopy_60a4829fdc305e3a74a7ddcb41.o |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |64.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/objcopy_22b5b8dd6ea05f4194f60e6181.o |64.0%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/transfer/tests/objcopy_76cd981cf66123b7633d25b898.o |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |64.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tracing.{pb.h ... 
grpc.pb.h} |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/replication/service/worker_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_volatile_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |64.0%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/coordinator/coordinator_ut.cpp |64.0%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/public/api/grpc/draft/dummy.{pb.h ... grpc.pb.h} |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.0%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |64.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/tablet_counters.{pb.h ... grpc.pb.h} |64.1%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/topic/workload/libpy3stress-topic-workload.global.a |64.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/s3_settings.{pb.h ... grpc.pb.h} |64.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/file/libfile_udf.so |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/row_dispatcher_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/msgbus_pq.{pb.h ... grpc.pb.h} |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |64.1%| [PB] {BAZEL_DOWNLOAD} $(B)/ydb/core/protos/console.{pb.h ... 
grpc.pb.h} |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/ut_helpers.cpp |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/leader_election_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/coordinator_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/dicts/libdicts_udf.so |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_monitoring_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_scripting_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_register_node_ut.cpp |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/fq/libs/row_dispatcher/ut/topic_session_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_import_ut.cpp |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ldap_login_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_index_table_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_login_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/public/sdk/cpp/src/client/federated_topic/ut/basic_usage_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_object_storage_ut.cpp |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest 
|64.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/structs/libstructs_udf.so |64.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/streaming/libstreaming_udf.so |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/control/ut/unittest |64.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/examples/type_inspection/libtype_inspection_udf.so |64.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/set/libset_udf.so |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/logs/dsv/libdsv_udf.so |64.1%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/ydb-public-sdk-cpp-tests-integration-sessions |64.1%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_coordination_ut.cpp |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.1%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_logstore_ut.cpp |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] 
{BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.2%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/scheme_board/ut_populator/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/blobstorage/ut_vdisk_io/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/datashard/ut_order/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/tx/schemeshard/ut_user_attributes/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.3%| [TM] {BAZEL_DOWNLOAD, FAILED} ydb/core/kqp/compile_service/ut/unittest |64.3%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_tx_ut.cpp |64.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/libpy3show_create_table.global.a |64.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so |64.4%| [PY] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/show_create/table/objcopy_970514ee5aa7605a49b54b8feb.o |64.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_tx_ut.cpp |64.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/yson2/libyson2_udf.so |64.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_sink_locks_ut.cpp |64.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_ut.cpp |64.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_rollback.cpp |64.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/rpc_calls_ut.cpp |64.4%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/grpc_services/operation_helpers_ut.cpp |64.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_locks_tricky_ut.cpp |64.5%| [AR] {BAZEL_DOWNLOAD} $(B)/library/cpp/testing/gbenchmark/libcpp-testing-gbenchmark.a |64.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_column_build_reboots/ut_column_build_reboots.cpp |64.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/tx_allocator_client/actor_client_ut.cpp |64.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_mvcc_ut.cpp |64.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_query_ut.cpp |64.5%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_read_rows_ut.cpp |64.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_stats_ut.cpp |64.6%| [CC] {BAZEL_DOWNLOAD} 
$(S)/ydb/core/kqp/ut/tx/kqp_sink_mvcc_ut.cpp |64.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/kqp/ut/tx/kqp_snapshot_isolation_ut.cpp |64.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ut_topic_splitmerge.cpp |64.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_olapstore_ut.cpp |64.7%| [AR] {BAZEL_DOWNLOAD} $(B)/contrib/restricted/google/benchmark/librestricted-google-benchmark.a |64.7%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_olap_ut.cpp |64.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_bulk_upsert_ut.cpp |64.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/histogram/libhistogram_udf.so |64.8%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/core/tablet_flat/benchmark/b_part.cpp |65.1%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/tools/astdiff/astdiff |65.2%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_ut.cpp |65.6%| [CC] {BAZEL_DOWNLOAD} $(S)/ydb/services/ydb/ydb_table_ut.cpp |65.6%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/ydb-tests-functional-backup |65.7%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/python/python3_small/libpython3_udf.so |66.2%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/url_base/liburl_udf.so |66.3%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/unicode_base/libunicode_udf.so |68.8%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/protobuf/libprotobuf_udf.so |69.4%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/build_info/liblibrary-cpp-build_info.a |69.5%| [AR] {default-linux-x86_64, release, asan} $(B)/library/cpp/svnversion/liblibrary-cpp-svnversion.a |70.0%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/fq/streaming_optimize/ydb-tests-fq-streaming_optimize |70.4%| [LD] {BAZEL_DOWNLOAD} $(B)/yql/essentials/udfs/common/json2/libjson2_udf.so |71.9%| [PK] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tests/common/test_framework/udfs_deps/{common-test_framework-udfs_deps.final.pkg.fake ... 
yql/essentials/udfs/common/hyperscan/libhyperscan_udf.so} |74.3%| [AR] {BAZEL_DOWNLOAD} $(B)/ydb/tests/stress/kv/workload/libpy3stress-kv-workload.global.a |74.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/core/transfer/ut/functional/ydb-core-transfer-ut-functional |74.4%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/kqp/kqp_query_svc/ydb-tests-functional-kqp-kqp_query_svc |74.5%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |74.5%| [AR] {RESULT} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |74.5%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yql/providers/yt/provider/libproviders-yt-provider.a |74.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/replication/ydb-tests-functional-replication |74.5%| [LD] {BAZEL_DOWNLOAD} $(B)/ydb/tests/functional/backup/s3_path_style/ydb-tests-functional-backup-s3_path_style |74.5%| [AR] {default-linux-x86_64, release, asan} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |74.5%| [AR] {RESULT} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |74.5%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/minikql/comp_nodes/llvm16/libminikql-comp_nodes-llvm16.a |74.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |74.5%| [AR] {RESULT} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |74.5%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/libcore-tx-schemeshard.a |74.5%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/core/libyt-yt-core.a |74.5%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |74.5%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |74.5%| [AR] {default-linux-x86_64, release, asan} $(B)/yt/yt/client/libyt-yt-client.a |74.5%| [AR] {RESULT} $(B)/yt/yt/client/libyt-yt-client.a |74.5%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/client/libyt-yt-client.a |74.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/node_broker/node_broker |74.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/node_broker/node_broker |74.5%| [LD] {RESULT} $(B)/ydb/tests/stress/node_broker/node_broker |74.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv/workload_kv |74.6%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/workload_kv |74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/workload_kv |74.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic/workload_topic |74.6%| [LD] {RESULT} $(B)/ydb/tests/stress/topic/workload_topic |74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic/workload_topic |74.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |74.6%| [LD] {RESULT} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/tests/ydb-core-viewer-tests |74.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/dstool/ydb-tests-functional-dstool |74.6%| [LD] {RESULT} $(B)/ydb/tests/functional/dstool/ydb-tests-functional-dstool |74.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/olap_workload/olap_workload |74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/dstool/ydb-tests-functional-dstool |74.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/olap_workload |74.7%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/olap_workload |74.7%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |74.7%| [LD] {RESULT} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/tests/tests-stress-scheme_board-pile_promotion-tests |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/show_create/table/show_create_table |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |74.7%| [LD] {RESULT} $(B)/ydb/tests/stress/olap_workload/tests/ydb-tests-stress-olap_workload-tests |74.7%| [LD] {RESULT} $(B)/ydb/tests/stress/show_create/table/show_create_table |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/show_create/table/show_create_table |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |74.7%| [LD] {RESULT} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic_kafka/tests/ydb-tests-stress-topic_kafka-tests |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/example/ydb-tests-example |74.7%| [LD] {RESULT} $(B)/ydb/tests/example/ydb-tests-example |74.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/example/ydb-tests-example |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |74.7%| [LD] {RESULT} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/scheme_board/pile_promotion/pile_promotion_workload |74.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |74.7%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/merge_split_common_table/std/functional-sqs-merge_split_common_table-std |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |74.8%| [LD] {RESULT} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/sql/ydb-tests-sql |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/wardens/ydb-tests-functional-wardens |74.8%| [LD] {RESULT} $(B)/ydb/tests/sql/ydb-tests-sql |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/sql/ydb-tests-sql |74.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |74.8%| [LD] {RESULT} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |74.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/postgresql/ydb-tests-functional-postgresql |74.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |74.9%| [LD] {RESULT} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |75.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/encryption/ydb-tests-functional-encryption |75.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/s3_backups/s3_backups 
|75.1%| [LD] {RESULT} $(B)/ydb/tests/stress/s3_backups/s3_backups |75.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/s3_backups/s3_backups |75.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |75.3%| [LD] {RESULT} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |75.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/simple_queue/tests/ydb-tests-stress-simple_queue-tests |75.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |75.5%| [LD] {RESULT} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |75.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/cdc/tests/ydb-tests-stress-cdc-tests |75.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |75.7%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |75.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_tests/ydb-tests-functional-scheme_tests |76.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |76.3%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |76.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/recipe/kqprun_recipe |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydb/ydb |76.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |76.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydb/ydb |76.4%| [LD] {RESULT} $(B)/ydb/apps/ydb/ydb |76.4%| [LD] {RESULT} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |76.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/s3_backups/tests/ydb-tests-stress-s3_backups-tests |76.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |76.6%| [LD] {RESULT} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |76.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/tools/ydb_recipe/ydb_recipe |76.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |76.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |76.9%| [LD] {RESULT} $(B)/ydb/tests/stress/ctas/tests/ydb-tests-stress-ctas-tests |77.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/show_create/view/show_create_view |77.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/backup/backup_stress |77.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/show_create/view/show_create_view |77.4%| [LD] {RESULT} $(B)/ydb/tests/stress/show_create/view/show_create_view |77.4%| [LD] {RESULT} $(B)/ydb/tests/stress/backup/backup_stress |77.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/backup/backup_stress |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/cdc/cdc |77.5%| [LD] {RESULT} $(B)/ydb/tests/stress/cdc/cdc |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/cdc/cdc |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |77.5%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/tests/ydb-tests-tools-kqprun-tests |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/result_set_format/tests/ydb-tests-stress-result_set_format-tests 
|77.5%| [LD] {RESULT} $(B)/ydb/tests/stress/result_set_format/tests/ydb-tests-stress-result_set_format-tests |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/result_set_format/tests/ydb-tests-stress-result_set_format-tests |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/mixedpy/workload_mixed |77.5%| [LD] {RESULT} $(B)/ydb/tests/stress/mixedpy/workload_mixed |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/testshard_workload/workload_testshard |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/mixedpy/workload_mixed |77.5%| [LD] {RESULT} $(B)/ydb/tests/stress/testshard_workload/workload_testshard |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/testshard_workload/workload_testshard |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kafka/kafka_streams_test |77.5%| [LD] {RESULT} $(B)/ydb/tests/stress/kafka/kafka_streams_test |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kafka/kafka_streams_test |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests |77.5%| [LD] {RESULT} $(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/testshard_workload/tests/ydb-tests-stress-testshard_workload-tests |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |77.5%| [LD] {RESULT} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/http_api/ydb-tests-fq-http_api |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |77.5%| [LD] {RESULT} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |77.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/common/ydb-tests-fq-common |77.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |77.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |77.6%| [LD] {RESULT} $(B)/ydb/tests/stress/transfer/tests/ydb-tests-stress-transfer-tests |77.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |77.6%| [LD] {RESULT} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |77.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic/tests/ydb-tests-stress-topic-tests |77.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |77.6%| [LD] {RESULT} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |77.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |77.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/add_column/ydb-tests-datashard-add_column |77.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |77.7%| [LD] {RESULT} $(B)/ydb/tests/stress/oltp_workload/tests/ydb-tests-stress-oltp_workload-tests |77.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests |77.8%| [LD] {RESULT} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests |77.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/streaming/tests/ydb-tests-stress-streaming-tests |78.1%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |78.1%| [LD] {RESULT} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |78.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serializable/ydb-tests-functional-serializable |78.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |78.1%| [LD] {RESULT} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |78.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kafka/tests/ydb-tests-stress-kafka-tests |78.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/result_set_format/result_set_format |78.2%| [LD] {RESULT} $(B)/ydb/tests/stress/result_set_format/result_set_format |78.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/result_set_format/result_set_format |78.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/ctas/ctas |78.5%| [LD] {RESULT} $(B)/ydb/tests/stress/ctas/ctas |78.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/ctas/ctas |78.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |78.7%| [LD] {RESULT} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |78.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/viewer/tests/ydb-tests-stress-viewer-tests |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |78.9%| [LD] {RESULT} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/kv/tests/ydb-tests-stress-kv-tests |78.9%| [LD] {RESULT} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/pq_read/test/ydb-tests-tools-pq_read-test |78.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |78.9%| [LD] {RESULT} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |78.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/mixedpy/tests/ydb-tests-stress-mixedpy-tests |79.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |79.0%| [LD] {RESULT} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |79.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |79.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests |79.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/query_cache/ydb-tests-functional-query_cache |79.0%| [LD] {RESULT} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |79.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/restarts/ydb-tests-fq-restarts |79.1%| [LD] {RESULT} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests |79.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/show_create/table/tests/ydb-tests-stress-show_create-table-tests |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |79.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |79.5%| [LD] {RESULT} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |79.5%| [LD] {RESULT} $(B)/ydb/tests/stress/show_create/view/tests/ydb-tests-stress-show_create-view-tests |79.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/topic_kafka/workload_topic_kafka |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |79.6%| [LD] {RESULT} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |79.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/node_broker/tests/ydb-tests-stress-node_broker-tests |79.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests |79.6%| [LD] {RESULT} $(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/backup/tests/ydb-tests-stress-backup-tests |79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/transfer/transfer |79.7%| [LD] {RESULT} $(B)/ydb/tests/stress/transfer/transfer |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/transfer/transfer |79.7%| [AR] {default-linux-x86_64, release, asan, pic} $(B)/yt/yt/core/libyt-yt-core.a |79.7%| [AR] {RESULT} $(B)/yt/yt/core/libyt-yt-core.a |79.7%| [AR] {BAZEL_UPLOAD, SKIPPED} $(B)/yt/yt/core/libyt-yt-core.a |79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/stress/streaming/streaming |79.7%| [LD] {RESULT} $(B)/ydb/tests/stress/streaming/streaming |79.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/stress/streaming/streaming |79.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |79.8%| [LD] {RESULT} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/library/ut/ydb-tests-library-ut |79.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |79.7%| [LD] {RESULT} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/async_replication/ydb-tests-datashard-async_replication |79.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |79.8%| [LD] {RESULT} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |79.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/audit/ydb-tests-functional-audit |79.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |79.9%| [LD] {RESULT} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |79.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |79.9%| [LD] {RESULT} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |79.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/ydb-tests-functional-serverless |79.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/s3/ydb-tests-datashard-s3 |79.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |79.9%| [LD] {RESULT} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/parametrized_queries/ydb-tests-datashard-parametrized_queries |80.0%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |80.0%| [LD] {RESULT} $(B)/ydb/tests/datashard/select/ydb-tests-datashard-select |80.0%| [LD] {RESULT} $(B)/ydb/tests/olap/column_family/compression/ydb-tests-olap-column_family-compression |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/yql/essentials/tools/sql2yql/sql2yql |80.0%| [LD] {RESULT} $(B)/yql/essentials/tools/sql2yql/sql2yql |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/yql/essentials/tools/sql2yql/sql2yql |80.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |80.0%| [LD] {RESULT} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |80.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/api/ydb-tests-functional-api |80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/dstool/ydb-dstool |80.1%| [LD] {RESULT} $(B)/ydb/apps/dstool/ydb-dstool |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/dstool/ydb-dstool |80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |80.1%| [LD] {RESULT} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |80.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/ttl/ydb-tests-functional-ttl |80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |80.1%| [LD] {RESULT} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/blobstorage/ydb-tests-functional-blobstorage |80.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |80.1%| [LD] {RESULT} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/solomon/reading/ydb-tests-solomon-reading |80.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |80.2%| [LD] {RESULT} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/statistics/ydb-tests-functional-statistics |80.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |80.2%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |80.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/cloud/ydb-tests-functional-sqs-cloud |80.2%| [LD] {RESULT} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |80.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/restarts/ydb-tests-functional-restarts |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |80.4%| [LD] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/nemesis/ut/ydb-tests-tools-nemesis-ut |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom 
|80.4%| [LD] {RESULT} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/oom/ydb-tests-olap-oom |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |80.4%| [LD] {RESULT} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/rename/ydb-tests-functional-rename |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |80.4%| [LD] {RESULT} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/partitioning/ydb-tests-datashard-partitioning |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |80.4%| [LD] {RESULT} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/scenario/ydb-tests-olap-scenario |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |80.4%| [LD] {RESULT} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/datashard/ttl/ydb-tests-datashard-ttl |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |80.4%| [LD] {RESULT} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/script_execution/ydb-tests-functional-script_execution |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |80.4%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/messaging/ydb-tests-functional-sqs-messaging |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |80.4%| [LD] {RESULT} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |80.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/cms/ydb-tests-functional-cms |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |80.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |80.4%| [LD] {RESULT} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |80.4%| [LD] {RESULT} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/scheme_shard/ydb-tests-functional-scheme_shard |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/limits/ydb-tests-functional-limits |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |80.5%| [LD] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/sqs/with_quotas/ydb-tests-functional-sqs-with_quotas |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |80.5%| [LD] {RESULT} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/hive/ydb-tests-functional-hive |80.5%| [CC] {default-linux-x86_64, 
release, asan} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer.cpp |80.5%| [AR] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/libpqtablet-partition-mlp.a |80.5%| [AR] {RESULT} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/libpqtablet-partition-mlp.a |80.5%| [CC] {BAZEL_UPLOAD} $(S)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer.cpp |80.5%| [AR] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/libpqtablet-partition-mlp.a |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |80.5%| [LD] {RESULT} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/gateway/ut/ydb-core-kqp-gateway-ut |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |80.5%| [LD] {RESULT} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/minikql_compile/ut/ydb-core-client-minikql_compile-ut >> MetadataConversion::MakeAuthTest [GOOD] >> MetadataConversion::ConvertingExternalSourceMetadata [GOOD] |80.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/gateway/ut/gtest >> MetadataConversion::ConvertingExternalSourceMetadata [GOOD] |80.5%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/gateway/ut/gtest |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence |80.5%| [TS] {RESULT} ydb/core/kqp/gateway/ut/gtest |80.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut_sequence/ydb-core-blobstorage-nodewarden-ut_sequence >> TTestYqlToMiniKQLCompile::CheckResolve >> TTestYqlToMiniKQLCompile::CheckResolve [GOOD] >> TTestYqlToMiniKQLCompile::OnlyResult >> TTestYqlToMiniKQLCompile::OnlyResult [GOOD] >> TTestYqlToMiniKQLCompile::EraseRow >> TTestYqlToMiniKQLCompile::EraseRow [GOOD] >> TTestYqlToMiniKQLCompile::UpdateRow >> TTestYqlToMiniKQLCompile::UpdateRow [GOOD] >> TTestYqlToMiniKQLCompile::SelectRow >> TTestYqlToMiniKQLCompile::SelectRow [GOOD] >> TTestYqlToMiniKQLCompile::SelectRange >> TTestYqlToMiniKQLCompile::SelectRange [GOOD] >> TTestYqlToMiniKQLCompile::SimpleCrossShardTx >> TTestYqlToMiniKQLCompile::SimpleCrossShardTx [GOOD] >> TTestYqlToMiniKQLCompile::AcquireLocks >> TTestYqlToMiniKQLCompile::AcquireLocks [GOOD] >> TTestYqlToMiniKQLCompile::StaticMapTypeOf >> TTestYqlToMiniKQLCompile::StaticMapTypeOf [GOOD] >> TTestYqlToMiniKQLCompile::SelectRangeAtomInRange >> TTestYqlToMiniKQLCompile::SelectRangeAtomInRange [GOOD] >> TTestYqlToMiniKQLCompile::Extract >> TTestYqlToMiniKQLCompile::Extract [GOOD] >> NodeWardenDsProxyConfigRetrieval::Disconnect |80.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/client/minikql_compile/ut/unittest >> TTestYqlToMiniKQLCompile::Extract [GOOD] |80.5%| [TS] {BAZEL_UPLOAD} ydb/core/client/minikql_compile/ut/unittest >> NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut_sequence/unittest >> NodeWardenDsProxyConfigRetrieval::Disconnect [GOOD] Test command err: Caught NodeWarden registration actorId# [1:11:2058] 2026-01-04T02:17:55.500928Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:17:55.529861Z node 1 :BS_NODE DEBUG: 
{NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/c5zo/000150/r3tmp/tmpNHjHlb/static.dat" PDiskGuid: 702867010532567589 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 702867010532567589 } VDiskKind: Default } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 702867010532567589 } } } } AvailabilityDomains: 0 } 2026-01-04T02:17:55.530123Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:/home/runner/.ya/build/build_root/c5zo/000150/r3tmp/tmpNHjHlb/static.dat" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:17:55.530872Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:17:55.531205Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 702867010532567589 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-04T02:17:55.532179Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:1 PDiskGuid# 702867010532567589 2026-01-04T02:17:55.532228Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:17:55.533135Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:31:2076] ControllerId# 72057594037932033 2026-01-04T02:17:55.533174Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:17:55.533244Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-04T02:17:55.533404Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:17:55.533625Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-04T02:17:55.533730Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-04T02:17:55.566891Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:17:55.567020Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:17:55.567047Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-04T02:17:55.568633Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:17:55.570922Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2026-01-04T02:17:55.570960Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2026-01-04T02:17:55.571005Z node 1 :BS_NODE DEBUG: {NWDC13@distconf_binding.cpp:26} 
ApplyNewNodeList NewNodeList# [[::1:12001/1:DC=1/M=1/R=1/U=1/]] 2026-01-04T02:17:55.576119Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:509} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: ")\363N\313f\222\364-`\311\013\324\317\265\203\265p\355\274\333" } 2026-01-04T02:17:55.576421Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2026-01-04T02:17:55.576462Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2026-01-04T02:17:55.577527Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/c5zo/000150/r3tmp/tmpNHjHlb/static.dat" PDiskGuid: 702867010532567589 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 702867010532567589 } VDiskKind: Default } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1 PDiskGuid: 702867010532567589 } } } } AvailabilityDomains: 0 } 2026-01-04T02:17:55.577693Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-04T02:17:55.579071Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639248 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2026-01-04T02:17:55.594184Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1230} Handle(TEvStatusUpdate) 2026-01-04T02:17:55.596023Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1230} Handle(TEvStatusUpdate) 2026-01-04T02:17:55.609614Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:17:55.613703Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:17:55.614057Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:17:55.615115Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:17:55.615526Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:17:55.615743Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:17:55.615776Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:17:55.615942Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:17:55.627791Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:17:55.628278Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:17:55.628430Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:17:55.628613Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:17:55.628689Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:17:55.628770Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:17:55.667558Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:17:55.667678Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:17:55.682336Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:17:55.682461Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:17:55.682533Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:17:55.682608Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:17:55.682726Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:17:55.682778Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:17:55.682824Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:17:55.682863Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:17:55.698656Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:17:55.698771Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:17:55.722301Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:17:55.722411Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:17:55.723627Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:17:55.723670Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:17:55.740809Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:17:55.740880Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:17:55.741295Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639257 Sender# [1:92:2123] SessionId# [0:0:0] Cookie# 0 Pipe connected clientId# [1:31:2076] 2026-01-04T02:17:55.741386Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:53} TEvTabletPipe::TEvClientConnected OK ClientId# [1:31:2076] ServerId# [1:125:2147] TabletId# 72057594037932033 PipeClientId# [1:31:2076] 2026-01-04T02:17:55.741974Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 702867010532567589 Status: INIT_PENDING OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 2026-01-04T02:17:55.742060Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2026-01-04T02:17:55.745112Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:/home/runner/.ya/build/build_root/c5zo/000150/r3tm ... sionId# [0:0:0] Cookie# 0 === Waiting for pipe to establish === === Breaking pipe === === Sending put === Pipe disconnected clientId# [1:31:2076] 2026-01-04T02:17:55.775874Z node 1 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [1:31:2076] ServerId# [1:125:2147] TabletId# 72057594037932033 PipeClientId# [1:31:2076] 2026-01-04T02:17:55.775961Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:139:2160] ControllerId# 72057594037932033 2026-01-04T02:17:55.776039Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:17:55.782173Z node 1 :BS_NODE DEBUG: {NW46@node_warden_proxy.cpp:143} HandleForwarded GroupId# 2147483648 EnableProxyMock# false NoGroup# false 2026-01-04T02:17:55.782242Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 2147483648 HasGroupInfo# false GroupInfoGeneration# 2026-01-04T02:17:55.782293Z node 1 :BS_NODE DEBUG: {NW98@node_warden_group.cpp:285} RequestGroupConfig GroupId# 2147483648 2026-01-04T02:17:55.782577Z node 1 :BS_NODE INFO: {NW79@node_warden_group_resolver.cpp:74} TGroupResolverActor::Bootstrap GroupId# 2147483648 2026-01-04T02:17:55.782668Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2026-01-04T02:17:55.782718Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:11:2058] SessionId# [0:0:0] Cookie# 0 2026-01-04T02:17:55.785860Z node 1 :BS_NODE INFO: {NW81@node_warden_group_resolver.cpp:270} TGroupResolverActor::PassAway GroupId# 2147483648 Pipe connected clientId# [1:139:2160] 2026-01-04T02:17:55.786283Z node 1 :BS_NODE DEBUG: {NW05@node_warden_pipe.cpp:53} TEvTabletPipe::TEvClientConnected OK ClientId# [1:139:2160] ServerId# [1:150:2169] TabletId# 72057594037932033 PipeClientId# [1:139:2160] 2026-01-04T02:17:55.786515Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN01@register_node.cpp:216} Handle TEvControllerRegisterNode Request# {NodeID: 1 VDiskStatus { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1 PDiskGuid: 702867010532567589 Status: READY OnlyPhantomsRemain: false } DeclarativePDiskManagement: true } 
2026-01-04T02:17:55.786608Z node 1 :BS_CONTROLLER DEBUG: {BSCBR00@bridge.cpp:269} ApplySyncerState NodeId# 1 Update# {} Comprehensive# true 2026-01-04T02:17:55.786884Z node 1 :BS_CONTROLLER DEBUG: {BSC11@scrub.cpp:214} Handle(TEvControllerScrubQuantumFinished) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } Success: true } 2026-01-04T02:17:55.787083Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG02@get_group.cpp:58} TEvControllerGetGroup Sender# [1:11:2058] Cookie# 0 Recipient# [1:150:2169] RecipientRewrite# [1:92:2123] Request# {NodeID: 1 GroupIDs: 2147483648 } StopGivingGroups# false 2026-01-04T02:17:55.787149Z node 1 :BS_CONTROLLER DEBUG: {BSCTXGG01@get_group.cpp:22} Handle TEvControllerGetGroup Request# {NodeID: 1 GroupIDs: 2147483648 } 2026-01-04T02:17:55.787258Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } } 2026-01-04T02:17:55.787370Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank: 0 VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1 } State: OK Replicated: true DiskSpace: Green IsThrottling: false ThrottlingRate: 0 } } 2026-01-04T02:17:55.819115Z node 1 :BS_NODE DEBUG: {NW52@node_warden_impl.cpp:845} TEvControllerNodeServiceSetUpdate Record# {Status: OK NodeID: 1 ServiceSet { PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/c5zo/000150/r3tmp/tmpNHjHlb/static.dat" PDiskGuid: 702867010532567589 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 702867010532567589 } VDiskKind: Default StoragePoolName: "" GroupSizeInUnits: 0 } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 702867010532567589 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } InstanceId: "f4d96c83-d5d1e3f6-b350335e-577356e2" Comprehensive: true AvailDomain: 0 UpdateSyncers: true } 2026-01-04T02:17:55.819409Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:863} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/c5zo/000150/r3tmp/tmpNHjHlb/static.dat" PDiskGuid: 702867010532567589 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 702867010532567589 } VDiskKind: Default StoragePoolName: "" GroupSizeInUnits: 0 } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 702867010532567589 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } InstanceId: "f4d96c83-d5d1e3f6-b350335e-577356e2" Comprehensive: true 
AvailDomain: 0 UpdateSyncers: true } 2026-01-04T02:17:55.819587Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# true Origin# controller ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:/home/runner/.ya/build/build_root/c5zo/000150/r3tmp/tmpNHjHlb/static.dat" PDiskGuid: 702867010532567589 PDiskCategory: 0 PDiskConfig { ExpectedSlotCount: 2 } EntityStatus: INITIAL ExpectedSerial: "" ManagementStage: DISCOVER_SERIAL SpaceColorBorder: GREEN } VDisks { VDiskID { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 702867010532567589 } VDiskKind: Default StoragePoolName: "" GroupSizeInUnits: 0 } Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 702867010532567589 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2026-01-04T02:17:55.819758Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-04T02:17:55.819827Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-04T02:17:55.819913Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 702867010532567589 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-04T02:17:55.820703Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [80000000:1:0:0:0] VSlotId# 1:1:1000 PDiskGuid# 702867010532567589 2026-01-04T02:17:55.823286Z node 1 :BS_CONTROLLER DEBUG: {BSCTXRN05@register_node.cpp:34} Add devicesData from NodeWarden NodeId# 1 Devices# [] 2026-01-04T02:17:55.830155Z node 1 :BS_NODE DEBUG: {NW52@node_warden_impl.cpp:845} TEvControllerNodeServiceSetUpdate Record# {Status: OK NodeID: 1 ServiceSet { Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 702867010532567589 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } } 2026-01-04T02:17:55.830275Z node 1 :BS_NODE DEBUG: {NW17@node_warden_impl.cpp:863} Handle(TEvBlobStorage::TEvControllerNodeServiceSetUpdate) Msg# {Status: OK NodeID: 1 ServiceSet { Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 702867010532567589 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } } 2026-01-04T02:17:55.830410Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# false Comprehensive# false Origin# controller ServiceSet# {Groups { GroupID: 2147483648 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 1000 PDiskGuid: 702867010532567589 } } } EncryptionMode: 0 LifeCyclePhase: 0 MainKeyId: "" EncryptedGroupKey: "" 
GroupKeyNonce: 2147483648 MainKeyVersion: 0 StoragePoolName: "" DeviceType: ROT } } 2026-01-04T02:17:55.830578Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-04T02:17:55.830650Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 2 OldExpectedSlotCount# 2 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-04T02:17:55.832216Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 702867010532567589 Status: INIT_PENDING OnlyPhantomsRemain: false } } 2026-01-04T02:17:55.832845Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } } 2026-01-04T02:17:55.845552Z node 1 :BS_CONTROLLER DEBUG: {BSC10@scrub.cpp:187} Handle(TEvControllerScrubQueryStartQuantum) Msg# {VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } } 2026-01-04T02:17:55.878894Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1230} Handle(TEvStatusUpdate) 2026-01-04T02:17:55.886274Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 702867010532567589 Status: REPLICATING OnlyPhantomsRemain: false } } 2026-01-04T02:17:55.886952Z node 1 :BS_NODE DEBUG: {NW47@node_warden_impl.cpp:1230} Handle(TEvStatusUpdate) 2026-01-04T02:17:55.887137Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDiskStatus { VDiskId { GroupID: 2147483648 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } NodeId: 1 PDiskId: 1 VSlotId: 1000 PDiskGuid: 702867010532567589 Status: READY OnlyPhantomsRemain: false } } 2026-01-04T02:17:56.549637Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {PDisksMetrics { PDiskId: 1 AvailableSize: 34189869056 TotalSize: 34359738368 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 EnforcedDynamicSlotSize: 17041457152 State: Normal SlotCount: 2 SlotSizeInUnits: 0 PDiskUsage: 0.10449320794148381 } } |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |80.5%| [TS] {RESULT} ydb/core/client/minikql_compile/ut/unittest |80.5%| [TM] {RESULT} ydb/core/blobstorage/nodewarden/ut_sequence/unittest |80.5%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/yc_search_ut/ydb-core-ymq-actor-yc_search_ut |80.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut_sequence/unittest |80.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |80.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |80.5%| [LD] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/ydb-core-kqp-workload_service-ut |80.6%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |80.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpoint_storage/ut/ydb-core-fq-libs-checkpoint_storage-ut |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |80.6%| [LD] {RESULT} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/rate_limiter/ut/ydb-services-rate_limiter-ut |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/federated_query/ut_service/fast/ydb-core-kqp-federated_query-ut_service-fast |80.6%| [LD] {RESULT} $(B)/ydb/core/kqp/federated_query/ut_service/fast/ydb-core-kqp-federated_query-ut_service-fast |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/federated_query/ut_service/fast/ydb-core-kqp-federated_query-ut_service-fast |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |80.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_keys/ydb-core-tx-datashard-ut_keys |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |80.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_column_stats/ydb-core-tx-datashard-ut_column_stats |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut |80.6%| [LD] {RESULT} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/pqtablet/partition/mlp/ut/ydb-core-persqueue-pqtablet-partition-mlp-ut |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum |80.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_vacuum/ydb-core-tx-datashard-ut_vacuum >> KqpWorkloadServiceDistributed::TestDistributedQueue >> ResourcePoolsDdl::TestPoolSwitchToLimitedState >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewOnServerless >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool >> TxKeys::ComparePointKeys >> KqpWorkloadServiceActors::TestPoolFetcher >> KqpWorkloadService::TestQueueSizeSimple >> DefaultPoolSettings::TestResourcePoolsSysViewFilters >> DescribeSchemaSecretsService::GetNewValue >> TGRpcRateLimiterTest::CreateResource >> TxKeys::ComparePointKeys [GOOD] >> TxKeys::ComparePointKeysWithNull >> StatisticsScan::RunScanOnShard >> Vacuum::Vacuum >> TMLPConsumerTests::ReloadPQTablet |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |80.6%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceshard/ut/ydb-core-tx-sequenceshard-ut >> 
TxKeys::ComparePointKeysWithNull [GOOD] >> TxKeys::ComparePointAndRange >> TxKeys::ComparePointAndRange [GOOD] >> TxKeys::ComparePointAndRangeWithNull >> TIndexProcesorTests::TestCreateIndexProcessor >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAlter [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop >> KqpWorkloadServiceActors::TestPoolFetcher [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlag [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless >> TxKeys::ComparePointAndRangeWithNull [GOOD] >> TxKeys::ComparePointAndRangeWithInf >> TGRpcRateLimiterTest::CreateResource [GOOD] >> TGRpcRateLimiterTest::UpdateResource >> KqpWorkloadServiceTables::TestTablesIsNotCreatingForUnlimitedPool [GOOD] >> KqpWorkloadServiceTables::TestPoolStateFetcherActor >> SequenceShardTests::Basics |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |80.6%| [LD] {RESULT} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/long_tx_service/ut/ydb-core-tx-long_tx_service-ut >> TxKeys::ComparePointAndRangeWithInf [GOOD] >> SequenceShardTests::Basics [GOOD] >> SequenceShardTests::MarkedPipeRetries >> SequenceShardTests::MarkedPipeRetries [GOOD] >> SequenceShardTests::FreezeRestoreRedirect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_keys/unittest >> TxKeys::ComparePointAndRangeWithInf [GOOD] Test command err: 2026-01-04T02:18:00.565595Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:18:00.567321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:18:00.567368Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:00.579149Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:18:00.579486Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:18:00.579683Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:18:00.622723Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:18:00.631937Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:18:00.632034Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:18:00.633167Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:18:00.633239Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:18:00.633324Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:18:00.633668Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:18:00.634783Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:18:00.634865Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:18:00.706058Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:18:00.737125Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:18:00.737318Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:18:00.737440Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:18:00.737492Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:18:00.737560Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:18:00.737602Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:18:00.737844Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:18:00.737894Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:18:00.738210Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:18:00.738330Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:18:00.738412Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:18:00.738462Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:18:00.738502Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:18:00.738547Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:18:00.738586Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:18:00.738632Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:18:00.738678Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:18:00.738775Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:18:00.738823Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:18:00.738863Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:18:00.745611Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ 
#\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:18:00.745692Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:18:00.745820Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:18:00.745982Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:18:00.746027Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:18:00.746083Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:18:00.746145Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:18:00.746197Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:18:00.746234Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:18:00.746266Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:18:00.746651Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:18:00.746695Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:18:00.746730Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:18:00.746763Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:18:00.746816Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:18:00.746858Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:18:00.746897Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:18:00.746944Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:18:00.746973Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:18:00.759528Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:18:00.759612Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:18:00.759649Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:18:00.759698Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:18:00.759787Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:18:00.761437Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:18:00.761481Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:18:00.761514Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:18:00.761607Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:18:00.761629Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:18:00.761741Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:18:00.761775Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:18:00.761839Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:18:00.761867Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:18:00.767230Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:18:00.767287Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:18:00.767499Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:18:00.767529Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:18:00.767572Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:18:00.767604Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:18:00.767634Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:18:00.767664Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:18:00.767687Z node 1 :TX_DATASHARD TRACE: dat ... 
ode 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:18:04.578025Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [5:26:2073], Recipient [5:136:2157]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2026-01-04T02:18:04.578075Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-01-04T02:18:04.578120Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2026-01-04T02:18:04.578161Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:18:04.581241Z node 5 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000001 txid# 1} 2026-01-04T02:18:04.581295Z node 5 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000001} 2026-01-04T02:18:04.581348Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:18:04.583428Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:18:04.583481Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000001:1] at 9437184 on unit CreateTable 2026-01-04T02:18:04.583527Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:18:04.583574Z node 5 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 9437184 2026-01-04T02:18:04.583610Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000001:1] at 9437184 on unit CompleteOperation 2026-01-04T02:18:04.583666Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000001 : 1] from 9437184 at tablet 9437184 send result to client [5:104:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-04T02:18:04.583721Z node 5 :TX_DATASHARD INFO: datashard.cpp:1599: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2026-01-04T02:18:04.583802Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:18:04.584831Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877760, Sender [5:234:2230], Recipient [5:136:2157]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [5:236:2231] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-04T02:18:04.584882Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3186: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-04T02:18:04.585000Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5937: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 136 RawX2: 21474838637 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 2 2026-01-04T02:18:04.585072Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552132, Sender [5:127:2151], Recipient [5:136:2157]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2026-01-04T02:18:04.585098Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2026-01-04T02:18:04.585131Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 1 datashard 9437184 state 
Ready 2026-01-04T02:18:04.585182Z node 5 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2026-01-04T02:18:04.585496Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 65543, Sender [5:104:2137], Recipient [5:136:2157]: NActors::TEvents::TEvPoison 2026-01-04T02:18:04.586019Z node 5 :TX_DATASHARD INFO: datashard.cpp:189: OnDetach: 9437184 2026-01-04T02:18:04.586131Z node 5 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 9437184 2026-01-04T02:18:04.597318Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [5:239:2232], Recipient [5:241:2233]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:18:04.602676Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [5:239:2232], Recipient [5:241:2233]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:18:04.602853Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828684, Sender [5:239:2232], Recipient [5:241:2233]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:18:04.605717Z node 5 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [5:241:2233] 2026-01-04T02:18:04.606807Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:18:04.609890Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:704: TxInitSchema.Execute Persist Sys_SubDomainInfo 2026-01-04T02:18:04.638918Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:18:04.639017Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:18:04.640922Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:18:04.640985Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:18:04.641031Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:18:04.641338Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:18:04.641492Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:18:04.641539Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [5:284:2233] in generation 3 2026-01-04T02:18:04.653271Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:18:04.653386Z node 5 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 9437184 2026-01-04T02:18:04.653479Z node 5 :TX_DATASHARD INFO: datashard.cpp:1599: 9437184 Sending notify to schemeshard 4200 txId 1 state Ready TxInFly 0 2026-01-04T02:18:04.653580Z node 5 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 9437184 mediators count is 0 coordinators count is 1 buckets per mediator 2 2026-01-04T02:18:04.653778Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [5:289:2272] 2026-01-04T02:18:04.653840Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:18:04.653890Z node 5 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 9437184 2026-01-04T02:18:04.653927Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] 
Emit heartbeats: at tablet# 9437184 2026-01-04T02:18:04.654128Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:742: TxInitSchemaDefaults.Execute 2026-01-04T02:18:04.654207Z node 5 :TX_DATASHARD DEBUG: datashard__init.cpp:754: TxInitSchemaDefaults.Complete 2026-01-04T02:18:04.654334Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [5:241:2233], Recipient [5:241:2233]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:18:04.654375Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:18:04.654557Z node 5 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:18:04.654633Z node 5 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:18:04.654724Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5937: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 241 RawX2: 21474838713 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2026-01-04T02:18:04.654773Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [5:26:2073], Recipient [5:241:2233]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2026-01-04T02:18:04.654795Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-01-04T02:18:04.654825Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2026-01-04T02:18:04.654857Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:18:04.654920Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [5:26:2073], Recipient [5:241:2233]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2026-01-04T02:18:04.654945Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2026-01-04T02:18:04.654978Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2026-01-04T02:18:04.655028Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:18:04.655063Z node 5 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:18:04.655093Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:18:04.655119Z node 5 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:18:04.655148Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:18:04.655173Z node 5 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:18:04.655203Z node 5 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:18:04.655262Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877760, Sender [5:287:2270], Recipient [5:241:2233]: 
NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [5:291:2274] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-04T02:18:04.655287Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3186: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-04T02:18:04.655342Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552132, Sender [5:127:2151], Recipient [5:241:2233]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2026-01-04T02:18:04.655366Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2026-01-04T02:18:04.655395Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2026-01-04T02:18:04.655441Z node 5 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2026-01-04T02:18:04.670741Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877763, Sender [5:287:2270], Recipient [5:241:2233]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [5:287:2270] ServerId: [5:291:2274] } 2026-01-04T02:18:04.670812Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed |80.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_keys/unittest >> TIndexProcesorTests::TestCreateIndexProcessor [GOOD] >> TIndexProcesorTests::TestSingleCreateQueueEvent >> SequenceShardTests::FreezeRestoreRedirect [GOOD] >> SequenceShardTests::NegativeIncrement >> DescribeSchemaSecretsService::GetNewValue [GOOD] >> DescribeSchemaSecretsService::GetUpdatedValue >> SequenceShardTests::NegativeIncrement [GOOD] >> KqpWorkloadService::TestQueueSizeSimple [GOOD] >> KqpWorkloadService::TestQueueSizeManyQueries >> StatisticsScan::RunScanOnShard [GOOD] >> LongTxService::BasicTransactions >> ResourcePoolsDdl::TestPoolSwitchToLimitedState [GOOD] >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceshard/ut/unittest >> SequenceShardTests::NegativeIncrement [GOOD] Test command err: 2026-01-04T02:18:04.729686Z node 1 :SEQUENCESHARD TRACE: sequenceshard_impl.cpp:38: [sequenceshard 72057594037927937] OnActivateExecutor 2026-01-04T02:18:04.729788Z node 1 :SEQUENCESHARD TRACE: tx_init_schema.cpp:14: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2026-01-04T02:18:04.738765Z node 1 :SEQUENCESHARD TRACE: tx_init.cpp:14: [sequenceshard 72057594037927937] TTxInit.Execute 2026-01-04T02:18:04.740693Z node 1 :SEQUENCESHARD TRACE: tx_init_schema.cpp:22: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2026-01-04T02:18:04.740744Z node 1 :SEQUENCESHARD TRACE: tx_init.cpp:112: [sequenceshard 72057594037927937] TTxInit.Complete 2026-01-04T02:18:04.746991Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2026-01-04T02:18:04.747159Z node 1 :SEQUENCESHARD NOTICE: tx_create_sequence.cpp:113: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 1 Cache# 1 Increment# 1 Cycle# false State# Active 2026-01-04T02:18:04.786562Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2026-01-04T02:18:04.786876Z node 1 
:SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } 2026-01-04T02:18:04.786943Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:33: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SEQUENCE_ALREADY_EXISTS PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-04T02:18:04.787004Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2026-01-04T02:18:04.787225Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } StartValue: 100001 Cache: 10 2026-01-04T02:18:04.787314Z node 1 :SEQUENCESHARD NOTICE: tx_create_sequence.cpp:113: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] MinValue# 1 MaxValue# 9223372036854775807 StartValue# 100001 Cache# 10 Increment# 1 Cycle# false State# Active 2026-01-04T02:18:04.799353Z node 1 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2026-01-04T02:18:04.799713Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-04T02:18:04.799819Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 1 AllocationCount# 1 AllocationIncrement# 1 2026-01-04T02:18:04.815235Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-04T02:18:04.815559Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 10 2026-01-04T02:18:04.815648Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 2 AllocationCount# 10 AllocationIncrement# 1 2026-01-04T02:18:04.831441Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-04T02:18:04.831814Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2026-01-04T02:18:04.831923Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100001 AllocationCount# 10 AllocationIncrement# 1 2026-01-04T02:18:04.847150Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-04T02:18:04.847569Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 50 2026-01-04T02:18:04.847666Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100011 AllocationCount# 50 AllocationIncrement# 1 
2026-01-04T02:18:04.862814Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-04T02:18:04.863222Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 99] Cache# 0 2026-01-04T02:18:04.863269Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:35: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 99] 2026-01-04T02:18:04.863333Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-04T02:18:04.863546Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2026-01-04T02:18:04.863646Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 12 AllocationCount# 9223372036854775796 AllocationIncrement# 1 2026-01-04T02:18:04.877894Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-04T02:18:04.878293Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2026-01-04T02:18:04.878357Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:72: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-04T02:18:04.878411Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-04T02:18:04.878633Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:20: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-04T02:18:04.878714Z node 1 :SEQUENCESHARD NOTICE: tx_drop_sequence.cpp:43: [sequenceshard 72057594037927937] TTxDropSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-04T02:18:04.894781Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:48: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2026-01-04T02:18:04.895163Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:20: [sequenceshard 72057594037927937] TTxDropSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-04T02:18:04.895229Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:33: [sequenceshard 72057594037927937] TTxDropSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-04T02:18:04.895313Z node 1 :SEQUENCESHARD TRACE: tx_drop_sequence.cpp:48: [sequenceshard 72057594037927937] TTxDropSequence.Complete 2026-01-04T02:18:04.913110Z node 1 :SEQUENCESHARD TRACE: sequenceshard_impl.cpp:38: [sequenceshard 72057594037927937] OnActivateExecutor 2026-01-04T02:18:04.913236Z node 1 :SEQUENCESHARD TRACE: tx_init_schema.cpp:14: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2026-01-04T02:18:04.913727Z node 1 :SEQUENCESHARD TRACE: tx_init_schema.cpp:22: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2026-01-04T02:18:04.914110Z node 1 :SEQUENCESHARD TRACE: tx_init.cpp:14: [sequenceshard 72057594037927937] TTxInit.Execute 2026-01-04T02:18:04.914322Z node 1 :SEQUENCESHARD TRACE: tx_init.cpp:112: [sequenceshard 
72057594037927937] TTxInit.Complete 2026-01-04T02:18:04.918607Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-04T02:18:04.918667Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:35: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_NOT_FOUND PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-04T02:18:04.918719Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-04T02:18:04.919004Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2026-01-04T02:18:04.919094Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 100061 AllocationCount# 10 AllocationIncrement# 1 2026-01-04T02:18:04.970651Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-04T02:18:04.971242Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:21: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } NextValue: 200000 NextUsed: true 2026-01-04T02:18:04.971374Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:103: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2026-01-04T02:18:04.983559Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:108: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2026-01-04T02:18:04.983953Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2026-01-04T02:18:04.984059Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200001 AllocationCount# 10 AllocationIncrement# 1 2026-01-04T02:18:04.996108Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-04T02:18:04.996546Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:21: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Record# PathId { OwnerId: 123 LocalId: 51 } Cache: 5 2026-01-04T02:18:04.996654Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:103: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] 2026-01-04T02:18:05.023772Z node 1 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:108: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2026-01-04T02:18:05.024204Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 51] Cache# 0 2026-01-04T02:18:05.024299Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 51] AllocationStart# 200011 AllocationCount# 5 AllocationIncrement# 1 2026-01-04T02:18:05.038697Z node 1 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard ... 
Id: 43] Cache# 0 2026-01-04T02:18:05.972152Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] AllocationStart# 11 AllocationCount# 100 AllocationIncrement# 1 2026-01-04T02:18:05.994429Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-04T02:18:05.994942Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:21: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] Record# PathId { OwnerId: 123 LocalId: 43 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 11 Cache: 100 Increment: 1 2026-01-04T02:18:05.995002Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:66: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SEQUENCE_ALREADY_ACTIVE PathId# [OwnerId: 123, LocalPathId: 43] 2026-01-04T02:18:05.995063Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:103: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2026-01-04T02:18:05.995305Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-01-04T02:18:05.995398Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-01-04T02:18:06.007849Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2026-01-04T02:18:06.008227Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-01-04T02:18:06.008313Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-01-04T02:18:06.020229Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2026-01-04T02:18:06.020592Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-01-04T02:18:06.020680Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] RedirectTo# 12345 2026-01-04T02:18:06.033983Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2026-01-04T02:18:06.034353Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-04T02:18:06.034410Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:54: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 42] MovedTo# 12345 2026-01-04T02:18:06.034479Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-04T02:18:06.034758Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:20: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, 
LocalPathId: 43] 2026-01-04T02:18:06.034858Z node 3 :SEQUENCESHARD NOTICE: tx_freeze_sequence.cpp:68: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] 2026-01-04T02:18:06.050618Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:73: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2026-01-04T02:18:06.051126Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:21: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2026-01-04T02:18:06.051272Z node 3 :SEQUENCESHARD NOTICE: tx_restore_sequence.cpp:98: [sequenceshard 72057594037927937] TTxRestoreSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 NextValue: 111 Cache: 100 Increment: 1 2026-01-04T02:18:06.064515Z node 3 :SEQUENCESHARD TRACE: tx_restore_sequence.cpp:103: [sequenceshard 72057594037927937] TTxRestoreSequence.Complete 2026-01-04T02:18:06.064958Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:22: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2026-01-04T02:18:06.065058Z node 3 :SEQUENCESHARD NOTICE: tx_redirect_sequence.cpp:59: [sequenceshard 72057594037927937] TTxRedirectSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 43] RedirectTo# 54321 2026-01-04T02:18:06.078027Z node 3 :SEQUENCESHARD TRACE: tx_redirect_sequence.cpp:64: [sequenceshard 72057594037927937] TTxRedirectSequence.Complete 2026-01-04T02:18:06.078451Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:20: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute PathId# [OwnerId: 123, LocalPathId: 43] 2026-01-04T02:18:06.078510Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:48: [sequenceshard 72057594037927937] TTxFreezeSequence.Execute SEQUENCE_MOVED PathId# [OwnerId: 123, LocalPathId: 43] MovedTo# 54321 2026-01-04T02:18:06.078580Z node 3 :SEQUENCESHARD TRACE: tx_freeze_sequence.cpp:73: [sequenceshard 72057594037927937] TTxFreezeSequence.Complete 2026-01-04T02:18:06.078838Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-04T02:18:06.078927Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# 111 AllocationCount# 100 AllocationIncrement# 1 2026-01-04T02:18:06.091254Z node 3 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-04T02:18:06.418604Z node 4 :SEQUENCESHARD TRACE: sequenceshard_impl.cpp:38: [sequenceshard 72057594037927937] OnActivateExecutor 2026-01-04T02:18:06.418694Z node 4 :SEQUENCESHARD TRACE: tx_init_schema.cpp:14: [sequenceshard 72057594037927937] TTxInitSchema.Execute 2026-01-04T02:18:06.426497Z node 4 :SEQUENCESHARD TRACE: tx_init.cpp:14: [sequenceshard 72057594037927937] TTxInit.Execute 2026-01-04T02:18:06.429547Z node 4 :SEQUENCESHARD TRACE: tx_init_schema.cpp:22: [sequenceshard 72057594037927937] TTxInitSchema.Complete 2026-01-04T02:18:06.429628Z node 4 :SEQUENCESHARD TRACE: tx_init.cpp:112: [sequenceshard 72057594037927937] TTxInit.Complete 
2026-01-04T02:18:06.431389Z node 4 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:21: [sequenceshard 72057594037927937] TTxCreateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } Cache: 10 Increment: -1 2026-01-04T02:18:06.431500Z node 4 :SEQUENCESHARD NOTICE: tx_create_sequence.cpp:113: [sequenceshard 72057594037927937] TTxCreateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] MinValue# -9223372036854775808 MaxValue# -1 StartValue# -1 Cache# 10 Increment# -1 Cycle# false State# Active 2026-01-04T02:18:06.454222Z node 4 :SEQUENCESHARD TRACE: tx_create_sequence.cpp:118: [sequenceshard 72057594037927937] TTxCreateSequence.Complete 2026-01-04T02:18:06.454555Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-04T02:18:06.454657Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2026-01-04T02:18:06.466709Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-04T02:18:06.467053Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-04T02:18:06.467152Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2026-01-04T02:18:06.479117Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-04T02:18:06.479444Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 18446744073709551615 2026-01-04T02:18:06.479539Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -21 AllocationCount# 9223372036854775788 AllocationIncrement# -1 2026-01-04T02:18:06.491282Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-04T02:18:06.491571Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 1 2026-01-04T02:18:06.491613Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:72: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SEQUENCE_OVERFLOW PathId# [OwnerId: 123, LocalPathId: 42] 2026-01-04T02:18:06.491659Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-04T02:18:06.491871Z node 4 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:21: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Record# PathId { OwnerId: 123 LocalId: 42 } Cycle: true 2026-01-04T02:18:06.491933Z node 4 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:103: [sequenceshard 72057594037927937] TTxUpdateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] 
2026-01-04T02:18:06.503725Z node 4 :SEQUENCESHARD TRACE: tx_update_sequence.cpp:108: [sequenceshard 72057594037927937] TTxUpdateSequence.Complete 2026-01-04T02:18:06.503987Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-04T02:18:06.504084Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -1 AllocationCount# 10 AllocationIncrement# -1 2026-01-04T02:18:06.515947Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete 2026-01-04T02:18:06.516265Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:22: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute PathId# [OwnerId: 123, LocalPathId: 42] Cache# 0 2026-01-04T02:18:06.516348Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:89: [sequenceshard 72057594037927937] TTxAllocateSequence.Execute SUCCESS PathId# [OwnerId: 123, LocalPathId: 42] AllocationStart# -11 AllocationCount# 10 AllocationIncrement# -1 2026-01-04T02:18:06.528361Z node 4 :SEQUENCESHARD TRACE: tx_allocate_sequence.cpp:174: [sequenceshard 72057594037927937] TTxAllocateSequence.Complete |80.6%| [TS] {BAZEL_UPLOAD} ydb/core/tx/sequenceshard/ut/unittest >> KqpWorkloadServiceActors::TestPoolFetcherAclValidation [GOOD] >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterDrop [GOOD] >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables >> LongTxService::BasicTransactions [GOOD] >> LongTxService::AcquireSnapshot |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export |80.6%| [TM] {RESULT} ydb/core/tx/datashard/ut_keys/unittest |80.6%| [TS] {RESULT} ydb/core/tx/sequenceshard/ut/unittest |80.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_export/ydb-core-tx-datashard-ut_export ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_column_stats/unittest >> StatisticsScan::RunScanOnShard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:18:04.476541Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:18:04.603931Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:18:04.604428Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:18:04.604492Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:18:04.887699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:04.887838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:04.960120Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493081528254 != 1767493081528257 2026-01-04T02:18:04.973336Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:05.021308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:05.119469Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:18:05.446085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:05.460073Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:05.568482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:05.870793Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:05.870932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:05.871357Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:05.872244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:05.872464Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:05.876716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:18:06.013867Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:18:06.089007Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |80.6%| [TM] {RESULT} ydb/core/tx/datashard/ut_column_stats/unittest |80.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_column_stats/unittest >> Vacuum::Vacuum [GOOD] >> Vacuum::VacuumWithoutCompaction |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |80.6%| [LD] {RESULT} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/query_actor/ut/ydb-library-query_actor-ut |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |80.6%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/splitter/ut/ydb-core-tx-columnshard-splitter-ut >> TGRpcRateLimiterTest::UpdateResource [GOOD] >> TGRpcRateLimiterTest::DropResource >> ResourcePoolClassifiersDdl::TestResourcePoolClassifiersPermissions [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier >> TIndexProcesorTests::TestSingleCreateQueueEvent [GOOD] >> TIndexProcesorTests::TestReindexSingleQueue >> LongTxService::AcquireSnapshot [GOOD] >> LongTxService::LockSubscribe >> ExportS3BufferTest::MinBufferSize [GOOD] >> ExportS3BufferTest::MinBufferSizeWithCompression [GOOD] >> ExportS3BufferTest::MinBufferSizeWithCompressionAndEncryption [GOOD] >> Splitter::Simple >> QueryActorTest::SimpleQuery |80.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_export/unittest >> ExportS3BufferTest::MinBufferSizeWithCompressionAndEncryption [GOOD] >> Splitter::Simple [GOOD] >> Splitter::Small [GOOD] >> Splitter::Minimal [GOOD] >> Splitter::Trivial >> Splitter::Trivial [GOOD] >> Splitter::BigAndSmall |80.6%| [TS] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_export/unittest |80.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |80.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops |80.7%| [TS] {RESULT} ydb/core/tx/datashard/ut_export/unittest |80.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_range_ops/ydb-core-tx-datashard-ut_range_ops >> Splitter::BigAndSmall [GOOD] >> Splitter::CritSmallPortions >> DefaultPoolSettings::TestResourcePoolsSysViewFilters [GOOD] >> KqpWorkloadService::TestLargeConcurrentQueryLimit |80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |80.7%| [LD] {RESULT} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/raw_socket/ut/ydb-core-raw_socket-ut >> KqpWorkloadServiceActors::TestPoolFetcherNotExistingPool [GOOD] >> KqpWorkloadServiceActors::TestCreateDefaultPool >> TIndexProcesorTests::TestReindexSingleQueue [GOOD] >> TIndexProcesorTests::TestDeletedQueueNotReindexed >> LongTxService::LockSubscribe [GOOD] >> KqpWorkloadServiceTables::TestPoolStateFetcherActor [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier ------- [TS] {asan, default-linux-x86_64, 
release} ydb/core/tx/long_tx_service/ut/unittest >> LongTxService::LockSubscribe [GOOD] Test command err: 2026-01-04T02:18:07.360695Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:18:07.361241Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/0032ab/r3tmp/tmpQaSOJf/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:18:07.361974Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/0032ab/r3tmp/tmpQaSOJf/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/0032ab/r3tmp/tmpQaSOJf/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13274180108377191582 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:18:07.415524Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:94: TLongTxService [Node 1] Received TEvBeginTx from [1:437:2327] 2026-01-04T02:18:07.415596Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:123: TLongTxService [Node 1] Created new LongTxId# ydb://long-tx/000000001hdb29xw7x19b4x38q?node_id=1 2026-01-04T02:18:07.433349Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:265: TLongTxService [Node 2] Received TEvAttachColumnShardWrites from [2:438:2101] LongTxId# ydb://long-tx/000000001hdb29xw7x19b4x38q?node_id=1 2026-01-04T02:18:07.433520Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 2] Received TEvNodeConnected for NodeId# 1 from session [2:103:2048] 2026-01-04T02:18:07.433708Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:265: TLongTxService [Node 1] Received TEvAttachColumnShardWrites from [2:154:2090] LongTxId# ydb://long-tx/000000001hdb29xw7x19b4x38q?node_id=1 2026-01-04T02:18:07.433981Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:139: TLongTxService [Node 2] Received TEvCommitTx from 
[2:438:2101] LongTxId# ydb://long-tx/000000001hdb29xw7x19b4x38q?node_id=1 2026-01-04T02:18:07.434138Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:139: TLongTxService [Node 1] Received TEvCommitTx from [2:154:2090] LongTxId# ydb://long-tx/000000001hdb29xw7x19b4x38q?node_id=1 2026-01-04T02:18:07.434192Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:162: TLongTxService [Node 1] Committed LongTxId# ydb://long-tx/000000001hdb29xw7x19b4x38q?node_id=1 without side-effects 2026-01-04T02:18:07.434452Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:212: TLongTxService [Node 2] Received TEvRollbackTx from [2:438:2101] LongTxId# ydb://long-tx/000000001hdb29xw7x19b4x38q?node_id=1 2026-01-04T02:18:07.434607Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:212: TLongTxService [Node 1] Received TEvRollbackTx from [2:154:2090] LongTxId# ydb://long-tx/000000001hdb29xw7x19b4x38q?node_id=1 2026-01-04T02:18:07.434930Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:212: TLongTxService [Node 2] Received TEvRollbackTx from [2:438:2101] LongTxId# ydb://long-tx/000000001hdb29xw7x19b4x38q?node_id=1 2026-01-04T02:18:07.435094Z node 1 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:212: TLongTxService [Node 1] Received TEvRollbackTx from [2:154:2090] LongTxId# ydb://long-tx/000000001hdb29xw7x19b4x38q?node_id=1 2026-01-04T02:18:07.435368Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 2 2026-01-04T02:18:07.435469Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 2 2026-01-04T02:18:07.435558Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 2 2026-01-04T02:18:07.435807Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 1 from session [2:103:2048] 2026-01-04T02:18:07.435939Z node 2 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [2:152:2089] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2026-01-04T02:18:07.436335Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:75:2076] ServerId# [1:361:2278] TabletId# 72057594037932033 PipeClientId# [2:75:2076] 2026-01-04T02:18:07.439745Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:139: TLongTxService [Node 2] Received TEvCommitTx from [2:438:2101] LongTxId# ydb://long-tx/000000001hdb29xw7x19b4x38q?node_id=3 2026-01-04T02:18:07.440059Z node 2 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 2] Received TEvNodeDisconnected for NodeId# 3 from session [2:476:2103] 2026-01-04T02:18:08.034575Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:18:08.034666Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:08.233982Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:09.045550Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:18:09.045972Z node 4 :BS_PDISK CRIT: 
{BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/0032ab/r3tmp/tmpe1X03X/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:18:09.046262Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/0032ab/r3tmp/tmpe1X03X/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/0032ab/r3tmp/tmpe1X03X/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2959498231284014655 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:18:09.528843Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:346: TLongTxService [Node 3] Received TEvAcquireReadSnapshot from [3:629:2501] for database /dc-1 2026-01-04T02:18:09.528955Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:381: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2026-01-04T02:18:09.539343Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:388: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2026-01-04T02:18:09.539564Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:48: LongTxService.AcquireSnapshot [3:823:2680] Sending navigate request for /dc-1 2026-01-04T02:18:09.541358Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:75: LongTxService.AcquireSnapshot [3:823:2680] Received navigate response status Ok 2026-01-04T02:18:09.541425Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:129: LongTxService.AcquireSnapshot [3:823:2680] Sending acquire step to coordinator 72057594046316545 2026-01-04T02:18:09.541860Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:165: LongTxService.AcquireSnapshot [3:823:2680] Received read step 1500 2026-01-04T02:18:09.541954Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:400: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 1 2026-01-04T02:18:09.542175Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:94: 
TLongTxService [Node 3] Received TEvBeginTx from [3:629:2501] 2026-01-04T02:18:09.542216Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:381: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2026-01-04T02:18:09.552614Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:388: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2026-01-04T02:18:09.552828Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:48: LongTxService.AcquireSnapshot [3:836:2687] Sending navigate request for /dc-1 2026-01-04T02:18:09.553076Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:75: LongTxService.AcquireSnapshot [3:836:2687] Received navigate response status Ok 2026-01-04T02:18:09.553131Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:129: LongTxService.AcquireSnapshot [3:836:2687] Sending acquire step to coordinator 72057594046316545 2026-01-04T02:18:09.553330Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:165: LongTxService.AcquireSnapshot [3:836:2687] Received read step 1500 2026-01-04T02:18:09.553425Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:400: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 2 2026-01-04T02:18:09.553489Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:425: TLongTxService [Node 3] Created new read-only LongTxId# ydb://long-tx/read-only?snapshot=1500%3Amax 2026-01-04T02:18:09.553698Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:94: TLongTxService [Node 3] Received TEvBeginTx from [3:629:2501] 2026-01-04T02:18:09.553740Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:381: TLongTxService [Node 3] Scheduling TEvAcquireSnapshotFlush for database /dc-1 2026-01-04T02:18:09.566302Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:388: TLongTxService [Node 3] Received TEvAcquireSnapshotFlush for database /dc-1 2026-01-04T02:18:09.566515Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:48: LongTxService.AcquireSnapshot [3:838:2689] Sending navigate request for /dc-1 2026-01-04T02:18:09.566770Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:75: LongTxService.AcquireSnapshot [3:838:2689] Received navigate response status Ok 2026-01-04T02:18:09.566819Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:129: LongTxService.AcquireSnapshot [3:838:2689] Sending acquire step to coordinator 72057594046316545 2026-01-04T02:18:09.567006Z node 3 :LONG_TX_SERVICE DEBUG: acquire_snapshot_impl.cpp:165: LongTxService.AcquireSnapshot [3:838:2689] Received read step 1500 2026-01-04T02:18:09.567102Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:400: TLongTxService [Node 3] Received TEvAcquireSnapshotFinished, cookie = 3 2026-01-04T02:18:09.567168Z node 3 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:423: TLongTxService [Node 3] Created new read-write LongTxId# ydb://long-tx/00000001ef12m7a707j0m69gnm?node_id=3&snapshot=1500%3Amax 2026-01-04T02:18:10.635805Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:18:10.636271Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/0032ab/r3tmp/tmpAY0goj/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2026-01-04T02:18:10.636786Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/0032ab/r3tmp/tmpAY0goj/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/0032ab/r3tmp/tmpAY0goj/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7243903302526009135 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:18:10.699478Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:468: TLongTxService [Node 5] Received TEvRegisterLock for LockId# 123 2026-01-04T02:18:10.699608Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 5] Received TEvSubscribeLock from [5:440:2330] for LockId# 987 LockNode# 5 2026-01-04T02:18:10.715872Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 6] Received TEvSubscribeLock from [6:441:2101] for LockId# 987 LockNode# 5 2026-01-04T02:18:10.716055Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:91:2048] 2026-01-04T02:18:10.716211Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 5] Received TEvSubscribeLock from [6:154:2090] for LockId# 987 LockNode# 5 2026-01-04T02:18:10.717616Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:611: TLongTxService [Node 6] Received TEvLockStatus from [5:153:2138] for LockId# 987 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2026-01-04T02:18:10.717839Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 5] Received TEvSubscribeLock from [5:440:2330] for LockId# 123 LockNode# 5 2026-01-04T02:18:10.718011Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 6] Received TEvSubscribeLock from [6:441:2101] for LockId# 123 LockNode# 5 2026-01-04T02:18:10.718179Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 5] Received TEvSubscribeLock from [6:154:2090] for LockId# 123 LockNode# 5 
2026-01-04T02:18:10.718372Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:611: TLongTxService [Node 6] Received TEvLockStatus from [5:153:2138] for LockId# 123 LockNode# 5 LockStatus# STATUS_SUBSCRIBED 2026-01-04T02:18:10.718521Z node 5 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:479: TLongTxService [Node 5] Received TEvUnregisterLock for LockId# 123 2026-01-04T02:18:10.718676Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:611: TLongTxService [Node 6] Received TEvLockStatus from [5:153:2138] for LockId# 123 LockNode# 5 LockStatus# STATUS_NOT_FOUND 2026-01-04T02:18:10.718846Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:519: TLongTxService [Node 6] Received TEvSubscribeLock from [6:441:2101] for LockId# 234 LockNode# 5 2026-01-04T02:18:10.719165Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 6 2026-01-04T02:18:10.719219Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 6 2026-01-04T02:18:10.719349Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 6 2026-01-04T02:18:10.719922Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:77:2077] ServerId# [5:362:2279] TabletId# 72057594037932033 PipeClientId# [6:77:2077] 2026-01-04T02:18:10.720210Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:91:2048] 2026-01-04T02:18:10.720349Z node 6 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [6:152:2089] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2026-01-04T02:18:10.926255Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:471:2048] 2026-01-04T02:18:10.926567Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 6 2026-01-04T02:18:10.926619Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 6 2026-01-04T02:18:10.927125Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:472:2102] ServerId# [5:476:2349] TabletId# 72057594037932033 PipeClientId# [6:472:2102] 2026-01-04T02:18:10.927379Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:471:2048] 2026-01-04T02:18:11.169237Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 from session [6:492:2048] 2026-01-04T02:18:11.169627Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 6 2026-01-04T02:18:11.169684Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 6 2026-01-04T02:18:11.170373Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:492:2048] 2026-01-04T02:18:11.170532Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:491:2103] ServerId# [5:496:2360] TabletId# 72057594037932033 PipeClientId# [6:491:2103] 2026-01-04T02:18:11.439822Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:833: TLongTxService [Node 6] Received TEvNodeConnected for NodeId# 5 
from session [6:513:2048] 2026-01-04T02:18:11.440218Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 6 2026-01-04T02:18:11.440274Z node 5 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 6 2026-01-04T02:18:11.440564Z node 6 :LONG_TX_SERVICE DEBUG: long_tx_service_impl.cpp:871: TLongTxService [Node 6] Received TEvNodeDisconnected for NodeId# 5 from session [6:513:2048] 2026-01-04T02:18:11.441038Z node 6 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [6:514:2105] ServerId# [5:519:2374] TabletId# 72057594037932033 PipeClientId# [6:514:2105] |80.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/long_tx_service/ut/unittest |80.7%| [TS] {RESULT} ydb/core/tx/long_tx_service/ut/unittest >> DescribeSchemaSecretsService::GetUpdatedValue [GOOD] >> DescribeSchemaSecretsService::GetUnexistingValue |80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |80.7%| [LD] {RESULT} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/control_plane_proxy/ut/ydb-core-fq-libs-control_plane_proxy-ut >> KqpWorkloadServiceDistributed::TestDistributedQueue [GOOD] >> KqpWorkloadServiceDistributed::TestNodeDisconnect >> RangeOps::Intersection [GOOD] >> TGRpcRateLimiterTest::DropResource [GOOD] >> TGRpcRateLimiterTest::DescribeResource >> Vacuum::VacuumWithoutCompaction [GOOD] >> Vacuum::MultipleVacuums >> TBufferedWriter::Flush_AfterEAGAIN_ShouldRestartFromSavedPositionInBuffer_1 [GOOD] >> TBufferedWriter::Flush_AfterEAGAIN_ShouldRestartFromSavedPositionInBuffer_2 [GOOD] >> Splitter::CritSmallPortions [GOOD] >> Splitter::Crit >> KqpWorkloadService::WorkloadServiceDisabledByFeatureFlagOnServerless [GOOD] >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_range_ops/unittest >> RangeOps::Intersection [GOOD] Test command err: first [(Uint64 : NULL, Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] result [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : NULL) ; ()) second [(Uint64 : NULL, Uint64 : 1) ; (Uint64 : 20, Uint64 : 20)] result [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] correct [(Uint64 : NULL) ; (Uint64 : 20, Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 5)] result [(Uint64 : 10) ; (Uint64 : 5)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)] result [(Uint64 : 10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 10)] result [(Uint64 : 10) ; (Uint64 : 10)] correct [(Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 15)] result [(Uint64 : 
10) ; (Uint64 : 15)] correct [(Uint64 : 10) ; (Uint64 : 15)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 20)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 10) ; (Uint64 : 30)] result [(Uint64 : 10) ; (Uint64 : 20)] correct [(Uint64 : 10) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 17)] result [(Uint64 : 15) ; (Uint64 : 17)] correct [(Uint64 : 15) ; (Uint64 : 17)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 20)] result [(Uint64 : 15) ; (Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 15) ; (Uint64 : 30)] result [(Uint64 : 15) ; (Uint64 : 20)] correct [(Uint64 : 15) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 20)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 20) ; (Uint64 : 30)] result [(Uint64 : 20) ; (Uint64 : 20)] correct [(Uint64 : 20) ; (Uint64 : 20)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 25) ; (Uint64 : 30)] result [(Uint64 : 25) ; (Uint64 : 20)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)] result ((Uint64 : 10) ; (Uint64 : 10)] first [(Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)] result ((Uint64 : 10) ; (Uint64 : 15)] correct ((Uint64 : 10) ; (Uint64 : 15)] first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 15)) result ((Uint64 : 10) ; (Uint64 : 15)) correct ((Uint64 : 10) ; (Uint64 : 15)) first ((Uint64 : 10) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; (Uint64 : 20)) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 20)) correct [(Uint64 : 1) ; (Uint64 : 20)) first [(Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 10) ; (Uint64 : 20)) correct [(Uint64 : 10) ; (Uint64 : 20)) first ((Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 10)) result ((Uint64 : 10) ; (Uint64 : 10)) first ((Uint64 : 10) ; ()) second [(Uint64 : 1) ; (Uint64 : 20)) result ((Uint64 : 10) ; (Uint64 : 20)) correct ((Uint64 : 10) ; (Uint64 : 20)) first [(Uint64 : NULL) ; (Uint64 : 10)] second [(Uint64 : 1) ; (Uint64 : 20)) result [(Uint64 : 1) ; (Uint64 : 10)] correct [(Uint64 : 1) ; (Uint64 : 10)] first [(Uint64 : NULL) ; (Uint64 : 20)] second [(Uint64 : 1) ; (Uint64 : 10)) result [(Uint64 : 1) ; (Uint64 : 10)) correct [(Uint64 : 1) ; (Uint64 : 10)) |80.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_range_ops/unittest >> QueryActorTest::SimpleQuery [GOOD] >> QueryActorTest::Rollback |80.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/raw_socket/ut/unittest >> TBufferedWriter::Flush_AfterEAGAIN_ShouldRestartFromSavedPositionInBuffer_2 [GOOD] |80.7%| [TS] {BAZEL_UPLOAD} ydb/core/raw_socket/ut/unittest >> KqpWorkloadService::TestQueueSizeManyQueries [GOOD] >> 
KqpWorkloadService::TestQueryCancelAfterUnlimitedPool >> ResourcePoolsDdl::TestPoolSwitchToUnlimitedState [GOOD] >> ResourcePoolsDdl::TestResourcePoolAcl >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery >> TIndexProcesorTests::TestDeletedQueueNotReindexed [GOOD] >> TIndexProcesorTests::TestManyMessages >> KqpWorkloadServiceActors::TestCreateDefaultPool [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewOnServerless [GOOD] >> ResourcePoolsDdl::TestCreateResourcePool >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery >> KqpWorkloadService::WorkloadServiceDisabledByInvalidDatabasePath [GOOD] >> KqpWorkloadService::TestZeroQueueSize >> Splitter::Crit [GOOD] >> Splitter::CritSimple >> QueryActorTest::Rollback [GOOD] >> QueryActorTest::Commit >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery >> TGRpcRateLimiterTest::DescribeResource [GOOD] >> TGRpcRateLimiterTest::ListResources >> TIndexProcesorTests::TestManyMessages [GOOD] >> TIndexProcesorTests::TestOver1000Queues >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs >> Vacuum::MultipleVacuums [GOOD] >> Vacuum::MultipleVacuumsWithOldGenerations >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob >> DescribeSchemaSecretsService::GetUnexistingValue [GOOD] >> DescribeSchemaSecretsService::GetDroppedValue >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount >> ResourcePoolClassifiersDdl::TestDropResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks >> KqpWorkloadServiceActors::TestDefaultPoolUsePermissions [GOOD] >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections >> 
TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection >> KqpWorkloadServiceTables::TestCreateWorkloadSerivceTables [GOOD] >> KqpWorkloadServiceTables::TestCleanupOnServiceRestart >> QueryActorTest::Commit [GOOD] >> QueryActorTest::StreamQuery >> Splitter::CritSimple [GOOD] >> ResourcePoolsDdl::TestResourcePoolAcl [GOOD] >> ResourcePoolsDdl::TestWorkloadConfigOnServerless >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/splitter/ut/unittest >> Splitter::CritSimple [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=280336;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280384;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280336;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=2088936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5184936;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=5163264;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50240;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=50200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7124168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=132168;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=seria ... 
82944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=71282912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8905200;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8947912;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=7964800;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; 
FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964832;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7914944;columns=1; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=7964800;columns=1;
|80.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/columnshard/splitter/ut/unittest
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnection [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount
>> TGRpcRateLimiterTest::ListResources [GOOD]
>> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteConnection [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection
>> ResourcePoolsDdl::TestCreateResourcePool [GOOD]
>> ResourcePoolsDdl::TestCreateResourcePoolOnServerless
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnection [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding
>> KqpWorkloadService::TestZeroQueueSize [GOOD]
>> KqpWorkloadService::TestZeroQueueSizeManyQueries
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendCreateBinding [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings
>> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions [GOOD]
>> Vacuum::MultipleVacuumsWithOldGenerations [GOOD]
>> Vacuum::VacuumWithRestart
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendListBindings [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDescribeBinding [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceActors::TestDefaultPoolAdminPermissions [GOOD] Test command err:
2026-01-04T02:18:00.364203Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591324977339188473:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:00.366293Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:00.754576Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:00.769821Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:00.769921Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:00.793252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:00.916534Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:00.919965Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591324977339188373:2081] 1767493080360270 != 1767493080360273 2026-01-04T02:18:00.954204Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:00.954223Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:00.954234Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:00.954320Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:01.003657Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:01.093007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:01.371739Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:03.008597Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-04T02:18:03.014126Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324990224091125:2319], Start check tables existence, number paths: 2 2026-01-04T02:18:03.018126Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-04T02:18:03.018160Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-04T02:18:03.018341Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: 
table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324990224091125:2319], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-04T02:18:03.018403Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324990224091125:2319], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-04T02:18:03.018430Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324990224091125:2319], Successfully finished 2026-01-04T02:18:03.018563Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-04T02:18:03.018743Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=ZDdlZjhiZDAtNzcwMWVkZjgtMjYwNjE0ZjktOWYwY2I4YzA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ZDdlZjhiZDAtNzcwMWVkZjgtMjYwNjE0ZjktOWYwY2I4YzA= temp_dir_name# 3ed799c5-470c-5b78-fc61-748bc0fd878d trace_id# 2026-01-04T02:18:03.021591Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=ZDdlZjhiZDAtNzcwMWVkZjgtMjYwNjE0ZjktOWYwY2I4YzA=, ActorId: [1:7591324990224091145:2324], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:18:03.022056Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-04T02:18:03.034563Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324990224091155:2498], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-04T02:18:03.038204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:18:03.039507Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324990224091155:2498], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2026-01-04T02:18:03.039702Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324990224091155:2498], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-04T02:18:03.047733Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324990224091155:2498], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:18:03.119180Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324990224091155:2498], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-04T02:18:03.165700Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591324990224091206:2530] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:18:03.165883Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324990224091155:2498], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-04T02:18:03.166157Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324990224091213:2536], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2026-01-04T02:18:03.167163Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324990224091213:2536], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-04T02:18:03.185466Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=1&id=ZDdlZjhiZDAtNzcwMWVkZjgtMjYwNjE0ZjktOWYwY2I4YzA=, ActorId: [1:7591324990224091145:2324], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-04T02:18:03.190140Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=1&id=ZDdlZjhiZDAtNzcwMWVkZjgtMjYwNjE0ZjktOWYwY2I4YzA=, ActorId: [1:7591324990224091145:2324], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:18:03.190215Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=1&id=ZDdlZjhiZDAtNzcwMWVkZjgtMjYwNjE0ZjktOWYwY2I4YzA=, ActorId: [1:7591324990224091145:2324], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-04T02:18:03.190252Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=1&id=ZDdlZjhiZDAtNzcwMWVkZjgtMjYwNjE0ZjktOWYwY2I4YzA=, ActorId: [1:7591324990224091145:2324], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:18:03.190401Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=1&id=ZDdlZjhiZDAtNzcwMWVkZjgtMjYwNjE0ZjktOWYwY2I4YzA=, ActorId: [1:7591324990224091145:2324], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-04T02:18:03.862550Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591324989607498228:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:03.862622Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:03.876414Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:03.931531Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591324989607498201:2081] 1767493083861768 != 1767493083861771 2026-01-04T02:18:03.937592Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:03.978399Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:03.978470Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:03.981772Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:04.089899Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:04.090421Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:04.090432Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:04.090439Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:04.090506Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:04.318510Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemesha ... 
6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:659} SessionId: ydb://session/3?node_id=6&id=NDdlNjg5YjYtODhjOWViMmUtN2JlMGNhMzgtNjZhZDdmYTI=, ActorId: [6:7591325077086992900:2341], ActorState: ExecuteState, LegacyTraceId: 01ke3crvrxavsvxas2sshjb8h8, Sending CompileQuery request trace_id# 2026-01-04T02:18:23.418713Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp:155) 2026-01-04T02:18:23.420592Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7591325077086992865:2334], DatabaseId: /Root, PoolId: default, Got watch notification 2026-01-04T02:18:23.420666Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:476: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7591325077086992865:2334], DatabaseId: /Root, PoolId: default, Pool config has changed, queue size: -1, in flight limit: -1 2026-01-04T02:18:23.423398Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=6&id=NDdlNjg5YjYtODhjOWViMmUtN2JlMGNhMzgtNjZhZDdmYTI=, ActorId: [6:7591325077086992900:2341], ActorState: ExecuteState, LegacyTraceId: 01ke3crvrxavsvxas2sshjb8h8, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-04T02:18:23.423507Z node 6 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=6&id=NDdlNjg5YjYtODhjOWViMmUtN2JlMGNhMzgtNjZhZDdmYTI=, ActorId: [6:7591325077086992900:2341], ActorState: ExecuteState, LegacyTraceId: 01ke3crvrxavsvxas2sshjb8h8, TxInfo status# Committed kind# Pure total_duration# 6.562 server_duration# 6.522 queries_count# 2 trace_id# 2026-01-04T02:18:23.423552Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=6&id=NDdlNjg5YjYtODhjOWViMmUtN2JlMGNhMzgtNjZhZDdmYTI=, ActorId: [6:7591325077086992900:2341], ActorState: ExecuteState, LegacyTraceId: 01ke3crvrxavsvxas2sshjb8h8, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-04T02:18:23.423643Z node 6 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=6&id=NDdlNjg5YjYtODhjOWViMmUtN2JlMGNhMzgtNjZhZDdmYTI=, ActorId: [6:7591325077086992900:2341], ActorState: ExecuteState, LegacyTraceId: 01ke3crvrxavsvxas2sshjb8h8, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:18:23.423681Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=6&id=NDdlNjg5YjYtODhjOWViMmUtN2JlMGNhMzgtNjZhZDdmYTI=, ActorId: [6:7591325077086992900:2341], ActorState: ExecuteState, LegacyTraceId: 01ke3crvrxavsvxas2sshjb8h8, EndCleanup is_final# true trace_id# 2026-01-04T02:18:23.423742Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=6&id=NDdlNjg5YjYtODhjOWViMmUtN2JlMGNhMzgtNjZhZDdmYTI=, ActorId: [6:7591325077086992900:2341], ActorState: ExecuteState, LegacyTraceId: 01ke3crvrxavsvxas2sshjb8h8, Sent query response back to proxy proxy_request_id# 5 proxy_id# [6:7591325064202090227:2260] trace_id# 2026-01-04T02:18:23.423765Z node 6 :KQP_SESSION DEBUG: 
{KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=6&id=NDdlNjg5YjYtODhjOWViMmUtN2JlMGNhMzgtNjZhZDdmYTI=, ActorId: [6:7591325077086992900:2341], ActorState: unknown state, LegacyTraceId: 01ke3crvrxavsvxas2sshjb8h8, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:18:23.423858Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=6&id=NDdlNjg5YjYtODhjOWViMmUtN2JlMGNhMzgtNjZhZDdmYTI=, ActorId: [6:7591325077086992900:2341], ActorState: unknown state, LegacyTraceId: 01ke3crvrxavsvxas2sshjb8h8, Session actor destroyed trace_id# 2026-01-04T02:18:23.425890Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=6&id=YWYxMzFkYTgtOTliNDU4YTAtYTczOGYxMGYtOTJmOTNjNWQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# YWYxMzFkYTgtOTliNDU4YTAtYTczOGYxMGYtOTJmOTNjNWQ= temp_dir_name# 0de3a767-4a63-2184-c428-8d9693096cea trace_id# 2026-01-04T02:18:23.425959Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=6&id=YWYxMzFkYTgtOTliNDU4YTAtYTczOGYxMGYtOTJmOTNjNWQ=, ActorId: [6:7591325077086992925:2343], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:18:23.426179Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=6&id=YWYxMzFkYTgtOTliNDU4YTAtYTczOGYxMGYtOTJmOTNjNWQ=, ActorId: [6:7591325077086992925:2343], ActorState: ReadyState, LegacyTraceId: 01ke3crvt2actf8cjqzavtz4j0, Received request proxy_request_id# 6 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# DROP RESOURCE POOL default; rpc_actor# [6:7591325077086992924:2590] database# Root database_id# /Root pool_id# default trace_id# 2026-01-04T02:18:23.426201Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:280} SessionId: ydb://session/3?node_id=6&id=YWYxMzFkYTgtOTliNDU4YTAtYTczOGYxMGYtOTJmOTNjNWQ=, ActorId: [6:7591325077086992925:2343], ActorState: ReadyState, LegacyTraceId: 01ke3crvt2actf8cjqzavtz4j0, Request placed into pool from cache pool_id# default trace_id# 2026-01-04T02:18:23.426244Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:659} SessionId: ydb://session/3?node_id=6&id=YWYxMzFkYTgtOTliNDU4YTAtYTczOGYxMGYtOTJmOTNjNWQ=, ActorId: [6:7591325077086992925:2343], ActorState: ExecuteState, LegacyTraceId: 01ke3crvt2actf8cjqzavtz4j0, Sending CompileQuery request trace_id# 2026-01-04T02:18:23.446939Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:294: [WorkloadService] [TPoolHandlerActorBase] ActorId: [6:7591325077086992865:2334], DatabaseId: /Root, PoolId: default, Got delete notification 2026-01-04T02:18:23.447008Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-04T02:18:23.447039Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-04T02:18:23.447056Z node 6 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7591325077086992939:2345], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-04T02:18:23.447323Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:7591325077086992939:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, 
NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:23.447395Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:23.449998Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=6&id=YWYxMzFkYTgtOTliNDU4YTAtYTczOGYxMGYtOTJmOTNjNWQ=, ActorId: [6:7591325077086992925:2343], ActorState: ExecuteState, LegacyTraceId: 01ke3crvt2actf8cjqzavtz4j0, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-04T02:18:23.450103Z node 6 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=6&id=YWYxMzFkYTgtOTliNDU4YTAtYTczOGYxMGYtOTJmOTNjNWQ=, ActorId: [6:7591325077086992925:2343], ActorState: ExecuteState, LegacyTraceId: 01ke3crvt2actf8cjqzavtz4j0, TxInfo status# Committed kind# Pure total_duration# 6.677 server_duration# 6.62 queries_count# 2 trace_id# 2026-01-04T02:18:23.450160Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=6&id=YWYxMzFkYTgtOTliNDU4YTAtYTczOGYxMGYtOTJmOTNjNWQ=, ActorId: [6:7591325077086992925:2343], ActorState: ExecuteState, LegacyTraceId: 01ke3crvt2actf8cjqzavtz4j0, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-04T02:18:23.450255Z node 6 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=6&id=YWYxMzFkYTgtOTliNDU4YTAtYTczOGYxMGYtOTJmOTNjNWQ=, ActorId: [6:7591325077086992925:2343], ActorState: ExecuteState, LegacyTraceId: 01ke3crvt2actf8cjqzavtz4j0, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:18:23.450275Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=6&id=YWYxMzFkYTgtOTliNDU4YTAtYTczOGYxMGYtOTJmOTNjNWQ=, ActorId: [6:7591325077086992925:2343], ActorState: ExecuteState, LegacyTraceId: 01ke3crvt2actf8cjqzavtz4j0, EndCleanup is_final# true trace_id# 2026-01-04T02:18:23.450298Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=6&id=YWYxMzFkYTgtOTliNDU4YTAtYTczOGYxMGYtOTJmOTNjNWQ=, ActorId: [6:7591325077086992925:2343], ActorState: ExecuteState, LegacyTraceId: 01ke3crvt2actf8cjqzavtz4j0, Sent query response back to proxy proxy_request_id# 6 proxy_id# [6:7591325064202090227:2260] trace_id# 2026-01-04T02:18:23.450314Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=6&id=YWYxMzFkYTgtOTliNDU4YTAtYTczOGYxMGYtOTJmOTNjNWQ=, ActorId: [6:7591325077086992925:2343], ActorState: unknown state, LegacyTraceId: 01ke3crvt2actf8cjqzavtz4j0, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:18:23.450407Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=6&id=YWYxMzFkYTgtOTliNDU4YTAtYTczOGYxMGYtOTJmOTNjNWQ=, ActorId: [6:7591325077086992925:2343], ActorState: unknown state, LegacyTraceId: 01ke3crvt2actf8cjqzavtz4j0, Session actor destroyed trace_id# 2026-01-04T02:18:23.456377Z node 6 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=6&id=ZTM0NmJmYzAtZmI5NGEwNGYtYWIwZGE0YjItZjBkYTBkMWQ=, ActorId: [6:7591325077086992761:2324], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-04T02:18:23.456415Z node 6 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=6&id=ZTM0NmJmYzAtZmI5NGEwNGYtYWIwZGE0YjItZjBkYTBkMWQ=, ActorId: [6:7591325077086992761:2324], ActorState: 
ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:18:23.456438Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=6&id=ZTM0NmJmYzAtZmI5NGEwNGYtYWIwZGE0YjItZjBkYTBkMWQ=, ActorId: [6:7591325077086992761:2324], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-04T02:18:23.456457Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=6&id=ZTM0NmJmYzAtZmI5NGEwNGYtYWIwZGE0YjItZjBkYTBkMWQ=, ActorId: [6:7591325077086992761:2324], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:18:23.456512Z node 6 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=6&id=ZTM0NmJmYzAtZmI5NGEwNGYtYWIwZGE0YjItZjBkYTBkMWQ=, ActorId: [6:7591325077086992761:2324], ActorState: unknown state, Session actor destroyed trace_id#
|80.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendModifyBinding [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding
>> DescribeSchemaSecretsService::GetDroppedValue [GOOD]
>> DescribeSchemaSecretsService::GetInParallel
>> TControlPlaneProxyCheckNegativePermissionsFailed::ShouldSendDeleteBinding [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateQuery [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries
>> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApi [GOOD]
>> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListQueries [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeQuery [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetQueryStatus [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyQuery [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteQuery [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery
>> QueryActorTest::StreamQuery [GOOD]
>> KqpWorkloadService::TestLargeConcurrentQueryLimit [GOOD]
>> KqpWorkloadService::TestCpuLoadThreshold
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendControlQuery [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData
>> KqpWorkloadService::TestQueryCancelAfterUnlimitedPool [GOOD]
>> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendGetResultData [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListJobs [GOOD]
>> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob
>> KqpWorkloadServiceTables::TestCleanupOnServiceRestart [GOOD]
>> KqpWorkloadServiceTables::TestLeaseExpiration
>> Vacuum::VacuumWithRestart [GOOD]
>> Vacuum::OutReadSetsCleanedAfterCopyTable
------- [TM] {asan, 
default-linux-x86_64, release} ydb/library/query_actor/ut/unittest >> QueryActorTest::StreamQuery [GOOD] Test command err: 2026-01-04T02:18:11.016192Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591325021752533187:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:11.016801Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:11.245886Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:18:11.246377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:11.246463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:11.255752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:11.382742Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:11.508635Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:18:11.556696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:11.585433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:11.668417Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [1:7591325021752533979:2534], Bootstrap. Database: dc-1, IsSystemUser: 0, run create session 2026-01-04T02:18:12.019920Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:13.582037Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:18:13.584366Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976580615.967281s seconds to be completed 2026-01-04T02:18:13.587611Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=1&id=ZTU0YzdhZjgtZWRiMDViNzYtNWI2MjNiOTEtMjM4NTVhMTE=, workerId: [1:7591325030342468612:2307], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-04T02:18:13.587814Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-04T02:18:13.587883Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 
2026-01-04T02:18:13.587906Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-04T02:18:13.587928Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:18:13.588778Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [0:0:0], ActorId: [1:7591325021752533979:2534], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=ZTU0YzdhZjgtZWRiMDViNzYtNWI2MjNiOTEtMjM4NTVhMTE=, TxId: , text: SELECT 42 2026-01-04T02:18:13.589531Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=1&id=ZTU0YzdhZjgtZWRiMDViNzYtNWI2MjNiOTEtMjM4NTVhMTE=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7591325030342468612:2307] 2026-01-04T02:18:13.589566Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [1:7591325030342468629:2546] 2026-01-04T02:18:13.911494Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 3, sender: [1:7591325030342468627:2308], selfId: [1:7591325021752533376:2266], source: [1:7591325030342468612:2307] 2026-01-04T02:18:13.912756Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [0:0:0], ActorId: [1:7591325021752533979:2534], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZTU0YzdhZjgtZWRiMDViNzYtNWI2MjNiOTEtMjM4NTVhMTE=, TxId: 2026-01-04T02:18:13.912848Z node 1 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [0:0:0], ActorId: [1:7591325021752533979:2534], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZTU0YzdhZjgtZWRiMDViNzYtNWI2MjNiOTEtMjM4NTVhMTE=, TxId: 2026-01-04T02:18:13.913694Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=ZTU0YzdhZjgtZWRiMDViNzYtNWI2MjNiOTEtMjM4NTVhMTE=, workerId: [1:7591325030342468612:2307], local sessions count: 0 2026-01-04T02:18:14.415345Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591325035268906775:2164];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:14.415507Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:14.456611Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:18:14.534414Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:14.538111Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591325035268906648:2081] 1767493094409848 != 1767493094409851 2026-01-04T02:18:14.572379Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:14.572460Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:14.577331Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:14.746519Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:18:14.779983Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:14.785778Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:18:14.788806Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:14.811258Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [2:7591325035268907510:2534], Bootstrap. Database: dc-1, IsSystemUser: 0, run create session 2026-01-04T02:18:15.419565Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:17.197217Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:18:17.198746Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976580612.352895s seconds to be completed 2026-01-04T02:18:17.200972Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=2&id=NGM3Mjk2Y2ItMThkODEzNTctZDU5NGE4YjUtYTk0OGMyNzc=, workerId: [2:7591325048153809455:2307], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-04T02:18:17.201164Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-04T02:18:17.201226Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:18:17.201251Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-04T02:18:17.201269Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:18:17.201447Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [0:0:0], ActorId: [2:7591325035268907510:2534], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NGM3Mjk2Y2ItMThkODEzNTctZDU5NGE4YjUtYTk0OGMyNzc=, TxId: , text: DECLARE $k As Uint64; DECLARE $v As String; UPSERT INTO TestTable (Key, Value) VALUES ($k, $v) 2026-01-04T02:18:17.201771Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=2&id=NGM3Mjk2Y2ItMThkODEzNTctZDU5NGE4YjUtYTk0OGMyNzc=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 3, targetId: [2:7591325048153809455:2307] 2026-01-04T02:18:17.201831Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 300.000000s actor id: [2:7591325048153809457:2547] 2026-01-04T02:18:17.304622Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 3, sender: [2:7591325048153809456:2308], selfId: [2:7591325035268906885:2264], source: [2:7591325048153809455:2307] 2026-01-04T02:18:17.304763Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [0:0:0], ActorId: [2:7591325035268907510:2534], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGM3Mjk2Y2ItMThkODEzNTctZDU5NGE4YjUtYTk0OGMyNzc=, TxId: 2026-01-04T02:18:17.304803Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [0:0:0], ActorId: [2:7591325035268907510:2534], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGM3Mjk2Y2ItMThkODEzNTctZDU5NGE4YjUtYTk0OGMyNzc=, TxId: 2026-01-04T02:18:17.304973Z node 2 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [2:7591325048153809485:2558], Bootstrap. Database: dc-1, IsSystemUser: 0, run create session 2026-01-04T02:18:17.305059Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=2&id=NGM3Mjk2Y2ItMThkODEzNTctZDU5NGE4YjUtYTk0OGMyNzc=, workerId: [2:7591325048153809455:2307], local sessions count: 0 2026-01-04T02:18:17.305178Z node 2 :KQP_PROXY DEBUG: kqp_proxy_ser ... iled to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:18:21.458945Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:21.464544Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:21.486790Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7591325066549669339:2534], Bootstrap. 
Database: dc-1, IsSystemUser: 0, run create session 2026-01-04T02:18:22.277844Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:23.292407Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:18:23.293234Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976580606.258396s seconds to be completed 2026-01-04T02:18:23.294865Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=4&id=ZDU1ZjBlYTEtNWQ0MWM3NmMtYmYxYjVjNDMtMjM3ZGNhMWI=, workerId: [4:7591325075139603981:2307], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-04T02:18:23.294998Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-04T02:18:23.295032Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:18:23.295046Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-04T02:18:23.295055Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:18:23.295163Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7591325066549669339:2534], RunStreamQuery with text: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2026-01-04T02:18:23.295284Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7591325066549669339:2534], Start read next stream part 2026-01-04T02:18:23.299681Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3crvnz07dkkwnx4p802zvv", Created new session, sessionId: ydb://session/3?node_id=4&id=ZGE2ZWVhNjctYTMxZDFmNzQtYjQ0YzBmM2YtYjRmZDBkYjg=, workerId: [4:7591325075139603990:2308], database: /dc-1, longSession: 0, local sessions count: 2 2026-01-04T02:18:23.300039Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3crvnz07dkkwnx4p802zvv, Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=ZGE2ZWVhNjctYTMxZDFmNzQtYjQ0YzBmM2YtYjRmZDBkYjg=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [4:7591325075139603990:2308] 2026-01-04T02:18:23.300080Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [4:7591325075139603991:2547] 2026-01-04T02:18:23.300846Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591325075139603992:2309], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:23.300875Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591325075139604000:2312], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:23.300893Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:23.301042Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591325075139604006:2313], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:23.301116Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:23.303269Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:18:23.308863Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591325075139604007:2314], DatabaseId: /dc-1, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:18:23.410824Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591325075139604059:2583] txid# 281474976710660, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:18:26.236111Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591325066549668506:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:26.236191Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:18:26.366517Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7591325066549669339:2534], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:18:26.368205Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:333: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7591325066549669339:2534], Cancel stream request 2026-01-04T02:18:26.368271Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7591325066549669339:2534], Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=ZDU1ZjBlYTEtNWQ0MWM3NmMtYmYxYjVjNDMtMjM3ZGNhMWI=, TxId: 2026-01-04T02:18:26.368882Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7591325088024505995:2608], Bootstrap. Database: dc-1, IsSystemUser: 0, run create session 2026-01-04T02:18:26.468556Z node 4 :RPC_REQUEST WARN: rpc_stream_execute_scan_query.cpp:410: Client lost 2026-01-04T02:18:26.476839Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=ZDU1ZjBlYTEtNWQ0MWM3NmMtYmYxYjVjNDMtMjM3ZGNhMWI=, workerId: [4:7591325075139603981:2307], local sessions count: 1 2026-01-04T02:18:26.476948Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976580603.074701s seconds to be completed 2026-01-04T02:18:26.478663Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=4&id=OTM4Y2I1ZGMtMjhjOWFjN2UtMWIxMjJjYWUtN2I5M2EzYzI=, workerId: [4:7591325088024505998:2328], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-04T02:18:26.478795Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-04T02:18:26.479162Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7591325088024505995:2608], RunStreamQuery with text: DECLARE $value AS Text; DECLARE $table_size AS Uint64; SELECT x FROM AS_TABLE( ()->(Yql::ToStream(ListReplicate(<|x:$value|>, $table_size))) ); 2026-01-04T02:18:26.479243Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7591325088024505995:2608], Start read next stream part 2026-01-04T02:18:26.594254Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3crysf345vmrx0mtrz0s7g", Created new session, sessionId: ydb://session/3?node_id=4&id=NTVhODg2N2UtZjk2NTkzMjctYTA2OWRjLTExODAwMzIy, workerId: [4:7591325088024506002:2329], database: /dc-1, longSession: 0, local sessions count: 3 2026-01-04T02:18:26.594451Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3crysf345vmrx0mtrz0s7g, Database: 
/dc-1, SessionId: ydb://session/3?node_id=4&id=NTVhODg2N2UtZjk2NTkzMjctYTA2OWRjLTExODAwMzIy, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 5, targetId: [4:7591325088024506002:2329] 2026-01-04T02:18:26.594482Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 5 timeout: 600.000000s actor id: [4:7591325088024506003:2610] 2026-01-04T02:18:26.655362Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493106646, txId: 281474976710663] shutting down 2026-01-04T02:18:26.656096Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7591325088024505995:2608], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:18:26.657653Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3crysf345vmrx0mtrz0s7g", Forwarded response to sender actor, requestId: 5, sender: [4:7591325088024506000:2608], selfId: [4:7591325066549668736:2266], source: [4:7591325088024506002:2329] 2026-01-04T02:18:26.658087Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=NTVhODg2N2UtZjk2NTkzMjctYTA2OWRjLTExODAwMzIy, workerId: [4:7591325088024506002:2329], local sessions count: 2 2026-01-04T02:18:26.660852Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7591325088024505995:2608], Start read next stream part 2026-01-04T02:18:26.661061Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7591325088024505995:2608], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:18:26.661154Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [0:0:0], ActorId: [4:7591325088024505995:2608], Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=OTM4Y2I1ZGMtMjhjOWFjN2UtMWIxMjJjYWUtN2I5M2EzYzI=, TxId: 2026-01-04T02:18:26.661559Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=OTM4Y2I1ZGMtMjhjOWFjN2UtMWIxMjJjYWUtN2I5M2EzYzI=, workerId: [4:7591325088024505998:2328], local sessions count: 1 |80.7%| [TM] {BAZEL_UPLOAD} ydb/library/query_actor/ut/unittest >> KqpWorkloadService::TestZeroQueueSizeManyQueries [GOOD] >> KqpWorkloadServiceActors::TestCpuLoadActor >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount >> DescribeSchemaSecretsService::GetInParallel [GOOD] >> DescribeSchemaSecretsService::GetSameValueMultipleTimes >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections >> ResourcePoolsDdl::TestWorkloadConfigOnServerless [GOOD] >> ResourcePoolsSysView::TestResourcePoolsSysViewOnServerless >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection >> 
TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection >> ResourcePoolClassifiersDdl::TestResourcePoolClassifierRanks [GOOD] >> ResourcePoolClassifiersDdl::TestExplicitPoolId >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount |80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |80.7%| [TM] {RESULT} ydb/core/tx/datashard/ut_range_ops/unittest |80.7%| [TS] {RESULT} ydb/core/raw_socket/ut/unittest |80.7%| [TM] {RESULT} ydb/library/query_actor/ut/unittest |80.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ru_calculator/ydb-core-tx-schemeshard-ut_ru_calculator |80.7%| [TS] {RESULT} ydb/core/tx/columnshard/splitter/ut/unittest >> ResourcePoolsDdl::TestCreateResourcePoolOnServerless [GOOD] >> ResourcePoolsDdl::TestDefaultPoolRestrictions >> TIndexProcesorTests::TestOver1000Queues [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection >> ResourcePoolClassifiersDdl::TestCreateResourcePoolClassifierOnServerless [GOOD] >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount >> TRUCalculatorTests::TestReadTable [GOOD] >> TRUCalculatorTests::TestBulkUpsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/yc_search_ut/unittest >> TIndexProcesorTests::TestOver1000Queues [GOOD] Test command err: 2026-01-04T02:18:02.833986Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591324983814690394:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:02.834093Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:02.868960Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:18:03.164660Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:03.171892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:03.172023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:03.220677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting 
-> Connected 2026-01-04T02:18:03.273499Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591324983814690356:2081] 1767493082831469 != 1767493082831472 2026-01-04T02:18:03.274804Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:03.364859Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:03.364897Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:03.364905Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:03.364983Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:03.365056Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:03.614694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:03.621846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:18:03.843309Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:06.095232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:06.097113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:06.388094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:06.425447Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591325000994560662:2665] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-04T02:18:06.584145Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710664, at schemeshard: 72057594046644480 2026-01-04T02:18:06.610302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:06.611878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===Execute query: UPSERT INTO `/Root/SQS/SingleCreateQueueEvent/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "queue1", 1, "myQueueCustomName", 1767493086439, "myFolder", "{\"k1\": \"v1\"}"); 2026-01-04T02:18:06.802168Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591325000994560828:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:06.802299Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:06.802832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591325000994560840:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:06.802896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591325000994560841:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:06.803182Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:06.807271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710667:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:18:06.821043Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591325000994560844:2359], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710667 completed, doublechecking } 2026-01-04T02:18:06.892710Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591325000994560897:2817] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 52], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:18:07.837900Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591324983814690394:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:07.837985Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-04T02:18:06.439000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-04T02:18:06.439000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2026-01-04T02:18:07.976143Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} 2026-01-04T02:18:08.535622Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591325009584495721:2900] txid# 281474976710680, issues: { message: "Check failed: path: \'/Root/SQS/Root/SQS/SingleCreateQueueEvent\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS/Root/SQS/SingleCreateQueueEvent', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-04T02:18:08.632384Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591325009584495730:2905] txid# 281474976710681, issues: { message: "Check failed: path: \'/Root/SQS/SingleCreateQueueEvent/.Queues\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:18:08.633659Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591325009584495737:2910] txid# 281474976710682, issues: { message: "Check failed: path: \'/Root/SQS/SingleCreateQueueEvent/.Events\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 48], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } ===Execute query: UPSERT INTO `/Root/SQS/SingleCreateQueueEvent/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "queue1", 1, "myQueueCustomName", 1767493088349, "myFolder", NULL); ===CheckEventsLine: 
{"resource_type":"message-queue","timestamp":"2026-01-04T02:18:08.349000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}]} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-04T02:18:08.349000Z","resource_id":"queue1","name":"myQueueCustomName","service":"message-queue","reindex_timest ... ype":"message-queue","timestamp":"2026-01-04T02:18:11.889000Z","resource_id":"queue2","name":"myQueueCustomName","service":"message-queue","deleted":"2026-01-04T02:18:11.889000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} 2026-01-04T02:18:14.207344Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591325035354300365:3359] txid# 281474976710719, issues: { message: "Check failed: path: \'/Root/SQS/Root/SQS/DeletedQueueNotReindexed\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 57], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS/Root/SQS/DeletedQueueNotReindexed', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 57], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-04T02:18:14.238088Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591325035354300374:3364] txid# 281474976710720, issues: { message: "Check failed: path: \'/Root/SQS/DeletedQueueNotReindexed/.Queues\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 59], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:18:14.240700Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591325035354300380:3368] txid# 281474976710721, issues: { message: "Check failed: path: \'/Root/SQS/DeletedQueueNotReindexed/.Events\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 60], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } ===Execute query: UPSERT INTO `/Root/SQS/DeletedQueueNotReindexed/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "queue2", "myQueueCustomName", 1767493092000, "myFolder", NULL); ===Execute query: UPSERT INTO `/Root/SQS/DeletedQueueNotReindexed/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "queue2", 0, "myQueueCustomName", 1767493093966, "myFolder", NULL); ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-04T02:18:13.966000Z","resource_id":"queue2","name":"myQueueCustomName","service":"message-queue","deleted":"2026-01-04T02:18:13.966000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}]} 2026-01-04T02:18:15.733164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710728:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:15.740819Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591325039649267801:3423] txid# 
281474976710729, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-04T02:18:15.951307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710731:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:15.954224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710732:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "existing1", "myQueueCustomName", 1767493094000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "existing2", "myQueueCustomName", 1767493094000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "existing3", "myQueueCustomName", 1767493094000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "deleting1", "myQueueCustomName", 1767493094000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Queues` (Account, QueueName, CustomQueueName, CreatedTimestamp, FolderId, Tags) VALUES ("cloud1", "deleting2", "myQueueCustomName", 1767493094000, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "deleting1", 0, "myQueueCustomName", 1767493095766, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "deleting2", 0, "myQueueCustomName", 1767493095766, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "creating1", 1, "myQueueCustomName", 1767493095766, "myFolder", "{\"k1\": \"v1\"}"); ===Execute query: UPSERT INTO `/Root/SQS/TestManyMessages/.Events` (Account, QueueName, EventType, CustomQueueName, EventTimestamp, FolderId, Labels) VALUES ("cloud1", "creating2", 1, "myQueueCustomName", 1767493095766, "myFolder", "{\"k1\": \"v1\"}"); ===CheckEventsLine: 
{"resource_type":"message-queue","timestamp":"2026-01-04T02:18:15.766000Z","resource_id":"deleting2","name":"myQueueCustomName","service":"message-queue","deleted":"2026-01-04T02:18:15.766000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-04T02:18:15.766000Z","resource_id":"deleting1","name":"myQueueCustomName","service":"message-queue","deleted":"2026-01-04T02:18:15.766000Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-04T02:18:15.766000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-04T02:18:15.766000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-04T02:18:14.000000Z","resource_id":"existing1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2026-01-04T02:18:17.917571Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-04T02:18:14.000000Z","resource_id":"existing2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2026-01-04T02:18:17.917623Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-04T02:18:15.766000Z","resource_id":"creating1","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2026-01-04T02:18:17.917647Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: {"resource_type":"message-queue","timestamp":"2026-01-04T02:18:14.000000Z","resource_id":"existing3","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2026-01-04T02:18:17.917665Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} ===CheckEventsLine: 
{"resource_type":"message-queue","timestamp":"2026-01-04T02:18:15.766000Z","resource_id":"creating2","name":"myQueueCustomName","service":"message-queue","reindex_timestamp":"2026-01-04T02:18:17.917695Z","permission":"ymq.queues.list","cloud_id":"cloud1","folder_id":"myFolder","resource_path":[{"resource_type":"resource-manager.folder","resource_id":"myFolder"}],"attributes":{"labels":{"k1": "v1"}}} 2026-01-04T02:18:18.058887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:18:18.058925Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:18.570653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710757:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:18.576681Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591325052534170301:3705] txid# 281474976710758, issues: { message: "Check failed: path: \'/Root/SQS\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/Root/SQS', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-04T02:18:18.803229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710760:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:18.804929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710761:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===Started add queue batch |80.7%| [TM] {BAZEL_UPLOAD} ydb/core/ymq/actor/yc_search_ut/unittest >> TGRpcRateLimiterTest::AcquireResourceManyRequiredGrpcApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding |80.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ru_calculator/unittest >> TRUCalculatorTests::TestBulkUpsert [GOOD] |80.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ru_calculator/unittest >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding >> Vacuum::OutReadSetsCleanedAfterCopyTable [GOOD] >> Vacuum::BorrowerDataCleanedAfterCopyTable >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDescribeBinding [GOOD] >> 
TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding >> DescribeSchemaSecretsService::GetSameValueMultipleTimes [GOOD] >> DescribeSchemaSecretsService::FailWithoutGrants >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding >> ResourcePoolsDdl::TestDefaultPoolRestrictions [GOOD] >> ResourcePoolsDdl::TestAlterResourcePool >> TControlPlaneProxyCheckNegativePermissionsSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceActors::TestCpuLoadActor [GOOD] Test command err: 2026-01-04T02:18:00.262020Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591324976631318960:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:00.262102Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:00.496164Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:00.514486Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:00.514607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:00.562906Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:00.608624Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:00.609773Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591324976631318933:2081] 1767493080260662 != 1767493080260665 2026-01-04T02:18:00.652251Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:00.652277Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:00.652283Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:00.652418Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:00.717888Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:00.833257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:01.269765Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:03.275334Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-04T02:18:03.275705Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:116: [WorkloadService] [Service] Resource pools was disabled 2026-01-04T02:18:03.278792Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=NGY2ZTQwZGYtNjRhYmIwOTktODdjZjZkZWEtYjQ3MzhhMjA=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NGY2ZTQwZGYtNjRhYmIwOTktODdjZjZkZWEtYjQ3MzhhMjA= temp_dir_name# a71d58e2-4e2a-2140-d658-9ca11161915e trace_id# 2026-01-04T02:18:03.279173Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=NGY2ZTQwZGYtNjRhYmIwOTktODdjZjZkZWEtYjQ3MzhhMjA=, ActorId: [1:7591324989516221683:2320], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:18:03.312755Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=ODExMzA5NTItYTRkNWVlMzQtMzQyNWE1NzktOTZiZDc5ZWM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ODExMzA5NTItYTRkNWVlMzQtMzQyNWE1NzktOTZiZDc5ZWM= temp_dir_name# 41b5eaf4-40a1-06d9-28bf-038df8513c67 trace_id# 2026-01-04T02:18:03.312889Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=ODExMzA5NTItYTRkNWVlMzQtMzQyNWE1NzktOTZiZDc5ZWM=, ActorId: [1:7591324989516221685:2321], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:18:03.313402Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=ODExMzA5NTItYTRkNWVlMzQtMzQyNWE1NzktOTZiZDc5ZWM=, ActorId: [1:7591324989516221685:2321], ActorState: ReadyState, LegacyTraceId: 01ke3cr85h8axmtsfjnv6cw1np, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [1:7591324989516221684:2495] database# Root database_id# /Root pool_id# trace_id# 2026-01-04T02:18:03.313554Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:659} SessionId: ydb://session/3?node_id=1&id=ODExMzA5NTItYTRkNWVlMzQtMzQyNWE1NzktOTZiZDc5ZWM=, ActorId: [1:7591324989516221685:2321], ActorState: ExecuteState, LegacyTraceId: 01ke3cr85h8axmtsfjnv6cw1np, Sending CompileQuery request trace_id# 2026-01-04T02:18:03.639317Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=1&id=ODExMzA5NTItYTRkNWVlMzQtMzQyNWE1NzktOTZiZDc5ZWM=, ActorId: [1:7591324989516221685:2321], ActorState: ExecuteState, LegacyTraceId: 01ke3cr85h8axmtsfjnv6cw1np, ExecutePhyTx literal# false commit# true deferred_effects_size# 0 tx# 136322052665432 trace_id# 2026-01-04T02:18:03.639414Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=1&id=ODExMzA5NTItYTRkNWVlMzQtMzQyNWE1NzktOTZiZDc5ZWM=, ActorId: [1:7591324989516221685:2321], ActorState: ExecuteState, LegacyTraceId: 01ke3cr85h8axmtsfjnv6cw1np, Sending to Executer span_id_size# 8 trace_id# 2026-01-04T02:18:03.639701Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: 
ydb://session/3?node_id=1&id=ODExMzA5NTItYTRkNWVlMzQtMzQyNWE1NzktOTZiZDc5ZWM=, ActorId: [1:7591324989516221685:2321], ActorState: ExecuteState, LegacyTraceId: 01ke3cr85h8axmtsfjnv6cw1np, Created new KQP executer executer_id# [1:7591324989516221689:2321] is_rollback# false trace_id# 2026-01-04T02:18:03.674199Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2278} SessionId: ydb://session/3?node_id=1&id=ODExMzA5NTItYTRkNWVlMzQtMzQyNWE1NzktOTZiZDc5ZWM=, ActorId: [1:7591324989516221685:2321], ActorState: ExecuteState, LegacyTraceId: 01ke3cr85h8axmtsfjnv6cw1np, Forwarded TEvStreamData to [1:7591324989516221684:2495] trace_id# 2026-01-04T02:18:03.684562Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=1&id=ODExMzA5NTItYTRkNWVlMzQtMzQyNWE1NzktOTZiZDc5ZWM=, ActorId: [1:7591324989516221685:2321], ActorState: ExecuteState, LegacyTraceId: 01ke3cr85h8axmtsfjnv6cw1np, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-04T02:18:03.684892Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=1&id=ODExMzA5NTItYTRkNWVlMzQtMzQyNWE1NzktOTZiZDc5ZWM=, ActorId: [1:7591324989516221685:2321], ActorState: ExecuteState, LegacyTraceId: 01ke3cr85h8axmtsfjnv6cw1np, TxInfo status# Committed kind# Pure total_duration# 45.721 server_duration# 45.618 queries_count# 2 trace_id# 2026-01-04T02:18:03.684950Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=1&id=ODExMzA5NTItYTRkNWVlMzQtMzQyNWE1NzktOTZiZDc5ZWM=, ActorId: [1:7591324989516221685:2321], ActorState: ExecuteState, LegacyTraceId: 01ke3cr85h8axmtsfjnv6cw1np, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-04T02:18:03.685125Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=1&id=ODExMzA5NTItYTRkNWVlMzQtMzQyNWE1NzktOTZiZDc5ZWM=, ActorId: [1:7591324989516221685:2321], ActorState: ExecuteState, LegacyTraceId: 01ke3cr85h8axmtsfjnv6cw1np, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:18:03.685150Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=1&id=ODExMzA5NTItYTRkNWVlMzQtMzQyNWE1NzktOTZiZDc5ZWM=, ActorId: [1:7591324989516221685:2321], ActorState: ExecuteState, LegacyTraceId: 01ke3cr85h8axmtsfjnv6cw1np, EndCleanup is_final# true trace_id# 2026-01-04T02:18:03.685184Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=1&id=ODExMzA5NTItYTRkNWVlMzQtMzQyNWE1NzktOTZiZDc5ZWM=, ActorId: [1:7591324989516221685:2321], ActorState: ExecuteState, LegacyTraceId: 01ke3cr85h8axmtsfjnv6cw1np, Sent query response back to proxy proxy_request_id# 3 proxy_id# [1:7591324976631319195:2265] trace_id# 2026-01-04T02:18:03.685207Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=1&id=ODExMzA5NTItYTRkNWVlMzQtMzQyNWE1NzktOTZiZDc5ZWM=, ActorId: [1:7591324989516221685:2321], ActorState: unknown state, LegacyTraceId: 01ke3cr85h8axmtsfjnv6cw1np, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:18:03.685538Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=1&id=ODExMzA5NTItYTRkNWVlMzQtMzQyNWE1NzktOTZiZDc5ZWM=, ActorId: [1:7591324989516221685:2321], ActorState: unknown state, LegacyTraceId: 01ke3cr85h8axmtsfjnv6cw1np, 
Session actor destroyed trace_id# 2026-01-04T02:18:03.701482Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=1&id=NGY2ZTQwZGYtNjRhYmIwOTktODdjZjZkZWEtYjQ3MzhhMjA=, ActorId: [1:7591324989516221683:2320], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-04T02:18:03.701537Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=1&id=NGY2ZTQwZGYtNjRhYmIwOTktODdjZjZkZWEtYjQ3MzhhMjA=, ActorId: [1:7591324989516221683:2320], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:18:03.701579Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=1&id=NGY2ZTQwZGYtNjRhYmIwOTktODdjZjZkZWEtYjQ3MzhhMjA=, ActorId: [1:7591324989516221683:2320], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-04T02:18:03.701603Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=1&id=NGY2ZTQwZGYtNjRhYmIwOTktODdjZjZkZWEtYjQ3MzhhMjA=, ActorId: [1:7591324989516221683:2320], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:18:03.701678Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=1&id=NGY2ZTQwZGYtNjRhYmIwOTktODdjZjZkZWEtYjQ3MzhhMjA=, ActorId: [1:7591324989516221683:2320], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-04T02:18:04.671576Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.013299s 2026-01-04T02:18:04.677971Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591324993884580017:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:04.678027Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:04.749750Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables ... 
04T02:18:32.811499Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=OWVlYTVhMS04YTljMWUxNy1hNWJjYTI2YS02ZWJjMjZlYQ==, ActorId: [8:7591325115765714002:2330], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-04T02:18:32.811551Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=OWVlYTVhMS04YTljMWUxNy1hNWJjYTI2YS02ZWJjMjZlYQ==, ActorId: [8:7591325115765714002:2330], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:18:32.811581Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=OWVlYTVhMS04YTljMWUxNy1hNWJjYTI2YS02ZWJjMjZlYQ==, ActorId: [8:7591325115765714002:2330], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-04T02:18:32.811600Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=OWVlYTVhMS04YTljMWUxNy1hNWJjYTI2YS02ZWJjMjZlYQ==, ActorId: [8:7591325115765714002:2330], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:18:32.811670Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=OWVlYTVhMS04YTljMWUxNy1hNWJjYTI2YS02ZWJjMjZlYQ==, ActorId: [8:7591325115765714002:2330], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-04T02:18:32.812913Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.h:292: [TQueryRetryActor] [TCpuLoadFetcherActor] OwnerId: [8:7591325115765714041:2895], ActorId: [8:7591325115765714042:2896], Starting query actor #1 [8:7591325115765714043:2897] 2026-01-04T02:18:32.812919Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:135: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7591325115765714042:2896], ActorId: [8:7591325115765714043:2897], Bootstrap. 
Database: /Root, IsSystemUser: 0, run create session 2026-01-04T02:18:32.815345Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg== temp_dir_name# fd1a49f4-4a74-c7f3-6fac-8d8f90cd4b0d trace_id# 2026-01-04T02:18:32.815417Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, ActorId: [8:7591325115765714045:2343], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:18:32.815670Z node 8 :KQP_WORKLOAD_SERVICE TRACE: query_actor.cpp:156: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7591325115765714042:2896], ActorId: [8:7591325115765714043:2897], Successfully created session: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, run query 2026-01-04T02:18:32.815692Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:201: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7591325115765714042:2896], ActorId: [8:7591325115765714043:2897], RunDataQuery with SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, TxId: , text: -- TCpuLoadFetcherActor::OnRunQuery SELECT SUM(CpuThreads) AS ThreadsCount, SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad FROM `.sys/nodes`; 2026-01-04T02:18:32.815936Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, ActorId: [8:7591325115765714045:2343], ActorState: ReadyState, LegacyTraceId: 01ke3cs4zf1v9pb86k60hhsjjx, Received request proxy_request_id# 6 prepared# false has_tx_control# true action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DML text# -- TCpuLoadFetcherActor::OnRunQuery SELECT SUM(CpuThreads) AS ThreadsCount, SUM(CpuThreads * (1.0 - CpuIdle)) AS TotalLoad FROM `.sys/nodes`; rpc_actor# [8:7591325115765714046:2344] database# /Root database_id# /Root pool_id# trace_id# 2026-01-04T02:18:32.816258Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, ActorId: [8:7591325115765714045:2343], ActorState: ExecuteState, LegacyTraceId: 01ke3cs4zf1v9pb86k60hhsjjx, ExecutePhyTx literal# false commit# true deferred_effects_size# 0 tx# 136322054241560 trace_id# 2026-01-04T02:18:32.816296Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, ActorId: [8:7591325115765714045:2343], ActorState: ExecuteState, LegacyTraceId: 01ke3cs4zf1v9pb86k60hhsjjx, Sending to Executer span_id_size# 8 trace_id# 2026-01-04T02:18:32.816389Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, ActorId: [8:7591325115765714045:2343], ActorState: ExecuteState, LegacyTraceId: 01ke3cs4zf1v9pb86k60hhsjjx, Created new KQP executer executer_id# [8:7591325115765714049:2343] is_rollback# false trace_id# 2026-01-04T02:18:32.819451Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, ActorId: [8:7591325115765714045:2343], ActorState: ExecuteState, LegacyTraceId: 01ke3cs4zf1v9pb86k60hhsjjx, TEvTxResponse 
current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-04T02:18:32.819553Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, ActorId: [8:7591325115765714045:2343], ActorState: ExecuteState, LegacyTraceId: 01ke3cs4zf1v9pb86k60hhsjjx, TxInfo status# Committed kind# ReadOnly total_duration# 3.341 server_duration# 3.302 queries_count# 2 trace_id# 2026-01-04T02:18:32.819627Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, ActorId: [8:7591325115765714045:2343], ActorState: ExecuteState, LegacyTraceId: 01ke3cs4zf1v9pb86k60hhsjjx, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-04T02:18:32.819681Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, ActorId: [8:7591325115765714045:2343], ActorState: ExecuteState, LegacyTraceId: 01ke3cs4zf1v9pb86k60hhsjjx, Cleanup start is_final# false has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:18:32.819707Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, ActorId: [8:7591325115765714045:2343], ActorState: ExecuteState, LegacyTraceId: 01ke3cs4zf1v9pb86k60hhsjjx, EndCleanup is_final# false trace_id# 2026-01-04T02:18:32.819751Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, ActorId: [8:7591325115765714045:2343], ActorState: ExecuteState, LegacyTraceId: 01ke3cs4zf1v9pb86k60hhsjjx, Sent query response back to proxy proxy_request_id# 6 proxy_id# [8:7591325102880811090:2268] trace_id# 2026-01-04T02:18:32.819929Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:244: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7591325115765714042:2896], ActorId: [8:7591325115765714043:2897], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, TxId: 2026-01-04T02:18:32.820022Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:371: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7591325115765714042:2896], ActorId: [8:7591325115765714043:2897], Finish with SUCCESS, SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, TxId: 2026-01-04T02:18:32.820066Z node 8 :KQP_WORKLOAD_SERVICE TRACE: query_actor.cpp:171: [TQueryBase] [TCpuLoadFetcherActor] OwnerId: [8:7591325115765714042:2896], ActorId: [8:7591325115765714043:2897], Delete session: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg== 2026-01-04T02:18:32.820079Z node 8 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.h:311: [TQueryRetryActor] [TCpuLoadFetcherActor] OwnerId: [8:7591325115765714041:2895], ActorId: [8:7591325115765714042:2896], Got response [8:7591325115765714043:2897] SUCCESS 2026-01-04T02:18:32.820183Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, ActorId: [8:7591325115765714045:2343], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-04T02:18:32.820214Z node 8 :KQP_SESSION INFO: 
{KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, ActorId: [8:7591325115765714045:2343], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:18:32.820238Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, ActorId: [8:7591325115765714045:2343], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-04T02:18:32.820273Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, ActorId: [8:7591325115765714045:2343], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:18:32.820343Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=ZmQ5YTg3NmEtOTAyZmMzZmItYjA1NDU1NGEtYzg3Y2Q2Zg==, ActorId: [8:7591325115765714045:2343], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-04T02:18:32.826088Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=Mjg1MzZjNjYtMTgyYjQ2YWEtYWVlODI0MzMtZTA5MmI1NDU=, ActorId: [8:7591325115765713892:2326], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-04T02:18:32.826136Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=Mjg1MzZjNjYtMTgyYjQ2YWEtYWVlODI0MzMtZTA5MmI1NDU=, ActorId: [8:7591325115765713892:2326], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:18:32.826165Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=Mjg1MzZjNjYtMTgyYjQ2YWEtYWVlODI0MzMtZTA5MmI1NDU=, ActorId: [8:7591325115765713892:2326], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-04T02:18:32.826183Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=Mjg1MzZjNjYtMTgyYjQ2YWEtYWVlODI0MzMtZTA5MmI1NDU=, ActorId: [8:7591325115765713892:2326], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:18:32.826236Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=Mjg1MzZjNjYtMTgyYjQ2YWEtYWVlODI0MzMtZTA5MmI1NDU=, ActorId: [8:7591325115765713892:2326], ActorState: unknown state, Session actor destroyed trace_id# |80.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery >> TGRpcRateLimiterTest::AcquireResourceManyRequiredActorApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery >> 
TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData >> ResourcePoolClassifiersDdl::TestExplicitPoolId [GOOD] >> ResourcePoolClassifiersDdl::TestMultiGroupClassification >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs >> ResourcePoolsSysView::TestResourcePoolsSysViewOnServerless [GOOD] >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob >> Vacuum::BorrowerDataCleanedAfterCopyTable [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection |80.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |80.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view |80.7%| [TM] {RESULT} ydb/core/ymq/actor/yc_search_ut/unittest |80.7%| [TS] {RESULT} ydb/core/tx/schemeshard/ut_ru_calculator/unittest |80.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/view/ydb-core-kqp-ut-view >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_vacuum/unittest >> Vacuum::BorrowerDataCleanedAfterCopyTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:18:04.490061Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:18:04.606223Z node 1 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group 2026-01-04T02:18:04.615550Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:317:2361], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:18:04.615907Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:18:04.615962Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:18:04.917331Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:04.917481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:05.002501Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:36:2083] 1767493081580760 != 1767493081580764 2026-01-04T02:18:05.013403Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:05.061209Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:05.165195Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:18:05.465719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:05.484061Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:05.596461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:05.977720Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:986:2843], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:05.977875Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:997:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:05.978040Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:05.979212Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1002:2853], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:05.979407Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:05.984142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:18:06.123824Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1000:2851], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:18:06.220650Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1058:2890] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finished 2026-01-04T02:18:11.185104Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:18:11.189995Z node 2 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group 2026-01-04T02:18:11.191012Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:18:11.195237Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:316:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:18:11.195514Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:18:11.195669Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:18:11.416497Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:11.416650Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:11.446472Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:36:2083] 1767493088244085 != 1767493088244089 2026-01-04T02:18:11.454067Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:11.499390Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:11.568343Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:18:11.838426Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:11.851690Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:11.954547Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:12.256433Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:979:2837], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:12.258603Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:990:2842], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:12.258693Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:12.259453Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:995:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:12.259506Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:12.262935Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:18:12.366785Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:993:2845], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:18:12.402075Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1050:2883] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finished 2026-01-04T02:18:16.570368Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:18:16.575896Z node 3 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does ... , LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finished 2026-01-04T02:18:31.526314Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:18:31.530219Z node 6 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group 2026-01-04T02:18:31.531029Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:18:31.534085Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:116:2163], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:18:31.534265Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:18:31.534351Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:18:31.743421Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:31.743552Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:31.763444Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:36:2083] 1767493109192435 != 1767493109192439 2026-01-04T02:18:31.766047Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:31.807478Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:31.873472Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:18:32.126107Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:32.139002Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:32.239816Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:32.740848Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:1076:2915], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:32.740946Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:1086:2920], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:32.741004Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:32.741622Z node 6 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [6:1091:2924], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:32.741690Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:32.745101Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:18:32.915348Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [6:1090:2923], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:18:32.949783Z node 6 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [6:1150:2964] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finished 2026-01-04T02:18:36.621401Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:18:36.625745Z node 7 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group 2026-01-04T02:18:36.626503Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:18:36.628288Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:305:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:18:36.628418Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:18:36.628488Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:18:36.859239Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:36.859362Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:36.873923Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:36:2083] 1767493114529499 != 1767493114529502 2026-01-04T02:18:36.877910Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:36.919046Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:36.996439Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:18:37.239399Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:37.252363Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:37.351383Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:37.852894Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:1076:2915], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:37.852990Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:1086:2920], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:37.853045Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:37.853686Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:1091:2924], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:37.853783Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:37.857110Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:18:37.984819Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:1090:2923], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:18:38.018833Z node 7 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [7:1149:2963] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:18:39.043092Z node 7 :TX_DATASHARD WARN: datashard__vacuum.cpp:37: Vacuum of tablet# 72075186224037888: has borrowed parts, requested from [7:832:2727] |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_vacuum/unittest >> DescribeSchemaSecretsService::FailWithoutGrants [GOOD] >> DescribeSchemaSecretsService::GroupGrants >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApi [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection >> TCreateAndDropViewTest::CheckCreatedView >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection >> KqpWorkloadService::TestCpuLoadThreshold [GOOD] >> KqpWorkloadService::TestCpuLoadThresholdRefresh >> ResourcePoolClassifiersDdl::TestAlterResourcePoolClassifier [GOOD] >> ResourcePoolClassifiersDdl::TestDropResourcePool >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount >> ResourcePoolsDdl::TestAlterResourcePool [GOOD] >> ResourcePoolsDdl::TestDropResourcePool >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection >> KqpWorkloadService::TestQueryCancelAfterPoolWithLimits [GOOD] >> KqpWorkloadService::TestStartQueryAfterCancel >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding >> TCreateAndDropViewTest::CheckCreatedView [GOOD] >> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApi [GOOD] >> 
TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding >> DescribeSchemaSecretsService::GroupGrants [GOOD] >> DescribeSchemaSecretsService::BatchRequest >> TControlPlaneProxyCheckPermissionsControlPlaneStorageSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery >> KqpWorkloadService::TestCpuLoadThresholdRefresh [GOOD] >> KqpWorkloadService::TestHandlerActorCleanup >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries >> TCreateAndDropViewTest::CreateViewDisabledFeatureFlag [GOOD] >> TCreateAndDropViewTest::InvalidQuery >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus >> ResourcePoolsDdl::TestDropResourcePool [GOOD] >> ResourcePoolClassifiersDdl::TestMultiGroupClassification [GOOD] >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsSysView::TestResourcePoolsSysViewFilters [GOOD] Test command err: 2026-01-04T02:18:00.261584Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591324975373512738:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:00.261643Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:00.481302Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:00.489172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:00.489302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:00.558907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:00.592686Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591324975373512709:2081] 1767493080260499 != 1767493080260502 2026-01-04T02:18:00.593670Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:00.641870Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-01-04T02:18:00.641890Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:00.641896Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:00.641986Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:00.686295Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:00.817705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:01.267249Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:02.963141Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-04T02:18:02.963529Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-04T02:18:02.963576Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-04T02:18:02.967027Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324983963448156:2320], Start check tables existence, number paths: 2 2026-01-04T02:18:02.967643Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=NTI4NTY3NDktNzJmMmI4YmYtNWI2MTNiZDUtYjZlZGE4MmY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NTI4NTY3NDktNzJmMmI4YmYtNWI2MTNiZDUtYjZlZGE4MmY= temp_dir_name# 61896564-4c1e-8b50-63f1-23a3b52ff60f trace_id# 2026-01-04T02:18:02.967844Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=NTI4NTY3NDktNzJmMmI4YmYtNWI2MTNiZDUtYjZlZGE4MmY=, ActorId: [1:7591324983963448159:2323], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:18:02.986172Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-04T02:18:02.986723Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324983963448156:2320], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-04T02:18:02.986788Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324983963448156:2320], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-04T02:18:02.986844Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324983963448156:2320], Successfully finished 2026-01-04T02:18:02.986930Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-04T02:18:02.994067Z node 1 
:KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324983963448185:2496], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-04T02:18:02.998305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:18:03.001176Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324983963448185:2496], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2026-01-04T02:18:03.010208Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324983963448185:2496], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-04T02:18:03.011946Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324983963448185:2496], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:18:03.100694Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324983963448185:2496], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-04T02:18:03.166246Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591324988258415532:2528] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:18:03.166357Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324983963448185:2496], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-04T02:18:03.168936Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=MmI1YTg2YjItOWE4MmUxMDQtZTllMDI4MDAtMzljNjFhNzY=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# MmI1YTg2YjItOWE4MmUxMDQtZTllMDI4MDAtMzljNjFhNzY= temp_dir_name# 5b3a6a0f-4b03-198b-5256-52b6ccf1407d trace_id# 2026-01-04T02:18:03.169196Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=MmI1YTg2YjItOWE4MmUxMDQtZTllMDI4MDAtMzljNjFhNzY=, ActorId: [1:7591324988258415539:2325], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:18:03.169301Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=MmI1YTg2YjItOWE4MmUxMDQtZTllMDI4MDAtMzljNjFhNzY=, ActorId: [1:7591324988258415539:2325], ActorState: ReadyState, LegacyTraceId: 01ke3cr8118xzmmkfhtkdmn0d0, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [1:7591324988258415538:2533] database# Root database_id# /Root pool_id# sample_pool_id trace_id# 2026-01-04T02:18:03.169479Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-04T02:18:03.169492Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-04T02:18:03.169543Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Received new request from [1:7591324988258415539:2325], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MmI1YTg2YjItOWE4MmUxMDQtZTllMDI4MDAtMzljNjFhNzY= 2026-01-04T02:18:03.169585Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-04T02:18:03.169611Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7591324988258415542:2327], Database: /Root, Start database fetching 2026-01-04T02:18:03.169657Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324988258415541:2326], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-04T02:18:03.170877Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: 
[1:7591324988258415542:2327], Database: /Root, Database info successfully fetched, serverless: 0 2026-01-04T02:18:03.170954Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:247: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2026-01-04T02:18:03.171172Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324988258415541:2326], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-04T02:18:03.171206Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2026-01-04T02:18:03.171218Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2026-01-04T02:18:03.171426Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:44: [WorkloadService] [TPoolResolverActor] ActorId: [1:7591324988258415552:2328], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=MmI1YTg2YjItOWE4MmUxMDQtZTllMDI4MDAtMzljNjFhNzY=, Start pool fetching 2026-01-04T02:18:03.171476Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:466: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7591324988258415553:2329], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 41] 2026-01-04T02:18:03.171516Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324988258415554:2330], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-04T02:18:03.171745Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324988258415554:2330], DatabaseId: /Root, PoolId: sample_pool_id, Pool info succes ... 
U=, ActorId: [12:7591325171362168295:2391], ActorState: ExecuteState, LegacyTraceId: 01ke3csh4hbgvhvsg784ha0bp9, Forwarded TEvStreamData to [10:7591325170941700381:3887] trace_id# 2026-01-04T02:18:45.435624Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=12&id=MTk2OTA0M2MtNmNkNTk5MGUtM2JjZGY4NjItMTgyNDAzZTU=, ActorId: [12:7591325171362168295:2391], ActorState: ExecuteState, LegacyTraceId: 01ke3csh4hbgvhvsg784ha0bp9, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-04T02:18:45.435741Z node 12 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=12&id=MTk2OTA0M2MtNmNkNTk5MGUtM2JjZGY4NjItMTgyNDAzZTU=, ActorId: [12:7591325171362168295:2391], ActorState: ExecuteState, LegacyTraceId: 01ke3csh4hbgvhvsg784ha0bp9, TxInfo status# Committed kind# ReadOnly total_duration# 8.543 server_duration# 8.471 queries_count# 2 trace_id# 2026-01-04T02:18:45.435805Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=12&id=MTk2OTA0M2MtNmNkNTk5MGUtM2JjZGY4NjItMTgyNDAzZTU=, ActorId: [12:7591325171362168295:2391], ActorState: ExecuteState, LegacyTraceId: 01ke3csh4hbgvhvsg784ha0bp9, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-04T02:18:45.436172Z node 12 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=12&id=MTk2OTA0M2MtNmNkNTk5MGUtM2JjZGY4NjItMTgyNDAzZTU=, ActorId: [12:7591325171362168295:2391], ActorState: ExecuteState, LegacyTraceId: 01ke3csh4hbgvhvsg784ha0bp9, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:18:45.436206Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=12&id=MTk2OTA0M2MtNmNkNTk5MGUtM2JjZGY4NjItMTgyNDAzZTU=, ActorId: [12:7591325171362168295:2391], ActorState: ExecuteState, LegacyTraceId: 01ke3csh4hbgvhvsg784ha0bp9, EndCleanup is_final# true trace_id# 2026-01-04T02:18:45.436257Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=12&id=MTk2OTA0M2MtNmNkNTk5MGUtM2JjZGY4NjItMTgyNDAzZTU=, ActorId: [12:7591325171362168295:2391], ActorState: ExecuteState, LegacyTraceId: 01ke3csh4hbgvhvsg784ha0bp9, Sent query response back to proxy proxy_request_id# 5 proxy_id# [12:7591325154182297959:2266] trace_id# 2026-01-04T02:18:45.436277Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=12&id=MTk2OTA0M2MtNmNkNTk5MGUtM2JjZGY4NjItMTgyNDAzZTU=, ActorId: [12:7591325171362168295:2391], ActorState: unknown state, LegacyTraceId: 01ke3csh4hbgvhvsg784ha0bp9, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:18:45.436603Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=12&id=MTk2OTA0M2MtNmNkNTk5MGUtM2JjZGY4NjItMTgyNDAzZTU=, ActorId: [12:7591325171362168295:2391], ActorState: unknown state, LegacyTraceId: 01ke3csh4hbgvhvsg784ha0bp9, Session actor destroyed trace_id# 2026-01-04T02:18:45.439243Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=12&id=ODEwMTU5N2MtMmYxZjVkZmYtMzY3MTU3ZDUtOTE1Zjk3MDU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ODEwMTU5N2MtMmYxZjVkZmYtMzY3MTU3ZDUtOTE1Zjk3MDU= temp_dir_name# 6d9ddb5e-4dee-cf7c-cf70-b399cb7ab7a7 trace_id# 
2026-01-04T02:18:45.439321Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=12&id=ODEwMTU5N2MtMmYxZjVkZmYtMzY3MTU3ZDUtOTE1Zjk3MDU=, ActorId: [12:7591325171362168317:2399], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:18:45.439600Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=12&id=ODEwMTU5N2MtMmYxZjVkZmYtMzY3MTU3ZDUtOTE1Zjk3MDU=, ActorId: [12:7591325171362168317:2399], ActorState: ReadyState, LegacyTraceId: 01ke3csh9z700nbq5vdtfs22pr, Received request proxy_request_id# 6 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT * FROM `.sys/resource_pools` WHERE Name >= "default" rpc_actor# [10:7591325170941700391:3888] database# /Root/test-dedicated database_id# /Root/test-dedicated pool_id# default trace_id# 2026-01-04T02:18:45.439622Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:280} SessionId: ydb://session/3?node_id=12&id=ODEwMTU5N2MtMmYxZjVkZmYtMzY3MTU3ZDUtOTE1Zjk3MDU=, ActorId: [12:7591325171362168317:2399], ActorState: ReadyState, LegacyTraceId: 01ke3csh9z700nbq5vdtfs22pr, Request placed into pool from cache pool_id# default trace_id# 2026-01-04T02:18:45.439670Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:659} SessionId: ydb://session/3?node_id=12&id=ODEwMTU5N2MtMmYxZjVkZmYtMzY3MTU3ZDUtOTE1Zjk3MDU=, ActorId: [12:7591325171362168317:2399], ActorState: ExecuteState, LegacyTraceId: 01ke3csh9z700nbq5vdtfs22pr, Sending CompileQuery request trace_id# 2026-01-04T02:18:45.558155Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=12&id=ODEwMTU5N2MtMmYxZjVkZmYtMzY3MTU3ZDUtOTE1Zjk3MDU=, ActorId: [12:7591325171362168317:2399], ActorState: ExecuteState, LegacyTraceId: 01ke3csh9z700nbq5vdtfs22pr, ExecutePhyTx literal# false commit# true deferred_effects_size# 0 tx# 137141263221528 trace_id# 2026-01-04T02:18:45.558208Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=12&id=ODEwMTU5N2MtMmYxZjVkZmYtMzY3MTU3ZDUtOTE1Zjk3MDU=, ActorId: [12:7591325171362168317:2399], ActorState: ExecuteState, LegacyTraceId: 01ke3csh9z700nbq5vdtfs22pr, Sending to Executer span_id_size# 8 trace_id# 2026-01-04T02:18:45.558325Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=12&id=ODEwMTU5N2MtMmYxZjVkZmYtMzY3MTU3ZDUtOTE1Zjk3MDU=, ActorId: [12:7591325171362168317:2399], ActorState: ExecuteState, LegacyTraceId: 01ke3csh9z700nbq5vdtfs22pr, Created new KQP executer executer_id# [12:7591325171362168323:2399] is_rollback# false trace_id# 2026-01-04T02:18:45.565328Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2278} SessionId: ydb://session/3?node_id=12&id=ODEwMTU5N2MtMmYxZjVkZmYtMzY3MTU3ZDUtOTE1Zjk3MDU=, ActorId: [12:7591325171362168317:2399], ActorState: ExecuteState, LegacyTraceId: 01ke3csh9z700nbq5vdtfs22pr, Forwarded TEvStreamData to [10:7591325170941700391:3888] trace_id# 2026-01-04T02:18:45.565870Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=12&id=ODEwMTU5N2MtMmYxZjVkZmYtMzY3MTU3ZDUtOTE1Zjk3MDU=, ActorId: [12:7591325171362168317:2399], ActorState: ExecuteState, LegacyTraceId: 01ke3csh9z700nbq5vdtfs22pr, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-04T02:18:45.565989Z node 12 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: 
ydb://session/3?node_id=12&id=ODEwMTU5N2MtMmYxZjVkZmYtMzY3MTU3ZDUtOTE1Zjk3MDU=, ActorId: [12:7591325171362168317:2399], ActorState: ExecuteState, LegacyTraceId: 01ke3csh9z700nbq5vdtfs22pr, TxInfo status# Committed kind# ReadOnly total_duration# 7.91 server_duration# 7.848 queries_count# 2 trace_id# 2026-01-04T02:18:45.566043Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=12&id=ODEwMTU5N2MtMmYxZjVkZmYtMzY3MTU3ZDUtOTE1Zjk3MDU=, ActorId: [12:7591325171362168317:2399], ActorState: ExecuteState, LegacyTraceId: 01ke3csh9z700nbq5vdtfs22pr, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-04T02:18:45.566347Z node 12 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=12&id=ODEwMTU5N2MtMmYxZjVkZmYtMzY3MTU3ZDUtOTE1Zjk3MDU=, ActorId: [12:7591325171362168317:2399], ActorState: ExecuteState, LegacyTraceId: 01ke3csh9z700nbq5vdtfs22pr, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:18:45.566381Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=12&id=ODEwMTU5N2MtMmYxZjVkZmYtMzY3MTU3ZDUtOTE1Zjk3MDU=, ActorId: [12:7591325171362168317:2399], ActorState: ExecuteState, LegacyTraceId: 01ke3csh9z700nbq5vdtfs22pr, EndCleanup is_final# true trace_id# 2026-01-04T02:18:45.566434Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=12&id=ODEwMTU5N2MtMmYxZjVkZmYtMzY3MTU3ZDUtOTE1Zjk3MDU=, ActorId: [12:7591325171362168317:2399], ActorState: ExecuteState, LegacyTraceId: 01ke3csh9z700nbq5vdtfs22pr, Sent query response back to proxy proxy_request_id# 6 proxy_id# [12:7591325154182297959:2266] trace_id# 2026-01-04T02:18:45.566455Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=12&id=ODEwMTU5N2MtMmYxZjVkZmYtMzY3MTU3ZDUtOTE1Zjk3MDU=, ActorId: [12:7591325171362168317:2399], ActorState: unknown state, LegacyTraceId: 01ke3csh9z700nbq5vdtfs22pr, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:18:45.566810Z node 12 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=12&id=ODEwMTU5N2MtMmYxZjVkZmYtMzY3MTU3ZDUtOTE1Zjk3MDU=, ActorId: [12:7591325171362168317:2399], ActorState: unknown state, LegacyTraceId: 01ke3csh9z700nbq5vdtfs22pr, Session actor destroyed trace_id# 2026-01-04T02:18:45.572310Z node 10 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2026-01-04T02:18:45.572720Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-04T02:18:45.572888Z node 10 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2026-01-04T02:18:45.573115Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-04T02:18:45.574318Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=10&id=Y2I3YzU1ZGQtM2IwNDJmZDktYzRmNWZjYzgtNDJiZTYwOWY=, ActorId: [10:7591325153761829970:2330], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-04T02:18:45.574402Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: 
ydb://session/3?node_id=10&id=Y2I3YzU1ZGQtM2IwNDJmZDktYzRmNWZjYzgtNDJiZTYwOWY=, ActorId: [10:7591325153761829970:2330], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:18:45.574463Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=Y2I3YzU1ZGQtM2IwNDJmZDktYzRmNWZjYzgtNDJiZTYwOWY=, ActorId: [10:7591325153761829970:2330], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-04T02:18:45.574521Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=10&id=Y2I3YzU1ZGQtM2IwNDJmZDktYzRmNWZjYzgtNDJiZTYwOWY=, ActorId: [10:7591325153761829970:2330], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:18:45.574630Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=10&id=Y2I3YzU1ZGQtM2IwNDJmZDktYzRmNWZjYzgtNDJiZTYwOWY=, ActorId: [10:7591325153761829970:2330], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-04T02:18:46.449458Z node 12 :SYSTEM_VIEWS WARN: sysview_service.cpp:813: Summary delivery problem: service id# [12:7591325154182297819:2147], processor id# 72075186224037891, database# /Root/test-dedicated |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery >> TGRpcRateLimiterTest::AcquireResourceManyUsedGrpcApiWithCancelAfter [GOOD] >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter >> TCreateAndDropViewTest::InvalidQuery [GOOD] >> TCreateAndDropViewTest::ParsingSecurityInvoker ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolsDdl::TestDropResourcePool [GOOD] Test command err: 2026-01-04T02:18:00.409613Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591324977515079674:2265];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:00.410237Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:00.547045Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:18:00.791017Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:00.820044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:00.820176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:00.859711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:00.862679Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:00.948523Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:00.948554Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:00.948574Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:00.948694Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:01.083238Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:01.108446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:01.409479Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:03.103910Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-04T02:18:03.104084Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324990399982590:2326], Start check tables existence, number paths: 2 2026-01-04T02:18:03.107941Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=NGQ1NDAwYWMtMWE5YjEzZDYtMjEzOGNmYzItZTkwNjQ0N2M=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NGQ1NDAwYWMtMWE5YjEzZDYtMjEzOGNmYzItZTkwNjQ0N2M= temp_dir_name# ca196ad5-4d8f-14a1-317c-78b5ee3d0c50 trace_id# 2026-01-04T02:18:03.108063Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324990399982590:2326], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-04T02:18:03.108136Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324990399982590:2326], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-04T02:18:03.108162Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324990399982590:2326], Successfully finished 2026-01-04T02:18:03.108448Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-04T02:18:03.108469Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-04T02:18:03.108496Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-04T02:18:03.108535Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=NGQ1NDAwYWMtMWE5YjEzZDYtMjEzOGNmYzItZTkwNjQ0N2M=, ActorId: [1:7591324990399982609:2330], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:18:03.108582Z node 1 :KQP_WORKLOAD_SERVICE TRACE: 
kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-04T02:18:03.171024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:18:03.193280Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591324989826914645:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:03.193353Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:03.223110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:03.223234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:03.227214Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-04T02:18:03.230597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:03.239385Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-dedicated/.metadata/script_executions 2026-01-04T02:18:03.291782Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.292031Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.292126Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.292217Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.292307Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.292373Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.292444Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.292533Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.292613Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.343297Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:03.343405Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:03.347431Z node 3 :HIVE WARN: node_info.cpp:25: 
HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:03.453958Z node 3 :STATISTICS WARN: tx_init.cpp:298: [72075186224037894] TTxInit::Complete. EnableColumnStatistics=false 2026-01-04T02:18:03.454155Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:03.596157Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:03.596185Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:03.596191Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:03.596277Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:03.637624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:18:03.654173Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591324989734911164:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:03.656750Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:03.679251Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:03.679339Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:03.681683Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:18:03.953504Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-shared/.metadata/script_executions 2026-01-04T02:18:03.949895Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:04.060081Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:04.060165Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:04.064149Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:04.102147Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:04.102299Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:04.102363Z node 2 :HIVE WARN: tx__create_ ... 
# 2026-01-04T02:18:47.533995Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7591325179983751882:2441], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-04T02:18:47.534062Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-04T02:18:47.535965Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=MzI1MGQ0YjAtYTE1ZGE4ZmYtYzNkNDc0MTYtZjU1N2UxZTc=, ActorId: [10:7591325171393816726:2324], ActorState: ExecuteState, LegacyTraceId: 01ke3cskay099fqbb0q9774w7b, Cleanup start is_final# false has_cleanup_ctx# true transactions_to_be_aborted_size# 0 worker_id# [10:7591325179983751831:2324] workload_service_cleanup# false trace_id# 2026-01-04T02:18:47.537922Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=MzI1MGQ0YjAtYTE1ZGE4ZmYtYzNkNDc0MTYtZjU1N2UxZTc=, ActorId: [10:7591325171393816726:2324], ActorState: CleanupState, LegacyTraceId: 01ke3cskay099fqbb0q9774w7b, EndCleanup is_final# false trace_id# 2026-01-04T02:18:47.537964Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=10&id=MzI1MGQ0YjAtYTE1ZGE4ZmYtYzNkNDc0MTYtZjU1N2UxZTc=, ActorId: [10:7591325171393816726:2324], ActorState: CleanupState, LegacyTraceId: 01ke3cskay099fqbb0q9774w7b, Sent query response back to proxy proxy_request_id# 17 proxy_id# [10:7591325158508914221:2265] trace_id# 2026-01-04T02:18:47.541294Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=10&id=N2YxMTZmMTMtNjlkYTJiY2UtM2VhNzU4ZTEtNDEyM2M2M2Q=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# N2YxMTZmMTMtNjlkYTJiY2UtM2VhNzU4ZTEtNDEyM2M2M2Q= temp_dir_name# 5baa5805-40a1-880a-8482-f882b2cc982e trace_id# 2026-01-04T02:18:47.541385Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=10&id=N2YxMTZmMTMtNjlkYTJiY2UtM2VhNzU4ZTEtNDEyM2M2M2Q=, ActorId: [10:7591325179983751895:2442], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:18:47.541556Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: my_pool 2026-01-04T02:18:47.541592Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: my_pool 2026-01-04T02:18:47.541622Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=10&id=N2YxMTZmMTMtNjlkYTJiY2UtM2VhNzU4ZTEtNDEyM2M2M2Q=, ActorId: [10:7591325179983751895:2442], ActorState: ReadyState, LegacyTraceId: 01ke3cskbnaetyh52821hmqdep, Received request proxy_request_id# 19 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [10:7591325179983751894:2782] database# Root database_id# /Root pool_id# my_pool trace_id# 2026-01-04T02:18:47.541628Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7591325179983751897:2443], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2026-01-04T02:18:47.541677Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Received new request from [10:7591325179983751895:2442], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=N2YxMTZmMTMtNjlkYTJiY2UtM2VhNzU4ZTEtNDEyM2M2M2Q= 2026-01-04T02:18:47.541748Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:44: [WorkloadService] [TPoolResolverActor] ActorId: [10:7591325179983751898:2444], DatabaseId: 
/Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=N2YxMTZmMTMtNjlkYTJiY2UtM2VhNzU4ZTEtNDEyM2M2M2Q=, Start pool fetching 2026-01-04T02:18:47.541778Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7591325179983751900:2445], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2026-01-04T02:18:47.541797Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7591325179983751897:2443], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-04T02:18:47.541879Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-04T02:18:47.541922Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: my_pool 2026-01-04T02:18:47.541946Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7591325179983751902:2446], DatabaseId: /Root, PoolId: my_pool, Start pool fetching 2026-01-04T02:18:47.541968Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7591325179983751900:2445], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-04T02:18:47.542032Z node 10 :KQP_WORKLOAD_SERVICE ERROR: scheme_actors.cpp:56: [WorkloadService] [TPoolResolverActor] ActorId: [10:7591325179983751898:2444], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=N2YxMTZmMTMtNjlkYTJiY2UtM2VhNzU4ZTEtNDEyM2M2M2Q=, Failed to fetch pool info NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-04T02:18:47.542041Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7591325179983751902:2446], DatabaseId: /Root, PoolId: my_pool, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-04T02:18:47.542086Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool my_pool, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool my_pool not found or you don't have access permissions } 2026-01-04T02:18:47.542111Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:114: [WorkloadService] [TPoolResolverActor] ActorId: [10:7591325179983751898:2444], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=N2YxMTZmMTMtNjlkYTJiY2UtM2VhNzU4ZTEtNDEyM2M2M2Q=, Failed to resolve pool, NOT_FOUND, issues: {
: Error: Failed to resolve pool id my_pool subissue: {
: Error: Resource pool my_pool not found or you don't have access permissions } } 2026-01-04T02:18:47.542193Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:553: [WorkloadService] [Service] Reply continue error NOT_FOUND to [10:7591325179983751895:2442]: {
: Error: Failed to resolve pool id my_pool subissue: {
: Error: Resource pool my_pool not found or you don't have access permissions } } 2026-01-04T02:18:47.542356Z node 10 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=10&id=N2YxMTZmMTMtNjlkYTJiY2UtM2VhNzU4ZTEtNDEyM2M2M2Q=, ActorId: [10:7591325179983751895:2442], ActorState: ExecuteState, LegacyTraceId: 01ke3cskbnaetyh52821hmqdep, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool status# NOT_FOUND issues# { message: "Failed to resolve pool id my_pool" severity: 1 issues { message: "Resource pool my_pool not found or you don\'t have access permissions" severity: 1 } } trace_id# 2026-01-04T02:18:47.542452Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=N2YxMTZmMTMtNjlkYTJiY2UtM2VhNzU4ZTEtNDEyM2M2M2Q=, ActorId: [10:7591325179983751895:2442], ActorState: ExecuteState, LegacyTraceId: 01ke3cskbnaetyh52821hmqdep, Cleanup start is_final# true has_cleanup_ctx# true transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# true trace_id# 2026-01-04T02:18:47.542481Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:196: [WorkloadService] [Service] Finished request with worker actor [10:7591325179983751895:2442], DatabaseId: /Root, PoolId: my_pool, SessionId: ydb://session/3?node_id=10&id=N2YxMTZmMTMtNjlkYTJiY2UtM2VhNzU4ZTEtNDEyM2M2M2Q= 2026-01-04T02:18:47.542532Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=N2YxMTZmMTMtNjlkYTJiY2UtM2VhNzU4ZTEtNDEyM2M2M2Q=, ActorId: [10:7591325179983751895:2442], ActorState: CleanupState, LegacyTraceId: 01ke3cskbnaetyh52821hmqdep, EndCleanup is_final# true trace_id# 2026-01-04T02:18:47.542602Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=10&id=N2YxMTZmMTMtNjlkYTJiY2UtM2VhNzU4ZTEtNDEyM2M2M2Q=, ActorId: [10:7591325179983751895:2442], ActorState: CleanupState, LegacyTraceId: 01ke3cskbnaetyh52821hmqdep, Sent query response back to proxy proxy_request_id# 19 proxy_id# [10:7591325158508914221:2265] trace_id# 2026-01-04T02:18:47.542635Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=10&id=N2YxMTZmMTMtNjlkYTJiY2UtM2VhNzU4ZTEtNDEyM2M2M2Q=, ActorId: [10:7591325179983751895:2442], ActorState: unknown state, LegacyTraceId: 01ke3cskbnaetyh52821hmqdep, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:18:47.542711Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=10&id=N2YxMTZmMTMtNjlkYTJiY2UtM2VhNzU4ZTEtNDEyM2M2M2Q=, ActorId: [10:7591325179983751895:2442], ActorState: unknown state, LegacyTraceId: 01ke3cskbnaetyh52821hmqdep, Session actor destroyed trace_id# 2026-01-04T02:18:47.548460Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=10&id=MzI1MGQ0YjAtYTE1ZGE4ZmYtYzNkNDc0MTYtZjU1N2UxZTc=, ActorId: [10:7591325171393816726:2324], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-04T02:18:47.548510Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=MzI1MGQ0YjAtYTE1ZGE4ZmYtYzNkNDc0MTYtZjU1N2UxZTc=, ActorId: [10:7591325171393816726:2324], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:18:47.548538Z node 10 :KQP_SESSION 
DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=MzI1MGQ0YjAtYTE1ZGE4ZmYtYzNkNDc0MTYtZjU1N2UxZTc=, ActorId: [10:7591325171393816726:2324], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-04T02:18:47.548563Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=10&id=MzI1MGQ0YjAtYTE1ZGE4ZmYtYzNkNDc0MTYtZjU1N2UxZTc=, ActorId: [10:7591325171393816726:2324], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:18:47.548627Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=10&id=MzI1MGQ0YjAtYTE1ZGE4ZmYtYzNkNDc0MTYtZjU1N2UxZTc=, ActorId: [10:7591325171393816726:2324], ActorState: unknown state, Session actor destroyed trace_id# |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData >> ResourcePoolClassifiersDdl::TestDropResourcePool [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersDdl::TestDropResourcePool [GOOD] Test command err: 2026-01-04T02:18:00.279565Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591324975282410805:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:00.279640Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:00.496017Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:00.571665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:00.571805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:00.584790Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:00.653148Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:00.654244Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591324975282410776:2081] 1767493080278265 != 1767493080278268 2026-01-04T02:18:00.709156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:00.709186Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:00.709200Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-04T02:18:00.709303Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:00.754888Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:00.913954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:00.918624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:18:01.285201Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:03.175923Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-04T02:18:03.178801Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324988167313522:2320], Start check tables existence, number paths: 2 2026-01-04T02:18:03.180044Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-04T02:18:03.180086Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-04T02:18:03.181986Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324988167313522:2320], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-04T02:18:03.182060Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324988167313522:2320], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-04T02:18:03.182091Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324988167313522:2320], Successfully finished 2026-01-04T02:18:03.182237Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-04T02:18:03.182308Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-04T02:18:03.189934Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=MmU5YmM4MWQtNWY5YjBhNTktMmI2ZTA1ZjYtYzY2MWZmYzk=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# MmU5YmM4MWQtNWY5YjBhNTktMmI2ZTA1ZjYtYzY2MWZmYzk= temp_dir_name# 039b06dc-4888-5ae7-51ea-70b52ecdc599 trace_id# 2026-01-04T02:18:03.190086Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=MmU5YmM4MWQtNWY5YjBhNTktMmI2ZTA1ZjYtYzY2MWZmYzk=, ActorId: [1:7591324988167313549:2324], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:18:03.210162Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] 
[TPoolCreatorActor] ActorId: [1:7591324988167313551:2498], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-04T02:18:03.214151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:18:03.216769Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324988167313551:2498], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2026-01-04T02:18:03.218498Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324988167313551:2498], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-04T02:18:03.225704Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324988167313551:2498], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:18:03.310617Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324988167313551:2498], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-04T02:18:03.348349Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591324988167313604:2531] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:18:03.348462Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324988167313551:2498], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-04T02:18:03.350903Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=ODhmMWIxZDMtNDRhNTc0ZmItMjUwNWQyMjAtZTNhMjg5MzM=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ODhmMWIxZDMtNDRhNTc0ZmItMjUwNWQyMjAtZTNhMjg5MzM= temp_dir_name# 003c0df2-4de9-c593-449d-509d53e86c73 trace_id# 2026-01-04T02:18:03.351467Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-04T02:18:03.351492Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-04T02:18:03.351526Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-04T02:18:03.351557Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=ODhmMWIxZDMtNDRhNTc0ZmItMjUwNWQyMjAtZTNhMjg5MzM=, ActorId: [1:7591324988167313611:2326], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:18:03.351747Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=ODhmMWIxZDMtNDRhNTc0ZmItMjUwNWQyMjAtZTNhMjg5MzM=, ActorId: [1:7591324988167313611:2326], ActorState: ReadyState, LegacyTraceId: 01ke3cr86q31g6rfrecryfrr9b, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [1:7591324988167313610:2536] database# Root database_id# /Root pool_id# sample_pool_id trace_id# 2026-01-04T02:18:03.351791Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Received new request from [1:7591324988167313611:2326], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ODhmMWIxZDMtNDRhNTc0ZmItMjUwNWQyMjAtZTNhMjg5MzM= 2026-01-04T02:18:03.351835Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324988167313613:2327], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-04T02:18:03.351902Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7591324988167313614:2328], Database: /Root, Start database fetching 2026-01-04T02:18:03.352885Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: 
[1:7591324988167313614:2328], Database: /Root, Database info successfully fetched, serverless: 0 2026-01-04T02:18:03.353035Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324988167313613:2327], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-04T02:18:03.353075Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:247: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2026-01-04T02:18:03.353119Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2026-01-04T02:18:03.353129Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2026-01-04T02:18:03.353298Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:44: [WorkloadService] [TPoolResolverActor] ActorId: [1:7591324988167313624:2329], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=ODhmMWIxZDMtNDRhNTc0ZmItMjUwNWQyMjAtZTNhMjg5MzM=, Start pool fetching 2026-01-04T02:18:03.353356Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:466: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7591324988167313625:2330], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 41] 2026-01-04T02:18:03.353393Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324988167313626:2331], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-04T02:18:03.353638Z node 1 ... LegacyTraceId: 01ke3csn3v7b4ahxtecvndcw1t, ExecutePhyTx literal# false commit# true deferred_effects_size# 0 tx# 137143553627608 trace_id# 2026-01-04T02:18:49.404771Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=8&id=ODJmYTRmMmYtYTRiNDFmZTEtNWYwMTA4NzgtNTQ4ZGRhMDk=, ActorId: [8:7591325188397810169:2617], ActorState: ExecuteState, LegacyTraceId: 01ke3csn3v7b4ahxtecvndcw1t, Sending to Executer span_id_size# 8 trace_id# 2026-01-04T02:18:49.404853Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=8&id=ODJmYTRmMmYtYTRiNDFmZTEtNWYwMTA4NzgtNTQ4ZGRhMDk=, ActorId: [8:7591325188397810169:2617], ActorState: ExecuteState, LegacyTraceId: 01ke3csn3v7b4ahxtecvndcw1t, Created new KQP executer executer_id# [8:7591325188397810177:2617] is_rollback# false trace_id# 2026-01-04T02:18:49.405613Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2278} SessionId: ydb://session/3?node_id=8&id=ODJmYTRmMmYtYTRiNDFmZTEtNWYwMTA4NzgtNTQ4ZGRhMDk=, ActorId: [8:7591325188397810169:2617], ActorState: ExecuteState, LegacyTraceId: 01ke3csn3v7b4ahxtecvndcw1t, Forwarded TEvStreamData to [8:7591325188397810168:3086] trace_id# 2026-01-04T02:18:49.406108Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=8&id=ODJmYTRmMmYtYTRiNDFmZTEtNWYwMTA4NzgtNTQ4ZGRhMDk=, ActorId: [8:7591325188397810169:2617], ActorState: ExecuteState, LegacyTraceId: 01ke3csn3v7b4ahxtecvndcw1t, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-04T02:18:49.406218Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=8&id=ODJmYTRmMmYtYTRiNDFmZTEtNWYwMTA4NzgtNTQ4ZGRhMDk=, ActorId: 
[8:7591325188397810169:2617], ActorState: ExecuteState, LegacyTraceId: 01ke3csn3v7b4ahxtecvndcw1t, TxInfo status# Committed kind# Pure total_duration# 1.526 server_duration# 1.488 queries_count# 2 trace_id# 2026-01-04T02:18:49.406272Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=8&id=ODJmYTRmMmYtYTRiNDFmZTEtNWYwMTA4NzgtNTQ4ZGRhMDk=, ActorId: [8:7591325188397810169:2617], ActorState: ExecuteState, LegacyTraceId: 01ke3csn3v7b4ahxtecvndcw1t, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-04T02:18:49.406408Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=ODJmYTRmMmYtYTRiNDFmZTEtNWYwMTA4NzgtNTQ4ZGRhMDk=, ActorId: [8:7591325188397810169:2617], ActorState: ExecuteState, LegacyTraceId: 01ke3csn3v7b4ahxtecvndcw1t, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:18:49.406448Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=ODJmYTRmMmYtYTRiNDFmZTEtNWYwMTA4NzgtNTQ4ZGRhMDk=, ActorId: [8:7591325188397810169:2617], ActorState: ExecuteState, LegacyTraceId: 01ke3csn3v7b4ahxtecvndcw1t, EndCleanup is_final# true trace_id# 2026-01-04T02:18:49.406488Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=8&id=ODJmYTRmMmYtYTRiNDFmZTEtNWYwMTA4NzgtNTQ4ZGRhMDk=, ActorId: [8:7591325188397810169:2617], ActorState: ExecuteState, LegacyTraceId: 01ke3csn3v7b4ahxtecvndcw1t, Sent query response back to proxy proxy_request_id# 48 proxy_id# [8:7591325154038069973:2265] trace_id# 2026-01-04T02:18:49.406518Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=ODJmYTRmMmYtYTRiNDFmZTEtNWYwMTA4NzgtNTQ4ZGRhMDk=, ActorId: [8:7591325188397810169:2617], ActorState: unknown state, LegacyTraceId: 01ke3csn3v7b4ahxtecvndcw1t, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:18:49.406761Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:967} SessionId: ydb://session/3?node_id=8&id=ZGQ4MDg4OGItOGFjYWMxZWMtYmM5MjBhYzEtNDU0OTU2MjM=, ActorId: [8:7591325188397810127:2607], ActorState: ExecuteState, LegacyTraceId: 01ke3csn1w0zps5jm0n2d1m4wh, Acquire mvcc snapshot trace_id# 2026-01-04T02:18:49.406944Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=ODJmYTRmMmYtYTRiNDFmZTEtNWYwMTA4NzgtNTQ4ZGRhMDk=, ActorId: [8:7591325188397810169:2617], ActorState: unknown state, LegacyTraceId: 01ke3csn3v7b4ahxtecvndcw1t, Session actor destroyed trace_id# 2026-01-04T02:18:49.407551Z node 8 :KQP_SESSION TRACE: {KQPSA@kqp_session_actor.cpp:1008} SessionId: ydb://session/3?node_id=8&id=ZGQ4MDg4OGItOGFjYWMxZWMtYmM5MjBhYzEtNDU0OTU2MjM=, ActorId: [8:7591325188397810127:2607], ActorState: ExecuteState, LegacyTraceId: 01ke3csn1w0zps5jm0n2d1m4wh, Read snapshot result status# UNAVAILABLE step# 1767493129388 tx_id# 18446744073709551615 trace_id# 2026-01-04T02:18:49.407589Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=8&id=ZGQ4MDg4OGItOGFjYWMxZWMtYmM5MjBhYzEtNDU0OTU2MjM=, ActorId: [8:7591325188397810127:2607], ActorState: ExecuteState, LegacyTraceId: 01ke3csn1w0zps5jm0n2d1m4wh, ExecutePhyTx literal# false commit# false deferred_effects_size# 0 tx# 137143551403864 trace_id# 2026-01-04T02:18:49.407623Z node 8 :KQP_SESSION DEBUG: 
{KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=8&id=ZGQ4MDg4OGItOGFjYWMxZWMtYmM5MjBhYzEtNDU0OTU2MjM=, ActorId: [8:7591325188397810127:2607], ActorState: ExecuteState, LegacyTraceId: 01ke3csn1w0zps5jm0n2d1m4wh, Sending to Executer span_id_size# 8 trace_id# 2026-01-04T02:18:49.407703Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=8&id=ZGQ4MDg4OGItOGFjYWMxZWMtYmM5MjBhYzEtNDU0OTU2MjM=, ActorId: [8:7591325188397810127:2607], ActorState: ExecuteState, LegacyTraceId: 01ke3csn1w0zps5jm0n2d1m4wh, Created new KQP executer executer_id# [8:7591325188397810187:2607] is_rollback# false trace_id# 2026-01-04T02:18:49.410417Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=8&id=ZGQ4MDg4OGItOGFjYWMxZWMtYmM5MjBhYzEtNDU0OTU2MjM=, ActorId: [8:7591325188397810127:2607], ActorState: ExecuteState, LegacyTraceId: 01ke3csn1w0zps5jm0n2d1m4wh, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-04T02:18:49.410504Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=8&id=ZGQ4MDg4OGItOGFjYWMxZWMtYmM5MjBhYzEtNDU0OTU2MjM=, ActorId: [8:7591325188397810127:2607], ActorState: ExecuteState, LegacyTraceId: 01ke3csn1w0zps5jm0n2d1m4wh, TxInfo status# Active kind# ReadOnly total_duration# 0 server_duration# 3.752 queries_count# 2 trace_id# 2026-01-04T02:18:49.410592Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=8&id=ZGQ4MDg4OGItOGFjYWMxZWMtYmM5MjBhYzEtNDU0OTU2MjM=, ActorId: [8:7591325188397810127:2607], ActorState: ExecuteState, LegacyTraceId: 01ke3csn1w0zps5jm0n2d1m4wh, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-04T02:18:49.410786Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=ZGQ4MDg4OGItOGFjYWMxZWMtYmM5MjBhYzEtNDU0OTU2MjM=, ActorId: [8:7591325188397810127:2607], ActorState: ExecuteState, LegacyTraceId: 01ke3csn1w0zps5jm0n2d1m4wh, Cleanup start is_final# false has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:18:49.410814Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=ZGQ4MDg4OGItOGFjYWMxZWMtYmM5MjBhYzEtNDU0OTU2MjM=, ActorId: [8:7591325188397810127:2607], ActorState: ExecuteState, LegacyTraceId: 01ke3csn1w0zps5jm0n2d1m4wh, EndCleanup is_final# false trace_id# 2026-01-04T02:18:49.410846Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=8&id=ZGQ4MDg4OGItOGFjYWMxZWMtYmM5MjBhYzEtNDU0OTU2MjM=, ActorId: [8:7591325188397810127:2607], ActorState: ExecuteState, LegacyTraceId: 01ke3csn1w0zps5jm0n2d1m4wh, Sent query response back to proxy proxy_request_id# 45 proxy_id# [8:7591325154038069973:2265] trace_id# 2026-01-04T02:18:49.411698Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=ZGQ4MDg4OGItOGFjYWMxZWMtYmM5MjBhYzEtNDU0OTU2MjM=, ActorId: [8:7591325188397810127:2607], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-04T02:18:49.411749Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=8&id=ZGQ4MDg4OGItOGFjYWMxZWMtYmM5MjBhYzEtNDU0OTU2MjM=, ActorId: [8:7591325188397810127:2607], ActorState: ReadyState, Sending to Executer span_id_size# 8 trace_id# 
2026-01-04T02:18:49.411807Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=8&id=ZGQ4MDg4OGItOGFjYWMxZWMtYmM5MjBhYzEtNDU0OTU2MjM=, ActorId: [8:7591325188397810127:2607], ActorState: ReadyState, Created new KQP executer executer_id# [8:7591325188397810198:2607] is_rollback# true trace_id# 2026-01-04T02:18:49.411852Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=ZGQ4MDg4OGItOGFjYWMxZWMtYmM5MjBhYzEtNDU0OTU2MjM=, ActorId: [8:7591325188397810127:2607], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# true transactions_to_be_aborted_size# 1 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:18:49.412179Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=ZGQ4MDg4OGItOGFjYWMxZWMtYmM5MjBhYzEtNDU0OTU2MjM=, ActorId: [8:7591325188397810127:2607], ActorState: CleanupState, EndCleanup is_final# true trace_id# 2026-01-04T02:18:49.412213Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=ZGQ4MDg4OGItOGFjYWMxZWMtYmM5MjBhYzEtNDU0OTU2MjM=, ActorId: [8:7591325188397810127:2607], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:18:49.412327Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=ZGQ4MDg4OGItOGFjYWMxZWMtYmM5MjBhYzEtNDU0OTU2MjM=, ActorId: [8:7591325188397810127:2607], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-04T02:18:49.413516Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=YmM1YmQyMjgtMTc0Y2I0OC01OTM3NDUzZC1hZDc1NDA5Mw==, ActorId: [8:7591325166922972484:2324], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-04T02:18:49.413558Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=YmM1YmQyMjgtMTc0Y2I0OC01OTM3NDUzZC1hZDc1NDA5Mw==, ActorId: [8:7591325166922972484:2324], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:18:49.413591Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=YmM1YmQyMjgtMTc0Y2I0OC01OTM3NDUzZC1hZDc1NDA5Mw==, ActorId: [8:7591325166922972484:2324], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-04T02:18:49.413615Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=YmM1YmQyMjgtMTc0Y2I0OC01OTM3NDUzZC1hZDc1NDA5Mw==, ActorId: [8:7591325166922972484:2324], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:18:49.413665Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=YmM1YmQyMjgtMTc0Y2I0OC01OTM3NDUzZC1hZDc1NDA5Mw==, ActorId: [8:7591325166922972484:2324], ActorState: unknown state, Session actor destroyed trace_id# |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection |80.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut |80.8%| [TM] 
{RESULT} ydb/core/tx/datashard/ut_vacuum/unittest |80.8%| [LD] {RESULT} $(B)/ydb/core/tx/time_cast/ut/ydb-core-tx-time_cast-ut >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount >> DescribeSchemaSecretsService::BatchRequest [GOOD] >> DescribeSchemaSecretsService::BigBatchRequest >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections >> TCreateAndDropViewTest::ParsingSecurityInvoker [GOOD] >> TCreateAndDropViewTest::ListCreatedView >> KqpWorkloadServiceDistributed::TestNodeDisconnect [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection >> MediatorTimeCast::ReadStepSubscribe >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount ------- [TM] {asan, default-linux-x86_64, release} ydb/services/rate_limiter/ut/unittest >> TGRpcRateLimiterTest::AcquireResourceManyUsedActorApiWithCancelAfter [GOOD] Test command err: 2026-01-04T02:18:00.473516Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591324977233650384:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:00.473558Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:00.754898Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:00.781370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:00.781505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:00.816575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:00.833590Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:00.946613Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:00.946639Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:00.946655Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:00.946775Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:00.983958Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:01.101554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:01.178537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-04T02:18:04.606386Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591324992973883764:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:04.619958Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:04.683428Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:04.783151Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:04.823858Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:04.823933Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:04.871311Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:04.934011Z node 6 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.015678s 2026-01-04T02:18:04.964907Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:05.062465Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:05.062486Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:05.062493Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:05.062574Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:05.323409Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:05.445769Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-04T02:18:05.620102Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:09.324900Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7591325014779938773:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:09.324965Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:09.407178Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:09.538763Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:09.556023Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:09.556104Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:09.564007Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:09.605891Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:09.670374Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:09.670405Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:09.670413Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:09.670491Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:09.900383Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:10.007350Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-04T02:18:10.334010Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:13.904150Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591325030781152607:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:13.909679Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:13.954656Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:14.094604Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:14.109490Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:14.109561Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:14.128237Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:14.135066Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:14.208346Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:14.208376Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:14.208384Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:14.208475Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:14.457418Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:14.555438Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-04T02:18:14.657829Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:18.284621Z node 13 :METAD ... 
v_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:37.442345Z node 28 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[28:7591325137676191546:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:37.442414Z node 28 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:37.488694Z node 28 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:37.548177Z node 28 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:37.562992Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:37.563068Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:37.577247Z node 28 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(28, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:37.626819Z node 28 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:37.626836Z node 28 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:37.626842Z node 28 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:37.626905Z node 28 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:37.690402Z node 28 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:37.800430Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:37.865111Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-04T02:18:38.446401Z node 28 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:41.360193Z node 31 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[31:7591325151364477638:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:41.360246Z node 31 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:41.373205Z node 31 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:41.459462Z node 31 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:41.476839Z node 31 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:41.476914Z node 31 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:41.497004Z node 31 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:41.535184Z node 31 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:41.535203Z node 31 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:41.535210Z node 31 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:41.535283Z node 31 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:41.571286Z node 31 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:41.782292Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:41.828107Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-04T02:18:42.365265Z node 31 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:45.459857Z node 34 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[34:7591325171097916204:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:45.459928Z node 34 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:45.474994Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:45.568132Z node 34 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:45.585388Z node 34 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:45.585470Z node 34 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-04T02:18:45.608647Z node 34 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:45.652574Z node 34 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:45.652592Z node 34 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:45.652602Z node 34 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:45.652671Z node 34 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:45.746273Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:45.761567Z node 34 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:45.811127Z node 34 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-04T02:18:46.468923Z node 34 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:49.559585Z node 37 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[37:7591325185559258631:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:49.559869Z node 37 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:49.589265Z node 37 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:49.683048Z node 37 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:49.710587Z node 37 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:49.710701Z node 37 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:49.736491Z node 37 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:49.772455Z node 37 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:49.772482Z node 37 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:49.772491Z node 37 :NET_CLASSIFIER WARN: net_classifier.cpp:204: 
failed to initialize from file: (empty maybe) 2026-01-04T02:18:49.772594Z node 37 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:49.876682Z node 37 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:49.885644Z node 37 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:49.920547Z node 37 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateKesus, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_kesus.cpp:30) 2026-01-04T02:18:50.564938Z node 37 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |80.8%| [TM] {BAZEL_UPLOAD} ydb/services/rate_limiter/ut/unittest |80.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut |80.8%| [TM] {RESULT} ydb/services/rate_limiter/ut/unittest |80.8%| [LD] {RESULT} $(B)/ydb/tests/fq/pq_async_io/ut/ydb-tests-fq-pq_async_io-ut >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection >> TCreateAndDropViewTest::ListCreatedView [GOOD] >> TCreateAndDropViewTest::CreateSameViewTwice >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding >> KqpWorkloadService::TestStartQueryAfterCancel [GOOD] >> KqpWorkloadService::TestLessConcurrentQueryLimit >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding >> MediatorTimeCast::ReadStepSubscribe [GOOD] >> MediatorTimeCast::GranularTimecast >> DescribeSchemaSecretsService::BigBatchRequest [GOOD] >> DescribeSchemaSecretsService::EmptyBatch >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding >> TCreateAndDropViewTest::CreateSameViewTwice [GOOD] >> TCreateAndDropViewTest::CreateViewOccupiedName >> 
TControlPlaneProxyCheckPermissionsFailed::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListQueries [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery >> MediatorTimeCast::GranularTimecast [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery >> TCreateAndDropViewTest::CreateViewOccupiedName [GOOD] >> TCreateAndDropViewTest::CreateViewIfNotExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/time_cast/ut/unittest >> MediatorTimeCast::GranularTimecast [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:18:55.112423Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:18:55.189151Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:18:55.189423Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:18:55.189457Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:18:55.369836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:55.369927Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:55.422989Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493133485891 != 1767493133485894 2026-01-04T02:18:55.432966Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:55.472507Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:55.559307Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:18:55.800398Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:922: Actor# [1:26:2073] HANDLE NKikimr::TEvMediatorTimecast::TEvSubscribeReadStep{ CoordinatorId# 72057594046316545 } 2026-01-04T02:18:55.800712Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [1:26:2073] HANDLE EvClientConnected 2026-01-04T02:18:55.803748Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:999: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 1 LastAcquireStep: 0 NextAcquireStep: 1000 2026-01-04T02:18:55.814247Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:55.918242Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 1500 2026-01-04T02:18:56.000049Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 2000 2026-01-04T02:18:56.123426Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 3000 2026-01-04T02:18:56.246858Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 4000 2026-01-04T02:18:56.381188Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 5000 2026-01-04T02:18:56.546321Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 6000 
2026-01-04T02:18:56.556675Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:970: Actor# [1:26:2073] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 8000 } 2026-01-04T02:18:56.680929Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 7000 2026-01-04T02:18:56.805287Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 1 NextAcquireStep: 8000 2026-01-04T02:18:56.806494Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:593: Actor# [1:26:2073] HANDLE EvClientDestroyed 2026-01-04T02:18:56.823361Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [1:26:2073] HANDLE EvClientConnected 2026-01-04T02:18:56.823869Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:999: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepResult CoordinatorID: 72057594046316545 SeqNo: 2 LastAcquireStep: 0 NextAcquireStep: 8000 2026-01-04T02:18:56.835561Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:970: Actor# [1:26:2073] HANDLE NKikimr::TEvMediatorTimecast::TEvWaitReadStep{ CoordinatorId# 72057594046316545 ReadStep# 13000 } 2026-01-04T02:18:56.928620Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 8500 2026-01-04T02:18:57.021605Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 9000 2026-01-04T02:18:57.165945Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 10000 2026-01-04T02:18:57.310843Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 11000 2026-01-04T02:18:57.455809Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 12000 2026-01-04T02:18:57.579992Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:1049: Actor# [1:26:2073] HANDLE TEvSubscribeReadStepUpdate CoordinatorID: 72057594046316545 SeqNo: 2 NextAcquireStep: 13000 ... waiting for SysViewsRoster update finished 2026-01-04T02:18:59.672908Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:18:59.678306Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:18:59.682021Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:18:59.682259Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:18:59.682435Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:18:59.891628Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:59.891705Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:59.905441Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767493137932510 != 1767493137932514 2026-01-04T02:18:59.908649Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:59.947666Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:19:00.013678Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:19:00.308474Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:460: Actor# [2:26:2073] HANDLE {TEvRegisterTablet TabletId# 72057594047365120 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2026-01-04T02:19:00.308956Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:276: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 1 Tablets: 72057594047365120 MinStep: 0 2026-01-04T02:19:00.308997Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:378: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2026-01-04T02:19:00.309029Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:508: Actor# [2:26:2073] SEND to Sender# [2:886:2754] {TEvRegisterTabletResult TabletId# 72057594047365120 Entry# 0} 2026-01-04T02:19:00.309283Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [2:26:2073] HANDLE EvClientConnected 2026-01-04T02:19:00.309392Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 1000 2026-01-04T02:19:00.309489Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 1000} 2026-01-04T02:19:00.309607Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:460: Actor# [2:26:2073] HANDLE {TEvRegisterTablet TabletId# 72057594047365121 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2026-01-04T02:19:00.309657Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:304: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify 
Bucket: 0 SubscriptionId: 2 AddTablets: 72057594047365121 2026-01-04T02:19:00.309701Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:508: Actor# [2:26:2073] SEND to Sender# [2:889:2756] {TEvRegisterTabletResult TabletId# 72057594047365121 Entry# 1000} 2026-01-04T02:19:00.309850Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 2 LatestStep: 1000 2026-01-04T02:19:00.309968Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:460: Actor# [2:26:2073] HANDLE {TEvRegisterTablet TabletId# 72057594047365123 ProcessingParams { Version: 0 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 1 Mediators: 72057594046382081 }} 2026-01-04T02:19:00.310011Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:304: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatchModify Bucket: 0 SubscriptionId: 3 AddTablets: 72057594047365123 2026-01-04T02:19:00.310040Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:508: Actor# [2:26:2073] SEND to Sender# [2:890:2757] {TEvRegisterTabletResult TabletId# 72057594047365123 Entry# 1000} 2026-01-04T02:19:00.310145Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxM ... GranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 0 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-04T02:19:00.977013Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 3500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 3499 FrozenSteps: 3499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-04T02:19:00.987322Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 3500 FrozenTablets: 72057594047365120 FrozenTablets: 72057594047365121 FrozenSteps: 3499 FrozenSteps: 3499 ... 
unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4000 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-04T02:19:00.997592Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4000 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-04T02:19:01.008067Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 FrozenTablets: 72057594047365121 FrozenSteps: 3999 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-04T02:19:01.028891Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 FrozenTablets: 72057594047365121 FrozenSteps: 3999 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 FrozenTablets: 72057594047365121 FrozenSteps: 4499 ... unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-04T02:19:01.049730Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 FrozenTablets: 72057594047365121 FrozenSteps: 4499 ... unblocking plan for tablet2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... unblocking update: Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 UnfrozenTablets: 72057594047365121 ... 
unblocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR 2026-01-04T02:19:01.070525Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 5 LatestStep: 4500 UnfrozenTablets: 72057594047365121 ... restarting mediator 2026-01-04T02:19:01.081652Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:593: Actor# [2:26:2073] HANDLE EvClientDestroyed 2026-01-04T02:19:01.081752Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:276: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 6 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 4500 2026-01-04T02:19:01.081794Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:361: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2026-01-04T02:19:01.082463Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [2:26:2073] HANDLE EvClientConnected 2026-01-04T02:19:01.082543Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:276: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 NKikimrTxMediatorTimecast.TEvGranularWatch Bucket: 0 SubscriptionId: 7 Tablets: 72057594047365123 Tablets: 72057594047365120 Tablets: 72057594047365121 MinStep: 4500 2026-01-04T02:19:01.082574Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:361: Actor# [2:26:2073] SEND to Mediator# 72057594046382081 {TEvWatch Bucket# 0} 2026-01-04T02:19:01.096522Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:584: Actor# [2:26:2073] HANDLE EvClientConnected ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 2026-01-04T02:19:01.096757Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 0} ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet cookie 0 ... fully unblocking tx1 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... 
unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 2026-01-04T02:19:01.118354Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3500} ... tablet1 at 3500 ... tablet2 at 4500 ... tablet3 at 4500 ... fully unblocking tx2 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 2026-01-04T02:19:01.128898Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 4000} ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... tablet1 at 4000 ... tablet2 at 4500 ... tablet3 at 4500 ... fully unblocking tx3 ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NMediatorTimeCastTest::NTestSuiteMediatorTimeCast::TTargetTablet ... blocking NKikimr::TEvMediatorTimecast::TEvGranularUpdate from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_MEDIATOR_TIMECAST_ACTOR cookie 0 2026-01-04T02:19:01.139424Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 4500} ... blocking NKikimr::TEvTxProcessing::TEvPlanStepAck from TX_MEDIATOR_TABLET_QUEUE_ACTOR to TX_COORDINATOR_MEDIATORQ_ACTOR cookie 0 ... tablet1 at 4500 ... tablet2 at 4500 ... 
tablet3 at 4500 |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/time_cast/ut/unittest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListJobs [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> ResourcePoolClassifiersSysView::TestResourcePoolClassifiersSysViewFilters [GOOD] Test command err: 2026-01-04T02:18:00.270326Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591324974942892517:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:00.270777Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:00.333356Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:18:00.559349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:00.559515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:00.561633Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:00.561992Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:00.563133Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591324974942892410:2081] 1767493080266793 != 1767493080266796 2026-01-04T02:18:00.591599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:00.697687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:00.697712Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:00.697719Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:00.697832Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:00.823151Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:00.885028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:00.892794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:18:01.273289Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:02.933886Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-04T02:18:02.941385Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=MWRjYmQ2Mi1mNmQ5ZjA0MS04MmQzMWNjNS1kNzhhZGI1Zg==, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# MWRjYmQ2Mi1mNmQ5ZjA0MS04MmQzMWNjNS1kNzhhZGI1Zg== temp_dir_name# 69d1e46c-483d-f5cc-8395-a0955379c27a trace_id# 2026-01-04T02:18:02.944099Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-04T02:18:02.944144Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-04T02:18:02.944431Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324983532827861:2317], Start check tables existence, number paths: 2 2026-01-04T02:18:02.945389Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=MWRjYmQ2Mi1mNmQ5ZjA0MS04MmQzMWNjNS1kNzhhZGI1Zg==, ActorId: [1:7591324983532827878:2324], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:18:02.945748Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324983532827861:2317], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-04T02:18:02.945844Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324983532827861:2317], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-04T02:18:02.945875Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324983532827861:2317], Successfully finished 2026-01-04T02:18:02.945952Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-04T02:18:02.945984Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-04T02:18:02.976711Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324983532827886:2495], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-04T02:18:02.980533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 
2026-01-04T02:18:02.981996Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324983532827886:2495], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2026-01-04T02:18:02.982120Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324983532827886:2495], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-04T02:18:02.992160Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324983532827886:2495], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:18:03.079144Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324983532827886:2495], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-04T02:18:03.125060Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591324987827795233:2527] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:18:03.125169Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324983532827886:2495], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-04T02:18:03.139005Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-04T02:18:03.139033Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-04T02:18:03.139123Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324987827795242:2326], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-04T02:18:03.139194Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-04T02:18:03.139252Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=MWRjYmQ2Mi1mNmQ5ZjA0MS04MmQzMWNjNS1kNzhhZGI1Zg==, ActorId: [1:7591324983532827878:2324], ActorState: ReadyState, LegacyTraceId: 01ke3cr802amhmzhbbprx43fyh, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DDL text# GRANT DESCRIBE SCHEMA ON `/Root` TO `user@test`; GRANT DESCRIBE SCHEMA, SELECT ROW ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `user@test`; rpc_actor# [0:0:0] database# /Root database_id# /Root pool_id# default trace_id# 2026-01-04T02:18:03.144692Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324987827795242:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:03.144808Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:03.144883Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-04T02:18:03.144906Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324987827795252:2327], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-04T02:18:03.145190Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324987827795252:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:03.145257Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:03.423107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:18:03.435827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:18:03.438914Z node 1 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=1&id=MWRjYmQ2Mi1mNmQ5ZjA0MS04MmQzMWNjNS1kNzhhZGI1Zg==, ActorId: [1:7591324983532827878:2324], ActorState: ExecuteState, LegacyTraceId: 01ke3cr802amhmzhbbprx43fyh, Cleanup start is_final# false has_cleanup_ctx# true transactions_ ... 7, ReplyQueryCompileError, remove tx status# UNAVAILABLE issues# { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 2 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/initialization/migrations]" end_position { row: 2 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:19:01.400212Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=ZjQzNzk3ZTktYjIwM2JkYzktZmIwYmNkMWEtZDJjYjIxNjA=, ActorId: [8:7591325240443333308:2870], ActorState: ExecuteState, LegacyTraceId: 01ke3ct0wa37tzr71tgq66a5p7, Cleanup start is_final# false has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:19:01.400232Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=ZjQzNzk3ZTktYjIwM2JkYzktZmIwYmNkMWEtZDJjYjIxNjA=, ActorId: [8:7591325240443333308:2870], ActorState: ExecuteState, LegacyTraceId: 01ke3ct0wa37tzr71tgq66a5p7, EndCleanup is_final# false trace_id# 2026-01-04T02:19:01.400321Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=8&id=ZjQzNzk3ZTktYjIwM2JkYzktZmIwYmNkMWEtZDJjYjIxNjA=, ActorId: [8:7591325240443333308:2870], ActorState: ExecuteState, LegacyTraceId: 01ke3ct0wa37tzr71tgq66a5p7, Sent query response back to proxy proxy_request_id# 80 proxy_id# [8:7591325197493657249:2266] trace_id# 2026-01-04T02:19:01.400768Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 2 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/initialization/migrations]" end_position { row: 2 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2026-01-04T02:19:01.400933Z node 8 :METADATA_PROVIDER ERROR: log.h:466: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:2:1: Error: Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/initialization/migrations]
: Error: LookupError, code: 2005 2026-01-04T02:19:01.401016Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=ZjQzNzk3ZTktYjIwM2JkYzktZmIwYmNkMWEtZDJjYjIxNjA=, ActorId: [8:7591325240443333308:2870], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-04T02:19:01.401046Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=ZjQzNzk3ZTktYjIwM2JkYzktZmIwYmNkMWEtZDJjYjIxNjA=, ActorId: [8:7591325240443333308:2870], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:19:01.401069Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=ZjQzNzk3ZTktYjIwM2JkYzktZmIwYmNkMWEtZDJjYjIxNjA=, ActorId: [8:7591325240443333308:2870], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-04T02:19:01.401093Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=ZjQzNzk3ZTktYjIwM2JkYzktZmIwYmNkMWEtZDJjYjIxNjA=, ActorId: [8:7591325240443333308:2870], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:19:01.401144Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=ZjQzNzk3ZTktYjIwM2JkYzktZmIwYmNkMWEtZDJjYjIxNjA=, ActorId: [8:7591325240443333308:2870], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-04T02:19:01.498747Z node 8 :SYSTEM_VIEWS WARN: sysview_service.cpp:813: Summary delivery problem: service id# [8:7591325197493657106:2147], processor id# 72075186224037891, database# /Root/test-dedicated 2026-01-04T02:19:01.652822Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=8&id=YTMwOWEyNGUtZGUyMWY0MS1lNWI1ZTUxYS05YWJmMmFkNA==, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# YTMwOWEyNGUtZGUyMWY0MS1lNWI1ZTUxYS05YWJmMmFkNA== temp_dir_name# 77f7238f-4c17-a9d6-e4a4-e8a5a1404624 trace_id# 2026-01-04T02:19:01.652945Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=8&id=YTMwOWEyNGUtZGUyMWY0MS1lNWI1ZTUxYS05YWJmMmFkNA==, ActorId: [8:7591325240443333346:2876], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:19:01.653438Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=8&id=YTMwOWEyNGUtZGUyMWY0MS1lNWI1ZTUxYS05YWJmMmFkNA==, ActorId: [8:7591325240443333346:2876], ActorState: ReadyState, LegacyTraceId: 01ke3ct14nd257h9vk97sadkyk, Received request proxy_request_id# 82 prepared# false has_tx_control# true action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DML text# /*UI-QUERY-EXCLUDE*/ SELECT * FROM `//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers`; rpc_actor# [8:7591325240443333347:2877] database# /Root/test-dedicated database_id# /Root/test-dedicated pool_id# default trace_id# 2026-01-04T02:19:01.653473Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:280} SessionId: ydb://session/3?node_id=8&id=YTMwOWEyNGUtZGUyMWY0MS1lNWI1ZTUxYS05YWJmMmFkNA==, ActorId: [8:7591325240443333346:2876], ActorState: ReadyState, LegacyTraceId: 01ke3ct14nd257h9vk97sadkyk, Request placed into pool from cache pool_id# default trace_id# 2026-01-04T02:19:01.653587Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:659} 
SessionId: ydb://session/3?node_id=8&id=YTMwOWEyNGUtZGUyMWY0MS1lNWI1ZTUxYS05YWJmMmFkNA==, ActorId: [8:7591325240443333346:2876], ActorState: ExecuteState, LegacyTraceId: 01ke3ct14nd257h9vk97sadkyk, Sending CompileQuery request trace_id# 2026-01-04T02:19:01.665865Z node 8 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][8:7591325210378559982:2692][/Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is incomplete in one of the ring groups: cookie# 36 2026-01-04T02:19:01.665942Z node 8 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][8:7591325210378559982:2692][/Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers] Sync is incomplete in one of the ring groups: cookie# 37 2026-01-04T02:19:01.666600Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [8:7591325240443333349:2878], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:2:1: Error: Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2026-01-04T02:19:01.666845Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=8&id=YTMwOWEyNGUtZGUyMWY0MS1lNWI1ZTUxYS05YWJmMmFkNA==, ActorId: [8:7591325240443333346:2876], ActorState: ExecuteState, LegacyTraceId: 01ke3ct14nd257h9vk97sadkyk, ReplyQueryCompileError, remove tx status# UNAVAILABLE issues# { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 2 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers]" end_position { row: 2 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:19:01.666881Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=YTMwOWEyNGUtZGUyMWY0MS1lNWI1ZTUxYS05YWJmMmFkNA==, ActorId: [8:7591325240443333346:2876], ActorState: ExecuteState, LegacyTraceId: 01ke3ct14nd257h9vk97sadkyk, Cleanup start is_final# false has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:19:01.666898Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=YTMwOWEyNGUtZGUyMWY0MS1lNWI1ZTUxYS05YWJmMmFkNA==, ActorId: [8:7591325240443333346:2876], ActorState: ExecuteState, LegacyTraceId: 01ke3ct14nd257h9vk97sadkyk, EndCleanup is_final# false trace_id# 2026-01-04T02:19:01.666979Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=8&id=YTMwOWEyNGUtZGUyMWY0MS1lNWI1ZTUxYS05YWJmMmFkNA==, ActorId: [8:7591325240443333346:2876], ActorState: ExecuteState, LegacyTraceId: 01ke3ct14nd257h9vk97sadkyk, Sent query response back to proxy proxy_request_id# 82 proxy_id# [8:7591325197493657249:2266] trace_id# 2026-01-04T02:19:01.667401Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor_cb.h:34;event=unexpected reply;response=operation { ready: true status: UNAVAILABLE issues { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 2 column: 1 } message: "Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers]" end_position { row: 2 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { } } } } ; 2026-01-04T02:19:01.667552Z node 8 :METADATA_PROVIDER ERROR: log.h:466: accessor_snapshot_base.cpp:16 :cannot construct snapshot: on request failed:
: Error: Table metadata loading, code: 1050
:2:1: Error: Failed to load metadata for table: db.[//Root/test-dedicated/.metadata/workload_manager/classifiers/resource_pool_classifiers]
: Error: LookupError, code: 2005 2026-01-04T02:19:01.667640Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=8&id=YTMwOWEyNGUtZGUyMWY0MS1lNWI1ZTUxYS05YWJmMmFkNA==, ActorId: [8:7591325240443333346:2876], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-04T02:19:01.667674Z node 8 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=8&id=YTMwOWEyNGUtZGUyMWY0MS1lNWI1ZTUxYS05YWJmMmFkNA==, ActorId: [8:7591325240443333346:2876], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:19:01.667698Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=8&id=YTMwOWEyNGUtZGUyMWY0MS1lNWI1ZTUxYS05YWJmMmFkNA==, ActorId: [8:7591325240443333346:2876], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-04T02:19:01.667726Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=8&id=YTMwOWEyNGUtZGUyMWY0MS1lNWI1ZTUxYS05YWJmMmFkNA==, ActorId: [8:7591325240443333346:2876], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:19:01.667777Z node 8 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=8&id=YTMwOWEyNGUtZGUyMWY0MS1lNWI1ZTUxYS05YWJmMmFkNA==, ActorId: [8:7591325240443333346:2876], ActorState: unknown state, Session actor destroyed trace_id# |80.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection >> KqpWorkloadServiceTables::TestLeaseExpiration [GOOD] >> KqpWorkloadServiceTables::TestLeaseUpdates >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount |80.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |80.8%| [TM] {RESULT} ydb/core/tx/time_cast/ut/unittest |80.8%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/ut/ydb-core-fq-libs-row_dispatcher-ut >> DescribeSchemaSecretsService::EmptyBatch [GOOD] >> DescribeSchemaSecretsService::MixedGrantsInBatch >> TCreateAndDropViewTest::CreateViewIfNotExists [GOOD] >> TCreateAndDropViewTest::DropView >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListConnections [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection |80.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |80.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_rs/ydb-core-tx-datashard-ut_rs >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnection [GOOD] >> 
TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection >> TDataShardRSTest::TestCleanupInRS+UseSink |80.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |80.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_reassign/ydb-core-tx-datashard-ut_reassign |80.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/apps/ydbd/ydbd |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/apps/ydbd/ydbd |80.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/config/ut/ydb-services-config-ut >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection |80.8%| [LD] {RESULT} $(B)/ydb/apps/ydbd/ydbd |80.8%| [LD] {RESULT} $(B)/ydb/services/config/ut/ydb-services-config-ut |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/config/ut/ydb-services-config-ut >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnection [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount |80.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |80.8%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |80.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/fetcher/ut/ydb-core-persqueue-public-fetcher-ut |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |80.9%| [LD] {RESULT} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/executer_actor/ut/ydb-core-kqp-executer_actor-ut |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut >> TCreateAndDropViewTest::DropView [GOOD] >> TCreateAndDropViewTest::DropViewDisabledFeatureFlag |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |80.9%| [LD] {RESULT} $(B)/ydb/library/yql/providers/generic/actors/ut/ydb-library-yql-providers-generic-actors-ut |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers |80.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_followers/ydb-core-tx-datashard-ut_followers >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendTestConnectionWithServiceAccount [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings >> DataShardReassign::AutoReassignOnYellowFlag >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendListBindings [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding |80.9%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/partition/ut/ydb-core-persqueue-pqtablet-partition-ut |80.9%| [LD] {RESULT} $(B)/ydb/core/persqueue/pqtablet/partition/ut/ydb-core-persqueue-pqtablet-partition-ut >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__ASYNC-pk_types13-all_types13-index13-DyNumber--ASYNC] >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0_UNIQUE_SYNC-pk_types8-all_types8-index8-pgdate-UNIQUE-SYNC] >> test_canonical_records.py::test_dstool_evict_vdisk_grpc >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__ASYNC-pk_types19-all_types19-index19-Uint32--ASYNC] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |80.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication |80.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_replication/ydb-core-tx-datashard-ut_replication >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__ASYNC-pk_types10-all_types10-index10-pgtimestamp--ASYNC] |80.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types12-all_types12-index12---] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |80.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |80.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] |80.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] |80.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] |80.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |80.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |80.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] >> TControlPlaneProxyCheckPermissionsSuccess::ShouldSendDeleteBinding [GOOD] >> TControlPlaneProxyShouldPassHids::ShouldCheckScenario >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] |80.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |80.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] |80.9%| [LD] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/pqtablet/partition/ut/ydb-core-persqueue-pqtablet-partition-ut >> DescribeSchemaSecretsService::MixedGrantsInBatch [GOOD] >> DescribeSchemaSecretsService::SchemeCacheRetryErrors |80.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |80.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_init/ydb-core-tx-datashard-ut_init |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |81.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |81.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |81.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/row_dispatcher/format_handler/ut/ydb-core-fq-libs-row_dispatcher-format_handler-ut |81.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/kqprun/kqprun |81.0%| [LD] {RESULT} $(B)/ydb/tests/tools/kqprun/kqprun |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/kqprun/kqprun >> TCreateAndDropViewTest::DropViewDisabledFeatureFlag [GOOD] >> TCreateAndDropViewTest::DropNonexistingView >> DataShardReassign::AutoReassignOnYellowFlag [GOOD] >> 
test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |81.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_reassign/unittest >> DataShardReassign::AutoReassignOnYellowFlag [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:19:14.680598Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:19:14.853150Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:19:14.853587Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:19:14.853641Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:19:15.171567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:19:15.171688Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:19:15.257637Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493151661862 != 1767493151661865 2026-01-04T02:19:15.273395Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:19:15.322993Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:19:15.431384Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:19:15.865389Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:190} Tx{267, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose 2026-01-04T02:19:15.865464Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:190} Tx{267, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:19:15.867772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:19:15.867999Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:190} Tx{267, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} hope 1 -> done Change{189, redo 997b alter 0b annex 0, ~{ 1, 33, 35, 42, 4 } -{ }, 0 gb} 2026-01-04T02:19:15.868095Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:190} Tx{267, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPropose} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:19:15.868998Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:190:1:24576:527:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:15.869107Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:190:0:0:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:15.869243Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} commited cookie 1 for step 190 2026-01-04T02:19:15.869544Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} Tx{268, 
NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2026-01-04T02:19:15.869594Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} Tx{268, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:19:15.869770Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} Tx{268, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{190, redo 175b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2026-01-04T02:19:15.886042Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:191} Tx{268, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:19:15.886581Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:191:1:24576:132:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:15.886704Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:191:0:0:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:15.886815Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} commited cookie 1 for step 191 2026-01-04T02:19:15.886997Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} Tx{269, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2026-01-04T02:19:15.887050Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} Tx{269, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:19:15.887260Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} Tx{269, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{191, redo 175b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2026-01-04T02:19:15.887312Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:192} Tx{269, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:19:15.887572Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:192:1:24576:132:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:15.887655Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:192:0:0:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:15.887730Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} commited cookie 1 for step 192 2026-01-04T02:19:15.887879Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} Tx{270, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress 2026-01-04T02:19:15.887919Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} Tx{270, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:19:15.888122Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} Tx{270, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{192, redo 121b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-01-04T02:19:15.888167Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:193} Tx{270, 
NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:19:15.888393Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:193:1:24576:97:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:15.888461Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:193:0:0:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:15.888523Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} commited cookie 1 for step 193 2026-01-04T02:19:15.889179Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{271, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion 2026-01-04T02:19:15.889237Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{271, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:19:15.889308Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{271, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} hope 1 -> done Change{193, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T02:19:15.889358Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{271, NKikimr::NSchemeShard::TSchemeShard::TTxNotifyCompletion} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:19:15.904513Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:19:16.014140Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:9} Tx{11, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2026-01-04T02:19:16.014227Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:9} Tx{11, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:19:16.014475Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:9} Tx{11, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{8, redo 264b alter 0b annex 0, ~{ 0, 4, 2 } -{ }, 0 gb} 2026-01-04T02:19:16.014536Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:9} Tx{11, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:19:16.014945Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:9:1:24576:165:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:16.015014Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:9:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:16.015092Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:10} commited cookie 1 for step 9 2026-01-04T02:19:16.015549Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{272, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep 2026-01-04T02:19:16.015611Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{272, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:19:16.016142Z 
node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{272, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep} hope 1 -> done Change{193, redo 819b alter 0b annex 0, ~{ 1, 18, 33, 27, 35, 42, 48, 4 } -{ }, 0 gb} 2026-01-04T02:19:16.016211Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:2:194} Tx{272, NKikimr::NSchemeShard::TSchemeShard::TTxOperationPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:19:16.016552Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:194:1:24576:427:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:16.016635Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:2:194:0:0:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:16.016699Z node 1 :TABLET_EXECUTOR DEBUG: Leader{7205759404 ... 10420Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:26} Tx{29, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:19:19.210784Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:26:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:19.210862Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:19.210962Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} commited cookie 1 for step 26 2026-01-04T02:19:19.369749Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{30, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2026-01-04T02:19:19.369845Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{30, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:19:19.369997Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{30, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{26, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2026-01-04T02:19:19.370059Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:27} Tx{30, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:19:19.370414Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:27:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:19.370509Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:19.370626Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} commited cookie 1 for step 27 2026-01-04T02:19:19.537227Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{31, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2026-01-04T02:19:19.537308Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{31, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:19:19.537441Z node 1 :TABLET_EXECUTOR 
DEBUG: Leader{72057594046316545:2:28} Tx{31, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{27, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2026-01-04T02:19:19.537501Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:28} Tx{31, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:19:19.537942Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:28:1:24576:90:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:19.538014Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:19.538121Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} commited cookie 1 for step 28 2026-01-04T02:19:19.552704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:19:19.552767Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:19:19.594218Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} queued, type NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics 2026-01-04T02:19:19.594300Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:19:19.594385Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} hope 1 -> done Change{9, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T02:19:19.594442Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:10} Tx{18, NKikimr::NBsController::TBlobStorageController::TTxUpdateDiskMetrics} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:19:19.740544Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{32, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2026-01-04T02:19:19.740633Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{32, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:19:19.740769Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{32, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{28, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2026-01-04T02:19:19.740825Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:29} Tx{32, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:19:19.741315Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:29:1:24576:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:19.741401Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:19.741496Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} commited cookie 1 for step 29 2026-01-04T02:19:19.866078Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, 
received event# 2146435073, Sender [0:0:0], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2026-01-04T02:19:19.866149Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-01-04T02:19:19.866249Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:10} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} queued, type NKikimr::NDataShard::TDataShard::TTxCleanupTransaction 2026-01-04T02:19:19.866309Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:10} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:19:19.866379Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186224037888 outdated step 16000 last cleanup 0 2026-01-04T02:19:19.866444Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:19:19.866480Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-01-04T02:19:19.866512Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:19:19.866544Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:19:19.866612Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:10} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T02:19:19.866682Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:10} Tx{19, NKikimr::NDataShard::TDataShard::TTxCleanupTransaction} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:19:19.866836Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-04T02:19:19.938389Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{33, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2026-01-04T02:19:19.938468Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{33, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:19:19.938595Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{33, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{29, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2026-01-04T02:19:19.938667Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:30} Tx{33, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:19:19.939054Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:30:1:24576:87:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:19.939152Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:19.939241Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:31} commited cookie 1 for step 30 2026-01-04T02:19:19.966486Z node 1 :TABLET_EXECUTOR 
DEBUG: Leader{72057594037968897:2:11} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} queued, type NKikimr::NHive::TTxUpdateTabletMetrics 2026-01-04T02:19:19.966562Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:19:19.966849Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} hope 1 -> done Change{13, redo 193b alter 0b annex 0, ~{ 1, 16, 4 } -{ }, 0 gb} 2026-01-04T02:19:19.966915Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:11} Tx{18, NKikimr::NHive::TTxUpdateTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:19:19.967143Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:12} Tx{19, NKikimr::NHive::TTxProcessTabletMetrics} queued, type NKikimr::NHive::TTxProcessTabletMetrics 2026-01-04T02:19:19.967193Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:12} Tx{19, NKikimr::NHive::TTxProcessTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:19:19.967371Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:12} Tx{19, NKikimr::NHive::TTxProcessTabletMetrics} hope 1 -> done Change{14, redo 329b alter 0b annex 0, ~{ 16 } -{ }, 0 gb} 2026-01-04T02:19:19.967426Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:12} Tx{19, NKikimr::NHive::TTxProcessTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:19:19.979242Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037968897:2:11:0:0:265:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:19:19.979348Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037968897:2:12} commited cookie 1 for step 11 --- Captured TEvCheckBlobstorageStatusResult event --- Waiting for TEvReassignTablet event... 
2026-01-04T02:19:20.108816Z node 1 :TABLET_EXECUTOR NOTICE: Leader{72075186224037888:1:10} CheckYellow current light yellow move channels: [ 0 1 ] 2026-01-04T02:19:20.108905Z node 1 :TABLET_EXECUTOR NOTICE: Leader{72075186224037888:1:10} CheckYellow reassign channels: [ 0 1 ] tablet# 72075186224037888 hive# 72057594037968897 --- Captured TEvReassignTablet event |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_reassign/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |81.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD] |81.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load |81.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_reassign/unittest |81.0%| [LD] {RESULT} $(B)/ydb/tests/olap/high_load/ydb-tests-olap-high_load >> TCreateAndDropViewTest::DropNonexistingView [FAIL] >> TCreateAndDropViewTest::CallDropViewOnTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceTables::TestLeaseUpdates [GOOD] Test command err: 2026-01-04T02:18:00.268075Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591324978117434770:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:00.268196Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:00.489208Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:00.496174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:00.496297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:00.585960Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591324978117434743:2081] 1767493080266921 != 1767493080266924 2026-01-04T02:18:00.596346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:00.599540Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:00.664204Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:00.677623Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:00.677657Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:00.677666Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:00.677776Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:00.892348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:01.273235Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:02.738728Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-04T02:18:02.738867Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-04T02:18:02.738894Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-04T02:18:02.738923Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324986707370197:2320], Start check tables existence, number paths: 2 2026-01-04T02:18:02.745841Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324986707370197:2320], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-04T02:18:02.745906Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324986707370197:2320], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-04T02:18:02.745934Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324986707370197:2320], Successfully finished 2026-01-04T02:18:02.746119Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-04T02:18:02.746167Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-04T02:18:02.746300Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=ZGNjZTgzMjktMTI0YmE0NDktMjZiYmE0NTgtMTZjYmRhNDQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ZGNjZTgzMjktMTI0YmE0NDktMjZiYmE0NTgtMTZjYmRhNDQ= temp_dir_name# 877dfefc-496d-f717-4bb1-839a9e9d5422 trace_id# 2026-01-04T02:18:02.746437Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=ZGNjZTgzMjktMTI0YmE0NDktMjZiYmE0NTgtMTZjYmRhNDQ=, ActorId: [1:7591324986707370220:2324], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:18:02.764423Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324986707370222:2497], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-04T02:18:02.768089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:18:02.769274Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324986707370222:2497], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2026-01-04T02:18:02.769429Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324986707370222:2497], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-04T02:18:02.776637Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324986707370222:2497], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:18:02.874336Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324986707370222:2497], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-04T02:18:02.918586Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591324986707370273:2529] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:18:02.918773Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324986707370222:2497], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-04T02:18:02.919287Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: Root, PoolId: sample_pool_id 2026-01-04T02:18:02.919315Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id Root 2026-01-04T02:18:02.919405Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324986707370280:2325], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2026-01-04T02:18:02.920512Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324986707370280:2325], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-04T02:18:02.920570Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2026-01-04T02:18:02.920597Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2026-01-04T02:18:02.920801Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:466: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7591324986707370289:2326], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 41] 2026-01-04T02:18:02.921742Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [1:7591324986707370289:2326], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2026-01-04T02:18:02.940820Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-04T02:18:02.940849Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-04T02:18:02.940883Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-04T02:18:02.941058Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=ZGNjZTgzMjktMTI0YmE0NDktMjZiYmE0NTgtMTZjYmRhNDQ=, ActorId: [1:7591324986707370220:2324], ActorState: ReadyState, LegacyTraceId: 01ke3cr7sv6adq0t05e1aeka2b, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# 
QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DDL text# ALTER RESOURCE POOL sample_pool_id SET ( CONCURRENT_QUERY_LIMIT=42 ); rpc_actor# [0:0:0] database# /Root database_id# /Root pool_id# default trace_id# 2026-01-04T02:18:02.944572Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324986707370301:2328], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-04T02:18:02.946709Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324986707370301:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:02.946809Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:02.947142Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-04T02:18:02.947180Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324986707370311:2329], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-04T02:18:02.947365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324986707370311:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:02.947419Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch poo ... ZDI5ZWYxYjM=, TxId: 2026-01-04T02:19:26.035917Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:201: [TQueryBase] [TRefreshPoolStateQuery] OwnerId: [10:7591325341886984459:2872], ActorId: [10:7591325341886984460:2873], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Update lease, RunDataQuery with SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, TxId: , text: -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); 2026-01-04T02:19:26.036911Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, ActorId: [10:7591325341886984462:2524], ActorState: ReadyState, LegacyTraceId: 01ke3ctrym78cavqdhvaz810st, Received request proxy_request_id# 29 prepared# false has_tx_control# true action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DML text# -- TRefreshPoolStateQuery::OnLeaseUpdated DECLARE $database_id AS Text; DECLARE $pool_id AS Text; SELECT COUNT(*) AS delayed_requests FROM `.metadata/workload_manager/delayed_requests` WHERE database = $database_id AND pool_id = $pool_id AND (wait_deadline IS NULL OR wait_deadline >= CurrentUtcTimestamp()) AND lease_deadline >= CurrentUtcTimestamp(); SELECT COUNT(*) AS running_requests FROM `.metadata/workload_manager/running_requests` WHERE database = $database_id AND pool_id = $pool_id AND lease_deadline >= CurrentUtcTimestamp(); rpc_actor# [10:7591325346181951790:2531] database# /Root database_id# /Root pool_id# trace_id# 2026-01-04T02:19:26.037770Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:967} SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, ActorId: [10:7591325341886984462:2524], ActorState: ExecuteState, LegacyTraceId: 01ke3ctrym78cavqdhvaz810st, Acquire mvcc snapshot trace_id# 2026-01-04T02:19:26.050144Z node 10 :KQP_SESSION TRACE: {KQPSA@kqp_session_actor.cpp:1008} SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, ActorId: [10:7591325341886984462:2524], ActorState: ExecuteState, LegacyTraceId: 01ke3ctrym78cavqdhvaz810st, Read snapshot result status# UNAVAILABLE step# 1767493166075 tx_id# 18446744073709551615 trace_id# 2026-01-04T02:19:26.050241Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, ActorId: [10:7591325341886984462:2524], ActorState: ExecuteState, LegacyTraceId: 01ke3ctrym78cavqdhvaz810st, ExecutePhyTx literal# false commit# false deferred_effects_size# 0 tx# 136980219508952 trace_id# 2026-01-04T02:19:26.050334Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, ActorId: 
[10:7591325341886984462:2524], ActorState: ExecuteState, LegacyTraceId: 01ke3ctrym78cavqdhvaz810st, Sending to Executer span_id_size# 8 trace_id# 2026-01-04T02:19:26.050533Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, ActorId: [10:7591325341886984462:2524], ActorState: ExecuteState, LegacyTraceId: 01ke3ctrym78cavqdhvaz810st, Created new KQP executer executer_id# [10:7591325346181951798:2524] is_rollback# false trace_id# 2026-01-04T02:19:26.075504Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, ActorId: [10:7591325341886984462:2524], ActorState: ExecuteState, LegacyTraceId: 01ke3ctrym78cavqdhvaz810st, TEvTxResponse current_tx# 1 transactions_size# 2 status# SUCCESS trace_id# 2026-01-04T02:19:26.075604Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, ActorId: [10:7591325341886984462:2524], ActorState: ExecuteState, LegacyTraceId: 01ke3ctrym78cavqdhvaz810st, ExecutePhyTx literal# true commit# true deferred_effects_size# 0 tx# 136980216065432 trace_id# 2026-01-04T02:19:26.076599Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, ActorId: [10:7591325341886984462:2524], ActorState: ExecuteState, LegacyTraceId: 01ke3ctrym78cavqdhvaz810st, TEvTxResponse current_tx# 2 transactions_size# 2 status# SUCCESS trace_id# 2026-01-04T02:19:26.076746Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, ActorId: [10:7591325341886984462:2524], ActorState: ExecuteState, LegacyTraceId: 01ke3ctrym78cavqdhvaz810st, TxInfo status# Committed kind# ReadOnly total_duration# 39.11 server_duration# 38.969 queries_count# 2 trace_id# 2026-01-04T02:19:26.076878Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, ActorId: [10:7591325341886984462:2524], ActorState: ExecuteState, LegacyTraceId: 01ke3ctrym78cavqdhvaz810st, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-04T02:19:26.076957Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, ActorId: [10:7591325341886984462:2524], ActorState: ExecuteState, LegacyTraceId: 01ke3ctrym78cavqdhvaz810st, Cleanup start is_final# false has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:19:26.076990Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, ActorId: [10:7591325341886984462:2524], ActorState: ExecuteState, LegacyTraceId: 01ke3ctrym78cavqdhvaz810st, EndCleanup is_final# false trace_id# 2026-01-04T02:19:26.077051Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, ActorId: [10:7591325341886984462:2524], ActorState: ExecuteState, LegacyTraceId: 01ke3ctrym78cavqdhvaz810st, Sent query response back to proxy proxy_request_id# 29 
proxy_id# [10:7591325251692669819:2265] trace_id# 2026-01-04T02:19:26.077516Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:244: [TQueryBase] [TRefreshPoolStateQuery] OwnerId: [10:7591325341886984459:2872], ActorId: [10:7591325341886984460:2873], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, DataQuery #2 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, TxId: 2026-01-04T02:19:26.077612Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.cpp:371: [TQueryBase] [TRefreshPoolStateQuery] OwnerId: [10:7591325341886984459:2872], ActorId: [10:7591325341886984460:2873], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Finish with SUCCESS, SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, TxId: 2026-01-04T02:19:26.077646Z node 10 :KQP_WORKLOAD_SERVICE TRACE: query_actor.cpp:171: [TQueryBase] [TRefreshPoolStateQuery] OwnerId: [10:7591325341886984459:2872], ActorId: [10:7591325341886984460:2873], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , State: Describe pool, Delete session: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM= 2026-01-04T02:19:26.077746Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.h:311: [TQueryRetryActor] [TRefreshPoolStateQuery] OwnerId: [10:7591325341886984458:2871], ActorId: [10:7591325341886984459:2872], TraceId: sample_pool_id, RequestDatabase: /Root, RequestSessionId: , Got response [10:7591325341886984460:2873] SUCCESS 2026-01-04T02:19:26.090796Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, ActorId: [10:7591325341886984462:2524], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-04T02:19:26.090867Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, ActorId: [10:7591325341886984462:2524], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:19:26.090915Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, ActorId: [10:7591325341886984462:2524], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-04T02:19:26.090955Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, ActorId: [10:7591325341886984462:2524], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:19:26.091135Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=10&id=M2Q4MjllYmUtYjM4YTNlMTctYzE1OWRkOTUtZDI5ZWYxYjM=, ActorId: [10:7591325341886984462:2524], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-04T02:19:26.114089Z node 10 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=10&id=Y2IwY2ViZTktODE4ZmVhMC01MDlmZWQ5Yy0xZjg2YTU4MQ==, ActorId: [10:7591325264577572331:2325], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-04T02:19:26.114153Z node 10 :KQP_SESSION INFO: 
{KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=10&id=Y2IwY2ViZTktODE4ZmVhMC01MDlmZWQ5Yy0xZjg2YTU4MQ==, ActorId: [10:7591325264577572331:2325], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:19:26.114192Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=10&id=Y2IwY2ViZTktODE4ZmVhMC01MDlmZWQ5Yy0xZjg2YTU4MQ==, ActorId: [10:7591325264577572331:2325], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-04T02:19:26.114248Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=10&id=Y2IwY2ViZTktODE4ZmVhMC01MDlmZWQ5Yy0xZjg2YTU4MQ==, ActorId: [10:7591325264577572331:2325], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:19:26.114355Z node 10 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=10&id=Y2IwY2ViZTktODE4ZmVhMC01MDlmZWQ5Yy0xZjg2YTU4MQ==, ActorId: [10:7591325264577572331:2325], ActorState: unknown state, Session actor destroyed trace_id# |81.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> DescribeSchemaSecretsService::SchemeCacheRetryErrors [GOOD] >> DescribeSchemaSecretsService::SchemeCacheMultipleNotRetryableErrors >> KqpWorkloadService::TestLessConcurrentQueryLimit [GOOD] >> KqpWorkloadService::TestZeroConcurrentQueryLimit >> KqpWorkloadServiceDistributed::TestDistributedLargeConcurrentQueryLimit [GOOD] >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit |81.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> KqpWorkloadService::TestHandlerActorCleanup [GOOD] >> KqpWorkloadService::TestDiskIsFullRunBelowQueryLimit >> TCreateAndDropViewTest::CallDropViewOnTable [FAIL] >> TCreateAndDropViewTest::DropSameViewTwice >> test_canonical_records.py::test_restart_pdisk >> YdbSdkSessionsPool::PeriodicTask/0 |81.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut |81.0%| [LD] {RESULT} $(B)/ydb/core/fq/libs/checkpointing/ut/ydb-core-fq-libs-checkpointing-ut >> TDataShardRSTest::TestCleanupInRS+UseSink [GOOD] >> TDataShardRSTest::TestCleanupInRS-UseSink |81.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |81.0%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |81.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_background_compaction/ydb-core-tx-datashard-ut_background_compaction |81.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut |81.0%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/list_topics/ut/ydb-core-persqueue-public-list_topics-ut |81.1%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |81.1%| [LD] {RESULT} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/memory_controller/ut/ydb-core-memory_controller-ut |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-fifo] [GOOD] |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |81.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/build_index/ut/ydb-core-tx-datashard-build_index-ut |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut |81.1%| [LD] {RESULT} $(B)/ydb/core/public_http/ut/ydb-core-public_http-ut >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |81.1%| [LD] {RESULT} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/shard/ut/ydb-core-graph-shard-ut >> test_canonical_records.py::test_replace_config |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |81.1%| [LD] {RESULT} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/coordinator/ut/ydb-core-tx-coordinator-ut >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |81.1%| [LD] {RESULT} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/ut/ydb-core-quoter-ut |81.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestZeroConcurrentQueryLimit [GOOD] Test command err: 2026-01-04T02:18:00.282884Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591324975149927043:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:00.282957Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:00.541144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:00.541241Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:00.542096Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:00.547078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:00.672070Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:00.672802Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591324975149927014:2081] 1767493080281616 != 1767493080281619 2026-01-04T02:18:00.725113Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:00.725142Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:00.725152Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:00.725241Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:00.757927Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:00.914850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:01.292094Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:02.653123Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-04T02:18:02.653244Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324983739862465:2320], Start check tables existence, number paths: 2 2026-01-04T02:18:02.656312Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-04T02:18:02.656378Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-04T02:18:02.656585Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324983739862465:2320], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-04T02:18:02.656646Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324983739862465:2320], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-04T02:18:02.656685Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324983739862465:2320], Successfully finished 2026-01-04T02:18:02.656761Z node 1 
:KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-04T02:18:02.656791Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-04T02:18:02.657419Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=N2YxMmJlYWMtMTcxYzc1M2UtMzQzZWRhMy03YTg0NDM3Yg==, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# N2YxMmJlYWMtMTcxYzc1M2UtMzQzZWRhMy03YTg0NDM3Yg== temp_dir_name# cf2da1d5-4594-945c-334b-0a8e37a1fc5f trace_id# 2026-01-04T02:18:02.657513Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=N2YxMmJlYWMtMTcxYzc1M2UtMzQzZWRhMy03YTg0NDM3Yg==, ActorId: [1:7591324983739862491:2324], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:18:02.668049Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324983739862493:2499], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-04T02:18:02.671615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:18:02.672768Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324983739862493:2499], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976715658 2026-01-04T02:18:02.672956Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324983739862493:2499], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-04T02:18:02.681099Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324983739862493:2499], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:18:02.775330Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324983739862493:2499], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-04T02:18:02.824710Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591324983739862544:2531] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:18:02.824843Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324983739862493:2499], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-04T02:18:02.827681Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=NDI5MWRiMWItYTYxZjRkMzctMmFkYzdlMS0xNjg5Y2E5Yw==, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NDI5MWRiMWItYTYxZjRkMzctMmFkYzdlMS0xNjg5Y2E5Yw== temp_dir_name# efcffd1a-4561-8184-cf00-33acccdbfadb trace_id# 2026-01-04T02:18:02.827799Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=NDI5MWRiMWItYTYxZjRkMzctMmFkYzdlMS0xNjg5Y2E5Yw==, ActorId: [1:7591324983739862552:2325], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:18:02.828078Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-04T02:18:02.828099Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-04T02:18:02.828284Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324983739862554:2326], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-04T02:18:02.828321Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=NDI5MWRiMWItYTYxZjRkMzctMmFkYzdlMS0xNjg5Y2E5Yw==, ActorId: [1:7591324983739862552:2325], ActorState: ReadyState, LegacyTraceId: 01ke3cr7pc6n8t9rttd1mbyh6f, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [1:7591324983739862551:2537] database# Root database_id# /Root pool_id# sample_pool_id trace_id# 2026-01-04T02:18:02.828365Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-04T02:18:02.828397Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Received new request from [1:7591324983739862552:2325], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NDI5MWRiMWItYTYxZjRkMzctMmFkYzdlMS0xNjg5Y2E5Yw== 2026-01-04T02:18:02.828473Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7591324983739862555:2327], Database: /Root, Start database fetching 2026-01-04T02:18:02.828645Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: 
[1:7591324983739862555:2327], Database: /Root, Database info successfully fetched, serverless: 0 2026-01-04T02:18:02.828716Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:247: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2026-01-04T02:18:02.828778Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:44: [WorkloadService] [TPoolResolverActor] ActorId: [1:7591324983739862562:2328], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NDI5MWRiMWItYTYxZjRkMzctMmFkYzdlMS0xNjg5Y2E5Yw==, Start pool fetching 2026-01-04T02:18:02.828816Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324983739862564:2329], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-04T02:18:02.829598Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324983739862554:2326], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-04T02:18:02.829607Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324983739862564:2329], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-04T02:18:02.829666Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:107: [WorkloadService] [TPoolResolverActor] ActorId: [1:7591324983739862562:2328], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=1&id=NDI5MWRiMWItYTYxZjRkMzctMmFkYzdlMS0xNjg5Y2E5Yw==, Pool info successfully resolved 2026-01-04T02:18:02.829683Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2026-01-04T02:18:02.829700Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler ... 6-01-04T02:20:47.032082Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:20:47.036464Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7591325693745327674:2566], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2026-01-04T02:20:47.042654Z node 7 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7591325693745327674:2566], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-04T02:20:47.051173Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7591325693745327674:2566], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:20:47.146150Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7591325693745327674:2566], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-04T02:20:47.150241Z node 7 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [7:7591325693745327725:2598] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:20:47.150516Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7591325693745327674:2566], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-04T02:20:47.161654Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=7&id=Y2I1YWJhNjktZGY1MWI3Zi1hNTNiYmM2Ny01NzUxZjM3ZQ==, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# Y2I1YWJhNjktZGY1MWI3Zi1hNTNiYmM2Ny01NzUxZjM3ZQ== temp_dir_name# 460e25e9-4022-1bf3-7ead-74bb095f06a2 trace_id# 2026-01-04T02:20:47.161896Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=7&id=Y2I1YWJhNjktZGY1MWI3Zi1hNTNiYmM2Ny01NzUxZjM3ZQ==, ActorId: [7:7591325693745327732:2356], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:20:47.166406Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-04T02:20:47.166438Z node 7 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-04T02:20:47.166625Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=7&id=Y2I1YWJhNjktZGY1MWI3Zi1hNTNiYmM2Ny01NzUxZjM3ZQ==, ActorId: [7:7591325693745327732:2356], ActorState: ReadyState, LegacyTraceId: 01ke3cx85ya7668z02wkp0ftf3, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_GENERIC_QUERY text# SELECT 42; rpc_actor# [7:7591325693745327731:2603] database# Root database_id# /Root pool_id# sample_pool_id trace_id# 2026-01-04T02:20:47.166683Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:176: [WorkloadService] [Service] Received new request from [7:7591325693745327732:2356], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=7&id=Y2I1YWJhNjktZGY1MWI3Zi1hNTNiYmM2Ny01NzUxZjM3ZQ== 2026-01-04T02:20:47.166755Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7591325693745327734:2357], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-04T02:20:47.167099Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7591325693745327735:2358], Database: /Root, Start database fetching 2026-01-04T02:20:47.167150Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-04T02:20:47.169221Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: 
[7:7591325693745327735:2358], Database: /Root, Database info successfully fetched, serverless: 0 2026-01-04T02:20:47.169398Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7591325693745327734:2357], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-04T02:20:47.169457Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:247: [WorkloadService] [Service] Successfully fetched database info, DatabaseId: /Root, Serverless: 0 2026-01-04T02:20:47.169513Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root 2026-01-04T02:20:47.169535Z node 7 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2026-01-04T02:20:47.169778Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:466: [WorkloadService] [TPoolHandlerActorBase] ActorId: [7:7591325693745327746:2360], DatabaseId: /Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 41] 2026-01-04T02:20:47.169860Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:44: [WorkloadService] [TPoolResolverActor] ActorId: [7:7591325693745327745:2359], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=7&id=Y2I1YWJhNjktZGY1MWI3Zi1hNTNiYmM2Ny01NzUxZjM3ZQ==, Start pool fetching 2026-01-04T02:20:47.169908Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7591325693745327747:2361], DatabaseId: /Root, PoolId: sample_pool_id, Start pool fetching 2026-01-04T02:20:47.171788Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [7:7591325693745327746:2360], DatabaseId: /Root, PoolId: sample_pool_id, Got watch notification 2026-01-04T02:20:47.172085Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7591325693745327747:2361], DatabaseId: /Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-04T02:20:47.172173Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:107: [WorkloadService] [TPoolResolverActor] ActorId: [7:7591325693745327745:2359], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=7&id=Y2I1YWJhNjktZGY1MWI3Zi1hNTNiYmM2Ny01NzUxZjM3ZQ==, Pool info successfully resolved 2026-01-04T02:20:47.172231Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:286: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: /Root, SessionId: ydb://session/3?node_id=7&id=Y2I1YWJhNjktZGY1MWI3Zi1hNTNiYmM2Ny01NzUxZjM3ZQ== 2026-01-04T02:20:47.172506Z node 7 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=7&id=Y2I1YWJhNjktZGY1MWI3Zi1hNTNiYmM2Ny01NzUxZjM3ZQ==, ActorId: [7:7591325693745327732:2356], ActorState: ExecuteState, LegacyTraceId: 01ke3cx85ya7668z02wkp0ftf3, Create QueryResponse for error on request, msg: Query failed during adding/waiting in workload pool sample_pool_id status# PRECONDITION_FAILED issues# { message: "Resource pool sample_pool_id was disabled due to zero concurrent query limit" severity: 1 } trace_id# 2026-01-04T02:20:47.172669Z node 7 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=7&id=Y2I1YWJhNjktZGY1MWI3Zi1hNTNiYmM2Ny01NzUxZjM3ZQ==, 
ActorId: [7:7591325693745327732:2356], ActorState: ExecuteState, LegacyTraceId: 01ke3cx85ya7668z02wkp0ftf3, Cleanup start is_final# true has_cleanup_ctx# true transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# true trace_id# 2026-01-04T02:20:47.172924Z node 7 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:297: [WorkloadService] [Service] Request placed into pool, DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=7&id=Y2I1YWJhNjktZGY1MWI3Zi1hNTNiYmM2Ny01NzUxZjM3ZQ== 2026-01-04T02:20:47.172998Z node 7 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:196: [WorkloadService] [Service] Finished request with worker actor [7:7591325693745327732:2356], DatabaseId: /Root, PoolId: sample_pool_id, SessionId: ydb://session/3?node_id=7&id=Y2I1YWJhNjktZGY1MWI3Zi1hNTNiYmM2Ny01NzUxZjM3ZQ== 2026-01-04T02:20:47.173101Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=7&id=Y2I1YWJhNjktZGY1MWI3Zi1hNTNiYmM2Ny01NzUxZjM3ZQ==, ActorId: [7:7591325693745327732:2356], ActorState: CleanupState, LegacyTraceId: 01ke3cx85ya7668z02wkp0ftf3, EndCleanup is_final# true trace_id# 2026-01-04T02:20:47.173226Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=7&id=Y2I1YWJhNjktZGY1MWI3Zi1hNTNiYmM2Ny01NzUxZjM3ZQ==, ActorId: [7:7591325693745327732:2356], ActorState: CleanupState, LegacyTraceId: 01ke3cx85ya7668z02wkp0ftf3, Sent query response back to proxy proxy_request_id# 3 proxy_id# [7:7591325607845981020:2265] trace_id# 2026-01-04T02:20:47.173266Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=7&id=Y2I1YWJhNjktZGY1MWI3Zi1hNTNiYmM2Ny01NzUxZjM3ZQ==, ActorId: [7:7591325693745327732:2356], ActorState: unknown state, LegacyTraceId: 01ke3cx85ya7668z02wkp0ftf3, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:20:47.173405Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=7&id=Y2I1YWJhNjktZGY1MWI3Zi1hNTNiYmM2Ny01NzUxZjM3ZQ==, ActorId: [7:7591325693745327732:2356], ActorState: unknown state, LegacyTraceId: 01ke3cx85ya7668z02wkp0ftf3, Session actor destroyed trace_id# 2026-01-04T02:20:47.236784Z node 7 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=7&id=N2IwZGFjMjctODJiZThkYmMtOGU1NjlhM2EtMjRjZTAyMTQ=, ActorId: [7:7591325693745327671:2354], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-04T02:20:47.236853Z node 7 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=7&id=N2IwZGFjMjctODJiZThkYmMtOGU1NjlhM2EtMjRjZTAyMTQ=, ActorId: [7:7591325693745327671:2354], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:20:47.236900Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=7&id=N2IwZGFjMjctODJiZThkYmMtOGU1NjlhM2EtMjRjZTAyMTQ=, ActorId: [7:7591325693745327671:2354], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-04T02:20:47.236954Z node 7 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=7&id=N2IwZGFjMjctODJiZThkYmMtOGU1NjlhM2EtMjRjZTAyMTQ=, ActorId: [7:7591325693745327671:2354], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:20:47.237055Z node 7 :KQP_SESSION DEBUG: 
{KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=7&id=N2IwZGFjMjctODJiZThkYmMtOGU1NjlhM2EtMjRjZTAyMTQ=, ActorId: [7:7591325693745327671:2354], ActorState: unknown state, Session actor destroyed trace_id# |81.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |81.1%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/describer/ut/ydb-core-persqueue-public-describer-ut >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_auth_header [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] |81.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut |81.1%| [LD] {RESULT} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] |81.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/keyvalue/ut/ydb-services-keyvalue-ut >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] >> TCreateAndDropViewTest::DropSameViewTwice [GOOD] >> TCreateAndDropViewTest::DropViewIfExists >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] |81.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |81.2%| [LD] {RESULT} $(B)/ydb/core/graph/ut/ydb-core-graph-ut |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/graph/ut/ydb-core-graph-ut >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__ASYNC-pk_types10-all_types10-index10-pgtimestamp--ASYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__SYNC-pk_types9-all_types9-index9-pgtimestamp--SYNC] |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> DescribeSchemaSecretsService::SchemeCacheMultipleNotRetryableErrors [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v0] [GOOD] >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] >> YdbSdkSessionsPool::PeriodicTask/0 [GOOD] >> YdbSdkSessionsPool::PeriodicTask/1 >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/federated_query/ut_service/fast/unittest >> DescribeSchemaSecretsService::SchemeCacheMultipleNotRetryableErrors [GOOD] Test command err: Trying to start YDB, gRPC: 24650, MsgBus: 22050 2026-01-04T02:18:00.498056Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591324975251924902:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:00.506680Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:00.747394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:00.747493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:00.785723Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:00.802442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:00.820276Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:00.920427Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:00.920455Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:00.920469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:00.920571Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:00.968600Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:01.253643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:01.302007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:01.424137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:01.522436Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:01.528641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:01.589342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:03.503053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324988136828593:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:03.503228Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:03.503612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324988136828603:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:03.503683Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:03.799680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:03.823350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:03.847238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:03.869006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:03.893923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:03.924468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:03.968413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:04.030458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:04.117720Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324992431796770:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:04.117789Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:04.118279Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324992431796776:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:04.118285Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324992431796774:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:04.118322Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:04.121507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:18:04.136315Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324992431796779:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:18:04.196465Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591324992431796830:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:18:05.497985Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591324975251924902:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:05.498073Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:18:05.786352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) Trying to start YDB, gRPC: 22781, MsgBus: 22623 2026-01-04T02:18:06.621273Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undeli ... ;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:20:12.049205Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:20:12.085978Z node 14 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [14:7591325519411547232:2081] 1767493206811676 != 1767493206811679 2026-01-04T02:20:12.263532Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:20:12.296127Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:20:12.455194Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:20:12.494253Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:20:14.545524Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard_self_pinger.cpp:17: Schemeshard 72057594046644480 response time is 1027 msec 2026-01-04T02:20:15.017631Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:20:15.017663Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:20:15.017675Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:20:15.017789Z node 14 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:20:25.554468Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:20:25.554494Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:20:33.569826Z node 14 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:20:33.763654Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:20:33.816108Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:20:35.229409Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:20:37.195159Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:20:38.156635Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:20:47.591213Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7591325691210241098:2429], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:20:47.592659Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:20:47.593600Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7591325695505208413:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:20:47.593686Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:20:47.594487Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7591325695505208415:2439], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:20:47.594613Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:20:48.065410Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:20:48.287649Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:20:48.416250Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:20:48.564228Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:20:48.736551Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:20:48.892162Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:20:49.031571Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:20:49.226192Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:20:49.482184Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7591325704095143913:2528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:20:49.482372Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:20:49.486711Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7591325704095143918:2531], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:20:49.486847Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7591325704095143919:2532], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:20:49.486946Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:20:49.494972Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:20:49.524387Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7591325704095143922:2533], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:20:49.619597Z node 14 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [14:7591325704095143973:3896] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:20:57.273243Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) |81.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/federated_query/ut_service/fast/unittest |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-fifo] [GOOD] |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] |81.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut |81.2%| [TM] {RESULT} ydb/core/kqp/federated_query/ut_service/fast/unittest |81.2%| [LD] {RESULT} $(B)/ydb/library/yql/providers/solomon/actors/ut/ydb-library-yql-providers-solomon-actors-ut >> TCreateAndDropViewTest::DropViewIfExists [FAIL] >> TCreateAndDropViewTest::DropViewInFolder >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-fifo] [GOOD] >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_sqs_action_counters 
[GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] |81.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence |81.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_sequence/ydb-core-tx-datashard-ut_sequence >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v0] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] |81.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable |81.2%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel_unstable/ydb-core-cms-ut_sentinel_unstable >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] |81.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |81.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_disk_quotas/ydb-core-tx-datashard-ut_disk_quotas >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v0] [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_canonical_records.py::test_dstool_evict_vdisk_grpc [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] |81.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |81.2%| [LD] {RESULT} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sequenceproxy/ut/ydb-core-tx-sequenceproxy-ut |81.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |81.2%| [LD] {RESULT} 
$(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut |81.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/mediator/ut/ydb-core-tx-mediator-ut >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] >> TCreateAndDropViewTest::DropViewInFolder [GOOD] >> TCreateAndDropViewTest::ContextPollution >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-fifo] [GOOD] >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/pqtablet/quota/ut/ydb-core-persqueue-pqtablet-quota-ut |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/pqtablet/quota/ut/ydb-core-persqueue-pqtablet-quota-ut |81.3%| [LD] {RESULT} $(B)/ydb/core/persqueue/pqtablet/quota/ut/ydb-core-persqueue-pqtablet-quota-ut |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v1-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] |81.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_create_queue[tables_format_v0-std] [GOOD] |81.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TDataShardRSTest::TestCleanupInRS-UseSink [GOOD] >> TDataShardRSTest::TestDelayedRSAckForUnknownTx |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut |81.3%| [LD] {RESULT} $(B)/ydb/core/ymq/actor/cloud_events/cloud_events_ut/ydb-core-ymq-actor-cloud_events-cloud_events_ut >> YdbSdkSessionsPool::PeriodicTask/1 [GOOD] >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v1] [GOOD] |81.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] |81.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> 
test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queue_counters_are_in_folder[tables_format_v1] [GOOD] |81.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_queues_for_unknown_cloud[tables_format_v1] [GOOD] |81.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |81.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/discovery/ydb-core-kqp-ut-discovery >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-std] [GOOD] |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[std-tables_format_v1] [GOOD] |81.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation |81.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot_isolation/ydb-core-tx-datashard-ut_snapshot_isolation |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |81.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |81.3%| [LD] {RESULT} $(B)/ydb/core/persqueue/public/mlp/ut/ydb-core-persqueue-public-mlp-ut |81.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__ASYNC-pk_types13-all_types13-index13-DyNumber--ASYNC] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dstool_evict_vdisk_grpc [FAIL] |81.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[fifo-tables_format_v1] [GOOD] |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_empty_access_key_id[tables_format_v1] [GOOD] |81.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |81.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |81.4%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |81.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction |81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_compaction/ydb-core-tx-datashard-ut_compaction >> TCreateAndDropViewTest::ContextPollution [GOOD] >> TEvaluateExprInViewTest::EvaluateExpr >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v0] [GOOD] |81.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v1-std] [GOOD] |81.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TDataShardRSTest::TestDelayedRSAckForUnknownTx [GOOD] >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_ymq_send_read_delete [GOOD] |81.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v1-std] [GOOD] |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |81.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-fifo] [GOOD] |81.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_double_create_queue[fifo-tables_format_v1] [GOOD] |81.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] |81.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.4%| [TM] {BAZEL_UPLOAD} 
ydb/tests/datashard/parametrized_queries/py3test |81.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |81.4%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |81.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_stats/ydb-core-tx-datashard-ut_stats |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v1-std] [GOOD] |81.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::PeriodicTask/1 [GOOD] |81.5%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |81.5%| [LD] {RESULT} $(B)/ydb/core/grpc_services/tablet/ut/ydb-core-grpc_services-tablet-ut |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v1-std] [GOOD] |81.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> 
test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TDataShardRSTest::TestDelayedRSAckForOutOfOrderCompletedTx [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionCommit |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_all_types-pk_types12-all_types12-index12---] |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_vdisk_io/ydb-core-blobstorage-ut_vdisk_io |81.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_vdisk_io/ydb-core-blobstorage-ut_vdisk_io |81.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk_io/ydb-core-blobstorage-ut_vdisk_io >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Date-pk_types18-all_types18-index18-Date--] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [FAIL] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] |81.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_setup_in_cloud[tables_format_v0-std] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-fifo] [GOOD] |81.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |81.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_secret/ydb-core-tx-schemeshard-ut_secret |81.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |81.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain_reboots/ydb-core-tx-schemeshard-ut_extsubdomain_reboots |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithPath::test_private_queue_recreation[tables_format_v0-std] [GOOD] |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> 
test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] |81.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |81.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__ASYNC-pk_types13-all_types13-index13-DyNumber--ASYNC] [GOOD] |81.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] >> TEvaluateExprInViewTest::EvaluateExpr [GOOD] >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |81.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v1-std] [GOOD] Test command err: run test with cloud_id=CLOUD_FOR_folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b folder_id=folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b iam_token=usr_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b cloud_account=acc_ff64869a-e913-11f0-bb82-d00dcd536b8b 2026-01-04T02:20:54.908747Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b]","tx_id":"281474976720690","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-04T02:20:55.158709Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b/000000000000000104at]","tx_id":"281474976720696","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-04T02:20:55.208069Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b/000000000000000104at/v2]","tx_id":"281474976720697","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-04T02:20:55.465616Z: {"request_id":"ae2c07d-9144f2ae-c5f1636-db32f7fe","cloud_id":"CLOUD_FOR_folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b","subject":"fake_user_sid@as","operation":"create_queue","component":"ymq","folder_id":"folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b"} ======================================== 2026-01-04T02:20:55.924158Z: 
{"request_id":"ae2c07d-9144f2ae-c5f1636-db32f7fe","permission":"ymq.queues.create","id":"7353433758714251890$CreateMessageQueue$2026-01-04T02:20:55.923964Z","idempotency_id":"7353433758714251890$CreateMessageQueue$2026-01-04T02:20:55.006000Z","cloud_id":"CLOUD_FOR_folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-04T02:20:55.006000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_ff36f9af-e913-11f0-ad5b-d00dcd536b8b.fifo","resource_id":"000000000000000104at","labels":"{}","operation":"CreateMessageQueue","folder_id":"folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b","component":"ymq"} ======================================== 2026-01-04T02:21:00.039690Z: {"request_id":"1b4bfe28-3ef68ffe-8f71e7d1-1b3d5eb5","permission":"ymq.queues.setAttributes","id":"10898078111235396837$UpdateMessageQueue$2026-01-04T02:21:00.039408Z","idempotency_id":"10898078111235396837$UpdateMessageQueue$2026-01-04T02:20:56.645000Z","cloud_id":"CLOUD_FOR_folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-04T02:20:56.645000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_ff36f9af-e913-11f0-ad5b-d00dcd536b8b.fifo","resource_id":"000000000000000104at","labels":"{\"tag_key_1\":\"tag_value_1\",\"tag_key_2\":\"tag_value_2\"}","operation":"UpdateMessageQueue","folder_id":"folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b","component":"ymq"} ======================================== 2026-01-04T02:21:00.071640Z: {"request_id":"7bd2eaca-d618de9c-7453a10e-bdbc034d","permission":"ymq.queues.setAttributes","id":"5328083850729988973$UpdateMessageQueue$2026-01-04T02:21:00.039498Z","idempotency_id":"5328083850729988973$UpdateMessageQueue$2026-01-04T02:20:57.779000Z","cloud_id":"CLOUD_FOR_folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-04T02:20:57.779000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_ff36f9af-e913-11f0-ad5b-d00dcd536b8b.fifo","resource_id":"000000000000000104at","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b","component":"ymq"} ======================================== 2026-01-04T02:21:00.051583Z: {"request_id":"1b4bfe28-3ef68ffe-8f71e7d1-1b3d5eb5","permission":"ymq.queues.setAttributes","id":"10898078111235396837$UpdateMessageQueue$2026-01-04T02:21:00.051276Z","idempotency_id":"10898078111235396837$UpdateMessageQueue$2026-01-04T02:20:56.645000Z","cloud_id":"CLOUD_FOR_folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-04T02:20:56.645000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_ff36f9af-e913-11f0-ad5b-d00dcd536b8b.fifo","resource_id":"000000000000000104at","labels":"{\"tag_key_1\":\"tag_value_1\",\"tag_key_2\":\"tag_value_2\"}","operation":"UpdateMessageQueue","folder_id":"folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b","component":"ymq"} ======================================== 2026-01-04T02:21:00.052242Z: 
{"request_id":"7bd2eaca-d618de9c-7453a10e-bdbc034d","permission":"ymq.queues.setAttributes","id":"5328083850729988973$UpdateMessageQueue$2026-01-04T02:21:00.051363Z","idempotency_id":"5328083850729988973$UpdateMessageQueue$2026-01-04T02:20:57.779000Z","cloud_id":"CLOUD_FOR_folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-04T02:20:57.779000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_ff36f9af-e913-11f0-ad5b-d00dcd536b8b.fifo","resource_id":"000000000000000104at","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b","component":"ymq"} ======================================== 2026-01-04T02:21:00.195764Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b/000000000000000104at/v2]","tx_id":"281474976720712","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-04T02:21:00.243925Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b/000000000000000104at]","tx_id":"281474976720713","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-04T02:21:00.264352Z: {"request_id":"319ae7b3-d5c77785-ac93e3ea-784f7b90","cloud_id":"CLOUD_FOR_folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b","subject":"fake_user_sid@as","queue":"000000000000000104at","resource_id":"000000000000000104at","operation":"delete_queue","component":"ymq","folder_id":"folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b"} ======================================== 2026-01-04T02:21:02.101711Z: {"request_id":"319ae7b3-d5c77785-ac93e3ea-784f7b90","permission":"ymq.queues.delete","id":"17038248017504999667$DeleteMessageQueue$2026-01-04T02:21:02.101543Z","idempotency_id":"17038248017504999667$DeleteMessageQueue$2026-01-04T02:21:00.023000Z","cloud_id":"CLOUD_FOR_folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-04T02:21:00.023000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_ff36f9af-e913-11f0-ad5b-d00dcd536b8b.fifo","resource_id":"000000000000000104at","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_ff64869a-e913-11f0-bb82-d00dcd536b8b","component":"ymq"} ======================================== ======================================== run test with cloud_id=CLOUD_FOR_folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b folder_id=folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b iam_token=usr_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b cloud_account=acc_0a190f0a-e914-11f0-8594-d00dcd536b8b 2026-01-04T02:21:12.815133Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b]","tx_id":"281474976720730","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} 
======================================== 2026-01-04T02:21:12.998719Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b/000000000000000303e5]","tx_id":"281474976720736","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-04T02:21:13.082858Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b/000000000000000303e5/v4]","tx_id":"281474976720737","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-04T02:21:13.353517Z: {"request_id":"dba6329d-f412dd1a-c1196ee8-b44c71dd","cloud_id":"CLOUD_FOR_folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b","subject":"fake_user_sid@as","operation":"create_queue","component":"ymq","folder_id":"folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b"} ======================================== 2026-01-04T02:21:14.455826Z: {"request_id":"dba6329d-f412dd1a-c1196ee8-b44c71dd","permission":"ymq.queues.create","id":"5880815907948074683$CreateMessageQueue$2026-01-04T02:21:14.455678Z","idempotency_id":"5880815907948074683$CreateMessageQueue$2026-01-04T02:21:12.895000Z","cloud_id":"CLOUD_FOR_folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-04T02:21:12.895000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_09f52b41-e914-11f0-9020-d00dcd536b8b","resource_id":"000000000000000303e5","labels":"{}","operation":"CreateMessageQueue","folder_id":"folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b","component":"ymq"} ======================================== 2026-01-04T02:21:14.756625Z: {"request_id":"b9c09368-3459daf2-77da36ce-b7af7ab3","permission":"ymq.queues.setAttributes","id":"12845489290651142890$UpdateMessageQueue$2026-01-04T02:21:14.756443Z","idempotency_id":"12845489290651142890$UpdateMessageQueue$2026-01-04T02:21:14.511000Z","cloud_id":"CLOUD_FOR_folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-04T02:21:14.511000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_09f52b41-e914-11f0-9020-d00dcd536b8b","resource_id":"000000000000000303e5","labels":"{\"tag_key_1\":\"tag_value_1\",\"tag_key_2\":\"tag_value_2\"}","operation":"UpdateMessageQueue","folder_id":"folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b","component":"ymq"} ======================================== 2026-01-04T02:21:16.503351Z: {"request_id":"a793e1cd-160b98a8-7f192512-b423566c","permission":"ymq.queues.setAttributes","id":"5868837212868680707$UpdateMessageQueue$2026-01-04T02:21:16.503244Z","idempotency_id":"5868837212868680707$UpdateMessageQueue$2026-01-04T02:21:15.617000Z","cloud_id":"CLOUD_FOR_folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b","masked_token":"*** hidden 
***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-04T02:21:15.617000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_09f52b41-e914-11f0-9020-d00dcd536b8b","resource_id":"000000000000000303e5","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b","component":"ymq"} ======================================== 2026-01-04T02:21:16.757276Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b/000000000000000303e5/v4]","tx_id":"281474976720760","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-04T02:21:16.783939Z: {"request_id":"644de1ee-4ee5ee4b-12d43a6f-f7891f29","permission":"ymq.queues.delete","id":"14230916920665673490$DeleteMessageQueue$2026-01-04T02:21:16.783830Z","idempotency_id":"14230916920665673490$DeleteMessageQueue$2026-01-04T02:21:16.691000Z","cloud_id":"CLOUD_FOR_folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-04T02:21:16.691000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_09f52b41-e914-11f0-9020-d00dcd536b8b","resource_id":"000000000000000303e5","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b","component":"ymq"} ======================================== 2026-01-04T02:21:16.787151Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b/000000000000000303e5]","tx_id":"281474976720761","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-04T02:21:16.801591Z: {"request_id":"644de1ee-4ee5ee4b-12d43a6f-f7891f29","cloud_id":"CLOUD_FOR_folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b","subject":"fake_user_sid@as","queue":"000000000000000303e5","resource_id":"000000000000000303e5","operation":"delete_queue","component":"ymq","folder_id":"folder_acc_0a190f0a-e914-11f0-8594-d00dcd536b8b"} ======================================== ======================================== |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |81.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TDataShardRSTest::TestGenericReadSetDecisionCommit [GOOD] >> TDataShardRSTest::TestGenericReadSetDecisionAbort |81.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |81.6%| [LD] {RESULT} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v0-std] [GOOD] |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/hive_metastore/ut/ydb-core-external_sources-hive_metastore-ut |81.6%| [LD] 
{default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |81.6%| [LD] {RESULT} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/proxy/ut/ydb-core-kesus-proxy-ut |81.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_count_queues[tables_format_v1] [GOOD] |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |81.6%| [LD] {RESULT} $(B)/ydb/core/kqp/runtime/ut/ydb-core-kqp-runtime-ut |81.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut |81.6%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/ydb-core-http_proxy-ut-sqs_topic_ut >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v0-std] [GOOD] |81.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_queue_recreation[tables_format_v1-std] [GOOD] |81.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |81.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] |81.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |81.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap |81.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/ydb-core-kqp-ut-olap >> test_canonical_records.py::test_restart_pdisk [GOOD] >> TSchemeShardSecretTest::DefaultDescribeSecret >> TDataShardRSTest::TestGenericReadSetDecisionAbort [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> TSchemeShardSecretTest::CreateSecretInSubdomain >> listing_paging.py::TestListingPaging::test_listing_paging_solomon >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_selectors >> 
basic_reading.py::TestBasicReading::test_basic_reading_solomon >> TSchemeShardSecretTest::EmptySecretName >> data_paging.py::TestDataPaging::test_data_paging_solomon >> listing_batching.py::TestListingBatching::test_listing_batching_solomon ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_rs/unittest >> TDataShardRSTest::TestGenericReadSetDecisionAbort [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:19:11.619999Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:19:11.756632Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:19:11.757137Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:19:11.757205Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:19:12.096166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:19:12.096323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:19:12.182600Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493148695540 != 1767493148695543 2026-01-04T02:19:12.200872Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:19:12.250289Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:19:12.346202Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:19:12.665159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:19:12.678610Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:19:12.796625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:19:12.843365Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:899:2774] 2026-01-04T02:19:12.843589Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:19:12.893305Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:19:12.893536Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:19:12.895497Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:19:12.895600Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:19:12.895691Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:19:12.896133Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:19:12.896475Z node 1 :TX_DATASHARD 
INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:906:2776] 2026-01-04T02:19:12.896725Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:19:12.911529Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:19:12.911638Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:944:2774] in generation 1 2026-01-04T02:19:12.913783Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:19:12.913966Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:19:12.915569Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-04T02:19:12.915648Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-04T02:19:12.915707Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-04T02:19:12.916046Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:19:12.916408Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:19:12.916481Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:955:2776] in generation 1 2026-01-04T02:19:12.916866Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:912:2780] 2026-01-04T02:19:12.917085Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:19:12.932646Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:915:2783] 2026-01-04T02:19:12.932887Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:19:12.947072Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:19:12.947259Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:19:12.948646Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2026-01-04T02:19:12.948725Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037891 2026-01-04T02:19:12.948779Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037891 2026-01-04T02:19:12.949081Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:19:12.949269Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:19:12.949329Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037891 persisting started state actor id [1:968:2780] in generation 1 2026-01-04T02:19:12.949663Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:19:12.949753Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:19:12.951070Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-04T02:19:12.951143Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-04T02:19:12.951212Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-04T02:19:12.951497Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:19:12.951599Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:19:12.951656Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:969:2783] in generation 1 2026-01-04T02:19:12.966781Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:19:13.027825Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:19:13.028071Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:19:13.028200Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:974:2816] 2026-01-04T02:19:13.028245Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:19:13.028280Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:19:13.028316Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:19:13.028422Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:19:13.028462Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:19:13.028518Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:19:13.028573Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:975:2817] 2026-01-04T02:19:13.028600Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:19:13.028622Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:19:13.028676Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:19:13.029082Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:19:13.029117Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2026-01-04T02:19:13.029167Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:19:13.029216Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037891, actorId: [1:976:2818] 2026-01-04T02:19:13.029239Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037891 2026-01-04T02:19:13.029260Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2026-01-04T02:19:13.029298Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037891 2026-01-04T02:19:13.029724Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:19:13.029768Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-04T02:19:13.029835Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:19:13.029898Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:977:2819] 2026-01-04T02:19:13.029929Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-04T02:19:13.029962Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: Wai ... qpBuffer::TEvError in ExecuteState, status: ABORTED send to: [6:1194:2936] from: [6:1193:2936] trace_id# 2026-01-04T02:21:58.198088Z node 6 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [6:1194:2936] TxId: 281474976715664. Ctx: { TraceId: 01ke3czdczdswrgsy40jr56gfh, Database: , SessionId: ydb://session/3?node_id=6&id=YjIyNmE2YjMtNTRiM2Q3N2YtNGI3ZDYwNmItZDRhZmE3Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-04T02:21:58.198644Z node 6 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=6&id=YjIyNmE2YjMtNTRiM2Q3N2YtNGI3ZDYwNmItZDRhZmE3Y2U=, ActorId: [6:1135:2936], ActorState: ExecuteState, LegacyTraceId: 01ke3czdczdswrgsy40jr56gfh, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table-1`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-04T02:21:58.199421Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [6:1211:2989], Recipient [6:970:2825]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:21:58.199471Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:21:58.199515Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [6:1209:2988], serverId# [6:1211:2989], sessionId# [0:0:0] 2026-01-04T02:21:58.199648Z node 6 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [6:68:2115] Handle TEvExecuteKqpTransaction 2026-01-04T02:21:58.199688Z node 6 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [6:68:2115] TxId# 281474976715665 ProcessProposeKqpTransaction ... generic readset: Decision: DECISION_ABORT 2026-01-04T02:21:58.199987Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [6:886:2765], Recipient [6:970:2825]: {TEvReadSet step# 3002 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-04T02:21:58.200026Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-04T02:21:58.200075Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715664 2026-01-04T02:21:58.200153Z node 6 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3002 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-04T02:21:58.200220Z node 6 :TX_DATASHARD TRACE: volatile_tx.cpp:884: Processed readset with decision 2 from 72075186224037888 to 72075186224037889 at tablet 72075186224037889 2026-01-04T02:21:58.200338Z node 6 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-04T02:21:58.200914Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [6:1193:2936], Recipient [6:886:2765]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 } Op: Rollback } 2026-01-04T02:21:58.200954Z node 6 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2026-01-04T02:21:58.201232Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [6:886:2765], Recipient [6:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 
2026-01-04T02:21:58.201274Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-04T02:21:58.201365Z node 6 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2026-01-04T02:21:58.201517Z node 6 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 } Op: Rollback } 2026-01-04T02:21:58.206081Z node 6 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715662, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-04T02:21:58.206271Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2026-01-04T02:21:58.206368Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-04T02:21:58.206432Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2026-01-04T02:21:58.206491Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:21:58.206552Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:21:58.206616Z node 6 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3002/281474976715664 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3001/0 ImmediateWriteEdgeReplied# v3001/0 2026-01-04T02:21:58.206717Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2026-01-04T02:21:58.206774Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-04T02:21:58.206806Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:21:58.206837Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2026-01-04T02:21:58.206866Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2026-01-04T02:21:58.206896Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-04T02:21:58.206922Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2026-01-04T02:21:58.206948Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2026-01-04T02:21:58.206978Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2026-01-04T02:21:58.207021Z node 6 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037888 2026-01-04T02:21:58.207166Z node 6 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715662 DataShard: 72075186224037888 Generation: 
1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 2026-01-04T02:21:58.207227Z node 6 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:6] at 72075186224037888 2026-01-04T02:21:58.207289Z node 6 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-04T02:21:58.207378Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:21:58.207412Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2026-01-04T02:21:58.207460Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-04T02:21:58.207508Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T02:21:58.207547Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is DelayComplete 2026-01-04T02:21:58.207590Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-04T02:21:58.207640Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:21:58.207683Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:21:58.207740Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-04T02:21:58.207768Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:21:58.207802Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished 2026-01-04T02:21:58.207893Z node 6 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-04T02:21:58.207953Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T02:21:58.208013Z node 6 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-04T02:21:58.208128Z node 6 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:21:58.208882Z node 6 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976715664; 2026-01-04T02:21:58.209106Z node 6 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [3002 : 281474976715664] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2026-01-04T02:21:58.209206Z node 6 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:21:58.214937Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [6:970:2825], Recipient [6:886:2765]: 
{TEvReadSet step# 3002 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2026-01-04T02:21:58.215031Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:21:58.215099Z node 6 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 2026-01-04T02:21:58.215252Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [6:70:2117], Recipient [6:886:2765]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715662 LockNode: 6 Status: STATUS_NOT_FOUND |81.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_rs/unittest >> TSchemeShardSecretTest::DefaultDescribeSecret [GOOD] >> TSchemeShardSecretTest::CreateSecretOverExistingSecret >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] >> TSchemeShardSecretTest::CreateSecretInSubdomain [GOOD] >> TSchemeShardSecretTest::CreateSecretInheritPermissions >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> test_canonical_records.py::test_replace_config [GOOD] |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v0-std] [GOOD] |81.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test |81.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |81.7%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test >> TSchemeShardSecretTest::EmptySecretName [GOOD] >> TSchemeShardSecretTest::DropUnexistingSecret |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_queue_recreation[tables_format_v1-std] [GOOD] |81.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TSchemeShardSecretTest::CreateSecretOverExistingSecret [GOOD] >> TSchemeShardSecretTest::CreateSecretInheritPermissions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretOverExistingSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:21:59.037342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:21:59.037456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:21:59.037496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:21:59.037533Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:21:59.037567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:21:59.037594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:21:59.037643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:21:59.037718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:21:59.039186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:21:59.039497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:21:59.149907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:21:59.149997Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:21:59.175100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:21:59.182749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:21:59.183006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:21:59.202963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:21:59.203278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:21:59.204003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:21:59.204324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:21:59.213633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:21:59.213907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:21:59.215281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:21:59.215370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:21:59.215513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:21:59.215563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:21:59.215668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:21:59.215872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:21:59.415191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.420099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.420292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.420383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.420462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.420533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.420598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.420663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.420775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.420870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.420958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.421024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.421083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.421157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.421260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 01-04T02:22:02.752027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 4 2026-01-04T02:22:02.752077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-04T02:22:02.752976Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:22:02.753064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:22:02.753098Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:22:02.753138Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-04T02:22:02.753190Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:22:02.753907Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:22:02.753993Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:22:02.754023Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:22:02.754053Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-04T02:22:02.754083Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:22:02.758535Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:22:02.763953Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:22:02.764091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:22:02.764357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:22:02.764403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:22:02.764801Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:22:02.764910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:22:02.764952Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:731:2720] TestWaitNotification: OK eventTxId 102 2026-01-04T02:22:02.765436Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:22:02.765681Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 272us result status StatusSuccess 2026-01-04T02:22:02.766038Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-secret" PathDescription { Self { Name: "test-secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2026-01-04T02:22:02.769577Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/dir" OperationType: ESchemeOpCreateSecret CreateSecret { Name: "test-secret" Value: "test-value-new" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:22:02.769907Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_secret.cpp:152: [72057594046678944] TCreateSecret Propose, path: /MyRoot/dir/test-secret, opId: 103:0 2026-01-04T02:22:02.769999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_secret.cpp:160: [72057594046678944] TCreateSecret Propose, path: /MyRoot/dir/test-secret, opId: 103:0, secretDescription (without secret parts): Name: "test-secret" 2026-01-04T02:22:02.770150Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/dir/test-secret', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-04T02:22:02.775612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/dir/test-secret\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateNoChanges)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 102, at schemeshard: 72057594046678944 2026-01-04T02:22:02.775912Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/dir/test-secret', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateNoChanges), operation: CREATE SECRET, path: /MyRoot/dir/test-secret TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-04T02:22:02.776331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:22:02.776379Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:22:02.776800Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:22:02.776906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:22:02.776947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:739:2728] TestWaitNotification: OK eventTxId 103 2026-01-04T02:22:02.777463Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-01-04T02:22:02.777655Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 218us result status StatusSuccess 2026-01-04T02:22:02.778008Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: 
Status: StatusSuccess Path: "/MyRoot/dir/test-secret" PathDescription { Self { Name: "test-secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Value: "test-value-init" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |81.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::InheritPermissionsWithDifferentInheritanceTypes >> TSchemeShardSecretTest::DropUnexistingSecret [GOOD] >> TSchemeShardSecretTest::CreateSecretOverExistingObject ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretInheritPermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:21:59.579180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:21:59.579268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:21:59.579307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:21:59.579349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:21:59.579385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:21:59.579449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:21:59.579524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:21:59.579611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:21:59.580555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:21:59.580888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:21:59.702451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:21:59.702505Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:21:59.732573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:21:59.737930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:21:59.738127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:21:59.743374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:21:59.743571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:21:59.744317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:21:59.744582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:21:59.747166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:21:59.747344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:21:59.748359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:21:59.748403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:21:59.748482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:21:59.748516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:21:59.748550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:21:59.748657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2026-01-04T02:21:59.941045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.945281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.945459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.945538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.945631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.945695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.945754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.946044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.946134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.946263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.946335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.946399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.946494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.946594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:21:59.946674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
rivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:22:03.583560Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/secret" took 209us result status StatusSuccess 2026-01-04T02:22:03.583855Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/secret" PathDescription { Self { Name: "secret" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\000\020\200\004\032\005user2 \003(\001\n\020\010\001\020\200\004\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user2 \003(\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "secret" Version: 0 } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:03.584396Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:22:03.584523Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 142us result status StatusSuccess 2026-01-04T02:22:03.584946Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" 
PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "dir" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "secret" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:03.585451Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:22:03.585562Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir" took 128us result status StatusSuccess 2026-01-04T02:22:03.586048Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\000\020\200\004\032\005user2 \003(\001\n\020\010\001\020\200\004\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user2 \003(\001" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 3 } ChildrenExist: 
true } Children { Name: "secret" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:03.586563Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:22:03.586677Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 143us result status StatusSuccess 2026-01-04T02:22:03.587074Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\010\032\005user1 \003\n\016\010\001\020\200\010\032\005user2 \003" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "dir" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "secret" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000038 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |81.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::DropUnexistingSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:22:00.557620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:22:00.557710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:00.557750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:22:00.557791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:22:00.575114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:22:00.575198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:22:00.575313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:00.575407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:22:00.576400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:22:00.577389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:22:00.790545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:22:00.790610Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:00.833529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:22:00.846303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:22:00.846579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:22:00.857256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:22:00.857583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:22:00.858338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:00.858607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:22:00.866092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:00.866316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:22:00.867939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:00.868015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:00.868169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:22:00.868224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:00.868274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:22:00.868456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:22:01.152640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:01.153714Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:01.153904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:01.154001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:01.154116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:01.154191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:01.154287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:01.154400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:01.154529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:01.154629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:01.154705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:01.154798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:01.154911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:01.154994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:01.155088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
2:04.790894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:22:04.791036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:22:04.793104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:22:04.793245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-04T02:22:04.793574Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:04.793710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 8589936752 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:04.793765Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:22:04.794065Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:22:04.794119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:22:04.794282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:22:04.794353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:22:04.801663Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:04.801732Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:04.801992Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:04.802053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-04T02:22:04.802493Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:22:04.802554Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-04T02:22:04.802663Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:22:04.802700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:22:04.802741Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:22:04.802792Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:22:04.802838Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:22:04.802885Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:22:04.802932Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:22:04.802964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:22:04.803052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:22:04.803095Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-04T02:22:04.803140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-04T02:22:04.803688Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:22:04.803822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:22:04.803884Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:22:04.803930Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:22:04.803973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:22:04.804075Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:22:04.814366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:22:04.814886Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:04.815298Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:674:2663] Bootstrap 2026-01-04T02:22:04.816405Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:674:2663] Become StateWork (SchemeCache [2:679:2668]) 2026-01-04T02:22:04.816726Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/test-secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:22:04.816910Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/test-secret" took 216us result status StatusPathDoesNotExist 2026-01-04T02:22:04.817121Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/test-secret\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/test-secret" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:22:04.819753Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:674:2663] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:22:04.824151Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResults wait txId: 101 2026-01-04T02:22:04.827283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropSecret Drop { Name: "test-secret" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:22:04.827482Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_secret.cpp:120: [72057594046678944] TDropSecret Propose, opId: 101:0, path: /MyRoot/test-secret 2026-01-04T02:22:04.827603Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/test-secret', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2026-01-04T02:22:04.830327Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/test-secret\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at 
schemeshard: 72057594046678944 2026-01-04T02:22:04.830604Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/test-secret', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: DROP SECRET, path: /MyRoot/test-secret TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:22:04.830959Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:22:04.830999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:22:04.831373Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:22:04.831475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:22:04.831512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:690:2679] TestWaitNotification: OK eventTxId 101 |81.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> test_ttl.py::TestTTLOnIndexedTable::test_case >> TSchemeShardSecretTest::AlterExistingSecretMultipleTImes >> TSchemeShardSecretTest::AsyncCreateDifferentSecrets |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> KqpWorkloadService::TestDiskIsFullRunBelowQueryLimit [GOOD] >> KqpWorkloadService::TestDiskIsFullRunOverQueryLimit |81.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> TSchemeShardSecretTest::InheritPermissionsWithDifferentInheritanceTypes [GOOD] >> TSchemeShardSecretTest::ReadOnlyMode |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |81.7%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |81.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v1] [GOOD] |81.7%| [TM] {RESULT} ydb/core/tx/datashard/ut_rs/unittest >> TSchemeShardSecretTest::CreateSecretOverExistingObject [GOOD] >> TSchemeShardSecretTest::CreateSecretNoInheritPermissions |81.7%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/tx-replication-controller-ut_stream_creator |81.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TEvaluateExprInViewTest::NakedCallToCurrentTimeFunction [GOOD] >> TSelectFromViewTest::OneTable >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] >> 
test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] >> TSchemeShardSecretTest::ReadOnlyMode [GOOD] |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |81.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/ydb-core-tx-schemeshard-ut_topic_splitmerge >> TestSqsTopicHttpProxy::TestReceiveMessageEmpty >> TSchemeShardSecretTest::AlterExistingSecretMultipleTImes [GOOD] >> TSchemeShardSecretTest::AlterUnexistingSecret >> TSchemeShardSecretTest::AsyncCreateDifferentSecrets [GOOD] >> TSchemeShardSecretTest::AsyncCreateSameSecret ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:22:05.288046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:22:05.288188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:05.288234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:22:05.288270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:22:05.288307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:22:05.288389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:22:05.288443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:05.288508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:22:05.289338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:22:05.289616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:22:05.414817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:22:05.414884Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:05.432427Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:22:05.432662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:22:05.432818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:22:05.469340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:22:05.469586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:22:05.470323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:05.470593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:22:05.475351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:05.475538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:22:05.476718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:05.476782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:05.476891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:22:05.476933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:05.477054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:22:05.477229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:22:05.735713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:05.736826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:05.736949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:05.737026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:05.737095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:05.737162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:05.737242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:05.737316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:05.738666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:05.738782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:22:05.738852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:05.738921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:05.738983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:05.739061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:05.739196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
: true 2026-01-04T02:22:09.034954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-01-04T02:22:09.035138Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:22:09.035624Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000039 2026-01-04T02:22:09.036230Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:09.036377Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 8589936752 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:09.036433Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_secret.cpp:66: [72057594046678944] TCreateSecret::TPropose, opId: 103:0HandleReply TEvOperationPlan: step# 5000039 2026-01-04T02:22:09.036607Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 240 2026-01-04T02:22:09.036789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:22:09.036858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 FAKE_COORDINATOR: Erasing txId 103 2026-01-04T02:22:09.043269Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:09.043313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:22:09.043453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:22:09.043565Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:09.043599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:839:2796], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-04T02:22:09.043634Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:839:2796], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-04T02:22:09.043895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:22:09.043941Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-04T02:22:09.044041Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:22:09.044094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:22:09.044136Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:22:09.044168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:22:09.044208Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-04T02:22:09.044250Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:22:09.044287Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:22:09.044341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:22:09.044419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:22:09.044456Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2026-01-04T02:22:09.044491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 4 2026-01-04T02:22:09.044519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-04T02:22:09.045182Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:22:09.045263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:22:09.045298Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:22:09.045336Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-04T02:22:09.045375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:22:09.046595Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 4 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:22:09.046674Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:22:09.046703Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:22:09.046730Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-04T02:22:09.046758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:22:09.046827Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-04T02:22:09.049723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:22:09.049826Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-04T02:22:09.050076Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:22:09.050118Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:22:09.050520Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:22:09.050605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:22:09.050643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:872:2827] TestWaitNotification: OK eventTxId 103 2026-01-04T02:22:09.051161Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-name" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:22:09.051341Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-name" took 217us result status StatusSuccess 2026-01-04T02:22:09.051648Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-name" PathDescription { Self { Name: "test-name" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: 
"\020\001" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-name" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |81.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretNoInheritPermissions [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueUrlEmpty >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> TestSqsTopicHttpProxy::TestReceiveMessageNonExistingQueue >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretNoInheritPermissions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:22:06.056758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:22:06.056854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:06.056898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:22:06.056939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 
2026-01-04T02:22:06.056977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:22:06.057056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:22:06.057139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:06.057210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:22:06.058281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:22:06.058629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:22:06.242688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:22:06.242772Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:06.262241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:22:06.268508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:22:06.268676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:22:06.274297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:22:06.274543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:22:06.275224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:06.275469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:22:06.279468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:06.279664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:22:06.280963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:06.281030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:06.281143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:22:06.281195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:06.281239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:22:06.281402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:22:06.423375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:06.424192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:06.424292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:06.424345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:06.424409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:06.424450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:06.424487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:06.424527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:06.424574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:06.424655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:06.424699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:06.424755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:06.424837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:06.424897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:06.424960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ration: 2 LocalPathId: 40 Version: 6 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:22:10.518122Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 6 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:22:10.518167Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:22:10.518200Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 6 2026-01-04T02:22:10.518254Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-04T02:22:10.519493Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:22:10.519599Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 5 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:22:10.519642Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:22:10.519673Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 5 2026-01-04T02:22:10.519707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-04T02:22:10.519804Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-01-04T02:22:10.523617Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:22:10.523991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:22:10.525153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-04T02:22:10.525422Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-04T02:22:10.525482Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 
2026-01-04T02:22:10.525951Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-04T02:22:10.526064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:22:10.526107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:848:2837] TestWaitNotification: OK eventTxId 105 2026-01-04T02:22:10.526654Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:22:10.526879Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/secret" took 253us result status StatusSuccess 2026-01-04T02:22:10.527314Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/secret" PathDescription { Self { Name: "secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 104 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 1 EffectiveACLVersion: 3 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "secret" Version: 0 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:10.527867Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/subdir/secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:22:10.528009Z node 2 :SCHEMESHARD_DESCRIBE 
INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/subdir/secret" took 156us result status StatusSuccess 2026-01-04T02:22:10.528365Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/subdir/secret" PathDescription { Self { Name: "secret" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 105 CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" EffectiveACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 1 EffectiveACLVersion: 3 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "secret" Version: 0 } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:10.528837Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/subdir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:22:10.529005Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/subdir" took 165us result status StatusSuccess 2026-01-04T02:22:10.529397Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/subdir" PathDescription { Self { Name: "subdir" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 105 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "\n\020\010\000\020\200\004\032\005user2 \003(\001\n\020\010\001\020\200\004\032\005user1 \003(\001\n\020\010\001\020\200\010\032\005user1 \003(\001\n\020\010\001\020\200\004\032\005user2 \003(\001" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 2 UserAttrsVersion: 1 
ChildrenVersion: 3 } ChildrenExist: true } Children { Name: "secret" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 105 CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "\n\016\010\000\020\200\004\032\005user2 \003\n\016\010\001\020\200\004\032\005user1 \003\n\016\010\001\020\200\004\032\005user2 \003\020\001" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |81.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidQueueUrl >> TSchemeShardSecretTest::AsyncCreateSameSecret [GOOD] >> TSchemeShardSecretTest::AsyncAlterSameSecret |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_restart_pdisk [GOOD] |81.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_all_types-pk_types12-all_types12-index12---] [GOOD] >> TestSqsTopicHttpProxy::TestSendMessage >> TestSqsTopicHttpProxy::TestSendMessageTooBig |81.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |81.7%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber |81.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_subscriber/ydb-core-tx-scheme_board-ut_subscriber >> TSchemeShardSecretTest::AlterUnexistingSecret [GOOD] >> TSchemeShardSecretTest::AlterNotASecret >> TSchemeShardSecretTest::AsyncAlterSameSecret [GOOD] >> TestSqsTopicHttpProxy::TestDeleteMessage >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] >> TSchemeShardSecretTest::AlterNotASecret [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[delete] [GOOD] Test command err: AAA 
/home/runner/.ya/build/build_root/c5zo/00215a/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk17/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.delete/audit_log.blaga4mz.txt 2026-01-04T02:21:53.385076Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:21:53.385026Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2026-01-04T02:21:53.152517Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |81.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::AsyncAlterSameSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:22:07.839000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:22:07.839088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:07.846319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:22:07.846405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:22:07.846453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:22:07.846500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:22:07.846609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:07.846693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:22:07.847669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:22:07.847999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:22:08.305863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:22:08.305939Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:08.366715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:22:08.389922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:22:08.390171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:22:08.408640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:22:08.408917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:22:08.415461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:08.415881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:22:08.426668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:08.426899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:22:08.449613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:08.449708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:08.449877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:22:08.449951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:08.450004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:22:08.450928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:22:08.742332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.743491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.743620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.743707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.743794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.743861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.743933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.744032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.744138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.744226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: 
EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.744293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.744357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.744450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.744533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.744620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
n_alter_secret.cpp:31: [72057594046678944] TAlterSecret TPropose operationId# 103:0 ProgressState 2026-01-04T02:22:13.293945Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2026-01-04T02:22:13.294090Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:22:13.294587Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusMultipleModifications Reason: "Check failed: path: \'/MyRoot/dir/test-secret\', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateAlter)" TxId: 104 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 102, at schemeshard: 72057594046678944 2026-01-04T02:22:13.294771Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusMultipleModifications, reason: Check failed: path: '/MyRoot/dir/test-secret', error: path is under operation (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSecret, state: EPathStateAlter), operation: ALTER SECRET, path: /MyRoot/dir/test-secret 2026-01-04T02:22:13.296808Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-01-04T02:22:13.296953Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 2026-01-04T02:22:13.297303Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:13.297489Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 12884904048 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:13.297562Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_secret.cpp:44: [72057594046678944] TAlterSecret TPropose operationId# 103:0HandleReply TEvOperationPlan: step# 5000040 2026-01-04T02:22:13.297709Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 240 2026-01-04T02:22:13.297951Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:22:13.300392Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:13.300456Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard 
DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:22:13.300643Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:13.300695Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2213], at schemeshard: 72057594046678944, txId: 103, path id: 39 FAKE_COORDINATOR: Erasing txId 103 2026-01-04T02:22:13.301214Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:22:13.301270Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-04T02:22:13.301393Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:22:13.301436Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:22:13.301486Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:22:13.301543Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:22:13.301595Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-04T02:22:13.301644Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:22:13.301690Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:22:13.301735Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:22:13.301847Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:22:13.301906Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2026-01-04T02:22:13.301955Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2026-01-04T02:22:13.302591Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:22:13.302720Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:22:13.302771Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:22:13.302818Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 
72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-04T02:22:13.302888Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:22:13.303001Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-04T02:22:13.306711Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2026-01-04T02:22:13.307073Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:22:13.307136Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2026-01-04T02:22:13.307236Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-04T02:22:13.307260Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-04T02:22:13.308030Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:22:13.308171Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:22:13.308233Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [3:767:2757] 2026-01-04T02:22:13.308428Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-04T02:22:13.308579Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:22:13.308610Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:767:2757] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 2026-01-04T02:22:13.309123Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-01-04T02:22:13.309356Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 271us result status StatusSuccess 2026-01-04T02:22:13.309714Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir/test-secret" PathDescription { Self { Name: "test-secret" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 4 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Value: "test-value-new" Version: 1 } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |81.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> data_paging.py::TestDataPaging::test_data_paging_solomon [GOOD] >> basic_reading.py::TestBasicReading::test_basic_reading_solomon [GOOD] >> basic_reading.py::TestBasicReading::test_basic_reading_monitoring >> data_paging.py::TestDataPaging::test_listing_paging_monitoring >> TestSqsTopicHttpProxy::TestSendMessageBatch |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] |81.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> TestSqsTopicHttpProxy::TestGetQueueUrlWithConsumer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::AlterNotASecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:22:07.960948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:22:07.961039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:07.961078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:22:07.961115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:22:07.961333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:22:07.961366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:22:07.961419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:07.961487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:22:07.967035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:22:07.967391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:22:08.265431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:22:08.265495Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:08.306489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:22:08.306731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:22:08.306931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:22:08.361527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:22:08.361794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:22:08.362565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:08.362857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:22:08.369126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:08.369338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:22:08.370838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:08.370909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:08.371042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:22:08.371092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:08.371210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 
2026-01-04T02:22:08.371418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:22:08.565494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.566628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.566770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.566865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.566949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.567017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.567095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.567210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.567323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.567405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.567490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.567563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.567653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.567737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:08.567841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: 
true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... hemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:13.721397Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:22:13.721517Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:13.721553Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-04T02:22:13.721595Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2213], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-04T02:22:13.721662Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:22:13.721706Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:22:13.721849Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:22:13.721894Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:22:13.721941Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:22:13.721981Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:22:13.722022Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-04T02:22:13.722094Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:22:13.722139Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:22:13.722175Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:22:13.722273Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:22:13.722316Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-04T02:22:13.722372Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-04T02:22:13.722408Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-04T02:22:13.723656Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:22:13.723742Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:22:13.723800Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:22:13.723867Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-04T02:22:13.723916Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:22:13.724685Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:22:13.724797Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:22:13.724834Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:22:13.724868Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:22:13.724915Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:22:13.725002Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:22:13.729858Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:22:13.730393Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:22:13.730658Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:22:13.730709Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:22:13.731144Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:22:13.731253Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 
2026-01-04T02:22:13.731297Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [3:721:2711] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-04T02:22:13.739802Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSecret AlterSecret { Name: "dir" Value: "" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:22:13.740034Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_secret.cpp:113: [72057594046678944] TAlterSecret Propose, path: /MyRoot/dir, opId: 102:0 2026-01-04T02:22:13.740211Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-04T02:22:13.743875Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/dir\', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-04T02:22:13.744182Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), operation: ALTER SECRET, path: /MyRoot/dir TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:22:13.744558Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:22:13.744605Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:22:13.745086Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:22:13.745203Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:22:13.745243Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:728:2718] TestWaitNotification: OK eventTxId 102 2026-01-04T02:22:13.745767Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-01-04T02:22:13.745986Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir" took 250us result status StatusSuccess 2026-01-04T02:22:13.746388Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 38 SchemeshardId: 
72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] |81.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |81.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_replace_config [GOOD] |81.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/002044/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk6/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_bad_dynconfig/audit_log.skge2gpl.txt 2026-01-04T02:21:55.062847Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"ERROR","subject":"root@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |81.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> data_paging.py::TestDataPaging::test_listing_paging_monitoring [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> 
basic_reading.py::TestBasicReading::test_basic_reading_monitoring [GOOD] >> TKqpScanData::ArrowToUnboxedValueConverter >> TKqpScanData::ArrowToUnboxedValueConverter [GOOD] >> TKqpScanData::DifferentNumberOfInputAndResultColumns [GOOD] >> TKqpScanData::EmptyColumns [GOOD] >> TKqpScheduler::SingleDatabasePoolQueryStructure [GOOD] >> TKqpScheduler::WeightedDatabase [GOOD] |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] |81.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> TestSqsTopicHttpProxy::TestGetQueueUrl >> TKqpScanFetcher::ScanDelayedRetry |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::WeightedDatabase [GOOD] |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest >> TKqpScanFetcher::ScanDelayedRetry [GOOD] >> TKqpScheduler::AddUpdatePools [GOOD] >> TKqpScheduler::AddUpdateDeleteNonExistent [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueUrlEmpty [GOOD] >> StreamCreator::TopicAutoPartitioning |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::EmptyColumns [GOOD] |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionTo >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidThreeChildren ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::AddUpdateDeleteNonExistent [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=535;kqp_scan_fetcher_actor.cpp:50 :META:Reads { ShardId: 1001001 KeyRanges { } } 2026-01-04T02:22:17.763964Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:219: SelfId: [1:7:2054]. Got EvDeliveryProblem, TabletId: 1001001, NotDelivered: 0, Starting 2026-01-04T02:22:17.764046Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:619: SelfId: [1:7:2054]. TKqpScanFetcherActor: broken pipe with tablet 1001001, restarting scan from last received key , attempt #1 (total 1) schedule after 0.000000s 2026-01-04T02:22:17.774531Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:219: SelfId: [1:7:2054]. Got EvDeliveryProblem, TabletId: 1001001, NotDelivered: 0, Starting 2026-01-04T02:22:17.774610Z node 1 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:619: SelfId: [1:7:2054]. 
TKqpScanFetcherActor: broken pipe with tablet 1001001, restarting scan from last received key , attempt #2 (total 2) schedule after 0.250000s |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest >> TestSqsTopicHttpProxy::TestGetQueueUrlOfNotExistingQueue >> TestSqsTopicHttpProxy::TestReceiveMessageNonExistingQueue [GOOD] >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidQueueUrl [GOOD] >> TestSqsTopicHttpProxy::TestSendMessage [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions >> TestSqsTopicHttpProxy::TestReceiveMessageReturnToQueue >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidSize >> TSubscriberSyncQuorumTest::OneRingGroup >> TSubscriberTest::StrongNotificationAfterCommit >> TestSqsTopicHttpProxy::TestReceiveMessageEmpty [GOOD] >> TestSqsTopicHttpProxy::TestSendMessageBadQueueUrl >> TSubscriberSyncQuorumTest::OneRingGroup [GOOD] >> TSubscriberSyncQuorumTest::OneSynchronizedRingGroup >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidThreeChildren [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithWrongPartition >> TSubscriberTest::StrongNotificationAfterCommit [GOOD] >> TSubscriberTest::Sync >> TestSqsTopicHttpProxy::TestSendMessageTooBig [GOOD] >> TestSqsTopicHttpProxy::TestDeleteMessage [GOOD] >> TSubscriberSyncQuorumTest::OneSynchronizedRingGroup [GOOD] >> TSubscriberSyncQuorumTest::OneWriteOnlyRingGroup >> TSubscriberSyncQuorumTest::ReplicaConfigMismatch >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionTo [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithManyPartition >> TestSqsTopicHttpProxy::TestReceiveMessageGroup >> TSubscriberSyncQuorumTest::OneWriteOnlyRingGroup [GOOD] >> TSubscriberSyncQuorumTest::ReplicaConfigMismatch [GOOD] >> TSubscriberSyncQuorumTest::ReconfigurationWithDelayedSyncRequest >> TSubscriberTest::Sync [GOOD] >> TestSqsTopicHttpProxy::TestSendMessageBatchLong >> TSubscriberSyncQuorumTest::ReconfigurationWithDelayedSyncRequest [GOOD] >> TSubscriberSyncQuorumTest::ReconfigurationWithCurrentSyncRequest >> TestSqsTopicHttpProxy::TestDeleteMessageBatch >> TSubscriberSyncQuorumTest::ReconfigurationWithCurrentSyncRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSyncQuorumTest::OneWriteOnlyRingGroup [GOOD] Test command err: ... waiting for initial path lookups 2026-01-04T02:22:20.193642Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:22:20.196147Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-01-04T02:22:20.196256Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-01-04T02:22:20.196335Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [1:2199047594611:0] 2026-01-04T02:22:20.196611Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12345 2026-01-04T02:22:20.196750Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:20:2066] 2026-01-04T02:22:20.196925Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:21:2066] 2026-01-04T02:22:20.197001Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:19:2066][TestPath] Set up state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:20.197083Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-01-04T02:22:20.197138Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:20.197278Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12345 2026-01-04T02:22:20.197357Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12345 2026-01-04T02:22:20.197410Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:22:2066], cookie# 12345 2026-01-04T02:22:20.197551Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12345 2026-01-04T02:22:20.197609Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12345 2026-01-04T02:22:20.197767Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle 
NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:20:2066], cookie# 12345 2026-01-04T02:22:20.197854Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:22:20.197947Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:21:2066], cookie# 12345 2026-01-04T02:22:20.197992Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:19:2066][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:22:20.198065Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:22:2066], cookie# 12345 2026-01-04T02:22:20.198097Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:19:2066][TestPath] Sync cookie mismatch: sender# [1:22:2066], cookie# 12345, current cookie# 0 2026-01-04T02:22:20.198141Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-01-04T02:22:20.198208Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [1:24339059:0] whose ring group state is: 0 2026-01-04T02:22:20.198335Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12346 2026-01-04T02:22:20.198477Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12346 2026-01-04T02:22:20.198536Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12346 2026-01-04T02:22:20.198597Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:22:2066], cookie# 12346 2026-01-04T02:22:20.198637Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-04T02:22:20.198767Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12346 2026-01-04T02:22:20.198862Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:20:2066], cookie# 12346 2026-01-04T02:22:20.198907Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: 
[main][1:19:2066][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-01-04T02:22:20.198943Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:19:2066][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2026-01-04T02:22:20.199013Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:20:2066] 2026-01-04T02:22:20.199063Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:20.199114Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:21:2066], cookie# 12346 2026-01-04T02:22:20.199141Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:19:2066][TestPath] Sync cookie mismatch: sender# [1:21:2066], cookie# 12346, current cookie# 0 ... waiting for initial path lookups 2026-01-04T02:22:20.489032Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[2:3298559222387:0], [2:4398070850163:0], [2:5497582477939:0]] WriteOnly: 0 State: 1}]} 2026-01-04T02:22:20.490250Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:4:2051] 2026-01-04T02:22:20.490345Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:5:2052] 2026-01-04T02:22:20.490396Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:6:2053] 2026-01-04T02:22:20.490432Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:13:2060] 2026-01-04T02:22:20.490474Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:14:2061] 2026-01-04T02:22:20.490517Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:15:2062] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr:: ... 919: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [2:30:2075], cookie# 12346 2026-01-04T02:22:20.495089Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][2:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2026-01-04T02:22:20.495111Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 0, failures# 1 2026-01-04T02:22:20.495155Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [2:32:2075], cookie# 12346 2026-01-04T02:22:20.495195Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][2:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2026-01-04T02:22:20.495220Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 1, failures# 1 2026-01-04T02:22:20.495259Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [2:33:2075], cookie# 12346 2026-01-04T02:22:20.495292Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][2:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2026-01-04T02:22:20.495314Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][2:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 2, failures# 1, partial# 1 2026-01-04T02:22:20.495338Z node 2 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][2:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 ... waiting for initial path lookups 2026-01-04T02:22:20.776593Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[3:3298559222387:0], [3:4398070850163:0], [3:5497582477939:0]] WriteOnly: 1 State: 0}]} 2026-01-04T02:22:20.777219Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-01-04T02:22:20.777291Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-01-04T02:22:20.777333Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [3:2199047594611:0] Poisoning replica: [3:3298559222387:0] Poisoning replica: [3:4398070850163:0] Poisoning replica: [3:5497582477939:0] 2026-01-04T02:22:20.777621Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12345 2026-01-04T02:22:20.777726Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2026-01-04T02:22:20.777847Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:30:2075] 2026-01-04T02:22:20.777938Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:28:2075][TestPath] Set up state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:20.778028Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-01-04T02:22:20.778072Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:20.778165Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12345 2026-01-04T02:22:20.778221Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12345 2026-01-04T02:22:20.778296Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:31:2075], cookie# 12345 2026-01-04T02:22:20.778385Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2026-01-04T02:22:20.778439Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 2026-01-04T02:22:20.778595Z node 3 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:29:2075], cookie# 12345 2026-01-04T02:22:20.778642Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:22:20.778689Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:30:2075], cookie# 12345 2026-01-04T02:22:20.778727Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:22:20.778806Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12345 2026-01-04T02:22:20.778843Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:28:2075][TestPath] Sync cookie mismatch: sender# [3:31:2075], cookie# 12345, current cookie# 0 2026-01-04T02:22:20.778904Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-01-04T02:22:20.778948Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [3:24339059:0] whose ring group state is: 0 2026-01-04T02:22:20.779076Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12346 2026-01-04T02:22:20.779205Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12346 2026-01-04T02:22:20.779286Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12346 2026-01-04T02:22:20.779348Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12346 2026-01-04T02:22:20.779400Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-04T02:22:20.779493Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12346 2026-01-04T02:22:20.779565Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:29:2075], cookie# 12346 
2026-01-04T02:22:20.779602Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-01-04T02:22:20.779635Z node 3 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][3:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2026-01-04T02:22:20.779694Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2026-01-04T02:22:20.779739Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:20.779782Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:30:2075], cookie# 12346 2026-01-04T02:22:20.779807Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:28:2075][TestPath] Sync cookie mismatch: sender# [3:30:2075], cookie# 12346, current cookie# 0 |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Sync [GOOD] Test command err: 2026-01-04T02:22:20.181582Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:22:20.183719Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2026-01-04T02:22:20.183830Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2026-01-04T02:22:20.183873Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2026-01-04T02:22:20.183944Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-01-04T02:22:20.184032Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-01-04T02:22:20.184096Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:36:2066][path] Set up state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:20.184169Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2026-01-04T02:22:20.184224Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { 
Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:20.184589Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2026-01-04T02:22:20.184672Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-01-04T02:22:20.184718Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:36:2066][path] Update to strong state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:20.184878Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2026-01-04T02:22:20.184941Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-01-04T02:22:20.185006Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:20.661249Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:37:2067][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:22:20.661891Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:3:2050] 2026-01-04T02:22:20.661963Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:6:2053] 2026-01-04T02:22:20.662001Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:9:2056] 2026-01-04T02:22:20.662061Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:38:2067] 2026-01-04T02:22:20.662115Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:39:2067] 2026-01-04T02:22:20.662169Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:37:2067][path] Set up state: owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:20.662309Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:40:2067] 2026-01-04T02:22:20.662366Z node 3 
:SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:37:2067][path] Path was already updated: owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:20.662470Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:37:2067][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:35:2065], cookie# 1 2026-01-04T02:22:20.662554Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:38:2067], cookie# 1 2026-01-04T02:22:20.662614Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:39:2067], cookie# 1 2026-01-04T02:22:20.662649Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:40:2067], cookie# 1 2026-01-04T02:22:20.662765Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:3:2050], cookie# 1 2026-01-04T02:22:20.662807Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 1 2026-01-04T02:22:20.662837Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:9:2056], cookie# 1 2026-01-04T02:22:20.662887Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:38:2067], cookie# 1 2026-01-04T02:22:20.662937Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:37:2067][path] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:22:20.662970Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:39:2067], cookie# 1 2026-01-04T02:22:20.663013Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:37:2067][path] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:22:20.663088Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:40:2067], cookie# 1 2026-01-04T02:22:20.663124Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:37:2067][path] Sync cookie mismatch: sender# [3:40:2067], cookie# 1, current cookie# 0 |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithWrongPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::Boot |81.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |81.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> TestSqsTopicHttpProxy::TestGetQueueUrlWithConsumer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSyncQuorumTest::ReconfigurationWithCurrentSyncRequest [GOOD] Test command err: ... waiting for initial path lookups 2026-01-04T02:22:20.780880Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:22:20.782608Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:4:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [1:23:2066] 2026-01-04T02:22:20.782674Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:4:2051] Upsert description: path# TestPath 2026-01-04T02:22:20.782810Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:4:2051] Subscribe: subscriber# [1:23:2066], path# TestPath, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-04T02:22:20.782990Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:5:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [1:24:2066] 2026-01-04T02:22:20.783015Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:5:2052] Upsert description: path# TestPath 2026-01-04T02:22:20.783069Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:5:2052] Subscribe: subscriber# [1:24:2066], path# TestPath, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-04T02:22:20.783193Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [1:25:2066] 2026-01-04T02:22:20.783231Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:6:2053] Upsert description: path# TestPath 2026-01-04T02:22:20.783264Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:6:2053] Subscribe: subscriber# [1:25:2066], path# TestPath, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-04T02:22:20.783316Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-01-04T02:22:20.783367Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:4:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:23:2066] 2026-01-04T02:22:20.783421Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-01-04T02:22:20.783465Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:5:2052] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:24:2066] 2026-01-04T02:22:20.783498Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] 2026-01-04T02:22:20.783542Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:25:2066] ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [1:2199047594611:0] 2026-01-04T02:22:20.783821Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12345 2026-01-04T02:22:20.783894Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:20:2066] 2026-01-04T02:22:20.784027Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:21:2066] 2026-01-04T02:22:20.784108Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:19:2066][TestPath] Set up state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:20.784170Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-01-04T02:22:20.784211Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:20.784326Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12345 2026-01-04T02:22:20.784378Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:4:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:23:2066], cookie# 12345 2026-01-04T02:22:20.784450Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12345 2026-01-04T02:22:20.784486Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:5:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:24:2066], cookie# 12345 2026-01-04T02:22:20.784522Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:22:2066], cookie# 12345 2026-01-04T02:22:20.784587Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: 
[replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12345 2026-01-04T02:22:20.784635Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12345 2026-01-04T02:22:20.784788Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:20:2066], cookie# 12345 2026-01-04T02:22:20.784831Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:22:20.784899Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:21:2066], cookie# 12345 2026-01-04T02:22:20.784950Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:19:2066][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:22:20.785020Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:22:2066], cookie# 12345 2026-01-04T02:22:20.785049Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:19:2066][TestPath] Sync cookie mismatch: sender# [1:22:2066], cookie# 12345, current cookie# 0 2026-01-04T02:22:20.785103Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-01-04T02:22:20.785152Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Updating cluster state generation on replica: [1:24339059:0] 2026-01-04T02:22:20.785266Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:18:2065], cookie# 12346 2026-01-04T02:22:20.785386Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:1240: [1:4:2051] Handle {EvUpdateGroupConfig GroupConfig: empty BoardConfig: empty SchemeBoardConfig: {RingGroups# [0:{NToSelect# 3 Rings# [0:{[[1:24339059:0]]} 1:{[[1:1099535966835:0]]} 2:{[[1:2199047594611:0]]}}] StateStorageVersion# 0 CompatibleVersions# [] ClusterStateGeneration# 1 ClusterStateGuid# 0}} 2026-01-04T02:22:20.785518Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:20:2066], cookie# 12346 2026-01-04T02:22:20.785567Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:4:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:23:2066], cookie# 12346 2026-01-04T02:22:20.785631Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:24:2066][TestPath] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:21:2066], cookie# 12346 2026-01-04T02:22:20.785662Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:5:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:24:2066], cookie# 12346 2026-01-04T02:22:20.785714Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:22:2066], cookie# 12346 2026-01-04T02:22:20.785755Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:19:2066][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-04T02:22:20.785868Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { Generation: 1 Guid: 0 } }: sender# [1:4:2051], cookie# 12346 2026-01-04T02:22:20.785913Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12346 2026-01-04T02:22:20.785977Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { Generation: 1 Guid: 0 } }: sender# [1:20:2066], cookie# 12346 2026-01-04T02:22:20.786080Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:952: [main][1:19:2066][TestPath] Cluster State mismatch in sync version response: sender# [1:20:2066], cookie# 12346, subscriber cluster state# {Generation: 0, GUID: 0}, replica cluster state# {Generation: 1 Guid: 0} 2026-01-04T02:22:20.786123Z node 1 :SCHEME_BOARD_S ... [TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:22:21.345501Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-01-04T02:22:21.345590Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-01-04T02:22:21.345643Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-04T02:22:21.345882Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:19:2066][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:18:2065], cookie# 12345 ... waiting for some sync responses 2026-01-04T02:22:21.345965Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:20:2066] 2026-01-04T02:22:21.346031Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:21:2066] 2026-01-04T02:22:21.346088Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:19:2066][TestPath] Set up state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:21.346143Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:22:2066] 2026-01-04T02:22:21.346210Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:19:2066][TestPath] Ignore empty state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:21.346311Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:20:2066], cookie# 12345 2026-01-04T02:22:21.346374Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:21:2066], cookie# 12345 2026-01-04T02:22:21.346421Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:22:2066], cookie# 12345 2026-01-04T02:22:21.346515Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2026-01-04T02:22:21.346581Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 2026-01-04T02:22:21.346623Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 12345 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 12345 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 12345 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 12345 ... waiting for some sync responses (done) ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-04T02:22:21.346910Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:20:2066], cookie# 12345 2026-01-04T02:22:21.346966Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:22:21.347104Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:22:21.347161Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:1041: [main][3:19:2066][TestPath] Delay current sync request: 12345 ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-04T02:22:21.347628Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:21:2066], cookie# 12345 2026-01-04T02:22:21.347665Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:19:2066][TestPath] Sync cookie mismatch: sender# [3:21:2066], cookie# 12345, current cookie# 0 2026-01-04T02:22:21.347708Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:22:2066], cookie# 12345 2026-01-04T02:22:21.347731Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:19:2066][TestPath] Sync cookie mismatch: sender# [3:22:2066], cookie# 12345, current cookie# 0 2026-01-04T02:22:21.348479Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:30:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-01-04T02:22:21.348546Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:31:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-01-04T02:22:21.348604Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] 2026-01-04T02:22:21.348666Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:27:2066] 2026-01-04T02:22:21.348727Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:28:2066] 2026-01-04T02:22:21.348796Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:19:2066][TestPath] Set up state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:21.348853Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: 
[main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2066] 2026-01-04T02:22:21.348894Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:19:2066][TestPath] Ignore empty state: owner# [3:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:21.348970Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:30:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:27:2066], cookie# 12345 2026-01-04T02:22:21.349071Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:31:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:28:2066], cookie# 12345 2026-01-04T02:22:21.349110Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2066], cookie# 12345 2026-01-04T02:22:21.349191Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:30:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2026-01-04T02:22:21.349237Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:31:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 2026-01-04T02:22:21.349282Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:32:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 12345 2026-01-04T02:22:21.349362Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:27:2066], cookie# 12345 2026-01-04T02:22:21.349398Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:19:2066][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:22:21.349461Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:28:2066], cookie# 12345 2026-01-04T02:22:21.349507Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:19:2066][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:22:21.349550Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:29:2066], cookie# 12345 2026-01-04T02:22:21.349571Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:19:2066][TestPath] Sync cookie mismatch: sender# [3:29:2066], cookie# 12345, current cookie# 0 |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> 
test_auditlog.py::test_dml_requests_arent_logged_when_sid_is_expected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/002153/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk13/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_sid_is_expected/audit_log.8lg6vnd7.txt |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] |81.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |81.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithManyPartition [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlapAndCreateRootLevelSibling >> TestSqsTopicHttpProxy::TestReceiveMessage >> TestSqsTopicHttpProxy::TestSendMessageBatch [GOOD] >> listing_batching.py::TestListingBatching::test_listing_batching_solomon [GOOD] >> listing_batching.py::TestListingBatching::test_listing_batching_monitoring >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] >> TSubscriberCombinationsTest::CombinationsRootDomain >> TestSqsTopicHttpProxy::TestSendMessageBatchEmpty |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] |81.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> TSubscriberSyncQuorumTest::TwoRingGroups >> TSelectFromViewTest::OneTable [GOOD] >> TSelectFromViewTest::OneTableUsingRelativeName >> TSchemeShardTopicSplitMergeTest::MargeUnorderedPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions2 >> TestSqsTopicHttpProxy::TestGetQueueUrl [GOOD] >> TSubscriberSyncQuorumTest::TwoRingGroups [GOOD] >> TSubscriberTest::Boot >> TSubscriberTest::NotifyUpdate >> TSubscriberSinglePathUpdateTest::OneRingGroup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:22:18.658946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:22:18.659063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:18.659103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:22:18.659145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:22:18.659189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:22:18.659224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:22:18.659272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:18.659331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:22:18.660202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:22:18.660485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:22:18.748019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:22:18.748106Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:18.762272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:22:18.762497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:22:18.762676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:22:18.772869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:22:18.773086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:22:18.773768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:18.774061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:22:18.776761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:18.776935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:22:18.778053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:18.778118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:18.778224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:22:18.778274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2026-01-04T02:22:18.778364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:22:18.778544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:22:18.920381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.921356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.921468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.921534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.921598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.921672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.921734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:22:18.921856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.921954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.922055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.922125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.922206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.922283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.922364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.922445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... eshard: 72057594046678944, cookie: 281474976710672 2026-01-04T02:22:23.314355Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710672 2026-01-04T02:22:23.314439Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [3:216:2217] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/.sys/streaming_queries' 2026-01-04T02:22:23.314490Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [3:216:2217] at schemeshard: 72057594046678944 Send TEvRosterUpdateFinished 2026-01-04T02:22:23.321185Z node 3 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 2026-01-04T02:22:23.332101Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:22:23.332328Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:22:23.332532Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:22:23.332583Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:22:23.332768Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:22:23.332841Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:23.337935Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:23.338142Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:22:23.338408Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:22:23.338474Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2026-01-04T02:22:23.338527Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:22:23.338571Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:22:23.341147Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:22:23.341203Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:22:23.341259Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:22:23.342807Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:22:23.342851Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:22:23.342905Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:22:23.342970Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:22:23.343098Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:22:23.344742Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:22:23.344871Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-04T02:22:23.345207Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:23.345344Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 12884904048 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:23.345396Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:22:23.345687Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:22:23.345756Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:22:23.345962Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:22:23.346055Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:22:23.348045Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:23.348125Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:23.348361Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:23.348412Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2213], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-04T02:22:23.348498Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:22:23.348549Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-04T02:22:23.348678Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:22:23.348749Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:22:23.348801Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:22:23.348839Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:22:23.348882Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:22:23.348931Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:22:23.348969Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:22:23.349006Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:22:23.349083Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:22:23.349127Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-04T02:22:23.349165Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-04T02:22:23.350156Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:22:23.350278Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:22:23.350329Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:22:23.350375Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:22:23.350420Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:22:23.350517Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:22:23.353386Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:22:23.354005Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSubscriberSinglePathUpdateTest::TwoRingGroups >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlapAndCreateRootLevelSibling [GOOD] >> TestSqsTopicHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> TSubscriberSinglePathUpdateTest::OneRingGroup [GOOD] >> TSubscriberSinglePathUpdateTest::OneDisconnectedRingGroup |81.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> TSubscriberTest::Boot [GOOD] >> TSubscriberSinglePathUpdateTest::TwoRingGroups [GOOD] >> TSubscriberSinglePathUpdateTest::ReplicaConfigMismatch >> TSubscriberTest::NotifyDelete >> TestSqsTopicHttpProxy::TestDeleteMessageInvalid >> TSubscriberTest::NotifyUpdate [GOOD] >> TSubscriberTest::ReconnectOnFailure |81.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] >> TSubscriberSinglePathUpdateTest::OneDisconnectedRingGroup [GOOD] >> TSubscriberSinglePathUpdateTest::OneWriteOnlyRingGroup |81.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> 
TSubscriberSinglePathUpdateTest::ReplicaConfigMismatch [GOOD] >> TSubscriberSyncQuorumTest::OneDisconnectedRingGroup >> TSubscriberSinglePathUpdateTest::OneWriteOnlyRingGroup [GOOD] >> TSubscriberTest::NotifyDelete [GOOD] >> TSubscriberTest::InvalidNotification >> TSubscriberSyncQuorumTest::OneDisconnectedRingGroup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::Boot [GOOD] Test command err: ... waiting for initial path lookups 2026-01-04T02:22:24.239735Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0]] WriteOnly: 0 State: 1}]} 2026-01-04T02:22:24.242396Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-01-04T02:22:24.242870Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-01-04T02:22:24.242937Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] 2026-01-04T02:22:24.242975Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:13:2060] 2026-01-04T02:22:24.243012Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:14:2061] 2026-01-04T02:22:24.243054Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:15:2062] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [1:2199047594611:0] Poisoning replica: [1:5497582477939:0] 2026-01-04T02:22:24.243470Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:27:2074], cookie# 12345 2026-01-04T02:22:24.243561Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:29:2075] 2026-01-04T02:22:24.243733Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:30:2075] 2026-01-04T02:22:24.243774Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:31:2075] 2026-01-04T02:22:24.243807Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:32:2075] 2026-01-04T02:22:24.243849Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:33:2075] 2026-01-04T02:22:24.243927Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:28:2075][TestPath] Set up state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:24.243989Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:34:2075] 2026-01-04T02:22:24.244050Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:24.244221Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:29:2075], cookie# 12345 2026-01-04T02:22:24.244288Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:30:2075], cookie# 12345 2026-01-04T02:22:24.244331Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:31:2075], cookie# 12345 2026-01-04T02:22:24.244392Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:32:2075], cookie# 12345 2026-01-04T02:22:24.244438Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:39:2075][TestPath] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:33:2075], cookie# 12345 2026-01-04T02:22:24.244488Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:34:2075], cookie# 12345 2026-01-04T02:22:24.244623Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12345 2026-01-04T02:22:24.244683Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:5:2052], cookie# 12345 2026-01-04T02:22:24.244781Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:13:2060], cookie# 12345 2026-01-04T02:22:24.244825Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:14:2061], cookie# 12345 2026-01-04T02:22:24.244957Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:29:2075], cookie# 12345 2026-01-04T02:22:24.245002Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:22:24.245084Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:30:2075], cookie# 12345 2026-01-04T02:22:24.245127Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:22:24.245159Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 0, failures# 0 2026-01-04T02:22:24.245245Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:31:2075], cookie# 12345 2026-01-04T02:22:24.245273Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-01-04T02:22:24.245299Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 0, failures# 0 2026-01-04T02:22:24.245332Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:31:2075] 2026-01-04T02:22:24.245376Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: 
AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:24.245417Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:32:2075], cookie# 12345 2026-01-04T02:22:24.245454Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-01-04T02:22:24.245483Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:22:24.245526Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:33:2075], cookie# 12345 2026-01-04T02:22:24.245556Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-01-04T02:22:24.245607Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 1, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:22:24.245665Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:34:2075], cookie# 12345 2026-01-04T02:22:24.245691Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:28:2075][TestPath] Sync cookie mismatch: sender# [1:34:2075], cookie# 12345, current cookie# 0 2026-01-04T02:22:24.245752Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:34:2075] 2026-01-04T02:22:24.245852Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [1:1099535966835:0] whose ring group state is: 0 2026-01-04T02:22:24.245981Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:27:2074], cookie# 12346 2026-01-04T02:22:24.246172Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:29:2075], cookie# 12346 2026-01-04T02:22:24.246251Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:30:2075], cookie# 12346 2026-01-04T02:22:24.246339Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:31:2075], cookie# 12346 2026-01-04T02:22:24.246386Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-04T02:22:24.246430Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:32:2075], cookie# 12346 2026-01-04T02:22:24.246473Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [1:33:2075], cookie# 12346 2026-01-04T02:22:24.246533Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:34:2075], cookie# 12346 2026-01-04T02:22:24.246572Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-04T02:22:24.246624Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:4:2051], cookie# 12346 2026-01-04T02:22:24.246719Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:13:2060], cookie# 12346 2026-01-04T02:22:24.246755Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:14:2061], cookie# 12346 2026-01-04T02:22:24.246819Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:29:2075], cookie# 12346 2026-01-04T02:22:24.246847Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 1 2026-01-04T02:22:24.246881Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:30:2075], cookie# 12346 2026-01-04T02:22:24.246934Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2026-01-04T02:22:24.246977Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 0, failures# 1 2026-01-04T02:22:24.247015Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:30:2075] 2026-01-04T02:22:24.247075Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:24.247186Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:32:2075], cookie# 12346 2026-01-04T02:22:24.247226Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2026-01-04T02:22:24.247273Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 1, failures# 1 2026-01-04T02:22:24.247331Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:33:2075], cookie# 12346 2026-01-04T02:22:24.247368Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 1, failures# 2, partial# 1 2026-01-04T02:22:24.247392Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, ring group# 1, size# 3, half# 1, successes# 2, failures# 1, partial# 1 2026-01-04T02:22:24.247416Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2026-01-04T02:22:24.556940Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:22:24.557763Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:3:2050] 2026-01-04T02:22:24.557915Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:6:2053] 2026-01-04T02:22:24.557976Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:9:2056] 2026-01-04T02:22:24.558044Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:37:2066] 2026-01-04T02:22:24.558142Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:38:2066] 2026-01-04T02:22:24.558203Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][2:36:2066][path] Set up state: owner# [2:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:24.558292Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [2:39:2066] 2026-01-04T02:22:24.558348Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:36:2066][path] Ignore empty state: owner# [2:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements 
} |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> basic_reading.py::TestBasicReading::test_basic_reading_monitoring [GOOD] >> TSubscriberTest::ReconnectOnFailure [GOOD] |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlapAndCreateRootLevelSibling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:22:18.582873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:22:18.582974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:18.583019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:22:18.583080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:22:18.583127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:22:18.583157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:22:18.583206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:18.583263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:22:18.584075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:22:18.584321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:22:18.671330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:22:18.671419Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:18.686214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:22:18.686440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:22:18.686624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:22:18.698846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: 
TTxUpgradeSchema.Complete 2026-01-04T02:22:18.699101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:22:18.699790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:18.700106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:22:18.702956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:18.703162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:22:18.704368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:18.704430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:18.704562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:22:18.704611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:18.704707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:22:18.704885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:22:18.862758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.863784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.863900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.863960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.864029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.864115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.864200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.864306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.864413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.864509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.864580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:22:18.864647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.864736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.864815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:18.864905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
-04T02:22:24.306800Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 250 2026-01-04T02:22:24.306893Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 250 2026-01-04T02:22:24.306964Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-04T02:22:24.307012Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-04T02:22:24.307222Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-04T02:22:24.307443Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:22:24.307523Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:22:24.310920Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:22:24.311956Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:24.312019Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:22:24.312245Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:22:24.312465Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:24.312517Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2213], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-04T02:22:24.312575Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2213], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-04T02:22:24.312953Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:22:24.313018Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-04T02:22:24.313143Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: 
Part operation is done id#104:0 progress is 1/1 2026-01-04T02:22:24.313183Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:22:24.313235Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:22:24.313276Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:22:24.313321Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-04T02:22:24.313378Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:22:24.313426Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-04T02:22:24.313463Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-04T02:22:24.313642Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:22:24.313696Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2026-01-04T02:22:24.313736Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-04T02:22:24.313771Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-04T02:22:24.315656Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:22:24.315819Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:22:24.315869Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:22:24.315917Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-04T02:22:24.315975Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:22:24.317898Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:22:24.317996Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 
PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:22:24.318035Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:22:24.318072Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-04T02:22:24.318108Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:22:24.318201Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2026-01-04T02:22:24.318259Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:823:2790] 2026-01-04T02:22:24.322343Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:22:24.322458Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:22:24.322525Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:22:24.322573Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:954:2890] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 0 SplitBoundary: "W" ChildPartitionIds: 1 ChildPartitionIds: 2 CreateRootLevelSibling: true } TestModificationResults wait txId: 105 2026-01-04T02:22:24.326772Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 0 SplitBoundary: "W" ChildPartitionIds: 1 ChildPartitionIds: 2 CreateRootLevelSibling: true } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:22:24.327066Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:22:24.327346Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Split with prescribed partition ids: Attempt to reserve partition id (1) that is less than the first availiable id (3), at schemeshard: 72057594046678944 2026-01-04T02:22:24.329968Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Split with prescribed partition ids: Attempt to reserve partition id (1) that is less than the first availiable id (3)" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:24.330287Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split with prescribed partition ids: Attempt to reserve partition id (1) that is less than the first availiable id (3), operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-04T02:22:24.330635Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-04T02:22:24.330682Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-04T02:22:24.331089Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-04T02:22:24.331195Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:22:24.331236Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1010:2928] TestWaitNotification: OK eventTxId 105 |81.8%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSubscriberTest::InvalidNotification [GOOD] >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSinglePathUpdateTest::OneWriteOnlyRingGroup [GOOD] Test command err: ... waiting for initial path lookups 2026-01-04T02:22:24.698042Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:22:24.700298Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-01-04T02:22:24.700409Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-01-04T02:22:24.700493Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-04T02:22:24.700744Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:20:2066] 2026-01-04T02:22:24.700907Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:21:2066] 2026-01-04T02:22:24.701087Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:19:2066][TestPath] Set up state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:24.701174Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:22:2066] 2026-01-04T02:22:24.701220Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:19:2066][TestPath] Ignore empty state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:24339059:0] 2026-01-04T02:22:24.701624Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:4:2051] 2026-01-04T02:22:24.701703Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:20:2066] 2026-01-04T02:22:24.701762Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:19:2066][TestPath] Update to strong state: owner# [1:18:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:1099535966835:0] 2026-01-04T02:22:24.702120Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:24:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:5:2052] 2026-01-04T02:22:24.702181Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:21:2066] 2026-01-04T02:22:24.702250Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:19:2066][TestPath] Path was updated to new version: owner# [1:18:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:2199047594611:0] 2026-01-04T02:22:24.702470Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:25:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: 
sender# [1:6:2053] 2026-01-04T02:22:24.702518Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [1:22:2066] 2026-01-04T02:22:24.702581Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:19:2066][TestPath] Path was updated to new version: owner# [1:18:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } ... waiting for initial path lookups 2026-01-04T02:22:24.977968Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:22:24.978706Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:4:2051] 2026-01-04T02:22:24.978788Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:5:2052] 2026-01-04T02:22:24.978849Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-04T02:22:24.979129Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:29:2075] 2026-01-04T02:22:24.979235Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:30:2075] 2026-01-04T02:22:24.979285Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][2:28:2075][TestPath] Set up state: owner# [2:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:24.979342Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:31:2075] 2026-01-04T02:22:24.979386Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:28:2075][TestPath] Ignore empty state: owner# [2:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:24339059:0] 2026-01-04T02:22:24.979728Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:4:2051] 2026-01-04T02:22:24.979799Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [2:29:2075] 2026-01-04T02:22:24.979857Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][2:28:2075][TestPath] Update to strong state: owner# [2:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:1099535966835:0] 2026-01-04T02:22:24.980192Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [2:5:2052] 2026-01-04T02:22:24.980253Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [2:30:2075] 2026-01-04T02:22:24.980309Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][2:28:2075][TestPath] Path was updated to new version: owner# [2:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:2199047594611:0] 2026-01-04T02:22:24.980548Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: 
sender# [2:6:2053] 2026-01-04T02:22:24.980611Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [2:31:2075] 2026-01-04T02:22:24.980660Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][2:28:2075][TestPath] Path was updated to new version: owner# [2:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [2:3298559222387:0] Sending path update to replica: [2:4398070850163:0] Sending path update to replica: [2:5497582477939:0] ... waiting for initial path lookups 2026-01-04T02:22:25.245685Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[3:3298559222387:0], [3:4398070850163:0], [3:5497582477939:0]] WriteOnly: 1 State: 0}]} 2026-01-04T02:22:25.246512Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-01-04T02:22:25.246587Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-01-04T02:22:25.246631Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... 
unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-04T02:22:25.246869Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2026-01-04T02:22:25.246975Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:30:2075] 2026-01-04T02:22:25.247037Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:28:2075][TestPath] Set up state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:25.247130Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-01-04T02:22:25.247175Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:24339059:0] 2026-01-04T02:22:25.247683Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:4:2051] 2026-01-04T02:22:25.247745Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:29:2075] 2026-01-04T02:22:25.247796Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][3:28:2075][TestPath] Update to strong state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:1099535966835:0] 2026-01-04T02:22:25.248056Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [3:5:2052] 2026-01-04T02:22:25.248116Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [3:30:2075] 2026-01-04T02:22:25.248180Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][3:28:2075][TestPath] Path was updated to new version: owner# [3:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:2199047594611:0] 2026-01-04T02:22:25.248386Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: 
sender# [3:6:2053] 2026-01-04T02:22:25.248447Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [3:31:2075] 2026-01-04T02:22:25.248482Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][3:28:2075][TestPath] Path was updated to new version: owner# [3:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [3:3298559222387:0] Sending path update to replica: [3:4398070850163:0] Sending path update to replica: [3:5497582477939:0] |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberSyncQuorumTest::OneDisconnectedRingGroup [GOOD] Test command err: ... waiting for initial path lookups 2026-01-04T02:22:24.734057Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}, {Replicas: [[1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0]] WriteOnly: 0 State: 1}]} 2026-01-04T02:22:24.739247Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:4:2051] 2026-01-04T02:22:24.739420Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:5:2052] 2026-01-04T02:22:24.739476Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:6:2053] 2026-01-04T02:22:24.739539Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:13:2060] 2026-01-04T02:22:24.739585Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:14:2061] 2026-01-04T02:22:24.739634Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:15:2062] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR 2026-01-04T02:22:24.740155Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:29:2075] 2026-01-04T02:22:24.740297Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:30:2075] 2026-01-04T02:22:24.740348Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:31:2075] 2026-01-04T02:22:24.740395Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:32:2075] 2026-01-04T02:22:24.740436Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:33:2075] 2026-01-04T02:22:24.740504Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:28:2075][TestPath] Set up state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:24.740563Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [1:34:2075] 2026-01-04T02:22:24.740614Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:28:2075][TestPath] Ignore empty state: owner# [1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:24339059:0] 2026-01-04T02:22:24.741041Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:35:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:4:2051] 2026-01-04T02:22:24.741144Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:29:2075] 2026-01-04T02:22:24.741211Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Update to strong state: owner# 
[1:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:1099535966835:0] 2026-01-04T02:22:24.741625Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:36:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:5:2052] 2026-01-04T02:22:24.741698Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [1:30:2075] 2026-01-04T02:22:24.741783Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:2199047594611:0] 2026-01-04T02:22:24.742165Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:37:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [1:6:2053] 2026-01-04T02:22:24.742225Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 3 }: sender# [1:31:2075] 2026-01-04T02:22:24.742274Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 2) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:3298559222387:0] 2026-01-04T02:22:24.742518Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:38:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 4 }: sender# [1:13:2060] 2026-01-04T02:22:24.742578Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 4 }: sender# [1:32:2075] 2026-01-04T02:22:24.742678Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 3) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 4) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:4398070850163:0] 2026-01-04T02:22:24.742907Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:39:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 5 }: sender# [1:14:2061] 
2026-01-04T02:22:24.742999Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 5 }: sender# [1:33:2075] 2026-01-04T02:22:24.743064Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 4) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 5) DomainId: AbandonedSchemeShards: there are 0 elements } Sending path update to replica: [1:5497582477939:0] 2026-01-04T02:22:24.743392Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 6 }: sender# [1:15:2062] 2026-01-04T02:22:24.743466Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 6 }: sender# [1:34:2075] 2026-01-04T02:22:24.743526Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:28:2075][TestPath] Path was updated to new version: owner# [1:27:2074], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 5) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 6) DomainId: AbandonedSchemeShards: there are 0 elements } ... waiting for initial path lookups 2026-01-04T02:22:25.028838Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:19:2066][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[2:24339059:0], [2:1099535966835:0], [2:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:22:25.029332Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:4:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: TestPath DomainOwnerId: 1 }: sender# [2:23:2066] 2026-01-04T02:22:25.029388Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:4:2051] Upsert description: path# TestPath 2026-01-0 ... 
Update { Owner: 1 Generation: 1 }: sender# [2:18:2065], cookie# 0, event size# 80 2026-01-04T02:22:25.038468Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:4:2051] Update description: path# TestPath, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-04T02:22:25.038537Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:4:2051] Upsert description: path# TestPath, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path TestPath, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 2, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 34} 2026-01-04T02:22:25.038728Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:23:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath PathId: [OwnerId: 1, LocalPathId: 1] Version: 2 }: sender# [2:4:2051] 2026-01-04T02:22:25.038802Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [2:4:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 2 }: sender# [2:23:2066] 2026-01-04T02:22:25.038938Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:520: [proxy][2:20:2066][TestPath] Cluster state mismatch in replica notification: sender# [2:23:2066], subscriber cluster state# {Generation: 0, GUID: 0}, replica cluster state# {Generation: 1 Guid: 0} 2026-01-04T02:22:25.039126Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:4:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: TestPath }: sender# [2:23:2066] 2026-01-04T02:22:25.039194Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:4:2051] Unsubscribe: subscriber# [2:23:2066], path# TestPath 2026-01-04T02:22:25.039297Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:19:2066][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [2:20:2066] 2026-01-04T02:22:25.039386Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:19:2066][TestPath] Ignore empty state: owner# [2:18:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } ... waiting for initial path lookups 2026-01-04T02:22:25.303743Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:28:2075][TestPath] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:22:25.304400Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:4:2051] 2026-01-04T02:22:25.304466Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:5:2052] 2026-01-04T02:22:25.304491Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:6:2053] ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... 
blocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR cookie 0 ... waiting for initial path lookups (done) ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR ... unblocking NKikimr::NSchemeBoard::NInternalEvents::TEvNotifyBuilder from SCHEME_BOARD_SUBSCRIBER_PROXY_ACTOR to SCHEME_BOARD_SUBSCRIBER_ACTOR Poisoning replica: [3:2199047594611:0] Poisoning replica: [3:3298559222387:0] Poisoning replica: [3:4398070850163:0] Poisoning replica: [3:5497582477939:0] 2026-01-04T02:22:25.304706Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12345 2026-01-04T02:22:25.304813Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2026-01-04T02:22:25.304945Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:30:2075] 2026-01-04T02:22:25.305018Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:28:2075][TestPath] Set up state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:25.305109Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-01-04T02:22:25.305164Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:25.305289Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12345 2026-01-04T02:22:25.305367Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12345 2026-01-04T02:22:25.305427Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:34:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:31:2075], cookie# 12345 2026-01-04T02:22:25.305496Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:4:2051], cookie# 12345 2026-01-04T02:22:25.305534Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12345 2026-01-04T02:22:25.305646Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster 
State: { } }: sender# [3:29:2075], cookie# 12345 2026-01-04T02:22:25.305697Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:22:25.305766Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:30:2075], cookie# 12345 2026-01-04T02:22:25.305796Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12345, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:22:25.305881Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12345 2026-01-04T02:22:25.305910Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:28:2075][TestPath] Sync cookie mismatch: sender# [3:31:2075], cookie# 12345, current cookie# 0 2026-01-04T02:22:25.305948Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:31:2075] 2026-01-04T02:22:25.305995Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } Poisoning replica: [3:24339059:0] whose ring group state is: 0 2026-01-04T02:22:25.306122Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:28:2075][TestPath] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:27:2074], cookie# 12346 2026-01-04T02:22:25.306309Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:32:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:29:2075], cookie# 12346 2026-01-04T02:22:25.306377Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: TestPath }: sender# [3:30:2075], cookie# 12346 2026-01-04T02:22:25.306449Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:31:2075], cookie# 12346 2026-01-04T02:22:25.306493Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:28:2075][TestPath] Sync is in progress: cookie# 12346, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-04T02:22:25.306585Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:33:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:5:2052], cookie# 12346 2026-01-04T02:22:25.306665Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [3:29:2075], cookie# 12346 2026-01-04T02:22:25.306712Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][3:28:2075][TestPath] Sync is done in the ring group: cookie# 12346, 
ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-01-04T02:22:25.306782Z node 3 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][3:28:2075][TestPath] Sync is incomplete in one of the ring groups: cookie# 12346 2026-01-04T02:22:25.306893Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvNotify { Path: TestPath Version: 0 }: sender# [3:29:2075] 2026-01-04T02:22:25.306971Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:28:2075][TestPath] Ignore empty state: owner# [3:27:2074], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:25.307021Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:28:2075][TestPath] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [3:30:2075], cookie# 12346 2026-01-04T02:22:25.307062Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:28:2075][TestPath] Sync cookie mismatch: sender# [3:30:2075], cookie# 12346, current cookie# 0 >> TSubscriberTest::SyncPartial |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::ReconnectOnFailure [GOOD] Test command err: 2026-01-04T02:22:24.668431Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:22:24.671288Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2026-01-04T02:22:24.671435Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2026-01-04T02:22:24.671488Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2026-01-04T02:22:24.671564Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-01-04T02:22:24.671665Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-01-04T02:22:24.671713Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:36:2066][path] Set up state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:24.671802Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2026-01-04T02:22:24.671863Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:24.672468Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2026-01-04T02:22:24.672565Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:37:2066] 2026-01-04T02:22:24.672628Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:36:2066][path] Update to strong state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:25.151327Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][4:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:22:25.152596Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2026-01-04T02:22:25.152672Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2026-01-04T02:22:25.152724Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2026-01-04T02:22:25.152893Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:37:2066] 2026-01-04T02:22:25.152954Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:38:2066] 2026-01-04T02:22:25.152991Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][4:36:2066][path] Set up state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:25.153058Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:39:2066] 2026-01-04T02:22:25.153093Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:25.153514Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:37:2066] 2026-01-04T02:22:25.153575Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:25.153618Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 
}: sender# [4:38:2066] 2026-01-04T02:22:25.153660Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:25.153749Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:39:2066] 2026-01-04T02:22:25.153784Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:25.165283Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:47:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2026-01-04T02:22:25.165458Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:37:2066] 2026-01-04T02:22:25.165550Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:25.165685Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:48:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2026-01-04T02:22:25.165797Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:49:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2026-01-04T02:22:25.165952Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:38:2066] 2026-01-04T02:22:25.166010Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:25.166051Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [4:39:2066] 2026-01-04T02:22:25.166086Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:36:2066][path] Ignore empty state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:25.166657Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][4:47:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [3:3:2050] 2026-01-04T02:22:25.166805Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] 
Version: 1 }: sender# [4:37:2066] 2026-01-04T02:22:25.166870Z node 4 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][4:36:2066][path] Update to strong state: owner# [4:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidSize [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::InvalidNotification [GOOD] Test command err: 2026-01-04T02:22:25.070819Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:37:2067][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:22:25.072392Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:3:2050] 2026-01-04T02:22:25.072472Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:6:2053] 2026-01-04T02:22:25.072509Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:43:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:9:2056] 2026-01-04T02:22:25.072564Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:38:2067] 2026-01-04T02:22:25.072610Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:39:2067] 2026-01-04T02:22:25.072651Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:37:2067][path] Set up state: owner# [1:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:25.072716Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 1 }: sender# [1:40:2067] 2026-01-04T02:22:25.072761Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:37:2067][path] Path was already updated: owner# [1:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:25.073091Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:3:2050] 2026-01-04T02:22:25.073150Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: 
[OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:6:2053] 2026-01-04T02:22:25.073219Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:38:2067] 2026-01-04T02:22:25.073278Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:37:2067][path] Path was updated to new version: owner# [1:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 1) DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:25.073334Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 1] Version: 18446744073709551615 }: sender# [1:39:2067] 2026-01-04T02:22:25.073373Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:37:2067][path] Path was already updated: owner# [1:35:2065], state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: (PathId: [OwnerId: 1, LocalPathId: 1], Version: 18446744073709551615) DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:25.630308Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:22:25.631012Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:3:2050] 2026-01-04T02:22:25.631084Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:6:2053] 2026-01-04T02:22:25.631125Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:9:2056] 2026-01-04T02:22:25.631186Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:37:2066] 2026-01-04T02:22:25.631254Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:38:2066] 2026-01-04T02:22:25.631307Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:36:2066][path] Set up state: owner# [3:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:25.631370Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [3:39:2066] 2026-01-04T02:22:25.631423Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:36:2066][path] Ignore empty state: owner# [3:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 
elements } 2026-01-04T02:22:25.631554Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [3:35:2065] 2026-01-04T02:22:25.631595Z node 3 :SCHEME_BOARD_SUBSCRIBER ERROR: subscriber.cpp:818: [main][3:36:2066][path] Suspicious NKikimrSchemeBoard.TEvNotify { PathId: [OwnerId: 1, LocalPathId: 1] Version: 0 }: sender# [3:35:2065] |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncPartial [GOOD] >> TSubscriberTest::SyncWithOutdatedReplica >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] >> TestSqsTopicHttpProxy::TestSendMessageBadQueueUrl [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToGapIndices ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] Test command err: 2026-01-04T02:22:11.244375Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326055467630209:2136];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:11.244448Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:11.543869Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:11.556289Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:11.556394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:11.624216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:11.698475Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326055467630111:2081] 1767493331237164 != 1767493331237167 2026-01-04T02:22:11.732534Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:11.805190Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:11.848085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:22:11.848120Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:22:11.848127Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:22:11.848206Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:12.096115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:12.253714Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:12.303139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:22:12.310161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:22:12.312676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:22:12.348827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:22:12.410352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:12.599387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:12.658363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:12.750161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-01-04T02:22:12.756056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:12.799310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:12.873499Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:12.929725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:12.978455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:13.027078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:13.080114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.822520Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326068352533659:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:14.822642Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:14.822899Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326068352533671:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:14.822946Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326068352533672:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:14.823061Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:14.827685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:22:14.843361Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326068352533675:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:22:14.905285Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326068352533726:3102] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:22:15.396752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:15.449725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:15.487465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:15.537260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.c ... { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:22:23.838091Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-04T02:22:23.838165Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 10ms 2026-01-04T02:22:23.838444Z node 2 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:22:23.841605Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7591326108128819537:2440], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:22:23.841688Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7591326108128819538:2441], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:22:23.841761Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326108128819532:2438], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:23.844415Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:23.855207Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:22:23.855247Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 26ms 2026-01-04T02:22:23.855643Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: 
"Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:22:23.855681Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-04T02:22:23.855779Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 26ms 2026-01-04T02:22:23.856263Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:22:23.884431Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326108128819568:2443], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:23.884534Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:24.078968Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591326108128819535:2439]: Pool not found 2026-01-04T02:22:24.079251Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-04T02:22:24.396231Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591326108128819508:2435]: Pool not found 2026-01-04T02:22:24.396657Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-04T02:22:24.399777Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326112423786922:2456], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:24.399801Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7591326112423786923:2457], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:22:24.399865Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:24.400257Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326112423786926:2458], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:24.400316Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:24.725084Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591326112423786920:2455]: Pool not found 2026-01-04T02:22:24.725754Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] |81.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::MargePartitions2 [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:22:20.321630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:22:20.321753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:20.322036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:22:20.322083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:22:20.322127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:22:20.322163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:22:20.322223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:20.322434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:22:20.323489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:22:20.323859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:22:20.431256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:22:20.431325Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:20.444515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:22:20.448859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:22:20.449052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:22:20.463693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:22:20.463925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:22:20.464741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:20.465002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:22:20.470301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:20.470511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:22:20.471841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:20.471910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:20.472047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:22:20.472121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:20.472190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:22:20.472350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:22:20.632682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:20.633687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:20.633839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:20.633924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:20.633994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:20.634054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:20.634137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:20.634218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:20.634311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:20.634376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:22:20.634447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:20.634490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:20.634554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:20.634614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:20.634678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Step: 300 2026-01-04T02:22:25.711171Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 300 2026-01-04T02:22:25.711242Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-04T02:22:25.711286Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-04T02:22:25.711447Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-04T02:22:25.711628Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:22:25.717665Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:22:25.718302Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:25.718366Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:22:25.718759Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:25.718824Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2213], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-04T02:22:25.719359Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:22:25.719438Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-04T02:22:25.719589Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-04T02:22:25.719635Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:22:25.719681Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-04T02:22:25.719722Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:22:25.719778Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-04T02:22:25.719823Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:22:25.719871Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-04T02:22:25.719909Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-04T02:22:25.720109Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:22:25.720162Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2026-01-04T02:22:25.720227Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-04T02:22:25.721199Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:22:25.721330Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:22:25.721382Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:22:25.721429Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-04T02:22:25.721480Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:22:25.721580Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2026-01-04T02:22:25.721632Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:823:2790] 2026-01-04T02:22:25.725689Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:22:25.725795Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:22:25.725854Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1044:2956] TestWaitNotification: OK eventTxId 105 2026-01-04T02:22:25.726471Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:22:25.726765Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 331us result status StatusSuccess 2026-01-04T02:22:25.727584Z node 3 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 250 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Inactive ChildPartitionIds: 4 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\375" ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 ParentPartitionIds: 2 KeyRange { FromBound: "?\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" 
ToBound: "\277\377\377\377\377\377\377\377\377\377\377\377\377\377\377\374" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> StreamCreator::TopicAutoPartitioning [GOOD] |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition >> TestSqsTopicHttpProxy::TestSendMessageBatchLong [GOOD] |81.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition >> TestSqsTopicHttpProxy::TestDeleteMessageBatch [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberTest::SyncWithOutdatedReplica [GOOD] Test command err: 2026-01-04T02:22:26.334400Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:36:2066][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:22:26.337124Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:3:2050] 2026-01-04T02:22:26.337240Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:6:2053] 2026-01-04T02:22:26.337282Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:9:2056] 2026-01-04T02:22:26.337346Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-01-04T02:22:26.337429Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: 
[main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-01-04T02:22:26.337473Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:36:2066][path] Set up state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:26.337543Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2026-01-04T02:22:26.337596Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:26.338054Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:36:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:35:2065], cookie# 1 2026-01-04T02:22:26.338223Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:40:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:37:2066], cookie# 1 2026-01-04T02:22:26.338297Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 1 2026-01-04T02:22:26.338345Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:39:2066], cookie# 1 2026-01-04T02:22:26.338500Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:6:2053], cookie# 1 2026-01-04T02:22:26.338544Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:9:2056], cookie# 1 2026-01-04T02:22:26.338621Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:37:2066], cookie# 1 2026-01-04T02:22:26.338664Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-04T02:22:26.338721Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:37:2066] 2026-01-04T02:22:26.338778Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:26.338849Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:38:2066], cookie# 1 2026-01-04T02:22:26.338879Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: 
cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 1 2026-01-04T02:22:26.338929Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:39:2066], cookie# 1 2026-01-04T02:22:26.338982Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:36:2066][path] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 1, partial# 0 2026-01-04T02:22:26.339098Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:36:2066][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:35:2065], cookie# 2 2026-01-04T02:22:26.339206Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:37:2066], cookie# 2 2026-01-04T02:22:26.339239Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: cookie# 2, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-04T02:22:26.339276Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:41:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:38:2066], cookie# 2 2026-01-04T02:22:26.339327Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:39:2066], cookie# 2 2026-01-04T02:22:26.339478Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:9:2056], cookie# 2 2026-01-04T02:22:26.339559Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:38:2066], cookie# 2 2026-01-04T02:22:26.339599Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:36:2066][path] Sync is done in the ring group: cookie# 2, ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-01-04T02:22:26.339635Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:36:2066][path] Sync is incomplete in one of the ring groups: cookie# 2 2026-01-04T02:22:26.339715Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:38:2066] 2026-01-04T02:22:26.339776Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:26.339829Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 0 Cluster State: { } }: sender# [1:39:2066], cookie# 2 2026-01-04T02:22:26.339856Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:36:2066][path] Sync cookie mismatch: sender# [1:39:2066], cookie# 2, current cookie# 0 2026-01-04T02:22:26.339952Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:36:2066][path] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:35:2065], cookie# 3 2026-01-04T02:22:26.340081Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:37:2066], cookie# 3 2026-01-04T02:22:26.340113Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:36:2066][path] Sync is in progress: cookie# 3, ring group# 0, size# 3, half# 1, successes# 0, failures# 1 2026-01-04T02:22:26.340162Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:38:2066], cookie# 3 2026-01-04T02:22:26.340210Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:1001: [main][1:36:2066][path] Sync is done in the ring group: cookie# 3, ring group# 0, size# 3, half# 1, successes# 0, failures# 2, partial# 1 2026-01-04T02:22:26.340244Z node 1 :SCHEME_BOARD_SUBSCRIBER WARN: subscriber.cpp:1006: [main][1:36:2066][path] Sync is incomplete in one of the ring groups: cookie# 3 2026-01-04T02:22:26.340288Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:42:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [1:39:2066], cookie# 3 2026-01-04T02:22:26.340429Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 0 Partial: 1 Cluster State: { } }: sender# [1:39:2066], cookie# 3 2026-01-04T02:22:26.340457Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:36:2066][path] Sync cookie mismatch: sender# [1:39:2066], cookie# 3, current cookie# 0 2026-01-04T02:22:26.340519Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:36:2066][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path Version: 0 }: sender# [1:39:2066] 2026-01-04T02:22:26.340565Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:36:2066][path] Ignore empty state: owner# [1:35:2065], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:26.857057Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:37:2067][path] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:22:26.857757Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [3:3:2050] 2026-01-04T02:22:26.857873Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:6:2053] 2026-01-04T02:22:26.857919Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:9:2056] 2026-01-04T02:22:26.857985Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 1, LocalPathId: 2] Version: 2 }: sender# [3:38:2067] 2026-01-04T02:22:26.858062Z node 3 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:39:2067] 2026-01-04T02:22:26.858125Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:37:2067][path] Set up state: owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:26.858249Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvNotify { Path: path PathId: [OwnerId: 2, LocalPathId: 2] Version: 1 }: sender# [3:40:2067] 2026-01-04T02:22:26.858311Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:37:2067][path] Path was already updated: owner# [3:35:2065], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 2, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 2, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:26.858455Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:37:2067][path] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:35:2065], cookie# 1 2026-01-04T02:22:26.858593Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:38:2067], cookie# 1 2026-01-04T02:22:26.858668Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:39:2067], cookie# 1 2026-01-04T02:22:26.858714Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:40:2067], cookie# 1 2026-01-04T02:22:26.858801Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:41:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [3:3:2050], cookie# 1 2026-01-04T02:22:26.858848Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:42:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:6:2053], cookie# 1 2026-01-04T02:22:26.858881Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:43:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:9:2056], cookie# 1 2026-01-04T02:22:26.858943Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [3:38:2067], cookie# 1 2026-01-04T02:22:26.858996Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][3:37:2067][path] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:22:26.859044Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:39:2067], cookie# 1 2026-01-04T02:22:26.859083Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
subscriber.cpp:999: [main][3:37:2067][path] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:22:26.859141Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:37:2067][path] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 1 Partial: 0 Cluster State: { } }: sender# [3:40:2067], cookie# 1 2026-01-04T02:22:26.859168Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:37:2067][path] Sync cookie mismatch: sender# [3:40:2067], cookie# 1, current cookie# 0 |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithDuplicatePartition >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::TopicAutoPartitioning [GOOD] Test command err: 2026-01-04T02:22:18.061140Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326083933251384:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:18.062887Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:18.351402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:18.351555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:18.387691Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:18.424358Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:18.448259Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:18.642762Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:18.787616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:22:18.787644Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:22:18.787651Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:22:18.787731Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:19.087832Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:19.190943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:19.210712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:19.383380Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2026-01-04T02:22:21.992550Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][1:7591326096818154236:2333] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:40:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-01-04T02:22:22.004591Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2026-01-04T02:22:22.004618Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2026-01-04T02:22:22.031772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:22:22.054086Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2026-01-04T02:22:22.054112Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# 2026-01-04T02:22:22.960939Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591326103209230273:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:22.965180Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:22.992386Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:23.074162Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:23.117564Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:23.117627Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:23.120894Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:23.265715Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:23.341678Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:22:23.341702Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:22:23.341708Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:22:23.341785Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:23.601981Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:23.607906Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:22:23.610838Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:23.674315Z node 2 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2026-01-04T02:22:23.975975Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:26.241127Z node 2 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][2:7591326120389100483:2333] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:40:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-01-04T02:22:26.244351Z node 2 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2026-01-04T02:22:26.244372Z node 2 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2026-01-04T02:22:26.253330Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:22:26.271524Z node 2 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2026-01-04T02:22:26.271549Z node 2 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# |81.8%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest |81.8%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut |81.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/ydb-services-persqueue_v1-ut-new_schemecache_ut >> StreamCreator::Basic |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |81.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest |81.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToGapIndices [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToDifferentOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageInvalidSize [GOOD] Test command err: 2026-01-04T02:22:12.141055Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326057657162337:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:12.141296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:12.184603Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:22:12.468170Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:12.523910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:12.524002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:12.608892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:12.737286Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:12.749414Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326057657162112:2081] 1767493332121162 != 1767493332121165 2026-01-04T02:22:12.767367Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:12.869472Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:22:12.869509Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:22:12.869524Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:22:12.869587Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:13.097313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:13.137271Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:13.327373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:22:13.337387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:22:13.343817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:22:13.366582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-04T02:22:13.462279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:13.581693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:13.628965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715663, at schemeshard: 72057594046644480 2026-01-04T02:22:13.637973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:13.688896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
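[editor note] The FLAT_TX_SCHEMESHARD, KQP_PROXY and NET_CLASSIFIER warnings above repeat for every test's environment bootstrap in this run, so they are expected noise rather than failures. When triaging a log like this one, it can help to collapse it into a count of entries per component and severity. A minimal sketch, assuming the combined output has been saved to a plain-text file such as ya_log.txt; the file name and the per-line layout are assumptions of this sketch, not part of the report itself:

    import re
    from collections import Counter

    # Matches fragments like "node 1 :FLAT_TX_SCHEMESHARD WARN:" anywhere in a line.
    ENTRY = re.compile(r"node \d+ :([A-Z_]+) (TRACE|DEBUG|INFO|NOTICE|WARN|ERROR):")

    def summarize(path: str) -> Counter:
        """Count log entries per (component, severity) pair."""
        counts = Counter()
        with open(path, encoding="utf-8", errors="replace") as f:
            for line in f:
                for component, severity in ENTRY.findall(line):
                    counts[(component, severity)] += 1
        return counts

    if __name__ == "__main__":
        for (component, severity), n in summarize("ya_log.txt").most_common(20):
            print(f"{n:6d}  {component} {severity}")

Running this over a full report surfaces which components dominate the warnings (here, mostly FLAT_TX_SCHEMESHARD WARN and KQP_WORKLOAD_SERVICE WARN) without scrolling through the raw entries.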
2026-01-04T02:22:13.733521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:13.780353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:13.825393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:13.887654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:13.941523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:13.982883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.258564Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326074837032953:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.258661Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.258800Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326074837032961:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.262019Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326074837032967:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.262096Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.263897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:22:16.288959Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326074837032968:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-01-04T02:22:16.361029Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326074837033020:3102] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:22:16.882543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.962444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.995234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:17.033186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose ... AmazonSQS.ReceiveMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "MaxNumberOfMessages":-10, "WaitTimeSeconds":1, "QueueUrl":"/v1/5//Root/6/topic1/8/consumer" } 2026-01-04T02:22:26.642950Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:46320) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: 82866441-3eadcf1f-91001718-d28a3ea7 Content-Type: application/x-amz-json-1.1 Content-Length: 81 2026-01-04T02:22:26.643068Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:46320) connection closed Http output full {"__type":"InvalidParameterValue","message":"MaxNumberOfMessages is less than 1"} 2026-01-04T02:22:26.645084Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#40,[::1]:46334) incoming connection opened 2026-01-04T02:22:26.645160Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#40,[::1]:46334) -> (POST /Root, 100 bytes) 2026-01-04T02:22:26.645317Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [78c8:d0ee:17c:0:60c8:d0ee:17c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 2ccec4e1-52b42ebf-3b7f04e-6e259728 2026-01-04T02:22:26.645685Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [2ccec4e1-52b42ebf-3b7f04e-6e259728] got new request from [78c8:d0ee:17c:0:60c8:d0ee:17c:0] database '/Root' stream '' 2026-01-04T02:22:26.646575Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [2ccec4e1-52b42ebf-3b7f04e-6e259728] [auth] Authorized successfully 2026-01-04T02:22:26.646620Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [2ccec4e1-52b42ebf-3b7f04e-6e259728] sending grpc request to '' database: '/Root' iam token size: 0 
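[editor note] The ReceiveMessage exchanges in this test — the MaxNumberOfMessages -10 request above and the 0, 50, 2147483647 and -2147483648 requests that follow — all return 400 InvalidParameterValue with either "MaxNumberOfMessages is less than 1" or "MaxNumberOfMessages is greater than 10", which is consistent with the proxy enforcing the SQS range of 1 to 10. A minimal sketch of that kind of bounds check, for illustration only; this is not the actual http_req.cpp validation code:

    from typing import Optional

    def validate_max_number_of_messages(value: int) -> Optional[str]:
        """Return an error message if value is outside the SQS-style range [1, 10]."""
        if value < 1:
            return "MaxNumberOfMessages is less than 1"
        if value > 10:
            return "MaxNumberOfMessages is greater than 10"
        return None

    # The values exercised by this test, plus one accepted value for contrast.
    for v in (-10, 0, 50, 2147483647, -2147483648, 5):
        print(v, "->", validate_max_number_of_messages(v) or "accepted")

The test's expected replies match this mapping: every out-of-range value is rejected before the request reaches the queue backend.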
2026-01-04T02:22:26.646947Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [ReceiveMessage] requestId [2ccec4e1-52b42ebf-3b7f04e-6e259728] Not retrying GRPC response. Code: 400, Error: InvalidParameterValue 2026-01-04T02:22:26.647000Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [ReceiveMessage] requestId [2ccec4e1-52b42ebf-3b7f04e-6e259728] reply with status: STATUS_UNDEFINED message: MaxNumberOfMessages is less than 1 2026-01-04T02:22:26.647193Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#40,[::1]:46334) <- (400 InvalidParameterValue, 81 bytes) 2026-01-04T02:22:26.647248Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#40,[::1]:46334) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ReceiveMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "MaxNumberOfMessages":0, "WaitTimeSeconds":1, "QueueUrl":"/v1/5//Root/6/topic1/8/consumer" } 2026-01-04T02:22:26.647272Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#40,[::1]:46334) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: 2ccec4e1-52b42ebf-3b7f04e-6e259728 Content-Type: application/x-amz-json-1.1 Content-Length: 81 2026-01-04T02:22:26.647360Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#40,[::1]:46334) connection closed Http output full {"__type":"InvalidParameterValue","message":"MaxNumberOfMessages is less than 1"} 2026-01-04T02:22:26.648553Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:46344) incoming connection opened 2026-01-04T02:22:26.648669Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:46344) -> (POST /Root, 101 bytes) 2026-01-04T02:22:26.648791Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [38d0:d2ee:17c:0:20d0:d2ee:17c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 3c29e019-5f436ee6-53cd56d-40b8d071 2026-01-04T02:22:26.649100Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [3c29e019-5f436ee6-53cd56d-40b8d071] got new request from [38d0:d2ee:17c:0:20d0:d2ee:17c:0] database '/Root' stream '' 2026-01-04T02:22:26.649636Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [3c29e019-5f436ee6-53cd56d-40b8d071] [auth] Authorized successfully 2026-01-04T02:22:26.649674Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [3c29e019-5f436ee6-53cd56d-40b8d071] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-04T02:22:26.650158Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [ReceiveMessage] requestId [3c29e019-5f436ee6-53cd56d-40b8d071] Not retrying GRPC response. 
Code: 400, Error: InvalidParameterValue 2026-01-04T02:22:26.650214Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [ReceiveMessage] requestId [3c29e019-5f436ee6-53cd56d-40b8d071] reply with status: STATUS_UNDEFINED message: MaxNumberOfMessages is greater than 10 2026-01-04T02:22:26.650390Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:46344) <- (400 InvalidParameterValue, 85 bytes) 2026-01-04T02:22:26.650446Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:46344) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ReceiveMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "MaxNumberOfMessages":50, "WaitTimeSeconds":1, "QueueUrl":"/v1/5//Root/6/topic1/8/consumer" } 2026-01-04T02:22:26.650475Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:46344) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: 3c29e019-5f436ee6-53cd56d-40b8d071 Content-Type: application/x-amz-json-1.1 Content-Length: 85 2026-01-04T02:22:26.650551Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:46344) connection closed Http output full {"__type":"InvalidParameterValue","message":"MaxNumberOfMessages is greater than 10"} 2026-01-04T02:22:26.651484Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#40,[::1]:46348) incoming connection opened 2026-01-04T02:22:26.651555Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#40,[::1]:46348) -> (POST /Root, 109 bytes) 2026-01-04T02:22:26.651771Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [38cd:d2ee:17c:0:20cd:d2ee:17c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 4c3b5315-94eacc14-61084315-852af404 2026-01-04T02:22:26.652061Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [4c3b5315-94eacc14-61084315-852af404] got new request from [38cd:d2ee:17c:0:20cd:d2ee:17c:0] database '/Root' stream '' 2026-01-04T02:22:26.652722Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [4c3b5315-94eacc14-61084315-852af404] [auth] Authorized successfully 2026-01-04T02:22:26.652764Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [4c3b5315-94eacc14-61084315-852af404] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-04T02:22:26.653145Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [ReceiveMessage] requestId [4c3b5315-94eacc14-61084315-852af404] Not retrying GRPC response. 
Code: 400, Error: InvalidParameterValue 2026-01-04T02:22:26.653198Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [ReceiveMessage] requestId [4c3b5315-94eacc14-61084315-852af404] reply with status: STATUS_UNDEFINED message: MaxNumberOfMessages is greater than 10 2026-01-04T02:22:26.653404Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#40,[::1]:46348) <- (400 InvalidParameterValue, 85 bytes) 2026-01-04T02:22:26.653441Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#40,[::1]:46348) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ReceiveMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "MaxNumberOfMessages":2147483647, "WaitTimeSeconds":1, "QueueUrl":"/v1/5//Root/6/topic1/8/consumer" } 2026-01-04T02:22:26.653464Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#40,[::1]:46348) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: 4c3b5315-94eacc14-61084315-852af404 Content-Type: application/x-amz-json-1.1 Content-Length: 85 2026-01-04T02:22:26.653526Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#40,[::1]:46348) connection closed Http output full {"__type":"InvalidParameterValue","message":"MaxNumberOfMessages is greater than 10"} 2026-01-04T02:22:26.654525Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:46356) incoming connection opened 2026-01-04T02:22:26.654583Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:46356) -> (POST /Root, 110 bytes) 2026-01-04T02:22:26.655331Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d8f3:d2ee:17c:0:c0f3:d2ee:17c:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 402014cd-18cf759d-e5f3ddb3-af9b8294 2026-01-04T02:22:26.655629Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [402014cd-18cf759d-e5f3ddb3-af9b8294] got new request from [d8f3:d2ee:17c:0:c0f3:d2ee:17c:0] database '/Root' stream '' 2026-01-04T02:22:26.656166Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [402014cd-18cf759d-e5f3ddb3-af9b8294] [auth] Authorized successfully 2026-01-04T02:22:26.656202Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [402014cd-18cf759d-e5f3ddb3-af9b8294] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-04T02:22:26.656495Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [ReceiveMessage] requestId [402014cd-18cf759d-e5f3ddb3-af9b8294] Not retrying GRPC response. 
Code: 400, Error: InvalidParameterValue 2026-01-04T02:22:26.656565Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [ReceiveMessage] requestId [402014cd-18cf759d-e5f3ddb3-af9b8294] reply with status: STATUS_UNDEFINED message: MaxNumberOfMessages is less than 1 Http output full {"__type":"InvalidParameterValue","message":"MaxNumberOfMessages is less than 1"} 2026-01-04T02:22:26.656694Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:46356) <- (400 InvalidParameterValue, 81 bytes) 2026-01-04T02:22:26.656736Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:46356) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.ReceiveMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "MaxNumberOfMessages":-2147483648, "WaitTimeSeconds":1, "QueueUrl":"/v1/5//Root/6/topic1/8/consumer" } 2026-01-04T02:22:26.656758Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:46356) Response: HTTP/1.1 400 InvalidParameterValue Connection: close x-amzn-requestid: 402014cd-18cf759d-e5f3ddb3-af9b8294 Content-Type: application/x-amz-json-1.1 Content-Length: 81 2026-01-04T02:22:26.656828Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:46356) connection closed 2026-01-04T02:22:26.742295Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:26.742331Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:26.742344Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:26.742369Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:26.742382Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |81.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestSendMessageBadQueueUrl [GOOD] Test command err: 2026-01-04T02:22:12.537909Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326060809594748:2253];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:12.538302Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:12.926869Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:12.963665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:12.963779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:13.039117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:13.113449Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326060809594525:2081] 1767493332475072 != 1767493332475075 2026-01-04T02:22:13.130588Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:13.134721Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:13.211869Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:22:13.211892Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:22:13.211899Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:22:13.211987Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:13.484692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:13.492206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:22:13.517940Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:13.714999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:22:13.722897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:22:13.730592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:22:13.758714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-04T02:22:13.824560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:13.944701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:13.994496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.040736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 2026-01-04T02:22:14.053421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.089114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.127557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.186359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.249358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.282870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.326854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.232594Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326077989465373:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.232694Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.233100Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326077989465384:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.233164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326077989465386:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.233210Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.235270Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326077989465390:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.235372Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.237523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:22:16.250385Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326077989465389:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-01-04T02:22:16.317159Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326077989465443:3109] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:22:16.796639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.841096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/ ... ctness of the path } 2026-01-04T02:22:25.795214Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326116262507704:2443], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:25.795311Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:25.795336Z node 2 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:22:25.941521Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591326116262507701:2441]: Pool not found 2026-01-04T02:22:25.941703Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-04T02:22:26.194287Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591326116262507645:2436]: Pool not found 2026-01-04T02:22:26.195145Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-04T02:22:26.198205Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326120557475061:2456], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:26.198309Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7591326120557475062:2457], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:22:26.198362Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:26.201363Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326120557475065:2458], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:26.201494Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:26.448399Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591326120557475059:2455]: Pool not found 2026-01-04T02:22:26.448600Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-04T02:22:26.777248Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:57348) incoming connection opened 2026-01-04T02:22:26.777343Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:57348) -> (POST /Root, 52 bytes) 2026-01-04T02:22:26.777456Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [7840:51ec:727b:0:6040:51ec:727b:0] request [SendMessage] url [/Root] database [/Root] requestId: 32cd21c2-45c9f95-8bb05daf-7b48e7da 2026-01-04T02:22:26.777842Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [SendMessage] requestId [32cd21c2-45c9f95-8bb05daf-7b48e7da] got new request from [7840:51ec:727b:0:6040:51ec:727b:0] database '/Root' stream '' 2026-01-04T02:22:26.778343Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.schemas.getMetadata) 2026-01-04T02:22:26.778412Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.connect) 2026-01-04T02:22:26.778463Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.create) 2026-01-04T02:22:26.778507Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.select) 2026-01-04T02:22:26.778534Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.write) 2026-01-04T02:22:26.778557Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.streams.write) 2026-01-04T02:22:26.778604Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.list) 2026-01-04T02:22:26.793298Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.select now has a permanent error "Permission Denied" retryable:0 2026-01-04T02:22:26.797317Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (C9049D91) permission ydb.databases.list now has a valid subject "Service1_id@as" 2026-01-04T02:22:26.797392Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.schemas.getMetadata now has a permanent error "Permission Denied" retryable:0 2026-01-04T02:22:26.797419Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.connect now has a permanent error "Permission Denied" retryable:0 2026-01-04T02:22:26.797444Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.create now has a permanent error "Permission Denied" retryable:0 2026-01-04T02:22:26.797467Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.write now has a permanent error "Permission Denied" retryable:0 2026-01-04T02:22:26.797488Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) 
permission ydb.streams.write now has a permanent error "Permission Denied" retryable:0 2026-01-04T02:22:26.797599Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (C9049D91) () has now valid token of Service1_id@as 2026-01-04T02:22:26.798067Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [SendMessage] requestId [32cd21c2-45c9f95-8bb05daf-7b48e7da] [auth] Authorized successfully 2026-01-04T02:22:26.798122Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [SendMessage] requestId [32cd21c2-45c9f95-8bb05daf-7b48e7da] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-04T02:22:26.799819Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [SendMessage] requestId [32cd21c2-45c9f95-8bb05daf-7b48e7da] Not retrying GRPC response. Code: 400, Error: MissingParameter 2026-01-04T02:22:26.799919Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [SendMessage] requestId [32cd21c2-45c9f95-8bb05daf-7b48e7da] reply with status: STATUS_UNDEFINED message: No QueueUrl parameter. 2026-01-04T02:22:26.800197Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:57348) <- (400 MissingParameter, 64 bytes) 2026-01-04T02:22:26.800263Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:57348) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.SendMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "QueueUrl":"", "MessageBody":"MessageBody-0" } Http output full {"__type":"MissingParameter","message":"No QueueUrl parameter."} 2026-01-04T02:22:26.800301Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:57348) Response: HTTP/1.1 400 MissingParameter Connection: close x-amzn-requestid: 32cd21c2-45c9f95-8bb05daf-7b48e7da Content-Type: application/x-amz-json-1.1 Content-Length: 64 2026-01-04T02:22:26.800415Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:57348) connection closed 2026-01-04T02:22:26.801228Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:57354) incoming connection opened 2026-01-04T02:22:26.801285Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:57354) -> (POST /Root, 100 bytes) 2026-01-04T02:22:26.801407Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [1855:51ec:727b:0:55:51ec:727b:0] request [SendMessage] url [/Root] database [/Root] requestId: 228271f4-a848cffd-c2e213f-ce1c42ea 2026-01-04T02:22:26.801703Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [SendMessage] requestId [228271f4-a848cffd-c2e213f-ce1c42ea] got new request from [1855:51ec:727b:0:55:51ec:727b:0] database '/Root' stream '' 2026-01-04T02:22:26.802157Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [SendMessage] requestId [228271f4-a848cffd-c2e213f-ce1c42ea] [auth] Authorized successfully 2026-01-04T02:22:26.802195Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [SendMessage] requestId [228271f4-a848cffd-c2e213f-ce1c42ea] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-04T02:22:26.804829Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [SendMessage] requestId [228271f4-a848cffd-c2e213f-ce1c42ea] Not retrying GRPC response. 
Code: 400, Error: AWS.SimpleQueueService.NonExistentQueue 2026-01-04T02:22:26.804904Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [SendMessage] requestId [228271f4-a848cffd-c2e213f-ce1c42ea] reply with status: STATUS_UNDEFINED message: You do not have access or the '/Root/ExampleQueueName' does not exist 2026-01-04T02:22:26.805008Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:57354) <- (400 AWS.SimpleQueueService.NonExistentQueue, 134 bytes) 2026-01-04T02:22:26.805062Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:57354) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.SendMessage X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "QueueUrl":"/v1/5//Root/16/ExampleQueueName/13/user_consumer", "MessageBody":"MessageBody-0" } 2026-01-04T02:22:26.805091Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:57354) Response: HTTP/1.1 400 AWS.SimpleQueueService.NonExistentQueue Connection: close x-amzn-requestid: 228271f4-a848cffd-c2e213f-ce1c42ea Content-Type: application/x-amz-json-1.1 Content-Length: 134 2026-01-04T02:22:26.805178Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:57354) connection closed Http output full {"__type":"AWS.SimpleQueueService.NonExistentQueue","message":"You do not have access or the '/Root/ExampleQueueName' does not exist"} |81.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |81.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |81.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |82.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore/ydb-core-tx-schemeshard-ut_incremental_restore |82.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestSendMessageBatchLong [GOOD] Test command err: 2026-01-04T02:22:12.562231Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326061021201779:2174];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:12.562509Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:12.928505Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:12.941250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:12.941368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:12.974902Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:13.129565Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:13.148866Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326061021201625:2081] 1767493332399820 != 1767493332399823 2026-01-04T02:22:13.234288Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:13.280094Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:22:13.280124Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:22:13.280132Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:22:13.280260Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:13.512005Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:13.547797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:13.562614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:22:13.791629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:22:13.807097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:22:13.815065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:22:13.937603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.086802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.150585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-04T02:22:14.155941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.217253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-01-04T02:22:14.222745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.267075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.310044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.381088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.424332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.465178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.512929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.313090Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326078201072457:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.313226Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.313574Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326078201072469:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.313613Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326078201072470:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.313868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.318337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:22:16.337136Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326078201072473:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:22:16.403021Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326078201072524:3098] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:22:16.767763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.816970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.859972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.900053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ES ... cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:27.272964Z node 2 :PERSQUEUE DEBUG: partition.cpp:2480: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-04T02:22:27.273101Z node 2 :PERSQUEUE DEBUG: partition.cpp:3950: [72075186224037907][Partition][0][StateIdle] Schedule reply tx done 281474976715690 2026-01-04T02:22:27.273116Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:22:27.273127Z node 2 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037907][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:22:27.273140Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:27.273300Z node 2 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-04T02:22:27.275192Z node 2 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:22:27.275351Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037907][Partition][0][StateIdle] Initializing MLP Consumers: 1 2026-01-04T02:22:27.275364Z node 2 :PERSQUEUE INFO: partition_mlp.cpp:127: [72075186224037907][Partition][0][StateIdle] Updateing MLP consumer 'consumer' config 2026-01-04T02:22:27.275445Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:22:27.275463Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:27.275474Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:27.275485Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:27.275499Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:27.275509Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:27.275526Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:22:27.275837Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72075186224037907] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic1" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/topic1" YcCloudId: "cloud4" YcFolderId: "folder4" YdbDatabaseId: "database4" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: false DeadLetterPolicy: DEAD_LETTER_POLICY_UNSPECIFIED MaxProcessingAttempts: 0 DefaultProcessingTimeoutSeconds: 20 } MonitoringProjectId: "" 2026-01-04T02:22:27.275890Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:947: [PQ: 72075186224037907] metering mode METERING_MODE_REQUEST_UNITS 2026-01-04T02:22:27.275991Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037907] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-04T02:22:27.278607Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037907] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) { status: SUCCESS, issues: }ct.IsSuccess() = 1, ct.IsTransportError() = 0, ct.GetEndpoint() = [::]:6319 { status: SUCCESS, issues: }consumer 2026-01-04T02:22:27.298120Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:41712) incoming connection opened 2026-01-04T02:22:27.298202Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:41712) -> (POST /Root, 1406 bytes) 
2026-01-04T02:22:27.298312Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [9891:714f:2a7c:0:8091:714f:2a7c:0] request [SendMessageBatch] url [/Root] database [/Root] requestId: c6149770-ede0ce43-1d03e120-7cb69fc4 2026-01-04T02:22:27.299298Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [SendMessageBatch] requestId [c6149770-ede0ce43-1d03e120-7cb69fc4] got new request from [9891:714f:2a7c:0:8091:714f:2a7c:0] database '/Root' stream '' 2026-01-04T02:22:27.301224Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.schemas.getMetadata) 2026-01-04T02:22:27.301307Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.connect) 2026-01-04T02:22:27.301335Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.create) 2026-01-04T02:22:27.301361Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.select) 2026-01-04T02:22:27.301389Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.write) 2026-01-04T02:22:27.301415Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.streams.write) 2026-01-04T02:22:27.301443Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.list) 2026-01-04T02:22:27.312907Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.write now has a permanent error "Permission Denied" retryable:0 2026-01-04T02:22:27.320624Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.create now has a permanent error "Permission Denied" retryable:0 2026-01-04T02:22:27.320826Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.schemas.getMetadata now has a permanent error "Permission Denied" retryable:0 2026-01-04T02:22:27.320968Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.streams.write now has a permanent error "Permission Denied" retryable:0 2026-01-04T02:22:27.321023Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (C9049D91) permission ydb.databases.list now has a valid subject "Service1_id@as" 2026-01-04T02:22:27.321087Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.connect now has a permanent error "Permission Denied" retryable:0 2026-01-04T02:22:27.321134Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.select now has a permanent error "Permission Denied" retryable:0 2026-01-04T02:22:27.321218Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (C9049D91) () has now valid token of Service1_id@as 2026-01-04T02:22:27.321337Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [SendMessageBatch] requestId [c6149770-ede0ce43-1d03e120-7cb69fc4] [auth] Authorized successfully 2026-01-04T02:22:27.321365Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [SendMessageBatch] requestId [c6149770-ede0ce43-1d03e120-7cb69fc4] sending grpc request to '' database: '/Root' iam token size: 0 
2026-01-04T02:22:27.321902Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [SendMessageBatch] requestId [c6149770-ede0ce43-1d03e120-7cb69fc4] Not retrying GRPC response. Code: 400, Error: AWS.SimpleQueueService.TooManyEntriesInBatchRequest 2026-01-04T02:22:27.322054Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [SendMessageBatch] requestId [c6149770-ede0ce43-1d03e120-7cb69fc4] reply with status: STATUS_UNDEFINED message: The batch request contains more entries than permissible. 2026-01-04T02:22:27.322251Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:41712) <- (400 AWS.SimpleQueueService.TooManyEntriesInBatchRequest, 134 bytes) 2026-01-04T02:22:27.322330Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:41712) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.SendMessageBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "QueueUrl":"/v1/5//Root/6/topic1/8/consumer", "Entries": [ { "Id":"Id-1", "MessageGroupId":"MessageGroupId-1", "MessageBody":"MessageBody-1" }, { "Id":"Id-2", "MessageGroupId":"MessageGroupId-2", "MessageBody":"MessageBody-2" }, { "Id":"Id-3", "MessageGroupId":"MessageGroupId-3", "MessageBody":"MessageBody-3" }, { "Id":"Id-4", "MessageGroupId":"MessageGroupId-4", "MessageBody":"MessageBody-4" }, { "Id":"Id-5", "MessageGroupId":"MessageGroupId-5", "MessageBody":"MessageBody-5" }, { "Id":"Id-6", "MessageGroupId":"MessageGroupId-6", "MessageBody":"MessageBody-6" 2026-01-04T02:22:27.322369Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:41712) Response: HTTP/1.1 400 AWS.SimpleQueueService.TooManyEntriesInBatchRequest Connection: close x-amzn-requestid: c6149770-ede0ce43-1d03e120-7cb69fc4 Content-Type: application/x-amz-json-1.1 Content-Length: 134 2026-01-04T02:22:27.322457Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:41712) connection closed Http output full {"__type":"AWS.SimpleQueueService.TooManyEntriesInBatchRequest","message":"The batch request contains more entries than permissible."} 2026-01-04T02:22:27.372932Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:27.372965Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:27.372981Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:27.373007Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:27.373019Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:27.473241Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:27.473282Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:27.473296Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:27.473317Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
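For readers tracing the SendMessage / SendMessageBatch exchanges captured just above, here is a minimal sketch of how such a request could be reproduced against the HTTP proxy. It only mirrors what the request dumps in this log already show (a POST with `X-Amz-Target: AmazonSQS.SendMessageBatch`, `Content-Type: application/json`, and a JSON body carrying `QueueUrl` and `Entries`, with errors returned as `{"__type": ..., "message": ...}`); the endpoint address and queue path below are placeholders, not values taken from this run.

```python
import json
import urllib.error
import urllib.request

# Hypothetical endpoint and queue path -- substitute the real proxy address and
# the QueueUrl issued for the target database; the path format mirrors the dumps above.
ENDPOINT = "http://localhost:8080/Root"
QUEUE_URL = "/v1/5//Root/6/topic1/8/consumer"

def sqs_json_call(target: str, payload: dict) -> dict:
    """POST an SQS-style JSON request and decode the JSON reply (success or error)."""
    req = urllib.request.Request(
        ENDPOINT,
        data=json.dumps(payload).encode("utf-8"),
        headers={
            "Content-Type": "application/json",
            "X-Amz-Target": f"AmazonSQS.{target}",
        },
        method="POST",
    )
    try:
        with urllib.request.urlopen(req) as resp:
            return json.loads(resp.read())
    except urllib.error.HTTPError as err:
        # Errors arrive as {"__type": "...", "message": "..."}, e.g. the
        # AWS.SimpleQueueService.TooManyEntriesInBatchRequest reply logged above.
        return json.loads(err.read())

entries = [
    {"Id": f"Id-{i}", "MessageGroupId": f"MessageGroupId-{i}", "MessageBody": f"MessageBody-{i}"}
    for i in range(1, 4)
]
print(sqs_json_call("SendMessageBatch", {"QueueUrl": QUEUE_URL, "Entries": entries}))
```

The same helper shape covers the single SendMessage case that failed earlier in this log with MissingParameter when `QueueUrl` was empty, and sending more entries than the batch limit permits would reproduce the TooManyEntriesInBatchRequest reply shown above.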
2026-01-04T02:22:27.473329Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist >> TestSqsTopicHttpProxy::TestReceiveMessageReturnToQueue [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition >> StreamCreator::WithResolvedTimestamps |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition >> TSchemeShardTopicSplitMergeTest::MargeNotAdjacentRangePartitions [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitions [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitionsWithOverlap >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithDuplicatePartition [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlap >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] Test command err: 2026-01-04T02:22:14.061458Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326069300700071:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:14.062769Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:14.373961Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:14.415510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:14.415607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:14.476325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:14.531021Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:14.557912Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326069300700044:2081] 1767493334057300 != 1767493334057303 2026-01-04T02:22:14.645005Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:14.651538Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:22:14.651558Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:22:14.651584Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize 
from file: (empty maybe) 2026-01-04T02:22:14.651674Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:14.977190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:14.985845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:22:15.088078Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:15.295291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:22:15.301560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:22:15.303524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:22:15.325444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:22:15.400894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:15.624313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:15.674074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:15.731640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:15.771522Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:15.816911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:15.863708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:15.904654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:15.962631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.003131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:17.624769Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326082185603591:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:17.624844Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326082185603603:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:17.624887Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:17.625258Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326082185603605:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:17.625317Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:17.631100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:22:17.648722Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326082185603606:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:22:17.718743Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326082185603658:3105] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:22:18.100863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:18.130461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:18.169780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:18.201894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.c ... s before, last time 2026-01-04T02:22:27.000000Z 2026-01-04T02:22:27.768662Z node 2 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 4. All 1 blobs are from cache. 2026-01-04T02:22:27.768677Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1026: Topic 'topic1' partition 0 user consumer readTimeStamp done, result 1767493347765 queuesize 0 startOffset 0 2026-01-04T02:22:27.768683Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:42646) <- (200 , 436 bytes) 2026-01-04T02:22:27.768706Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-04T02:22:27.768742Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 3 count 3 size 175 from pos 0 cbcount 3 2026-01-04T02:22:27.768752Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037907' partition 0 offset 0 partno 0 count 3 parts 0 suffix '63' 2026-01-04T02:22:27.768799Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:42646) connection closed 2026-01-04T02:22:27.768806Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 Http output full {"Successful":[{"SequenceNumber":"0","Id":"Id-1","MD5OfMessageBody":"3bf7e6d806a0b8062135ae945eca30bf","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"},{"SequenceNumber":"0","Id":"Id-2","MD5OfMessageBody":"7034dd2039d12b6dd94a9e6dfb820b77","MessageId":"E43C2D66-7305-5166-8EE2-D86E421B5F98"},{"SequenceNumber":"0","Id":"Id-3","MD5OfMessageBody":"f23251df60f088df56a4be0a5fb1ae75","MessageId":"F4B995B7-701A-5815-BE20-755014677779"}]} 2026-01-04T02:22:27.769615Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#38,[::1]:42654) incoming connection opened 2026-01-04T02:22:27.769673Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#38,[::1]:42654) -> (POST /Root, 101 bytes) 2026-01-04T02:22:27.769761Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [38e3:fb3a:f37b:0:20e3:fb3a:f37b:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: d6049115-92fa21a6-601aff1f-2bcdddc3 2026-01-04T02:22:27.770098Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [d6049115-92fa21a6-601aff1f-2bcdddc3] got new request from [38e3:fb3a:f37b:0:20e3:fb3a:f37b:0] database '/Root' stream '' 2026-01-04T02:22:27.770450Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [d6049115-92fa21a6-601aff1f-2bcdddc3] [auth] Authorized successfully 2026-01-04T02:22:27.770457Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [d6049115-92fa21a6-601aff1f-2bcdddc3] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-04T02:22:27.771078Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:45: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPReadRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 WaitDeadlineMilliseconds: 1767493352770 VisibilityDeadlineMilliseconds: 1767493377770 MaxNumberOfMessages: 10 2026-01-04T02:22:27.771800Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic1' requestId: 2026-01-04T02:22:27.771824Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037907] got client message batch for topic 'topic1' partition 0 2026-01-04T02:22:27.771895Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 5 Topic 'topic1' partition 0 user consumer offset 0 partno 0 count 3 size 26214400 endOffset 3 max time lag 0ms effective offset 0 2026-01-04T02:22:27.772036Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 5 added 1 blobs, size 195 count 3 last offset 0, current partition end offset: 3 2026-01-04T02:22:27.772054Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 5. Send blob request. 2026-01-04T02:22:27.772103Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 3 parts_count 0 source 1 size 195 accessed 2 times before, last time 2026-01-04T02:22:27.000000Z 2026-01-04T02:22:27.772129Z node 2 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 5. All 1 blobs are from cache. 
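The SendMessageBatch reply just above returns an MD5OfMessageBody per entry, and the ReceiveMessage exchange that follows returns an MD5OfBody alongside each Body and a ReceiptHandle that the subsequent DeleteMessageBatch passes back. Below is a small, hedged client-side sketch of the integrity check a consumer could run on such a response; it assumes (as in stock SQS) that MD5OfBody is the hex MD5 of the raw body bytes, and the sample response is an abridged stand-in built in place, not copied from this run.

```python
import hashlib

def verify_bodies(receive_message_response: dict) -> list[str]:
    """Return ReceiptHandles of messages whose MD5OfBody matches their Body.

    Assumes MD5OfBody is the hex MD5 digest of the UTF-8 body, as in standard SQS.
    """
    ok_handles = []
    for msg in receive_message_response.get("Messages", []):
        digest = hashlib.md5(msg["Body"].encode("utf-8")).hexdigest()
        if digest == msg["MD5OfBody"]:
            ok_handles.append(msg["ReceiptHandle"])
    return ok_handles

# Abridged, hypothetical response in the same shape as the one logged below.
sample = {
    "Messages": [
        {
            "MD5OfBody": hashlib.md5(b"MessageBody-1").hexdigest(),
            "ReceiptHandle": "CAAQAA==",
            "Body": "MessageBody-1",
            "MessageId": "00000000-0000-0000-0000-000000000000",
        }
    ]
}
# Verified handles would then feed a DeleteMessageBatch request, mirroring the
# delete-id entries and ReceiptHandleIsInvalid failure seen later in this log.
print(verify_bodies(sample))
```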
2026-01-04T02:22:27.772155Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-04T02:22:27.772173Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037907' partition 0 offset 0 partno 0 count 3 parts 0 suffix '63' 2026-01-04T02:22:27.772225Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 3 count 3 size 175 from pos 0 cbcount 3 2026-01-04T02:22:27.772294Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-04T02:22:27.773205Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [d6049115-92fa21a6-601aff1f-2bcdddc3] reply ok 2026-01-04T02:22:27.773443Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:42654) <- (200 , 641 bytes) Http output full {"Messages":[{"MD5OfBody":"3bf7e6d806a0b8062135ae945eca30bf","Attributes":{"SentTimestamp":"1767493347765","MessageGroupId":"MessageGroupId-1"},"ReceiptHandle":"CAAQAA==","Body":"MessageBody-1","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"},{"MD5OfBody":"7034dd2039d12b6dd94a9e6dfb820b77","Attributes":{"SentTimestamp":"1767493347766"},"ReceiptHandle":"CAAQAQ==","Body":"MessageBody-2","MessageId":"E43C2D66-7305-5166-8EE2-D86E421B5F98"},{"MD5OfBody":"f23251df60f088df56a4be0a5fb1ae75","Attributes":{"SentTimestamp":"1767493347766"},"ReceiptHandle":"CAAQAg==","Body":"MessageBody-3","MessageId":"F4B995B7-701A-5815-BE20-755014677779"}]} 2026-01-04T02:22:27.773539Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:42654) connection closed 2026-01-04T02:22:27.774360Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#38,[::1]:42660) incoming connection opened 2026-01-04T02:22:27.774412Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#38,[::1]:42660) -> (POST /Root, 397 bytes) 2026-01-04T02:22:27.774545Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [38f8:fb3a:f37b:0:20f8:fb3a:f37b:0] request [DeleteMessageBatch] url [/Root] database [/Root] requestId: 9bbe6086-86690698-30216ca2-aea4d471 2026-01-04T02:22:27.775005Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [DeleteMessageBatch] requestId [9bbe6086-86690698-30216ca2-aea4d471] got new request from [38f8:fb3a:f37b:0:20f8:fb3a:f37b:0] database '/Root' stream '' 2026-01-04T02:22:27.775514Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [DeleteMessageBatch] requestId [9bbe6086-86690698-30216ca2-aea4d471] [auth] Authorized successfully 2026-01-04T02:22:27.775531Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [DeleteMessageBatch] requestId [9bbe6086-86690698-30216ca2-aea4d471] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-04T02:22:27.776594Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:50: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPCommitRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 Offset: 2 Offset: 0 Offset: 1 2026-01-04T02:22:27.777319Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:27.777336Z node 2 :PERSQUEUE DEBUG: partition.cpp:2409: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:22:27.777354Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:22:27.777363Z node 2 
:PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:27.777376Z node 2 :PERSQUEUE DEBUG: partition.cpp:2473: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:22:27.777411Z node 2 :PERSQUEUE DEBUG: partition.cpp:3880: [72075186224037907][Partition][0][StateIdle] Topic 'topic1' partition 0 user consumer offset is set to 3 (startOffset 0) session 2026-01-04T02:22:27.777423Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:22:27.777433Z node 2 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037907][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:22:27.777446Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:27.777569Z node 2 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:22:27.778702Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DeleteMessageBatch] requestId [9bbe6086-86690698-30216ca2-aea4d471] reply ok 2026-01-04T02:22:27.779027Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:42660) <- (200 , 219 bytes) 2026-01-04T02:22:27.779130Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:42660) connection closed Http output full {"Successful":[{"Id":"delete-id-2"},{"Id":"delete-id-0"},{"Id":"delete-id-1"}],"Failed":[{"Message":"The specified receipt handle isn't valid.","Id":"delete-invalid","Code":"ReceiptHandleIsInvalid","SenderFault":true}]} 2026-01-04T02:22:27.779674Z node 2 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:22:27.779722Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:934: [72075186224037907][Partition][0][StateIdle] Topic 'topic1' partition 0 user consumer readTimeStamp for offset 3 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:22:27.779756Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:22:27.779777Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:27.779788Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:27.779797Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:27.779811Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:27.779821Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:27.779841Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:22:27.825620Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:27.825654Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, 
PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:27.825667Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:27.825687Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:27.825700Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |82.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> TestSqsTopicHttpProxy::TestSendMessageBatchEmpty [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToDifferentOrder [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidSingleChild |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_all_types-pk_types12-all_types12-index12---] [GOOD] |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v1-std] [GOOD] |82.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |82.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TKqpScheduler::QueriesWithFairShareOverlimit+AllowOverlimit [GOOD] >> TKqpScheduler::QueriesWithFairShareOverlimit-AllowOverlimit [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithWrongBoundary [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition >> TKqpScheduler::WeightedPools [GOOD] >> TKqpScheduler::WeightedQueries [GOOD] >> TKqpScheduler::ZeroQueries [GOOD] >> TKqpScheduler::ZeroWeightDatabasePoolQuery [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge >> TKqpScanData::UnboxedValueSize >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithTotalOverlap [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlap >> TKqpScanData::UnboxedValueSize [GOOD] >> TKqpScanData::EmptyColumnsAndNonEmptyArrowBatch [GOOD] >> TKqpScanData::FailOnUnsupportedPgType [GOOD] >> TestSqsTopicHttpProxy::TestDeleteMessageInvalid [GOOD] >> TKqpScheduler::DemandIsCutOffByLimit [GOOD] >> TKqpScheduler::AddUpdateQueries >> TestSqsTopicHttpProxy::TestReceiveMessage [GOOD] >> TKqpScheduler::AddUpdateQueries [GOOD] >> TKqpScheduler::DeleteQueries [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitionsWithOverlap [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitionsMixed >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] >> 
TSchemeShardTopicSplitMergeTest::SplitInactivePartition |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::WeightedQueries [GOOD] |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::QueriesWithFairShareOverlimit-AllowOverlimit [GOOD] |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::LeftFairShareIsDistributed [GOOD] >> TKqpScheduler::MultipleDatabasesPoolsQueries [GOOD] >> TKqpScheduler::ZeroLimits [GOOD] >> TKqpScheduler::ZeroLimitDbWithNonZeroPools [GOOD] |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::ZeroWeightDatabasePoolQuery [GOOD] |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageReturnToQueue [GOOD] Test command err: 2026-01-04T02:22:11.710180Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326056172416325:2239];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:11.710239Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:12.245946Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:12.252975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:12.253102Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:12.331692Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:12.503963Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.118059s 2026-01-04T02:22:12.504182Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:12.512197Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.126286s 2026-01-04T02:22:12.523231Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326056172416111:2081] 1767493331605370 != 1767493331605373 2026-01-04T02:22:12.655012Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:12.702593Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:22:12.702615Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-04T02:22:12.702625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:22:12.702697Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:12.750018Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:13.006351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:13.013593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:22:13.260888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:22:13.273594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:22:13.282512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:22:13.318400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:22:13.421134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:13.584429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:13.643827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-04T02:22:13.648845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:13.753122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:13.813352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:13.900355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:13.952903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:13.997136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.069492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.119499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.135900Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326077647254253:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.136077Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.136459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326077647254265:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.136503Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326077647254266:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.136631Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:16.141061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:22:16.155225Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326077647254269:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:22:16.240842Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326077647254320:3108] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:22:16.680471Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326056172416325:2239];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:16.680578Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:22:16.834637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.875217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... n and tx pending commits 2026-01-04T02:22:28.739136Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:28.739147Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:28.839409Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:28.839438Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:28.839447Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:28.839458Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:28.839468Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:28.939778Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:28.939816Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:28.939827Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:28.939849Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:28.939860Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:29.040105Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:29.040135Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:29.040146Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:29.040163Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:29.040174Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:29.140473Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:29.140499Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:29.140508Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:29.140521Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:29.140529Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:29.240818Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:29.240850Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:29.240865Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:29.240895Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:29.240906Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:29.341147Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:29.341186Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:29.341197Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:29.341215Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:29.341223Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:29.441531Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:29.441573Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:29.441587Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:29.441605Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:29.441616Z node 2 
:PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:29.517660Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:72: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPConsumerState Consumer: "consumer" CountersValues: 0 CountersValues: 1 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 MessageLocksValues: 1 MessageLocksValues: 1 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 1 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 DeletedByRetentionPolicy: 0 DeletedByDeadlinePolicy: 0 DeletedByMovedToDLQ: 0 CPUUsage: 64 2026-01-04T02:22:29.518566Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic1' requestId: 2026-01-04T02:22:29.518595Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037907] got client message batch for topic 'topic1' partition 0 2026-01-04T02:22:29.518684Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 6 Topic 'topic1' partition 0 user consumer offset 0 partno 0 count 1 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2026-01-04T02:22:29.518813Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 6 added 1 blobs, size 72 count 1 last offset 0, current partition end offset: 1 2026-01-04T02:22:29.518834Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 6. Send blob request. 2026-01-04T02:22:29.518869Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 72 accessed 3 times before, last time 2026-01-04T02:22:26.000000Z 2026-01-04T02:22:29.518891Z node 2 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 6. All 1 blobs are from cache. 2026-01-04T02:22:29.518911Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-04T02:22:29.518964Z node 2 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 54 from pos 0 cbcount 1 2026-01-04T02:22:29.518964Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037907' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-04T02:22:29.519031Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-04T02:22:29.520144Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [eaf1b66a-17cd23af-5097f397-b4e51f94] reply ok 2026-01-04T02:22:29.520316Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:36954) <- (200 , 211 bytes) 2026-01-04T02:22:29.520416Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:36954) connection closed Http output full {"Messages":[{"MD5OfBody":"94a29778a1f1f41bf68142847b2e6106","Attributes":{"SentTimestamp":"1767493346573"},"ReceiptHandle":"CAAQAA==","Body":"MessageBody-0","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"}]} jsonReceived = { "Messages": [ { "Attributes": { "SentTimestamp":"1767493346573" }, "Body":"MessageBody-0", "MD5OfBody":"94a29778a1f1f41bf68142847b2e6106", "MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125", "ReceiptHandle":"CAAQAA==" } ] } 2026-01-04T02:22:29.541920Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:29.541959Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:29.541971Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:29.541988Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:29.542001Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:29.642342Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:29.642372Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:29.642380Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:29.642393Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:29.642401Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:29.742594Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:29.742624Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:29.742636Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:29.742652Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:29.742663Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::DeleteQueries [GOOD] |82.0%| [TM] 
{BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScanData::FailOnUnsupportedPgType [GOOD] >> TSchemeShardTopicSplitMergeTest::MargeInactivePartitions [GOOD] >> TestSqsTopicHttpProxy::TestDeleteMessageIdempotence >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidSingleChild [GOOD] |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::ZeroLimitDbWithNonZeroPools [GOOD] |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/runtime/ut/unittest >> TKqpScheduler::MultipleDatabasesPoolsQueries [GOOD] >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/runtime/ut/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit >> TProxyActorTest::TestAttachSession [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestSendMessageBatchEmpty [GOOD] Test command err: 2026-01-04T02:22:15.092439Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326070286973900:2263];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:15.093027Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:15.436832Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:15.479396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:15.479492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:15.535159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:15.608415Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:15.626359Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326070286973655:2081] 1767493335058463 != 1767493335058466 2026-01-04T02:22:15.657230Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:15.750556Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-01-04T02:22:15.750580Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:22:15.750586Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:22:15.750651Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:15.996359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:16.006344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:22:16.089562Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:16.232662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:22:16.241506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:22:16.246295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:22:16.368521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.505086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.554704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.597746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-01-04T02:22:16.602851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.638549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.671515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.712730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.759621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.805211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.851541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:18.700789Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326083171877208:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:18.700789Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326083171877197:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:18.700903Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:18.705198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:22:18.705959Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326083171877212:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:18.706118Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:18.719188Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326083171877211:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:22:18.802681Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326083171877264:3102] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:22:19.280288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:19.341325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:19.395483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:19.443943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.c ... Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-04T02:22:30.255068Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:22:30.255385Z node 2 :PERSQUEUE DEBUG: partition.cpp:1433: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1767493350294, TxId 281474976715690 2026-01-04T02:22:30.255418Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:30.255431Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:22:30.255441Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:30.255468Z node 2 :PERSQUEUE DEBUG: partition.cpp:2480: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-04T02:22:30.255635Z node 2 :PERSQUEUE DEBUG: partition.cpp:3950: [72075186224037907][Partition][0][StateIdle] Schedule reply tx done 281474976715690 2026-01-04T02:22:30.255653Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:22:30.255684Z node 2 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037907][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:22:30.255700Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:30.256010Z node 2 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:22:30.257638Z node 2 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:22:30.257873Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037907][Partition][0][StateIdle] Initializing MLP Consumers: 1 2026-01-04T02:22:30.257888Z node 2 :PERSQUEUE INFO: partition_mlp.cpp:127: [72075186224037907][Partition][0][StateIdle] Updateing MLP consumer 'consumer' config 2026-01-04T02:22:30.257992Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:22:30.258015Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:30.258027Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.258037Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:30.258065Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.258080Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:30.258102Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:22:30.258530Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72075186224037907] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 LifetimeSeconds: 86400 
SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic1" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/topic1" YcCloudId: "cloud4" YcFolderId: "folder4" YdbDatabaseId: "database4" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } Consumers { Name: "consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: false DeadLetterPolicy: DEAD_LETTER_POLICY_UNSPECIFIED MaxProcessingAttempts: 0 DefaultProcessingTimeoutSeconds: 20 } MonitoringProjectId: "" 2026-01-04T02:22:30.258594Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:947: [PQ: 72075186224037907] metering mode METERING_MODE_REQUEST_UNITS 2026-01-04T02:22:30.258726Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037907] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-04T02:22:30.262237Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037907] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) { status: SUCCESS, issues: }ct.IsSuccess() = 1, ct.IsTransportError() = 0, ct.GetEndpoint() = [::]:15515 { status: SUCCESS, issues: }consumer 2026-01-04T02:22:30.279984Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#38,[::1]:56520) incoming connection opened 2026-01-04T02:22:30.280098Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#38,[::1]:56520) -> (POST /Root, 76 bytes) 2026-01-04T02:22:30.280225Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [78cc:bdb6:c97b:0:60cc:bdb6:c97b:0] request [SendMessageBatch] url [/Root] database [/Root] requestId: bc35a56d-97891073-5ad1444e-2b0c4dc0 2026-01-04T02:22:30.280586Z node 2 :HTTP_PROXY INFO: http_req.cpp:1324: http request [SendMessageBatch] requestId [bc35a56d-97891073-5ad1444e-2b0c4dc0] got new request from [78cc:bdb6:c97b:0:60cc:bdb6:c97b:0] database '/Root' stream '' 2026-01-04T02:22:30.281067Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.schemas.getMetadata) 2026-01-04T02:22:30.281145Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.connect) 2026-01-04T02:22:30.281175Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.create) 2026-01-04T02:22:30.281199Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.select) 2026-01-04T02:22:30.281232Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.tables.write) 2026-01-04T02:22:30.281255Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.streams.write) 2026-01-04T02:22:30.281310Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (C9049D91) asking for AccessServiceAuthorization(ydb.databases.list) 2026-01-04T02:22:30.289022Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.write now has a permanent error "Permission Denied" 
retryable:0 2026-01-04T02:22:30.292456Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (C9049D91) permission ydb.databases.list now has a valid subject "Service1_id@as" 2026-01-04T02:22:30.292716Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.create now has a permanent error "Permission Denied" retryable:0 2026-01-04T02:22:30.292745Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.databases.connect now has a permanent error "Permission Denied" retryable:0 2026-01-04T02:22:30.292900Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.tables.select now has a permanent error "Permission Denied" retryable:0 2026-01-04T02:22:30.292932Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.schemas.getMetadata now has a permanent error "Permission Denied" retryable:0 2026-01-04T02:22:30.292956Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (C9049D91) permission ydb.streams.write now has a permanent error "Permission Denied" retryable:0 2026-01-04T02:22:30.293043Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (C9049D91) () has now valid token of Service1_id@as 2026-01-04T02:22:30.293186Z node 2 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [SendMessageBatch] requestId [bc35a56d-97891073-5ad1444e-2b0c4dc0] [auth] Authorized successfully 2026-01-04T02:22:30.293241Z node 2 :HTTP_PROXY INFO: http_req.cpp:1077: http request [SendMessageBatch] requestId [bc35a56d-97891073-5ad1444e-2b0c4dc0] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-04T02:22:30.293669Z node 2 :HTTP_PROXY DEBUG: http_req.cpp:1254: http request [SendMessageBatch] requestId [bc35a56d-97891073-5ad1444e-2b0c4dc0] Not retrying GRPC response. Code: 400, Error: AWS.SimpleQueueService.EmptyBatchRequest 2026-01-04T02:22:30.293787Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [SendMessageBatch] requestId [bc35a56d-97891073-5ad1444e-2b0c4dc0] reply with status: STATUS_UNDEFINED message: The batch request doesn't contain any entries. 
2026-01-04T02:22:30.294037Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#38,[::1]:56520) <- (400 AWS.SimpleQueueService.EmptyBatchRequest, 112 bytes) 2026-01-04T02:22:30.294116Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#38,[::1]:56520) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: AmazonSQS.SendMessageBatch X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "QueueUrl":"/v1/5//Root/6/topic1/8/consumer", "Entries": [ ] } 2026-01-04T02:22:30.294165Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#38,[::1]:56520) Response: HTTP/1.1 400 AWS.SimpleQueueService.EmptyBatchRequest Connection: close x-amzn-requestid: bc35a56d-97891073-5ad1444e-2b0c4dc0 Content-Type: application/x-amz-json-1.1 Content-Length: 112 Http output full {"__type":"AWS.SimpleQueueService.EmptyBatchRequest","message":"The batch request doesn't contain any entries."} 2026-01-04T02:22:30.294293Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#38,[::1]:56520) connection closed 2026-01-04T02:22:30.357227Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:30.357264Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.357277Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:30.357297Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.357310Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:30.457587Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:30.457627Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.457640Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:30.457659Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.457673Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> StreamCreator::Basic [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlap [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlapAndCreateRootLevelSibling ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithOnePartitionToInvalidSingleChild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:22:27.191194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:22:27.191306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:27.191352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:22:27.191391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:22:27.191429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:22:27.191463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:22:27.191540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:27.191618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:22:27.192599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:22:27.192919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:22:27.295452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:22:27.295531Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:27.310918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:22:27.311183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:22:27.311387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:22:27.321421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:22:27.321667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:22:27.322510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:27.322808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:22:27.326513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:27.326726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:22:27.327964Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:27.328049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:27.328199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:22:27.328249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:27.328354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:22:27.328554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:22:27.493474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.497187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.497327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.497373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.497425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.497558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.497633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.497697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.497786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.497907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.497986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.498083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.498160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: 
ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.498243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.498337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-04T02:22:31.558614Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2026-01-04T02:22:31.558773Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 250 2026-01-04T02:22:31.558853Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 250 2026-01-04T02:22:31.558912Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-04T02:22:31.558956Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-04T02:22:31.559130Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-04T02:22:31.559299Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:22:31.559351Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:22:31.565025Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:22:31.565399Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:31.565476Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path 
id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:22:31.565666Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:22:31.567123Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:31.567219Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2213], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-04T02:22:31.567284Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2213], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-04T02:22:31.568107Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:22:31.568176Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-04T02:22:31.568299Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:22:31.568340Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:22:31.568387Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:22:31.568424Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:22:31.568473Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-04T02:22:31.568522Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:22:31.568590Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-04T02:22:31.568631Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-04T02:22:31.568835Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:22:31.568887Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 1 2026-01-04T02:22:31.568938Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-04T02:22:31.568975Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-04T02:22:31.569975Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:22:31.570087Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:22:31.570133Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:22:31.570177Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-04T02:22:31.570226Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:22:31.571077Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:22:31.571207Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:22:31.571240Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:22:31.571271Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-04T02:22:31.571304Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:22:31.571387Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2026-01-04T02:22:31.571437Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:823:2790] 2026-01-04T02:22:31.575847Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:22:31.576147Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:22:31.576228Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:22:31.576273Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:954:2890] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 0 SplitBoundary: "\177" ChildPartitionIds: 1 } TestModificationResults wait txId: 105 2026-01-04T02:22:31.579974Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" 
OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 0 SplitBoundary: "\177" ChildPartitionIds: 1 } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:22:31.580215Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:22:31.580400Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Invalid number of child partitions: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:31.582550Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Invalid number of child partitions: 1" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:31.582804Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid number of child partitions: 1, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-04T02:22:31.583115Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-04T02:22:31.583159Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-04T02:22:31.583559Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-04T02:22:31.583649Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:22:31.583694Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:996:2924] TestWaitNotification: OK eventTxId 105 >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitTwoPartitionsMixed [GOOD] >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitInactivePartition >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 |82.0%| [TA] $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |82.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestAttachSession [GOOD] |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessage [GOOD] Test command err: 2026-01-04T02:22:15.109226Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326073102156761:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:15.109757Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:15.169412Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:22:15.683785Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:15.697919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:15.698012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:15.740434Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:15.880626Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:15.987137Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:16.063175Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:22:16.063201Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:22:16.063212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:22:16.063292Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:16.132479Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:16.288385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:16.295080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:22:16.542348Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:22:16.550845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:22:16.559194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:22:16.584035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:22:16.670415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.798142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.852438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-04T02:22:16.857161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.908929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.943497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:16.981024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:17.013211Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:17.048913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:17.084721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:17.125173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:18.939552Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326085987060265:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:18.939552Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326085987060254:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:18.939675Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:18.940286Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326085987060269:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:18.940340Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:18.944616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:22:18.964224Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326085987060268:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:22:19.064385Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326090282027618:3101] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:22:19.550650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:19.629124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:19.668608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:19.710639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo ... EUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:30.159374Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:72: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPConsumerState Consumer: "consumer" CountersValues: 0 CountersValues: 1 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 MessageLocksValues: 1 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 DeletedByRetentionPolicy: 0 DeletedByDeadlinePolicy: 0 DeletedByMovedToDLQ: 0 CPUUsage: 1716 2026-01-04T02:22:30.174018Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:30.174051Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.174062Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:30.174079Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, 
PendingWrites: 0 2026-01-04T02:22:30.174089Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:30.274371Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:30.274401Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.274412Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:30.274429Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.274440Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:30.375327Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:30.375355Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.375363Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:30.375377Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.375388Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:30.475871Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:30.475909Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.475923Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:30.475943Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.475953Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:30.575967Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:30.576002Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.576013Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:30.576029Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.576037Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:30.676431Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:30.676462Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 
0 2026-01-04T02:22:30.676474Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:30.676490Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.676500Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:30.776663Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:30.776697Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.776707Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:30.776724Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.776734Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:30.877033Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:30.877071Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.877082Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:30.877099Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.877110Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:30.977362Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:30.977401Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.977412Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:30.977428Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:30.977440Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:31.077696Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:31.077728Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:31.077752Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:31.077772Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:31.077784Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 
2026-01-04T02:22:31.160504Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:72: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPConsumerState Consumer: "consumer" CountersValues: 0 CountersValues: 1 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 0 MessageLocksValues: 1 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 DeletedByRetentionPolicy: 0 DeletedByDeadlinePolicy: 0 DeletedByMovedToDLQ: 0 CPUUsage: 90 2026-01-04T02:22:31.160807Z node 2 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [8d8f90a1-29d0b204-4bded0b5-84af6a79] reply ok 2026-01-04T02:22:31.161025Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:49000) <- (200 , 2 bytes) 2026-01-04T02:22:31.161138Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:49000) connection closed Http output full {} 2026-01-04T02:22:31.182927Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:31.182963Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:31.182976Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:31.182994Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:31.183005Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:31.282510Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:31.282540Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:31.282552Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:31.282570Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:31.282582Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |82.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |82.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut |82.0%| [TA] {RESULT} $(B)/ydb/core/kqp/runtime/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |82.0%| [LD] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/ydb-core-sys_view-partition_stats-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithWrongPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:22:27.662709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:22:27.662799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:27.662842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:22:27.662878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:22:27.662914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:22:27.662964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:22:27.663029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:27.663106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:22:27.663993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:22:27.664281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:22:27.759128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:22:27.759173Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:27.776251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:22:27.776466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:22:27.776658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:22:27.795941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:22:27.796190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:22:27.796937Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:27.797235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:22:27.804417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:27.804634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:22:27.806025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:27.806093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:27.806221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:22:27.806278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:27.806381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:22:27.806580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:22:27.941732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.942538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.942641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.942691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:22:27.942737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.942779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.942862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.942912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.942993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.943043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.943081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.943120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.943158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.943501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:27.943558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 2:22:31.971696Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-04T02:22:31.991746Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409548, partId: 0 2026-01-04T02:22:31.991920Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 250 2026-01-04T02:22:31.992002Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 104 Step: 250 2026-01-04T02:22:31.992062Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-04T02:22:31.992122Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-04T02:22:31.992351Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-04T02:22:31.992543Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:22:31.992615Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:22:31.997349Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:22:31.998734Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:31.998801Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:22:31.999113Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:22:31.999350Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:31.999404Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2213], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-04T02:22:31.999474Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2213], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-04T02:22:31.999791Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:22:31.999866Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-04T02:22:31.999980Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:22:32.000025Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:22:32.000091Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:22:32.000129Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:22:32.000174Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-04T02:22:32.000230Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:22:32.000281Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-04T02:22:32.000322Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-04T02:22:32.000489Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:22:32.000550Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, 
subscribers: 1 2026-01-04T02:22:32.000593Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-04T02:22:32.000627Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-04T02:22:32.002265Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:22:32.002375Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:22:32.002421Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:22:32.002470Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-04T02:22:32.002515Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:22:32.005076Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:22:32.005150Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:22:32.005190Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:22:32.005215Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-04T02:22:32.005244Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:22:32.005317Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 1 2026-01-04T02:22:32.005352Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:823:2790] 2026-01-04T02:22:32.009038Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:22:32.009140Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 104 2026-01-04T02:22:32.009250Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:22:32.009315Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [3:954:2890] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } TestModificationResults wait txId: 105 2026-01-04T02:22:32.013125Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 7 SplitBoundary: "W" } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:22:32.013378Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:22:32.013564Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 7, at schemeshard: 72057594046678944 2026-01-04T02:22:32.019467Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 7" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:32.019764Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 7, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-04T02:22:32.020156Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-04T02:22:32.020211Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-04T02:22:32.020626Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-04T02:22:32.020734Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:22:32.020781Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1010:2928] TestWaitNotification: OK eventTxId 105 |82.0%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> StreamCreator::WithResolvedTimestamps [GOOD] >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> 
StreamCreator::Basic [GOOD] Test command err: 2026-01-04T02:22:28.654216Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326126543449183:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:28.654388Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:28.877903Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:28.887601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:28.887764Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:28.987157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:29.030849Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:29.036412Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326126543449143:2081] 1767493348653047 != 1767493348653050 2026-01-04T02:22:29.096126Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:29.292820Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:22:29.292848Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:22:29.292861Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:22:29.292965Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:29.551858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:29.559606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:29.661166Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:29.684922Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false 
Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-04T02:22:29.684958Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:22:29.685503Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:22:31.869210Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767493349650, tx_id: 281474976715658 } } } 2026-01-04T02:22:31.869621Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-04T02:22:31.871149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:31.872277Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2026-01-04T02:22:31.872292Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2026-01-04T02:22:31.900380Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2026-01-04T02:22:31.900481Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-04T02:22:31.901373Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2026-01-04T02:22:31.999014Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][1:7591326139428352167:2337] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:41:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-01-04T02:22:32.009445Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2026-01-04T02:22:32.009468Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2026-01-04T02:22:32.027964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:22:32.038548Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2026-01-04T02:22:32.038574Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::SplitInactivePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:22:27.994382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:22:27.994505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:27.994568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:22:27.994626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:22:27.994668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:22:27.994704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:22:27.994762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:27.994843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:22:27.995899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:22:27.996224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:22:28.107463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:22:28.107523Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded 2026-01-04T02:22:28.124572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:22:28.128890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:22:28.129130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:22:28.135982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:22:28.136232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:22:28.136836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:28.137030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:22:28.140170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:28.140355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:22:28.141693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:28.141761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:28.141897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:22:28.141941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:28.141992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:22:28.142127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:22:28.278340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.281649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.281821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.281927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.282016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.282084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.282135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.282185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.282254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.282298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 
281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.282341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.282388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.282451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.282500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.282550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
HARD DEBUG: schemeshard__operation_common_pq.cpp:659: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2026-01-04T02:22:32.765819Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-04T02:22:32.782176Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-04T02:22:32.782530Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-04T02:22:32.782582Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-04T02:22:32.783085Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2026-01-04T02:22:32.783146Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2026-01-04T02:22:32.783188Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2026-01-04T02:22:32.823511Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 300, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:32.823629Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 300 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:32.823683Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:673: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 300, at tablet: 72057594046678944 2026-01-04T02:22:32.823741Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-04T02:22:32.841979Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2026-01-04T02:22:32.842167Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 300 2026-01-04T02:22:32.842252Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 300 2026-01-04T02:22:32.842315Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-04T02:22:32.842367Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-04T02:22:32.842547Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-04T02:22:32.842749Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:22:32.849465Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:22:32.849947Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:32.850024Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:22:32.850365Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:32.850421Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2213], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-04T02:22:32.850738Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:22:32.850807Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-04T02:22:32.850932Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-04T02:22:32.850979Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:22:32.851029Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-04T02:22:32.851073Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:22:32.851118Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-04T02:22:32.851171Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:22:32.851219Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-04T02:22:32.851263Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-04T02:22:32.851445Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:22:32.851495Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2026-01-04T02:22:32.851532Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 
72057594046678944, LocalPathId: 39], 3 2026-01-04T02:22:32.853508Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:22:32.853626Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:22:32.853675Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:22:32.853720Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-04T02:22:32.853766Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:22:32.853899Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2026-01-04T02:22:32.853947Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [3:823:2790] 2026-01-04T02:22:32.861831Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:22:32.863449Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:22:32.863524Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1038:2955] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } TestModificationResults wait txId: 106 2026-01-04T02:22:32.867842Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:22:32.868130Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2026-01-04T02:22:32.868354Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid partition status: 2, at schemeshard: 72057594046678944 2026-01-04T02:22:32.873970Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid partition status: 2" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:32.874302Z node 3 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid partition status: 2, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-04T02:22:32.874695Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-04T02:22:32.874748Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-04T02:22:32.875208Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-04T02:22:32.875320Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-04T02:22:32.875371Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:1090:2992] TestWaitNotification: OK eventTxId 106 >> listing_batching.py::TestListingBatching::test_listing_batching_monitoring [GOOD] |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitInactivePartition [GOOD] >> TControlPlaneProxyShouldPassHids::ShouldCheckScenario [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateQuery |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlapAndCreateRootLevelSibling [GOOD] >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::DisableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:22:27.935654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:22:27.935739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:27.935790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:22:27.935830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: 
using default configuration 2026-01-04T02:22:27.935873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:22:27.935915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:22:27.935987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:27.936057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:22:27.936867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:22:27.937174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:22:28.029685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:22:28.029742Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:28.042688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:22:28.047082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:22:28.047294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:22:28.052398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:22:28.052607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:22:28.053304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:28.053521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:22:28.055999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:28.056177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:22:28.057298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:28.057354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:28.057463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:22:28.057510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:28.057564Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:22:28.057690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:22:28.207024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.210665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.210820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.210906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.211003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.211072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.211147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.211213Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.211360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.211444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.211508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.211597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.211699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.211782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.211858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... chemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:22:32.870281Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:32.870338Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:22:32.870689Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:32.870733Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2213], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-04T02:22:32.871287Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:22:32.871346Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-04T02:22:32.871500Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-04T02:22:32.871548Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:22:32.871601Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-04T02:22:32.871640Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:22:32.871689Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-04T02:22:32.871744Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:22:32.871793Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-04T02:22:32.871837Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-04T02:22:32.872025Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:22:32.872096Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2026-01-04T02:22:32.872142Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-04T02:22:32.873110Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 
2026-01-04T02:22:32.873211Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:22:32.873256Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:22:32.873312Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-04T02:22:32.873374Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:22:32.873496Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-01-04T02:22:32.877124Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-04T02:22:32.877425Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-04T02:22:32.877484Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-04T02:22:32.878069Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-04T02:22:32.878188Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:22:32.878241Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1078:2988] TestWaitNotification: OK eventTxId 105 2026-01-04T02:22:33.487858Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:22:33.488183Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 340us result status StatusSuccess 2026-01-04T02:22:33.488854Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 250 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { 
LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\010" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\010" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\010" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\010" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } TestModificationResults wait txId: 106 2026-01-04T02:22:33.492595Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } PartitionStrategy { PartitionStrategyType: DISABLED } } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:22:33.492777Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2026-01-04T02:22:33.492914Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Can`t disable auto partitioning., at schemeshard: 72057594046678944 
2026-01-04T02:22:33.495826Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Can`t disable auto partitioning." TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:33.496141Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Can`t disable auto partitioning., operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-04T02:22:33.496511Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-04T02:22:33.496567Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-04T02:22:33.497085Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-04T02:22:33.497189Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-04T02:22:33.497243Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:1093:3002] TestWaitNotification: OK eventTxId 106 |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_stream_creator/unittest >> StreamCreator::WithResolvedTimestamps [GOOD] Test command err: 2026-01-04T02:22:29.774025Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326130549608107:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:29.774142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:30.042424Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:30.052622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:30.052711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:30.091894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:30.119743Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:30.121087Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326130549607983:2081] 1767493349767057 != 1767493349767060 2026-01-04T02:22:30.244165Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:30.365908Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:22:30.365934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:22:30.365946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:22:30.366031Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:30.613127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:30.620143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:22:30.624142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:30.747140Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-04T02:22:30.747164Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:22:30.747699Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:22:30.794014Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:33.033729Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767493350714, tx_id: 281474976710658 } } } 2026-01-04T02:22:33.034190Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-04T02:22:33.035973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:33.037605Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-01-04T02:22:33.037618Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-01-04T02:22:33.079258Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-01-04T02:22:33.079290Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-04T02:22:33.080152Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:59: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::NController::TEvPrivate::TEvAllowCreateStream 2026-01-04T02:22:33.164053Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][1:7591326147729478304:2337] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:41:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-01-04T02:22:33.173321Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:87: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTableResponse { Result: { status: SUCCESS, issues: } } 2026-01-04T02:22:33.173341Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:102: [StreamCreator][rid 1][tid 1] Success: issues# 2026-01-04T02:22:33.183854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:22:33.198355Z node 1 :REPLICATION_CONTROLLER TRACE: stream_creator.cpp:139: [StreamCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvAlterTopicResponse { Result: { status: SUCCESS, issues: } } 2026-01-04T02:22:33.198383Z node 1 :REPLICATION_CONTROLLER INFO: stream_creator.cpp:157: [StreamCreator][rid 1][tid 1] Success: issues# |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_stream_creator/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergeTest::EnableSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:22:28.335396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:22:28.335484Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:28.335532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:22:28.335571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:22:28.335607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:22:28.335636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:22:28.335687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:28.335770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:22:28.336660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:22:28.336883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:22:28.421416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:22:28.421458Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:28.436720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:22:28.440737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:22:28.440945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:22:28.446701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:22:28.446949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:22:28.447685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:28.447905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:22:28.452586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:28.452805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:22:28.454048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:28.454116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2026-01-04T02:22:28.454256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:22:28.454334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:28.454399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:22:28.454576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:22:28.586784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.587574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.587668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.587721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.587779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.587832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.587887Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.587925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.588006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.588087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.588145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.588185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.588257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.588363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.588445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:33.790985Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:673: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 1250, at tablet: 72057594046678944 2026-01-04T02:22:33.791039Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-04T02:22:33.800783Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2026-01-04T02:22:33.800927Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 1250 2026-01-04T02:22:33.801018Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 1250 2026-01-04T02:22:33.801089Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-04T02:22:33.801141Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-04T02:22:33.801310Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-04T02:22:33.801503Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:22:33.804770Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:22:33.805647Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:33.805697Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:22:33.805992Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:33.806033Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2213], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-04T02:22:33.806364Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:22:33.806419Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-04T02:22:33.806558Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-04T02:22:33.806600Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:22:33.806654Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-04T02:22:33.806694Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:22:33.806740Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-04T02:22:33.806790Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:22:33.806836Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-04T02:22:33.806872Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-04T02:22:33.807032Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:22:33.807080Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 0 2026-01-04T02:22:33.807117Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-04T02:22:33.809902Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:22:33.809995Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:22:33.810034Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:22:33.810083Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-04T02:22:33.810139Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 
39] was 4 2026-01-04T02:22:33.810250Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-01-04T02:22:33.813183Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-04T02:22:33.813486Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-04T02:22:33.813544Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-04T02:22:33.814042Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-04T02:22:33.814164Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:22:33.814209Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [3:1084:2992] TestWaitNotification: OK eventTxId 105 2026-01-04T02:22:34.338115Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:22:34.338455Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 388us result status StatusSuccess 2026-01-04T02:22:34.339232Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 250 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: 
CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Verify partition 0 >>>>> Verify partition 1 >>>>> Verify partition 2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitWithExistingPartitionWithPartialOverlapAndCreateRootLevelSibling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:22:28.420565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:22:28.420656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:28.420716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:22:28.420749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:22:28.420779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-01-04T02:22:28.420813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:22:28.420862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:28.420929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:22:28.421667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:22:28.421935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:22:28.505426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:22:28.505487Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:28.518163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:22:28.518352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:22:28.518532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:22:28.528475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:22:28.528687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:22:28.529415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:28.529669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:22:28.532612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:28.532767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:22:28.533627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:28.533671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:28.533788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:22:28.533855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:28.533977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:22:28.534177Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:22:28.670678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.674709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.674887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.675007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.675112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.675203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.675282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.675374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.675432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.675492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.675556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.675602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.675682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.675765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.675850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 1-04T02:22:34.275656Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 250 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:34.275727Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:673: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 250, at tablet: 72057594046678944 2026-01-04T02:22:34.275788Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-04T02:22:34.294659Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2026-01-04T02:22:34.294884Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-04T02:22:34.295025Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-04T02:22:34.295097Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-04T02:22:34.295146Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-04T02:22:34.295333Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-04T02:22:34.295558Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:22:34.304509Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:22:34.305403Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:34.305471Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:22:34.305839Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:34.305912Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:213:2213], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-04T02:22:34.306640Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at 
schemeshard: 72057594046678944 2026-01-04T02:22:34.306707Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-04T02:22:34.306837Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-04T02:22:34.306888Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:22:34.306944Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-04T02:22:34.307011Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:22:34.307060Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-04T02:22:34.307107Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:22:34.307157Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-04T02:22:34.307196Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-04T02:22:34.307372Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:22:34.307439Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2026-01-04T02:22:34.307484Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-04T02:22:34.309942Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:22:34.310082Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:22:34.310135Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:22:34.310215Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-04T02:22:34.310282Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:22:34.310402Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2026-01-04T02:22:34.310461Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 
72057594046678944, to actorId: [4:816:2782] 2026-01-04T02:22:34.313609Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:22:34.313725Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:22:34.313776Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [4:1033:2951] TestWaitNotification: OK eventTxId 105 2026-01-04T02:22:34.314496Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:22:34.314815Z node 4 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 350us result status StatusSuccess 2026-01-04T02:22:34.315627Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 4 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 4 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 4 NextPartitionId: 4 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 
72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TProxyActorTest::TestCreateSemaphoreInterrupted |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> data_paging.py::TestDataPaging::test_listing_paging_monitoring [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateQuery [GOOD] >> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TSchemeShardTopicSplitMergePrescribedPartitionsTest::SplitInactivePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:22:28.368472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:22:28.368599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:28.368649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:22:28.368689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:22:28.368729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:22:28.368768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:22:28.368834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:28.368932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:22:28.371206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:22:28.371616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:22:28.457771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:22:28.457834Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:28.469118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:22:28.469328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:22:28.469503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:22:28.478108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:22:28.478356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:22:28.479103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:28.479355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:22:28.482471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:28.482668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:22:28.483851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:28.483914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:28.484029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:22:28.484085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:28.484187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:22:28.484347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 
72057594046678944 2026-01-04T02:22:28.624079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.624785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.624892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.624941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.624983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.625035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.625086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.625142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.625207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.625245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.625281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.625333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.625369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.625433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:28.625489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
HARD DEBUG: schemeshard__operation_common_pq.cpp:659: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2026-01-04T02:22:34.211218Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-04T02:22:34.212952Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-04T02:22:34.213235Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-04T02:22:34.213299Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-04T02:22:34.216702Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 105, at schemeshard: 72057594046678944 2026-01-04T02:22:34.216746Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2026-01-04T02:22:34.216782Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 105, at schemeshard: 72057594046678944 2026-01-04T02:22:34.273934Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 250, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:34.274105Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 AckTo { RawX1: 0 RawX2: 0 } } Step: 250 MediatorID: 72075186233409547 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:34.274189Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:673: NPQState::TPropose operationId# 105:0 HandleReply TEvOperationPlan, step: 250, at tablet: 72057594046678944 2026-01-04T02:22:34.274249Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 105:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-04T02:22:34.292795Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409548, partId: 0 2026-01-04T02:22:34.292962Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-04T02:22:34.293031Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-04T02:22:34.293079Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-04T02:22:34.293113Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-04T02:22:34.293274Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-04T02:22:34.293443Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:22:34.298829Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:22:34.300056Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:34.300149Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:22:34.300521Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:34.300575Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [4:210:2211], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-04T02:22:34.301250Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:22:34.301301Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-04T02:22:34.301427Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-04T02:22:34.301474Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:22:34.301523Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-04T02:22:34.301558Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:22:34.301607Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-04T02:22:34.301675Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:22:34.301748Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-04T02:22:34.301793Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-04T02:22:34.301994Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:22:34.302046Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2026-01-04T02:22:34.302086Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 
72057594046678944, LocalPathId: 39], 3 2026-01-04T02:22:34.307210Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:22:34.307348Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:22:34.307392Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:22:34.307435Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-04T02:22:34.307477Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:22:34.307591Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2026-01-04T02:22:34.307642Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [4:813:2780] 2026-01-04T02:22:34.311060Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:22:34.311180Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:22:34.311224Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [4:1032:2951] TestWaitNotification: OK eventTxId 105 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } TestModificationResults wait txId: 106 2026-01-04T02:22:34.314248Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "W" } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:22:34.314597Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2026-01-04T02:22:34.314820Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid partition status: 2, at schemeshard: 72057594046678944 2026-01-04T02:22:34.317608Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid partition status: 2" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:34.317882Z node 4 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Invalid partition status: 2, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-04T02:22:34.318354Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-04T02:22:34.318404Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-04T02:22:34.318925Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-04T02:22:34.319046Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-04T02:22:34.319092Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [4:1083:2986] TestWaitNotification: OK eventTxId 106 |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_splitmerge/unittest >> TProxyActorTest::TestCreateSemaphore [GOOD] |82.1%| [TA] $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |82.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> TSelectFromViewTest::OneTableUsingRelativeName [GOOD] |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphore [GOOD] >> TSelectFromViewTest::DisabledFeatureFlag |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> TSchemeShardSecretTest::DropSecret |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestCreateSemaphoreInterrupted [GOOD] |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest >> TControlPlaneProxyTest::FailsOnCreateQueryWhenRateLimiterResourceNotCreated [GOOD] >> TControlPlaneProxyTest::ShouldSendListQueries |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest 
>> PartitionStats::CollectorOverload [GOOD] |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |82.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD] |82.1%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::CollectorOverload [GOOD] |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD] |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/partition_stats/ut/unittest >> PartitionStats::Collector [GOOD] >> TControlPlaneProxyTest::ShouldSendListQueries [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeQuery >> TestSqsTopicHttpProxy::TestDeleteMessageIdempotence [GOOD] |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/partition_stats/ut/unittest |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/proxy/ut/unittest >> TProxyActorTest::TestDisconnectWhileAttaching [GOOD] Test command err: ... waiting for blocked registrations ... blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR cookie 0 ... waiting for blocked registrations (done) 2026-01-04T02:22:37.231623Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037927937] NodeDisconnected NodeId# 2 ... unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from KESUS_PROXY_ACTOR to KESUS_TABLET_ACTOR |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/proxy/ut/unittest |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest >> TSchemeShardSecretTest::CreateNotInDatabase |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest >> VDiskIOTest::HugeBlobIOCount >> TSchemeShardSecretTest::CreateSecret |82.2%| [TA] $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest |82.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_selectors [GOOD] >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_program >> TSchemeShardSecretTest::DropSecret [GOOD] >> TSchemeShardSecretTest::DropNotASecret |82.2%| [TA] $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |82.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest >> TControlPlaneProxyTest::ShouldSendDescribeQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendGetQueryStatus >> TPersQueueCommonTest::Auth_MultipleUpdateTokenRequestIterationsWithValidToken_GotUpdateTokenResponseForEachRequest [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest >> TPersqueueControlPlaneTestSuite::TestAddRemoveReadRule [GOOD] >> TPersqueueDataPlaneTestSuite::WriteSession >> TPersQueueCommonTest::Auth_CreateGrpcStreamWithInvalidTokenInInitialMetadata_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithBlobsRateLimit [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit >> settings_validation.py::TestSettingsValidation::test_settings_validation_solomon_program [GOOD] >> TSchemeShardSecretTest::CreateNotInDatabase [GOOD] >> TSchemeShardSecretTest::AsyncDropSameSecret >> TSchemeShardSecretTest::DropNotASecret [GOOD] >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_selectors >> TPersqueueControlPlaneTestSuite::SetupReadLockSessionWithDatabase [GOOD] >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestDeleteMessageIdempotence [GOOD] Test command err: 2026-01-04T02:22:17.641436Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326082419363800:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:17.642089Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:17.881873Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:17.882011Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:17.922098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:18.007200Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:18.009160Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:18.010438Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326082419363770:2081] 1767493337639902 != 1767493337639905 2026-01-04T02:22:18.076864Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:22:18.076915Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:22:18.076926Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:22:18.077045Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:18.274276Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:18.315570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:18.579777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:22:18.590118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:22:18.615293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:22:18.654591Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:18.697899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:18.865419Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:18.934420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-04T02:22:18.939098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:18.983007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-01-04T02:22:18.988110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:19.029687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:19.074861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:19.117272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:19.161285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:19.203871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:19.253324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:21.332551Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326099599234608:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:21.332659Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:21.332805Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326099599234616:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:21.332989Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326099599234622:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:21.333024Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:21.336824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:22:21.348259Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326099599234624:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:22:21.418679Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326099599234675:3104] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:22:21.862679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:21.899261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:21.930246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:21.972007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.c ... G: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 4. All 1 blobs are from cache. 2026-01-04T02:22:38.603810Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1026: Topic 'topic1' partition 0 user consumer readTimeStamp done, result 1767493358517 queuesize 0 startOffset 0 2026-01-04T02:22:38.603844Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-04T02:22:38.603900Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 54 from pos 0 cbcount 1 2026-01-04T02:22:38.603958Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-04T02:22:38.604742Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037907' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-04T02:22:38.604778Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037907' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-04T02:22:38.605159Z node 3 :HTTP_PROXY INFO: http_req.cpp:1598: http request [SendMessage] requestId [f4622a4c-ff17e041-a38c3ac0-5d2a62c6] reply ok 2026-01-04T02:22:38.605413Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:55550) <- (200 , 127 bytes) Http output full {"SequenceNumber":"0","MD5OfMessageBody":"94a29778a1f1f41bf68142847b2e6106","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"} 2026-01-04T02:22:38.605538Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:55550) connection closed 2026-01-04T02:22:38.610490Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:55562) incoming connection opened 2026-01-04T02:22:38.610571Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:55562) -> (POST /Root, 74 bytes) 2026-01-04T02:22:38.610701Z node 3 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [98c4:5c73:c47b:0:80c4:5c73:c47b:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 1a0a0640-9f29f22d-f2886b63-e9d6c701 2026-01-04T02:22:38.611119Z node 3 :HTTP_PROXY INFO: http_req.cpp:1324: http request [ReceiveMessage] requestId [1a0a0640-9f29f22d-f2886b63-e9d6c701] got new request from [98c4:5c73:c47b:0:80c4:5c73:c47b:0] database '/Root' stream '' 2026-01-04T02:22:38.611688Z node 3 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ReceiveMessage] requestId [1a0a0640-9f29f22d-f2886b63-e9d6c701] [auth] Authorized successfully 2026-01-04T02:22:38.611735Z node 3 :HTTP_PROXY INFO: http_req.cpp:1077: http request [ReceiveMessage] requestId [1a0a0640-9f29f22d-f2886b63-e9d6c701] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-04T02:22:38.615212Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:45: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPReadRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 WaitDeadlineMilliseconds: 1767493378614 VisibilityDeadlineMilliseconds: 1767493388614 MaxNumberOfMessages: 1 2026-01-04T02:22:38.620532Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic1' requestId: 2026-01-04T02:22:38.620568Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037907] got client message batch for topic 'topic1' partition 0 2026-01-04T02:22:38.620662Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037907][Partition][0][StateIdle] read cookie 5 Topic 'topic1' partition 0 user consumer offset 0 partno 0 count 1 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2026-01-04T02:22:38.620848Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037907][Partition][0][StateIdle] read cookie 5 added 1 blobs, size 72 count 1 last offset 0, current partition end offset: 1 2026-01-04T02:22:38.620873Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037907][Partition][0][StateIdle] Reading cookie 5. Send blob request. 2026-01-04T02:22:38.620920Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 72 accessed 2 times before, last time 2026-01-04T02:22:38.000000Z 2026-01-04T02:22:38.620947Z node 3 :PERSQUEUE DEBUG: read.h:126: [72075186224037907][PQCacheProxy]Reading cookie 5. All 1 blobs are from cache. 2026-01-04T02:22:38.620977Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-04T02:22:38.621014Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037907' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-04T02:22:38.621055Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 54 from pos 0 cbcount 1 2026-01-04T02:22:38.621128Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic1' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-04T02:22:38.621945Z node 3 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [1a0a0640-9f29f22d-f2886b63-e9d6c701] reply ok Http output full {"Messages":[{"MD5OfBody":"94a29778a1f1f41bf68142847b2e6106","Attributes":{"SentTimestamp":"1767493358517"},"ReceiptHandle":"CAAQAA==","Body":"MessageBody-0","MessageId":"67507D10-9D95-5D16-B4E8-D9FAD99F9125"}]} 2026-01-04T02:22:38.622182Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:55562) <- (200 , 211 bytes) 2026-01-04T02:22:38.622299Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:55562) connection closed 2026-01-04T02:22:38.623060Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:55574) incoming connection opened 2026-01-04T02:22:38.623120Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:55574) -> (POST /Root, 80 bytes) 2026-01-04T02:22:38.623230Z node 3 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [189f:5373:c47b:0:9f:5373:c47b:0] request [DeleteMessage] url [/Root] database [/Root] requestId: 3736b789-e25c2554-da50092d-d64d9f64 2026-01-04T02:22:38.623587Z node 3 :HTTP_PROXY INFO: http_req.cpp:1324: http request [DeleteMessage] requestId [3736b789-e25c2554-da50092d-d64d9f64] got new request from [189f:5373:c47b:0:9f:5373:c47b:0] database '/Root' stream '' 2026-01-04T02:22:38.625752Z node 3 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [DeleteMessage] requestId [3736b789-e25c2554-da50092d-d64d9f64] [auth] Authorized successfully 2026-01-04T02:22:38.625843Z node 3 :HTTP_PROXY INFO: http_req.cpp:1077: http request [DeleteMessage] requestId [3736b789-e25c2554-da50092d-d64d9f64] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-04T02:22:38.626903Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:50: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPCommitRequest Topic: "/Root/topic1" Consumer: "consumer" PartitionId: 0 Offset: 0 2026-01-04T02:22:38.628294Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:38.628321Z node 3 :PERSQUEUE DEBUG: partition.cpp:2409: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:22:38.628358Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:22:38.628372Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:38.628393Z node 3 :PERSQUEUE DEBUG: partition.cpp:2473: [72075186224037907][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:22:38.628459Z node 3 :PERSQUEUE DEBUG: partition.cpp:3880: [72075186224037907][Partition][0][StateIdle] Topic 'topic1' partition 0 user consumer offset is set to 1 (startOffset 0) session 2026-01-04T02:22:38.628496Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, 
PendingWrites: 1 2026-01-04T02:22:38.628516Z node 3 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037907][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:22:38.628533Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:38.628750Z node 3 :PERSQUEUE DEBUG: read.h:275: [72075186224037907][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:22:38.628754Z node 3 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DeleteMessage] requestId [3736b789-e25c2554-da50092d-d64d9f64] reply ok 2026-01-04T02:22:38.628927Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:55574) <- (200 , 2 bytes) 2026-01-04T02:22:38.629036Z node 3 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:55574) connection closed Http output full {} 2026-01-04T02:22:38.629697Z node 3 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037907][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:22:38.629774Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:934: [72075186224037907][Partition][0][StateIdle] Topic 'topic1' partition 0 user consumer readTimeStamp for offset 1 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:22:38.629830Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037907][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:22:38.629852Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:38.629866Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:38.629875Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:38.629888Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:38.629899Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:38.629924Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037907][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:22:38.693837Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:38.693870Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:38.693881Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:38.693898Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:38.693907Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:22:38.795296Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:38.795334Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:38.795347Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: 
[72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:38.795365Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:38.795375Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TSchemeShardSecretTest::CreateSecret [GOOD] >> TSchemeShardSecretTest::CreateSecretAndIntermediateDirs >> TControlPlaneProxyTest::ShouldSendGetQueryStatus [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::DropNotASecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:22:37.685283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:22:37.685373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:37.685415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:22:37.685453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:22:37.685487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:22:37.685544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:22:37.685675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:37.685740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:22:37.686903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:22:37.687231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:22:37.796741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:22:37.796808Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:37.818463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:22:37.822666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: 
TTxUpgradeSchema.Execute 2026-01-04T02:22:37.822864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:22:37.828214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:22:37.828471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:22:37.829141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:37.829371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:22:37.834622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:37.834814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:22:37.836100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:37.836163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:37.836278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:22:37.836327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:37.836366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:22:37.836521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:22:38.004479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:38.005510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:38.005659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:38.005728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:38.005866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:38.005934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:38.005999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:38.006044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:38.006103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:38.006180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:38.006222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:38.006275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:38.006365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:38.006458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:38.006517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
hard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:40.437575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:22:40.437656Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:40.437686Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-04T02:22:40.437723Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-04T02:22:40.437789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:22:40.437853Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:22:40.437973Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:22:40.438011Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:22:40.438051Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:22:40.438085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:22:40.438121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-04T02:22:40.438160Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:22:40.438198Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:22:40.438231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:22:40.438297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:22:40.438357Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-04T02:22:40.438393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-04T02:22:40.438426Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-04T02:22:40.439236Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 
1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:22:40.439314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:22:40.439346Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:22:40.439388Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-04T02:22:40.439434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:22:40.440413Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:22:40.440489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:22:40.440517Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:22:40.440543Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:22:40.440572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:22:40.440633Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:22:40.442373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:22:40.443179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:22:40.443378Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:22:40.443424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:22:40.443751Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:22:40.443828Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:22:40.443882Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: 
tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:707:2697] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-04T02:22:40.446752Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropSecret Drop { Name: "dir" } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:22:40.446900Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_secret.cpp:120: [72057594046678944] TDropSecret Propose, opId: 102:0, path: /MyRoot/dir 2026-01-04T02:22:40.447009Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-04T02:22:40.448818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/dir\', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:22:40.449026Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/dir', error: path is not a secret (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), operation: DROP SECRET, path: /MyRoot/dir TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:22:40.449256Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:22:40.449297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:22:40.449631Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:22:40.449706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:22:40.449744Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:714:2704] TestWaitNotification: OK eventTxId 102 2026-01-04T02:22:40.450158Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-01-04T02:22:40.450302Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir" took 160us result status StatusSuccess 2026-01-04T02:22:40.450642Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir" PathDescription { Self { Name: "dir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges 
Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::AsyncDropSameSecret [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithInvalidToken_SessionClosedWithUnauthenticatedError [GOOD] >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_selectors [GOOD] >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_program >> TSchemeShardSecretTest::CreateSecretAndIntermediateDirs [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteQuery |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v1-std] [GOOD] |82.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_ttl.py::TestTTLAlterSettings::test_case ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::AsyncDropSameSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:22:39.480288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:22:39.480378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:39.480420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, 
StatsMaxExecuteTime# 0.010000s 2026-01-04T02:22:39.480464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:22:39.480504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:22:39.480534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:22:39.480591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:39.480655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:22:39.481472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:22:39.481765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:22:39.597363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:22:39.597453Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:39.613667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:22:39.613868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:22:39.614063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:22:39.623676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:22:39.623887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:22:39.624479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:39.624688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:22:39.629011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:39.629302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:22:39.630544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:39.630615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:39.630726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:22:39.630771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not 
a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:39.630871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:22:39.631080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:22:39.787082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.788111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.788308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.788418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.788496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.788600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.788662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: 
EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.788724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.788834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.788919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.788980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.789047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.789111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.789209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.789318Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 4: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:41.717045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 1 2026-01-04T02:22:41.717092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-04T02:22:41.717120Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-04T02:22:41.717482Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:22:41.717547Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-04T02:22:41.717660Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:22:41.717696Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:22:41.717744Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:22:41.717787Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:22:41.717857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-04T02:22:41.717904Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:22:41.717942Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:22:41.717977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:22:41.718048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:22:41.718087Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2026-01-04T02:22:41.718139Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-04T02:22:41.718172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-04T02:22:41.718196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: 
tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-01-04T02:22:41.719219Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:22:41.719318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:22:41.719362Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:22:41.719400Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-04T02:22:41.719456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:22:41.720818Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:22:41.720897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:22:41.720926Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:22:41.720955Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-04T02:22:41.720987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:22:41.721890Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:22:41.721965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:22:41.721999Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:22:41.722026Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-04T02:22:41.722053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:22:41.722129Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-04T02:22:41.722513Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:22:41.722561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-04T02:22:41.722633Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:22:41.724391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:22:41.726651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:22:41.727221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:22:41.727907Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 105 TestModificationResults wait txId: 105 TestModificationResult got TxId: 104, wait until txId: 105 TestModificationResults wait txId: 105 TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 105 2026-01-04T02:22:41.728325Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-04T02:22:41.728375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 TestWaitNotification wait txId: 106 2026-01-04T02:22:41.728462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-04T02:22:41.728489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-04T02:22:41.729152Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-04T02:22:41.729291Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-04T02:22:41.729353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:22:41.729397Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:768:2757] 2026-01-04T02:22:41.729556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- 
TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-04T02:22:41.729586Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:768:2757] TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 2026-01-04T02:22:41.730182Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir/test-secret" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:22:41.730397Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir/test-secret" took 269us result status StatusPathDoesNotExist 2026-01-04T02:22:41.730592Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/dir/test-secret\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/dir\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/dir/test-secret" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/dir" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "dir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_secret/unittest >> TSchemeShardSecretTest::CreateSecretAndIntermediateDirs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:22:39.612832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:22:39.612921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:39.612967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:22:39.613012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:22:39.613047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:22:39.613077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:22:39.613141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:22:39.613208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:22:39.614111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:22:39.614400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:22:39.705315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:22:39.705385Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:39.718088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:22:39.718261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:22:39.718383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:22:39.728472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:22:39.728651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:22:39.729228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:22:39.729424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:22:39.732259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:39.732477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:22:39.733563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:22:39.733626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:22:39.733748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:22:39.733834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:22:39.733941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:22:39.734132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:22:39.875832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.876917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.877063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.877157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.877234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.877306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.877371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.877439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.877539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.877616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.877714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.877791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.878935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.879004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:22:39.879085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
eration IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-04T02:22:42.077060Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-01-04T02:22:42.077085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-04T02:22:42.077110Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: false 2026-01-04T02:22:42.077147Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-04T02:22:42.077205Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:22:42.077248Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:22:42.077329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:22:42.077368Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2026-01-04T02:22:42.077393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:1 2026-01-04T02:22:42.077431Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:22:42.077456Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 2026-01-04T02:22:42.077478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:2 2026-01-04T02:22:42.077515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-04T02:22:42.077548Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 4, subscribers: 0 2026-01-04T02:22:42.077588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-04T02:22:42.077626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 6 2026-01-04T02:22:42.077651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2026-01-04T02:22:42.077677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 40], 3 2026-01-04T02:22:42.079689Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:22:42.079790Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:22:42.079838Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:22:42.079885Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-04T02:22:42.079941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:22:42.081083Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:22:42.081195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:22:42.081234Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:22:42.081267Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 6 2026-01-04T02:22:42.081301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:22:42.082969Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:22:42.083056Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:22:42.083093Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:22:42.083124Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-04T02:22:42.083156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:22:42.083678Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:22:42.083757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 
3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:22:42.083790Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:22:42.083821Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-01-04T02:22:42.083853Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-04T02:22:42.083950Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:22:42.086897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:22:42.087276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:22:42.087430Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:22:42.088419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:22:42.088694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:22:42.088758Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:22:42.089205Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:22:42.089309Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:22:42.089348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:721:2710] TestWaitNotification: OK eventTxId 101 2026-01-04T02:22:42.089788Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/dir1/dir2/test-secret" Options { ReturnSecretValue: true }, at schemeshard: 72057594046678944 2026-01-04T02:22:42.090044Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/dir1/dir2/test-secret" took 295us result status StatusSuccess 2026-01-04T02:22:42.090376Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/dir1/dir2/test-secret" PathDescription { Self { Name: "test-secret" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSecret CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\020\001" EffectiveACL: "\020\001" PathVersion: 3 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 1 EffectiveACLVersion: 1 UserAttrsVersion: 1 ChildrenVersion: 0 SecretVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SecretDescription { Name: "test-secret" Value: "test-value" Version: 0 } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |82.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_secret/unittest >> test_ttl.py::TestTTLDefaultEnv::test_case |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |82.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> TControlPlaneProxyTest::ShouldSendDeleteQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendControlQuery |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |82.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |82.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test |82.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test |82.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |82.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn |82.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn |82.2%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_stream_creator/test-results/unittest/{meta.json ... results_accumulator.log} |82.2%| [TA] {RESULT} $(B)/ydb/core/sys_view/partition_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.2%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/knn/ydb-core-kqp-ut-knn |82.2%| [TA] {RESULT} $(B)/ydb/core/kesus/proxy/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TControlPlaneProxyTest::ShouldSendControlQuery [GOOD] >> TControlPlaneProxyTest::ShouldSendGetResultData |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load |82.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/close_with_load/ydb-core-kqp-ut-close_with_load >> TControlPlaneProxyTest::ShouldSendGetResultData [GOOD] >> TControlPlaneProxyTest::ShouldSendListJobs >> TSelectFromViewTest::DisabledFeatureFlag [GOOD] >> TSelectFromViewTest::ReadTestCasesFromFiles >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueDataPlaneTestSuite::WriteSession [GOOD] Test command err: === Server->StartServer(false); 2026-01-04T02:22:32.591308Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326146118204231:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:32.591490Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:32.667808Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:32.671648Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591326142986963713:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:32.671997Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:32.684061Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:32.856528Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:32.858348Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:33.014607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:33.014740Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:33.021491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2026-01-04T02:22:33.021611Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:33.055367Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:22:33.055577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:33.056905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:33.211492Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:33.223838Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:33.308238Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:33.436514Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001d22/r3tmp/yandexHeSJGi.tmp 2026-01-04T02:22:33.436542Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001d22/r3tmp/yandexHeSJGi.tmp 2026-01-04T02:22:33.507361Z INFO: TTestServer started on Port 22265 GrpcPort 65084 2026-01-04T02:22:33.519398Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001d22/r3tmp/yandexHeSJGi.tmp 2026-01-04T02:22:33.519581Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:33.613283Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:33.684577Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:65084 === TenantModeEnabled() = 1 === Init PQ - start server on port 65084 2026-01-04T02:22:33.989551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-04T02:22:33.989688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:33.989843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-04T02:22:33.989867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-04T02:22:33.990030Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:22:33.990067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:33.992816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-04T02:22:33.992921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:22:33.993035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:33.993070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-04T02:22:33.993086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2026-01-04T02:22:33.993097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 2 -> 3 2026-01-04T02:22:33.995670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:33.995708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:22:33.995719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 3 -> 128 2026-01-04T02:22:33.997752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:33.997857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:33.997902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T02:22:33.997965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2026-01-04T02:22:34.011847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:22:34.012290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:22:34.012317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2026-01-04T02:22:34.012334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:22:34.014935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2026-01-04T02:22:34.015061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:22:34.017266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767493354060, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:22:34.017379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767493354060 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-04T02:22:34.017402Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T02:22:34.017635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 128 -> 240 2026-01-04T02:22:34.017671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T02:22:34.017797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-04T02:22:34.017859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-04T02:22:34.019394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T02:22:34.019419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPub ... 
only_original: true consumer: "consumer_aba" read_params { max_read_size: 104857600 } } } 2026-01-04T02:22:44.099974Z node 3 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:41: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6991958310903957257_v1 auth for : consumer_aba 2026-01-04T02:22:44.100465Z node 3 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:131: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6991958310903957257_v1 Handle describe topics response 2026-01-04T02:22:44.102082Z :INFO: [/Root] [/Root] [99d5f7da-9546c2bb-760d15a6-ff466524] [null] Server session id: consumer_aba_3_2_6991958310903957257_v1 2026-01-04T02:22:44.100557Z node 3 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:68: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6991958310903957257_v1 auth is DEAD 2026-01-04T02:22:44.100620Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1058: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6991958310903957257_v1 auth ok: topics# 1, initDone# 0 2026-01-04T02:22:44.101761Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1229: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6991958310903957257_v1 register session: topic# /Root/account1/write_topic 2026-01-04T02:22:44.102466Z :DEBUG: [/Root] [/Root] [99d5f7da-9546c2bb-760d15a6-ff466524] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:22:44.102648Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6991958310903957257_v1 grpc read done: success# 1, data# { read { } } 2026-01-04T02:22:44.102660Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037894][write_topic] pipe [3:7591326194790528218:2385] connected; active server actors: 1 2026-01-04T02:22:44.102848Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6991958310903957257_v1 got read request: guid# e32582c7-eb2e2bed-c2154f83-aa7667d6 2026-01-04T02:22:44.103053Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1708: [72075186224037894][write_topic] consumer "consumer_aba" register session for pipe [3:7591326194790528218:2385] session consumer_aba_3_2_6991958310903957257_v1 2026-01-04T02:22:44.103128Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:639: [72075186224037894][write_topic] consumer consumer_aba register readable partition 0 2026-01-04T02:22:44.103181Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:669: [72075186224037894][write_topic] consumer consumer_aba family created family=1 (Status=Free, Partitions=[0]) 2026-01-04T02:22:44.103225Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:871: [72075186224037894][write_topic] consumer consumer_aba register reading session ReadingSession "consumer_aba_3_2_6991958310903957257_v1" (Sender=[3:7591326194790528215:2385], Pipe=[3:7591326194790528218:2385], Partitions=[], ActiveFamilyCount=0) 2026-01-04T02:22:44.103259Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037894][write_topic] consumer consumer_aba rebalancing was scheduled 2026-01-04T02:22:44.103355Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1259: [72075186224037894][write_topic] consumer consumer_aba balancing. 
Sessions=1, Families=1, UnreadableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2026-01-04T02:22:44.103421Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1306: [72075186224037894][write_topic] consumer consumer_aba balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "consumer_aba_3_2_6991958310903957257_v1" (Sender=[3:7591326194790528215:2385], Pipe=[3:7591326194790528218:2385], Partitions=[], ActiveFamilyCount=0) 2026-01-04T02:22:44.103495Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:549: [72075186224037894][write_topic] consumer consumer_aba family 1 status Active partitions [0] session "consumer_aba_3_2_6991958310903957257_v1" sender [3:7591326194790528215:2385] lock partition 0 for ReadingSession "consumer_aba_3_2_6991958310903957257_v1" (Sender=[3:7591326194790528215:2385], Pipe=[3:7591326194790528218:2385], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2026-01-04T02:22:44.103593Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1326: [72075186224037894][write_topic] consumer consumer_aba start rebalancing. familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2026-01-04T02:22:44.103629Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1404: [72075186224037894][write_topic] consumer consumer_aba balancing duration: 0.000242s 2026-01-04T02:22:44.104276Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:1347: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6991958310903957257_v1 assign: record# { Partition: 0 TabletId: 72075186224037893 Topic: "write_topic" Generation: 1 Step: 1 Session: "consumer_aba_3_2_6991958310903957257_v1" ClientId: "consumer_aba" PipeClient { RawX1: 7591326194790528218 RawX2: 4503612512274769 } Path: "/Root/account1/write_topic" } 2026-01-04T02:22:44.104421Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:1144: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6991958310903957257_v1 INITING TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) 2026-01-04T02:22:44.104672Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:984: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6991958310903957257_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037893 Generation: 1, pipe: [3:7591326194790528220:2388] 2026-01-04T02:22:44.104743Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: consumer_aba_3_2_6991958310903957257_v1:1 with generation 1 2026-01-04T02:22:44.116999Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6991958310903957257_v1 TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 1 WriteTimestampMS: 1767493363987 CreateTimestampMS: 1767493363985 SizeLag: 165 WriteTimestampEstimateMS: 1767493363987 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2026-01-04T02:22:44.117075Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:695: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6991958310903957257_v1 INIT DONE TopicId: Topic /Root/account1/write_topic in database: Root, partition 0(assignId:1) EndOffset 1 readOffset 0 committedOffset 0 2026-01-04T02:22:44.117160Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session 
cookie 2 consumer consumer_aba session consumer_aba_3_2_6991958310903957257_v1 sending to client partition status Got new read session event: CreatePartitionStream { PartitionStreamId: 1 TopicPath: account1/write_topic Cluster: PartitionId: 0 CommittedOffset: 0 EndOffset: 1 } 2026-01-04T02:22:44.118136Z :INFO: [/Root] [/Root] [99d5f7da-9546c2bb-760d15a6-ff466524] Closing read session. Close timeout: 0.000000s 2026-01-04T02:22:44.118221Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:account1/write_topic:0:1:0:0 2026-01-04T02:22:44.118275Z :INFO: [/Root] [/Root] [99d5f7da-9546c2bb-760d15a6-ff466524] Counters: { Errors: 0 CurrentSessionLifetimeMs: 23 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:22:44.118372Z :NOTICE: [/Root] [/Root] [99d5f7da-9546c2bb-760d15a6-ff466524] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-04T02:22:44.118423Z :DEBUG: [/Root] [/Root] [99d5f7da-9546c2bb-760d15a6-ff466524] [null] Abort session to cluster 2026-01-04T02:22:44.119198Z :NOTICE: [/Root] [/Root] [99d5f7da-9546c2bb-760d15a6-ff466524] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:22:44.119269Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6991958310903957257_v1 grpc read done: success# 0, data# { } 2026-01-04T02:22:44.119316Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6991958310903957257_v1 grpc read failed 2026-01-04T02:22:44.119351Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6991958310903957257_v1 grpc closed 2026-01-04T02:22:44.119398Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 2 consumer consumer_aba session consumer_aba_3_2_6991958310903957257_v1 is DEAD 2026-01-04T02:22:44.119614Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: consumer_aba_3_2_6991958310903957257_v1 2026-01-04T02:22:44.120665Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037894][write_topic] pipe [3:7591326194790528218:2385] disconnected. 2026-01-04T02:22:44.120714Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037894][write_topic] pipe [3:7591326194790528218:2385] disconnected; active server actors: 1 2026-01-04T02:22:44.120747Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037894][write_topic] pipe [3:7591326194790528218:2385] client consumer_aba disconnected session consumer_aba_3_2_6991958310903957257_v1 2026-01-04T02:22:44.324826Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7591326194790528240:2395], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:22:44.325286Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=ZGJmZDgzM2ItNmUwMzcwNjgtYTdkMWNhNWQtYmYwYjMyOWM=, ActorId: [3:7591326194790528233:2391], ActorState: ExecuteState, LegacyTraceId: 01ke3d0tj86nbfzwwr0vkcgmc6, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:22:44.325729Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } |82.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TControlPlaneProxyTest::ShouldSendListJobs [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeJob >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_MultipleInflightWriteUpdateTokenRequestWithDifferentValidToken_SessionClosedWithOverloadedError [GOOD] Test command err: === Server->StartServer(false); 2026-01-04T02:22:33.068936Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326147423716593:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:33.070956Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:33.251983Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:33.275382Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591326149601848611:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:33.275423Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:33.345518Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:33.554001Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:33.607932Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:33.782794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:33.782886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:33.825605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:33.825716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:33.854759Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:22:33.854935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:33.859621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:33.928508Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:33.930486Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:33.950302Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:34.158200Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:34.243140Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001d1b/r3tmp/yandexAbbWsr.tmp 2026-01-04T02:22:34.243191Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001d1b/r3tmp/yandexAbbWsr.tmp 2026-01-04T02:22:34.243343Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001d1b/r3tmp/yandexAbbWsr.tmp 2026-01-04T02:22:34.245272Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:34.338002Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:34.353771Z INFO: TTestServer started on Port 21777 GrpcPort 1161 PQClient connected to localhost:1161 === TenantModeEnabled() = 1 === Init PQ - start server on port 1161 2026-01-04T02:22:34.788897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: 
"/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-04T02:22:34.789076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.789260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-04T02:22:34.789288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-04T02:22:34.789485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:22:34.789523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:34.792022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-04T02:22:34.792147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:22:34.792325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.792362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-04T02:22:34.792373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2026-01-04T02:22:34.792384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 2 -> 3 2026-01-04T02:22:34.794347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.794384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:22:34.794412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 3 -> 128 2026-01-04T02:22:34.796583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:22:34.796597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2026-01-04T02:22:34.796612Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:22:34.798730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.798755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.798774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T02:22:34.798800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2026-01-04T02:22:34.819031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:22:34.820921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2026-01-04T02:22:34.821031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:22:34.824299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767493354865, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:22:34.824463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767493354865 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-04T02:22:34.824500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T02:22:34.826290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 128 -> 240 2026-01-04T02:22:34.826362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T02:22:34.826519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-04T02:22:34.826556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-04T02:22:34.828451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T02:22:34.828504Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublis ... InitializePQ completed BEFORE MODIFY PERMISSIONS 2026-01-04T02:22:44.225613Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_0@builtin \003\n\031\010\001\022\025\032\023test_user_1@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_1@builtin \003\n\031\010\001\022\025\032\023test_user_2@builtin\n!\010\000\022\035\010\001\020\366\213\001\032\023test_user_2@builtin \003" } } TxId: 281474976715665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:54114" , at schemeshard: 72057594046644480 2026-01-04T02:22:44.226574Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_modify_acl.cpp:33: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976715665:0, at schemeshard: 72057594046644480 2026-01-04T02:22:44.226738Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 46] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 45] 2026-01-04T02:22:44.226751Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2026-01-04T02:22:44.226903Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2026-01-04T02:22:44.226957Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:22:44.227030Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715665:0 progress is 1/1 2026-01-04T02:22:44.227042Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2026-01-04T02:22:44.227059Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715665:0 progress is 1/1 2026-01-04T02:22:44.227071Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2026-01-04T02:22:44.227104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 3 2026-01-04T02:22:44.227157Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715665, ready parts: 1/1, is published: false 2026-01-04T02:22:44.227174Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 46], at schemeshard: 72057594046644480 2026-01-04T02:22:44.227187Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation 
IsReadyToDone TxId: 281474976715665 ready parts: 1/1 2026-01-04T02:22:44.227217Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715665:0 2026-01-04T02:22:44.227230Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715665, publications: 1, subscribers: 0 2026-01-04T02:22:44.227240Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976715665, [OwnerId: 72057594046644480, LocalPathId: 46], 3 2026-01-04T02:22:44.229071Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715665, response: Status: StatusSuccess TxId: 281474976715665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2026-01-04T02:22:44.229430Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user_0@builtin, add access: +W:test_user_1@builtin, add access: +W:test_user_2@builtin, remove access: -():test_user_0@builtin:-, remove access: -():test_user_1@builtin:-, remove access: -():test_user_2@builtin:- 2026-01-04T02:22:44.229577Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T02:22:44.229589Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715665, path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2026-01-04T02:22:44.229790Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-04T02:22:44.229822Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:7591326179324077029:2405], at schemeshard: 72057594046644480, txId: 281474976715665, path id: 46 2026-01-04T02:22:44.230376Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2026-01-04T02:22:44.230442Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976715665 2026-01-04T02:22:44.230452Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976715665 2026-01-04T02:22:44.230465Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715665, pathId: [OwnerId: 72057594046644480, LocalPathId: 46], version: 3 2026-01-04T02:22:44.230479Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 4 2026-01-04T02:22:44.230550Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: 
Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 0 2026-01-04T02:22:44.232558Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715665 2026-01-04T02:22:44.232976Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2026-01-04T02:22:44.232998Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 2 2026-01-04T02:22:44.233322Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-group-id" } 2026-01-04T02:22:44.233407Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:447: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-group-id" from ipv6:[::1]:54096 2026-01-04T02:22:44.233425Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1538: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:54096 proto=v1 topic=/Root/acc/topic1 durationSec=0 2026-01-04T02:22:44.233431Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:571: init check schema 2026-01-04T02:22:44.234621Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:632: session v1 cookie: 2 sessionId: describe result for acl check 2026-01-04T02:22:44.234773Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-01-04T02:22:44.234787Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-01-04T02:22:44.234794Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-01-04T02:22:44.234821Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7591326196503947471:2373] (SourceId=test-group-id, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2026-01-04T02:22:44.234838Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:694: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2026-01-04T02:22:44.235493Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2026-01-04T02:22:44.235690Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie test-group-id|27b1c900-bcc8998a-5669fca1-76b552bc_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2026-01-04T02:22:44.236099Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|27b1c900-bcc8998a-5669fca1-76b552bc_0 2026-01-04T02:22:44.237060Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-group-id|27b1c900-bcc8998a-5669fca1-76b552bc_0 grpc read done: success: 1 data: update_token_request [content omitted] 2026-01-04T02:22:44.237176Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-group-id|27b1c900-bcc8998a-5669fca1-76b552bc_0 grpc read done: success: 1 data: update_token_request [content omitted] 2026-01-04T02:22:44.237218Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:815: session v1 error cookie: 2 reason: got another 'update_token_request' while previous still in progress, only single token update is allowed at a time sessionId: test-group-id|27b1c900-bcc8998a-5669fca1-76b552bc_0 2026-01-04T02:22:44.237377Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-group-id|27b1c900-bcc8998a-5669fca1-76b552bc_0 is DEAD 2026-01-04T02:22:44.237610Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison |82.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> listing_batching.py::TestListingBatching::test_listing_batching_monitoring [GOOD] |82.4%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersqueueControlPlaneTestSuite::SetupWriteLockSessionWithDatabase [GOOD] Test command err: === Server->StartServer(false); 2026-01-04T02:22:34.481172Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326151888851551:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:34.481583Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:34.535252Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:34.541304Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591326152674382642:2213];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:34.541727Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:34.554855Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:34.555036Z node 2 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:22:34.750238Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:34.779746Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:34.829518Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:34.829640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:34.838422Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:34.838523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:34.880838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:34.899609Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:22:34.900469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:34.979139Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:34.989894Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:35.035164Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:35.210486Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/0018c6/r3tmp/yandexSDUbt4.tmp 2026-01-04T02:22:35.210510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/0018c6/r3tmp/yandexSDUbt4.tmp 2026-01-04T02:22:35.210702Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/0018c6/r3tmp/yandexSDUbt4.tmp 2026-01-04T02:22:35.210805Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:35.254322Z INFO: TTestServer started on Port 23275 GrpcPort 15527 PQClient connected to localhost:15527 === TenantModeEnabled() = 1 === Init PQ - start server on port 15527 2026-01-04T02:22:35.485607Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:35.545629Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:35.688300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-04T02:22:35.688478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:35.688663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-04T02:22:35.688683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-04T02:22:35.688835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:22:35.688872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:35.698794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-04T02:22:35.698933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:22:35.699122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:35.699176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-04T02:22:35.699190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2026-01-04T02:22:35.699207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 2 -> 3 2026-01-04T02:22:35.700644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:22:35.700663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2026-01-04T02:22:35.700709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:22:35.702175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, 
operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:35.702217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:22:35.702235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 3 -> 128 2026-01-04T02:22:35.710855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:35.710904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:35.710922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T02:22:35.710945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2026-01-04T02:22:35.715392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:22:35.717266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2026-01-04T02:22:35.717388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:22:35.720096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767493355761, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:22:35.720218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767493355761 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-04T02:22:35.720252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T02:22:35.720477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 128 -> 240 2026-01-04T02:22:35.720514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T02:22:35.720663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-04T02:22:35.720699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, 
pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-04T02:22:35.722930Z node 1 :FLAT_TX_SCHEMESHARD I ... 4046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 47 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976720664 2026-01-04T02:22:44.758868Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976720664 2026-01-04T02:22:44.758883Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976720664, pathId: [OwnerId: 72057594046644480, LocalPathId: 47], version: 5 2026-01-04T02:22:44.758898Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 47] was 2 2026-01-04T02:22:44.759075Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 48 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976720664 2026-01-04T02:22:44.759104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 48 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976720664 2026-01-04T02:22:44.759109Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976720664 2026-01-04T02:22:44.759115Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976720664, pathId: [OwnerId: 72057594046644480, LocalPathId: 48], version: 2 2026-01-04T02:22:44.759123Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 48] was 4 2026-01-04T02:22:44.759148Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976720664, subscribers: 1 2026-01-04T02:22:44.759157Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [3:7591326198500442704:2358] 2026-01-04T02:22:44.760711Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720664 2026-01-04T02:22:44.760738Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976720664 Create topic result: 1 === EnablePQLogs === CreateChannel === NewStub === InitializeWritePQService === InitializeWritePQService start iteration === InitializeWritePQService create streamingWriter === InitializeWritePQService Write 2026-01-04T02:22:44.869255Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2026-01-04T02:22:44.869292Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 1 2026-01-04T02:22:44.869620Z node 3 :PQ_WRITE_PROXY DEBUG: 
write_session_actor.cpp:254: session v1 cookie: 1 sessionId: grpc read done: success: 1 data: init_request { topic: "Root/acc/topic1" message_group_id: "12345678" } 2026-01-04T02:22:44.869707Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:447: session request cookie: 1 topic: "Root/acc/topic1" message_group_id: "12345678" from ipv6:[::1]:42302 2026-01-04T02:22:44.869722Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1538: write session: cookie=1 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:42302 proto=v1 topic=Root/acc/topic1 durationSec=0 2026-01-04T02:22:44.869731Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:571: init check schema 2026-01-04T02:22:44.872538Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:632: session v1 cookie: 1 sessionId: describe result for acl check 2026-01-04T02:22:44.872699Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-01-04T02:22:44.872711Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-01-04T02:22:44.872720Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-01-04T02:22:44.872752Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7591326198500442901:2362] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2026-01-04T02:22:44.872771Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:694: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2026-01-04T02:22:44.873342Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2026-01-04T02:22:44.873409Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie 12345678|e55bb74a-4fdbfd0f-ff7f830e-53bc2a56_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2026-01-04T02:22:44.873698Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: 12345678|e55bb74a-4fdbfd0f-ff7f830e-53bc2a56_0 2026-01-04T02:22:44.874537Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: 12345678|e55bb74a-4fdbfd0f-ff7f830e-53bc2a56_0 grpc read done: success: 0 data: 2026-01-04T02:22:44.874560Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: 12345678|e55bb74a-4fdbfd0f-ff7f830e-53bc2a56_0 grpc read failed Finish: 0 === InitializeWritePQService done === PersQueueClient 2026-01-04T02:22:44.874693Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:824: session v1 closed cookie: 1 sessionId: 12345678|e55bb74a-4fdbfd0f-ff7f830e-53bc2a56_0 2026-01-04T02:22:44.874712Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: 12345678|e55bb74a-4fdbfd0f-ff7f830e-53bc2a56_0 is DEAD 2026-01-04T02:22:44.875011Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializePQ completed 2026-01-04T02:22:44.884747Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2026-01-04T02:22:44.884774Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 2 2026-01-04T02:22:44.885059Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "topic1" message_group_id: "12345678" } 2026-01-04T02:22:44.885149Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:447: session request cookie: 2 topic: "topic1" message_group_id: "12345678" from ipv6:[::1]:42302 2026-01-04T02:22:44.885169Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1538: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:42302 proto=v1 topic=topic1 durationSec=0 2026-01-04T02:22:44.885177Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:571: init check schema 2026-01-04T02:22:44.887029Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:632: session v1 cookie: 2 sessionId: describe result for acl check 2026-01-04T02:22:44.887155Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-01-04T02:22:44.887170Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES 
($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-01-04T02:22:44.887177Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-01-04T02:22:44.887206Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7591326198500442921:2371] (SourceId=12345678, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2026-01-04T02:22:44.887221Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:694: ProceedPartition. session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2026-01-04T02:22:44.887640Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2026-01-04T02:22:44.887782Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie 12345678|602dd86-d360140-ffca7e81-70d76fe6_0 generated for partition 0 topic 'acc/topic1' owner 12345678 2026-01-04T02:22:44.888164Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: 12345678|602dd86-d360140-ffca7e81-70d76fe6_0 2026-01-04T02:22:44.889777Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: 12345678|602dd86-d360140-ffca7e81-70d76fe6_0 grpc read done: success: 0 data: 2026-01-04T02:22:44.889797Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: 12345678|602dd86-d360140-ffca7e81-70d76fe6_0 grpc read failed 2026-01-04T02:22:44.889843Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 2 sessionId: 12345678|602dd86-d360140-ffca7e81-70d76fe6_0 grpc closed 2026-01-04T02:22:44.889856Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: 12345678|602dd86-d360140-ffca7e81-70d76fe6_0 is DEAD 2026-01-04T02:22:44.891705Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison |82.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteSessionWithValidTokenAndACEAndThenRemoveACEAndSendWriteRequest_SessionClosedWithUnauthorizedErrorAfterSuccessfullWriteResponse [GOOD] Test command err: === Server->StartServer(false); 2026-01-04T02:22:32.848436Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326145273637481:2204];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:32.852857Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:32.898315Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: 
ActorUnknown 2026-01-04T02:22:32.903018Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591326144785979926:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:32.903059Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:32.898740Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:32.994149Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:33.341924Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:33.348873Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:33.380683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:33.380823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:33.382541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:33.382622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:33.391506Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:22:33.391640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:33.410302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:33.429434Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:33.573060Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:33.626619Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:33.661492Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001d1e/r3tmp/yandexo7AYNb.tmp 2026-01-04T02:22:33.661522Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001d1e/r3tmp/yandexo7AYNb.tmp 2026-01-04T02:22:33.661713Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001d1e/r3tmp/yandexo7AYNb.tmp 2026-01-04T02:22:33.661915Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-04T02:22:33.711902Z INFO: TTestServer started on Port 20898 GrpcPort 18543 2026-01-04T02:22:33.849261Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:18543 === TenantModeEnabled() = 1 === Init PQ - start server on port 18543 2026-01-04T02:22:33.978256Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:34.068021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-04T02:22:34.068228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.068460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-04T02:22:34.068488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-04T02:22:34.068694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:22:34.068765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:34.070853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-04T02:22:34.070985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:22:34.071159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.071215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-04T02:22:34.071235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2026-01-04T02:22:34.071247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 2 -> 3 2026-01-04T02:22:34.073673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-01-04T02:22:34.073696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2026-01-04T02:22:34.073719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:22:34.080203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.080257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:22:34.080277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 3 -> 128 2026-01-04T02:22:34.082972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.083014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.085892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T02:22:34.085945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2026-01-04T02:22:34.092201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:22:34.095020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2026-01-04T02:22:34.095169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:22:34.109371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767493354144, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:22:34.109482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767493354144 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-04T02:22:34.109506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T02:22:34.109823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 128 -> 240 2026-01-04T02:22:34.109867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T02:22:34.110042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-04T02:22:34.110095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-04T02:22:34.126781Z node 1 :FLAT_TX_SCHEMESHARD I ... fd_0 generated for partition 0 topic 'acc/topic1' owner test-group-id 2026-01-04T02:22:44.136180Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-group-id|e3593aca-80ba9a4-8fe01523-4634c5fd_0 ===Assert streaming op1 ===Assert streaming op2 2026-01-04T02:22:44.137329Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-group-id|e3593aca-80ba9a4-8fe01523-4634c5fd_0 grpc read done: success: 1 data: write_request[data omitted] 2026-01-04T02:22:44.137609Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2026-01-04T02:22:44.137872Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2026-01-04T02:22:44.143421Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse ===ModifyAcl BEFORE MODIFY PERMISSIONS 2026-01-04T02:22:44.153647Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\031\010\001\022\025\032\023test_user_0@builtin" } } TxId: 281474976710666 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:39078" , at schemeshard: 72057594046644480 2026-01-04T02:22:44.153839Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_modify_acl.cpp:33: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976710666:0, at schemeshard: 72057594046644480 2026-01-04T02:22:44.153968Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 46] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 45] 2026-01-04T02:22:44.153989Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2026-01-04T02:22:44.154161Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710666:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2026-01-04T02:22:44.154201Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:22:44.154281Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710666:0 progress is 1/1 2026-01-04T02:22:44.154297Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710666 ready parts: 1/1 2026-01-04T02:22:44.154315Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710666:0 progress is 1/1 2026-01-04T02:22:44.154325Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710666 ready parts: 1/1 2026-01-04T02:22:44.154366Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 3 2026-01-04T02:22:44.154406Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710666, ready parts: 1/1, is published: false 2026-01-04T02:22:44.154428Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 46], at schemeshard: 72057594046644480 2026-01-04T02:22:44.154440Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710666 ready parts: 1/1 2026-01-04T02:22:44.154451Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710666:0 2026-01-04T02:22:44.154465Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710666, publications: 1, subscribers: 0 2026-01-04T02:22:44.154477Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710666, [OwnerId: 72057594046644480, LocalPathId: 46], 4 2026-01-04T02:22:44.156278Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710666, response: Status: StatusSuccess TxId: 281474976710666 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2026-01-04T02:22:44.156573Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710666, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, remove access: -():test_user_0@builtin:- 2026-01-04T02:22:44.156844Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T02:22:44.156867Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710666, path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2026-01-04T02:22:44.157015Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-04T02:22:44.157036Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:7591326177477712350:2393], at schemeshard: 72057594046644480, txId: 281474976710666, path id: 46 2026-01-04T02:22:44.157590Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 
Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710666 2026-01-04T02:22:44.157670Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976710666 2026-01-04T02:22:44.157688Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710666 2026-01-04T02:22:44.157704Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710666, pathId: [OwnerId: 72057594046644480, LocalPathId: 46], version: 4 2026-01-04T02:22:44.157721Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 4 ===Wait for session created with token with removed ACE to die2026-01-04T02:22:44.157798Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710666, subscribers: 0 2026-01-04T02:22:44.159600Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710666 2026-01-04T02:22:44.768656Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7591326194657582817:2382], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:22:44.769090Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=ODQ3MjVkODMtZGU3NWY4OWEtOTk4NTI5YjEtM2NjNzYxZWM=, ActorId: [3:7591326194657582810:2378], ActorState: ExecuteState, LegacyTraceId: 01ke3d0v04fgqgmxvmd6v9cac5, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:22:44.769552Z node 3 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:22:45.117533Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591326177477711721:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:45.117620Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:22:45.133922Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591326179590813065:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:45.134008Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:22:45.137508Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:571: init check schema 2026-01-04T02:22:45.139044Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:632: session v1 cookie: 2 sessionId: test-group-id|e3593aca-80ba9a4-8fe01523-4634c5fd_0 describe result for acl check 2026-01-04T02:22:45.139189Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:815: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_0@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-group-id|e3593aca-80ba9a4-8fe01523-4634c5fd_0 2026-01-04T02:22:45.139592Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-group-id|e3593aca-80ba9a4-8fe01523-4634c5fd_0 is DEAD 2026-01-04T02:22:45.139937Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison 
status: UNAUTHORIZED issues { message: "access to topic \'Topic /Root/acc/topic1 in database: /Root\' denied for \'test_user_0@builtin\' due to \'no WriteTopic rights\', Marker# PQ1125" issue_code: 500018 severity: 1 } >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_3 [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 |82.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case >> TControlPlaneProxyTest::ShouldSendDescribeJob [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateConnection |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut |82.4%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/ydb-public-sdk-cpp-src-client-topic-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestLimiterLimitsWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2026-01-04T02:22:32.911781Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326144635772272:2090];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:32.911976Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:32.958250Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:22:32.969214Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:33.015284Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:33.262932Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:33.263078Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:22:33.281887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:33.315227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:33.315405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:33.315585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:33.315654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:33.381841Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 
2 Cookie 2 2026-01-04T02:22:33.389499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:33.392354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:33.475774Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:33.494673Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:33.494618Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:33.733732Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001d1d/r3tmp/yandexhTu6hl.tmp 2026-01-04T02:22:33.733759Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001d1d/r3tmp/yandexhTu6hl.tmp 2026-01-04T02:22:33.790519Z INFO: TTestServer started on Port 23719 GrpcPort 1397 2026-01-04T02:22:33.817965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001d1d/r3tmp/yandexhTu6hl.tmp 2026-01-04T02:22:33.818124Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:33.921580Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:33.996461Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:1397 === TenantModeEnabled() = 1 === Init PQ - start server on port 1397 2026-01-04T02:22:34.396872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-04T02:22:34.397040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.397201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-04T02:22:34.397241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-04T02:22:34.397402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:22:34.397444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:34.399382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-04T02:22:34.399506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:22:34.399652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.399692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-04T02:22:34.399706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2026-01-04T02:22:34.399718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 2 -> 3 2026-01-04T02:22:34.402232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.402270Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:22:34.402285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 3 -> 128 2026-01-04T02:22:34.405494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.405526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.405556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T02:22:34.405592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2026-01-04T02:22:34.409327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:22:34.411968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:22:34.411999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2026-01-04T02:22:34.412020Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:22:34.412791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2026-01-04T02:22:34.412886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:22:34.415073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767493354459, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:22:34.415192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767493354459 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-04T02:22:34.415216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T02:22:34.415514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 128 -> 240 2026-01-04T02:22:34.415560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T02:22:34.415691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-04T02:22:34.415731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-04T02:22:34.417950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T02:22:34.417975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.c ... 
558144Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:45.558176Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.558198Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:45.558219Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.558233Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:22:45.571953Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:45.571982Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.571994Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:45.572012Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.572024Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-04T02:22:45.641606Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:45.641644Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.641656Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:45.641689Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.641704Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-04T02:22:45.658505Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:45.658537Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.658550Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:45.658581Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.658595Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:22:45.672333Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:45.672364Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.672376Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 
2026-01-04T02:22:45.672396Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.672410Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-04T02:22:45.741937Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:45.741966Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.741991Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:45.742015Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.742027Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-04T02:22:45.758870Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:45.758902Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.758914Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:45.758936Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.758950Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:22:45.772675Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:45.772710Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.772723Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:45.772743Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.772757Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-04T02:22:45.842328Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:45.842364Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.842377Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:45.842397Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.842423Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-04T02:22:45.859212Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 
2026-01-04T02:22:45.859257Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.859270Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:45.859291Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.859304Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:22:45.873052Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:45.873089Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.873101Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:45.873123Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.873140Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-04T02:22:45.943839Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:45.943879Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.943891Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:45.943915Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.943931Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-04T02:22:45.949905Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591326179747142723:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:45.949984Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:22:45.959517Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:45.959544Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.959553Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:45.959567Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.959576Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:22:45.973443Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:45.973481Z node 3 
:PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.973495Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:45.973516Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:45.973530Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-04T02:22:46.045287Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:46.045328Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:46.045340Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:46.045358Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:46.045371Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist |82.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] |82.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::Auth_WriteUpdateTokenRequestWithValidTokenButWithoutACL_SessionClosedWithUnauthorizedError [GOOD] Test command err: === Server->StartServer(false); 2026-01-04T02:22:34.566444Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326155020377144:2267];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:34.566507Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:34.658917Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591326153002732088:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:34.658953Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:34.735694Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:34.854871Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:35.113666Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:35.115794Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:35.285285Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:35.532587Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:35.541262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:35.541364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:35.562906Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:35.578674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:35.578745Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:35.581966Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:35.582062Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:22:35.643638Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:35.643773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:35.658454Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:22:35.659718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:35.853909Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:36.094452Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:36.108919Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/0018c4/r3tmp/yandexUUsq7i.tmp 2026-01-04T02:22:36.108953Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/0018c4/r3tmp/yandexUUsq7i.tmp 2026-01-04T02:22:36.109113Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/0018c4/r3tmp/yandexUUsq7i.tmp 2026-01-04T02:22:36.109188Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:36.220677Z INFO: TTestServer started 
on Port 30034 GrpcPort 11821 PQClient connected to localhost:11821 === TenantModeEnabled() = 1 === Init PQ - start server on port 11821 2026-01-04T02:22:36.659809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-04T02:22:36.659983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:36.660167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-04T02:22:36.660190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-04T02:22:36.660348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:22:36.660393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:36.670635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-04T02:22:36.670788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:22:36.670986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:36.671072Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-04T02:22:36.671096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2026-01-04T02:22:36.671115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 2026-01-04T02:22:36.675087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:36.675125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:22:36.675141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 2026-01-04T02:22:36.677223Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:36.677274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:36.677348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-04T02:22:36.677384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2026-01-04T02:22:36.681753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:22:36.682080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:22:36.682108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2026-01-04T02:22:36.682126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:22:36.683793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2026-01-04T02:22:36.683900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:22:36.686315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767493356734, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:22:36.686450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767493356734 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-04T02:22:36.686480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-04T02:22:36.686751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 128 -> 240 2026-01-04T02:22:36.686792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# ... 
is DEAD 2026-01-04T02:22:45.894989Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison === InitializePQ completed BEFORE MODIFY PERMISSIONS 2026-01-04T02:22:45.915233Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root/acc" OperationType: ESchemeOpModifyACL ModifyACL { Name: "topic1" DiffACL: "\n\027\010\001\022\023\032\021test_user@builtin\n\037\010\000\022\033\010\001\020\366\213\001\032\021test_user@builtin \003" } } TxId: 281474976710665 TabletId: 72057594046644480 PeerName: "ipv6:[::1]:42276" , at schemeshard: 72057594046644480 2026-01-04T02:22:45.915488Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_modify_acl.cpp:33: TModifyACL Propose, path: /Root/acc/topic1, operationId: 281474976710665:0, at schemeshard: 72057594046644480 2026-01-04T02:22:45.915621Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046644480, LocalPathId: 46] name: topic1 type: EPathTypePersQueueGroup state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046644480, LocalPathId: 45] 2026-01-04T02:22:45.915632Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2026-01-04T02:22:45.915823Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710665:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046644480 2026-01-04T02:22:45.915873Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:22:45.915956Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710665:0 progress is 1/1 2026-01-04T02:22:45.915967Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2026-01-04T02:22:45.915985Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710665:0 progress is 1/1 2026-01-04T02:22:45.915994Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2026-01-04T02:22:45.916046Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 3 2026-01-04T02:22:45.916110Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710665, ready parts: 1/1, is published: false 2026-01-04T02:22:45.916136Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 46], at schemeshard: 72057594046644480 2026-01-04T02:22:45.916148Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710665 ready parts: 1/1 2026-01-04T02:22:45.916161Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and 
all the parts is done, operation id: 281474976710665:0 2026-01-04T02:22:45.916174Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710665, publications: 1, subscribers: 0 2026-01-04T02:22:45.916185Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710665, [OwnerId: 72057594046644480, LocalPathId: 46], 3 2026-01-04T02:22:45.922939Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710665, response: Status: StatusSuccess TxId: 281474976710665 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2026-01-04T02:22:45.923277Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710665, database: /Root, subject: , status: StatusSuccess, operation: MODIFY ACL, path: /Root/acc/topic1, add access: +W:test_user@builtin, remove access: -():test_user@builtin:- 2026-01-04T02:22:45.923445Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T02:22:45.923478Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710665, path id: [OwnerId: 72057594046644480, LocalPathId: 46] 2026-01-04T02:22:45.923706Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-04T02:22:45.923722Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:7591326189175713635:2392], at schemeshard: 72057594046644480, txId: 281474976710665, path id: 46 2026-01-04T02:22:45.925070Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710665 2026-01-04T02:22:45.925145Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 46 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710665 2026-01-04T02:22:45.925160Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710665 2026-01-04T02:22:45.925180Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710665, pathId: [OwnerId: 72057594046644480, LocalPathId: 46], version: 3 2026-01-04T02:22:45.925199Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 46] was 4 2026-01-04T02:22:45.925300Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710665, subscribers: 0 2026-01-04T02:22:45.927663Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710665 2026-01-04T02:22:45.928701Z 
node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:107: new grpc connection 2026-01-04T02:22:45.928717Z node 3 :PQ_WRITE_PROXY DEBUG: grpc_pq_write.h:141: new session created cookie 2 2026-01-04T02:22:45.929026Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: grpc read done: success: 1 data: init_request { topic: "/Root/acc/topic1" message_group_id: "test-message-group" } 2026-01-04T02:22:45.929109Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:447: session request cookie: 2 topic: "/Root/acc/topic1" message_group_id: "test-message-group" from ipv6:[::1]:42258 2026-01-04T02:22:45.929127Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1538: write session: cookie=2 sessionId= userAgent="pqv1 server" ip=ipv6:[::1]:42258 proto=v1 topic=/Root/acc/topic1 durationSec=0 2026-01-04T02:22:45.929136Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:571: init check schema 2026-01-04T02:22:45.930074Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:632: session v1 cookie: 2 sessionId: describe result for acl check 2026-01-04T02:22:45.930267Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-01-04T02:22:45.930277Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-01-04T02:22:45.930288Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-01-04T02:22:45.930325Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7591326202060616777:2373] (SourceId=test-message-group, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=0 2026-01-04T02:22:45.930346Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:694: ProceedPartition. 
session cookie: 2 sessionId: partition: 0 expectedGeneration: (NULL) 2026-01-04T02:22:45.931370Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037889 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037889, NodeId 3, Generation: 1 2026-01-04T02:22:45.931489Z node 3 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie test-message-group|ef781665-7bbf5ae4-59caf9c-7df9494c_0 generated for partition 0 topic 'acc/topic1' owner test-message-group 2026-01-04T02:22:45.931906Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 2 partition: 0 MaxSeqNo: 0 sessionId: test-message-group|ef781665-7bbf5ae4-59caf9c-7df9494c_0 2026-01-04T02:22:45.933062Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-message-group|ef781665-7bbf5ae4-59caf9c-7df9494c_0 grpc read done: success: 1 data: update_token_request [content omitted] 2026-01-04T02:22:45.933390Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:1352: updating token 2026-01-04T02:22:45.933438Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:571: init check schema 2026-01-04T02:22:45.934244Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:632: session v1 cookie: 2 sessionId: test-message-group|ef781665-7bbf5ae4-59caf9c-7df9494c_0 describe result for acl check 2026-01-04T02:22:45.934367Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:815: session v1 error cookie: 2 reason: access to topic 'Topic /Root/acc/topic1 in database: /Root' denied for 'test_user_2@builtin' due to 'no WriteTopic rights', Marker# PQ1125 sessionId: test-message-group|ef781665-7bbf5ae4-59caf9c-7df9494c_0 2026-01-04T02:22:45.934695Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-message-group|ef781665-7bbf5ae4-59caf9c-7df9494c_0 is DEAD 2026-01-04T02:22:45.935075Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037889 (partition=0) Received event: NActors::TEvents::TEvPoison |82.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |82.4%| [LD] {RESULT} $(B)/ydb/core/kafka_proxy/ut/ydb-core-kafka_proxy-ut |82.5%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcWriteNoDC [GOOD] >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC |82.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] |82.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] >> TControlPlaneProxyTest::ShouldSendCreateConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendListConnections |82.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... results_accumulator.log} |82.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... 
results_accumulator.log} |82.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v0-tables_format_v0-std] [GOOD] |82.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> TControlPlaneProxyTest::ShouldSendListConnections [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeConnection |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |82.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueCommonTest::TestWriteWithRateLimiterWithUserPayloadRateLimit [GOOD] Test command err: === Server->StartServer(false); 2026-01-04T02:22:32.953486Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326143971017694:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:32.953553Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:33.026943Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591326149427014246:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:33.026999Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:33.049917Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:33.131679Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:33.497920Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:33.548168Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:33.611172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:33.611290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:33.615861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:33.615944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:33.696037Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:33.701513Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:33.757639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-04T02:22:33.759303Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:22:33.761559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:33.832517Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:34.008836Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:34.041618Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001d20/r3tmp/yandexHnBvnz.tmp 2026-01-04T02:22:34.041653Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001d20/r3tmp/yandexHnBvnz.tmp 2026-01-04T02:22:34.041859Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001d20/r3tmp/yandexHnBvnz.tmp 2026-01-04T02:22:34.042003Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:34.045764Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:34.129292Z INFO: TTestServer started on Port 19587 GrpcPort 2257 2026-01-04T02:22:34.134067Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:2257 === TenantModeEnabled() = 1 === Init PQ - start server on port 2257 2026-01-04T02:22:34.650472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-04T02:22:34.653459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.653655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-04T02:22:34.653682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-04T02:22:34.662044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:22:34.662206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:34.673489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-04T02:22:34.673683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:22:34.673862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.673929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-04T02:22:34.673945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2026-01-04T02:22:34.673976Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 2 -> 3 2026-01-04T02:22:34.682586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.682657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:22:34.682673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 3 -> 128 2026-01-04T02:22:34.684114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:22:34.684130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2026-01-04T02:22:34.684145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:22:34.688121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.688149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:22:34.688169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T02:22:34.688187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2026-01-04T02:22:34.693893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: 
true } CoordinatorID: 72057594046316545 2026-01-04T02:22:34.696608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2026-01-04T02:22:34.696702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:22:34.702569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767493354746, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:22:34.702712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767493354746 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-04T02:22:34.702742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T02:22:34.702994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 128 -> 240 2026-01-04T02:22:34.707745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T02:22:34.708463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-04T02:22:34.708533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-04T02:22:34.711420Z node 1 :FLA ... 
EBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.745861Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-04T02:22:48.750295Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:48.750333Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.750346Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:48.750363Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.750372Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:22:48.750428Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:48.750439Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.750449Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:48.750461Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.750468Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-04T02:22:48.769017Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:48.769053Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.769063Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:48.769080Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.769091Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][0][StateIdle] Try persist 2026-01-04T02:22:48.846243Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:48.846283Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.846295Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:48.846314Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.846325Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-04T02:22:48.852969Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:48.853005Z node 1 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.853019Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:48.853041Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.853054Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:22:48.853106Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:48.853115Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.853136Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:48.853147Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.853155Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-04T02:22:48.862184Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037893][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:22:48.869115Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:48.869144Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.869152Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:48.869167Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.869176Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][0][StateIdle] Try persist 2026-01-04T02:22:48.946470Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:48.946509Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.946521Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:48.946541Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.946554Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-04T02:22:48.953103Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:48.953146Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.953159Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:48.953178Z node 
1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.953189Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:22:48.953250Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:48.953269Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.953277Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:48.953290Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.953300Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-04T02:22:48.969478Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:48.969509Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.969522Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:48.969539Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:48.969550Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][0][StateIdle] Try persist 2026-01-04T02:22:49.046827Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:49.046860Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:49.046876Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:49.046893Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:49.046904Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-04T02:22:49.058485Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:49.058519Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:49.058536Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:49.058554Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:49.058564Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:22:49.058621Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:49.058631Z node 1 :PERSQUEUE 
DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:49.058638Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:49.058647Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:49.058655Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-04T02:22:49.069825Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:22:49.069857Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:49.069867Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:22:49.069883Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:22:49.069893Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][0][StateIdle] Try persist |82.5%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain >> TControlPlaneProxyTest::ShouldSendDescribeConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyConnection |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/ydb-core-tx-datashard-ut_change_collector |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/ydb-core-tx-datashard-ut_incremental_backup |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/ydb-core-tx-schemeshard-ut_subdomain >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_4_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 4] >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_18_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 18] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_6_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 6] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_20_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 20] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_12_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 12] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_15_compression-COMPRESSION = "zstd", 
COMPRESSION_LEVEL = 15] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_2_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 2] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_compression-COMPRESSION = "zstd"] >> alter_compression.py::TestAllCompression::test_all_supported_compression[lz4_compression-COMPRESSION = "lz4"] >> TControlPlaneProxyTest::ShouldSendModifyConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteConnection |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_queues_count_over_limit[tables_format_v1] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_8_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 8] |82.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |82.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TControlPlaneProxyTest::ShouldSendDeleteConnection [GOOD] >> TControlPlaneProxyTest::ShouldSendTestConnection |82.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |82.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut >> TControlPlaneProxyTest::ShouldSendTestConnection [GOOD] >> test_simple.py::TestSimple::test[alter_table] >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] >> test_alter_compression.py::TestAlterCompression::test[alter_compression] |82.5%| [LD] {RESULT} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut >> TControlPlaneProxyTest::ShouldSendCreateBinding |82.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/address_classification/ut/ydb-core-mind-address_classification-ut |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |82.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot |82.5%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/ydb-core-tx-datashard-ut_snapshot >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] >> test_alter_tiering.py::TestAlterTiering::test[many_tables] |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |82.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_splitmerge/test-results/unittest/{meta.json ... results_accumulator.log} |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test |82.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_secret/test-results/unittest/{meta.json ... 
results_accumulator.log} |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test |82.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |82.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] |82.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |82.5%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test |82.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |82.5%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> TControlPlaneProxyTest::ShouldSendCreateBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendListBindings |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClass [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI |82.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_compaction/ydb-core-tx-schemeshard-ut_compaction >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] >> TControlPlaneProxyTest::ShouldSendListBindings [GOOD] >> TControlPlaneProxyTest::ShouldSendDescribeBinding |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_table/ydb-core-tx-datashard-ut_read_table >> TSchemeShardSubDomainTest::ForceDropTwice >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables >> TSchemeShardSubDomainTest::Redefine |82.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLAlterSettings::test_case [GOOD] |82.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-true >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |82.6%| [LD] {RESULT} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/secret/ut/ydb-services-metadata-secret-ut >> TControlPlaneProxyTest::ShouldSendDescribeBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendModifyBinding >> 
TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-EnablePersistentPartitionStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:23:03.741125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:23:03.741212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:23:03.741259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:23:03.741297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:23:03.741361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:23:03.741391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:23:03.741459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:23:03.741540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:23:03.742761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:23:03.743057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:23:03.890887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:23:03.890944Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:03.911656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:23:03.916345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:23:03.916541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:23:03.927411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:23:03.927687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:23:03.928267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:03.928452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:23:03.931994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:03.932191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:23:03.933416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:23:03.933476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:03.933588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:23:03.933644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:23:03.933705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:23:03.933877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:23:03.958239Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:241:2058] recipient: [1:15:2062] 2026-01-04T02:23:04.094687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:23:04.094871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:04.095016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:23:04.095063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:23:04.095241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:23:04.095291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:04.097192Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:04.097381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:23:04.097530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:04.097583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:23:04.097622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:23:04.097651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:23:04.099191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:04.099236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:23:04.099269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:23:04.100561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:04.100593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:04.100632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:23:04.100676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:23:04.103390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:23:04.105465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:23:04.105605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:23:04.106473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 
2026-01-04T02:23:04.106581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:23:04.106626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:23:04.106824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:23:04.106873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:23:04.107037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:23:04.107101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:23:04.109969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:23:04.110017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 04T02:23:04.168068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:23:04.168110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:23:04.169375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:23:04.169547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:23:04.169729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:23:04.169758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:279:2269] 2026-01-04T02:23:04.170007Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 0 2026-01-04T02:23:04.170227Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 2026-01-04T02:23:04.170354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 
2026-01-04T02:23:04.170695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2026-01-04T02:23:04.171060Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 2026-01-04T02:23:04.171140Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 2026-01-04T02:23:04.171262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:04.171468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2026-01-04T02:23:04.171709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-04T02:23:04.171864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-04T02:23:04.172060Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 2026-01-04T02:23:04.172320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-01-04T02:23:04.172468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-04T02:23:04.172821Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 2026-01-04T02:23:04.172927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-04T02:23:04.173069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-04T02:23:04.173392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-04T02:23:04.173545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount 
reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-04T02:23:04.174538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:23:04.174591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:23:04.174717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-04T02:23:04.175125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:23:04.175171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:23:04.175232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:23:04.175987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-04T02:23:04.178101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:23:04.178209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-04T02:23:04.178334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-04T02:23:04.182871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:23:04.182977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-04T02:23:04.183169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:23:04.183296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2026-01-04T02:23:04.183926Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:23:04.184153Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 217us result status StatusPathDoesNotExist 2026-01-04T02:23:04.184332Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest 
resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:23:04.184714Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:23:04.184921Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 169us result status StatusSuccess 2026-01-04T02:23:04.185452Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |82.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ForceDropTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 
2026-01-04T02:23:03.263729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:23:03.263826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:23:03.263862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:23:03.263901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:23:03.263956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:23:03.263989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:23:03.264045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:23:03.264169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:23:03.264992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:23:03.265285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:23:03.430857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:23:03.430918Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:03.448479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:23:03.448704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:23:03.448898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:23:03.482918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:23:03.483157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:23:03.483850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:03.484119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:23:03.496403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:03.496608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: 
[RootShredManager] Stop 2026-01-04T02:23:03.509669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:23:03.509770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:03.509924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:23:03.509977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:23:03.510106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:23:03.510359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:23:03.538343Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:23:03.759478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:23:03.759687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:03.759868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:23:03.759924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:23:03.760135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:23:03.760203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:03.763396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:03.763566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:23:03.763789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2026-01-04T02:23:03.763842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:23:03.763885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:23:03.763916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:23:03.766671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:03.766748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:23:03.766801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:23:03.768810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:03.768862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:03.768917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:23:03.768967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:23:03.782808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:23:03.790201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:23:03.790440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:23:03.791549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:03.791701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:23:03.791751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:23:03.792026Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:23:03.792100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:23:03.792280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:23:03.792390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:23:03.795096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:23:03.795144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 409551 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186233409551 2026-01-04T02:23:04.202888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-04T02:23:04.203064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-04T02:23:04.203630Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-04T02:23:04.204259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-01-04T02:23:04.204430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409548 2026-01-04T02:23:04.206768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-04T02:23:04.206944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-04T02:23:04.208492Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 Forgetting tablet 72075186233409551 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2026-01-04T02:23:04.210615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 
72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-04T02:23:04.210823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-04T02:23:04.211729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:23:04.211785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:23:04.211907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-04T02:23:04.212600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:23:04.212667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:23:04.212750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:23:04.213513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-04T02:23:04.213568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-01-04T02:23:04.217242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:23:04.217300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-04T02:23:04.217421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-04T02:23:04.217443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-04T02:23:04.217588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-04T02:23:04.217619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-01-04T02:23:04.220790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:23:04.220832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-04T02:23:04.220921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-04T02:23:04.220964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-04T02:23:04.221137Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:23:04.221216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 102 2026-01-04T02:23:04.221501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:23:04.221552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-01-04T02:23:04.221627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:23:04.221648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:23:04.222111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:23:04.222234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:23:04.222284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:670:2574] 2026-01-04T02:23:04.222511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:23:04.222581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:23:04.222607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:670:2574] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2026-01-04T02:23:04.223084Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:23:04.223323Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 228us result status StatusPathDoesNotExist 2026-01-04T02:23:04.223499Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
2026-01-04T02:23:04.223878Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:23:04.224485Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 162us result status StatusSuccess 2026-01-04T02:23:04.224931Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |82.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication_reboots/ydb-core-tx-schemeshard-ut_replication_reboots >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] >> TSchemeShardSubDomainTest::Redefine [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: 
[1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:23:05.389786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:23:05.393789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:23:05.393881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:23:05.393926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:23:05.393992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:23:05.394030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:23:05.394108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:23:05.394202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:23:05.395150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:23:05.395554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:23:05.629642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:23:05.629699Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:05.663466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:23:05.668179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:23:05.668401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:23:05.683765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:23:05.684097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:23:05.684852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:05.685160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:23:05.691883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at 
schemeshard: 72057594046678944 2026-01-04T02:23:05.692137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:23:05.693489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:23:05.693558Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:05.693662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:23:05.693709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:23:05.693772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:23:05.693971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:23:05.717080Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:241:2058] recipient: [1:15:2062] 2026-01-04T02:23:05.989549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:23:05.989787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:05.990044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:23:05.990111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:23:05.990353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:23:05.990422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:05.998726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:05.999038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:23:05.999270Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:05.999345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:23:05.999388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:23:05.999422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:23:06.001584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:06.001649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:23:06.001692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:23:06.003542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:06.003599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:06.003662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:23:06.003729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:23:06.022270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:23:06.033758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:23:06.033966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:23:06.035054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:06.035178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:23:06.035229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:23:06.035533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:23:06.035597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:23:06.035771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:23:06.035890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:23:06.039623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:23:06.039667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 2026-01-04T02:23:06.322484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-04T02:23:06.322783Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-04T02:23:06.323005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-01-04T02:23:06.323167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409548 2026-01-04T02:23:06.324520Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-04T02:23:06.330895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-04T02:23:06.331196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409551 Forgetting tablet 72075186233409547 2026-01-04T02:23:06.333514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-04T02:23:06.333754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 
Forgetting tablet 72075186233409549 2026-01-04T02:23:06.335294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:23:06.335351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:23:06.335490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-04T02:23:06.338761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:23:06.338819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:23:06.338892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:23:06.340431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-04T02:23:06.340489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-01-04T02:23:06.340659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:23:06.340685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-04T02:23:06.341412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6153: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2026-01-04T02:23:06.343939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-04T02:23:06.343983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-04T02:23:06.344090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-04T02:23:06.344115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-01-04T02:23:06.344235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6153: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2026-01-04T02:23:06.344288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:23:06.344310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-04T02:23:06.346693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-04T02:23:06.346752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 
72075186233409549 2026-01-04T02:23:06.346828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:23:06.347035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2026-01-04T02:23:06.347275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-04T02:23:06.347330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-01-04T02:23:06.347418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:23:06.347444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:23:06.347861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-04T02:23:06.347954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-04T02:23:06.347987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:599:2514] 2026-01-04T02:23:06.348190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:23:06.348267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:23:06.348290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:599:2514] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-01-04T02:23:06.348732Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:23:06.349022Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 177us result status StatusPathDoesNotExist 2026-01-04T02:23:06.349242Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" 
PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:23:06.349724Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:23:06.350197Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 485us result status StatusSuccess 2026-01-04T02:23:06.350695Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejectsWithIndexedTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:23:03.772508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:23:03.772597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-04T02:23:03.772648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:23:03.772691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:23:03.772726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:23:03.772767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:23:03.772835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:23:03.772925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:23:03.773780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:23:03.774843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:23:03.876122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:23:03.876179Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:03.890931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:23:03.895659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:23:03.895845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:23:03.901278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:23:03.901554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:23:03.902337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:03.902574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:23:03.905833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:03.906043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:23:03.907276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:23:03.907337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:03.907442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2026-01-04T02:23:03.907491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:23:03.907561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:23:03.907697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:23:03.923238Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:241:2058] recipient: [1:15:2062] 2026-01-04T02:23:04.092517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:23:04.092753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:04.092944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:23:04.092994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:23:04.093216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:23:04.093297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:04.095634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:04.095817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:23:04.096038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:04.096143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:23:04.096191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:23:04.096226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:23:04.098264Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:04.098330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:23:04.098374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:23:04.099988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:04.100026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:04.100097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:23:04.100142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:23:04.103501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:23:04.106457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:23:04.106624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:23:04.107590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:04.107708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:23:04.107755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:23:04.108005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:23:04.108049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:23:04.108210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:23:04.108282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:23:04.112733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:23:04.112781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... shard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 608 RawX2: 4294969843 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2026-01-04T02:23:05.349638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409549, partId: 2 2026-01-04T02:23:05.349765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 107:2, at schemeshard: 72057594046678944, message: Source { RawX1: 608 RawX2: 4294969843 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2026-01-04T02:23:05.349837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 107:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:23:05.349939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 107:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 608 RawX2: 4294969843 } Origin: 72075186233409549 State: 2 TxId: 107 Step: 0 Generation: 2 2026-01-04T02:23:05.349997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 107:2, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:05.350029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 107:2, at schemeshard: 72057594046678944 2026-01-04T02:23:05.350060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 107:2, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-01-04T02:23:05.350111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 107:2 129 -> 240 2026-01-04T02:23:05.351460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-04T02:23:05.356147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-04T02:23:05.356285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-04T02:23:05.356352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-04T02:23:05.356419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-04T02:23:05.356518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2026-01-04T02:23:05.362878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-04T02:23:05.363294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-04T02:23:05.363349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 107:0 ProgressState 2026-01-04T02:23:05.363450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 2/3 2026-01-04T02:23:05.363487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2026-01-04T02:23:05.363537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 2/3 2026-01-04T02:23:05.363569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 2/3 2026-01-04T02:23:05.363626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 2/3, is published: true 2026-01-04T02:23:05.363929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:2, at schemeshard: 72057594046678944 2026-01-04T02:23:05.364472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:2, at schemeshard: 72057594046678944 2026-01-04T02:23:05.364516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 107:2 ProgressState 2026-01-04T02:23:05.364586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:2 progress is 3/3 2026-01-04T02:23:05.364611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2026-01-04T02:23:05.364641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:2 progress is 3/3 2026-01-04T02:23:05.364663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2026-01-04T02:23:05.364688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 3/3, is published: true 2026-01-04T02:23:05.364776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:481:2430] message: TxId: 107 2026-01-04T02:23:05.364840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 3/3 2026-01-04T02:23:05.364885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-01-04T02:23:05.364936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:0 2026-01-04T02:23:05.365048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2026-01-04T02:23:05.365491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:1 2026-01-04T02:23:05.365522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:1 2026-01-04T02:23:05.365562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2026-01-04T02:23:05.365591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:2 2026-01-04T02:23:05.365611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:2 2026-01-04T02:23:05.365654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2026-01-04T02:23:05.368267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-04T02:23:05.368311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:535:2484] TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2026-01-04T02:23:05.372328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "Table7" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value0" Type: "Utf8" } Columns { Name: "Value1" Type: "Utf8" } Columns { Name: "Value2" Type: "Utf8" } Columns { Name: "Value3" Type: "Utf8" } Columns { Name: "Value4" Type: "Utf8" } KeyColumnNames: "RowId" } IndexDescription { Name: "UserDefinedIndexByValue0" KeyColumnNames: "Value0" } IndexDescription { Name: "UserDefinedIndexByValue1" KeyColumnNames: "Value1" } IndexDescription { Name: "UserDefinedIndexByValue2" KeyColumnNames: "Value2" } IndexDescription { Name: "UserDefinedIndexByValue3" KeyColumnNames: "Value3" } IndexDescription { Name: "UserDefinedIndexByValue4" KeyColumnNames: "Value4" } } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:23:05.372856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_indexed_table.cpp:101: TCreateTableIndex construct operation table path: /MyRoot/USER_0/Table7 domain path id: [OwnerId: 72057594046678944, LocalPathId: 2] domain path: /MyRoot/USER_0 shardsToCreate: 6 GetShardsInside: 4 MaxShards: 7 2026-01-04T02:23:05.372991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 108:0, explain: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2026-01-04T02:23:05.373037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, at schemeshard: 72057594046678944 2026-01-04T02:23:05.375698Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusResourceExhausted Reason: "indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5" TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:23:05.375978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot/USER_0, subject: , status: StatusResourceExhausted, reason: indexes count has reached maximum value in the table, children limit for dir in domain: 4, intention to create new children: 5, operation: CREATE TABLE WITH INDEXES, path: /MyRoot/USER_0/Table7 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2026-01-04T02:23:05.376454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2026-01-04T02:23:05.376502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2026-01-04T02:23:05.376998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2026-01-04T02:23:05.377090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-04T02:23:05.377126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:726:2646] TestWaitNotification: OK eventTxId 108 |82.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |82.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Redefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:23:04.447555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:23:04.447680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:23:04.447738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:23:04.447777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:23:04.447812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:23:04.447839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:23:04.447889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:23:04.447992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:23:04.454031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:23:04.454421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:23:05.085716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:23:05.085789Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:05.146293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:23:05.150691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:23:05.150892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:23:05.170802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:23:05.171191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:23:05.171898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:05.172144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:23:05.180055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:05.180278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:23:05.181556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:23:05.181616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:05.181735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:23:05.181784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:23:05.181865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:23:05.182030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:23:05.199242Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 
72057594046678944 is [1:126:2151] sender: [1:241:2058] recipient: [1:15:2062] 2026-01-04T02:23:05.585704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:23:05.586017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:05.586267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:23:05.586320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:23:05.586629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:23:05.586724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:05.595281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:05.595571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:23:05.595810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:05.595895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:23:05.595942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:23:05.595979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:23:05.602583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:05.602671Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:23:05.602722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:23:05.611263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:05.611334Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:05.611397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:23:05.611506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:23:05.615578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:23:05.618187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:23:05.618386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:23:05.619697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:05.619856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:23:05.619910Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:23:05.620257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:23:05.620320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:23:05.620512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:23:05.620616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:23:05.623586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:23:05.623650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:23:06.216050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:23:06.216100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2026-01-04T02:23:06.216131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-04T02:23:06.216199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-04T02:23:06.256416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 3, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-04T02:23:06.256579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:23:06.256649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:23:06.256674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:23:06.256960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:23:06.301237Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2026-01-04T02:23:06.302363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:06.302728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-04T02:23:06.303081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:23:06.303548Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-01-04T02:23:06.304277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-04T02:23:06.304505Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-04T02:23:06.305067Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 Forgetting tablet 72075186233409547 2026-01-04T02:23:06.315828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-04T02:23:06.316131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-04T02:23:06.317528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:23:06.317587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:23:06.317751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-04T02:23:06.318254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:23:06.318308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:23:06.318395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:23:06.335236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:23:06.335315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-04T02:23:06.335830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-04T02:23:06.335863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-04T02:23:06.335938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:23:06.336016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-04T02:23:06.345909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:23:06.346048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-04T02:23:06.346424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-04T02:23:06.346483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-04T02:23:06.347003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-04T02:23:06.347129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:23:06.347189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:577:2533] TestWaitNotification: OK eventTxId 104 2026-01-04T02:23:06.347857Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:23:06.348095Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 227us result status StatusPathDoesNotExist 2026-01-04T02:23:06.348277Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:23:06.348870Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:23:06.349101Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 193us result status StatusSuccess 2026-01-04T02:23:06.349553Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 
1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |82.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TControlPlaneProxyTest::ShouldSendModifyBinding [GOOD] >> TControlPlaneProxyTest::ShouldSendDeleteBinding |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |82.6%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator/ut/ydb-core-tx-tx_allocator-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantDirTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:23:05.339395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:23:05.339503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:23:05.339561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:23:05.339604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:23:05.339648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:23:05.339680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:23:05.339742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:23:05.339835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:23:05.340724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:23:05.341000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:23:05.597159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:23:05.597221Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:05.612008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:23:05.616100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:23:05.616287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:23:05.622082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:23:05.622332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:23:05.623068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:05.623348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:23:05.626962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:05.627150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:23:05.628338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:23:05.628389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:05.628494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:23:05.628542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:23:05.628601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:23:05.628731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:23:05.638125Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 
72057594046678944 is [1:126:2151] sender: [1:241:2058] recipient: [1:15:2062] 2026-01-04T02:23:05.820048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:23:05.820327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:05.820537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:23:05.820584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:23:05.820821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:23:05.820899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:05.824377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:05.824621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:23:05.824844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:05.824919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:23:05.824959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:23:05.824994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:23:05.828038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:05.828132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:23:05.828195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:23:05.831542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:05.831631Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:23:05.831706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:23:05.831769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:23:05.837050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:23:05.841959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:23:05.842206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:23:05.843365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:05.843508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:23:05.843563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:23:05.843862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:23:05.843918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:23:05.844126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:23:05.844239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:23:05.846842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:23:05.846890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
7594046678944 2026-01-04T02:23:06.948225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 Forgetting tablet 72075186233409551 2026-01-04T02:23:06.949395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:23:06.950766Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-04T02:23:06.950965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-04T02:23:06.951291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-04T02:23:06.951667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2026-01-04T02:23:06.952702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-01-04T02:23:06.952945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-04T02:23:06.957229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-04T02:23:06.957505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-04T02:23:06.958284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-04T02:23:06.958477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-04T02:23:06.958782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:23:06.959235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:23:06.959293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:23:06.959420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-04T02:23:06.967215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-04T02:23:06.967308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-01-04T02:23:06.967517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2026-01-04T02:23:06.967544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2026-01-04T02:23:06.967661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-04T02:23:06.967845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:23:06.967900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:23:06.967993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:23:06.968283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:23:06.968330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-04T02:23:06.972672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-04T02:23:06.972722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-04T02:23:06.972828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-04T02:23:06.972857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-01-04T02:23:06.972909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:23:06.972938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-04T02:23:06.973015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-04T02:23:06.973078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-04T02:23:06.973189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:23:06.975898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-04T02:23:06.976170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:23:06.976231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:23:06.976751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:23:06.976862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:23:06.976907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:786:2676] TestWaitNotification: OK eventTxId 103 2026-01-04T02:23:06.977526Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:23:06.977771Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 279us result status StatusPathDoesNotExist 2026-01-04T02:23:06.978010Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:23:06.978532Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:23:06.978749Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 220us result status StatusSuccess 2026-01-04T02:23:06.979222Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 
1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |82.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::CheckGrpcReadNoDC [GOOD] Test command err: 2026-01-04T02:22:34.629564Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.016091s 2026-01-04T02:22:34.742226Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326155558016121:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:34.742296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:34.822637Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:34.822932Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:22:34.866803Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591326153625396945:2196];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:34.866881Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:34.883582Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:35.277355Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:35.385357Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:35.483548Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:35.483640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:35.485480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:35.485557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:35.498810Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:22:35.499026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:35.500830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:35.651140Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:35.658301Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:35.736453Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:35.808786Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007900s 2026-01-04T02:22:35.819003Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:35.870003Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:36.015811Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001d11/r3tmp/yandex2qndrB.tmp 2026-01-04T02:22:36.015856Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001d11/r3tmp/yandex2qndrB.tmp 2026-01-04T02:22:36.016059Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001d11/r3tmp/yandex2qndrB.tmp 2026-01-04T02:22:36.016197Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:36.099597Z INFO: TTestServer started on Port 9203 GrpcPort 2896 PQClient connected to localhost:2896 2026-01-04T02:22:36.365625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:36.495930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:39.716779Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326177032853949:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:39.716854Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326177032853960:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:39.716906Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:39.720169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326177032853964:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:39.720336Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:39.720644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:22:39.738469Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326155558016121:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:39.738528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:22:39.747533Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326177032853963:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-04T02:22:39.810732Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326177032854071:3068] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:22:39.850622Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591326153625396945:2196];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:39.850716Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:22:40.052547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:40.061854Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591326177032854081:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:22:40.064559Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NTViOWE3ZmYtODhmOTA1MDQtZmFkNzNhOTUtNDVjMWY3OGU=, ActorId: [1:7591326177032853947:2331], ActorState: ExecuteState, LegacyTraceId: 01ke3d0p339s26v8rfb04sam76, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:22:40.083738Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:22:40.117785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:40.286641Z node 1 :FLAT_TX_SCHEMESH ... 
ts: 0, PendingWrites: 0 2026-01-04T02:23:02.578764Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037900][Partition][0][StateIdle] Try persist 2026-01-04T02:23:02.578786Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:02.578805Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.578812Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:02.578820Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.578826Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-04T02:23:02.622093Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037904][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:02.622135Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.622153Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037904][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:02.622173Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.622188Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037904][Partition][0][StateIdle] Try persist 2026-01-04T02:23:02.650209Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591326216623019679:2118], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:23:02.650371Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591326216623019679:2118], cacheItem# { Subscriber: { Subscriber: [3:7591326220917988077:2970] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:23:02.650495Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591326272457598199:4800], recipient# [3:7591326272457598198:2548], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:23:02.657922Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037903][Partition][3][StateIdle] Process user action and tx events 2026-01-04T02:23:02.657956Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.657969Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:02.657988Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.657999Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][3][StateIdle] Try persist 2026-01-04T02:23:02.658036Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][4][StateIdle] Process user action and tx events 2026-01-04T02:23:02.658044Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.658055Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:02.658065Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.658072Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][4][StateIdle] Try persist 2026-01-04T02:23:02.681952Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-01-04T02:23:02.681991Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.682006Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:02.682028Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.682041Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2026-01-04T02:23:02.682082Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-01-04T02:23:02.682091Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.682099Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:02.682113Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.682122Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2026-01-04T02:23:02.682147Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][3][StateIdle] Process user action and tx events 2026-01-04T02:23:02.682157Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.682165Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:02.682174Z node 3 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.682182Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][3][StateIdle] Try persist 2026-01-04T02:23:02.682205Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][4][StateIdle] Process user action and tx events 2026-01-04T02:23:02.682215Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.682221Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:02.682231Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.682238Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][4][StateIdle] Try persist 2026-01-04T02:23:02.682262Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037900][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:02.682270Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.682278Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037900][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:02.682289Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.682297Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037900][Partition][0][StateIdle] Try persist 2026-01-04T02:23:02.682323Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:02.682333Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.682345Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:02.682356Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.682365Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-04T02:23:02.726126Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037904][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:02.726164Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.726180Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037904][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:02.726201Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.726215Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037904][Partition][0][StateIdle] Try persist 2026-01-04T02:23:02.758071Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][3][StateIdle] Process user action and tx events 2026-01-04T02:23:02.758104Z node 3 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.758118Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:02.758135Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.758149Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][3][StateIdle] Try persist 2026-01-04T02:23:02.758190Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][4][StateIdle] Process user action and tx events 2026-01-04T02:23:02.758201Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.758209Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:02.758221Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:02.758230Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][4][StateIdle] Try persist |82.6%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> DataShardSnapshots::MvccSnapshotAndSplit >> TSchemeShardSubDomainTest::CreateForceDropSolomon >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-EnablePersistentPartitionStats [GOOD] |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test >> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD] |82.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test |82.6%| [LD] {RESULT} $(B)/ydb/core/quoter/quoter_service_bandwidth_test/quoter_service_bandwidth_test >> DataShardSnapshots::VolatileSnapshotSplit >> TSubscriberCombinationsTest::CombinationsRootDomain [GOOD] >> TSubscriberCombinationsTest::MigratedPathRecreation >> DataShardSnapshots::MvccSnapshotTailCleanup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:23:03.338465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:23:03.338587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:23:03.338653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 
2026-01-04T02:23:03.338709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:23:03.338747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:23:03.338777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:23:03.338827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:23:03.338926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:23:03.339810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:23:03.350231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:23:03.698700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:23:03.698774Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:03.735587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:23:03.740271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:23:03.740513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:23:03.765205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:23:03.765490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:23:03.766347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:03.766657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:23:03.781102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:03.781379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:23:03.783085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:23:03.783156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:03.783309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:23:03.783362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:23:03.783420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:23:03.783580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:23:04.256305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:04.257421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:04.257584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:04.257698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:04.257773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:04.270227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:04.270385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:04.270507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:04.270598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:04.270700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:04.270771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:04.270860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:04.270936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:04.271011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:04.271111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... complete, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-04T02:23:12.115842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-01-04T02:23:12.115922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-04T02:23:12.116186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 32] 2026-01-04T02:23:12.116368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-04T02:23:12.116414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:855:2809], at schemeshard: 72075186233409546, txId: 104, path id: 1 2026-01-04T02:23:12.116463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:855:2809], at schemeshard: 72075186233409546, txId: 104, path id: 32 2026-01-04T02:23:12.116962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-04T02:23:12.117040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2026-01-04T02:23:12.117142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-04T02:23:12.117182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2026-01-04T02:23:12.117232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2026-01-04T02:23:12.119091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2026-01-04T02:23:12.119219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2026-01-04T02:23:12.119262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-01-04T02:23:12.119304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, 
LocalPathId: 1], version: 12 2026-01-04T02:23:12.119351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 6 2026-01-04T02:23:12.119944Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-01-04T02:23:12.120019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-01-04T02:23:12.120049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-01-04T02:23:12.120098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 18446744073709551615 2026-01-04T02:23:12.120128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 2026-01-04T02:23:12.120197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-01-04T02:23:12.137507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-04T02:23:12.137623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2026-01-04T02:23:12.138013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 3 2026-01-04T02:23:12.138229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:23:12.138267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:23:12.138308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:23:12.138342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:23:12.138377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-01-04T02:23:12.138462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:997:2938] message: TxId: 104 2026-01-04T02:23:12.138507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:23:12.138545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-04T02:23:12.138580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-04T02:23:12.138701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 2 2026-01-04T02:23:12.139094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-01-04T02:23:12.139131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-04T02:23:12.140013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-01-04T02:23:12.142000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-01-04T02:23:12.144487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-04T02:23:12.144563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:855:2809], at schemeshard: 72075186233409546, txId: 0, path id: 1 2026-01-04T02:23:12.144675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:23:12.144714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1352:3270] 2026-01-04T02:23:12.145562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2026-01-04T02:23:12.146945Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2026-01-04T02:23:12.147191Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 282us result status StatusSuccess 2026-01-04T02:23:12.147719Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976720657 CreateStep: 250 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: 
true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-true [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007D0C371A8100 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:23:06.106964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:23:06.107101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:23:06.107166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:23:06.107209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:23:06.107252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:23:06.107284Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:23:06.107365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:23:06.107463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:23:06.108492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:23:06.108846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:23:06.309217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:23:06.309307Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:06.347068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:23:06.347300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:23:06.347499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:23:06.374550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:23:06.374828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:23:06.375599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:06.375891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:23:06.390443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:06.390748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:23:06.392114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:23:06.392207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:06.392336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:23:06.392393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:23:06.392534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:23:06.392764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: 
Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:23:06.712774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.714288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.714448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.714547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.714649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.714718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.714793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.714897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.714966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.715067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.715149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.715215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.715392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.715530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.715605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by ... 
stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats 
written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 Leader for TabletID 72057594046678944 is [1:963:2923] sender: [1:1022:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 2026-01-04T02:23:12.395507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, 
stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 2026-01-04T02:23:12.445975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 0 2026-01-04T02:23:12.446180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-01-04T02:23:12.446253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=ColumnTable, is column=1, is olap=0, RowCount 100000, DataSize 1263320 2026-01-04T02:23:12.446334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:477: PersistSingleStats: ColumnTable rec.GetColumnTables() size=1 2026-01-04T02:23:12.446399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:487: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007D0C37351100, stats written 1 2026-01-04T02:23:12.446978Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:23:12.447242Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 309us result status StatusSuccess 2026-01-04T02:23:12.447758Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 147 LastUpdateTime: 147 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics 
{ ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink >> TSubscriberCombinationsTest::MigratedPathRecreation [GOOD] >> TSubscriberCombinationsTest::CombinationsMigratedPath >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink >> TNetClassifierTest::TestInitFromFile >> KqpWorkloadService::TestDiskIsFullRunOverQueryLimit [GOOD] >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink >> TSchemeShardSubDomainTest::CreateForceDropSolomon [GOOD] >> TSchemeShardSubDomainTest::CreateDropSolomon >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_program [GOOD] >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/control_plane_proxy/ut/unittest >> TControlPlaneProxyTest::ShouldSendDeleteBinding [GOOD] Test command err: 2026-01-04T02:18:15.722473Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:682: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2026-01-04T02:18:16.105306Z node 2 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:752: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:16.557759Z node 3 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:817: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:17.010027Z node 4 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:884: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:17.369358Z node 5 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:950: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:17.865613Z node 6 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1025: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:18.319241Z node 7 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1090: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:18.705530Z node 8 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1158: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:19.133601Z node 9 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1223: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2026-01-04T02:18:19.559153Z node 10 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1288: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:19.936478Z node 11 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:20.329031Z node 12 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:20.731716Z node 13 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1507: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:21.199825Z node 14 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1572: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:21.574900Z node 15 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1642: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:21.940360Z node 16 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1642: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:22.323217Z node 17 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1793: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:22.700341Z node 18 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:23.070749Z node 19 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:23.486567Z node 20 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1972: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:23.859268Z node 21 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2126: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:24.286446Z node 22 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2191: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:24.690002Z node 23 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2256: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:25.072982Z node 24 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2402: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:46.647494Z node 72 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:682: CreateQueryRequest, validation failed: test_user@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2026-01-04T02:18:47.314782Z node 73 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:752: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:47.805617Z node 74 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:817: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:48.291614Z node 75 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:884: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:48.787704Z node 76 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:950: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:49.266990Z node 77 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1025: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:49.764140Z node 78 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1090: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:50.243912Z node 79 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1158: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:50.738205Z node 80 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1223: ListJobsRequest, validation failed: test_user@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2026-01-04T02:18:51.242334Z node 81 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1288: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:51.744547Z node 82 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:52.354387Z node 83 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) content { setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:52.859051Z node 84 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1507: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:53.368065Z node 85 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1572: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:53.880794Z node 86 :YQ_CONTROL_PLANE_STORAG ... L_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:55.978917Z node 90 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) setting { ydb_database { auth { service_account { id: "my_sa_id" } } } } error:
: Error: No permission iam.serviceAccounts.use@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:56.502784Z node 91 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1972: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:57.039209Z node 92 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2126: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:57.572274Z node 93 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2191: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:58.157074Z node 94 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2256: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:18:58.715893Z node 95 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2402: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:21:34.381517Z node 163 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:682: CreateQueryRequest, validation failed: test_user_3@staff **** (00000000) content { name: "my_query_name" } error:
: Error: No permission yq.queries.create@as in a given scope , code: 1000 2026-01-04T02:21:38.039945Z node 166 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:884: GetQueryStatusRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getStatus@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:21:39.145082Z node 167 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:950: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:21:41.377252Z node 168 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1025: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:21:42.777029Z node 169 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1090: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:21:44.038504Z node 170 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1158: GetResultDataRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.queries.getData@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:21:49.888442Z node 173 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:21:53.696280Z node 176 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1642: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:21:55.087839Z node 177 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1793: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:21:56.377205Z node 178 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:21:57.826968Z node 179 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1972: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:22:02.370420Z node 182 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2256: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:22:04.456805Z node 183 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2402: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_3@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:22:09.430086Z node 185 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:752: ListQueriesRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:22:11.217876Z node 186 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:817: DescribeQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:22:14.136386Z node 188 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:950: ModifyQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:22:15.536710Z node 189 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1025: DeleteQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:22:16.982228Z node 190 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1090: ControlQueryRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.queries.control@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:22:19.422613Z node 192 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1223: ListJobsRequest, validation failed: test_user_4@staff **** (00000000) query_id: "my_query_id" error:
: Error: No permission yq.jobs.get@as in a given scope , code: 1000 2026-01-04T02:22:20.844040Z node 193 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1288: DescribeJobRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.jobs.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:22:22.038999Z node 194 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1359: CreateConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:22:23.412915Z node 195 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1507: ListConnectionsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:22:24.698690Z node 196 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1572: DescribeConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:22:25.787732Z node 197 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1642: ModifyConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:22:26.979702Z node 198 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1793: DeleteConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.delete@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:22:28.144983Z node 199 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1914: TestConnectionRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.connections.create@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:22:29.185317Z node 200 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:1972: CreateBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.create@as in a given scope yandexcloud://my_folder, code: 1000
: Error: No permission yq.connections.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:22:30.326915Z node 201 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2126: ListBindingsRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:22:31.422764Z node 202 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2191: DescribeBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.get@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:22:32.540824Z node 203 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2256: ModifyBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.update@as in a given scope yandexcloud://my_folder, code: 1000 2026-01-04T02:22:33.848862Z node 204 :YQ_CONTROL_PLANE_STORAGE ERROR: control_plane_proxy.cpp:2402: DeleteBindingRequest, validation failed: yandexcloud://my_folder test_user_4@staff **** (00000000) error:
: Error: No permission yq.bindings.delete@as in a given scope yandexcloud://my_folder, code: 1000 |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/control_plane_proxy/ut/unittest |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestReadAtTimestamp_10 [GOOD] Test command err: 2026-01-04T02:22:33.221235Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326147900636869:2136];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:33.221314Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:33.263788Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:33.265427Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591326151238095397:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:33.270617Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:33.279391Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:33.498541Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:33.512603Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:33.562488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:33.562640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:33.565384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:33.565528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:33.590323Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:22:33.590506Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:33.603548Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:33.630228Z node 1 
:IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:33.736468Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:33.758303Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:33.790548Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001d19/r3tmp/yandexvLWNWJ.tmp 2026-01-04T02:22:33.790577Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001d19/r3tmp/yandexvLWNWJ.tmp 2026-01-04T02:22:33.790759Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001d19/r3tmp/yandexvLWNWJ.tmp 2026-01-04T02:22:33.790876Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:33.826256Z INFO: TTestServer started on Port 28399 GrpcPort 62256 PQClient connected to localhost:62256 2026-01-04T02:22:34.040814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:34.182757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:34.238150Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:34.276435Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:37.594588Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326165080507375:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:37.594599Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326165080507383:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:37.595101Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:37.595686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326165080507391:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:37.595771Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:37.598816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:22:37.634266Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326165080507390:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-04T02:22:37.826107Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326165080507481:3054] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:22:37.855529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:37.964327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:38.054931Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591326165080507494:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:22:38.055483Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MTMyMTA5Ny1hOWQ0NWE1MS1jMTM3Y2VjZS0zMDZmOWI2Ng==, ActorId: [1:7591326165080507373:2331], ActorState: ExecuteState, LegacyTraceId: 01ke3d0m0rd0x02xjpvh26hcrk, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:22:38.058039Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:22:38.196104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:38.222302Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326147900636869:2136];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:38.222385Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:22:38.262250Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591326151238095397:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:38.262321Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.y ... 
3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.422775Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:17.422787Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.422796Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2026-01-04T02:23:17.422840Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037904][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:17.422852Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.422862Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037904][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:17.422875Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.422884Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037904][Partition][0][StateIdle] Try persist 2026-01-04T02:23:17.451146Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591326214632677458:2122], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:23:17.451317Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591326214632677458:2122], cacheItem# { Subscriber: { Subscriber: [3:7591326218927645838:2984] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:23:17.451397Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591326214632677458:2122], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:23:17.451467Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591326214632677458:2122], cacheItem# { Subscriber: { Subscriber: [3:7591326231812547798:3005] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 
}, entry# { Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:23:17.451559Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591326339186733981:5384], recipient# [3:7591326339186733979:2852], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:23:17.451620Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591326339186733982:5385], recipient# [3:7591326339186733980:2853], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:23:17.526221Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037900][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:17.526269Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.526286Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037900][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:17.526309Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037900][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.526326Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037900][Partition][0][StateIdle] Try persist 2026-01-04T02:23:17.526397Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][3][StateIdle] Process user action and tx events 2026-01-04T02:23:17.526412Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.526425Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:17.526452Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.526471Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][3][StateIdle] Try persist 2026-01-04T02:23:17.526519Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037899][Partition][4][StateIdle] Process user action and tx events 2026-01-04T02:23:17.526533Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.526544Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037899][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:17.526563Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037899][Partition][4][StateIdle] 
Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.526580Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037899][Partition][4][StateIdle] Try persist 2026-01-04T02:23:17.526634Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:17.526649Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.526661Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:17.526674Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.526684Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-04T02:23:17.526726Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][3][StateIdle] Process user action and tx events 2026-01-04T02:23:17.526738Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.526748Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:17.526761Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.526771Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][3][StateIdle] Try persist 2026-01-04T02:23:17.526797Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037903][Partition][4][StateIdle] Process user action and tx events 2026-01-04T02:23:17.526808Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.526818Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037903][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:17.526830Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037903][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.526840Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037903][Partition][4][StateIdle] Try persist 2026-01-04T02:23:17.526870Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-01-04T02:23:17.526880Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.526888Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:17.526902Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.526921Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2026-01-04T02:23:17.526946Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-01-04T02:23:17.526959Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, 
PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.526969Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:17.526983Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.526997Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2026-01-04T02:23:17.527030Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037904][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:17.527053Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.527062Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037904][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:17.527074Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037904][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:17.527083Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037904][Partition][0][StateIdle] Try persist |82.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |82.7%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:23:12.716809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:23:12.716899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:23:12.716935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:23:12.716968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:23:12.717006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:23:12.717035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:23:12.717084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:23:12.717162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:23:12.724776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:23:12.725174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:23:12.946729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:23:12.946797Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:13.017057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:23:13.034288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:23:13.034584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:23:13.070379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:23:13.070740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:23:13.075573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:13.075974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:23:13.091849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:13.092116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:23:13.093453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:23:13.093519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:13.093623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:23:13.093669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:23:13.093782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:23:13.094055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:23:13.560003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:23:13.561320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:13.561481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:13.561565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:13.561650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:13.561725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:13.561789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:13.570699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:13.570834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:13.570912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:13.571015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:13.571116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:13.571205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:13.571312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:13.571385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
p:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-04T02:23:22.323610Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-04T02:23:22.323644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-04T02:23:22.323675Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2026-01-04T02:23:22.324859Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:23:22.324944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:23:22.324979Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:23:22.325019Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-04T02:23:22.325062Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:23:22.325668Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:23:22.325739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:23:22.325764Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:23:22.325791Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2026-01-04T02:23:22.325938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-04T02:23:22.326008Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-04T02:23:22.328065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 2, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-04T02:23:22.328187Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:23:22.328227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:23:22.328842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:23:22.329796Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-04T02:23:22.330896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:22.331202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409546 2026-01-04T02:23:22.332189Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-04T02:23:22.332511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-04T02:23:22.332697Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-04T02:23:22.333045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:23:22.333095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:23:22.333206Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409547 2026-01-04T02:23:22.334668Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:23:22.334722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:23:22.334793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:23:22.335609Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard 
Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:23:22.338094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:23:22.338165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-04T02:23:22.338520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:23:22.338565Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-04T02:23:22.339654Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:23:22.339787Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-04T02:23:22.340100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-04T02:23:22.340148Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-04T02:23:22.340623Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-04T02:23:22.340725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:23:22.340763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:2099:3702] TestWaitNotification: OK eventTxId 104 2026-01-04T02:23:22.349771Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:23:22.350143Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/Solomon" took 422us result status StatusPathDoesNotExist 2026-01-04T02:23:22.350298Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
2026-01-04T02:23:22.350948Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:23:22.351082Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 148us result status StatusPathDoesNotExist 2026-01-04T02:23:22.351193Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |82.7%| [TM] {RESULT} ydb/core/fq/libs/control_plane_proxy/ut/unittest |82.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/effects/ydb-core-kqp-ut-effects |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] |82.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test >> TNetClassifierTest::TestInitFromFile [GOOD] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadService::TestDiskIsFullRunOverQueryLimit [GOOD] Test command err: 2026-01-04T02:18:00.497679Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591324976696520393:2089];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:00.497750Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:00.563478Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:18:00.816692Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:00.838331Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:00.838473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-04T02:18:00.863610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:00.942309Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:00.989390Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:00.989432Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:00.989442Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:00.989547Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:01.050746Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:01.135712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:01.508041Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:03.295650Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-04T02:18:03.295737Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-04T02:18:03.296192Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-04T02:18:03.299353Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324989581423438:2323], Start check tables existence, number paths: 2 2026-01-04T02:18:03.300625Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=NTQzMDY3NDMtNzk0NGUwMmQtNTA1Mjc5N2ItZjdkMTFlNmQ=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NTQzMDY3NDMtNzk0NGUwMmQtNTA1Mjc5N2ItZjdkMTFlNmQ= temp_dir_name# adbd004c-4bd5-0bf7-3c82-9f9a49a9d549 trace_id# 2026-01-04T02:18:03.301713Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=NTQzMDY3NDMtNzk0NGUwMmQtNTA1Mjc5N2ItZjdkMTFlNmQ=, ActorId: [1:7591324989581423461:2330], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:18:03.301940Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324989581423438:2323], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-04T02:18:03.301991Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324989581423438:2323], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-04T02:18:03.302031Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] 
[TCleanupTablesActor] ActorId: [1:7591324989581423438:2323], Successfully finished 2026-01-04T02:18:03.302092Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-04T02:18:03.302117Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-04T02:18:03.339965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:18:03.361733Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591324990223424291:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:03.361821Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:03.403794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:03.403899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:03.414544Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-04T02:18:03.416739Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:03.417682Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-dedicated/.metadata/script_executions 2026-01-04T02:18:03.474511Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.474700Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.474779Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.474852Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.474900Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.474981Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.475091Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.475302Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.475395Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.515195Z node 3 :HIVE WARN: node_info.cpp:25: 
HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:03.515308Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:03.519809Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:03.619067Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:03.621577Z node 3 :STATISTICS WARN: tx_init.cpp:298: [72075186224037894] TTxInit::Complete. EnableColumnStatistics=false 2026-01-04T02:18:03.742863Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:03.742893Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:03.742901Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:03.742988Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:03.772130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:18:03.799236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:03.799317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:03.794975Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591324989274820272:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:03.795025Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:03.802315Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:18:03.803810Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:03.829014Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.829169Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.829246Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.829334Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.829390Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.829501Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:18:03.829616Z node 2 :HIVE 
WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04 ... AxNThmMTgtMjFhM2YzYjEtZTNkNDg0Mzc= 2026-01-04T02:23:10.046423Z node 13 :KQP_WORKLOAD_SERVICE DEBUG: query_actor.h:311: [TQueryRetryActor] [TDelayRequestQuery] OwnerId: [13:7591326093023083319:2369], ActorId: [13:7591326299181519171:3756], TraceId: sample_pool_id, RequestDatabase: /Root/test-dedicated, RequestSessionId: ydb://session/3?node_id=13&id=MjFhZjFkYzgtMzM0YmU4ZjAtNjFhMTVkOTAtZTUwMjgwY2U=, Got response [13:7591326307771453922:3796] UNAVAILABLE 2026-01-04T02:23:10.046580Z node 13 :KQP_WORKLOAD_SERVICE ERROR: pool_handlers_actors.cpp:777: [WorkloadService] [TPoolHandlerActorBase] ActorId: [13:7591326093023083319:2369], DatabaseId: /Root/test-dedicated, PoolId: sample_pool_id, failed to delay request UNAVAILABLE, session id: ydb://session/3?node_id=13&id=MjFhZjFkYzgtMzM0YmU4ZjAtNjFhMTVkOTAtZTUwMjgwY2U=, issues: {
: Error: Disk space exhausted. Table `/Root/test-dedicated/.metadata/workload_manager/delayed_requests`., code: 2033 subissue: {
: Error: Cannot perform writes: database is out of disk space, code: 2033 } } 2026-01-04T02:23:10.046782Z node 13 :KQP_WORKLOAD_SERVICE WARN: pool_handlers_actors.cpp:323: [WorkloadService] [TPoolHandlerActorBase] ActorId: [13:7591326093023083319:2369], DatabaseId: /Root/test-dedicated, PoolId: sample_pool_id, Reply continue error UNAVAILABLE to [13:7591326161742564193:3423], session id: ydb://session/3?node_id=13&id=MjFhZjFkYzgtMzM0YmU4ZjAtNjFhMTVkOTAtZTUwMjgwY2U=, issues: {
: Error: Failed to put request in queue, code: 2033 subissue: {
: Error: Disk space exhausted. Table `/Root/test-dedicated/.metadata/workload_manager/delayed_requests`., code: 2033 subissue: {
: Error: Cannot perform writes: database is out of disk space, code: 2033 } } } 2026-01-04T02:23:10.046875Z node 13 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:547} SessionId: ydb://session/3?node_id=13&id=MjFhZjFkYzgtMzM0YmU4ZjAtNjFhMTVkOTAtZTUwMjgwY2U=, ActorId: [13:7591326161742564193:3423], ActorState: ExecuteState, LegacyTraceId: 01ke3d0jr51qg6ahf2sc1rnmrp, Database disks are without free space pool_id# sample_pool_id trace_id# 2026-01-04T02:23:10.046930Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:553} SessionId: ydb://session/3?node_id=13&id=MjFhZjFkYzgtMzM0YmU4ZjAtNjFhMTVkOTAtZTUwMjgwY2U=, ActorId: [13:7591326161742564193:3423], ActorState: ExecuteState, LegacyTraceId: 01ke3d0jr51qg6ahf2sc1rnmrp, Continue request pool_id# sample_pool_id trace_id# 2026-01-04T02:23:10.047091Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:659} SessionId: ydb://session/3?node_id=13&id=MjFhZjFkYzgtMzM0YmU4ZjAtNjFhMTVkOTAtZTUwMjgwY2U=, ActorId: [13:7591326161742564193:3423], ActorState: ExecuteState, LegacyTraceId: 01ke3d0jr51qg6ahf2sc1rnmrp, Sending CompileQuery request trace_id# 2026-01-04T02:23:10.047148Z node 13 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:342: [WorkloadService] [Service] Request finished in pool, DatabaseId: /Root/test-dedicated, PoolId: sample_pool_id, Duration: 0.000000s, CpuConsumed: 0.000000s, AdjustCpuQuota: 0 2026-01-04T02:23:10.048881Z node 13 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=13&id=NzYyOGExMTEtZDAxNThmMTgtMjFhM2YzYjEtZTNkNDg0Mzc=, ActorId: [13:7591326307771453924:3798], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-04T02:23:10.048936Z node 13 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=13&id=NzYyOGExMTEtZDAxNThmMTgtMjFhM2YzYjEtZTNkNDg0Mzc=, ActorId: [13:7591326307771453924:3798], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:23:10.048980Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=13&id=NzYyOGExMTEtZDAxNThmMTgtMjFhM2YzYjEtZTNkNDg0Mzc=, ActorId: [13:7591326307771453924:3798], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-04T02:23:10.049022Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=13&id=NzYyOGExMTEtZDAxNThmMTgtMjFhM2YzYjEtZTNkNDg0Mzc=, ActorId: [13:7591326307771453924:3798], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:23:10.049118Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=13&id=NzYyOGExMTEtZDAxNThmMTgtMjFhM2YzYjEtZTNkNDg0Mzc=, ActorId: [13:7591326307771453924:3798], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-04T02:23:10.673188Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1704} SessionId: ydb://session/3?node_id=13&id=MjFhZjFkYzgtMzM0YmU4ZjAtNjFhMTVkOTAtZTUwMjgwY2U=, ActorId: [13:7591326161742564193:3423], ActorState: ExecuteState, LegacyTraceId: 01ke3d0jr51qg6ahf2sc1rnmrp, ExecutePhyTx literal# false commit# true deferred_effects_size# 0 tx# 136171611209624 trace_id# 2026-01-04T02:23:10.673293Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1869} SessionId: ydb://session/3?node_id=13&id=MjFhZjFkYzgtMzM0YmU4ZjAtNjFhMTVkOTAtZTUwMjgwY2U=, ActorId: [13:7591326161742564193:3423], 
ActorState: ExecuteState, LegacyTraceId: 01ke3d0jr51qg6ahf2sc1rnmrp, Sending to Executer span_id_size# 8 trace_id# 2026-01-04T02:23:10.673463Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:1939} SessionId: ydb://session/3?node_id=13&id=MjFhZjFkYzgtMzM0YmU4ZjAtNjFhMTVkOTAtZTUwMjgwY2U=, ActorId: [13:7591326161742564193:3423], ActorState: ExecuteState, LegacyTraceId: 01ke3d0jr51qg6ahf2sc1rnmrp, Created new KQP executer executer_id# [13:7591326307771453947:3423] is_rollback# false trace_id# 2026-01-04T02:23:10.681126Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2278} SessionId: ydb://session/3?node_id=13&id=MjFhZjFkYzgtMzM0YmU4ZjAtNjFhMTVkOTAtZTUwMjgwY2U=, ActorId: [13:7591326161742564193:3423], ActorState: ExecuteState, LegacyTraceId: 01ke3d0jr51qg6ahf2sc1rnmrp, Forwarded TEvStreamData to [11:7591326160739142088:4208] trace_id# 2026-01-04T02:23:10.682951Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2171} SessionId: ydb://session/3?node_id=13&id=MjFhZjFkYzgtMzM0YmU4ZjAtNjFhMTVkOTAtZTUwMjgwY2U=, ActorId: [13:7591326161742564193:3423], ActorState: ExecuteState, LegacyTraceId: 01ke3d0jr51qg6ahf2sc1rnmrp, TEvTxResponse current_tx# 1 transactions_size# 1 status# SUCCESS trace_id# 2026-01-04T02:23:10.683126Z node 13 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3d0jr51qg6ahf2sc1rnmrp", SessionId: ydb://session/3?node_id=13&id=MjFhZjFkYzgtMzM0YmU4ZjAtNjFhMTVkOTAtZTUwMjgwY2U=, Slow query, duration: 34.389319s, status: STATUS_CODE_UNSPECIFIED, user: user@system, results: 0b, text: "SELECT 42;", parameters: 0b 2026-01-04T02:23:10.683194Z node 13 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2479} SessionId: ydb://session/3?node_id=13&id=MjFhZjFkYzgtMzM0YmU4ZjAtNjFhMTVkOTAtZTUwMjgwY2U=, ActorId: [13:7591326161742564193:3423], ActorState: ExecuteState, LegacyTraceId: 01ke3d0jr51qg6ahf2sc1rnmrp, TxInfo status# Committed kind# Pure total_duration# 10.185 server_duration# 10.079 queries_count# 2 trace_id# 2026-01-04T02:23:10.683280Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2641} SessionId: ydb://session/3?node_id=13&id=MjFhZjFkYzgtMzM0YmU4ZjAtNjFhMTVkOTAtZTUwMjgwY2U=, ActorId: [13:7591326161742564193:3423], ActorState: ExecuteState, LegacyTraceId: 01ke3d0jr51qg6ahf2sc1rnmrp, Create QueryResponse for action with SUCCESS status action# QUERY_ACTION_EXECUTE trace_id# 2026-01-04T02:23:10.683592Z node 13 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=13&id=MjFhZjFkYzgtMzM0YmU4ZjAtNjFhMTVkOTAtZTUwMjgwY2U=, ActorId: [13:7591326161742564193:3423], ActorState: ExecuteState, LegacyTraceId: 01ke3d0jr51qg6ahf2sc1rnmrp, Cleanup start is_final# true has_cleanup_ctx# true transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# true trace_id# 2026-01-04T02:23:10.683914Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=13&id=MjFhZjFkYzgtMzM0YmU4ZjAtNjFhMTVkOTAtZTUwMjgwY2U=, ActorId: [13:7591326161742564193:3423], ActorState: CleanupState, LegacyTraceId: 01ke3d0jr51qg6ahf2sc1rnmrp, EndCleanup is_final# true trace_id# 2026-01-04T02:23:10.684063Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=13&id=MjFhZjFkYzgtMzM0YmU4ZjAtNjFhMTVkOTAtZTUwMjgwY2U=, ActorId: [13:7591326161742564193:3423], ActorState: CleanupState, LegacyTraceId: 01ke3d0jr51qg6ahf2sc1rnmrp, Sent query response back to proxy proxy_request_id# 215 proxy_id# [13:7591326067253278579:2227] trace_id# 2026-01-04T02:23:10.684141Z node 13 
:KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=13&id=MjFhZjFkYzgtMzM0YmU4ZjAtNjFhMTVkOTAtZTUwMjgwY2U=, ActorId: [13:7591326161742564193:3423], ActorState: unknown state, LegacyTraceId: 01ke3d0jr51qg6ahf2sc1rnmrp, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:23:10.684298Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=13&id=MjFhZjFkYzgtMzM0YmU4ZjAtNjFhMTVkOTAtZTUwMjgwY2U=, ActorId: [13:7591326161742564193:3423], ActorState: unknown state, LegacyTraceId: 01ke3d0jr51qg6ahf2sc1rnmrp, Session actor destroyed trace_id# 2026-01-04T02:23:10.740620Z node 11 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 13 2026-01-04T02:23:10.741317Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-04T02:23:10.741550Z node 11 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 12 2026-01-04T02:23:10.746811Z node 11 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=11&id=MjQ2YjkwZDUtNzVlOWRiN2MtODcyOWIyMzMtODBiYjk1MDQ=, ActorId: [11:7591326066249859789:2335], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-04T02:23:10.746885Z node 11 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=11&id=MjQ2YjkwZDUtNzVlOWRiN2MtODcyOWIyMzMtODBiYjk1MDQ=, ActorId: [11:7591326066249859789:2335], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:23:10.746943Z node 11 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=11&id=MjQ2YjkwZDUtNzVlOWRiN2MtODcyOWIyMzMtODBiYjk1MDQ=, ActorId: [11:7591326066249859789:2335], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-04T02:23:10.746990Z node 11 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=11&id=MjQ2YjkwZDUtNzVlOWRiN2MtODcyOWIyMzMtODBiYjk1MDQ=, ActorId: [11:7591326066249859789:2335], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:23:10.747098Z node 11 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=11&id=MjQ2YjkwZDUtNzVlOWRiN2MtODcyOWIyMzMtODBiYjk1MDQ=, ActorId: [11:7591326066249859789:2335], ActorState: unknown state, Session actor destroyed trace_id# 2026-01-04T02:23:10.769498Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-04T02:23:10.875667Z node 13 :SYSTEM_VIEWS WARN: sysview_service.cpp:813: Summary delivery problem: service id# [13:7591326067253278410:2083], processor id# 72075186224037891, database# /Root/test-dedicated 2026-01-04T02:23:11.161431Z node 13 :SYSTEM_VIEWS WARN: sysview_service.cpp:813: Summary delivery problem: service id# [13:7591326067253278410:2083], processor id# 72075186224037891, database# /Root/test-dedicated |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest >> DataShardSnapshots::VolatileSnapshotSplit [GOOD] >> DataShardSnapshots::VolatileSnapshotMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable 
[GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007DEF526A4500 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:23:05.889112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:23:05.889206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:23:05.889243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:23:05.889284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:23:05.889335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:23:05.889377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:23:05.889454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:23:05.889555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:23:05.895786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:23:05.896157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:23:06.149734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:23:06.149797Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:06.182920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:23:06.191531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:23:06.191734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:23:06.204509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:23:06.204775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:23:06.205506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:23:06.205774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: 
MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:23:06.214649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:06.214869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:23:06.216233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:23:06.216300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:06.216410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:23:06.216458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:23:06.216519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:23:06.216667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:23:06.830563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.831726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.831906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.832002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.832068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.832157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.832225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.832331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.832441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.832521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.832586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.832659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:23:06.832764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.832851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:23:06.832916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... 8010Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:23:23.678340Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:23:23.678412Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:23:23.678659Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:23:23.678804Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:23:23.678855Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:23:23.678939Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:23:23.679109Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:23:23.679183Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:23:23.679606Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:23:23.679844Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-04T02:23:23.679915Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:23:23.679973Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 
2026-01-04T02:23:23.680125Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-04T02:23:23.680173Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-04T02:23:23.680220Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-04T02:23:23.680456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:23:23.735808Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:23:23.761931Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:23:23.762057Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:23:23.762560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:23:23.762631Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:23:23.762687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:23:23.762798Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TEST waitForFullStatsUpdate, schemeshard 0x00007DEF5263A100, stats written 0 Leader for TabletID 72057594046678944 is [2:976:2935] sender: [2:1029:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007DEF5263A100, stats written 0 2026-01-04T02:23:23.848179Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DEF5263A100, stats written 0 2026-01-04T02:23:23.882291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DEF5263A100, stats written 0 2026-01-04T02:23:23.882536Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-01-04T02:23:23.882601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=ColumnTable, is column=1, is olap=0, RowCount 100000, DataSize 1263320 2026-01-04T02:23:23.882647Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:477: PersistSingleStats: ColumnTable rec.GetColumnTables() size=1 2026-01-04T02:23:23.882703Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:487: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007DEF5263A100, stats written 1 2026-01-04T02:23:23.883333Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:23:23.883598Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 326us result status StatusSuccess 2026-01-04T02:23:23.884114Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable
CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 153 LastUpdateTime: 153 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromFile [GOOD] Test command err: 2026-01-04T02:23:17.514914Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326336334129925:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:17.515080Z 
node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:23:18.514133Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:18.573371Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:23:18.594030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:23:18.715811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:18.715931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:18.844399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:18.869084Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326336334129694:2081] 1767493397449729 != 1767493397449732 2026-01-04T02:23:18.985903Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:23:18.990539Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:19.054591Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/002470/r3tmp/yandex7RmdPk.tmp 2026-01-04T02:23:19.054622Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/002470/r3tmp/yandex7RmdPk.tmp 2026-01-04T02:23:19.054884Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/002470/r3tmp/yandex7RmdPk.tmp 2026-01-04T02:23:19.054985Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest >> DataShardReadTableSnapshots::ReadTableDropColumn |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::Boot >> DataShardSnapshots::LockedWriteBulkUpsertConflict+UseSink [GOOD] >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink >> TTxLocatorTest::Boot [GOOD] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageGroup [FAIL] >> DataShardSnapshots::UncommittedChangesRenameTable+UseSink [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBasic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::Boot [GOOD] Test command err: 2026-01-04T02:23:29.868368Z node 
1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-04T02:23:29.868881Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-04T02:23:29.869616Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-04T02:23:29.871563Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:29.872044Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-04T02:23:29.884540Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:29.884677Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:29.884776Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-04T02:23:29.884910Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:29.884975Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:29.885061Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-04T02:23:29.885172Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestImposibleSize |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TTxLocatorTest::TestImposibleSize [GOOD] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> DataShardSnapshots::MvccSnapshotAndSplit [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink >> TTxLocatorTest::TestAllocateAllByPieces >> TTxLocatorTest::TestAllocateAll ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestImposibleSize [GOOD] Test command err: 2026-01-04T02:23:31.997030Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-04T02:23:31.997616Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-04T02:23:31.998532Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-04T02:23:32.000427Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:32.000968Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-04T02:23:32.035489Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:32.040159Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:32.040342Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-04T02:23:32.040505Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:32.040595Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:32.040733Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-04T02:23:32.040891Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-04T02:23:32.041752Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#281474976710656 2026-01-04T02:23:32.041952Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 0 Reserved to# 0 2026-01-04T02:23:32.050523Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2026-01-04T02:23:32.051242Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2110] requested range size#123456 2026-01-04T02:23:32.051862Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:32.051933Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:32.052063Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 123456 2026-01-04T02:23:32.052136Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:76:2110] TEvAllocateResult from# 0 to# 123456 expected SUCCESS 2026-01-04T02:23:32.052556Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2114] requested range size#281474976587200 2026-01-04T02:23:32.052720Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved 
from# 123456 Reserved to# 0 2026-01-04T02:23:32.052767Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:80:2114] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE 2026-01-04T02:23:32.053296Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:83:2117] requested range size#246912 2026-01-04T02:23:32.053773Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:32.061229Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:32.061426Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 123456 Reserved to# 370368 2026-01-04T02:23:32.061482Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:83:2117] TEvAllocateResult from# 123456 to# 370368 expected SUCCESS 2026-01-04T02:23:32.127435Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:87:2121] requested range size#281474976340288 2026-01-04T02:23:32.127632Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 370368 Reserved to# 0 2026-01-04T02:23:32.127688Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:87:2121] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAll [GOOD] >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] |82.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> DataShardSnapshots::MvccSnapshotTailCleanup [GOOD] >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAllByPieces [GOOD] Test command err: 2026-01-04T02:23:33.888176Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-04T02:23:33.888715Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-04T02:23:33.889457Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-04T02:23:33.891150Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:33.891622Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-04T02:23:33.947621Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:33.947748Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:33.947856Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-04T02:23:33.947986Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:33.948094Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:33.948184Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-04T02:23:33.948338Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-04T02:23:33.949049Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#8796093022207 2026-01-04T02:23:33.949572Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:33.949645Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:33.949759Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 8796093022207 2026-01-04T02:23:33.956905Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 8796093022207 expected SUCCESS 2026-01-04T02:23:34.000207Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:77:2111] requested range size#8796093022207 2026-01-04T02:23:34.000765Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.000836Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.000948Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8796093022207 Reserved to# 17592186044414 2026-01-04T02:23:34.000988Z node 1 :TX_ALLOCATOR DEBUG: 
txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:77:2111] TEvAllocateResult from# 8796093022207 to# 17592186044414 expected SUCCESS 2026-01-04T02:23:34.001726Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:81:2115] requested range size#8796093022207 2026-01-04T02:23:34.002155Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.002237Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.002615Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 17592186044414 Reserved to# 26388279066621 2026-01-04T02:23:34.002676Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:81:2115] TEvAllocateResult from# 17592186044414 to# 26388279066621 expected SUCCESS 2026-01-04T02:23:34.003099Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:85:2119] requested range size#8796093022207 2026-01-04T02:23:34.003586Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.003714Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.003822Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 26388279066621 Reserved to# 35184372088828 2026-01-04T02:23:34.003864Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:85:2119] TEvAllocateResult from# 26388279066621 to# 35184372088828 expected SUCCESS 2026-01-04T02:23:34.004374Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:89:2123] requested range size#8796093022207 2026-01-04T02:23:34.004747Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.004815Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.004898Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 35184372088828 Reserved to# 43980465111035 2026-01-04T02:23:34.004937Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:89:2123] TEvAllocateResult from# 35184372088828 to# 43980465111035 expected SUCCESS 2026-01-04T02:23:34.005366Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:93:2127] requested range size#8796093022207 2026-01-04T02:23:34.005747Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK 
StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.006011Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.006108Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 43980465111035 Reserved to# 52776558133242 2026-01-04T02:23:34.006158Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:93:2127] TEvAllocateResult from# 43980465111035 to# 52776558133242 expected SUCCESS 2026-01-04T02:23:34.006673Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:97:2131] requested range size#8796093022207 2026-01-04T02:23:34.007036Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.007135Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.007282Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 52776558133242 Reserved to# 61572651155449 2026-01-04T02:23:34.007326Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:97:2131] TEvAllocateResult from# 52776558133242 to# 61572651155449 expected SUCCESS 2026-01-04T02:23:34.007805Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:101:2135] requested range size#8796093022207 2026-01-04T02:23:34.008281Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.008377Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.008464Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 61572651155449 Reserved to# 70368744177656 2026-01-04T02:23:34.008505Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:101:2135] TEvAllocateResult from# 61572651155449 to# 70368744177656 expected SUCCESS 2026-01-04T02:23:34.008987Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:105:2139] requested range size#8796093022207 2026-01-04T02:23:34.009335Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.009432Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.009520Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 70368744177656 Reserved to# 79164837199863 2026-01-04T02:23:34.009562Z 
node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:105:2139] TEvAllocateResult from# 70368744177656 to# 79164837199863 expected SUCCESS 2026-01-04T02:23:34.010874Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:109:2143] requested range size#8796093022207 2026-01-04T02:23:34.011301Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.011381Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.011489Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Suc ... node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:157:2191] TEvAllocateResult from# 184717953466347 to# 193514046488554 expected SUCCESS 2026-01-04T02:23:34.036807Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:161:2195] requested range size#8796093022207 2026-01-04T02:23:34.037220Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:25:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.037309Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:25:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.037411Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 193514046488554 Reserved to# 202310139510761 2026-01-04T02:23:34.037484Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:161:2195] TEvAllocateResult from# 193514046488554 to# 202310139510761 expected SUCCESS 2026-01-04T02:23:34.040593Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:165:2199] requested range size#8796093022207 2026-01-04T02:23:34.041118Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:26:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.041218Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:26:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.041345Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 202310139510761 Reserved to# 211106232532968 2026-01-04T02:23:34.041405Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:165:2199] TEvAllocateResult from# 202310139510761 to# 211106232532968 expected SUCCESS 2026-01-04T02:23:34.042292Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:169:2203] requested range size#8796093022207 2026-01-04T02:23:34.042730Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:27:1:24576:76:0] Status# OK StatusFlags# { Valid } 
ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.042811Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:27:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.042918Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 211106232532968 Reserved to# 219902325555175 2026-01-04T02:23:34.043572Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:169:2203] TEvAllocateResult from# 211106232532968 to# 219902325555175 expected SUCCESS 2026-01-04T02:23:34.044618Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:173:2207] requested range size#8796093022207 2026-01-04T02:23:34.045038Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:28:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.045099Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:28:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.045230Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 219902325555175 Reserved to# 228698418577382 2026-01-04T02:23:34.045274Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:173:2207] TEvAllocateResult from# 219902325555175 to# 228698418577382 expected SUCCESS 2026-01-04T02:23:34.046125Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:177:2211] requested range size#8796093022207 2026-01-04T02:23:34.046527Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:29:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.046628Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:29:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.046767Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 228698418577382 Reserved to# 237494511599589 2026-01-04T02:23:34.046824Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:177:2211] TEvAllocateResult from# 228698418577382 to# 237494511599589 expected SUCCESS 2026-01-04T02:23:34.047634Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:181:2215] requested range size#8796093022207 2026-01-04T02:23:34.048340Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:30:1:24576:73:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.048418Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:30:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.048538Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 237494511599589 Reserved to# 246290604621796 2026-01-04T02:23:34.048582Z node 1 
:TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:181:2215] TEvAllocateResult from# 237494511599589 to# 246290604621796 expected SUCCESS 2026-01-04T02:23:34.049507Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:185:2219] requested range size#8796093022207 2026-01-04T02:23:34.049962Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:31:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.050049Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:31:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.050141Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 246290604621796 Reserved to# 255086697644003 2026-01-04T02:23:34.050182Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:185:2219] TEvAllocateResult from# 246290604621796 to# 255086697644003 expected SUCCESS 2026-01-04T02:23:34.051068Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:189:2223] requested range size#8796093022207 2026-01-04T02:23:34.052152Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:32:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.052258Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:32:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.052360Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 255086697644003 Reserved to# 263882790666210 2026-01-04T02:23:34.052419Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:189:2223] TEvAllocateResult from# 255086697644003 to# 263882790666210 expected SUCCESS 2026-01-04T02:23:34.053348Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:193:2227] requested range size#8796093022207 2026-01-04T02:23:34.053835Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:33:1:24576:77:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.053931Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:33:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.054038Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 263882790666210 Reserved to# 272678883688417 2026-01-04T02:23:34.054078Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:193:2227] TEvAllocateResult from# 263882790666210 to# 272678883688417 expected SUCCESS 2026-01-04T02:23:34.054885Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:197:2231] requested range size#8796093022207 2026-01-04T02:23:34.055225Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# 
[72057594046447617:2:34:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.055351Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:34:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.055478Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 272678883688417 Reserved to# 281474976710624 2026-01-04T02:23:34.055536Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:197:2231] TEvAllocateResult from# 272678883688417 to# 281474976710624 expected SUCCESS 2026-01-04T02:23:34.058439Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:201:2235] requested range size#31 2026-01-04T02:23:34.058963Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:35:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.059026Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:35:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:34.059138Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 281474976710624 Reserved to# 281474976710655 2026-01-04T02:23:34.059199Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:201:2235] TEvAllocateResult from# 281474976710624 to# 281474976710655 expected SUCCESS 2026-01-04T02:23:34.060198Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:205:2239] requested range size#1 2026-01-04T02:23:34.060337Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2026-01-04T02:23:34.060399Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:205:2239] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestAllocateAll [GOOD] Test command err: 2026-01-04T02:23:33.807870Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-04T02:23:33.808446Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-04T02:23:33.809290Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-04T02:23:33.811531Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:33.812069Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-04T02:23:33.824597Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:33.824752Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:33.824850Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-04T02:23:33.824980Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:33.825054Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:33.825135Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-04T02:23:33.825246Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-04T02:23:33.826004Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#281474976710655 2026-01-04T02:23:33.826627Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:33.826692Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:33.826836Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 281474976710655 2026-01-04T02:23:33.826878Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 281474976710655 expected SUCCESS 2026-01-04T02:23:33.831825Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:77:2111] requested range size#1 2026-01-04T02:23:33.832018Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 0 Reserved from# 281474976710655 Reserved to# 0 2026-01-04T02:23:33.832058Z node 1 :TX_ALLOCATOR ERROR: txallocator_impl.cpp:84: tablet# 72057594046447617 Send to Sender# [1:77:2111] TEvAllocateResult status# IMPOSIBLE expected IMPOSIBLE |82.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> DataShardSnapshots::LockedWriteReuseAfterCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink |82.7%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots |82.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection_reboots/tx-schemeshard-ut_backup_collection_reboots >> KqpWrite::Insert |82.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables |82.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables |82.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/ydb-core-tx-schemeshard-ut_consistent_copy_tables >> TTxLocatorTest::TestZeroRange >> TTxLocatorTest::TestZeroRange [GOOD] |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink |82.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/ydb-core-tx-schemeshard-ut_cdc_stream |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |82.8%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/ydb-core-http_proxy-ut-inside_ydb_ut >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |82.8%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/ydb-core-tx-columnshard-engines-ut |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLOnIndexedTable::test_case [GOOD] >> TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] |82.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestZeroRange [GOOD] Test command err: 2026-01-04T02:23:36.894517Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-04T02:23:36.895061Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-04T02:23:36.895822Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-04T02:23:36.897615Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:36.902340Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-04T02:23:36.938145Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:36.938270Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:36.938391Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-04T02:23:36.938535Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:36.938629Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:36.938726Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-04T02:23:36.938853Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-04T02:23:36.939651Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#0 2026-01-04T02:23:36.940210Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:36.940282Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:36.940408Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 0 2026-01-04T02:23:36.940478Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 0 expected SUCCESS |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> settings_validation.py::TestSettingsValidation::test_settings_validation_monitoring_program [GOOD] |82.8%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction+UseSink >> KqpInplaceUpdate::SingleRowArithm-UseSink >> DataShardSnapshots::VolatileSnapshotMerge [GOOD] >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |82.8%| [LD] {RESULT} $(B)/ydb/core/mind/ut/ydb-core-mind-ut |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut/ydb-core-mind-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> 
TTxLocatorTest::TestSignificantRequestWhenRunReserveTx [GOOD] Test command err: 2026-01-04T02:23:38.231046Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-04T02:23:38.231630Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-04T02:23:38.232431Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-04T02:23:38.245736Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.246404Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-04T02:23:38.264998Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.265176Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.265283Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-04T02:23:38.265425Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.265498Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.265584Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-04T02:23:38.265716Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-04T02:23:38.267268Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2026-01-04T02:23:38.267921Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2026-01-04T02:23:38.268237Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2026-01-04T02:23:38.268530Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2026-01-04T02:23:38.268954Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:88:2123] requested range size#100000 2026-01-04T02:23:38.269306Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:90:2125] requested range size#100000 2026-01-04T02:23:38.269585Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.270456Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2026-01-04T02:23:38.270747Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.270832Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.271216Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.271342Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2026-01-04T02:23:38.271522Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.271653Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2026-01-04T02:23:38.271824Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.271905Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.272074Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.272225Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2026-01-04T02:23:38.272489Z node 1 :TABLET_MAIN 
DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.272629Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2026-01-04T02:23:38.272684Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2026-01-04T02:23:38.272815Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.272920Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2026-01-04T02:23:38.272944Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2026-01-04T02:23:38.273014Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2026-01-04T02:23:38.273056Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2026-01-04T02:23:38.273170Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.273249Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2026-01-04T02:23:38.273271Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 300000 to# 400000 2026-01-04T02:23:38.273345Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.273431Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.273483Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2026-01-04T02:23:38.273513Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:88:2123] TEvAllocateResult from# 400000 to# 500000 2026-01-04T02:23:38.273643Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.273752Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.276220Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2026-01-04T02:23:38.276265Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# 
[1:90:2125] TEvAllocateResult from# 500000 to# 600000 2026-01-04T02:23:38.276433Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.276510Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2026-01-04T02:23:38.276536Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 600000 to# 700000 2026-01-04T02:23:38.276704Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.276760Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2026-01-04T02:23:38.276800Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 700000 to# 800000 2026-01-04T02:23:38.276936Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.277027Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.277076Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2026-01-04T02:23:38.277100Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 800000 to# 900000 2026-01-04T02:23:38.277200Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.277265Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2026-01-04T02:23:38.277290Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2026-01-04T02:23:38.295406Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 720575 ... 
Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.513049Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:92:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.513128Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8600000 Reserved to# 8700000 2026-01-04T02:23:38.513154Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:405:2439] TEvAllocateResult from# 8600000 to# 8700000 2026-01-04T02:23:38.513244Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:92:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.513306Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8700000 Reserved to# 8800000 2026-01-04T02:23:38.513334Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:407:2441] TEvAllocateResult from# 8700000 to# 8800000 2026-01-04T02:23:38.513431Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8800000 Reserved to# 8900000 2026-01-04T02:23:38.513461Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:409:2443] TEvAllocateResult from# 8800000 to# 8900000 2026-01-04T02:23:38.513581Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 8900000 Reserved to# 9000000 2026-01-04T02:23:38.513610Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:411:2445] TEvAllocateResult from# 8900000 to# 9000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2026-01-04T02:23:38.523024Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:433:2467] requested range size#100000 2026-01-04T02:23:38.523639Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:435:2469] requested range size#100000 2026-01-04T02:23:38.524055Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:437:2471] requested range size#100000 2026-01-04T02:23:38.530505Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:93:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.530861Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:93:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.531104Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:439:2473] requested range size#100000 2026-01-04T02:23:38.531415Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:94:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.531604Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: 
tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:441:2475] requested range size#100000 2026-01-04T02:23:38.531734Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:94:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.532116Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:443:2477] requested range size#100000 2026-01-04T02:23:38.532292Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:95:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.532472Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:95:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.532624Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:445:2479] requested range size#100000 2026-01-04T02:23:38.533020Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:447:2481] requested range size#100000 2026-01-04T02:23:38.533196Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:96:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.533351Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:96:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.533403Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:97:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.533522Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:449:2483] requested range size#100000 2026-01-04T02:23:38.533720Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:97:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.538771Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:98:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.539029Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9000000 Reserved to# 9100000 2026-01-04T02:23:38.539068Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:433:2467] TEvAllocateResult from# 9000000 to# 9100000 2026-01-04T02:23:38.539230Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:451:2485] requested range size#100000 2026-01-04T02:23:38.539496Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:98:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.539592Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:99:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.539805Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: 
tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9100000 Reserved to# 9200000 2026-01-04T02:23:38.539848Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:435:2469] TEvAllocateResult from# 9100000 to# 9200000 2026-01-04T02:23:38.539912Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:99:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.540120Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9200000 Reserved to# 9300000 2026-01-04T02:23:38.540161Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:437:2471] TEvAllocateResult from# 9200000 to# 9300000 2026-01-04T02:23:38.540231Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:100:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.540364Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9300000 Reserved to# 9400000 2026-01-04T02:23:38.540390Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:439:2473] TEvAllocateResult from# 9300000 to# 9400000 2026-01-04T02:23:38.540436Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:100:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.540599Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9400000 Reserved to# 9500000 2026-01-04T02:23:38.540630Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:441:2475] TEvAllocateResult from# 9400000 to# 9500000 2026-01-04T02:23:38.540687Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:101:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.540769Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2026-01-04T02:23:38.540793Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:443:2477] TEvAllocateResult from# 9500000 to# 9600000 2026-01-04T02:23:38.540851Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:101:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.540986Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2026-01-04T02:23:38.541037Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:445:2479] TEvAllocateResult from# 9600000 to# 9700000 2026-01-04T02:23:38.541119Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:102:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.541258Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 
Reserved from# 9700000 Reserved to# 9800000 2026-01-04T02:23:38.541290Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:447:2481] TEvAllocateResult from# 9700000 to# 9800000 2026-01-04T02:23:38.541344Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:102:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:23:38.541467Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2026-01-04T02:23:38.541493Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:449:2483] TEvAllocateResult from# 9800000 to# 9900000 2026-01-04T02:23:38.541643Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2026-01-04T02:23:38.541667Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:451:2485] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS |82.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> DataShardSnapshots::LockedWriteBulkUpsertConflict-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink >> KqpWrite::ProjectReplace+UseSink >> KqpInplaceUpdate::SingleRowIf-UseSink >> KqpEffects::DeleteWithJoinAndIndex+UseSecondaryIndex-UseSink >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink >> DataShardReadTableSnapshots::ReadTableDropColumn [GOOD] >> DataShardReadTableSnapshots::CorruptedDyNumber >> KqpImmediateEffects::Upsert >> DataShardSnapshots::ShardRestartWholeShardLockBasic [GOOD] >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert >> KqpImmediateEffects::InsertDuplicates-UseSink >> KqpWrite::Insert [GOOD] >> KqpWrite::InsertRevert >> DataShardSnapshots::MvccSnapshotLockedWrites+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink >> test_simple.py::TestSimple::test[alter_table] [GOOD] >> test_simple.py::TestSimple::test[alter_tablestore] >> KqpImmediateEffects::ForceImmediateEffectsExecution-UseSink [GOOD] >> KqpImmediateEffects::ImmediateUpdate |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |82.8%| [LD] {RESULT} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/table_split_ut/ydb-services-ydb-table_split_ut >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_arent_logged_when_anonymous [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/00233b/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk12/testing_out_stuff/test_auditlog.py.test_dml_requests_arent_logged_when_anonymous/audit_log.cxb8j01u.txt |82.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |82.8%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats |82.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/ydb-core-tx-schemeshard-ut_stats >> DataShardSnapshots::LockedWriteReuseAfterCommit-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction+UseSink [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction+UseSink |82.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/parametrized_queries/py3test >> test_parametrized_queries.py::TestParametrizedQueries::test_parametrized_queries[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |82.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/parametrized_queries/py3test |82.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |82.9%| [LD] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/ydb/ut/ydb-core-fq-libs-ydb-ut >> KqpEffects::InsertAbort_Params_Duplicates+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> TPersQueueNewSchemeCacheTest::TestWriteStat1stClassTopicAPI [GOOD] Test command err: 2026-01-04T02:22:33.246989Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326148150522028:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:33.247289Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:33.329681Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:33.440662Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:22:33.673688Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:33.674010Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:22:33.692337Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:33.718815Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:33.718995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:33.719775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:33.719851Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:33.763376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:33.769156Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle 
TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:22:33.769351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:33.826346Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:33.865928Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:34.044240Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:34.072410Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001d18/r3tmp/yandexV3TxUK.tmp 2026-01-04T02:22:34.072453Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001d18/r3tmp/yandexV3TxUK.tmp 2026-01-04T02:22:34.133358Z INFO: TTestServer started on Port 8145 GrpcPort 17870 2026-01-04T02:22:34.173973Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001d18/r3tmp/yandexV3TxUK.tmp 2026-01-04T02:22:34.174481Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:34.242286Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:17870 2026-01-04T02:22:34.364036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:34.409771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:22:34.425951Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:34.525507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:37.748027Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326165330392519:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:37.748203Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:37.748680Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326165330392533:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:37.748794Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326165330392534:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:37.748837Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:37.752915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:22:37.794195Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326165330392537:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-04T02:22:38.024775Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326165330392622:3033] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:22:38.055626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:38.220949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:38.226510Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591326169625359934:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:22:38.229364Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZDhjY2Q1YTMtZDVlZWRjNy0yODQ1MTU0YS05Y2NkNzExNQ==, ActorId: [1:7591326165330392509:2331], ActorState: ExecuteState, LegacyTraceId: 01ke3d0m5jedtdzarz0qtwywc9, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:22:38.234182Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:22:38.237225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326148150522028:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:38.237281Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:22:38.400569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7591326169625360385:3374] === CheckClustersList. Ok 2026-01-04T02:22:44.886218Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591326148150522180:2120], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [18446744073709551615:18446744073709551615:0] R ... 
26-01-04T02:23:46.368214Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7591326462358140019:4404], recipient# [7:7591326462358140017:2536], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:48:1] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } },{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:48:1] RequestType: ByPath Operation: OpUnknown RedirectRequired: true ShowPrivatePath: false SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:23:46.368728Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7591326385048725360:2121], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:46:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:23:46.368847Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7591326385048725360:2121], cacheItem# { Subscriber: { Subscriber: [7:7591326419408465232:3076] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1767493417151 PathId: [OwnerId: 72057594046644480, LocalPathId: 46] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:46:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:23:46.368950Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7591326385048725360:2121], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:48:0] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:23:46.369015Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7591326385048725360:2121], cacheItem# { Subscriber: { Subscriber: [7:7591326419408465233:3077] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 18 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1767493417739 PathId: [OwnerId: 72057594046644480, LocalPathId: 48] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: TableId: [72057594046644480:48:0] 
RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:23:46.369099Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7591326462358140022:4405], recipient# [7:7591326385048725350:2098], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Cluster TableId: [72057594046644480:46:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:23:46.369141Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7591326462358140023:4406], recipient# [7:7591326385048725350:2098], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/PQ/Config/V2/Versions TableId: [72057594046644480:48:1] RequestType: ByTableId Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:23:46.369190Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7591326385048725360:2121], request# { ErrorCount: 0 DatabaseName: Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:23:46.369253Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7591326385048725360:2121], cacheItem# { Subscriber: { Subscriber: [7:7591326385048725618:2161] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 27 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767493410207 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:23:46.369368Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7591326462358140024:4407], recipient# [7:7591326385048725350:2098], result# { ErrorCount: 0 DatabaseName: Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 
1] Params { Version: 2 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:23:46.395309Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:46.395353Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:46.395371Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:46.395393Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:46.395410Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-04T02:23:46.395486Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-01-04T02:23:46.395496Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:46.395505Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:46.395517Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:46.395525Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2026-01-04T02:23:46.395552Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-01-04T02:23:46.395562Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:46.395572Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:46.395583Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:46.395590Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist 2026-01-04T02:23:46.495687Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:46.495731Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:46.495752Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:46.495786Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:46.495804Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-04T02:23:46.495881Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][3][StateIdle] Process user action and tx events 2026-01-04T02:23:46.495891Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:46.495899Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:46.495922Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:46.495931Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][3][StateIdle] Try persist 2026-01-04T02:23:46.495956Z node 7 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][4][StateIdle] Process user action and tx events 2026-01-04T02:23:46.495966Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:46.495973Z node 7 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:46.495985Z node 7 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:46.495993Z node 7 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][4][StateIdle] Try persist |82.9%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/new_schemecache_ut/unittest >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] >> DataShardSnapshots::VolatileSnapshotAndLocalMKQLUpdate [GOOD] >> DataShardSnapshots::VolatileSnapshotReadTable >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink |82.9%| [TA] $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} |82.9%| [TA] $(B)/ydb/tests/datashard/parametrized_queries/test-results/py3test/{meta.json ... results_accumulator.log} >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink |82.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/parametrized_queries/test-results/py3test/{meta.json ... results_accumulator.log} >> KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit [FAIL] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant >> KqpInplaceUpdate::SingleRowArithm-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowIf+UseSink >> DataShardSnapshots::MvccSnapshotReadWithLongPlanQueue [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink >> KqpImmediateEffects::UpsertDuplicates |82.9%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/new_schemecache_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardSnapshots::ShardRestartLockUnrelatedUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByConflict ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::CorruptedDyNumber [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:23:35.507722Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:23:36.146830Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:23:36.147407Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:23:36.147477Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:23:37.540492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:37.540633Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:37.749546Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493408768611 != 1767493408768614 2026-01-04T02:23:37.776744Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:37.890971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:38.011430Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:23:38.445447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:38.461712Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:38.574148Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-04T02:23:38.574231Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:23:38.574356Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-04T02:23:38.857686Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T02:23:38.858457Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:23:38.859210Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T02:23:38.859338Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:23:38.859719Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:23:38.859893Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:23:38.860047Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:23:38.896745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:38.897339Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:23:38.898270Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:23:38.898346Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:23:38.976109Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:23:38.977259Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:23:38.977624Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-04T02:23:38.984633Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:23:39.104882Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:23:39.105980Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:23:39.106146Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:23:39.116472Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:23:39.116623Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:23:39.116703Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:23:39.117634Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:23:39.117871Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:23:39.118003Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:23:39.128954Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:23:39.183101Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:23:39.183339Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:23:39.183474Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:23:39.183522Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:23:39.183581Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:23:39.183629Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:23:39.183800Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:23:39.183861Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:23:39.184304Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:23:39.184414Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:23:39.184511Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:23:39.184557Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:23:39.184614Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:23:39.184667Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:23:39.184723Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:23:39.184786Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:23:39.184838Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:23:39.184988Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:23:39.185034Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:23:39.185101Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:23:39.185543Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-04T02:23:39.185600Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:23:39.185712Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:23:39.186074Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:23:39.186160Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:23:39.186251Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:23:39.186305Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status ... g event TEvTxProcessing::TEvStreamClearancePending 2026-01-04T02:23:52.768587Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287940, Sender [2:959:2816], Recipient [2:886:2765]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715659 Cleared: true 2026-01-04T02:23:52.768617Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3174: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2026-01-04T02:23:52.768773Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:886:2765], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:23:52.768809Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:23:52.768859Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:23:52.768894Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:23:52.768937Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715659] at 72075186224037888 for WaitForStreamClearance 2026-01-04T02:23:52.768972Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit WaitForStreamClearance 2026-01-04T02:23:52.769009Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [0:281474976715659] at 72075186224037888 2026-01-04T02:23:52.769043Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2026-01-04T02:23:52.769071Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit WaitForStreamClearance 2026-01-04T02:23:52.769103Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715659] at 72075186224037888 to execution unit ReadTableScan 2026-01-04T02:23:52.769132Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2026-01-04T02:23:52.769320Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Continue 2026-01-04T02:23:52.769347Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 
0 immediate 1 planned 0 2026-01-04T02:23:52.769373Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-01-04T02:23:52.769400Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:23:52.769425Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:23:52.769473Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:23:52.769785Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:975:2831], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-01-04T02:23:52.790314Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2026-01-04T02:23:52.790548Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:975:2831], Recipient [2:959:2816]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715659 ShardId: 72075186224037888 2026-01-04T02:23:52.790602Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:959:2816] TxId# 281474976715658] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-01-04T02:23:52.791130Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:958:2816], Recipient [2:959:2816]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715658 MessageSizeLimit: 1 ReservedMessages: 1 2026-01-04T02:23:52.791194Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:959:2816] TxId# 281474976715658] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-04T02:23:52.791239Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:959:2816] TxId# 281474976715658] Reserving quota 1 messages for ShardId# 72075186224037888 2026-01-04T02:23:52.791308Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2026-01-04T02:23:52.791430Z node 2 :TX_DATASHARD ERROR: read_table_scan.cpp:681: Got scan fatal error: Invalid DyNumber binary representation 2026-01-04T02:23:52.791487Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715659, MessageQuota: 1 2026-01-04T02:23:52.791723Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287429, Sender [2:975:2831], Recipient [2:959:2816]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715659 ShardId: 72075186224037888 2026-01-04T02:23:52.791761Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:959:2816] TxId# 281474976715658] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2026-01-04T02:23:52.791798Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:959:2816] TxId# 281474976715658] Released quota 1 reserved messages from ShardId# 72075186224037888 2026-01-04T02:23:52.791866Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-04T02:23:52.791905Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715659, at: 72075186224037888 2026-01-04T02:23:52.792101Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:886:2765], Recipient 
[2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:23:52.792159Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:23:52.792236Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:23:52.792280Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:23:52.792330Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715659] at 72075186224037888 for ReadTableScan 2026-01-04T02:23:52.792370Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit ReadTableScan 2026-01-04T02:23:52.792426Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715659] at 72075186224037888 error: Invalid DyNumber binary representation, IsFatalError: 1 2026-01-04T02:23:52.792487Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2026-01-04T02:23:52.792530Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit ReadTableScan 2026-01-04T02:23:52.792569Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715659] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:23:52.792604Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit FinishPropose 2026-01-04T02:23:52.792644Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is DelayComplete 2026-01-04T02:23:52.792687Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:23:52.792723Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715659] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:23:52.792754Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715659] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:23:52.792806Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715659] at 72075186224037888 is Executed 2026-01-04T02:23:52.792829Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715659] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:23:52.792856Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715659] at 72075186224037888 has finished 2026-01-04T02:23:52.792892Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:23:52.792925Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-01-04T02:23:52.792966Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:23:52.792999Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 
72075186224037888 2026-01-04T02:23:52.793077Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:23:52.793115Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715659] at 72075186224037888 on unit FinishPropose 2026-01-04T02:23:52.793153Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715659 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: EXEC_ERROR 2026-01-04T02:23:52.793200Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976715659 at tablet 72075186224037888 status: EXEC_ERROR errors: PROGRAM_ERROR (Invalid DyNumber binary representation) | 2026-01-04T02:23:52.793274Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:23:52.793608Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:886:2765], Recipient [2:959:2816]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037888 Status: EXEC_ERROR Error { Kind: PROGRAM_ERROR Reason: "Invalid DyNumber binary representation" } TxId: 281474976715659 Step: 0 OrderId: 281474976715659 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 423 } } CommitVersion { Step: 0 TxId: 281474976715659 } 2026-01-04T02:23:52.793653Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1922: [ReadTable [2:959:2816] TxId# 281474976715658] Received TEvProposeTransactionResult Status# EXEC_ERROR ShardId# 72075186224037888 2026-01-04T02:23:52.793713Z node 2 :TX_PROXY ERROR: read_table_impl.cpp:2920: [ReadTable [2:959:2816] TxId# 281474976715658] RESPONSE Status# ExecError shard: 72075186224037888 table: /Root/Table 2026-01-04T02:23:52.819499Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:959:2816], Recipient [2:886:2765]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 2500 TxId: 281474976715658 |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest |82.9%| [TA] {RESULT} $(B)/ydb/tests/datashard/parametrized_queries/test-results/py3test/{meta.json ... 
results_accumulator.log} |82.9%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/ydb-core-tx-tx_proxy-ut_storage_tenant >> KqpWrite::ProjectReplace+UseSink [GOOD] >> KqpWrite::OutOfSpace >> KqpWrite::InsertRevert [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction+UseSink [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink >> KqpWrite::CastValues >> KqpInplaceUpdate::SingleRowIf-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay_yt/query_replay_yt |82.9%| [LD] {RESULT} $(B)/ydb/tools/query_replay_yt/query_replay_yt |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay_yt/query_replay_yt ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::InsertRevert [GOOD] Test command err: Trying to start YDB, gRPC: 15720, MsgBus: 4346 2026-01-04T02:23:36.721950Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326420755898899:2241];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:36.722881Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:23:37.184261Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:23:37.266311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:37.266415Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:37.330530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:37.517254Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326420755898683:2081] 1767493416670960 != 1767493416670963 2026-01-04T02:23:37.556942Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:37.568411Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:23:37.705673Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:23:37.705710Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:23:37.705716Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:23:37.713956Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:23:37.728898Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:38.534386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:38.542599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:23:38.647937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:38.879300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:39.135915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:39.274643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:40.982426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326437935769760:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:40.982559Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:40.983530Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326437935769770:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:40.983604Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:41.430495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:41.500291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:41.617267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:41.672031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:41.715687Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326420755898899:2241];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:41.715855Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:23:41.739671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:41.833499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:41.965074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:42.053377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:42.259343Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326446525705232:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:42.259433Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:42.259974Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326446525705237:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:42.260013Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326446525705238:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:42.260045Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:42.264906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:23:42.290333Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326446525705241:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:23:42.506677Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326446525705293:3776] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:23:44.750488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... DB, gRPC: 10257, MsgBus: 7120 2026-01-04T02:23:46.587542Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591326462322491352:2098];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:46.625661Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:23:46.764477Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:23:46.767691Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:46.769441Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591326462322491273:2081] 1767493426523687 != 1767493426523690 2026-01-04T02:23:46.783865Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:46.783960Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:46.810220Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:46.955969Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:23:46.955990Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:23:46.955997Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:23:46.956099Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:23:47.019336Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:23:47.625761Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:47.746600Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:47.772381Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:47.860367Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:48.081729Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:48.229273Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:50.691112Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326479502362328:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:50.691243Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:50.691875Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326479502362338:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:50.691959Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:50.785447Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:50.883014Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:50.917947Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:50.947076Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:50.995811Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:51.047030Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:51.093600Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:51.147438Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:51.277336Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326483797330507:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:51.277396Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326483797330512:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:51.277474Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:51.278575Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326483797330515:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:51.278670Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:51.285741Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:23:51.306652Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591326483797330514:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:23:51.398428Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591326483797330567:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:23:51.565346Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591326462322491352:2098];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:51.566114Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:23:55.297121Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |82.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |82.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/pg/ydb-core-kqp-ut-pg >> DataShardSnapshots::MvccSnapshotLockedWrites-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink >> KqpImmediateEffects::Upsert [GOOD] >> KqpImmediateEffects::UpdateOn >> KqpImmediateEffects::ImmediateUpdate [GOOD] >> KqpImmediateEffects::ImmediateUpdateSelect >> KqpEffects::DeleteWithJoinAndIndex+UseSecondaryIndex-UseSink [GOOD] >> KqpEffects::DeleteWithJoinAndIndex+UseSecondaryIndex+UseSink >> KqpImmediateEffects::InsertDuplicates-UseSink [GOOD] >> KqpImmediateEffects::InsertExistingKey+UseSink >> test_simple.py::TestSimple::test[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test[table] >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink >> DataShardSnapshots::LockedWriteDistributedCommitSuccess+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__ASYNC-pk_types19-all_types19-index19-Uint32--ASYNC] [FAIL] |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/ut/ydb-core-client-ut |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/ut/ydb-core-client-ut |82.9%| [LD] {RESULT} $(B)/ydb/core/client/ut/ydb-core-client-ut >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLValueSinceUnixEpoch::test_case [GOOD] |82.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> DataShardSnapshots::ShardRestartLockBrokenByConflict [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert |82.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/ttl/py3test >> test_ttl.py::TestTTLDefaultEnv::test_case [GOOD] |82.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/ttl/py3test >> DataShardSnapshots::VolatileSnapshotReadTable [GOOD] >> DataShardSnapshots::VolatileSnapshotRefreshDiscard >> 
KqpEffects::InsertAbort_Params_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Duplicates-UseSink >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscription [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange |82.9%| [TA] $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |82.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} >> KqpImmediateEffects::InteractiveTxWithReadAtTheEnd-UseSink [GOOD] >> KqpImmediateEffects::Interactive >> DataShardSnapshots::LockedWriteDistributedCommitAborted-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |82.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data |82.9%| [TA] {RESULT} $(B)/ydb/tests/functional/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |82.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data/ydb-core-kqp-ut-data >> KqpInplaceUpdate::SingleRowIf+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowArithm+UseSink >> test_simple.py::TestSimple::test[table] [GOOD] >> test_simple.py::TestSimple::test[tablestores] |82.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams |83.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/datastreams/ydb-core-kqp-ut-federated_query-datastreams >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink [GOOD] >> TSelectFromViewTest::ReadTestCasesFromFiles [GOOD] >> TSelectFromViewTest::QueryCacheIsUpdated >> DataShardSnapshots::MvccSnapshotLockedWritesRestart+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink >> KqpImmediateEffects::UpsertDuplicates [GOOD] >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted >> KqpWrite::CastValues [GOOD] >> KqpWrite::CastValuesOptional |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> DataShardSnapshots::VolatileSnapshotRefreshDiscard [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowIf+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 25526, MsgBus: 65221 2026-01-04T02:23:39.811027Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326431536435695:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:39.811093Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:23:39.900383Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:23:40.714994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2026-01-04T02:23:40.715084Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:40.814225Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:23:40.869409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:40.883460Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:40.885410Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:40.898156Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326431536435674:2081] 1767493419802123 != 1767493419802126 2026-01-04T02:23:41.160353Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:23:41.266478Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:23:41.266503Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:23:41.266510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:23:41.266626Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:23:42.589008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:42.743041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:43.044981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:43.421321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:43.562530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:44.814014Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326431536435695:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:44.814097Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:23:47.436109Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326465896175933:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:47.436219Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:47.436757Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326465896175943:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:47.436805Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:47.926790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:48.023189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:48.151339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:48.194697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:48.270229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:48.355150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:48.429116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:48.547725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:48.889002Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326470191144150:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:48.889099Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:48.889588Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326470191144155:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:48.889640Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326470191144156:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:48.889668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:48.895844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:23:48.922919Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326470191144159:2497], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:23:49.023931Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326474486111508:3786] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:23:54.162489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, bu ... cher.cpp:1374: Notification cookie mismatch for subscription [2:7591326510805986810:2081] 1767493437374850 != 1767493437374853 2026-01-04T02:23:57.702463Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:23:57.924102Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:23:58.010860Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:58.010960Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:58.032184Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:58.302281Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:23:58.302305Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:23:58.302313Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:23:58.302406Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:23:58.418251Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:58.437976Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:00.268246Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:00.290986Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:24:00.298330Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:00.600224Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:00.915659Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:01.191669Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:02.419619Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591326510805986899:2097];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:02.419691Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:06.839765Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326549460694398:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:06.839842Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:06.840258Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326549460694408:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:06.840300Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:06.930451Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:07.014926Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:07.111723Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:07.172322Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:07.241168Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:07.323873Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:07.434835Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:07.539642Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:07.689903Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326553755662582:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:07.690016Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:07.690625Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326553755662587:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:07.690666Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326553755662588:2497], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:07.690700Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:07.696346Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:07.729723Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591326553755662591:2498], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:24:07.806057Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591326553755662644:3789] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:11.081058Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:12.654335Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:24:12.654359Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__ASYNC-pk_types19-all_types19-index19-Uint32--ASYNC] [FAIL] >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink [GOOD] |83.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |83.0%| [LD] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/query_stats/ut/ydb-core-sys_view-query_stats-ut |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/control/ut/ydb-core-control-ut |83.0%| [LD] {RESULT} $(B)/ydb/core/control/ut/ydb-core-control-ut |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/control/ut/ydb-core-control-ut >> KqpImmediateEffects::UpdateOn [GOOD] >> KqpImmediateEffects::InsertExistingKey+UseSink [GOOD] >> KqpImmediateEffects::InsertExistingKey-UseSink >> KqpEffects::DeleteWithJoinAndIndex+UseSecondaryIndex+UseSink [GOOD] >> KqpEffects::DeleteWithIndex-UseSecondaryIndex-UseSink |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |83.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless/ydb-core-tx-schemeshard-ut_serverless |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowPgNotNull+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24890, MsgBus: 15037 2026-01-04T02:23:40.599418Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326436523039924:2236];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:40.613939Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:23:41.605118Z node 1 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:42.096260Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:42.096382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:42.258840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:42.430139Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:23:42.430274Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:23:42.562014Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326436523039724:2081] 1767493420584004 != 1767493420584007 2026-01-04T02:23:42.562261Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:42.839189Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:23:43.139025Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:23:43.139044Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:23:43.139051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:23:43.139115Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:23:45.229516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:45.244952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:23:45.414093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:45.721416Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326436523039924:2236];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:45.721484Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 
2026-01-04T02:23:46.018186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:46.344397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:46.501738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:50.609051Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326479472714594:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:50.609187Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:50.610048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326479472714604:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:50.610108Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:51.932741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:52.137419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:52.214541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:52.282736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:52.319946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:52.401035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:52.476889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:52.632473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:52.789869Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326488062650097:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:52.789972Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:52.790349Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326488062650102:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:52.790397Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326488062650103:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:52.790636Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:52.795088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:23:52.819275Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326488062650106:2497], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:23:52.891596Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326488062650160:3789] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type ... 02:24:00.916445Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591326522750807110:2081] 1767493440593068 != 1767493440593071 2026-01-04T02:24:00.961031Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:00.961115Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:01.000397Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:01.043158Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:01.258708Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:01.258730Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:01.258738Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:01.258822Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:01.532393Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:01.647194Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:02.482120Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:02.497123Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:24:02.516924Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:02.656830Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-04T02:24:02.924544Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:03.057743Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:05.772647Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591326522750807257:2185];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:05.772968Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:08.819335Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326557110547394:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:08.819407Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:08.820083Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326557110547404:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:08.820148Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:08.932057Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:08.977961Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:09.044294Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:09.105238Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:09.172501Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:09.276546Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:09.367018Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:09.452655Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:09.745693Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326561405515565:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:09.745778Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:09.746286Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326561405515570:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:09.746329Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326561405515571:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:09.746361Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:09.751431Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:09.794353Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2026-01-04T02:24:09.794686Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591326561405515574:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:24:09.872886Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591326561405515629:3781] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:14.714078Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:15.887871Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:24:15.887906Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterAfterUpsertBeforeUpsertSelectTransaction-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3232, MsgBus: 21647 2026-01-04T02:23:39.576446Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326433614616955:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:39.576517Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:23:39.783754Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:23:39.917724Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:39.925915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:39.980406Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:40.004099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:40.091763Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:23:40.238639Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:23:40.238668Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:23:40.238684Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:23:40.238772Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:23:40.583541Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:41.548946Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:41.708266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:42.234768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:42.552971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:42.702749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:44.576597Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326433614616955:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:44.576670Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:23:45.383643Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326459384422556:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:45.383746Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:45.384281Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326459384422566:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:45.384538Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:46.326902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:46.368250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:46.411662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:46.451810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:46.527188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:46.587382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:46.671108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:46.759995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:46.905737Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326463679390742:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:46.905913Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:46.905937Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326463679390747:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:46.906187Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326463679390749:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:46.906229Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:46.909615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:23:46.928139Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326463679390750:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:23:47.044172Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326467974358100:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:23:49.054337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:49.489904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpA ... 026-01-04T02:24:01.002061Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:01.002502Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:01.063427Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:01.165925Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:01.314413Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:01.422615Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:01.422642Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:01.422651Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:01.422743Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:02.032557Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:03.131721Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:03.149658Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:03.390785Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:04.368123Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:04.700027Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:11.414193Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326568089966995:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:11.414271Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:11.415230Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326568089967005:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:11.415286Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:11.658793Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:11.795463Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:11.837060Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:11.884171Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:11.980852Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:12.102709Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:12.202602Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:12.347130Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:12.441606Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326572384935173:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:12.441702Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:12.442575Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326572384935178:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:12.442647Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326572384935179:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:12.442770Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:12.447147Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:12.458285Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-04T02:24:12.459143Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591326572384935182:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:24:12.528708Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591326572384935233:3797] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:15.210751Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:15.990393Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:24:15.990420Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:16.085486Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:24:17.024126Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=NjVjYzkxNzMtODEwMTc3NGYtYTRkZmFkM2MtZDljNDc1ZjI=, ActorId: [3:7591326585269837420:2549], ActorState: ExecuteState, LegacyTraceId: 01ke3d3md21yaajj8h8063yj5q, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. 
Tables: `/Root/TestTable`" issue_code: 2001 severity: 1 } trace_id# |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast-UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink >> DataShardSnapshots::LockedWriteDistributedCommitSuccess-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ImmediateUpdateSelect [GOOD] Test command err: Trying to start YDB, gRPC: 13314, MsgBus: 26213 2026-01-04T02:23:37.421233Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326422642111825:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:37.422483Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:23:37.478892Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:23:38.025644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:38.025736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:38.038538Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:38.090129Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:23:38.247073Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:38.431008Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:38.437788Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:23:38.449494Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-04T02:23:38.449553Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-04T02:23:38.449581Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:23:38.545092Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:23:38.545121Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:23:38.545128Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:23:38.545258Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-01-04T02:23:38.894958Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:23:39.194764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:39.251750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:39.421130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:39.692387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:39.807201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:42.413875Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326422642111825:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:42.413946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:23:42.911999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326444116950060:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:42.912130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:42.913313Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326444116950070:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:42.913386Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:43.456592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:43.494888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:43.537840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:43.619812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:43.664404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:43.723840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:43.757925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:43.834261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:44.030349Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326452706885540:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:44.030431Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:44.030676Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326452706885546:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:44.030715Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:44.030716Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326452706885545:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:44.035077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:23:44.062645Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326452706885549:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublech ... 99536658146131:7762515]; 2026-01-04T02:24:02.653480Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:02.748449Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:02.861875Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:02.866150Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591326531956406060:2081] 1767493442644635 != 1767493442644638 2026-01-04T02:24:02.884426Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:02.884507Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:02.923331Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:03.041158Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:03.124215Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:03.124237Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:03.124245Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:03.124334Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:03.658488Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:03.747560Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:03.793691Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:03.892894Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:04.254940Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:04.542270Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:07.656378Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591326531956406206:2186];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:07.656462Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:08.492013Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326557726211710:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:08.492126Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:08.497402Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326557726211720:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:08.497523Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:08.593166Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:08.654815Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:08.755843Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:08.844867Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:08.926715Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:09.072777Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:09.150376Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:09.361632Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:09.552571Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326562021179915:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:09.552712Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:09.553487Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326562021179921:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:09.553790Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326562021179920:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:09.553849Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:09.560051Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:09.586858Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591326562021179924:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:24:09.693177Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591326562021179977:3781] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:15.038396Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.838144Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:24:17.838174Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateOn [GOOD] Test command err: Trying to start YDB, gRPC: 3414, MsgBus: 6188 2026-01-04T02:23:44.786275Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326452175767457:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:44.799473Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:23:45.526602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:45.526732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:45.807616Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:23:45.829416Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:45.881750Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326452175767416:2081] 1767493424754189 != 1767493424754192 2026-01-04T02:23:45.887137Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:45.896751Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:45.899438Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:23:46.104301Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:23:46.190348Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:23:46.190364Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:23:46.190376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:23:46.190445Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:23:47.259113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:47.482367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:47.935213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:48.348115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:48.551434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:49.786758Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326452175767457:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:49.786838Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:23:54.224508Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326495125442316:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:54.224608Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:54.230348Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326495125442326:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:54.230434Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:55.030428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:55.200329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:55.282800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:55.366881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:55.467465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:55.585399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:55.695736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:55.812706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:56.121253Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326503715377805:2498], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:56.121345Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:56.122338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326503715377810:2501], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:56.122403Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326503715377811:2502], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:56.122534Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:56.128797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:23:56.160732Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326503715377814:2503], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:23:56.317794Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326503715377865:3806] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:23:59.347152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, b ... 01-04T02:24:02.635675Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:02.720831Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:03.122323Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:03.122404Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:03.125299Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:03.142444Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591326532172037599:2081] 1767493442607023 != 1767493442607026 2026-01-04T02:24:03.149821Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:03.170440Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:03.450400Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:03.450422Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:03.450427Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:03.450493Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:03.610022Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:03.681546Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:05.063152Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:05.089053Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:05.316756Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:05.835663Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:06.023993Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:07.638348Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591326532172037759:2189];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:07.638414Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:10.826170Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326566531777877:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:10.826258Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:10.833944Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326566531777886:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:10.834038Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:11.168076Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:11.251908Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:11.313255Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:11.364021Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:11.411760Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:11.495380Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:11.576016Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:11.773022Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:11.924811Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326570826746082:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:11.924901Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:11.926138Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326570826746087:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:11.926207Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326570826746088:2497], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:11.926243Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:11.932328Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:11.954928Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591326570826746091:2498], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:24:12.038776Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591326575121713438:3787] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:15.736503Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |83.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/ydb-core-tx-schemeshard-ut_extsubdomain >> KqpImmediateEffects::Interactive [GOOD] >> DataShardSnapshots::ShardRestartWholeShardLockBrokenByUpsert [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink >> TNodeBrokerTest::NodesMigrationRemovedChanged |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> KqpInplaceUpdate::Negative_SingleRowWithKeyCast+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink >> TStorageTenantTest::CreateTableInsideSubDomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::Interactive [GOOD] Test command err: Trying to start YDB, gRPC: 21710, MsgBus: 18462 2026-01-04T02:23:43.274058Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326451222865937:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:43.274142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:23:43.320667Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:23:43.675928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:43.676020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:43.699068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:43.781015Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:23:43.786109Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:44.010124Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:23:44.010146Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:23:44.010157Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:23:44.010321Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:23:44.067743Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:23:44.291614Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:44.851504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:44.961109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:45.185827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:45.464557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:45.592042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:48.270064Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326451222865937:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:48.270127Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:23:48.676490Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326472697704250:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:48.676615Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:48.681260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326472697704260:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:48.681340Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:49.054337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:49.110923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:49.189745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:49.260709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:49.309373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:49.371107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:49.509789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:49.652775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:49.820911Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326476992672430:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:49.820992Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:49.821691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326476992672435:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:49.821759Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326476992672436:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:49.821898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:49.825264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:23:49.838738Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326476992672439:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:23:49.907445Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326476992672490:3785] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:23:51.949156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemesha ... 01-04T02:24:11.937491Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:11.965887Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:12.205952Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591326569045520448:2081] 1767493451851361 != 1767493451851364 2026-01-04T02:24:12.214002Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:12.216875Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:12.216947Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:12.217401Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:12.247947Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:12.361786Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:12.361831Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:12.361838Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:12.361923Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:12.612169Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:12.875420Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:12.922279Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:12.953679Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:13.211525Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:13.472390Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:13.606606Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:16.862057Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591326569045520559:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:16.862126Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:17.523685Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326594815326112:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:17.523776Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:17.524715Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326594815326121:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:17.524783Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:17.684265Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.767366Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.812565Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.898081Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.960026Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:18.059112Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:18.199488Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:18.351023Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:18.659474Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326599110294310:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:18.659573Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:18.660026Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326599110294315:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:18.660071Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326599110294316:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:18.660199Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:18.664632Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:18.701986Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591326599110294319:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:24:18.768797Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591326599110294370:3781] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:20.781853Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpWrite::OutOfSpace [GOOD] >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_subscriber/unittest >> TSubscriberCombinationsTest::CombinationsMigratedPath [GOOD] Test command err: =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2026-01-04T02:22:24.026062Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:36:2066] 2026-01-04T02:22:24.026160Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:3:2050] Successful handshake: owner# 800, generation# 1 2026-01-04T02:22:24.026434Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:36:2066] 2026-01-04T02:22:24.026479Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:3:2050] Commit generation: owner# 800, generation# 1 2026-01-04T02:22:24.026579Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:37:2067] 2026-01-04T02:22:24.026642Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:6:2053] Successful handshake: owner# 800, generation# 1 2026-01-04T02:22:24.026901Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:37:2067] 2026-01-04T02:22:24.026934Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:6:2053] Commit generation: owner# 800, generation# 1 2026-01-04T02:22:24.027110Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:39:2069][/root/tenant] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:22:24.027636Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:43:2069] 2026-01-04T02:22:24.027693Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:3:2050] Upsert description: path# /root/tenant 2026-01-04T02:22:24.027850Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:3:2050] 
Subscribe: subscriber# [1:43:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-04T02:22:24.028132Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:44:2069] 2026-01-04T02:22:24.028175Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:6:2053] Upsert description: path# /root/tenant 2026-01-04T02:22:24.028226Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:6:2053] Subscribe: subscriber# [1:44:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-04T02:22:24.028376Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [1:45:2069] 2026-01-04T02:22:24.028409Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:9:2056] Upsert description: path# /root/tenant 2026-01-04T02:22:24.028455Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:9:2056] Subscribe: subscriber# [1:45:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-04T02:22:24.028540Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:43:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:3:2050] 2026-01-04T02:22:24.028630Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:43:2069] 2026-01-04T02:22:24.028713Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:44:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:6:2053] 2026-01-04T02:22:24.028766Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:44:2069] 2026-01-04T02:22:24.028814Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:45:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:9:2056] 2026-01-04T02:22:24.028871Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:45:2069] 2026-01-04T02:22:24.028986Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:40:2069] 2026-01-04T02:22:24.029090Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:41:2069] 2026-01-04T02:22:24.029157Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:39:2069][/root/tenant] Set up state: owner# [1:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:22:24.029240Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [1:42:2069] 2026-01-04T02:22:24.029295Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:39:2069][/root/tenant] Ignore empty state: owner# [1:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== 
!argsLeft.IsDeletion 2026-01-04T02:22:24.029591Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:3:2050] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:36:2066], cookie# 0, event size# 103 2026-01-04T02:22:24.029648Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:3:2050] Update description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2026-01-04T02:22:24.029736Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:3:2050] Upsert description: path# /root/tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /root/tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-04T02:22:24.029964Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:43:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:3:2050] 2026-01-04T02:22:24.030034Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [1:43:2069] 2026-01-04T02:22:24.030109Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:39:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant PathId: [OwnerId: 800, LocalPathId: 2] Version: 1 }: sender# [1:40:2069] 2026-01-04T02:22:24.030194Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:39:2069][/root/tenant] Update to strong state: owner# [1:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 800, LocalPathId: 2], Version: 1) DomainId: [OwnerId: 800, LocalPathId: 2] AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/root/tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2026-01-04T02:22:24.515887Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:36:2066] 2026-01-04T02:22:24.515966Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:3:2050] Successful handshake: owner# 800, generation# 1 2026-01-04T02:22:24.516145Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:36:2066] 2026-01-04T02:22:24.516180Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:3:2050] Commit generation: owner# 800, generation# 1 2026-01-04T02:22:24.516228Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:37:2067] 2026-01-04T02:22:24.516258Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:6:2053] Successful handshake: owner# 900, generation# 1 2026-01-04T02:22:24.516453Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:37:2067] 2026-01-04T02:22:24.516488Z node 3 :SCHEME_BOARD_REPLICA 
NOTICE: replica.cpp:994: [3:6:2053] Commit generation: owner# 900, generation# 1 2026-01-04T02:22:24.516605Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:39:2069][/root/tenant] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:22:24.517010Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:43:2069] 2026-01-04T02:22:24.517047Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:3:2050] Upsert description: path# /root/tenant 2026-01-04T02:22:24.517141Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:3:2050] Subscribe: subscriber# [3:43:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-04T02:22:24.517294Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:44:2069] 2026-01-04T02:22:24.517323Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:6:2053] Upsert description: path# /root/tenant 2026-01-04T02:22:24.517373Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:6:2053] Subscribe: subscriber# [3:44:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-04T02:22:24.517495Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/tenant DomainOwnerId: 1 }: sender# [3:45:2069] 2026-01-04T02:22:24.517520Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:9:2056] Upsert description: path# /root/tenant 2026-01-04T02:22:24.517557Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:9:2056] Subscribe: subscriber# [3:45:2069], path# /root/tenant, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-04T02:22:24.517645Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:43:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:3:2050] 2026-01-04T02:22:24.517700Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:43:2069] 2026-01-04T02:22:24.517769Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:44:2069][/root/tenant] Handle NKikimrSchemeBoard.TEvNotify { Path: /root/tenant Version: 0 }: sender# [3:6:2053] 2026-01-04 ... 
94: [399:3:2050] Commit generation: owner# 910, generation# 1 2026-01-04T02:24:22.262436Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [399:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [399:37:2067] 2026-01-04T02:24:22.262474Z node 399 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [399:6:2053] Successful handshake: owner# 910, generation# 1 2026-01-04T02:24:22.262692Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [399:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [399:37:2067] 2026-01-04T02:24:22.262729Z node 399 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [399:6:2053] Commit generation: owner# 910, generation# 1 2026-01-04T02:24:22.262866Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][399:39:2069][/Root/Tenant/table_inside] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[399:24339059:0], [399:1099535966835:0], [399:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:24:22.263382Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [399:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:43:2069] 2026-01-04T02:24:22.263430Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [399:3:2050] Upsert description: path# /Root/Tenant/table_inside 2026-01-04T02:24:22.263519Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [399:3:2050] Subscribe: subscriber# [399:43:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-04T02:24:22.263699Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [399:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:44:2069] 2026-01-04T02:24:22.263733Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [399:6:2053] Upsert description: path# /Root/Tenant/table_inside 2026-01-04T02:24:22.263782Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [399:6:2053] Subscribe: subscriber# [399:44:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-04T02:24:22.263928Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [399:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [399:45:2069] 2026-01-04T02:24:22.263958Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [399:9:2056] Upsert description: path# /Root/Tenant/table_inside 2026-01-04T02:24:22.264005Z node 399 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [399:9:2056] Subscribe: subscriber# [399:45:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-04T02:24:22.264084Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][399:43:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:3:2050] 2026-01-04T02:24:22.264167Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [399:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:43:2069] 2026-01-04T02:24:22.264224Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][399:44:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:6:2053] 2026-01-04T02:24:22.264272Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [399:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { 
Version: 0 }: sender# [399:44:2069] 2026-01-04T02:24:22.264317Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][399:45:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:9:2056] 2026-01-04T02:24:22.264362Z node 399 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [399:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [399:45:2069] 2026-01-04T02:24:22.264452Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][399:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:40:2069] 2026-01-04T02:24:22.264533Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][399:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:41:2069] 2026-01-04T02:24:22.264592Z node 399 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][399:39:2069][/Root/Tenant/table_inside] Set up state: owner# [399:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:22.264653Z node 399 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][399:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [399:42:2069] 2026-01-04T02:24:22.264702Z node 399 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][399:39:2069][/Root/Tenant/table_inside] Ignore empty state: owner# [399:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 2026-01-04T02:24:22.741684Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [401:3:2050] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [401:36:2066] 2026-01-04T02:24:22.741759Z node 401 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [401:3:2050] Successful handshake: owner# 910, generation# 1 2026-01-04T02:24:22.741985Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [401:3:2050] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [401:36:2066] 2026-01-04T02:24:22.742024Z node 401 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [401:3:2050] Commit generation: owner# 910, generation# 1 2026-01-04T02:24:22.742076Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [401:6:2053] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [401:37:2067] 2026-01-04T02:24:22.742111Z node 401 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [401:6:2053] Successful handshake: owner# 910, generation# 1 2026-01-04T02:24:22.742307Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [401:6:2053] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [401:37:2067] 2026-01-04T02:24:22.742337Z node 401 :SCHEME_BOARD_REPLICA NOTICE: 
replica.cpp:994: [401:6:2053] Commit generation: owner# 910, generation# 1 2026-01-04T02:24:22.742457Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][401:39:2069][/Root/Tenant/table_inside] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[401:24339059:0], [401:1099535966835:0], [401:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:24:22.742905Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [401:3:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [401:43:2069] 2026-01-04T02:24:22.742944Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [401:3:2050] Upsert description: path# /Root/Tenant/table_inside 2026-01-04T02:24:22.743022Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [401:3:2050] Subscribe: subscriber# [401:43:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-04T02:24:22.743185Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [401:6:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [401:44:2069] 2026-01-04T02:24:22.743213Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [401:6:2053] Upsert description: path# /Root/Tenant/table_inside 2026-01-04T02:24:22.743258Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [401:6:2053] Subscribe: subscriber# [401:44:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-04T02:24:22.743400Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [401:9:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 1 }: sender# [401:45:2069] 2026-01-04T02:24:22.743429Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [401:9:2056] Upsert description: path# /Root/Tenant/table_inside 2026-01-04T02:24:22.743469Z node 401 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [401:9:2056] Subscribe: subscriber# [401:45:2069], path# /Root/Tenant/table_inside, domainOwnerId# 1, capabilities# AckNotifications: true 2026-01-04T02:24:22.743536Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][401:43:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:3:2050] 2026-01-04T02:24:22.743594Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [401:3:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [401:43:2069] 2026-01-04T02:24:22.743643Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][401:44:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:6:2053] 2026-01-04T02:24:22.743685Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [401:6:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [401:44:2069] 2026-01-04T02:24:22.743732Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][401:45:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:9:2056] 2026-01-04T02:24:22.743774Z node 401 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [401:9:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [401:45:2069] 2026-01-04T02:24:22.743864Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][401:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: 
/Root/Tenant/table_inside Version: 0 }: sender# [401:40:2069] 2026-01-04T02:24:22.743935Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][401:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:41:2069] 2026-01-04T02:24:22.743990Z node 401 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][401:39:2069][/Root/Tenant/table_inside] Set up state: owner# [401:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:22.744045Z node 401 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][401:39:2069][/Root/Tenant/table_inside] Handle NKikimrSchemeBoard.TEvNotify { Path: /Root/Tenant/table_inside Version: 0 }: sender# [401:42:2069] 2026-01-04T02:24:22.744111Z node 401 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][401:39:2069][/Root/Tenant/table_inside] Ignore empty state: owner# [401:38:2068], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } =========== argsLeft.GetSuperId() >= argsRight.GetSuperId() |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_subscriber/unittest >> KqpEffects::InsertAbort_Params_Duplicates-UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Conflict-UseSink >> DataShardSnapshots::MvccSnapshotReadLockedWrites+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest |83.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |83.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |83.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |83.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut |83.0%| [LD] {RESULT} $(B)/ydb/library/ncloud/impl/ut/ydb-library-ncloud-impl-ut >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::OutOfSpace [GOOD] Test command err: Trying to start YDB, gRPC: 26321, MsgBus: 2338 2026-01-04T02:23:40.600058Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326437430536008:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:40.600256Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:23:41.433779Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:23:41.482370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:41.482480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:41.658260Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:41.744482Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:41.777956Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326437430535899:2081] 1767493420571950 != 1767493420571953 2026-01-04T02:23:41.780653Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:41.869937Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:23:42.325042Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:23:42.325060Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:23:42.325067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:23:42.325135Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:23:43.661063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:43.895774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:44.799777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:45.393230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:45.672456Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326437430536008:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:45.672508Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:23:45.766640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:50.842471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326480380210781:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:50.842622Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:50.854085Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326480380210791:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:50.854201Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:51.699128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:51.794680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:51.861150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:51.924555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:52.001532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:52.114781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:52.311438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:52.482928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:52.612614Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326488970146275:2497], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:52.612691Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:52.613111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326488970146281:2501], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:52.613361Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326488970146280:2500], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:52.613395Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:52.617474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:23:52.650473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-04T02:23:52.650801Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326488970146284:2502], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:23:52.727797Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326488970146337:3800] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:23:56.398512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:23:56.398538Z node 1 :IMPORT WARN: sche ... node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:09.568892Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:24:10.273152Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:10.273214Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:10.273250Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:10.286636Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:10.299058Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:11.816743Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:11.911866Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:12.365474Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:13.070255Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:13.798860Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called 
at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:15.528325Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1918:3524], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:15.528790Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:15.530127Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1992:3543], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:15.538211Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:15.726183Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:16.050475Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:16.397969Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:16.774200Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.300189Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.734823Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:18.287428Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:18.967090Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:19.768435Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2804:4184], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:19.768563Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:19.769060Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2808:4188], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:19.769136Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:19.769210Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2811:4191], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:19.800966Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:20.032531Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2813:4193], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:24:20.168262Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2887:4248] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=WARN;component=0;fline=events.h:105;event=ev_write_error;status=STATUS_DISK_GROUP_OUT_OF_SPACE;details=;tx_id=3; 2026-01-04T02:24:24.193184Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:860: SelfId: [2:3208:4481], Table: `/Root/KeyValue` ([72057594046644480:42:1]), SessionActorId: [2:3182:4481]Got DISK_GROUP_OUT_OF_SPACE for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[2:3208:4481]. Ignored this error.{
: Error: , code: 2006 } 2026-01-04T02:24:24.193827Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [2:3201:4481], SessionActorId: [2:3182:4481], statusCode=UNAVAILABLE. Issue=
: Error: Tablet 72075186224037911 is out of space. Table `/Root/KeyValue`., code: 2036
: Error: , code: 2006 . sessionActorId=[2:3182:4481]. 2026-01-04T02:24:24.194267Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=2&id=NTlkMTUwM2YtYzM4MTk1NjUtYzRlNTY1NjktMTBlYmFlYw==, ActorId: [2:3182:4481], ActorState: ExecuteState, LegacyTraceId: 01ke3d3vr6a0mrzp1234ya7hkx, got TEvKqpBuffer::TEvError in ExecuteState, status: UNAVAILABLE send to: [2:3202:4481] from: [2:3201:4481] trace_id# 2026-01-04T02:24:24.194425Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [2:3202:4481] TxId: 281474976715673. Ctx: { TraceId: 01ke3d3vr6a0mrzp1234ya7hkx, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTlkMTUwM2YtYzM4MTk1NjUtYzRlNTY1NjktMTBlYmFlYw==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# UNAVAILABLE Issues# {
: Error: Tablet 72075186224037911 is out of space. Table `/Root/KeyValue`., code: 2036 subissue: {
: Error: , code: 2006 } } trace_id# 2026-01-04T02:24:24.194886Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=NTlkMTUwM2YtYzM4MTk1NjUtYzRlNTY1NjktMTBlYmFlYw==, ActorId: [2:3182:4481], ActorState: ExecuteState, LegacyTraceId: 01ke3d3vr6a0mrzp1234ya7hkx, Create QueryResponse for error on request, msg: status# UNAVAILABLE issues# { message: "Tablet 72075186224037911 is out of space. Table `/Root/KeyValue`." issue_code: 2036 severity: 1 issues { issue_code: 2006 severity: 1 } } trace_id# |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |83.1%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/ut/ydb-core-tablet_flat-ut |83.1%| [TA] $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> test_simple.py::TestSimple::test[tablestores] [GOOD] >> test_simple.py::TestSimple::test_multi[alter_table] [GOOD] |83.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> test_simple.py::TestSimple::test_multi[alter_tablestore] [GOOD] >> test_simple.py::TestSimple::test_multi[table] [GOOD] >> test_simple.py::TestSimple::test_multi[tablestores] [GOOD] |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] |83.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull+UploadNull >> TNodeBrokerTest::NodesMigrationRemovedChanged [GOOD] >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] >> TStorageTenantTest::Boot >> listing_paging.py::TestListingPaging::test_listing_paging_solomon [GOOD] >> listing_paging.py::TestListingPaging::test_listing_paging_monitoring ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemovedChanged [GOOD] Test command err: 2026-01-04T02:24:27.169443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:24:27.169516Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx |83.1%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_subscriber/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/tx/ydb-core-kqp-ut-tx >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0_UNIQUE_SYNC-pk_types8-all_types8-index8-pgdate-UNIQUE-SYNC] [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain [GOOD] Test command err: 2026-01-04T02:24:26.273348Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326633178525745:2091];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:26.277118Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:26.469895Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:24:26.545274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:26.545399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:26.552529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:26.655183Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:26.767204Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:24:26.968998Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591326633178525959:2128] Handle TEvNavigate describe path dc-1 2026-01-04T02:24:26.969040Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591326633178526451:2457] HANDLE EvNavigateScheme dc-1 2026-01-04T02:24:26.969149Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591326633178525987:2144], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:24:26.969232Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591326633178526201:2283][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591326633178525987:2144], cookie# 1 2026-01-04T02:24:26.970717Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591326633178526229:2283][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591326633178526226:2283], cookie# 1 2026-01-04T02:24:26.970755Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591326633178526230:2283][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591326633178526227:2283], cookie# 1 2026-01-04T02:24:26.970768Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591326633178526231:2283][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { 
Path: /dc-1 }: sender# [1:7591326633178526228:2283], cookie# 1 2026-01-04T02:24:26.970812Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591326633178525630:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591326633178526229:2283], cookie# 1 2026-01-04T02:24:26.970839Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591326633178525633:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591326633178526230:2283], cookie# 1 2026-01-04T02:24:26.970853Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591326633178525636:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591326633178526231:2283], cookie# 1 2026-01-04T02:24:26.970894Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591326633178526229:2283][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591326633178525630:2051], cookie# 1 2026-01-04T02:24:26.970919Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591326633178526230:2283][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591326633178525633:2054], cookie# 1 2026-01-04T02:24:26.970936Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591326633178526231:2283][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591326633178525636:2057], cookie# 1 2026-01-04T02:24:26.971015Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591326633178526201:2283][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591326633178526226:2283], cookie# 1 2026-01-04T02:24:26.971039Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591326633178526201:2283][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:24:26.971053Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591326633178526201:2283][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591326633178526227:2283], cookie# 1 2026-01-04T02:24:26.971075Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591326633178526201:2283][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:24:26.971108Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591326633178526201:2283][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591326633178526228:2283], cookie# 1 2026-01-04T02:24:26.971120Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591326633178526201:2283][/dc-1] Sync cookie mismatch: sender# [1:7591326633178526228:2283], cookie# 1, current cookie# 0 2026-01-04T02:24:26.971174Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591326633178525987:2144], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:24:26.993134Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591326633178525987:2144], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { 
Subscriber: [1:7591326633178526201:2283] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:24:27.010196Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591326633178525987:2144], cacheItem# { Subscriber: { Subscriber: [1:7591326633178526201:2283] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:24:27.012807Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591326637473493748:2458], recipient# [1:7591326633178526451:2457], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:24:27.012907Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591326633178526451:2457] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:24:27.118497Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591326633178526451:2457] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:24:27.139678Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591326633178526451:2457] Handle TEvDescribeSchemeResult Forward to# [1:7591326633178526450:2456] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } 
Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:24:27.147929Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591326633178525959:2128] Handle TEvProposeTransaction 2026-01-04T02:24:27.147973Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591326633178525959:2128] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:24:27.148035Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591326633178525959:2128] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7591326637473493754:2463] 2026-01-04T02:24:27.241938Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591326637473493754:2463] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-01-04T02:24:27.241983Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# ... DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:29.309796Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7591326644598788803:2364] 2026-01-04T02:24:29.309827Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7591326644598788348:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:29.634130Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7591326644598788801:2364] 2026-01-04T02:24:29.634218Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7591326644598788348:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:29.634273Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7591326644598788802:2364] 
2026-01-04T02:24:29.634327Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7591326644598788348:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:29.634348Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7591326644598788803:2364] 2026-01-04T02:24:29.634366Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7591326644598788348:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:29.998362Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591326644598788348:2110], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:24:29.998502Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591326644598788348:2110], cacheItem# { Subscriber: { Subscriber: [3:7591326644598788800:2364] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:24:29.998663Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591326648893756124:2367], recipient# [3:7591326648893756123:2313], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-04T02:24:29.998803Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:24:30.276282Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7591326644598788801:2364] 2026-01-04T02:24:30.276374Z node 3 
:SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7591326644598788348:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:30.276535Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7591326644598788802:2364] 2026-01-04T02:24:30.276567Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7591326644598788348:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:30.276583Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7591326644598788803:2364] 2026-01-04T02:24:30.276604Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7591326644598788348:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:31.002799Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591326644598788348:2110], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:24:31.002926Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591326644598788348:2110], cacheItem# { Subscriber: { Subscriber: [3:7591326644598788800:2364] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:24:31.003054Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591326657483690722:2369], recipient# [3:7591326657483690721:2314], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true 
ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-04T02:24:31.003382Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_0/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:24:31.557728Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7591326644598788801:2364] 2026-01-04T02:24:31.557819Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7591326644598788348:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:31.557841Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7591326644598788802:2364] 2026-01-04T02:24:31.557862Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7591326644598788348:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:31.557884Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/initialization/migrations Version: 0 }: sender# [3:7591326644598788803:2364] 2026-01-04T02:24:31.557900Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591326644598788800:2364][/dc-1/USER_0/.metadata/initialization/migrations] Ignore empty state: owner# [3:7591326644598788348:2110], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__ASYNC-pk_types7-all_types7-index7-pgdate--ASYNC] |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesMode >> DataShardSnapshots::MvccSnapshotLockedWritesRestart-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink >> KqpWrite::CastValuesOptional [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowArithm+UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink >> KqpEffects::DeleteWithIndex-UseSecondaryIndex-UseSink [GOOD] >> TSchemeShardServerLess::ForbidInMemoryCacheModeInServerLess >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] |83.1%| [LD] {default-linux-x86_64, release, asan} 
$(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |83.1%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/describes_ut/ydb-services-persqueue_v1-ut-describes_ut >> TLocksTest::Range_BrokenLock0 >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-false >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::CastValuesOptional [GOOD] Test command err: Trying to start YDB, gRPC: 8840, MsgBus: 2517 2026-01-04T02:24:00.660561Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326523390045390:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:00.660923Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:01.479747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:01.479903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:01.546152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:01.573081Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:01.663644Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:01.673929Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326523390045134:2081] 1767493440578451 != 1767493440578454 2026-01-04T02:24:01.706528Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:01.765918Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:02.014700Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:02.014722Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:02.014739Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:02.014817Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:03.697556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:03.880215Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:04.192711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:04.716995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:04.971248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:05.656241Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326523390045390:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:05.656294Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:09.088177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326562044752715:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:09.088328Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:09.102934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326562044752725:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:09.103046Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:09.605390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:09.721127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:09.813320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:09.911986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:10.014014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:10.115373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:10.213422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:10.315459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:10.522524Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326566339720916:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:10.522642Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:10.523494Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326566339720921:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:10.523561Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326566339720922:2497], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:10.523873Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:10.529507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:10.563829Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326566339720925:2498], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:24:10.707415Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326566339720977:3791] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:16.376152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:24:16.376181Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 65197, MsgBus: 31949 2026-01-04T02:24:18.989148Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591326600620661471:2225];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:18.997918Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:19.125932Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:19.381896Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:19.435790Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:19.435940Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:19.457651Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591326600620661260:2081] 1767493458887135 != 1767493458887138 2026-01-04T02:24:19.545205Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:19.570953Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:19.912069Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:19.926446Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:19.926471Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:19.926479Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:19.926550Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:19.961924Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:21.065077Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:21.078504Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:24:21.090386Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:21.224426Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:21.649345Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:21.819180Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:23.989100Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591326600620661471:2225];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:23.989184Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:28.631412Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326643570336154:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:28.631501Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:28.632936Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326643570336164:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:28.633016Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:29.033416Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:29.125836Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:29.217074Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:29.279999Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:29.388087Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:29.501663Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:29.729012Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:29.946359Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:30.165288Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326652160271655:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:30.165458Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:30.167087Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326652160271661:2500], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:30.167156Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326652160271660:2499], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:30.167200Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:30.171709Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:30.215027Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591326652160271664:2501], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:24:30.296983Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591326652160271715:3801] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28390, MsgBus: 6622 2026-01-04T02:24:08.011418Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326556044802692:2198];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:08.011657Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:08.135675Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:24:08.639981Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:08.640069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:08.655381Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:08.840990Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:08.908432Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326551749835223:2081] 1767493447957739 != 1767493447957742 2026-01-04T02:24:08.916609Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:09.155199Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:09.175338Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:09.202402Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:09.202418Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:09.202426Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:09.202562Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:10.554780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:10.712826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:11.048875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:11.593161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:11.793835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:13.011818Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326556044802692:2198];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:13.011871Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:17.190758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326594699510121:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:17.190855Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:17.191448Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326594699510131:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:17.191502Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:17.642477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.684049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.719646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.752392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.788213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.896081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.955647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:18.003717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:18.088857Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326598994478298:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:18.088946Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:18.089214Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326598994478303:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:18.089263Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326598994478304:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:18.089381Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:18.094590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:18.106759Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326598994478307:2497], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:24:18.183673Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326598994478363:3801] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:20.280550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but ... 01-04T02:24:22.907589Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:23.030065Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:23.398104Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:23.417635Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:23.417733Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:23.419227Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:23.443007Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:23.465144Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591326617064337399:2081] 1767493462904891 != 1767493462904894 2026-01-04T02:24:23.654703Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:23.654730Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:23.654737Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:23.654834Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:23.728241Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:23.902363Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:24.462916Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:24.504671Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:24.639587Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:24.837027Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:24.951621Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:27.908701Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591326617064337423:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:27.908794Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:28.309330Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326642834143054:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:28.309448Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:28.309847Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326642834143063:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:28.309898Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:28.375858Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:28.403887Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:28.447370Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:28.494705Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:28.529011Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:28.563593Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:28.602623Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:28.668608Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:28.840873Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326642834143932:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:28.840997Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:28.845993Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326642834143937:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:28.846085Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326642834143938:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:28.846215Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:28.851499Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:28.897091Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591326642834143941:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:24:28.987599Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591326642834143996:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:32.780768Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::DeleteWithIndex-UseSecondaryIndex-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 15068, MsgBus: 26716 2026-01-04T02:23:42.902290Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326444524961083:2205];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:42.902408Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:23:43.458233Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:23:43.461456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:43.461549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:43.546852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:43.609158Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:43.738656Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:23:43.738679Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:23:43.738685Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:23:43.738762Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:23:43.756051Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:23:43.906242Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:44.506414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:44.531405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:23:44.624745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:45.012318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:45.345671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:45.529689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:47.897433Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326444524961083:2205];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:47.897501Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:23:48.732192Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326470294766562:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:48.732310Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:48.732846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326470294766572:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:48.732885Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:49.340162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:49.405055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:49.489144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:49.604610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:49.671512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:49.950965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:50.147261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:50.278967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:50.481199Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326478884702070:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:50.481279Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:50.482569Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326478884702075:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:50.482614Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326478884702076:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:50.482749Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:50.487088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:23:50.511177Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326478884702079:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:23:50.628272Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326478884702132:3784] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:23:55.272910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 10711, MsgBus: 25022 2026-01-04T02:24:21.089961Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:21.090071Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:24:21.220894Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:21.223690Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:21.223774Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:21.226196Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591326611560568214:2081] 1767493461016001 != 1767493461016004 2026-01-04T02:24:21.262226Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:21.268165Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:21.422522Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:21.422555Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:21.422563Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:21.422670Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:22.082120Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:22.186862Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:22.202207Z node 3 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:24:22.213623Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:22.445185Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:22.861760Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:23.026228Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:26.457720Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326633035406575:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:26.457835Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:26.458215Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326633035406585:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:26.458282Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:26.536011Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:26.609389Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:26.670409Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:26.709609Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:26.768893Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:26.940222Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:27.022277Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:27.109138Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:27.215078Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326637330374752:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:27.215177Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:27.215675Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326637330374757:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:27.215729Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326637330374758:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:27.216019Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:27.220461Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:27.246007Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591326637330374761:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:24:27.342677Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591326637330374814:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:30.464968Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:30.536150Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:24:34.392208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:24:34.392312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:34.392354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:24:34.392395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:24:34.392434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:24:34.392465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:24:34.392522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:34.392631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:24:34.393413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: 
HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:24:34.393723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:24:34.482746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:24:34.483047Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:34.497021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:24:34.497254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:24:34.497448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:24:34.507888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:24:34.508168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:24:34.508955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:24:34.509248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:24:34.512586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:34.512814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:24:34.514087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:24:34.514153Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:34.514267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:24:34.514323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:24:34.514450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:24:34.514659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:24:34.658215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:34.659371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView 
Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:34.659521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:34.659655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:34.659739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:34.659811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:34.659880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:34.659945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:34.660019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:24:34.660179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:34.660252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:34.660344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:34.660423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:34.660531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:34.660633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
T02:24:36.673475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-04T02:24:36.673547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2026-01-04T02:24:36.673680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:24:36.673791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 39], Generation: 2, ActorId:[1:1061:2992], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2026-01-04T02:24:36.701211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:24:36.701262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:24:36.701437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:36.701493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-04T02:24:36.701669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:24:36.701718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2026-01-04T02:24:36.701838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 240 -> 240 2026-01-04T02:24:36.702826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:24:36.702944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:24:36.702984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:24:36.703019Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 
72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-04T02:24:36.703057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 6 2026-01-04T02:24:36.703144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-01-04T02:24:36.710765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:24:36.710853Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-04T02:24:36.710981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:24:36.711034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:24:36.711078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:24:36.711141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:24:36.711205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-01-04T02:24:36.711261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:24:36.711309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-04T02:24:36.711354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-04T02:24:36.711552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:24:36.712030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-04T02:24:36.718138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-04T02:24:36.718204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-04T02:24:36.718729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-04T02:24:36.718849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:24:36.718894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1271:3152] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2026-01-04T02:24:36.723209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:24:36.723384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "SharedDB" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared } 2026-01-04T02:24:36.723450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/SharedDB 2026-01-04T02:24:36.723632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2026-01-04T02:24:36.723680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, at schemeshard: 72057594046678944 2026-01-04T02:24:36.728615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:24:36.728927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ServerlessComputeResourcesMode can be changed only for serverless, operation: ALTER DATABASE, path: /MyRoot/SharedDB TestModificationResult got TxId: 105, wait until txId: 105 TestModificationResults wait txId: 106 2026-01-04T02:24:36.734982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:24:36.735194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeUnspecified } 2026-01-04T02:24:36.735255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 106:0, path /MyRoot/ServerLess0 2026-01-04T02:24:36.735406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 106:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: 
can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2026-01-04T02:24:36.735458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, at schemeshard: 72057594046678944 2026-01-04T02:24:36.738695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:24:36.738964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: can not set ServerlessComputeResourcesMode to EServerlessComputeResourcesModeUnspecified, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 106, wait until txId: 106 |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze+UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeout [GOOD] >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] >> TSchemeShardServerLess::StorageBilling ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:24:34.720210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:24:34.720293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:34.720326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:24:34.720355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:24:34.720389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:24:34.720420Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:24:34.720514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:34.721501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:24:34.722192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:24:34.722405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:24:34.815950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:24:34.816001Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:34.832071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:24:34.837628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:24:34.837864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:24:34.844772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:24:34.845655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:24:34.846489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:24:34.846757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:24:34.851074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:34.851241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:24:34.852412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:24:34.852493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:34.852604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:24:34.852655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:24:34.852692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:24:34.852859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at 
schemeshard: 72057594046678944 2026-01-04T02:24:35.055714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.056629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.056743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.056807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.056896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.056968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.057042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.057113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView 
Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.057176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.057270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.057346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.057410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.057494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.057570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.057636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
, TxId: 106, tablet: 72075186233409546, partId: 0 2026-01-04T02:24:37.990982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2026-01-04T02:24:37.991060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:795: [72057594046678944] TSyncHive, operationId 106:0, HandleReply TEvUpdateDomainReply, from hive: 72075186233409546 2026-01-04T02:24:37.991127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 106:0 138 -> 240 2026-01-04T02:24:37.993622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-04T02:24:37.993700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-04T02:24:37.996244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-04T02:24:37.996455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-04T02:24:37.996517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 106:0 ProgressState 2026-01-04T02:24:37.996679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-04T02:24:37.996723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-04T02:24:37.996768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-04T02:24:37.996803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-04T02:24:37.996851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2026-01-04T02:24:37.996897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-04T02:24:37.996934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-01-04T02:24:37.996963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 106:0 2026-01-04T02:24:37.997037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-04T02:24:38.003325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-04T02:24:38.003378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-04T02:24:38.003948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 
2026-01-04T02:24:38.004041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-04T02:24:38.004088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1347:3228] TestWaitNotification: OK eventTxId 106 2026-01-04T02:24:38.004865Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:24:38.005144Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 252us result status StatusSuccess 2026-01-04T02:24:38.005598Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:24:38.006968Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409549 2026-01-04T02:24:38.007192Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186234409549 describe path "/MyRoot/ServerLess0" took 248us result status StatusSuccess 2026-01-04T02:24:38.007670Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "MyRoot/ServerLess0" PathId: 1 SchemeshardId: 72075186234409549 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 4 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186234409549 PathType: EPathTypeDir CreateFinished: false CreateTxId: 281474976725657 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 30 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/ServerLess0" } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186234409549, at schemeshard: 72075186234409549 2026-01-04T02:24:38.008472Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:24:38.008693Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 209us result status StatusSuccess 2026-01-04T02:24:38.009088Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLess0" PathDescription { Self { Name: "ServerLess0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409550 
TimeCastBucketsPerMediator: 2 Mediators: 72075186234409551 SchemeShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SharedHive: 72075186233409546 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:24:38.012113Z node 1 :HIVE INFO: tablet_helpers.cpp:1652: [72075186233409546] TEvRequestDomainInfo, 72057594046678944:39 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::TestServerlessComputeResourcesModeFeatureFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:24:35.282362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:24:35.282457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:35.282496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:24:35.282535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:24:35.282571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:24:35.282600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:24:35.282653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:35.282729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:24:35.283606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:24:35.283913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:24:35.430316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:24:35.430375Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:35.445363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:24:35.445548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:24:35.445704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:24:35.456481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:24:35.456764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:24:35.457526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:24:35.457780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:24:35.467734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:35.467940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:24:35.468954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:24:35.469000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:35.469083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:24:35.469117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:24:35.469193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:24:35.469338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:24:35.648687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.649716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.654619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.654775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.654850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.654919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.654981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.655042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.655141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.655259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.655345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.655431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.655513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.655616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:35.655714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
86234409549, txId: 281474976725658 2026-01-04T02:24:38.814077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186234409549, txId: 281474976725658, pathId: [OwnerId: 72075186234409549, LocalPathId: 1], version: 6 2026-01-04T02:24:38.814131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186234409549, LocalPathId: 1] was 5 2026-01-04T02:24:38.814207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725658, ready parts: 0/1, is published: true 2026-01-04T02:24:38.819191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409549, cookie: 281474976725657 2026-01-04T02:24:38.819365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2026-01-04T02:24:38.819468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:24:38.819617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409549, cookie: 281474976725657 FAKE_COORDINATOR: Add transaction: 104 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000041 2026-01-04T02:24:38.819799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409549, cookie: 281474976725658 2026-01-04T02:24:38.819928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:24:38.820074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:24:38.820148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2026-01-04T02:24:38.820495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-04T02:24:38.820560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 104:0, at tablet# 72057594046678944 2026-01-04T02:24:38.820675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:24:38.820778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 39], Generation: 2, ActorId:[1:1061:2992], 
EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 104 2026-01-04T02:24:38.824119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:24:38.824163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:24:38.824358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:38.824406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-04T02:24:38.824488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:24:38.824540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 104:0, ProgressState, NeedSyncHive: 0 2026-01-04T02:24:38.824626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 240 -> 240 2026-01-04T02:24:38.825383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:24:38.825455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:24:38.825482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:24:38.825512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-04T02:24:38.825542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 6 2026-01-04T02:24:38.825610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-01-04T02:24:38.829401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:24:38.829466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-04T02:24:38.829572Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:24:38.829613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:24:38.829650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:24:38.829714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:24:38.829769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-01-04T02:24:38.829827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:24:38.829873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-04T02:24:38.829902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-04T02:24:38.830068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:24:38.830482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-04T02:24:38.832593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-04T02:24:38.832645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-04T02:24:38.833061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-04T02:24:38.833155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:24:38.833199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1271:3152] TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2026-01-04T02:24:38.838094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:24:38.838263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "ServerLess0" ServerlessComputeResourcesMode: EServerlessComputeResourcesModeExclusive } 2026-01-04T02:24:38.838310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 105:0, path /MyRoot/ServerLess0 2026-01-04T02:24:38.838472Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 105:0, explain: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2026-01-04T02:24:38.838530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, at schemeshard: 72057594046678944 2026-01-04T02:24:38.841091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:24:38.841285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid AlterExtSubDomain request: Unsupported: feature flag EnableServerlessExclusiveDynamicNodes is off, operation: ALTER DATABASE, path: /MyRoot/ServerLess0 TestModificationResult got TxId: 105, wait until txId: 105 |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> TStorageTenantTest::Boot [GOOD] >> TStorageTenantTest::CopyTableAndConcurrentSplit >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink >> KqpImmediateEffects::InsertExistingKey-UseSink [GOOD] >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull+UploadNull [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:24:34.932361Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:24:35.056382Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:24:35.056877Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:24:35.056969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:24:35.396403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:35.396527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:35.476323Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493472016251 != 1767493472016254 2026-01-04T02:24:35.492258Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:35.539959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:35.643169Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:24:36.002119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:36.017286Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:36.134130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:36.641418Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1070:2897], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:36.641520Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1081:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:36.641592Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:36.653472Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1085:2906], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:36.653668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:36.673849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:36.858900Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1084:2905], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:24:36.979626Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1141:2943] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:38.446498Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [1:1172:2964], TxId: 281474976715660, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3d488ze744rv7djg9143nq. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MThiNDQxNjQtOWU0Y2FiN2EtZjE3YzQ3MzAtM2E1ZGYy. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Source[0] fatal error: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 } 2026-01-04T02:24:38.456613Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:1172:2964], TxId: 281474976715660, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3d488ze744rv7djg9143nq. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MThiNDQxNjQtOWU0Y2FiN2EtZjE3YzQ3MzAtM2E1ZGYy. CurrentExecutionId : . Database : . DatabaseId : /Root. }. InternalError: INTERNAL_ERROR KIKIMR_CONSTRAINT_VIOLATION: {
: Fatal: Read from column index 1: got NULL from NOT NULL column, code: 2012 }. 2026-01-04T02:24:38.497387Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:1173:2965], TxId: 281474976715660, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3d488ze744rv7djg9143nq. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MThiNDQxNjQtOWU0Y2FiN2EtZjE3YzQ3MzAtM2E1ZGYy. CurrentExecutionId : . Database : . DatabaseId : /Root. }. InternalError: INTERNAL_ERROR DEFAULT_ERROR: {
: Error: Terminate execution }. 2026-01-04T02:24:38.525035Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MThiNDQxNjQtOWU0Y2FiN2EtZjE3YzQ3MzAtM2E1ZGYy, ActorId: [1:1068:2895], ActorState: ExecuteState, LegacyTraceId: 01ke3d488ze744rv7djg9143nq, Create QueryResponse for error on request, msg: status# INTERNAL_ERROR issues# { message: "Read from column index 1: got NULL from NOT NULL column" issue_code: 2012 }{ message: "Query invalidated on scheme/internal error during Data execution" issue_code: 2019 severity: 1 } trace_id# |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false >> TSchemeShardServerLess::ForbidInMemoryCacheModeInServerLess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertConflictInteractiveTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 19752, MsgBus: 6491 2026-01-04T02:23:58.534967Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326515564450728:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:58.570633Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:23:58.573451Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:23:59.629308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:59.629409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:59.686407Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:23:59.686531Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:59.754398Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:59.764244Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326515564450678:2081] 1767493438450563 != 1767493438450566 2026-01-04T02:23:59.951510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:59.953915Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:00.271313Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:00.271334Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-04T02:24:00.271340Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:00.271423Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:01.430719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:01.438254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:24:01.544918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:01.939937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:02.494981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:02.719877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:03.517788Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326515564450728:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:03.517926Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:07.541726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326554219158255:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:07.541870Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:07.542244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326554219158264:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:07.542316Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:08.507269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:08.557245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:08.605918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:08.653185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:08.717765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:08.830178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:08.885515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:09.035333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:09.189356Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326562809093753:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:09.189440Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:09.189879Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326562809093758:2498], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:09.189920Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326562809093759:2499], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:09.189947Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:09.200308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:09.232003Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326562809093762:2500], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:24:09.327207Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326562809093813:3797] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EP ... ution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:24:19.181476Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:19.181683Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:19.194050Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:19.195216Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591326599831262552:2081] 1767493458784336 != 1767493458784339 2026-01-04T02:24:19.195611Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:19.259923Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:19.490904Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:19.570739Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:19.570764Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:19.570773Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:19.570841Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:19.822463Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:20.744543Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:20.770772Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:24:20.780832Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:21.079730Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:21.607468Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:21.842088Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:23.830012Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591326599831262773:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:23.830080Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:30.766992Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326651370872057:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:30.767077Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:30.769048Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326651370872067:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:30.769119Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:30.863919Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:30.923965Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:30.971379Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:31.056160Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:31.126767Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:31.207592Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:31.298511Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:31.432196Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:31.615249Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326655665840252:2507], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:31.615379Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:31.617481Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326655665840257:2510], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:31.617543Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326655665840258:2511], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:31.617841Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:31.622203Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:31.666600Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591326655665840261:2512], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:24:31.721399Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591326655665840314:3815] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:34.047213Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:24:34.047236Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:34.958856Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |83.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD] >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true |83.1%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |83.1%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |83.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_transfer/ydb-core-tx-schemeshard-ut_transfer |83.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut |83.1%| [LD] {RESULT} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut >> TSchemeShardExtSubDomainTest::CreateWithOnlyDotsNotAllowed-SystemNamesProtection-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false |83.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/actorlib_impl/ut/ydb-core-actorlib_impl-ut >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::ForbidInMemoryCacheModeInServerLess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:24:36.353389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:24:36.353490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:36.353533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:24:36.353574Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:24:36.353629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:24:36.353682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:24:36.353775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:36.356064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:24:36.357094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:24:36.357424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:24:36.515402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:24:36.515466Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:36.533594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:24:36.538827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:24:36.539052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:24:36.547690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:24:36.547942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:24:36.548732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:24:36.549000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:24:36.559962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:36.560170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:24:36.561414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:24:36.561498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:36.561609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:24:36.561661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:24:36.561706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:24:36.561886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:24:36.877712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:36.878861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:36.879021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:36.879121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:36.879572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:36.879669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:36.879749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:36.879817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:36.879916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:36.880017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:36.880086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:36.880176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:36.880252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:36.880341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:36.880545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... pg_class' 2026-01-04T02:24:41.652086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720662 2026-01-04T02:24:41.652166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720662 2026-01-04T02:24:41.652214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720674 2026-01-04T02:24:41.652283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:866:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/partition_stats' 2026-01-04T02:24:41.652356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720675 2026-01-04T02:24:41.652446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720675 2026-01-04T02:24:41.652511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:866:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_queries_by_request_units_one_hour' 2026-01-04T02:24:41.663441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720674 2026-01-04T02:24:41.663608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:866:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_partitions_by_tli_one_hour' 2026-01-04T02:24:41.663672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720676 2026-01-04T02:24:41.663769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720678 2026-01-04T02:24:41.663843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720676 2026-01-04T02:24:41.663909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:866:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_partitions_one_hour' 2026-01-04T02:24:41.663959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720678 2026-01-04T02:24:41.664038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:866:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_queries_by_duration_one_minute' 2026-01-04T02:24:41.664081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720677 2026-01-04T02:24:41.664299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720677 2026-01-04T02:24:41.664381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:866:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/pg_tables' 2026-01-04T02:24:41.664413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720679 2026-01-04T02:24:41.668605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720680 2026-01-04T02:24:41.669913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720680 2026-01-04T02:24:41.670069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:866:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/auth_users' 2026-01-04T02:24:41.670166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720679 2026-01-04T02:24:41.670259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:866:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/auth_group_members' 2026-01-04T02:24:41.670312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720682 2026-01-04T02:24:41.670422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720682 2026-01-04T02:24:41.670497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:866:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/streaming_queries' 2026-01-04T02:24:41.670558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720681 2026-01-04T02:24:41.670618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720683 2026-01-04T02:24:41.670658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720684 2026-01-04T02:24:41.674410Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720681 2026-01-04T02:24:41.674568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:866:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/resource_pools' 2026-01-04T02:24:41.674639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720683 2026-01-04T02:24:41.674719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:866:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/auth_permissions' 2026-01-04T02:24:41.674789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976720684 2026-01-04T02:24:41.674865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:866:2820] at schemeshard: 72075186233409546 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/SharedDB/.sys/top_partitions_one_minute' 2026-01-04T02:24:41.674906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:866:2820] at schemeshard: 72075186233409546 Send TEvRosterUpdateFinished TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2026-01-04T02:24:41.687771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerlessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" PartitionConfig { ColumnFamilies { Id: 0 ColumnCacheMode: ColumnCacheModeTryKeepInMemory } ColumnFamilies { Id: 1 Name: "Other" ColumnCacheMode: ColumnCacheModeRegular } } } } TxId: 108 TabletId: 72075186233409549 , at schemeshard: 72075186233409549 2026-01-04T02:24:41.688061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/ServerlessDB/Table, pathId: , opId: 108:0, at schemeshard: 72075186233409549 2026-01-04T02:24:41.688344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 0 name: , at schemeshard: 72075186233409549 2026-01-04T02:24:41.690894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusInvalidParameter Reason: "CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 0 name: " TxId: 108 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2026-01-04T02:24:41.691245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot/ServerlessDB, subject: , status: StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. 
ColumnFamily id: 0 name: , operation: ALTER TABLE, path: /MyRoot/ServerlessDB/Table TestModificationResult got TxId: 108, wait until txId: 108 TestModificationResults wait txId: 109 2026-01-04T02:24:41.695132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/ServerlessDB" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table" PartitionConfig { ColumnFamilies { Id: 0 ColumnCacheMode: ColumnCacheModeRegular } ColumnFamilies { Id: 1 Name: "Other" ColumnCacheMode: ColumnCacheModeTryKeepInMemory } } } } TxId: 109 TabletId: 72075186233409549 , at schemeshard: 72075186233409549 2026-01-04T02:24:41.695409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/ServerlessDB/Table, pathId: , opId: 109:0, at schemeshard: 72075186233409549 2026-01-04T02:24:41.695680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 1 name: Other, at schemeshard: 72075186233409549 2026-01-04T02:24:41.822753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "CacheMode InMemory is not supported in serverless databases. ColumnFamily id: 1 name: Other" TxId: 109 SchemeshardId: 72075186233409549, at schemeshard: 72075186233409549 2026-01-04T02:24:41.823053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot/ServerlessDB, subject: , status: StatusInvalidParameter, reason: CacheMode InMemory is not supported in serverless databases. 
ColumnFamily id: 1 name: Other, operation: ALTER TABLE, path: /MyRoot/ServerlessDB/Table TestModificationResult got TxId: 109, wait until txId: 109 |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> DataShardSnapshots::MvccSnapshotReadLockedWrites-UseSink [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowListFromRange-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 21970, MsgBus: 20056 2026-01-04T02:24:06.423327Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326547489168487:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:06.423808Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:06.491550Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:24:07.117944Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:07.170693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:07.170798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:07.428036Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:07.458302Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326547489168356:2081] 1767493446357508 != 1767493446357511 2026-01-04T02:24:07.489846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:07.502754Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:07.511594Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:07.843257Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:07.843284Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:07.843290Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:07.843400Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:08.921916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:08.934008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:24:09.040977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:09.325772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:09.679822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:09.838496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:11.419015Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326547489168487:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:11.422175Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:16.675947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326590438843246:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:16.676074Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:16.676947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326590438843256:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:16.677012Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:17.395326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.474616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.589834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.669040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.714015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.765904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.825421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:17.901776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:18.094392Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326599028778723:2497], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:18.094480Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:18.094836Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326599028778728:2500], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:18.094880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326599028778729:2501], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:18.094984Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:18.099236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:18.126978Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326599028778732:2502], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:24:18.239952Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326599028778783:3790] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: E ... emeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:21.986510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:24:21.986539Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 20359, MsgBus: 64246 2026-01-04T02:24:25.913062Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:25.913411Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:24:26.177891Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:26.235107Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591326630161304227:2081] 1767493465785858 != 1767493465785861 2026-01-04T02:24:26.252792Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:26.254836Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:26.290507Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:26.293509Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:26.688298Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:26.688326Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:26.688333Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:26.688434Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:26.699309Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:26.845765Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:27.977683Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:28.016331Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:28.230857Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:28.979250Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:29.193856Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:32.770036Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326660226077184:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:32.770151Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:32.777946Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326660226077194:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:32.778045Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:33.146986Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:33.283749Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:33.399560Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:33.527750Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:33.657976Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:33.712129Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:33.771666Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:33.895607Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:34.082189Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326668816012674:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.082304Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.082837Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326668816012679:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.082888Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326668816012680:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.082920Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.089520Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:34.121931Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591326668816012683:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:24:34.205122Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591326668816012737:3791] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:37.864590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertExistingKey-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10939, MsgBus: 6234 2026-01-04T02:23:45.530616Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326459831300777:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:45.531340Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:23:46.270860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:46.270953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:46.350310Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:23:46.396339Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:46.420429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:46.421197Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326459831300722:2081] 1767493425463017 != 1767493425463020 2026-01-04T02:23:46.566849Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:46.661579Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:23:46.962453Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:23:46.962477Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:23:46.962487Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:23:46.962557Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:23:47.970268Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:47.986153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:23:48.179304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:48.569086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:48.913497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:49.080625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:50.518557Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326459831300777:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:50.518607Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:23:52.791792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326489896073693:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:52.791900Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:52.792682Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326489896073703:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:52.792756Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:53.905270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:53.997418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:54.068289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:54.134344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:54.187817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:54.272654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:54.371409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:54.503893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:54.932330Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326498486009201:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:54.932400Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:54.932836Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326498486009206:2497], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:54.932870Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326498486009207:2498], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:54.932968Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:54.936889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:23:55.012097Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326498486009210:2499], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:23:55.117317Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326502780976558:3793] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:23:59.633442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... maybe) 2026-01-04T02:24:21.696212Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:21.696281Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:22.007851Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:22.691989Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:22.700363Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:24:22.714202Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:22.851578Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:23.353150Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:23.567225Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:26.010226Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591326613133844214:2265];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:26.010307Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:32.777972Z 
node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326660378486164:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:32.778061Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:32.778549Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326660378486174:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:32.778598Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:33.010132Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:33.146427Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:33.215876Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:33.312125Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:33.395863Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:33.495980Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:33.579828Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:33.718722Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:34.020398Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326668968421674:2509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.020487Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.020937Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326668968421679:2512], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.020983Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326668968421680:2513], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.021018Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.027147Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:34.054538Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591326668968421683:2514], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:24:34.124596Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591326668968421736:3831] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:36.220471Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:24:36.220497Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:38.524062Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:40.048997Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7591326694738226014:2605], TxId: 281474976715678, task: 1. Ctx: { TraceId : 01ke3d4aqyds6e9q147fjs7b29. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZTU4OTJhN2ItN2EzODk1MDAtMzc5OTMzOWEtNTI4ZTMzNWM=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-04T02:24:40.049719Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7591326694738226015:2606], TxId: 281474976715678, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3d4aqyds6e9q147fjs7b29. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZTU4OTJhN2ItN2EzODk1MDAtMzc5OTMzOWEtNTI4ZTMzNWM=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7591326694738226011:2566], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-04T02:24:40.050410Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZTU4OTJhN2ItN2EzODk1MDAtMzc5OTMzOWEtNTI4ZTMzNWM=, ActorId: [3:7591326686148291234:2566], ActorState: ExecuteState, LegacyTraceId: 01ke3d4aqyds6e9q147fjs7b29, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> DataShardSnapshots::LockedWriteCleanupOnSplit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 22923, MsgBus: 29301 2026-01-04T02:23:52.446288Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326489828104278:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:52.446361Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:23:53.245940Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:23:53.451852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:53.451979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:53.558965Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:53.645187Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:53.645905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:23:53.645999Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:23:53.687621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:54.086865Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:23:54.086898Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:23:54.086905Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: 
failed to initialize from file: (empty maybe) 2026-01-04T02:23:54.086987Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:23:54.112374Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:23:55.974836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:56.168385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:56.616320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:57.100797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:57.381722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:57.638097Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326489828104278:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:23:57.906591Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:01.799293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326528482811811:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:01.799407Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:01.800066Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326528482811821:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:01.800148Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:02.627672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:02.695985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:02.804379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:02.866492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:02.964521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:03.066136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:03.153154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:03.334600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:03.574063Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326537072747318:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:03.574153Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:03.574200Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326537072747323:2498], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:03.574319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326537072747325:2499], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:03.574353Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:03.578973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:03.639086Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326537072747326:2500], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:24:03.728597Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326537072747378:3805] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:07.895956Z node 1 :GLOBAL WARN: log.cpp:841: fline=ev ... Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:28.624681Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:28.624724Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:28.624732Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:28.624823Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:28.633338Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:29.582251Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:30.638896Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:30.645160Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:24:30.656293Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:30.828612Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:31.197030Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:31.351051Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:33.407602Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591326642133109642:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:33.407695Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:34.194609Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326667902915268:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.194709Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.195216Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326667902915278:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.195258Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.484599Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:34.548522Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:34.644130Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:34.740226Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:34.851204Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:35.032924Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:35.161473Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:35.374554Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:35.723331Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326672197883453:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:35.723434Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:35.727841Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326672197883458:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:35.727945Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591326672197883459:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:35.728000Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:35.733993Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:35.784121Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591326672197883462:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:24:35.886275Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591326672197883515:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:40.264245Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7591326693672720344:2549], TxId: 281474976715674, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3d4b2v6w58gb9pd9nnsnqz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=MWE3MjdlZjItZmY3M2M0ODEtMWQ0ZmIzNWQtNGQwMGIxMTY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-04T02:24:40.265129Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7591326693672720345:2550], TxId: 281474976715674, task: 2. Ctx: { TraceId : 01ke3d4b2v6w58gb9pd9nnsnqz. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=MWE3MjdlZjItZmY3M2M0ODEtMWQ0ZmIzNWQtNGQwMGIxMTY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [3:7591326693672720341:2535], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-04T02:24:40.265590Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=MWE3MjdlZjItZmY3M2M0ODEtMWQ0ZmIzNWQtNGQwMGIxMTY=, ActorId: [3:7591326689377753000:2535], ActorState: ExecuteState, LegacyTraceId: 01ke3d4b2v6w58gb9pd9nnsnqz, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterCantChangeSetParams-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSchemeShard-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |83.2%| [LD] {RESULT} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut/ydb-core-load_test-ut |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |83.2%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest >> DataShardSnapshots::MvccSnapshotLockedWritesWithoutConflicts+UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink >> TSchemeShardExtSubDomainTest::CreateWithExtraPathSymbolsAllowed-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::Drop >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |83.2%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |83.2%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> KqpSnapshotIsolation::TReadOnlyOltp |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |83.2%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |83.2%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |83.2%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> 
TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::Drop [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false >> TTopicApiDescribes::DescribeTopic >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true >> TTopicApiDescribes::DescribeConsumer >> DataShardSnapshots::LockedWriteDistributedCommitFreeze-UseSink [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive >> TSchemeShardExtSubDomainTest::AlterRequiresParamCombinations-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] >> TIcNodeCache::GetNodesInfoTest >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink |83.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |83.2%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD] >> TSchemeShardExtSubDomainTest::SysViewProcessorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] >> TTopicApiDescribes::GetLocalDescribe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateThenDropChangesParent-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:24:38.110223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:24:38.110340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:38.110384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:24:38.110424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:24:38.110463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: 
type TxMergeTablePartition, limit 10000 2026-01-04T02:24:38.110495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:24:38.110563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:38.110639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:24:38.111518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:24:38.111802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:24:38.253158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:24:38.253235Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:38.286609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:24:38.300197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:24:38.300453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:24:38.335856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:24:38.336166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:24:38.336951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:24:38.337243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:24:38.347320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:38.347539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:24:38.349015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:24:38.349098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:38.349208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:24:38.349259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:24:38.349347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:24:38.349498Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:24:38.609394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:38.610477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:38.610615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:38.610697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:38.610792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:38.610887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:38.610975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:38.611052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:38.611147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:38.611238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:38.611358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:38.611438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:38.611516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:38.611596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:38.611678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-04T02:24:51.663204Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 134 2026-01-04T02:24:51.664234Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:24:51.664742Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:24:51.666749Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:24:51.666842Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:137: TDropExtSubdomain TDeleteExternalShards, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:24:51.667019Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 134 -> 135 2026-01-04T02:24:51.667326Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:24:51.667415Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:24:51.669444Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:24:51.669489Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:24:51.669640Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-04T02:24:51.669836Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:51.669890Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-04T02:24:51.669934Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:211:2212], at schemeshard: 72057594046678944, txId: 102, path id: 2 2026-01-04T02:24:51.670242Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:24:51.670290Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:29: [72057594046678944] TDeleteSubdomainSystemShards opId# 102:0 ProgressState 2026-01-04T02:24:51.670345Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 135 -> 240 2026-01-04T02:24:51.671308Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:24:51.671405Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:24:51.671442Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:24:51.671477Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-04T02:24:51.671514Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:24:51.672274Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:24:51.672374Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:24:51.672407Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:24:51.672439Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2026-01-04T02:24:51.672476Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-04T02:24:51.672546Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-04T02:24:51.675613Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:24:51.675677Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:24:51.675845Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:24:51.675910Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:24:51.675966Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:24:51.676020Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:24:51.676073Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:24:51.676142Z node 7 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:24:51.676194Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:24:51.676241Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:24:51.676334Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-04T02:24:51.676974Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:24:51.677038Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:24:51.677128Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-04T02:24:51.678020Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:24:51.678117Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:24:51.678203Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:24:51.678794Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:24:51.679221Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:24:51.681624Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:24:51.681755Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:24:51.682052Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:24:51.682115Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:24:51.682591Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:24:51.682712Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:24:51.682775Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter 
[7:344:2334] TestWaitNotification: OK eventTxId 102 2026-01-04T02:24:51.683397Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:24:51.683641Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 292us result status StatusPathDoesNotExist 2026-01-04T02:24:51.683824Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> DataShardSnapshots::ReadIteratorLocalSnapshotThenRestart [GOOD] >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite >> TInterconnectTest::TestSimplePingPong >> TTopicApiDescribes::GetPartitionDescribe >> TInterconnectTest::TestSimplePingPong [GOOD] >> TInterconnectTest::TestSubscribeByFlag >> TInterconnectTest::TestSubscribeByFlag [GOOD] >> TInterconnectTest::TestReconnect >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst >> TInterconnectTest::TestReconnect [GOOD] >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true |83.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |83.2%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut |83.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ut/ydb-tools-stress_tool-ut >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/workload_service/ut/unittest >> KqpWorkloadServiceSubscriptions::TestResourcePoolSubscriptionAfterAclChange [GOOD] Test command err: 2026-01-04T02:18:00.278177Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591324978612676517:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:00.278522Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:00.299780Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.006675s 2026-01-04T02:18:00.393227Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591324977648977690:2219];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:00.393263Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:00.534741Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:18:00.544963Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:00.603907Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:00.723160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:00.723303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:00.724296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:00.724378Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:00.743526Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:18:00.743787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:00.744053Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:00.783041Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:00.786220Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:00.888839Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:00.907977Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:00.908014Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:00.908034Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:00.908125Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:01.095946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:01.313199Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:01.404373Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:03.387919Z node 2 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-04T02:18:03.388262Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7591324990533879713:2293], Start check tables existence, number paths: 2 2026-01-04T02:18:03.408867Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7591324990533879713:2293], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-04T02:18:03.408940Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7591324990533879713:2293], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-04T02:18:03.408983Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7591324990533879713:2293], Successfully finished 2026-01-04T02:18:03.409141Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-04T02:18:03.410334Z node 2 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-04T02:18:03.410370Z node 2 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-04T02:18:03.413474Z node 2 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 2 2026-01-04T02:18:03.748345Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-04T02:18:03.753574Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=YTRhMWFkODUtZDgyNzI5ZTQtNDYyOWFiZmMtMTRjNDk4ZGU=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# YTRhMWFkODUtZDgyNzI5ZTQtNDYyOWFiZmMtMTRjNDk4ZGU= temp_dir_name# d54c9edb-4633-aeb8-0b4a-1b8e500c1288 trace_id# 2026-01-04T02:18:03.754155Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-04T02:18:03.754181Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-04T02:18:03.754265Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324991497579540:2324], Start check tables existence, number paths: 2 2026-01-04T02:18:03.768235Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=YTRhMWFkODUtZDgyNzI5ZTQtNDYyOWFiZmMtMTRjNDk4ZGU=, ActorId: [1:7591324991497579543:2327], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:18:03.768319Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: 
table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324991497579540:2324], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-04T02:18:03.768371Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324991497579540:2324], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-04T02:18:03.768395Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [1:7591324991497579540:2324], Successfully finished 2026-01-04T02:18:03.768565Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-04T02:18:03.768665Z node 1 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 2 2026-01-04T02:18:03.769142Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324991497579570:2789], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-04T02:18:03.772405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:18:03.775400Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324991497579570:2789], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976720658 2026-01-04T02:18:03.775575Z node 1 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324991497579570:2789], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-04T02:18:03.791817Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324991497579570:2789], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-01-04T02:18:03.881373Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324991497579570:2789], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-04T02:18:03.924864Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591324991497579646:2846] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:18:03.925032Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324991497579570:2789], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-04T02:18:03.928091Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=ZjhmZDExY2MtZGNhZmIyZjctZGU0YjlhYzktYTA3Mzg4ZjE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ZjhmZDExY2MtZGNhZmIyZjctZGU0YjlhYzktYTA3Mzg4ZjE= temp_dir_name# 551c4353-440e-446f-cb5a-65867ac57781 trace_id# 2026-01-04T02:18:03.928205Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=ZjhmZDExY2MtZGNhZmIyZjctZGU0YjlhYzktYTA3Mzg4ZjE=, ActorId: [1:7591324991497579659:2330], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:18:03.928452Z node 1 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: sample_pool_id 2026-01-04T02:18:03.928476Z node 1 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-04T02:18:03.92853 ... 
ad_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-04T02:24:23.993985Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=14&id=ZjNlZmQ4ZTMtOTA4M2VkNDctZWE0MGJlN2MtNTRiZGQyMmE=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ZjNlZmQ4ZTMtOTA4M2VkNDctZWE0MGJlN2MtNTRiZGQyMmE= temp_dir_name# 9746a4f9-42a4-9a89-b55a-32babefa7d8b trace_id# 2026-01-04T02:24:23.994627Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [14:7591326622533894609:2335], Describe table /Root/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-04T02:24:23.994704Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [14:7591326622533894609:2335], Describe table /Root/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-04T02:24:23.994748Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [14:7591326622533894609:2335], Successfully finished 2026-01-04T02:24:23.995235Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=14&id=ZjNlZmQ4ZTMtOTA4M2VkNDctZWE0MGJlN2MtNTRiZGQyMmE=, ActorId: [14:7591326622533894634:2339], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:24:23.996382Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-04T02:24:24.001896Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7591326622533894638:2525], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-04T02:24:24.009177Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:24.012867Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:421: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7591326622533894638:2525], DatabaseId: Root, PoolId: sample_pool_id, Subscribe on create pool tx: 281474976710658 2026-01-04T02:24:24.015904Z node 14 :KQP_WORKLOAD_SERVICE TRACE: scheme_actors.cpp:344: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7591326622533894638:2525], DatabaseId: Root, PoolId: sample_pool_id, Tablet to pipe successfully connected 2026-01-04T02:24:24.076535Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7591326622533894638:2525], DatabaseId: Root, PoolId: sample_pool_id, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:24:24.177560Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:379: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7591326622533894638:2525], DatabaseId: Root, PoolId: sample_pool_id, Start pool creating 2026-01-04T02:24:24.190947Z node 14 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [14:7591326626828861985:2557] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/sample_pool_id\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:24.191367Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:472: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7591326622533894638:2525], DatabaseId: Root, PoolId: sample_pool_id, Pool successfully created 2026-01-04T02:24:24.191953Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: Root, PoolId: sample_pool_id 2026-01-04T02:24:24.191983Z node 14 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id Root 2026-01-04T02:24:24.192073Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7591326626828861992:2340], DatabaseId: Root, PoolId: sample_pool_id, Start pool fetching 2026-01-04T02:24:24.195316Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:223: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7591326626828861992:2340], DatabaseId: Root, PoolId: sample_pool_id, Pool info successfully fetched 2026-01-04T02:24:24.195422Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:260: [WorkloadService] [Service] Successfully fetched pool sample_pool_id, DatabaseId: Root 2026-01-04T02:24:24.195448Z node 14 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:578: [WorkloadService] [Service] Creating new handler for pool /Root/sample_pool_id 2026-01-04T02:24:24.225130Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:466: [WorkloadService] [TPoolHandlerActorBase] ActorId: [14:7591326626828862001:2341], DatabaseId: Root, PoolId: sample_pool_id, Subscribed on schemeboard notifications for path: [OwnerId: 72057594046644480, LocalPathId: 41] 2026-01-04T02:24:24.229781Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [14:7591326626828862001:2341], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2026-01-04T02:24:24.409738Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-04T02:24:24.409777Z node 14 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:568: [WorkloadService] [Service] Creating new database state for id /Root 2026-01-04T02:24:24.409883Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7591326626828862013:2343], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-04T02:24:24.409986Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-04T02:24:24.410146Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: 
ydb://session/3?node_id=14&id=ZjNlZmQ4ZTMtOTA4M2VkNDctZWE0MGJlN2MtNTRiZGQyMmE=, ActorId: [14:7591326622533894634:2339], ActorState: ReadyState, LegacyTraceId: 01ke3d3w8s6ysy738bpm824a6k, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DDL text# GRANT ALL ON `/Root/.metadata/workload_manager/pools/sample_pool_id` TO `test@user`; rpc_actor# [0:0:0] database# /Root database_id# /Root pool_id# default trace_id# 2026-01-04T02:24:24.413533Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7591326626828862013:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:24.426094Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:24.426306Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:164: [WorkloadService] [Service] Received subscription request, DatabaseId: /Root, PoolId: default 2026-01-04T02:24:24.426393Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:185: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7591326626828862022:2344], DatabaseId: /Root, PoolId: default, Start pool fetching 2026-01-04T02:24:24.427409Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7591326626828862022:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:24.427507Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:25.300273Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:24:25.304452Z node 14 :KQP_WORKLOAD_SERVICE DEBUG: pool_handlers_actors.cpp:274: [WorkloadService] [TPoolHandlerActorBase] ActorId: [14:7591326626828862001:2341], DatabaseId: Root, PoolId: sample_pool_id, Got watch notification 2026-01-04T02:24:25.378486Z node 14 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=14&id=ZjNlZmQ4ZTMtOTA4M2VkNDctZWE0MGJlN2MtNTRiZGQyMmE=, ActorId: [14:7591326622533894634:2339], ActorState: ExecuteState, LegacyTraceId: 01ke3d3w8s6ysy738bpm824a6k, Cleanup start is_final# false has_cleanup_ctx# true transactions_to_be_aborted_size# 0 worker_id# [14:7591326626828862024:2339] workload_service_cleanup# false trace_id# 2026-01-04T02:24:25.383382Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=14&id=ZjNlZmQ4ZTMtOTA4M2VkNDctZWE0MGJlN2MtNTRiZGQyMmE=, ActorId: [14:7591326622533894634:2339], ActorState: CleanupState, LegacyTraceId: 01ke3d3w8s6ysy738bpm824a6k, EndCleanup is_final# false trace_id# 2026-01-04T02:24:25.383586Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=14&id=ZjNlZmQ4ZTMtOTA4M2VkNDctZWE0MGJlN2MtNTRiZGQyMmE=, ActorId: [14:7591326622533894634:2339], ActorState: CleanupState, LegacyTraceId: 01ke3d3w8s6ysy738bpm824a6k, Sent query response back to proxy proxy_request_id# 3 proxy_id# [14:7591326575289253632:2212] trace_id# 2026-01-04T02:24:25.542532Z node 14 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=14&id=ZjNlZmQ4ZTMtOTA4M2VkNDctZWE0MGJlN2MtNTRiZGQyMmE=, ActorId: [14:7591326622533894634:2339], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-04T02:24:25.542602Z node 14 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=14&id=ZjNlZmQ4ZTMtOTA4M2VkNDctZWE0MGJlN2MtNTRiZGQyMmE=, ActorId: [14:7591326622533894634:2339], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:24:25.542651Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=14&id=ZjNlZmQ4ZTMtOTA4M2VkNDctZWE0MGJlN2MtNTRiZGQyMmE=, ActorId: [14:7591326622533894634:2339], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-04T02:24:25.542695Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=14&id=ZjNlZmQ4ZTMtOTA4M2VkNDctZWE0MGJlN2MtNTRiZGQyMmE=, ActorId: [14:7591326622533894634:2339], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:24:25.542799Z node 14 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=14&id=ZjNlZmQ4ZTMtOTA4M2VkNDctZWE0MGJlN2MtNTRiZGQyMmE=, ActorId: [14:7591326622533894634:2339], ActorState: unknown state, Session actor destroyed trace_id# |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/workload_service/ut/unittest >> 
TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true >> TSchemeShardSubDomainTest::SchemeLimitsRejects ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_SingleRowWithValueCast-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6981, MsgBus: 12993 2026-01-04T02:24:15.465323Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326585167336886:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:15.465387Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:16.505955Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:16.561941Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:16.605217Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:16.605307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:16.895197Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:24:16.919665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:17.185047Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:17.186297Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326585167336674:2081] 1767493455435035 != 1767493455435038 2026-01-04T02:24:17.190789Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:17.639488Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:17.639509Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:17.639516Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:17.639594Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:19.006133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:19.033552Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:24:19.144431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:19.775703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:20.304519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:20.468258Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326585167336886:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:20.468423Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:20.475948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:25.740646Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326628117011573:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:25.740754Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:25.741491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326628117011583:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:25.741539Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:26.667388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:26.822825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:26.942789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:27.028011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:27.109309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:27.219379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:27.271481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:27.360337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:27.612030Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326636706947059:2498], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:27.612082Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:27.612410Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326636706947064:2501], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:27.612452Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326636706947065:2502], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:27.612551Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:27.616538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:27.642009Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326636706947068:2503], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:24:27.759606Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326636706947119:3804] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: ... ution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:24:35.798663Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:35.841975Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:35.844932Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591326671782050502:2081] 1767493475542192 != 1767493475542195 2026-01-04T02:24:35.876008Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:35.876087Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:36.009054Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:36.108457Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:36.370491Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:36.370516Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:36.370522Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:36.370589Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:36.537290Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:37.545961Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:37.552314Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:24:37.566379Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:37.745511Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:38.026059Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:38.197907Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:40.560341Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591326671782050612:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:40.560411Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:45.354825Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326714731725376:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:45.354912Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:45.355311Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326714731725386:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:45.355354Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:45.532631Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:45.603024Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:45.651833Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:45.773377Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:45.818161Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:45.902936Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:45.995180Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:46.079276Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:46.257527Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326719026693553:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:46.257667Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:46.258151Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326719026693558:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:46.258216Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326719026693559:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:46.258505Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:46.265360Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:46.316212Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591326719026693562:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:24:46.395418Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591326719026693615:3787] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:50.140062Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:50.743909Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:24:50.743935Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |83.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CopyTableAndConcurrentSplit [GOOD] Test command err: 2026-01-04T02:24:33.542710Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326665369658611:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:33.542932Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:33.580942Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:24:34.385907Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:24:34.607153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:34.607255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:34.670861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:34.903015Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:35.006087Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326665369658506:2081] 1767493473448392 != 1767493473448395 2026-01-04T02:24:35.008365Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:35.013984Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:24:35.742027Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591326665369658737:2095] Handle TEvNavigate describe path dc-1 
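
The test blocks above repeat the same recovery sequence: resource-pool creation first returns a transient "Transaction ... completed, doublechecking" error, the workload service logs "Scheduled retry for error" and retries after a delay, and the retry is then accepted even though the path already exists ("path exist, request accepts it"). A minimal generic sketch of that schedule-retry-then-treat-exists-as-success flow; the names (`CreatePool`, `Status`) are invented for illustration and are not the actual YDB actors or APIs:

```cpp
// Generic illustration of the retry pattern visible in the log above.
// Not YDB code: CreatePool is a stand-in for the scheme operation.
#include <chrono>
#include <iostream>
#include <thread>

enum class Status { Ok, AlreadyExists, Transient };

// Stand-in for the pool-creation request; the first attempt fails with a
// transient error, the retry finds the path already created.
Status CreatePool(int attempt) {
    return attempt == 0 ? Status::Transient : Status::AlreadyExists;
}

bool CreatePoolWithRetry(int maxAttempts, std::chrono::milliseconds backoff) {
    for (int attempt = 0; attempt < maxAttempts; ++attempt) {
        switch (CreatePool(attempt)) {
            case Status::Ok:
            case Status::AlreadyExists:
                // "path exist, request accepts it" -> treated as success.
                return true;
            case Status::Transient:
                // "Scheduled retry for error: ... doublechecking"
                std::cerr << "transient error, retrying in "
                          << backoff.count() << "ms\n";
                std::this_thread::sleep_for(backoff);
                backoff *= 2;  // simple exponential backoff
        }
    }
    return false;
}

int main() {
    std::cout << (CreatePoolWithRetry(5, std::chrono::milliseconds(100))
                      ? "pool ready\n"
                      : "gave up\n");
}
```
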
2026-01-04T02:24:35.742069Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591326673959593890:2449] HANDLE EvNavigateScheme dc-1 2026-01-04T02:24:35.742201Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591326665369658786:2119], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:24:35.742291Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591326669664626348:2292][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591326665369658786:2119], cookie# 1 2026-01-04T02:24:35.743953Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591326669664626394:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591326669664626391:2292], cookie# 1 2026-01-04T02:24:35.743985Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591326669664626395:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591326669664626392:2292], cookie# 1 2026-01-04T02:24:35.743999Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591326669664626396:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591326669664626393:2292], cookie# 1 2026-01-04T02:24:35.744056Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591326665369658474:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591326669664626394:2292], cookie# 1 2026-01-04T02:24:35.744086Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591326665369658477:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591326669664626395:2292], cookie# 1 2026-01-04T02:24:35.744116Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591326665369658480:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591326669664626396:2292], cookie# 1 2026-01-04T02:24:35.744158Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591326669664626394:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591326665369658474:2049], cookie# 1 2026-01-04T02:24:35.744175Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591326669664626395:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591326665369658477:2052], cookie# 1 2026-01-04T02:24:35.744199Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591326669664626396:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591326665369658480:2055], cookie# 1 2026-01-04T02:24:35.744242Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591326669664626348:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591326669664626391:2292], cookie# 1 2026-01-04T02:24:35.744285Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591326669664626348:2292][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, 
failures# 0 2026-01-04T02:24:35.744307Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591326669664626348:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591326669664626392:2292], cookie# 1 2026-01-04T02:24:35.744325Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591326669664626348:2292][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:24:35.744366Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591326669664626348:2292][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591326669664626393:2292], cookie# 1 2026-01-04T02:24:35.744383Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591326669664626348:2292][/dc-1] Sync cookie mismatch: sender# [1:7591326669664626393:2292], cookie# 1, current cookie# 0 2026-01-04T02:24:35.744433Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591326665369658786:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:24:35.751077Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591326665369658786:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591326669664626348:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:24:35.751205Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591326665369658786:2119], cacheItem# { Subscriber: { Subscriber: [1:7591326669664626348:2292] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:24:35.753792Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591326673959593891:2450], recipient# [1:7591326673959593890:2449], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:24:35.754178Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591326673959593890:2449] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 
ErrorCount# 0 2026-01-04T02:24:35.817033Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591326673959593890:2449] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:24:35.833312Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591326673959593890:2449] Handle TEvDescribeSchemeResult Forward to# [1:7591326673959593889:2448] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:24:35.836044Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591326665369658737:2095] Handle TEvProposeTransaction 2026-01-04T02:24:35.836066Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591326665369658737:2095] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:24:35.836158Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591326665369658737:2095] Cookie# 0 userReqId# "" tx ... 
: { Subscriber: [4:7591326709247052301:2312] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:24:51.758644Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7591326743606791457:2808], recipient# [4:7591326743606791456:2376], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:24:51.923092Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7591326704952084633:2106], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:24:51.923232Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7591326704952084633:2106], cacheItem# { Subscriber: { Subscriber: [4:7591326743606791394:2802] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:24:51.923278Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7591326704952084633:2106], cacheItem# { Subscriber: { Subscriber: [4:7591326743606791395:2803] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:24:51.923372Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7591326743606791458:2809], recipient# [4:7591326743606791388:2372], result# { 
ErrorCount: 2 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_0/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-04T02:24:51.923961Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7591326743606791388:2372], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:24:51.963720Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7591326743606791380:2800][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7591326743606791381:2800] 2026-01-04T02:24:51.963784Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591326743606791380:2800][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7591326704952084633:2106], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:51.963809Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7591326743606791380:2800][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7591326743606791382:2800] 2026-01-04T02:24:51.963829Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591326743606791380:2800][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7591326704952084633:2106], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:51.963847Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7591326743606791380:2800][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7591326743606791383:2800] 2026-01-04T02:24:51.963868Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591326743606791380:2800][/dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7591326704952084633:2106], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:51.967337Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7591326743606791395:2803][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7591326743606791396:2803] 2026-01-04T02:24:51.967406Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591326743606791395:2803][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7591326704952084633:2106], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:51.967428Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7591326743606791395:2803][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { 
Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7591326743606791399:2803] 2026-01-04T02:24:51.967448Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591326743606791395:2803][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7591326704952084633:2106], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:51.967464Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7591326743606791395:2803][/dc-1/USER_0/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7591326743606791401:2803] 2026-01-04T02:24:51.967483Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591326743606791395:2803][/dc-1/USER_0/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7591326704952084633:2106], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:51.974750Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7591326743606791394:2802][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7591326743606791397:2802] 2026-01-04T02:24:51.974820Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591326743606791394:2802][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7591326704952084633:2106], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:51.974841Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7591326743606791394:2802][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7591326743606791398:2802] 2026-01-04T02:24:51.974860Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591326743606791394:2802][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7591326704952084633:2106], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:24:51.974877Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7591326743606791394:2802][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7591326743606791400:2802] 2026-01-04T02:24:51.974897Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591326743606791394:2802][/dc-1/USER_0/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7591326704952084633:2106], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: 
DomainId: AbandonedSchemeShards: there are 0 elements } >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TSchemeShardExtSubDomainTest::AlterNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_EnableAlterDatabase >> TInterconnectTest::OldFormat ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestSubscribeAndUnsubsribeByEvent [GOOD] Test command err: 2026-01-04T02:24:55.408532Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @207 (null) -> PendingActivation 2026-01-04T02:24:55.408641Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:43: Proxy [6:10:2048] [node 5] ICP01 ready to work 2026-01-04T02:24:55.408844Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @207 (null) -> PendingActivation 2026-01-04T02:24:55.408888Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:43: Proxy [5:1:2048] [node 6] ICP01 ready to work 2026-01-04T02:24:55.409354Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @99 PendingActivation -> PendingNodeInfo 2026-01-04T02:24:55.410615Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:120: Proxy [5:1:2048] [node 6] ICP02 configured for host ::1:19578 2026-01-04T02:24:55.410721Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @489 PendingNodeInfo -> PendingConnection 2026-01-04T02:24:55.411106Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:747: Handshake [5:21:2058] [node 6] ICH01 starting outgoing handshake 2026-01-04T02:24:55.411249Z node 5 :INTERCONNECT DEBUG: interconnect_resolve.cpp:127: ICR04 Host: ::1, RESOLVED address 2026-01-04T02:24:55.411957Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:751: Handshake [5:21:2058] [node 6] ICH05 connected to peer 2026-01-04T02:24:55.412452Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_server.cpp:104: ICListener: [0:0:0] ICL04 Accepted from: ::1:35256 2026-01-04T02:24:55.412812Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:1037: Handshake [6:23:2058] [node 0] ICH02 starting incoming handshake 2026-01-04T02:24:55.413674Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [5:21:2058] [node 6] ICH07 SendExBlock ExRequest Protocol: 2 ProgramPID: 414668 ProgramStartTime: 23672895821796 Serial: 2955285951 ReceiverNodeId: 6 SenderActorId: "[5:2955285951:0]" SenderHostName: "::1" ReceiverHostName: "::1" UUID: "Cluster for process with id: 414668" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 414668" AcceptUUID: "Cluster for process with id: 414668" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "i\363\"\267\\\023\024\335h\202\232i\261\373\\\274\027*\006\276{\037L\221\251\203\363h \365\3628" RequestXxhash: true RequestXdcShuffle: true 2026-01-04T02:24:55.414227Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [6:23:2058] [node 5] ICH07 ReceiveExBlock ExRequest Protocol: 2 ProgramPID: 414668 ProgramStartTime: 23672895821796 Serial: 2955285951 ReceiverNodeId: 6 SenderActorId: "[5:2955285951:0]" SenderHostName: "::1" ReceiverHostName: "::1" 
UUID: "Cluster for process with id: 414668" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 414668" AcceptUUID: "Cluster for process with id: 414668" } RequestModernFrame: true RequestAuthOnly: false RequestExtendedTraceFmt: true RequestExternalDataChannel: true HandshakeId: "i\363\"\267\\\023\024\335h\202\232i\261\373\\\274\027*\006\276{\037L\221\251\203\363h \365\3628" RequestXxhash: true RequestXdcShuffle: true 2026-01-04T02:24:55.414298Z node 6 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [6:23:2058] [node 5] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-01-04T02:24:55.414683Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @99 PendingActivation -> PendingNodeInfo 2026-01-04T02:24:55.415718Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:120: Proxy [6:10:2048] [node 5] ICP02 configured for host ::1:25920 2026-01-04T02:24:55.415765Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:263: Proxy [6:10:2048] [node 5] ICP17 incoming handshake (actor [6:23:2058]) 2026-01-04T02:24:55.415803Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @489 PendingNodeInfo -> PendingConnection 2026-01-04T02:24:55.415854Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:211: Proxy [6:10:2048] [node 5] ICP07 issued incoming handshake reply 2026-01-04T02:24:55.415906Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:220: Proxy [6:10:2048] [node 5] ICP08 No active sessions, becoming PendingConnection 2026-01-04T02:24:55.415944Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @221 PendingConnection -> PendingConnection 2026-01-04T02:24:55.416335Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [6:23:2058] [node 5] ICH07 SendExBlock ExReply Success { Protocol: 2 ProgramPID: 414668 ProgramStartTime: 23672908917500 Serial: 2481422210 SenderActorId: "[6:2481422210:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 414668" AcceptUUID: "Cluster for process with id: 414668" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true RdmaErr: "Rdma is not ready on the incomming side" } 2026-01-04T02:24:55.416801Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [5:21:2058] [node 6] ICH07 ReceiveExBlock ExReply Success { Protocol: 2 ProgramPID: 414668 ProgramStartTime: 23672908917500 Serial: 2481422210 SenderActorId: "[6:2481422210:0]" ClusterUUIDs { ClusterUUID: "Cluster for process with id: 414668" AcceptUUID: "Cluster for process with id: 414668" } StartEncryption: false UseModernFrame: true AuthOnly: false UseExtendedTraceFmt: true UseExternalDataChannel: true UseXxhash: true UseXdcShuffle: true RdmaErr: "Rdma is not ready on the incomming side" } 2026-01-04T02:24:55.416864Z node 5 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [5:21:2058] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-01-04T02:24:55.416967Z node 5 :INTERCONNECT DEBUG: interconnect_resolve.cpp:127: ICR04 Host: ::1, RESOLVED address 2026-01-04T02:24:55.417579Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [5:21:2058] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "i\363\"\267\\\023\024\335h\202\232i\261\373\\\274\027*\006\276{\037L\221\251\203\363h 
\365\3628" 2026-01-04T02:24:55.417665Z node 5 :INTERCONNECT INFO: interconnect_handshake.cpp:414: Handshake [5:21:2058] [node 6] ICH04 handshake succeeded 2026-01-04T02:24:55.417996Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:339: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2026-01-04T02:24:55.418052Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:461: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:21:2058] poison: false 2026-01-04T02:24:55.418105Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @351 PendingConnection -> StateWork 2026-01-04T02:24:55.418306Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:378: Proxy [5:1:2048] [node 6] ICP22 created new session: [5:25:2048] 2026-01-04T02:24:55.418367Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:278: Session [5:25:2048] [node 6] ICS09 handshake done sender: [5:21:2058] self: [5:2955285951:0] peer: [6:2481422210:0] socket: 24 qp: -1 2026-01-04T02:24:55.418405Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:303: Session [5:25:2048] [node 6] ICS10 traffic start 2026-01-04T02:24:55.418491Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:319: Session [5:25:2048] [node 6] ICS11 registering socket in PollerActor 2026-01-04T02:24:55.418558Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 0 2026-01-04T02:24:55.418601Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:358: Session [5:25:2048] [node 6] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2026-01-04T02:24:55.418642Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 0 2026-01-04T02:24:55.418686Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:241: Session [5:25:2048] [node 6] ICS04 subscribe for session state for [5:19:2057] 2026-01-04T02:24:55.418783Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_server.cpp:104: ICListener: [0:0:0] ICL04 Accepted from: ::1:35264 2026-01-04T02:24:55.419182Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:1037: Handshake [6:27:2059] [node 0] ICH02 starting incoming handshake 2026-01-04T02:24:55.419308Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:252: InputSession [5:26:2048] [node 6] ICIS01 InputSession created 2026-01-04T02:24:55.420135Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:26:2048] [node 6] ICIS02 ReceiveData called 2026-01-04T02:24:55.420253Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:26:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-01-04T02:24:55.420590Z node 6 :INTERCONNECT INFO: interconnect_handshake.cpp:414: Handshake [6:23:2058] [node 5] ICH04 handshake succeeded 2026-01-04T02:24:55.420843Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:335: Proxy [6:10:2048] [node 5] ICP19 incoming handshake succeeded 2026-01-04T02:24:55.420891Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:446: Proxy [6:10:2048] [node 5] ICP111 dropped incoming handshake: [6:23:2058] poison: false 2026-01-04T02:24:55.420944Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @351 PendingConnection -> StateWork 2026-01-04T02:24:55.421041Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:378: Proxy [6:10:2048] [node 5] ICP22 created new session: 
[6:28:2048] 2026-01-04T02:24:55.421084Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:278: Session [6:28:2048] [node 5] ICS09 handshake done sender: [6:23:2058] self: [6:2481422210:0] peer: [5:2955285951:0] socket: 25 qp: -1 2026-01-04T02:24:55.421114Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:303: Session [6:28:2048] [node 5] ICS10 traffic start 2026-01-04T02:24:55.421190Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:319: Session [6:28:2048] [node 5] ICS11 registering socket in PollerActor 2026-01-04T02:24:55.421246Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 0 2026-01-04T02:24:55.421275Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:358: Session [6:28:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 0 NextSerial# 1 2026-01-04T02:24:55.421300Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 0 2026-01-04T02:24:55.421356Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:252: InputSession [6:29:2048] [node 5] ICIS01 InputSession created 2026-01-04T02:24:55.421429Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:26:2048] [node 6] ICIS02 ReceiveData called 2026-01-04T02:24:55.421483Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:26:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-01-04T02:24:55.421559Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [6:29:2048] [node 5] ICIS02 ReceiveData called 2026-01-04T02:24:55.421612Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:29:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2026-01-04T02:24:55.421660Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:26:2048] [node 6] ICIS02 ReceiveData called ... 
1-04T02:24:55.428697Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.cpp:204: Proxy [5:1:2048] [node 6] ICP06 reply for incoming handshake (actor [5:34:2060]) is held 2026-01-04T02:24:55.429277Z node 5 :INTERCONNECT DEBUG: interconnect_resolve.cpp:127: ICR04 Host: ::1, RESOLVED address 2026-01-04T02:24:55.429566Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_server.cpp:104: ICListener: [0:0:0] ICL04 Accepted from: ::1:35284 2026-01-04T02:24:55.429936Z node 6 :INTERCONNECT DEBUG: interconnect_handshake.cpp:1037: Handshake [6:37:2062] [node 0] ICH02 starting incoming handshake 2026-01-04T02:24:55.430273Z node 5 :INTERCONNECT DEBUG: interconnect_handshake.cpp:653: Handshake [5:32:2059] [node 6] ICH07 SendExBlock ExternalDataChannelParams HandshakeId: "\327\337\316\2210T\207\226\223\340\341\210\267\3017\232*\026\225N\375u\364\277\'9\266\t>\025Va" 2026-01-04T02:24:55.430341Z node 5 :INTERCONNECT INFO: interconnect_handshake.cpp:414: Handshake [5:32:2059] [node 6] ICH04 handshake succeeded 2026-01-04T02:24:55.430636Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:339: Proxy [5:1:2048] [node 6] ICP20 outgoing handshake succeeded 2026-01-04T02:24:55.430678Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:446: Proxy [5:1:2048] [node 6] ICP111 dropped incoming handshake: [5:34:2060] poison: true 2026-01-04T02:24:55.430730Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:461: Proxy [5:1:2048] [node 6] ICP052 dropped outgoing handshake: [5:32:2059] poison: false 2026-01-04T02:24:55.430765Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @351 StateWork -> StateWork 2026-01-04T02:24:55.430805Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:278: Session [5:25:2048] [node 6] ICS09 handshake done sender: [5:32:2059] self: [5:2955285951:0] peer: [6:2481422210:0] socket: 30 qp: -1 2026-01-04T02:24:55.430845Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:303: Session [5:25:2048] [node 6] ICS10 traffic start 2026-01-04T02:24:55.430915Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:319: Session [5:25:2048] [node 6] ICS11 registering socket in PollerActor 2026-01-04T02:24:55.430972Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-01-04T02:24:55.431004Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:358: Session [5:25:2048] [node 6] ICS06 rewind SendQueue size# 1 LastConfirmed# 1 NextSerial# 2 2026-01-04T02:24:55.431065Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-01-04T02:24:55.432086Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:252: InputSession [5:38:2048] [node 6] ICIS01 InputSession created 2026-01-04T02:24:55.432915Z node 6 :INTERCONNECT INFO: interconnect_handshake.cpp:414: Handshake [6:35:2061] [node 5] ICH04 handshake succeeded 2026-01-04T02:24:55.433096Z node 6 :INTERCONNECT NOTICE: interconnect_tcp_proxy.cpp:409: Proxy [6:10:2048] [node 5] ICP25 outgoing handshake failed, temporary: 0 explanation: outgoing handshake Peer# ::1(::1:25920) Socket error# connection unexpectedly closed state# ReceiveResponse processed# 0 remain# 52 incoming: [6:35:2061] held: no 2026-01-04T02:24:55.433145Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:461: Proxy [6:10:2048] [node 5] ICP052 dropped outgoing handshake: [6:30:2060] poison: false 2026-01-04T02:24:55.433180Z node 6 :INTERCONNECT DEBUG: 
interconnect_tcp_proxy.cpp:432: Proxy [6:10:2048] [node 5] ICP28 other handshake is still going on 2026-01-04T02:24:55.433216Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:38:2048] [node 6] ICIS02 ReceiveData called 2026-01-04T02:24:55.433277Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:38:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-01-04T02:24:55.433321Z node 6 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:335: Proxy [6:10:2048] [node 5] ICP19 incoming handshake succeeded 2026-01-04T02:24:55.433355Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:446: Proxy [6:10:2048] [node 5] ICP111 dropped incoming handshake: [6:35:2061] poison: false 2026-01-04T02:24:55.433396Z node 6 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [6:10:2048] [node 5] ICP77 @351 StateWork -> StateWork 2026-01-04T02:24:55.433431Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:278: Session [6:28:2048] [node 5] ICS09 handshake done sender: [6:35:2061] self: [6:2481422210:0] peer: [5:2955285951:0] socket: 31 qp: -1 2026-01-04T02:24:55.433457Z node 6 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:303: Session [6:28:2048] [node 5] ICS10 traffic start 2026-01-04T02:24:55.433508Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:319: Session [6:28:2048] [node 5] ICS11 registering socket in PollerActor 2026-01-04T02:24:55.433537Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-04T02:24:55.433563Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_channel.cpp:62: OutputChannel 0 [node 5] ICOCH98 Dropping confirmed messages 2026-01-04T02:24:55.433632Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:1027: Session [6:28:2048] [node 5] ICS24 exit InflightDataAmount: 0 bytes RdmaInflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 0 rdma bytes dropped 1 packets 2026-01-04T02:24:55.433698Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:358: Session [6:28:2048] [node 5] ICS06 rewind SendQueue size# 0 LastConfirmed# 1 NextSerial# 2 2026-01-04T02:24:55.433727Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-04T02:24:55.433916Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:252: InputSession [6:39:2048] [node 5] ICIS01 InputSession created 2026-01-04T02:24:55.433942Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [6:39:2048] [node 5] ICIS02 ReceiveData called 2026-01-04T02:24:55.433993Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# 106 num# 1 err# 2026-01-04T02:24:55.434066Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2026-01-04T02:24:55.434083Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2026-01-04T02:24:55.434124Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:38:2048] [node 6] ICIS02 ReceiveData called 2026-01-04T02:24:55.434166Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:38:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 
2026-01-04T02:24:55.434293Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:38:2048] [node 6] ICIS02 ReceiveData called 2026-01-04T02:24:55.434321Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:38:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-01-04T02:24:55.434345Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [6:39:2048] [node 5] ICIS02 ReceiveData called 2026-01-04T02:24:55.434371Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2026-01-04T02:24:55.434648Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-01-04T02:24:55.434696Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-01-04T02:24:55.434765Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [6:39:2048] [node 5] ICIS02 ReceiveData called 2026-01-04T02:24:55.434795Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [6:39:2048] [node 5] ICIS12 Read recvres# -11 num# 1 err# 2026-01-04T02:24:55.434823Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-04T02:24:55.434839Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-04T02:24:55.434860Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-01-04T02:24:55.434874Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 1 2026-01-04T02:24:55.434904Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:164: Session [6:28:2048] [node 5] ICS02 send event from: [6:20:2057] to: [5:19:2057] 2026-01-04T02:24:55.434973Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:961: Session [6:28:2048] [node 5] ICS22 outgoing packet Serial# 2 Confirm# 2 DataSize# 84 RdmaPayload# 0 InflightDataAmount# 84 RdmaInflightDataAmount# 0 2026-01-04T02:24:55.435030Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-04T02:24:55.435098Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:341: InputSession [5:38:2048] [node 6] ICIS02 ReceiveData called 2026-01-04T02:24:55.435132Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:38:2048] [node 6] ICIS12 Read recvres# 106 num# 1 err# 2026-01-04T02:24:55.435184Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_input_session.cpp:967: InputSession [5:38:2048] [node 6] ICIS12 Read recvres# -11 num# 1 err# 2026-01-04T02:24:55.435232Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-04T02:24:55.435253Z node 6 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [6:28:2048] [node 5] ICS23 confirm count: 1 2026-01-04T02:24:55.435275Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 2 2026-01-04T02:24:55.435300Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_channel.cpp:62: 
OutputChannel 0 [node 6] ICOCH98 Dropping confirmed messages 2026-01-04T02:24:55.435365Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:1027: Session [5:25:2048] [node 6] ICS24 exit InflightDataAmount: 0 bytes RdmaInflightDataAmount: 0 bytes droppedDataAmount: 84 bytes dropped 0 rdma bytes dropped 1 packets 2026-01-04T02:24:55.435399Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_tcp_session.cpp:972: Session [5:25:2048] [node 6] ICS23 confirm count: 2 2026-01-04T02:24:55.435461Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:105: Session [5:25:2048] [node 6] ICS01 socket: 30 reason# 2026-01-04T02:24:55.435513Z node 5 :INTERCONNECT INFO: interconnect_tcp_proxy.cpp:543: Proxy [5:1:2048] [node 6] ICP30 unregister session Session# [5:25:2048] VirtualId# [5:2955285951:0] 2026-01-04T02:24:55.435557Z node 5 :INTERCONNECT DEBUG: interconnect_tcp_proxy.h:181: Proxy [5:1:2048] [node 6] ICP77 @207 StateWork -> PendingActivation 2026-01-04T02:24:55.435597Z node 5 :INTERCONNECT_SESSION INFO: interconnect_tcp_session.cpp:571: Session [5:25:2048] [node 6] ICS25 shutdown socket, reason# 2026-01-04T02:24:55.435704Z node 5 :INTERCONNECT_SESSION DEBUG: interconnect_channel.cpp:461: OutputChannel 0 [node 6] ICOCH89 Notyfying about Undelivered messages! NotYetConfirmed size: 0, Queue size: 0 |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_EnableAlterDatabase [GOOD] >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_NoEnableAlterDatabase >> TInterconnectTest::OldFormat [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew >> DataShardSnapshots::VolatileSnapshotTimeoutRefresh [GOOD] >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot >> TInterconnectTest::OldFormatSuppressVersionCheckOnNew [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld >> GroupWriteTest::WithRead |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> TInterconnectTest::TestBlobEvent >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive >> TInterconnectTest::OldFormatSuppressVersionCheckOnOld [GOOD] >> TInterconnectTest::OldFormatSuppressVersionCheck >> TInterconnectTest::TestBlobEvent [GOOD] >> TInterconnectTest::TestBlobEvent220Bytes >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalSysViewProcessor-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] >> TInterconnectTest::TestBlobEvent220Bytes [GOOD] >> TInterconnectTest::TestAddressResolve >> TSchemeShardExtSubDomainTest::StatisticsAggregatorSync-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__SYNC-pk_types9-all_types9-index9-pgtimestamp--SYNC] [FAIL] >> DataShardSnapshots::LockedWriteCleanupOnSplit-UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink >> TInterconnectTest::TestAddressResolve [GOOD] >> TInterconnectTest::OldNbs >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_NoEnableAlterDatabase [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts+UseSink [GOOD] >> 
DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldFormatSuppressVersionCheck [GOOD] Test command err: 2026-01-04T02:24:59.622697Z node 4 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [4:22:2057] [node 3] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-01-04T02:25:00.113562Z node 5 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [5:20:2058] [node 6] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-01-04T02:25:00.771870Z node 7 :INTERCONNECT ERROR: interconnect_tcp_proxy.h:262: Proxy [7:1:2048] [node 8] ICP35 event processing took too much time PendingNodeInfo:262(16440340ns) {Configure:104(16436315ns) {ProcessConfigured:135(279678ns) {StartInitialHandshake:157(277121ns) {DropHandshakes:470(2922ns) {DropIncomingHandshake:442(475ns) DropOutgoingHandshake:457(390ns)} GenerateSessionVirtualId:992(1534ns) PrepareNewSessionHandshake:477(1824ns) {SwitchToState:179(711ns)}}}}} 2026-01-04T02:25:00.775771Z node 8 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [8:22:2057] [node 7] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default 2026-01-04T02:25:00.782308Z node 7 :INTERCONNECT WARN: interconnect_handshake.cpp:542: Handshake [7:20:2058] [node 8] ICH09 Neither CompatibilityInfo nor VersionTag of the peer can be validated, accepting by default |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest |83.3%| [TA] $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TInterconnectTest::OldNbs [GOOD] |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true |83.3%| [TA] {RESULT} $(B)/ydb/core/kqp/workload_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/runtime/ydb-core-kqp-ut-runtime ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterSchemeLimits_NoEnableAlterDatabase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:24:41.973034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:24:41.973122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:41.973163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:24:41.973196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:24:41.973227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:24:41.973252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:24:41.973309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:41.973374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:24:41.974259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:24:41.974530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:24:42.058418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:24:42.058490Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:42.075119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:24:42.075409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:24:42.075619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:24:42.085015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:24:42.085272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:24:42.086024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:24:42.086315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:24:42.089652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:42.089887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:24:42.091217Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:24:42.091291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:42.091402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:24:42.091444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:24:42.091480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:24:42.091672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:24:42.245387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.246897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.247046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.247116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:24:42.247212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.247293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.247362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.247431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.247507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.247591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.247659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.247734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.247817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.247892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.248021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... onId 103:0, ProgressState, NeedSyncHive: 0 2026-01-04T02:25:01.306085Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 240 -> 240 2026-01-04T02:25:01.307752Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:25:01.307883Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:25:01.307933Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:25:01.307991Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 5 2026-01-04T02:25:01.308078Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2026-01-04T02:25:01.308196Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-04T02:25:01.313284Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6222: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 2 TabletID: 72075186233409546 Generation: 3 EffectiveACLVersion: 0 SubdomainVersion: 3 UserAttributesVersion: 1 TenantHive: 18446744073709551615 TenantSysViewProcessor: 
18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2026-01-04T02:25:01.313418Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:25:01.313542Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 2], Generation: 3, ActorId:[8:506:2455], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:25:01.313676Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-01-04T02:25:01.313711Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-04T02:25:01.313885Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-04T02:25:01.313927Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:554:2492], at schemeshard: 72075186233409546, txId: 0, path id: 1 2026-01-04T02:25:01.315324Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72075186233409546, cookie: 0 2026-01-04T02:25:01.315478Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:25:01.315526Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-04T02:25:01.315702Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:25:01.315754Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:25:01.315807Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:25:01.315852Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:25:01.315904Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-04T02:25:01.315962Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:25:01.316042Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:25:01.316092Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:25:01.316205Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-04T02:25:01.318836Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:25:01.318979Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-04T02:25:01.320879Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:25:01.320939Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:25:01.321443Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:25:01.321573Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:25:01.321628Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:599:2533] TestWaitNotification: OK eventTxId 103 2026-01-04T02:25:01.322315Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:25:01.322550Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 273us result status StatusSuccess 2026-01-04T02:25:01.322995Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 5 ShardsInside: 3 ShardsLimit: 7 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 
DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 5 MaxChildrenInDir: 3 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 7 MaxShardsInPath: 3 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:25:01.323724Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2026-01-04T02:25:01.323928Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/USER_0" took 206us result status StatusSuccess 2026-01-04T02:25:01.324344Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 2 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "/dc-1/users/tenant-1:hdd" Kind: "hdd" } PathsInside: 0 PathsLimit: 10 ShardsInside: 3 ShardsLimit: 10 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } SchemeLimits { MaxDepth: 32 MaxPaths: 10 MaxChildrenInDir: 10 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 10 MaxShardsInPath: 10 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> KqpSnapshotIsolation::TReadOnlyOltp [GOOD] >> KqpSnapshotIsolation::TReadOwnChangesOltp >> GroupWriteTest::ByTableName >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterSameStoragePools-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut 
|83.3%| [LD] {RESULT} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/compile_service/ut/ydb-core-kqp-compile_service-ut |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |83.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source_reboots/schemeshard-ut_external_data_source_reboots >> GroupWriteTest::SimpleRdma >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::OldNbs [GOOD] |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> GroupWriteTest::WriteHardRateDispatcher >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |83.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/ydb-core-blobstorage-vdisk-skeleton-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:24:57.415856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:24:57.415965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:57.416032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:24:57.416064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:24:57.416111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:24:57.416143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:24:57.416222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:57.416305Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:24:57.417062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:24:57.417330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:24:57.505875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:24:57.505938Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:57.520347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:24:57.520905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:24:57.521079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:24:57.535657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:24:57.535924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:24:57.536662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:24:57.536907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:24:57.539845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:57.540028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:24:57.541246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:24:57.541303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:57.541350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:24:57.541398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:24:57.541533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:24:57.541730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:24:57.550524Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:24:57.689271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose 
Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:24:57.689514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:24:57.689708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:24:57.689758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:24:57.689989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:24:57.690048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:57.696131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:24:57.696388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:24:57.696637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:24:57.696698Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:24:57.696737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:24:57.696788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:24:57.701755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:24:57.701838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:24:57.701897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:24:57.704346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:24:57.704407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:24:57.704463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:24:57.704520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:24:57.715776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:24:57.719035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:24:57.719264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:24:57.720427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:24:57.720560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:24:57.720602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:24:57.720932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:24:57.720989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:24:57.721210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:24:57.721297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:24:57.727351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:24:57.727406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
10: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2026-01-04T02:25:04.732854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-04T02:25:04.733252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-04T02:25:04.733741Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 15 TxId_Deprecated: 0 TabletID: 72075186233409556 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 15 TxId_Deprecated: 0 TabletID: 72075186233409556 2026-01-04T02:25:04.735325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 15, at schemeshard: 72057594046678944 2026-01-04T02:25:04.735652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 3 2026-01-04T02:25:04.736236Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 14 TxId_Deprecated: 0 TabletID: 72075186233409555 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 14 TxId_Deprecated: 0 TabletID: 72075186233409555 Forgetting tablet 72075186233409556 2026-01-04T02:25:04.767841Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-04T02:25:04.768375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 14, at schemeshard: 72057594046678944 2026-01-04T02:25:04.768741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 2 Forgetting tablet 72075186233409555 2026-01-04T02:25:04.773396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-04T02:25:04.773725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-04T02:25:04.774067Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 16 TxId_Deprecated: 0 TabletID: 72075186233409557 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 
ShardLocalIdx: 16 TxId_Deprecated: 0 TabletID: 72075186233409557 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409557 2026-01-04T02:25:04.776982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2026-01-04T02:25:04.777198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 16, at schemeshard: 72057594046678944 2026-01-04T02:25:04.777499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 1 2026-01-04T02:25:04.788052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 139 2026-01-04T02:25:04.789027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:25:04.789100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 16], at schemeshard: 72057594046678944 2026-01-04T02:25:04.789208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-04T02:25:04.789609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:25:04.789678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:25:04.789871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-04T02:25:04.792683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:25:04.792776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-04T02:25:04.793078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:15 2026-01-04T02:25:04.793124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409556 2026-01-04T02:25:04.801871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:14 2026-01-04T02:25:04.801954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:14 tabletId 72075186233409555 2026-01-04T02:25:04.802439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:25:04.802489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-04T02:25:04.802849Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:16 2026-01-04T02:25:04.802921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:16 tabletId 72075186233409557 2026-01-04T02:25:04.810551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-04T02:25:04.810779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:25:04.810842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:25:04.810982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:25:04.812032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:25:04.814671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 139, wait until txId: 139 TestWaitNotification wait txId: 139 2026-01-04T02:25:04.815657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 139: send EvNotifyTxCompletion 2026-01-04T02:25:04.815718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 139 2026-01-04T02:25:04.816970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 139, at schemeshard: 72057594046678944 2026-01-04T02:25:04.817095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 139: got EvNotifyTxCompletionResult 2026-01-04T02:25:04.817142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 139: satisfy waiter [1:2147:3930] TestWaitNotification: OK eventTxId 139 2026-01-04T02:25:04.819193Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:25:04.819467Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 302us result status StatusSuccess 2026-01-04T02:25:04.819990Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 5 ShardsInside: 0 ShardsLimit: 6 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 20 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 4 MaxPaths: 5 MaxChildrenInDir: 4 MaxAclBytesSize: 25 MaxTableColumns: 3 MaxTableColumnNameLength: 10 MaxTableKeyColumns: 1 MaxTableIndices: 20 MaxShards: 6 MaxShardsInPath: 4 MaxConsistentCopyTargets: 1 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 20 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/view/unittest >> TSelectFromViewTest::QueryCacheIsUpdated [GOOD] Test command err: Trying to start YDB, gRPC: 8351, MsgBus: 6633 2026-01-04T02:18:41.574487Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591325152084149324:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:41.574563Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:41.712043Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:41.754116Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:41.754221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:41.763173Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:41.770664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:41.815149Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:41.815165Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:41.815169Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:41.815248Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:41.931203Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:42.085679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:42.579692Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:43.525872Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591325160674084761:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:43.525989Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:43.526245Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591325160674084771:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:43.526319Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 4013, MsgBus: 17028 2026-01-04T02:18:44.259263Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591325166482743323:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:44.259367Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:44.267357Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:44.319275Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:44.366947Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:44.367009Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:44.368560Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:44.369861Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:44.369875Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:44.369888Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:44.369980Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:44.498685Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:44.607030Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:45.285154Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:46.360973Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591325175072678760:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:46.361026Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:46.361164Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591325175072678769:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:46.361190Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 21887, MsgBus: 20048 2026-01-04T02:18:47.117480Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591325177634804877:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:47.117520Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:47.126254Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:47.187185Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591325177634804849:2081] 1767493127116846 != 1767493127116849 2026-01-04T02:18:47.187802Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:47.224430Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:47.224491Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:47.227260Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:47.229825Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:18:47.229840Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:18:47.229847Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:18:47.229923Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:47.314922Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:18:47.471310Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:48.121023Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:48.950859Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591325181929773020:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:48.950951Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:48.951207Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591325181929773031:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:48.951255Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } Trying to start YDB, gRPC: 15778, MsgBus: 9683 2026-01-04T02:18:49.484266Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TT ... seKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-BuildShuffleStageKqpPhysical-BuildStreamLookupTableStagesKqpPhysical-BuildStreamIdxLookupJoinStagesKqpPhysical-PushFlatmapToStageKqpPhysical-BuildStreamLookupTableStagesKqpPhysical-BuildStreamIdxLookupJoinStagesKqpPhysical-PushExtractMembersToStageKqpPhysical-PushFlatmapToStageKqpPhysical-BuildSortStageKqpPhysical-RewriteKqpReadTableKqpPeepholeFinal-SetCombinerMemoryLimitKqpPeepholeNewOperator-RewriteWideCombinerToDqHashCombinerCompiled query: ( (let $1 (Read! world (DataSource '"kikimr" '"db") (Key '('table (String '"/Root/count_episodes_with_titles"))) (Void) '())) (let $2 (DataSink 'result)) (let $3 (Right! $1)) (let $4 (Write! (Left! $1) $2 (Key) (RemoveSystemMembers (Sort (PersistableRepr (SqlProject $3 '((SqlProjectStarItem (TypeOf $3) '"" (lambda '($5) $5) '())))) '((Bool 'true) (Bool 'true)) (lambda '($6) '((PersistableRepr (Member $6 '"series")) (PersistableRepr (Member $6 '"season")))))) '('('type) '('autoref)))) (return (Commit! $4 $2)) ) Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (DataSource '"kikimr" '"db")) (let $3 '('"season_id" '"series_id")) (let $4 (AggApply 'count_all (StructType) (lambda '($31) (Void)))) (let $5 (Aggregate (ExtractMembers (Right! (KiReadTable! world $2 (Key '('table (String '"/Root/episodes"))) (Void) '())) $3) $3 '('('Count0 $4)) '())) (let $6 '('"season_id" '"series_id" '"title")) (let $7 '('Inner '"episodes" '"series" '('"episodes" '"series_id") '('"series" '"series_id") '())) (let $8 '('"episodes" '"series_id" '"episodes" '"season_id")) (let $9 '('"seasons" '"series_id" '"seasons" '"season_id")) (let $10 '('Inner $7 '"seasons" $8 $9 '())) (let $11 '('"rename" '"episodes.Count0" '"episode_count")) (let $12 '('"rename" '"episodes.season_id" '"")) (let $13 '('"rename" '"episodes.series_id" '"")) (let $14 '('"rename" '"seasons.season_id" '"")) (let $15 '('"rename" '"seasons.series_id" '"")) (let $16 '('"rename" '"seasons.title" '"season")) (let $17 '('"rename" '"series.series_id" '"")) (let $18 '('"rename" '"series.title" '"series")) (let $19 '($11 $12 $13 $14 $15 $16 $17 $18)) (let $20 (EquiJoin '($5 '"episodes") '((ExtractMembers (Right! (KiReadTable! world $2 (Key '('table (String '"/Root/series"))) (Void) '())) '('"series_id" '"title")) '"series") '((ExtractMembers (Right! (KiReadTable! world $2 (Key '('table (String '"/Root/seasons"))) (Void) '())) $6) '"seasons") $10 $19)) (let $21 (Bool 'true)) (let $22 (Sort $20 '($21 $21) (lambda '($32) '((Member $32 '"series") (Member $32 '"season"))))) (let $23 '('"db" '"/Root/episodes" '"Select")) (let $24 '('"db" '"/Root/series" '"Select")) (let $25 '('"db" '"/Root/seasons" '"Select")) (let $26 '($23 $24 $25)) (let $27 '('('"mode" '"flush"))) (let $28 (KiExecDataQuery! world $1 (DataQueryBlocks (TKiDataQueryBlock '('($22 '() '0 '0)) (KiEffects) $26 '())) $27 (Void))) (let $29 (DataSink 'result)) (let $30 (ResPull! (Left! $28) $29 (Key) (Nth (Right! $28) '0) '('('type) '('autoref)) '"kikimr")) (return (Commit! (Commit! 
$30 $29) $1 $27)) ) KqpLogical-ApplyExtractMembersToReadTableKqpLogical-ApplyExtractMembersToReadTableKqpLogical-ApplyExtractMembersToReadTableKqpLogical-RewriteAggregateKqpLogical-RewriteEquiJoinKqpLogical-JoinToIndexLookupKqpLogical-JoinToIndexLookupKqpPhysical-BuildReadTableRangesStageKqpPhysical-PushAggregateCombineToStageKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-ExpandAggregatePhaseKqpPhysical-BuildShuffleStageKqpPhysical-BuildStreamLookupTableStagesKqpPhysical-BuildStreamIdxLookupJoinStagesKqpPhysical-PushFlatmapToStageKqpPhysical-BuildStreamLookupTableStagesKqpPhysical-BuildStreamIdxLookupJoinStagesKqpPhysical-PushExtractMembersToStageKqpPhysical-PushFlatmapToStageKqpPhysical-BuildSortStageKqpPhysical-RewriteKqpReadTableKqpPeepholeFinal-SetCombinerMemoryLimitKqpPeepholeNewOperator-RewriteWideCombinerToDqHashCombinerCompiled query: ( (return (Write! world (DataSink '"kikimr" '"db") (Key '('objectId (String '"/Root/count_episodes_with_titles")) '('typeId (String '"VIEW"))) (Void) '('('mode 'dropObject)))) ) Optimized expr: ( (let $1 (DataSink '"kikimr" '"db")) (let $2 (KiDropObject! world $1 '"/Root/count_episodes_with_titles" '"VIEW" '() '0)) (return (Commit! $2 $1 '('('"mode" '"flush")))) ) Trying to start YDB, gRPC: 11808, MsgBus: 18074 2026-01-04T02:24:18.443633Z node 23 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[23:7591326602106208917:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:18.443784Z node 23 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:18.532289Z node 23 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.005566s 2026-01-04T02:24:18.549027Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:18.716476Z node 23 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:18.721967Z node 23 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [23:7591326602106208890:2081] 1767493458441091 != 1767493458441094 2026-01-04T02:24:18.747893Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:18.748036Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:18.800890Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:18.879425Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:19.079010Z node 23 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:19.079045Z node 23 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:19.079066Z node 23 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:19.079242Z node 23 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:19.456872Z node 23 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:21.091215Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:23.446491Z node 23 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[23:7591326602106208917:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:23.446628Z node 23 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:33.686303Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:24:33.686340Z node 23 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:33.986460Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7591326666530719272:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:33.986697Z node 23 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:33.987209Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7591326666530719281:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:33.987306Z node 23 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.099719Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7591326670825686598:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.099884Z node 23 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.100310Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7591326670825686603:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.100439Z node 23 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [23:7591326670825686604:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.100570Z node 23 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:34.107517Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:34.136395Z node 23 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [23:7591326670825686607:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:24:34.235918Z node 23 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [23:7591326670825686658:2589] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/view/unittest >> GroupWriteTest::TwoTables |83.3%| [TM] {RESULT} ydb/core/kqp/ut/view/unittest >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] |83.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |83.3%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |83.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/replication-controller-ut_target_discoverer |83.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |83.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build |83.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build/ydb-core-tx-schemeshard-ut_column_build ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterCantChangeExternalStatisticsAggregator-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:24:42.261357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:24:42.261438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:42.261487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:24:42.261514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:24:42.261549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:24:42.261581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:24:42.261638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:42.261721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:24:42.262452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:24:42.262720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:24:42.350775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:24:42.350835Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:42.375460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:24:42.379502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:24:42.379718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:24:42.393585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:24:42.393867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:24:42.394630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:24:42.394888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:24:42.400734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:42.400935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:24:42.402272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:24:42.402347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:42.402463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:24:42.402523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:24:42.402600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-01-04T02:24:42.402729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:24:42.553163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.562420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.562609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.562684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.562794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.562870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.562954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.563034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.563174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.563277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.563340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.563403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.563495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.563568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:42.563630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... schemeshard__operation_common_subdomain.cpp:120: NSubDomainState::TConfigureParts operationId# 102:0 Got OK TEvConfigureStatus from tablet# 72075186233409549 shardIdx# 72057594046678944:4 at schemeshard# 72057594046678944 2026-01-04T02:25:07.696829Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 3 -> 128 2026-01-04T02:25:07.707360Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:25:07.707637Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:25:07.707714Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:25:07.707780Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 102:0, at tablet# 72057594046678944 2026-01-04T02:25:07.707853Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 102 ready parts: 1/1 2026-01-04T02:25:07.708034Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:25:07.710530Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2026-01-04T02:25:07.710724Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2026-01-04T02:25:07.711159Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:25:07.711320Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 34359740528 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:25:07.711387Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2026-01-04T02:25:07.711746Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-04T02:25:07.711813Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:0, at tablet# 72057594046678944 2026-01-04T02:25:07.711962Z 
node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-04T02:25:07.712086Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 2, ActorId:[8:767:2744], EffectiveACLVersion: 0, SubdomainVersion: 2, UserAttributesVersion: 1, TenantHive: 18446744073709551615, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 72075186233409549, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 2, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 18446744073709551615, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:25:07.731908Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:25:07.732013Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:25:07.732320Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:07.732387Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:25:07.732906Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:25:07.732985Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 102:0, ProgressState, NeedSyncHive: 0 2026-01-04T02:25:07.733038Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 240 -> 240 2026-01-04T02:25:07.739198Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:25:07.739418Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:25:07.739480Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:25:07.739543Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-04T02:25:07.739615Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2026-01-04T02:25:07.739763Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-04T02:25:07.747258Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:25:07.747345Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:25:07.747522Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:25:07.747601Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:25:07.747658Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:25:07.747700Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:25:07.747751Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:25:07.747845Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [8:710:2700] message: TxId: 102 2026-01-04T02:25:07.747910Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:25:07.747970Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:25:07.748011Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:25:07.748275Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-04T02:25:07.768232Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:25:07.769721Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:25:07.769818Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [8:955:2897] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2026-01-04T02:25:07.773272Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:25:07.773470Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { Name: "USER_0" ExternalStatisticsAggregator: false } 2026-01-04T02:25:07.773527Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 103:0, path 
/MyRoot/USER_0 2026-01-04T02:25:07.773693Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 103:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2026-01-04T02:25:07.773755Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, at schemeshard: 72057594046678944 2026-01-04T02:25:07.811257Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:25:07.811601Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: ExternalStatisticsAggregator could only be added, not removed, operation: ALTER DATABASE, path: /MyRoot/USER_0 TestModificationResult got TxId: 103, wait until txId: 103 >> TTopicApiDescribes::GetLocalDescribe [GOOD] |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |83.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/cost/ydb-core-kqp-ut-cost >> DataShardSnapshots::ReadIteratorLocalSnapshotThenWrite [GOOD] >> DataShardSnapshots::RepeatableReadAfterSplitRace >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> DataShardSnapshots::LockedWriteDistributedCommitCrossConflict-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |83.4%| [LD] {RESULT} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ext_index/ut/ydb-services-ext_index-ut >> GroupWriteTest::Simple >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetLocalDescribe [GOOD] Test command err: 2026-01-04T02:24:54.019201Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326755645845419:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:54.019247Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:54.261396Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591326753829745523:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:54.261451Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:54.326927Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:24:54.396077Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:24:54.396325Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:24:54.659041Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:54.659302Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:54.852089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:54.852242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:54.854144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:54.854213Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:54.866116Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:24:54.866301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:54.886353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:54.996234Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:55.000066Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:55.010618Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:55.016478Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:55.100412Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/0025cb/r3tmp/yandex24t1Jz.tmp 2026-01-04T02:24:55.100448Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
/home/runner/.ya/build/build_root/c5zo/0025cb/r3tmp/yandex24t1Jz.tmp 2026-01-04T02:24:55.100642Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/0025cb/r3tmp/yandex24t1Jz.tmp 2026-01-04T02:24:55.100763Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:55.154555Z INFO: TTestServer started on Port 17291 GrpcPort 10371 2026-01-04T02:24:55.310298Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:10371 2026-01-04T02:24:55.369395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:55.468626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:58.497828Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326771009615089:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:58.497928Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:58.497987Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326771009615100:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:58.498213Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326771009615103:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:58.498267Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:58.504219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:58.546824Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591326771009615104:2312], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-04T02:24:58.652385Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591326771009615132:2182] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:59.121706Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326755645845419:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:59.122318Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:59.147354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:59.149472Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591326771009615139:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:24:59.150096Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=OGM2YmM2MjAtMThhZDMzNzctMjRmN2VmNzctYmUxNTllMTQ=, ActorId: [2:7591326771009615086:2306], ActorState: ExecuteState, LegacyTraceId: 01ke3d4xkz289g40mpth85pjyt, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:24:59.152303Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:24:59.155242Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591326772825715936:2337], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:24:59.157564Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZDM1NDk5MWUtNjNlOGNkYmEtNjY1ZTc0ZS0zZWJkMmRlMA==, ActorId: [1:7591326772825715911:2331], ActorState: ExecuteState, LegacyTraceId: 01ke3d4xse8f79b5se58xepbea, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/C ... ] init complete for topic 'rt3.dc1--topic-x' partition 5 generation 1 [1:7591326807185455575:2451] 2026-01-04T02:25:06.323516Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037893][Partition][6][StateInit] bootstrapping 6 [1:7591326807185455569:2453] 2026-01-04T02:25:06.318529Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037895][Partition][9][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 1 [2:7591326805369353999:2370] 2026-01-04T02:25:06.327148Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037897][Partition][13][StateInit] bootstrapping 13 [2:7591326805369353995:2373] 2026-01-04T02:25:06.329029Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037897][Partition][13][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 1 [2:7591326805369353995:2373] 2026-01-04T02:25:06.335652Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037898][Partition][11][StateInit] bootstrapping 11 [1:7591326807185455581:2452] 2026-01-04T02:25:06.337507Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037898][Partition][11][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 1 [1:7591326807185455581:2452] 2026-01-04T02:25:06.340640Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037893][Partition][6][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 6 generation 1 [1:7591326807185455569:2453] 2026-01-04T02:25:06.342618Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037894][Partition][8][StateInit] bootstrapping 8 [2:7591326805369354007:2374] 2026-01-04T02:25:06.344557Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037894][Partition][8][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 1 [2:7591326805369354007:2374] 2026-01-04T02:25:06.352530Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037896][Partition][10][StateInit] bootstrapping 10 [1:7591326807185455576:2451] 2026-01-04T02:25:06.354450Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037896][Partition][10][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 10 generation 1 [1:7591326807185455576:2451] 2026-01-04T02:25:06.365913Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037894][Partition][12][StateInit] bootstrapping 12 [2:7591326805369354008:2374] 2026-01-04T02:25:06.368228Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037892][Partition][3][StateInit] bootstrapping 3 [2:7591326805369354004:2371] 2026-01-04T02:25:06.370062Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037892][Partition][3][StateInit] init complete for topic 'rt3.dc1--topic-x' 
partition 3 generation 1 [2:7591326805369354004:2371] 2026-01-04T02:25:06.371661Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037892][Partition][0][StateInit] bootstrapping 0 [2:7591326805369354006:2371] 2026-01-04T02:25:06.373527Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037892][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 1 [2:7591326805369354006:2371] 2026-01-04T02:25:06.378255Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037899] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:25:06.386369Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037893] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:25:06.408377Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037894][Partition][12][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 1 [2:7591326805369354008:2374] 2026-01-04T02:25:06.411433Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037895][Partition][2][StateInit] bootstrapping 2 [2:7591326805369354000:2370] 2026-01-04T02:25:06.413500Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037895][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 1 [2:7591326805369354000:2370] 2026-01-04T02:25:06.416646Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:25:06.435424Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037898] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:25:06.445655Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig ===Query complete 2026-01-04T02:25:06.453604Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:25:06.454779Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:25:06.469014Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037895] disable metering: reason# billing is not enabled in BillingMeteringConfig Create topic result: 1 2026-01-04T02:25:06.503373Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7591326807185455681:4046]: Request location 2026-01-04T02:25:06.504683Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326807185455693:4049] connected; active server actors: 1 2026-01-04T02:25:06.504757Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 0, NodeId 2, Generation 1 2026-01-04T02:25:06.504774Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 1 2026-01-04T02:25:06.504786Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 2, NodeId 2, Generation 1 2026-01-04T02:25:06.504799Z node 2 :PERSQUEUE_READ_BALANCER 
DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 1 2026-01-04T02:25:06.504810Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037899, PartitionId 4, NodeId 1, Generation 1 2026-01-04T02:25:06.504821Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 5, NodeId 1, Generation 1 2026-01-04T02:25:06.504830Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 6, NodeId 1, Generation 1 2026-01-04T02:25:06.504852Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 7, NodeId 2, Generation 1 2026-01-04T02:25:06.504864Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 8, NodeId 2, Generation 1 2026-01-04T02:25:06.504874Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 9, NodeId 2, Generation 1 2026-01-04T02:25:06.504884Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 10, NodeId 1, Generation 1 2026-01-04T02:25:06.504894Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 11, NodeId 1, Generation 1 2026-01-04T02:25:06.504905Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 12, NodeId 2, Generation 1 2026-01-04T02:25:06.504914Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 13, NodeId 2, Generation 1 2026-01-04T02:25:06.504927Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 14, NodeId 1, Generation 1 2026-01-04T02:25:06.505673Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7591326807185455681:4046]: Got location 2026-01-04T02:25:06.506379Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326807185455693:4049] disconnected. 
2026-01-04T02:25:06.506406Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326807185455693:4049] disconnected; active server actors: 1 2026-01-04T02:25:06.506422Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326807185455693:4049] disconnected no session 2026-01-04T02:25:06.508641Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7591326807185455694:4050]: Request location 2026-01-04T02:25:06.509836Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7591326807185455694:4050]: Got location 2026-01-04T02:25:06.509320Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326807185455696:4052] connected; active server actors: 1 2026-01-04T02:25:06.509686Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 1 2026-01-04T02:25:06.509700Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 1 2026-01-04T02:25:06.509710Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 5, NodeId 1, Generation 1 2026-01-04T02:25:06.511359Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326807185455696:4052] disconnected. 
2026-01-04T02:25:06.511388Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326807185455696:4052] disconnected; active server actors: 1 2026-01-04T02:25:06.511400Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326807185455696:4052] disconnected no session 2026-01-04T02:25:06.511085Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7591326807185455697:4053]: Request location 2026-01-04T02:25:06.514443Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326807185455699:4055] connected; active server actors: 1 |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:24:44.693342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:24:44.693455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:44.693499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:24:44.693537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:24:44.693584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:24:44.693620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:24:44.693682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:44.693742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:24:44.695467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:24:44.695822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:24:44.801580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:24:44.801644Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: 
Table profiles were not loaded 2026-01-04T02:24:44.821199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:24:44.821459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:24:44.822134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:24:44.921116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:24:44.921409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:24:44.922270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:24:44.922574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:24:44.944383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:44.944614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:24:44.945919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:24:44.945993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:44.946109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:24:44.946166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:24:44.946238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:24:44.946467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:24:45.150254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:45.154706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:45.154871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:45.154991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:45.155095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:45.155187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:45.155269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:45.155341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:45.155424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:45.155520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 
281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:45.155597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:45.155696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:45.155773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:45.155863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:45.155960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
], version: 18446744073709551615 2026-01-04T02:25:11.037047Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 7 2026-01-04T02:25:11.037148Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-04T02:25:11.039685Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 4, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-04T02:25:11.039979Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:25:11.040056Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:25:11.040089Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:25:11.040136Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:25:11.040342Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:25:11.040408Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-04T02:25:11.040597Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:25:11.040668Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:25:11.040784Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:25:11.040840Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:25:11.040903Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-04T02:25:11.040974Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:25:11.041044Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:25:11.041103Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:25:11.042212Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2026-01-04T02:25:11.048743Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 2026-01-04T02:25:11.048988Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:25:11.049202Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-04T02:25:11.049523Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-04T02:25:11.050071Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 Forgetting tablet 72075186233409546 2026-01-04T02:25:11.051866Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-04T02:25:11.052219Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-04T02:25:11.053383Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 2026-01-04T02:25:11.055506Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 2026-01-04T02:25:11.055766Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-04T02:25:11.056075Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-04T02:25:11.056527Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-04T02:25:11.056770Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-04T02:25:11.057564Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:25:11.057756Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:25:11.057849Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 
72057594046678944 2026-01-04T02:25:11.058068Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-04T02:25:11.058556Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:25:11.058625Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:25:11.058755Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:25:11.070103Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-04T02:25:11.070224Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2026-01-04T02:25:11.070375Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:25:11.070407Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-04T02:25:11.070474Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-04T02:25:11.070504Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2026-01-04T02:25:11.070560Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:25:11.070637Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2026-01-04T02:25:11.070938Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:25:11.071075Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-04T02:25:11.071525Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:25:11.071614Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:25:11.072270Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:25:11.072439Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:25:11.072504Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:584:2526] TestWaitNotification: OK eventTxId 103 2026-01-04T02:25:11.073250Z node 8 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:25:11.073554Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 342us result status StatusPathDoesNotExist 2026-01-04T02:25:11.073791Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |83.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] |83.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> TInterconnectTest::TestConnectAndDisconnect >> TInterconnectTest::TestConnectAndDisconnect [GOOD] >> TInterconnectTest::TestBlobEventPreSerialized >> TestProtocols::TestConnectProtocol >> TInterconnectTest::TestBlobEventPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventUpToMebibytes >> TestProtocols::TestConnectProtocol [GOOD] >> TestProtocols::TestHTTPCollected |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::SchemeQuotas-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:24:42.661509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:24:42.661597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:42.661635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:24:42.661692Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:24:42.661732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:24:42.661760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:24:42.665904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:42.666037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:24:42.666959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:24:42.667259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:24:42.843362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:24:42.843416Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:42.867538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:24:42.875293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:24:42.875541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:24:42.882761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:24:42.883035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:24:42.883858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:24:42.884137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:24:42.887644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:42.887836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:24:42.889174Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:24:42.889239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:42.889334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:24:42.889383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:24:42.889456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:24:42.889602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:24:43.112532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:43.113561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:43.113707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:43.113781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:43.115678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:43.115808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:43.115873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:43.116000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:43.116078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:43.116152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:43.116244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:43.116326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:43.116434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:43.116508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:43.116589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 6 2026-01-04T02:25:13.170262Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:1159:3064], at schemeshard: 72075186233409546, txId: 116, path id: 1 2026-01-04T02:25:13.170323Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:1159:3064], at schemeshard: 72075186233409546, txId: 116, path id: 39 2026-01-04T02:25:13.171236Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2026-01-04T02:25:13.171329Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 116:0 ProgressState, operation type: TxCreateTable, at tablet# 72075186233409546 2026-01-04T02:25:13.171704Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 116:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2026-01-04T02:25:13.172519Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 19 PathOwnerId: 72075186233409546, cookie: 116 2026-01-04T02:25:13.172654Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 1 Version: 19 PathOwnerId: 72075186233409546, cookie: 116 2026-01-04T02:25:13.172710Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 116 2026-01-04T02:25:13.172761Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 19 2026-01-04T02:25:13.172825Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 14 2026-01-04T02:25:13.173677Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 39 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2026-01-04T02:25:13.173761Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 3 LocalPathId: 39 Version: 1 PathOwnerId: 72075186233409546, cookie: 116 2026-01-04T02:25:13.173792Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation 
in-flight, at schemeshard: 72075186233409546, txId: 116 2026-01-04T02:25:13.182234Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 116, pathId: [OwnerId: 72075186233409546, LocalPathId: 39], version: 1 2026-01-04T02:25:13.182311Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 39] was 4 2026-01-04T02:25:13.182448Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 116, ready parts: 0/1, is published: true 2026-01-04T02:25:13.185522Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72057594037968897 cookie: 72075186233409546:11 msg type: 268697601 2026-01-04T02:25:13.185744Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72057594037968897 2026-01-04T02:25:13.185827Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2026-01-04T02:25:13.186641Z node 7 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72075186233409546 OwnerIdx: 11 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } BindedChannels { StoragePoolName: "/dc-1/users/tenant-1:hdd" StoragePoolKind: "hdd" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2026-01-04T02:25:13.186882Z node 7 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72075186233409546, OwnerIdx 11, type DataShard, boot OK, tablet id 72075186233409556 2026-01-04T02:25:13.187210Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72075186233409546 message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2026-01-04T02:25:13.187278Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 116, shardIdx: 72075186233409546:11, partId: 0 2026-01-04T02:25:13.187422Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 116:0, at schemeshard: 72075186233409546, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 72075186233409556 Origin: 72057594037968897 2026-01-04T02:25:13.187498Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, at tabletId: 72075186233409546 2026-01-04T02:25:13.187590Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 116:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72075186233409546 OwnerIdx: 11 TabletID: 
72075186233409556 Origin: 72057594037968897 2026-01-04T02:25:13.187724Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 116:0 2 -> 3 2026-01-04T02:25:13.197443Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2026-01-04T02:25:13.208508Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 116 2026-01-04T02:25:13.210543Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 116:0, at schemeshard: 72075186233409546 2026-01-04T02:25:13.210943Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 116:0, at schemeshard: 72075186233409546 2026-01-04T02:25:13.211026Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_table.cpp:219: TCreateTable TConfigureParts operationId# 116:0 ProgressState at tabletId# 72075186233409546 2026-01-04T02:25:13.211145Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:239: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 seqNo: 3:8 2026-01-04T02:25:13.211698Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:255: TCreateTable TConfigureParts operationId# 116:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409556 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 1107 RawX2: 30064774095 } TxBody: "\n\342\004\n\007Table11\020\'\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\366\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 \020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001b\031\010\000J\025\n\005\n\003hdd\022\005\n\003hdd\032\005\n\003hdd\212\001&\010\000\022\004\010\001\020\000\022\004\010\002\020\001\022\004\010\003\020\001\032\004\010\001\020\000\032\004\010\002\020\001\032\004\010\003\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\n\000\220\000\000\020\000\001\020\':\004\010\003\020\010" TxId: 116 ExecLevel: 0 Flags: 0 SchemeShardId: 72075186233409546 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } SubDomainPathId: 1 2026-01-04T02:25:13.223661Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 116:0 from tablet: 72075186233409546 to tablet: 72075186233409556 cookie: 72075186233409546:11 msg type: 269549568 2026-01-04T02:25:13.223906Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 116, partId: 0, tablet: 72075186233409556 TestModificationResult got TxId: 116, wait until txId: 116 TestModificationResults wait txId: 117 2026-01-04T02:25:13.287228Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_0" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table12" Columns { Name: "key" Type: "Uint32" } Columns { Name: "Value" Type: "Utf8" } KeyColumnNames: "key" } } TxId: 117 TabletId: 72075186233409546 , at schemeshard: 72075186233409546 2026-01-04T02:25:13.291583Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 117, response: Status: StatusQuotaExceeded Reason: "Request exceeded a limit on the number of schema operations, try again later." TxId: 117 SchemeshardId: 72075186233409546, at schemeshard: 72075186233409546 2026-01-04T02:25:13.291985Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 117, database: /MyRoot/USER_0, subject: , status: StatusQuotaExceeded, reason: Request exceeded a limit on the number of schema operations, try again later., operation: CREATE TABLE, path: /MyRoot/USER_0/Table12 TestModificationResult got TxId: 117, wait until txId: 117 |83.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_shred/ydb-core-tx-schemeshard-ut_shred >> TestProtocols::TestResolveProtocol |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TestProtocols::TestHTTPCollected [GOOD] >> TInterconnectTest::TestTraceIdPassThrough >> TInterconnectTest::TestBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestBlobEventsThroughSubChannels >> test_scheme_load.py::TestSchemeLoad::test[create_and_drop_tables] [GOOD] >> TestProtocols::TestResolveProtocol [GOOD] >> TestProtocols::TestHTTPCollectedVerySlow >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] [GOOD] |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__SYNC-pk_types9-all_types9-index9-pgtimestamp--SYNC] [FAIL] |83.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |83.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/opt/ydb-core-kqp-ut-opt >> TInterconnectTest::TestTraceIdPassThrough [GOOD] >> TIcNodeCache::GetNodesInfoTest [GOOD] >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable+UseSink [GOOD] >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink >> TTopicApiDescribes::DescribeConsumer [GOOD] >> TLocksTest::Range_BrokenLock0 [GOOD] >> TLocksTest::Range_BrokenLock1 |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestTraceIdPassThrough [GOOD] |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> DataShardSnapshots::VolatileSnapshotCleanupOnReboot [GOOD] >> 
DataShardSnapshots::VolatileSnapshotCleanupOnFinish |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventsThroughSubChannels [GOOD] |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestManyEvents >> TestProtocols::TestHTTPCollectedVerySlow [GOOD] >> TestProtocols::TestHTTPRequest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> TTopicApiDescribes::GetPartitionDescribe [GOOD] >> TestProtocols::TestHTTPRequest [GOOD] >> GroupWriteTest::WithRead [GOOD] >> TActorActivity::Basic [GOOD] >> ActorBootstrapped::TestBootstrapped >> ActorBootstrapped::TestBootstrapped [GOOD] >> ActorBootstrapped::TestBootstrappedParent |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TestProtocols::TestHTTPRequest [GOOD] >> ActorBootstrapped::TestBootstrappedParent [GOOD] >> TActorTracker::Basic |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> DataShardSnapshots::MvccSnapshotLockedWritesWithConflicts-UseSink [GOOD] >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TIcNodeCache::GetNodesInfoTest [GOOD] Test command err: 2026-01-04T02:24:53.106613Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326751788212211:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:53.106891Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:53.307248Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:24:53.340293Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:24:54.093758Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:54.093868Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:24:54.094282Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:54.113083Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:24:54.186496Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:54.276553Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:54.351609Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:54.363391Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:54.749533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:54.749633Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:54.852153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:54.852237Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:54.963052Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:55.027663Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:55.133238Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:55.152462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:55.177742Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:24:55.202629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:55.682503Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/00275d/r3tmp/yandexfu7Rbo.tmp 2026-01-04T02:24:55.682542Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/00275d/r3tmp/yandexfu7Rbo.tmp 2026-01-04T02:24:55.682699Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/00275d/r3tmp/yandexfu7Rbo.tmp 2026-01-04T02:24:55.682775Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:55.776605Z INFO: TTestServer started on Port 15086 GrpcPort 61307 PQClient connected to localhost:61307 2026-01-04T02:24:56.046574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:56.251547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:58.069977Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326751788212211:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:58.070100Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:25:02.043772Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326790442919316:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:02.043882Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:02.050194Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326790442919325:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:02.050289Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:02.054293Z node 1 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01ke3d4w11bj4b89gdwvqkxwxq", Request deadline has expired for 0.210454s seconds 2026-01-04T02:25:02.056584Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326790442919331:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:02.060492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:25:02.126058Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326790442919333:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T02:25:02.227714Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326790442919420:3076] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:25:03.447950Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637704 Duration# 0.492841s 2026-01-04T02:25:03.448015Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.492946s 2026-01-04T02:25:03.498963Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591326790442919431:2352], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:25:03.502879Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MzIzYmNhMWItMzVlN2I3MjgtYTc3Y2M4MWUtZjFiYzZiNmU=, ActorId: [1:7591326790442919315:2340], ActorState: ExecuteState, LegacyTraceId: 01ke3d51365bjqt650yd55dgtt, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:25:03.499987Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591326787359078873:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:25:03.502554Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZTBkMmQ4ZjgtYTg5NDlkNDEtYTIxNjdmYzMtNjU1ZWYwMWY=, ActorId: [2:7591326787359078854:2309], ActorState: ExecuteState, LegacyTraceId: 01ke3d518ke2f598ebdav9bs4f, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:25:03.505220Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:25:03.507951Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." 
end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:25:03.664813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:03.989897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:04.272903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7591326803327821827:3436] 2026-01-04T02:25:09.343763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:25:09.343794Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded === CheckClustersList. Ok |83.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TActorTracker::Basic [GOOD] >> TTopicApiDescribes::DescribeTopic [GOOD] >> GroupWriteTest::TwoTables [GOOD] >> KqpSnapshotIsolation::TReadOwnChangesOltp [GOOD] >> KqpSnapshotIsolation::TReadOwnChangesOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WithRead [GOOD] Test command err: RandomSeed# 12205721846005169652 2026-01-04T02:25:02.563758Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 3 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-04T02:25:02.646612Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-04T02:25:02.646703Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 1 going to send TEvBlock {TabletId# 3 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-04T02:25:02.665884Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-04T02:25:02.719549Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-04T02:25:02.731207Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 
Status# OK} 2026-01-04T02:25:18.685373Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2026-01-04T02:25:18.685494Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 3 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-04T02:25:18.847741Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 3 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 3 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_simple.py::TestSimple::test_multi[tablestores] [GOOD] |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest |83.4%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeConsumer [GOOD] Test command err: 2026-01-04T02:24:51.485376Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326740614705281:2262];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:51.485439Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:51.649270Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:24:51.634468Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:24:52.302130Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:52.302322Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:24:52.446029Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:52.526378Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:52.528201Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:24:52.595096Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:52.595186Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:52.660069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:52.660178Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:52.664764Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:52.664838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:52.743031Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:52.834501Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:24:52.878865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:52.924454Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:53.016685Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:53.016813Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:53.484345Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/0027a8/r3tmp/yandexb27gzO.tmp 2026-01-04T02:24:53.484369Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/0027a8/r3tmp/yandexb27gzO.tmp 2026-01-04T02:24:53.484532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/0027a8/r3tmp/yandexb27gzO.tmp 2026-01-04T02:24:53.484623Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:53.643112Z INFO: TTestServer started on Port 13740 GrpcPort 13856 PQClient connected to localhost:13856 2026-01-04T02:24:54.641606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:54.842600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:55.350648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:24:56.453872Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326740614705281:2262];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:56.453953Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:59.970922Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326778383111988:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:59.971039Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326778383112001:2313], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:59.971128Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:59.976360Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326778383112005:2315], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:59.976463Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:59.978623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:25:00.023443Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591326778383112004:2314], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-04T02:25:00.157398Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591326782678079330:2189] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:25:00.945676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:00.977375Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591326779269412228:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:25:00.978360Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=Y2UyZGI1NmEtMzQ2MTc5YTgtM2UwMzI4MDUtODk3OWVmNDA=, ActorId: [1:7591326779269412185:2336], ActorState: ExecuteState, LegacyTraceId: 01ke3d4z5ebvhdt0n01w1wy01m, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:25:00.980817Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:25:00.980502Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591326782678079337:2319], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:25:00.982366Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=MTQ4ZWE0OWItZGIyMzc2M2QtYjdlMTU2NTAtZDMwYTEyNGQ=, ActorId: [2:7591326778383111986:2309], ActorState: ExecuteState, LegacyTraceId: 01ke3d4z1v514sc89phhszrvff, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 s ... nanos: 271000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } partitions { partition_id: 13 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1767493514 nanos: 220000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_consumer_stats { last_read_time { seconds: 1767493514 nanos: 249000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } partitions { partition_id: 14 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1767493514 nanos: 229000000 } max_write_time_lag { } bytes_written { } partition_node_id: 1 } partition_consumer_stats { last_read_time { seconds: 1767493514 nanos: 260000000 } max_read_time_lag { } max_write_time_lag { } bytes_read { } max_committed_time_lag { } } } } } } 2026-01-04T02:25:15.013828Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326843693923933:2538] connected; active server actors: 1 2026-01-04T02:25:15.012806Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:158: new Describe consumer request 2026-01-04T02:25:15.012877Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:484: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" include_location: true 2026-01-04T02:25:15.013380Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7591326843693923931:2537]: Request location 2026-01-04T02:25:15.014074Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 0, NodeId 2, Generation 2 2026-01-04T02:25:15.014091Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 2 2026-01-04T02:25:15.014102Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 2, NodeId 2, Generation 2 2026-01-04T02:25:15.014115Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 2 2026-01-04T02:25:15.014126Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037899, PartitionId 4, NodeId 1, Generation 2 2026-01-04T02:25:15.014136Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 
5, NodeId 1, Generation 2 2026-01-04T02:25:15.014147Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 6, NodeId 1, Generation 2 2026-01-04T02:25:15.014159Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 7, NodeId 2, Generation 2 2026-01-04T02:25:15.014167Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 8, NodeId 2, Generation 2 2026-01-04T02:25:15.014173Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 9, NodeId 2, Generation 2 2026-01-04T02:25:15.014179Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 10, NodeId 1, Generation 2 2026-01-04T02:25:15.014188Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 11, NodeId 1, Generation 2 2026-01-04T02:25:15.014213Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 12, NodeId 2, Generation 2 2026-01-04T02:25:15.014239Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 13, NodeId 2, Generation 2 2026-01-04T02:25:15.014256Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 14, NodeId 1, Generation 2 2026-01-04T02:25:15.020278Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7591326843693923931:2537]: Got location 2026-01-04T02:25:15.023541Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326843693923933:2538] disconnected. 
2026-01-04T02:25:15.023596Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326843693923933:2538] disconnected; active server actors: 1 2026-01-04T02:25:15.023611Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326843693923933:2538] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1767493513835 tx_id: 281474976710670 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 1 generation: 2 } } } } } 2026-01-04T02:25:15.029000Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:158: new Describe consumer request 2026-01-04T02:25:15.029087Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:484: TDescribeConsumerActor for request path: "/Root/PQ//rt3.dc1--topic-x" consumer: "my-consumer" Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeConsumerResult] { self { name: "rt3.dc1--topic-x/my-consumer" owner: "root@builtin" type: TOPIC created_at { plan_step: 1767493513835 tx_id: 281474976710670 } } consumer { name: "shared/my-consumer" important: true read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } partitions { active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } 
} } } 2026-01-04T02:25:15.035095Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:158: new Describe consumer request 2026-01-04T02:25:15.035201Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:484: TDescribeConsumerActor for request path: "/Root/PQ//bad-topic" consumer: "my-consumer" include_stats: true include_location: true Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } |83.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TVPatchTests::FullPatchTest [GOOD] >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TActorTracker::Basic [GOOD] Test command err: ASYNC_DESTROYER ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::TwoTables [GOOD] Test command err: RandomSeed# 16958921555149376453 2026-01-04T02:25:09.118686Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058679074007041 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-04T02:25:09.118800Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058502699329537 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-04T02:25:09.148894Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-04T02:25:09.148970Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 1 going to send TEvBlock {TabletId# 72058679074007041 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-04T02:25:09.149130Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-04T02:25:09.149164Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 1 going to send TEvBlock {TabletId# 72058502699329537 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-04T02:25:09.153458Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-04T02:25:09.153573Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-04T02:25:09.175482Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-04T02:25:09.175580Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true 
IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-04T02:25:09.180077Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-04T02:25:09.180186Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-04T02:25:20.153457Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2026-01-04T02:25:20.153551Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058679074007041 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-04T02:25:20.153611Z 1 00h01m20.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058502699329537 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-04T02:25:20.184316Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058679074007041 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058679074007041 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} 2026-01-04T02:25:20.184449Z 1 00h01m20.010512s :BS_LOAD_TEST INFO: TabletId# 72058502699329537 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058502699329537 RecordGeneration# 2 PerGenerationCounter# 22 Channel# 0 Status# OK} |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::GetPartitionDescribe [GOOD] Test command err: 2026-01-04T02:24:55.174917Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326759064945378:2232];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:55.174971Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:55.517131Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:24:55.531343Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591326760957050395:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:55.531396Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:55.615153Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:24:56.182171Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:56.462732Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:56.486954Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:24:56.487014Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:56.527742Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:24:56.593301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:56.635506Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:56.639463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:56.639534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:56.864461Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:56.864401Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:56.911823Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:56.915127Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:56.924530Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:24:56.930080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:56.933732Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:57.261864Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.027224s 2026-01-04T02:24:57.486927Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/002603/r3tmp/yandex3F96GJ.tmp 2026-01-04T02:24:57.486948Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/002603/r3tmp/yandex3F96GJ.tmp 2026-01-04T02:24:57.487102Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/002603/r3tmp/yandex3F96GJ.tmp 2026-01-04T02:24:57.487188Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:57.661914Z INFO: TTestServer started on Port 11571 
GrpcPort 21570 PQClient connected to localhost:21570 2026-01-04T02:24:58.354665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:58.748552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:00.176731Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326759064945378:2232];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:25:00.176812Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:25:00.535733Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591326760957050395:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:25:00.535805Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:25:05.758921Z node 1 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01ke3d4ymj12ppe6j1ddp7wqzf", Request deadline has expired for 1.242096s seconds 2026-01-04T02:25:05.771290Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326802014619672:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:05.771429Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:05.771837Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326802014619684:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:05.771876Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326802014619685:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:05.771991Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:05.778189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:25:05.926262Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326802014619688:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T02:25:06.010958Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326806309587074:3086] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:25:06.491778Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591326806309587084:2358], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:25:06.495634Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591326803906723783:2323], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:25:06.498237Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZGI2YWU4OTItYjQ0M2ZmY2UtYTg4YzliOTAtYzBiMDEyMw==, ActorId: [2:7591326803906723766:2317], ActorState: ExecuteState, LegacyTraceId: 01ke3d54wm680ed2c8j4kz6d6g, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:25:06.500560Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:25:06.505490Z node 1 ... e 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037895] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:25:16.265388Z node 2 :PERSQUEUE INFO: pq_impl.cpp:600: [PQ: 72075186224037895] has a tx writes info 2026-01-04T02:25:16.265993Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037895][Partition][9][StateInit] bootstrapping 9 [2:7591326851151364866:2395] 2026-01-04T02:25:16.268310Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037895][Partition][2][StateInit] bootstrapping 2 [2:7591326851151364867:2395] 2026-01-04T02:25:16.274482Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:14:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:25:16.274584Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:11:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-01-04T02:25:16.274808Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037898][Partition][14][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 14 generation 2 [1:7591326849259261900:2507] 2026-01-04T02:25:16.274973Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72075186224037898][Partition][11][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 11 generation 2 [1:7591326849259261901:2507] 2026-01-04T02:25:16.280794Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037898, NodeId 1, Generation 2 2026-01-04T02:25:16.286553Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:25:16.288750Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:25:16.298557Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037892][Partition][3][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 3 generation 2 [2:7591326851151364858:2396] 2026-01-04T02:25:16.298721Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037892][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 0 generation 2 [2:7591326851151364859:2396] 2026-01-04T02:25:16.300203Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037892, NodeId 2, Generation 2 2026-01-04T02:25:16.311100Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:25:16.312963Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037897] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:25:16.312992Z node 2 :PERSQUEUE INFO: pq_impl.cpp:600: [PQ: 72075186224037897] has a tx writes info 2026-01-04T02:25:16.313537Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037897][Partition][7][StateInit] bootstrapping 7 [2:7591326851151364943:2397] 2026-01-04T02:25:16.315590Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037897][Partition][13][StateInit] bootstrapping 13 [2:7591326851151364944:2397] 2026-01-04T02:25:16.319604Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:25:16.319630Z node 2 :PERSQUEUE INFO: pq_impl.cpp:600: [PQ: 72075186224037894] has a tx writes info 2026-01-04T02:25:16.320139Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037894][Partition][8][StateInit] bootstrapping 8 [2:7591326851151364953:2398] 2026-01-04T02:25:16.327320Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:9:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-01-04T02:25:16.327799Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037895][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 2 generation 2 [2:7591326851151364867:2395] 2026-01-04T02:25:16.327937Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037895][Partition][9][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 9 generation 2 [2:7591326851151364866:2395] 2026-01-04T02:25:16.329729Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037895, NodeId 2, Generation 2 2026-01-04T02:25:16.330042Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037894][Partition][12][StateInit] bootstrapping 12 [2:7591326851151364954:2398] 2026-01-04T02:25:16.345278Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:8:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:25:16.345784Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:7:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:25:16.345878Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:13:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:25:16.346140Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037897][Partition][7][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 7 generation 2 [2:7591326851151364943:2397] 2026-01-04T02:25:16.346292Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037897][Partition][13][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 13 generation 2 [2:7591326851151364944:2397] 2026-01-04T02:25:16.347438Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037897, NodeId 2, Generation 2 2026-01-04T02:25:16.347720Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic-x:12:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-01-04T02:25:16.347973Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037894][Partition][8][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 8 generation 2 [2:7591326851151364953:2398] 2026-01-04T02:25:16.348092Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72075186224037894][Partition][12][StateInit] init complete for topic 'rt3.dc1--topic-x' partition 12 generation 2 [2:7591326851151364954:2398] 2026-01-04T02:25:16.348989Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037900][rt3.dc1--topic-x] TEvClientConnected TabletId 72075186224037894, NodeId 2, Generation 2 2026-01-04T02:25:17.142536Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:167: new Describe partition request 2026-01-04T02:25:17.142648Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1220: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 1 include_location: true 2026-01-04T02:25:17.142681Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1230: TDescribePartitionActor[1:7591326853554229329:2527]: Bootstrap 2026-01-04T02:25:17.143464Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7591326853554229329:2527]: Request location 2026-01-04T02:25:17.144988Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7591326853554229329:2527]: Got location Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } } } } 2026-01-04T02:25:17.149006Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:167: new Describe partition request 2026-01-04T02:25:17.149140Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1220: TDescribePartitionActor for request path: "/Root/PQ//rt3.dc1--topic-x" partition_id: 3 include_stats: true include_location: true 2026-01-04T02:25:17.149184Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1230: TDescribePartitionActor[1:7591326853554229332:2529]: Bootstrap 2026-01-04T02:25:17.149721Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7591326853554229332:2529]: Request location 2026-01-04T02:25:17.144171Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326853554229331:2528] connected; active server actors: 1 2026-01-04T02:25:17.144793Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 2 2026-01-04T02:25:17.148130Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326853554229331:2528] disconnected. 
2026-01-04T02:25:17.148165Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326853554229331:2528] disconnected; active server actors: 1 2026-01-04T02:25:17.148186Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326853554229331:2528] disconnected no session 2026-01-04T02:25:17.151184Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326853554229335:2531] connected; active server actors: 1 2026-01-04T02:25:17.153112Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 2 2026-01-04T02:25:17.153961Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7591326853554229332:2529]: Got location 2026-01-04T02:25:17.155538Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326853554229335:2531] disconnected. 2026-01-04T02:25:17.155573Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326853554229335:2531] disconnected; active server actors: 1 2026-01-04T02:25:17.155588Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326853554229335:2531] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribePartitionResult] { partition { partition_id: 3 active: true partition_stats { partition_offsets { } last_write_time { seconds: 1767493516 nanos: 257000000 } max_write_time_lag { } bytes_written { } partition_node_id: 2 } partition_location { node_id: 2 generation: 2 } } } } } 2026-01-04T02:25:17.158535Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:167: new Describe partition request 2026-01-04T02:25:17.158631Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1220: TDescribePartitionActor for request path: "/Root/PQ//bad-topic" include_stats: true include_location: true 2026-01-04T02:25:17.158668Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1230: TDescribePartitionActor[1:7591326853554229337:2532]: Bootstrap Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } >> TFqYdbTest::ShouldStatusToIssuesProcessExceptions [GOOD] >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest |83.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] >> TInterconnectTest::TestManyEvents [GOOD] >> TInterconnectTest::TestCrossConnect >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedBeforeRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestSpecialCase1 [GOOD] |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/ext_index/ut/unittest |83.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> KqpCost::IndexLookup-useSink |83.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TVPatchTests::PatchPartPutError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffWithEmptyDiffBuffer [GOOD] Test command err: Recv 65537 2026-01-04T02:25:21.356621Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-04T02:25:21.372718Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2026-01-04T02:25:21.372802Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2026-01-04T02:25:21.373045Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:675} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-04T02:25:21.373266Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 5 PatchedPartId# 5 XorReceiver# yes ParityPart# yes ForceEnd# no 2026-01-04T02:25:21.373349Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:100:0] PullingPart# 5 Send NKikimr::TEvBlobStorage::TEvVGet |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartPutError [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest >> TTopicApiDescribes::DescribeTopic [GOOD] Test command err: 2026-01-04T02:24:51.270422Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326742690579152:2266];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:51.270489Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:51.401649Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:24:51.508811Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:24:52.070215Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:52.070286Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 
2026-01-04T02:24:52.125768Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:52.216949Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:52.322361Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:24:52.373822Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:52.389917Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:52.454085Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:52.504182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:52.504366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:52.529286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:52.529356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:52.581075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:52.638097Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:24:52.638965Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:52.992443Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:52.996312Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.102720s 2026-01-04T02:24:52.996377Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.102794s 2026-01-04T02:24:52.997594Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:53.021081Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326742690578902:2082] 1767493491142715 != 1767493491142718 2026-01-04T02:24:53.039025Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:53.506398Z 
node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/002669/r3tmp/yandexcwiYEb.tmp 2026-01-04T02:24:53.506429Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/002669/r3tmp/yandexcwiYEb.tmp 2026-01-04T02:24:53.506592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/002669/r3tmp/yandexcwiYEb.tmp 2026-01-04T02:24:53.506666Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:53.738518Z INFO: TTestServer started on Port 13877 GrpcPort 13950 PQClient connected to localhost:13950 2026-01-04T02:24:54.585779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:55.097770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:56.297740Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326742690579152:2266];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:56.297855Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:25:04.208026Z node 1 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01ke3d4v1b9z25y6s51kk6r4c8", Request deadline has expired for 3.384188s seconds 2026-01-04T02:25:04.268738Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326798525155327:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:04.268979Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:04.270759Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326798525155366:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:04.271151Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326798525155365:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:04.271265Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:04.276830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:25:04.300423Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326798525155401:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:04.300827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:04.361396Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326798525155369:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T02:25:04.449081Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326798525155466:3133] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:25:05.081567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:05.096065Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591326798525155476:2365], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:25:05.098683Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YWI0NDc3Ny1kYWUzNWRiNC1mODA0YjY2MS01MjRjNmYyMQ==, ActorId: [1:7591326798525155325:2347], ActorState: ExecuteState, LegacyTraceId: 01ke3d536fce2q0qp8egyshhf0, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:25:05.103804Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591326795873772255:2320], status: SCHEME_ERROR, issues ... ats { min_last_write_time { seconds: 1767493513 nanos: 306000000 } max_write_time_lag { } bytes_written { } } } } } Describe topic with location 2026-01-04T02:25:17.503027Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:149: new Describe topic request 2026-01-04T02:25:17.503157Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1197: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x 2026-01-04T02:25:17.503978Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:668: DescribeTopicImpl [1:7591326854359732380:2556]: Request location 2026-01-04T02:25:17.504657Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326854359732382:2557] connected; active server actors: 1 2026-01-04T02:25:17.505110Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 0, NodeId 2, Generation 2 2026-01-04T02:25:17.505130Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037893, PartitionId 1, NodeId 1, Generation 2 2026-01-04T02:25:17.505144Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 2, NodeId 2, Generation 2 2026-01-04T02:25:17.505158Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037892, PartitionId 3, NodeId 2, Generation 2 2026-01-04T02:25:17.505188Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037899, PartitionId 4, NodeId 1, Generation 2 2026-01-04T02:25:17.505201Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 5, NodeId 1, Generation 2 2026-01-04T02:25:17.505213Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The 
partition location was added to response: TabletId 72075186224037893, PartitionId 6, NodeId 1, Generation 2 2026-01-04T02:25:17.505228Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 7, NodeId 2, Generation 2 2026-01-04T02:25:17.505239Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 8, NodeId 1, Generation 2 2026-01-04T02:25:17.505250Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037895, PartitionId 9, NodeId 2, Generation 2 2026-01-04T02:25:17.505261Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037896, PartitionId 10, NodeId 1, Generation 2 2026-01-04T02:25:17.505271Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 11, NodeId 2, Generation 2 2026-01-04T02:25:17.505281Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037894, PartitionId 12, NodeId 1, Generation 2 2026-01-04T02:25:17.505294Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037897, PartitionId 13, NodeId 2, Generation 2 2026-01-04T02:25:17.505306Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:611: [72075186224037900][rt3.dc1--topic-x] The partition location was added to response: TabletId 72075186224037898, PartitionId 14, NodeId 2, Generation 2 2026-01-04T02:25:17.505941Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:761: DescribeTopicImpl [1:7591326854359732380:2556]: Got location 2026-01-04T02:25:17.508046Z node 2 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326854359732382:2557] disconnected. 
2026-01-04T02:25:17.508078Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326854359732382:2557] disconnected; active server actors: 1 2026-01-04T02:25:17.508093Z node 2 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037900][rt3.dc1--topic-x] pipe [1:7591326854359732382:2557] disconnected no session Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1767493512729 tx_id: 281474976710672 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 1 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 2 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 3 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 4 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 5 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 6 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 7 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 8 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 9 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 10 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 11 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 12 active: true partition_location { node_id: 1 generation: 2 } } partitions { partition_id: 13 active: true partition_location { node_id: 2 generation: 2 } } partitions { partition_id: 14 active: true partition_location { node_id: 2 generation: 2 } } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } } } } Describe topic with no stats or location 2026-01-04T02:25:17.511873Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:149: new Describe topic request 2026-01-04T02:25:17.512016Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1197: Describe topic actor for path /Root/PQ//rt3.dc1--topic-x Got response: operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Topic.DescribeTopicResult] { self { name: "rt3.dc1--topic-x" owner: "root@builtin" type: TOPIC created_at { plan_step: 1767493512729 tx_id: 281474976710672 } } partitioning_settings { min_active_partitions: 15 max_active_partitions: 1 auto_partitioning_settings { strategy: AUTO_PARTITIONING_STRATEGY_DISABLED partition_write_speed { stabilization_window { seconds: 300 } up_utilization_percent: 80 down_utilization_percent: 20 } } } partitions { 
active: true } partitions { partition_id: 1 active: true } partitions { partition_id: 2 active: true } partitions { partition_id: 3 active: true } partitions { partition_id: 4 active: true } partitions { partition_id: 5 active: true } partitions { partition_id: 6 active: true } partitions { partition_id: 7 active: true } partitions { partition_id: 8 active: true } partitions { partition_id: 9 active: true } partitions { partition_id: 10 active: true } partitions { partition_id: 11 active: true } partitions { partition_id: 12 active: true } partitions { partition_id: 13 active: true } partitions { partition_id: 14 active: true } retention_period { seconds: 64800 } partition_write_speed_bytes_per_second: 2097152 partition_write_burst_bytes: 2097152 attributes { key: "__max_partition_message_groups_seqno_stored" value: "6000000" } attributes { key: "_message_group_seqno_retention_period_ms" value: "1382400000" } consumers { name: "shared/user" read_from { } attributes { key: "_service_type" value: "data-streams" } streaming_consumer_type { } } } } } Describe bad topic 2026-01-04T02:25:17.521875Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:149: new Describe topic request 2026-01-04T02:25:17.521993Z node 1 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1197: Describe topic actor for path /Root/PQ//bad-topic Got response: operation { ready: true status: SCHEME_ERROR issues { message: "path \'Root/PQ/bad-topic\' does not exist or you do not have access rights" issue_code: 500018 severity: 1 } } |83.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> ColumnBuildTest::Initiating_Failed >> TestShred::ShredManualLaunch >> TestShred::ManualLaunch3Cycles ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartPutError [GOOD] Test command err: Recv 65537 2026-01-04T02:25:22.996864Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-04T02:25:23.012571Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-01-04T02:25:23.012677Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-04T02:25:23.012963Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2026-01-04T02:25:23.013035Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-04T02:25:23.013299Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 
2026-01-04T02:25:23.013367Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2026-01-04T02:25:23.013461Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2026-01-04T02:25:23.013627Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:628} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR 2026-01-04T02:25:23.013683Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VPutResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2026-01-04T02:25:23.013765Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |83.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> ExternalIndex::Simple >> TestShred::ShredWithMerge >> TestShred::SimpleTestForTables >> TestShred::SimpleTestForAllSupportedObjects >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.4%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TestShred::Run3CyclesForTables |83.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> TFqYdbTest::ShouldStatusToIssuesProcessEmptyIssues [GOOD] >> TestShred::ShredWithCopyTable |83.4%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> TestShred::SchemeShardCounterDoesNotConsistWithBscCounter >> TestShred::SimpleTestForTopic >> DataShardSnapshots::VolatileSnapshotCleanupOnFinish [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout >> DataShardSnapshots::LockedWriteCleanupOnCopyTable-UseSink [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplit >> TestShred::ShredWithSplit |83.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut |83.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut |83.4%| [LD] {RESULT} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/core-grpc_services-grpc_request_check_actor_ut |83.5%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TestShred::ShredManualLaunch [GOOD] |83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats |83.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats |83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_partition_stats/ydb-core-tx-schemeshard-ut_partition_stats >> ColumnBuildTest::Initiating_Failed [GOOD] >> ColumnBuildTest::Initiating_DisableFlag |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.5%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredManualLaunch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:25:24.402534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:25:24.402640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:24.402689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:25:24.402737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:25:24.402788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:25:24.402817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:25:24.402902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:24.402999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:25:24.403843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:25:24.404146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:25:24.491061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:25:24.491113Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:24.503818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:25:24.503957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:25:24.504069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:25:24.517650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:25:24.517922Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:25:24.518723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:25:24.519016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:25:24.523205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:24.523439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:25:24.524757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:25:24.524825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:24.524972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:25:24.525051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:25:24.525112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:25:24.525302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:25:24.702253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:24.706492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:24.706653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:24.706757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: 
true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:24.706830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:24.706889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:24.706947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:24.707037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:24.707132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:24.707204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:24.707289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:24.707453Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:24.707513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:24.707583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:24.707678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553241, Sender [1:1086:3003], Recipient [1:861:2815]: NKikimrTxDataShard.TEvVacuumResult VacuumGeneration: 1 TabletId: 72075186233409550 Status: OK 2026-01-04T02:25:29.961751Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5465: StateWork, processing event TEvDataShard::TEvVacuumResult 2026-01-04T02:25:29.961837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__tenant_shred_manager.cpp:546: TTxCompleteShredShard Execute at schemestard: 72075186233409546 2026-01-04T02:25:29.961904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__tenant_shred_manager.cpp:309: [TenantShredManager] [Finished] Shred is completed for pathId# [OwnerId: 72075186233409546, LocalPathId: 32], tabletId# 72075186233409550, shardIdx# 72075186233409546:5 in# 87 ms, next wakeup in# 14.913000s, rate# 1, in queue# 0 shards, running# 0 shards at schemeshard 72075186233409546 2026-01-04T02:25:29.961957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__tenant_shred_manager.cpp:326: [TenantShredManager] Shred in shards is completed. 
Send response to root schemeshard 2026-01-04T02:25:29.961983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__tenant_shred_manager.cpp:349: [TenantShredManager] Complete: Generation# 1 2026-01-04T02:25:29.965096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__tenant_shred_manager.cpp:571: TTxCompleteShredShard Complete at schemestard: 72075186233409546, NeedResponseComplete# true 2026-01-04T02:25:29.965561Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:3059:4672], Recipient [1:697:2681]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:29.965602Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:29.965628Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-04T02:25:29.965737Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125514, Sender [1:861:2815], Recipient [1:697:2681]: NKikimrScheme.TEvTenantShredResponse PathId { OwnerId: 72057594046678944 LocalId: 38 } Generation: 1 Status: COMPLETED 2026-01-04T02:25:29.965768Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5468: StateWork, processing event TEvSchemeShard::TEvTenantShredResponse 2026-01-04T02:25:29.965843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:591: TTxCompleteShredTenant Execute at schemeshard: 72057594046678944 2026-01-04T02:25:29.965897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:312: [RootShredManager] [Finished] Shred completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 38] in# 88 ms, next wakeup# 599.912000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2026-01-04T02:25:29.965949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:327: [RootShredManager] Shred in tenants is completed. 
Send request to BS controller 2026-01-04T02:25:29.966142Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877760, Sender [1:3058:4671], Recipient [1:861:2815]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [1:3059:4672] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2026-01-04T02:25:29.966173Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5343: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-04T02:25:29.966218Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6117: Handle TEvClientConnected, tabletId: 72057594046678944, status: OK, at schemeshard: 72075186233409546 2026-01-04T02:25:29.975585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2026-01-04T02:25:29.975649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-04T02:25:29.976157Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877760, Sender [1:3063:4676], Recipient [1:697:2681]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:3064:4677] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-04T02:25:29.976207Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5343: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-04T02:25:29.976236Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6117: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2026-01-04T02:25:29.976394Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:705:2687], Recipient [1:697:2681]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2026-01-04T02:25:29.976431Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:29.976466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:29.976530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:29.976598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-04T02:25:29.976662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-04T02:25:29.976721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-04T02:25:31.054150Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:697:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:31.054210Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:31.054248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 
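The TEvControllerShredResponse records in this test carry the controller's shred progress as Progress10k: the record just above reports 0 (logged as 0%), the following ones report 5000 (50%) and finally 10000 with Completed: true, at which point the root shred manager marks the cycle complete. A small sketch, assuming only that Progress10k is a fraction of 10,000 as those records suggest:

    import re

    # Pull generation, completion flag and progress (in percent) out of a
    # TEvControllerShredResponse record as it appears in this log.
    PROGRESS = re.compile(
        r"CurrentGeneration: (\d+) Completed: (true|false) Progress10k: (\d+)"
    )

    def shred_progress(record: str):
        m = PROGRESS.search(record)
        if m is None:
            return None
        generation = int(m.group(1))
        completed = m.group(2) == "true"
        percent = int(m.group(3)) / 100.0  # 10,000 units == 100%
        return generation, completed, percent

    assert shred_progress(
        "NKikimrBlobStorage.TEvControllerShredResponse "
        "CurrentGeneration: 1 Completed: false Progress10k: 5000"
    ) == (1, False, 50.0)
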
2026-01-04T02:25:31.054456Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:697:2681], Recipient [1:697:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:31.054496Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:31.054627Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:705:2687], Recipient [1:697:2681]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2026-01-04T02:25:31.054664Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:31.054690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:31.054743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:31.054780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-04T02:25:31.054827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-04T02:25:31.054874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-04T02:25:31.774544Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:697:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:31.774625Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:31.774712Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:697:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:31.774741Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:31.774778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-04T02:25:31.774917Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:697:2681], Recipient [1:697:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:31.774976Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:31.775145Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:705:2687], Recipient [1:697:2681]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-04T02:25:31.775192Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:31.775220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:31.775297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:31.775331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-04T02:25:31.775365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2026-01-04T02:25:31.779392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-04T02:25:31.780185Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:3113:4726], Recipient [1:697:2681]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:31.780252Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:31.780290Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-04T02:25:31.780382Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:682:2672], Recipient [1:697:2681]: NKikimrScheme.TEvShredInfoRequest 2026-01-04T02:25:31.780415Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-04T02:25:31.780448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/sqs_topic_ut/unittest >> TestSqsTopicHttpProxy::TestReceiveMessageGroup [FAIL] Test command err: 2026-01-04T02:22:10.298614Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326051767034770:2266];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:10.298663Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:22:10.804404Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:22:10.804508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:22:10.876590Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:22:11.006251Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:22:11.077982Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326051767034535:2081] 1767493330220774 != 1767493330220777 2026-01-04T02:22:11.090551Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:22:11.300816Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for 
task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:22:11.300900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:22:11.359032Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:22:11.359057Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:22:11.359064Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:22:11.359143Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:22:11.738500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:22:11.748430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:22:11.993380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:22:12.000678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:22:12.003041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:22:12.093890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:12.339787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:12.482578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:12.551890Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:12.607685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:12.650388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:12.702196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:12.735206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:12.777719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:12.831427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:14.750423Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326068946905375:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:14.750548Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:14.751141Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326068946905387:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:14.751198Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326068946905388:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:14.751555Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:22:14.755606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:22:14.770864Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326068946905391:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:22:14.856160Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326068946905442:3105] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:22:15.296036Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326051767034770:2266];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:22:15.296171Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:22:15.520851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:15.580216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:15.652687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:22:15.739013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propo ... 
eadlinePolicy: 0 DeletedByMovedToDLQ: 0 CPUUsage: 34 2026-01-04T02:23:30.320939Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:30.320980Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:30.320994Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:30.321013Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:30.321026Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:23:30.421112Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:30.421144Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:30.421166Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:30.421184Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:30.421196Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:23:30.526711Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:30.526757Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:30.526770Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:30.526787Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:30.526800Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:23:30.626846Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:30.626881Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:30.626894Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:30.626911Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:30.626925Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:23:30.727906Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:30.727938Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:30.727952Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: 
[72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:30.727968Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:30.727981Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:23:30.830333Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:30.830364Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:30.830377Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:30.830394Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:30.830406Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:23:30.934277Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:30.934309Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:30.934321Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:30.934339Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:30.934351Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:23:31.037252Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:31.037286Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:31.037309Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:31.037328Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:31.037341Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:23:31.138180Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:31.138212Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:31.138228Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:31.138246Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:31.138261Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:23:31.242456Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:31.242488Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:31.242502Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:31.242518Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:31.242531Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist 2026-01-04T02:23:31.278352Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:72: [72075186224037907][Partition][0][StateIdle] Handle TEvPQ::TEvMLPConsumerState Consumer: "consumer" CountersValues: 0 CountersValues: 0 CountersValues: 0 CountersValues: 100 CountersValues: 0 CountersValues: 0 CountersValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLocksValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 MessageLockingDurationValues: 0 DeletedByRetentionPolicy: 0 DeletedByDeadlinePolicy: 0 DeletedByMovedToDLQ: 0 CPUUsage: 38 2026-01-04T02:23:31.346002Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:23:31.346036Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:31.346050Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:23:31.346083Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:23:31.346097Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist assertion failed at ydb/core/http_proxy/ut/datastreams_fixture/datastreams_fixture.cpp:281, NJson::TJsonMap THttpProxyTestMock::SendJsonRequest(TString, NJson::TJsonMap, ui32): (res.HttpCode == expectedHttpCode) failed: (400 != 200) TBackTrace::Capture()+28 (0x1BE2395C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1C31185C) THttpProxyTestMock::SendJsonRequest(TBasicString>, NJson::TJsonMap, unsigned int)+4019 (0x39113163) THttpProxyTestMock::ReceiveMessage(NJson::TJsonMap, unsigned int)+349 (0x1BA215ED) NTestSuiteTestSqsTopicHttpProxy::TTestCaseTestReceiveMessageGroup::Execute_(NUnitTest::TTestContext&)+12882 (0x1BA34932) std::__y1::__function::__func, void ()>::operator()()+280 (0x1BA583A8) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1C340C9A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1C318538) 
NTestSuiteTestSqsTopicHttpProxy::TCurrentTest::Execute()+1229 (0x1BA574AD)
NUnitTest::TTestFactory::Execute()+2176 (0x1C319CF0)
NUnitTest::RunMain(int, char**)+5805 (0x1C33AAFD)
??+0 (0x7F1C7B12DD90)
__libc_start_main+128 (0x7F1C7B12DE40)
_start+41 (0x19142029)
2026-01-04T02:23:31.445990Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][0][StateIdle] Process user action and tx events
2026-01-04T02:23:31.446023Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0
2026-01-04T02:23:31.446045Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][0][StateIdle] Process user action and tx pending commits
2026-01-04T02:23:31.446062Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0
2026-01-04T02:23:31.446077Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][0][StateIdle] Try persist
|83.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest
|83.5%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/sqs_topic_ut/unittest
>> TestShred::SimpleTestForTables [GOOD]
|83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots
|83.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots
|83.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_pq_reboots/ydb-core-tx-schemeshard-ut_pq_reboots
>> KqpCost::IndexLookup-useSink [GOOD]
>> TestShred::SimpleTestForAllSupportedObjects [GOOD]
>> TestShred::ShredWithMerge [GOOD]
|83.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut
|83.5%| [LD] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut
|83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/rm_service/ut/ydb-core-kqp-rm_service-ut
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::SimpleTestForTables [GOOD]
Test command err:
Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143]
IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143]
Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143]
2026-01-04T02:25:26.023090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1
2026-01-04T02:25:26.023170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10
2026-01-04T02:25:26.023215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s
2026-01-04T02:25:26.023247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration
2026-01-04T02:25:26.023278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000
2026-01-04T02:25:26.023333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874:
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:25:26.023404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:26.023466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:25:26.024825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:25:26.025107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:25:26.164791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:25:26.164859Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:26.199859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:25:26.204424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:25:26.204602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:25:26.211824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:25:26.212056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:25:26.212833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:25:26.213062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:25:26.216378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:26.216566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:25:26.217742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:25:26.217840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:26.218006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:25:26.218076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:25:26.218150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:25:26.218297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2026-01-04T02:25:26.523226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.525018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.525207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.525298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.525411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.525483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.525544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.525613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.525682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.525768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.527183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.527336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.527430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.527499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.527598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
UG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2026-01-04T02:25:33.181245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-04T02:25:33.181629Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877760, Sender [1:2752:4429], Recipient [1:685:2669]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:2753:4430] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-04T02:25:33.181672Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5343: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-04T02:25:33.181713Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6117: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2026-01-04T02:25:33.181866Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:693:2675], Recipient [1:685:2669]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2026-01-04T02:25:33.182125Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:33.182159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:33.182219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:33.182252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-04T02:25:33.182306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-04T02:25:33.182366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-04T02:25:33.696953Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:851:2804]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:33.697024Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:33.697122Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1307:3193]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:33.697148Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:33.697204Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:685:2669]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:33.697227Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:33.697286Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:685:2669], Recipient [1:685:2669]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:33.697334Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:33.697420Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:851:2804], Recipient [1:851:2804]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:33.697444Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:33.697492Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1307:3193], Recipient [1:1307:3193]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:33.697535Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:33.742453Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:685:2669]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:33.742548Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:33.742623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-04T02:25:33.742912Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:693:2675], Recipient [1:685:2669]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2026-01-04T02:25:33.742986Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:33.743013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:33.743073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:33.743112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-04T02:25:33.743155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-04T02:25:33.743205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-04T02:25:34.156965Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:685:2669]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:34.157039Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:34.157110Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:851:2804]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:34.157133Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:34.157179Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1307:3193]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:34.157209Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:34.157271Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:851:2804], Recipient [1:851:2804]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:34.157300Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:34.157405Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1307:3193], Recipient [1:1307:3193]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:34.157433Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:34.157492Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:685:2669], Recipient [1:685:2669]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:34.157518Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:34.203835Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:685:2669]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:34.203914Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:34.203943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-04T02:25:34.204233Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:693:2675], Recipient [1:685:2669]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-04T02:25:34.204275Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:34.204302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:34.204364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:34.204405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-04T02:25:34.204494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.937000s, Timestamp# 1970-01-01T00:00:05.193000Z 2026-01-04T02:25:34.204535Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2026-01-04T02:25:34.210982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-04T02:25:34.211632Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:2772:4449], Recipient [1:685:2669]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:34.211684Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:34.211718Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-04T02:25:34.211833Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:670:2660], Recipient [1:685:2669]: NKikimrScheme.TEvShredInfoRequest 2026-01-04T02:25:34.211873Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-04T02:25:34.211908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::SimpleTestForAllSupportedObjects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:25:26.088375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:25:26.088757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:26.088808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:25:26.088847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:25:26.088880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:25:26.088939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:25:26.089020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:26.089113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:25:26.090282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:25:26.090579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:25:26.214463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:25:26.214522Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:26.240690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:25:26.245231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:25:26.245438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:25:26.256224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:25:26.256497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:25:26.257230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:25:26.257473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:25:26.261238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:26.261437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:25:26.262636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:25:26.262705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:26.262855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:25:26.262908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:25:26.262969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:25:26.263112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:25:26.592001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.603393Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.603624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.603761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.603911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.603995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.604055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.604342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.604463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.604666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.604805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.605001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.605114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.605195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.605317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
UG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2026-01-04T02:25:34.101678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-04T02:25:34.102109Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877760, Sender [1:3118:4731], Recipient [1:693:2677]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:3119:4732] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-04T02:25:34.102169Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5343: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-04T02:25:34.102200Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6117: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2026-01-04T02:25:34.102348Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:700:2682], Recipient [1:693:2677]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2026-01-04T02:25:34.102379Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:34.102416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:34.102474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:34.102520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-04T02:25:34.102584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-04T02:25:34.102638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-04T02:25:34.661288Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1431:3293]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:34.661379Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:34.661491Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:864:2815]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:34.661532Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:34.661607Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:693:2677]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:34.661632Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:34.661701Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:693:2677], Recipient [1:693:2677]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:34.661730Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:34.661823Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1431:3293], Recipient [1:1431:3293]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:34.661851Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:34.661952Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:864:2815], Recipient [1:864:2815]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:34.661983Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:34.712862Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:693:2677]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:34.712941Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:34.712984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-04T02:25:34.713239Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:700:2682], Recipient [1:693:2677]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2026-01-04T02:25:34.713279Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:34.713311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:34.713379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:34.713422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-04T02:25:34.713491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-04T02:25:34.713567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-04T02:25:35.166165Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:693:2677]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.166231Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.166301Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1431:3293]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.166326Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.166377Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:864:2815]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.166400Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.166461Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1431:3293], Recipient [1:1431:3293]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.166504Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.166592Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:864:2815], Recipient [1:864:2815]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.166634Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.166730Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:693:2677], Recipient [1:693:2677]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.166762Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.212528Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:693:2677]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:35.212602Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:35.212640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-04T02:25:35.212829Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:700:2682], Recipient [1:693:2677]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-04T02:25:35.212864Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:35.212892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:35.212952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:35.212984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-04T02:25:35.213046Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.919000s, Timestamp# 1970-01-01T00:00:05.216000Z 2026-01-04T02:25:35.213088Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2026-01-04T02:25:35.216882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-04T02:25:35.217558Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:3138:4751], Recipient [1:693:2677]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:35.217619Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:35.217656Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-04T02:25:35.217878Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:678:2668], Recipient [1:693:2677]: NKikimrScheme.TEvShredInfoRequest 2026-01-04T02:25:35.217922Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-04T02:25:35.217965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |83.5%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 5300, MsgBus: 63766 2026-01-04T02:25:22.974545Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326876850637260:2240];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:25:22.974624Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:25:23.029552Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:25:23.327713Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:25:23.327824Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:25:23.331674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:25:23.379641Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:25:23.432576Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326876850637026:2081] 1767493522912379 != 1767493522912382 2026-01-04T02:25:23.441122Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:23.485306Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-04T02:25:23.612020Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:25:23.646213Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:25:23.646235Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:25:23.646241Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:25:23.646395Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:25:23.975619Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:25:24.328586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:25:24.345764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:25:24.431125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:24.612935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:24.804695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:25.021237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:27.969912Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326876850637260:2240];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:25:27.969998Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:25:28.274164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7591326902620442686:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:28.274320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:28.275008Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326902620442695:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:28.275092Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:28.648754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:28.711876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:28.794345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:28.880555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:28.960303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:29.042155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:29.135985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:29.219553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:29.354215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326906915410872:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:29.354294Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:29.354744Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326906915410877:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:29.354782Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326906915410878:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:29.354809Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:29.359407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:25:29.382014Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326906915410881:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:25:29.471918Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326906915410932:3783] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:25:31.782553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 |83.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredWithMerge [GOOD] Test command err: 2026-01-04T02:25:25.667386Z node 1 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:61:2102] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:61:2102] Leader for TabletID 72057594046678944 is [1:71:2106] sender: [1:75:2058] recipient: [1:61:2102] 2026-01-04T02:25:25.758483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:25:25.758583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:25.758629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:25:25.758666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:25:25.758723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:25:25.758752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:25:25.758818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:25.758891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:25:25.759716Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:25:25.760022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:25:25.963557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:25:25.963610Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:25.975738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:25:25.976207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:25:25.976392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:25:26.006326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:25:26.006548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:25:26.007224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:25:26.007496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:25:26.008735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:26.009049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:25:26.010806Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:25:26.010866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:26.010990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:25:26.011042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:25:26.011102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:25:26.011535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:25:26.367569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.371886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.372068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.372197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.372284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.372390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.372487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.372567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.372676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.372765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.372830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.372904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.373020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.373109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:26.373208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.s ... 
MeasureSelfResponseTime 2026-01-04T02:25:35.362429Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.362495Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.362591Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.362619Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.401361Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.401431Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.401548Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.401580Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.412844Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.412915Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.413005Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.413034Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.447787Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.447859Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.447957Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.447988Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.458401Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.458468Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.458557Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.458585Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.491986Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.492055Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.492176Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.492207Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.502841Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.502916Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.503029Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.503061Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.538305Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.538380Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:35.538497Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.538527Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:35.549492Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [1:1583:3401], Recipient [1:500:2462]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409551 TableLocalId: 32 Generation: 2 Round: 1 TableStats { DataSize: 10141461 RowCount: 99 IndexSize: 4463 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 
PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 10141461 IndexSize: 4463 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 3013 Memory: 90453 Storage: 10149823 } ShardState: 2 UserTablePartOwners: 72075186233409551 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2026-01-04T02:25:35.549566Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-04T02:25:35.549617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] state 'Ready' dataSize 10141461 rowCount 99 cpuUsage 0.3013 2026-01-04T02:25:35.549736Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] raw table stats: DataSize: 10141461 RowCount: 99 IndexSize: 4463 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 10141461 IndexSize: 4463 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-04T02:25:35.549779Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-04T02:25:35.561442Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:35.561528Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:35.561560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-04T02:25:35.561878Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:408:2401], Recipient [1:405:2399]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-04T02:25:35.561925Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:35.561959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:35.562038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:35.562071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-04T02:25:35.562143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 29.998000s, Timestamp# 1970-01-01T00:01:10.002000Z 2026-01-04T02:25:35.562180Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 20 s 2026-01-04T02:25:35.562707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-04T02:25:35.567271Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:1896:3656], Recipient [1:405:2399]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:35.567336Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:35.567375Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-04T02:25:35.567573Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:392:2392], Recipient [1:405:2399]: NKikimrScheme.TEvShredInfoRequest 2026-01-04T02:25:35.567630Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-04T02:25:35.567669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |83.6%| [TA] $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/{meta.json ... results_accumulator.log} |83.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TestShred::ManualLaunch3Cycles [GOOD] >> TestShred::ManualLaunch3CyclesWithNotConsistentCountersInSchemeShardAndBSC |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection |83.6%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/ydb-core-tx-schemeshard-ut_backup_collection >> DataShardSnapshots::MvccSnapshotLockedWritesWithReadConflicts [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TestShred::SchemeShardCounterDoesNotConsistWithBscCounter [GOOD] >> TestShred::Run3CyclesForTopics >> TestShred::ShredWithCopyTable [GOOD] >> ColumnBuildTest::Initiating_DisableFlag [GOOD] |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |83.6%| [LD] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/base/ut_board_subscriber/ydb-core-base-ut_board_subscriber |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |83.6%| [LD] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/aggregator/ut/ydb-core-statistics-aggregator-ut |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestShred::Run3CyclesForTables [GOOD] >> TestShred::Run3CyclesForAllSupportedObjects >> TestShred::SimpleTestForTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredWithCopyTable [GOOD] Test command err: 2026-01-04T02:25:26.844628Z node 1 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:61:2102] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:61:2102] Leader for TabletID 72057594046678944 is [1:71:2106] sender: [1:75:2058] recipient: [1:61:2102] 2026-01-04T02:25:26.894342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:25:26.894433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:26.894474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:25:26.894509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:25:26.894542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2026-01-04T02:25:26.894570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:25:26.894619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:26.894691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:25:26.895649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:25:26.896004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:25:27.043867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:25:27.043932Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:27.046238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:25:27.046742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:25:27.046920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:25:27.057869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:25:27.058129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:25:27.058802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:25:27.059089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:25:27.060347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:27.060517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:25:27.061844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:25:27.061899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:27.062012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:25:27.062075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:25:27.062121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:25:27.062511Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:25:27.544753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.551091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.551361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.551482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.551565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.551666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.551770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.551852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.551950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.552033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.552098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.552201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.552309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.552395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.552484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.s ... 
e_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409546:7 followerId=0, pathId: [OwnerId: 72075186233409546, LocalPathId: 33], pathId map=SimpleCopy, is column=0, is olap=0, RowCount 50, DataSize 5121950 2026-01-04T02:25:38.999825Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409552, followerId 0 2026-01-04T02:25:38.999867Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:235: [BackgroundCompaction] [Update] Skipped shard# 72075186233409546:7 with partCount# 1, rowCount# 50, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:50.000000Z at schemeshard 72075186233409546 2026-01-04T02:25:38.999897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-04T02:25:38.999965Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72075186233409546 2026-01-04T02:25:39.010626Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-04T02:25:39.010710Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-04T02:25:39.010743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409546, queue size# 0 2026-01-04T02:25:39.033638Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:39.033715Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:39.033866Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:39.033922Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:39.044436Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:39.044506Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:39.044629Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:39.044665Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:39.083418Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:39.083485Z node 1 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:39.083621Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:39.083659Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:39.095977Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:39.096053Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:39.096182Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:39.096218Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:39.138454Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:39.138527Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:39.138686Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:39.138722Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:39.149260Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:39.149343Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:39.149462Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:39.149497Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:39.185774Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:39.186265Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:39.186469Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:39.186507Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:39.198409Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:39.198475Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:39.198576Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:39.198611Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:39.232921Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:39.232986Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:39.233117Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:39.233148Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:39.246435Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:39.246505Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:39.246536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-04T02:25:39.246780Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:408:2401], Recipient [1:405:2399]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-04T02:25:39.246822Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:39.246851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:39.246916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:39.246948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-04T02:25:39.247015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 14.999500s, Timestamp# 1970-01-01T00:01:25.000500Z 2026-01-04T02:25:39.247058Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 35 s 2026-01-04T02:25:39.247657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-04T02:25:39.252256Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:2109:3822], Recipient [1:405:2399]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:39.252323Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:39.252365Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-04T02:25:39.252487Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:392:2392], Recipient [1:405:2399]: NKikimrScheme.TEvShredInfoRequest 2026-01-04T02:25:39.252523Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-04T02:25:39.252560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |83.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest |83.6%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |83.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |83.6%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Initiating_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:25:24.442931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:25:24.443134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:24.443186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:25:24.443230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:25:24.443288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2026-01-04T02:25:24.443350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:25:24.443441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:24.443553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:25:24.444542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:25:24.444954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:25:24.911404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:25:24.911497Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:24.954221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:25:24.954706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:25:24.954972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:25:24.970087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:25:24.970347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:25:24.971108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:25:24.971368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:25:24.992964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:24.993200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:25:24.994677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:25:24.994768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:24.994900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:25:24.994996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:25:24.995054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:25:24.995212Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:25:25.575823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.577003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.577174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.577257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.577345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.577409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.577474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.577542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.577612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.577705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.577838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.577925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.578014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.578093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.578176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 1474976730758 ready parts: 1/1 2026-01-04T02:25:39.434430Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2026-01-04T02:25:39.434456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-04T02:25:39.434487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730758, ready parts: 1/1, is published: true 2026-01-04T02:25:39.434549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1762:3628] message: TxId: 281474976730758 2026-01-04T02:25:39.434591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-04T02:25:39.434622Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730758:0 2026-01-04T02:25:39.434650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730758:0 2026-01-04T02:25:39.434730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-04T02:25:39.450017Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730758 2026-01-04T02:25:39.450132Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730758 2026-01-04T02:25:39.450211Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730758 2026-01-04T02:25:39.450351Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusPreconditionFailed, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730758 2026-01-04T02:25:39.459112Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: 
Execute: 105 Rejection_Unlocking 2026-01-04T02:25:39.459299Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusPreconditionFailed, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:25:39.459370Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-01-04T02:25:39.468488Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2026-01-04T02:25:39.468656Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusPreconditionFailed, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:25:39.468701Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-04T02:25:39.468862Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:25:39.468911Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1822:3676] TestWaitNotification: OK eventTxId 105 2026-01-04T02:25:39.469592Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-04T02:25:39.469932Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At Initiating state got unsuccess propose result, status: StatusPreconditionFailed, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-01-04T02:25:39.478226Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-04T02:25:39.478507Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 319us result status StatusSuccess 2026-01-04T02:25:39.478960Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 
LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest |83.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |83.6%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink |83.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |83.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::SimpleTestForTopic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:25:30.029277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:25:30.029413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:30.029464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:25:30.029516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:25:30.029563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:25:30.029593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:25:30.029658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:30.029725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:25:30.038944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:25:30.039344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:25:30.290711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:25:30.290780Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:30.334938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:25:30.335171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:25:30.335351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:25:30.382780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:25:30.383000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:25:30.383707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:25:30.383979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:25:30.390459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:30.390674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:25:30.391912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:25:30.391971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:30.392119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:25:30.392181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:25:30.392227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:25:30.392386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2026-01-04T02:25:30.707345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:30.708684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:30.708841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:30.708955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:30.709044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:30.709111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:30.709182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:30.709283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:30.709395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:30.709528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:30.709614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:30.709708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:30.709798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:30.770758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:30.770921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
UG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2026-01-04T02:25:39.372563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-04T02:25:39.372975Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877760, Sender [1:2050:3863], Recipient [1:697:2681]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594037932033 Status: OK ServerId: [1:2051:3864] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-04T02:25:39.373056Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5343: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-04T02:25:39.373092Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6117: Handle TEvClientConnected, tabletId: 72057594037932033, status: OK, at schemeshard: 72057594046678944 2026-01-04T02:25:39.373232Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:705:2687], Recipient [1:697:2681]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 0 2026-01-04T02:25:39.373269Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:39.373306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:39.373380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:39.373418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-04T02:25:39.373479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-04T02:25:39.373534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-04T02:25:40.125627Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:864:2815]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:40.125703Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:40.125780Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1245:3129]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:40.125826Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:40.125884Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:697:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:40.125916Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:40.125969Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:864:2815], Recipient [1:864:2815]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:40.125999Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:40.126081Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1245:3129], Recipient [1:1245:3129]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:40.126112Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:40.126164Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:697:2681], Recipient [1:697:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:40.126189Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:40.151486Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:697:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:40.151569Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:40.151611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-04T02:25:40.151821Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:705:2687], Recipient [1:697:2681]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: false Progress10k: 5000 2026-01-04T02:25:40.151854Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:40.151884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:40.151943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:40.151999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-04T02:25:40.152062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-04T02:25:40.152131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-04T02:25:40.867776Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:697:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:40.867836Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:40.867913Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:864:2815]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:40.867953Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:40.868039Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:1245:3129]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:40.868074Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:40.868150Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:697:2681], Recipient [1:697:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:40.868184Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:40.868254Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:864:2815], Recipient [1:864:2815]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:40.868275Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:40.868335Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1245:3129], Recipient [1:1245:3129]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:40.868357Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:40.891634Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:697:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:40.891715Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:40.891754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-04T02:25:40.892051Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:705:2687], Recipient [1:697:2681]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-04T02:25:40.892089Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:40.892134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:40.892199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:40.892254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-04T02:25:40.892322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.979000s, Timestamp# 1970-01-01T00:00:05.158000Z 2026-01-04T02:25:40.892362Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 2 s 2026-01-04T02:25:40.895446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-04T02:25:40.896078Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:2070:3883], Recipient [1:697:2681]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:40.896149Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:40.896182Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-04T02:25:40.896267Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:682:2672], Recipient [1:697:2681]: NKikimrScheme.TEvShredInfoRequest 2026-01-04T02:25:40.896297Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-04T02:25:40.896334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |83.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> KqpRm::SnapshotSharingByExchanger >> KqpRm::SingleSnapshotByExchanger |83.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows >> TestShred::ShredWithSplit [GOOD] >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] |83.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows |83.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/ydb-core-tx-datashard-ut_erase_rows >> KqpRm::NotEnoughExecutionUnits >> TBackupCollectionTests::DisallowedPath |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |83.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_order/ydb-core-tx-datashard-ut_order >> KqpRm::ResourceBrokerNotEnoughResources >> TBackupCollectionTests::HiddenByFeatureFlag >> KqpRm::ManyTasks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ShredWithSplit [GOOD] Test command err: 2026-01-04T02:25:31.886818Z node 1 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:61:2102] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:67:2058] recipient: [1:61:2102] Leader for TabletID 72057594046678944 is [1:71:2106] sender: [1:75:2058] recipient: [1:61:2102] 2026-01-04T02:25:31.961550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:25:31.961659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2026-01-04T02:25:31.961715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:25:31.961755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:25:31.961792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:25:31.976960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:25:31.977116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:31.977204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:25:31.978247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:25:31.978628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:25:32.122845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:25:32.122919Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:32.127934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:25:32.128414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:25:32.128628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:25:32.149671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:25:32.149968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:25:32.150767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:25:32.151126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:25:32.153087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:32.153285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:25:32.154936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:25:32.155000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:32.155128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2026-01-04T02:25:32.155184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:25:32.155239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:25:32.155707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:25:32.766149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:32.767361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:32.767512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:32.767614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:32.767691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:32.767783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:32.767874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:32.767953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:32.768054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:32.768163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:32.768229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:32.768308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:32.768410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:32.768525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:32.768621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:131:2142] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.s ... :TEvMeasureSelfResponseTime 2026-01-04T02:25:44.376559Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:44.410639Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:44.410709Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:44.410809Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:44.410838Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:44.421293Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:44.421373Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:44.421464Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:44.421499Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:44.462315Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:44.462382Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:44.462504Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:44.462534Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:44.477895Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:44.477964Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:44.478071Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:500:2462], Recipient [1:500:2462]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:44.478102Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:44.518272Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:44.518349Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:44.518514Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:405:2399], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:44.518553Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:44.529603Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [1:1376:3244], Recipient [1:500:2462]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409550 TableLocalId: 32 Generation: 2 Round: 1 TableStats { DataSize: 5019511 RowCount: 49 IndexSize: 2213 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5019511 IndexSize: 2213 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 5777 Memory: 89229 Storage: 5024539 } ShardState: 2 UserTablePartOwners: 72075186233409550 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2026-01-04T02:25:44.529668Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-04T02:25:44.529720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409550 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] state 'Ready' dataSize 5019511 rowCount 49 cpuUsage 0.5777 2026-01-04T02:25:44.529858Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409550 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] raw table stats: DataSize: 5019511 RowCount: 49 IndexSize: 2213 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5019511 IndexSize: 2213 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-04T02:25:44.529910Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 
0.100000s, queue# 1 2026-01-04T02:25:44.530203Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [1:1379:3246], Recipient [1:500:2462]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409551 TableLocalId: 32 Generation: 2 Round: 1 TableStats { DataSize: 5121950 RowCount: 50 IndexSize: 2258 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5121950 IndexSize: 2258 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 1280 Memory: 89253 Storage: 5127032 } ShardState: 2 UserTablePartOwners: 72075186233409551 NodeId: 1 StartTime: 50000 TableOwnerId: 72075186233409546 IsDstSplit: true FollowerId: 0 2026-01-04T02:25:44.530248Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-04T02:25:44.530283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] state 'Ready' dataSize 5121950 rowCount 50 cpuUsage 0.128 2026-01-04T02:25:44.530376Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409546 from shard 72075186233409551 followerId 0 pathId [OwnerId: 72075186233409546, LocalPathId: 32] raw table stats: DataSize: 5121950 RowCount: 50 IndexSize: 2258 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 50 HasLoanedParts: false Channels { Channel: 1 DataSize: 5121950 IndexSize: 2258 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-04T02:25:44.542702Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [1:405:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:44.542778Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:44.542810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 1 2026-01-04T02:25:44.543114Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [1:408:2401], Recipient [1:405:2399]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 1 Completed: true Progress10k: 10000 2026-01-04T02:25:44.543155Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:44.543182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:44.543244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: 
TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:44.543288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-04T02:25:44.543368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 29.997500s, Timestamp# 1970-01-01T00:01:10.002500Z 2026-01-04T02:25:44.543407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 1, duration# 20 s 2026-01-04T02:25:44.543937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-04T02:25:44.547638Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:1850:3616], Recipient [1:405:2399]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:44.547704Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:44.547750Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-04T02:25:44.547969Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [1:392:2392], Recipient [1:405:2399]: NKikimrScheme.TEvShredInfoRequest 2026-01-04T02:25:44.548009Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-04T02:25:44.548047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> KqpRm::NodesMembershipByExchanger >> KqpRm::NotEnoughExecutionUnits [GOOD] >> KqpRm::Reduce >> KqpRm::NotEnoughMemory ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReadsMultipleSplitsWithData [GOOD] Test command err: 2026-01-04T02:24:28.473117Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326645194989633:2263];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:28.473374Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:28.550937Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.010254s 2026-01-04T02:24:29.466183Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:29.466548Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:29.478886Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:24:29.548560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown 
-> Disconnected 2026-01-04T02:24:29.548653Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:29.605785Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:29.799230Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:29.817022Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:30.314565Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:30.314597Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:30.314605Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:30.314697Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:31.228512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:33.466435Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326645194989633:2263];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:33.466530Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:37.988397Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326683849696591:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:37.988513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:37.990071Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326683849696601:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:37.990149Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:38.716925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:39.565427Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326692439631416:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:39.568397Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:39.594698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326692439631454:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:39.594774Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326692439631444:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:39.594781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326692439631462:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:39.594845Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326692439631456:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:39.594880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326692439631457:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:39.594908Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326692439631458:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:39.594930Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:39.594957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326692439631459:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:39.594984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326692439631460:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:39.595012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326692439631461:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:39.595077Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326692439631455:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:39.595136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326692439631443:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:39.601702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:39.866681Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326692439631491:3098] txid# 281474976710666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:24:39.866884Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326692439631488:3095] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:24:39.866972Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326692439631484:3091] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:24:39.867063Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326692439631493:3100] txid# 281474976710668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:24:39.867152Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326692439631486:3093] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:24:39.867255Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326692439631489:3096] txid# 28147 ... 
\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000\300L\350\202\000\000\000\200" TabletID: 72075186224037889 ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000@F\224<\000\000\000\200" ShardIdx: 4 } DestinationRanges { KeyRangeBegin: "\002\000\004\000\000\000@F\224<\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000\300L\350\202\000\000\000\200" ShardIdx: 5 }, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SplitBoundary { SerializedKeyPrefix: "\002\000\004\000\000\000@F\224<\000\000\000\200" } SchemeshardId: 72057594046644480 2026-01-04T02:25:35.143943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715694:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:25:35.147450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715694:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-01-04T02:25:35.165222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976715694:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-04T02:25:35.165532Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976715694:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-04T02:25:35.165572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715694:0 2 -> 3 2026-01-04T02:25:35.176237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:84: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976715694:0, at schemeshard: 72057594046644480 2026-01-04T02:25:35.189335Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7591326932957899568:11384] 2026-01-04T02:25:35.203037Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:7591326932957899565:11383] 2026-01-04T02:25:35.242924Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2026-01-04T02:25:35.243051Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2026-01-04T02:25:35.243264Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-01-04T02:25:35.280445Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037892 2026-01-04T02:25:35.280738Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037892, state: WaitScheme 2026-01-04T02:25:35.280952Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037892 TxInFly 0 2026-01-04T02:25:35.303437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715694:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715694:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715694 TabletId: 72075186224037892 2026-01-04T02:25:35.314946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715694:0 
HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715694:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715694 TabletId: 72075186224037891 2026-01-04T02:25:35.314999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715694:0 3 -> 131 2026-01-04T02:25:35.320952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976715694:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:25:35.381774Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037892 2026-01-04T02:25:35.381907Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:25:35.381959Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 2026-01-04T02:25:35.381988Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037892 2026-01-04T02:25:35.382268Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037892 TxInFly 0 2026-01-04T02:25:35.382754Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037891 2026-01-04T02:25:35.382855Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:25:35.382896Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-04T02:25:35.382922Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037891 2026-01-04T02:25:35.383143Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-01-04T02:25:35.403851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976715694:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715694 TabletId: 72075186224037889 2026-01-04T02:25:35.404313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715694:0 131 -> 132 2026-01-04T02:25:35.407902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T02:25:35.408293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-04T02:25:35.408360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976715694:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:25:35.413242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715694 2026-01-04T02:25:35.413308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715694 2026-01-04T02:25:35.413328Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715694, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 6 2026-01-04T02:25:35.442351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715694:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2026-01-04T02:25:35.442436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715694:0 progress is 1/1 2026-01-04T02:25:35.442461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715694:0 progress is 1/1 2026-01-04T02:25:35.442506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715694:0 2026-01-04T02:25:35.445354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715694:0 2026-01-04T02:25:35.512348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037892, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-04T02:25:35.512527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037891, next wakeup# 14.999791s, rate# 0, in queue# 1 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-04T02:25:35.519094Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7591326645194989813:2207] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037891 2026-01-04T02:25:35.520202Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7591326645194989813:2207] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037892 2026-01-04T02:25:35.586300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037891, shardIdx# 72057594046644480:4 in# 73, next wakeup# 14.926028s, rate# 0, in queue# 0 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-04T02:25:35.586351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037892, shardIdx# 72057594046644480:5 in# 74, next wakeup# 14.925966s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-04T02:25:35.594035Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037892 CompletedLoansChanged 2026-01-04T02:25:35.594471Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037891 CompletedLoansChanged 2026-01-04T02:25:35.640746Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-01-04T02:25:35.652613Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:25:35.652673Z node 1 :TX_DATASHARD INFO: 
datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:25:35.655740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:25:35.655961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:25:35.672124Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-04T02:25:35.672688Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-04T02:25:35.675919Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-04T02:25:35.676021Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-04T02:25:39.411039Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 4.5274 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 1 at datashard: 72075186224037890 table: [/Root/Foo] Table has 3 shards >> DataShardSnapshots::RepeatableReadAfterSplitRace [GOOD] |83.7%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest >> DataShardSnapshots::PostMergeNotCompactedTooEarly >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] >> KqpRm::ManyTasks [GOOD] >> KqpRm::SingleSnapshotByExchanger [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughExecutionUnits [GOOD] Test command err: 2026-01-04T02:25:46.270037Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:25:46.270617Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/0023e5/r3tmp/tmpa2CgIm/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:25:46.271182Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/0023e5/r3tmp/tmpa2CgIm/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/0023e5/r3tmp/tmpa2CgIm/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 16134019485686603353 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:25:46.345477Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:25:46.345824Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:25:46.365960Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:470:2102] with ResourceBroker at [2:440:2101] 2026-01-04T02:25:46.366109Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:471:2103] 2026-01-04T02:25:46.366205Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:468:2348] with ResourceBroker at [1:439:2329] 2026-01-04T02:25:46.366271Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:469:2349] 2026-01-04T02:25:46.366399Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:25:46.366434Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-04T02:25:46.366477Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:25:46.366531Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-01-04T02:25:46.366758Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:46.383740Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767493546 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:46.384073Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:46.384224Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493546 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:46.384535Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:25:46.384707Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:25:46.384769Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:25:46.384820Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:46.384919Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493546 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:46.385114Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:25:46.385141Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:46.385227Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767493546 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:46.385747Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-04T02:25:46.386993Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:46.387496Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:46.387886Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:25:46.388176Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info 
from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:46.388433Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-04T02:25:46.388634Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-04T02:25:46.388887Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootColumnshard >> TBackupCollectionTests::DisallowedPath [GOOD] >> TBackupCollectionTests::CreateAbsolutePath >> KqpRm::Reduce [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleSnapshotByExchanger [GOOD] Test command err: 2026-01-04T02:25:45.588638Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:25:45.589237Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001faa/r3tmp/tmpakzPHA/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:25:45.602157Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001faa/r3tmp/tmpakzPHA/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001faa/r3tmp/tmpakzPHA/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9439109178140671901 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:25:45.682644Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:25:45.682990Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:25:45.728720Z node 2 :KQP_RESOURCE_MANAGER 
DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:470:2102] with ResourceBroker at [2:440:2101] 2026-01-04T02:25:45.728851Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:471:2103] 2026-01-04T02:25:45.728903Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:468:2348] with ResourceBroker at [1:439:2329] 2026-01-04T02:25:45.728981Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:469:2349] 2026-01-04T02:25:45.729123Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:25:45.729160Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-04T02:25:45.729204Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:25:45.729246Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-04T02:25:45.729442Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:45.751013Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767493545 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:45.751324Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:45.751439Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493545 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:45.754127Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:25:45.754371Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:25:45.754458Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:25:45.754496Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:45.754634Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493545 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:45.754863Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:25:45.754906Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 
2026-01-04T02:25:45.754984Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767493545 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:45.755527Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-04T02:25:45.755699Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:45.756097Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:45.756486Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:25:45.756754Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:45.756956Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-04T02:25:45.757210Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-04T02:25:45.757491Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:25:45.761005Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-1 (1 by [1:468:2348]) priority=0 resources={0, 100} 2026-01-04T02:25:45.761093Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-1 (1 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:45.761172Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-01-04T02:25:45.761214Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-2-1 (1 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:45.761260Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:468:2348])) 2026-01-04T02:25:45.763067Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 2. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-04T02:25:45.763208Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-2-1-2 (2 by [1:468:2348]) priority=0 resources={0, 100} 2026-01-04T02:25:45.763255Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-2-1-2 (2 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:45.763301Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-01-04T02:25:45.763342Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-2-1-2 (2 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:45.763386Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:468:2348])) 2026-01-04T02:25:45.763467Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 2, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-04T02:25:45.763759Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:45.763937Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493545 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2026-01-04T02:25:45.764218Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:25:46.939084Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-04T02:25:46.939222Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-2-1 (1 by [1:468:2348]) (release resources {0, 100}) 2026-01-04T02:25:46.939281Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.300050 (remove task kqp-1-2-1 (1 by [1:468:2348])) 2026-01-04T02:25:46.939318Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.100100 2026-01-04T02:25:46.939370Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2026-01-04T02:25:46.939415Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-2-1-2 (2 by [1:468:2348]) (release resources {0, 100}) 2026-01-04T02:25:46.939465Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.300050 to 0.100100 (remove task kqp-2-1-2 (2 by [1:468:2348])) 2026-01-04T02:25:46.939513Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 
2026-01-04T02:25:46.939733Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:46.939887Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493546 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:46.940256Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:25:47.266938Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ResourceBrokerNotEnoughResources [GOOD] Test command err: 2026-01-04T02:25:46.809377Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:25:46.809998Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/002451/r3tmp/tmp3s9uaL/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:25:46.810584Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/002451/r3tmp/tmp3s9uaL/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/002451/r3tmp/tmp3s9uaL/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13961172675328235021 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:25:46.876603Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:25:46.876930Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:25:46.911951Z 
node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:470:2102] with ResourceBroker at [2:440:2101] 2026-01-04T02:25:46.912095Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:471:2103] 2026-01-04T02:25:46.912160Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:468:2348] with ResourceBroker at [1:439:2329] 2026-01-04T02:25:46.912220Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:469:2349] 2026-01-04T02:25:46.912360Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:25:46.912396Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-04T02:25:46.912438Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:25:46.912486Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-04T02:25:46.912674Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:46.946791Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767493546 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:46.947093Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:46.947186Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493546 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100 2026-01-04T02:25:46.947462Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:25:46.947650Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:25:46.947714Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:25:46.947747Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:46.947872Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493546 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 100000000 Memory { Pool: 1 Available: 100000000 } ExecutionUnits: 100 2026-01-04T02:25:46.948073Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:25:46.948104Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: 
Don't set KqpProxySharedResources 2026-01-04T02:25:46.948199Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767493546 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:46.948772Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-04T02:25:46.948922Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:46.949304Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:46.949664Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:25:46.956285Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:46.956582Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-04T02:25:46.956809Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-04T02:25:46.957134Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:25:46.959872Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-1 (1 by [1:468:2348]) priority=0 resources={0, 1000} 2026-01-04T02:25:46.959950Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-1 (1 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:46.960032Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 1000} for task kqp-1-2-1 (1 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-01-04T02:25:46.960075Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-2-1 (1 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:46.960135Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 2.500000 (insert task kqp-1-2-1 (1 by [1:468:2348])) 2026-01-04T02:25:46.960373Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 2. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 1000ExternalMemory: 0 } 2026-01-04T02:25:46.960468Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-2 (2 by [1:468:2348]) priority=0 resources={0, 100000} 2026-01-04T02:25:46.960511Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-2 (2 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:46.960552Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:619: Not enough resources to start task kqp-1-2-2 (2 by [1:468:2348]) 2026-01-04T02:25:46.960584Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:499: Removing task kqp-1-2-2 (2 by [1:468:2348]) 2026-01-04T02:25:46.960657Z node 1 :KQP_RESOURCE_MANAGER NOTICE: kqp_rm_service.cpp:338: TxId: 1, taskId: 2. Not enough memory for query, requested: 100000. TxResourcesInfo { TxId: 1, Database: , tx initially granted memory: 0B, tx total memory allocations: 1000B, tx largest successful memory allocation: 1000B, tx last failed memory allocation: 0B, tx total execution units: 0, started at: 2026-01-04T02:25:46.959791Z } >> TBackupCollectionTests::HiddenByFeatureFlag [GOOD] >> TBackupCollectionTests::ParallelCreate |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughMemory [GOOD] >> GroupWriteTest::SimpleRdma [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::ManyTasks [GOOD] Test command err: 2026-01-04T02:25:47.052952Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:25:47.053511Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001fad/r3tmp/tmpKvu9cK/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:25:47.054919Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001fad/r3tmp/tmpKvu9cK/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001fad/r3tmp/tmpKvu9cK/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4941196826738938515 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:25:47.168699Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:25:47.169047Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:25:47.208336Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:470:2102] with ResourceBroker at [2:440:2101] 2026-01-04T02:25:47.208471Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:471:2103] 2026-01-04T02:25:47.208532Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:468:2348] with ResourceBroker at [1:439:2329] 2026-01-04T02:25:47.208597Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:469:2349] 2026-01-04T02:25:47.208734Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:25:47.208787Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-04T02:25:47.208837Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:25:47.208878Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-01-04T02:25:47.209102Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:47.246798Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767493547 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:47.247148Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:47.247258Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493547 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:47.247539Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:25:47.247711Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:25:47.247778Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:25:47.247812Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:47.247921Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493547 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:47.248143Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:25:47.248173Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:47.248274Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767493547 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:47.248787Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-04T02:25:47.248929Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:47.249372Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:47.249715Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:25:47.258504Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info 
from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:47.258862Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-04T02:25:47.259142Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-04T02:25:47.259421Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:25:47.266104Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-1-1 (1 by [1:468:2348]) priority=0 resources={0, 100} 2026-01-04T02:25:47.266193Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-1-1 (1 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.266304Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-01-04T02:25:47.266352Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.266399Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:468:2348])) 2026-01-04T02:25:47.266682Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-04T02:25:47.266982Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-2 (2 by [1:468:2348]) priority=0 resources={0, 100} 2026-01-04T02:25:47.267027Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-2 (2 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.267076Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-2-2 (2 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-01-04T02:25:47.267107Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-2-2 (2 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.267139Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-1-2-2 (2 by [1:468:2348])) 2026-01-04T02:25:47.267172Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 2. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-04T02:25:47.267306Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-3-3 (3 by [1:468:2348]) priority=0 resources={0, 100} 2026-01-04T02:25:47.267361Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-3-3 (3 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.267400Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-3-3 (3 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-01-04T02:25:47.267426Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-3-3 (3 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.267484Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.750000 (insert task kqp-1-3-3 (3 by [1:468:2348])) 2026-01-04T02:25:47.267535Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 3. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-04T02:25:47.267676Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-4-4 (4 by [1:468:2348]) priority=0 resources={0, 100} 2026-01-04T02:25:47.267704Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-4-4 (4 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.267734Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-4-4 (4 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-01-04T02:25:47.267764Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-4-4 (4 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.267791Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.750000 to 1.000000 (insert task kqp-1-4-4 (4 by [1:468:2348])) 2026-01-04T02:25:47.267819Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 4. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-04T02:25:47.267908Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-5-5 (5 by [1:468:2348]) priority=0 resources={0, 100} 2026-01-04T02:25:47.267930Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-5-5 (5 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.267954Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-5-5 (5 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-01-04T02:25:47.267982Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-5-5 (5 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.268008Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.000000 to 1.250000 (insert task kqp-1-5-5 (5 by [1:468:2348])) 2026-01-04T02:25:47.268056Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 5. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-04T02:25:47.268197Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-6-6 (6 by [1:468:2348]) priority=0 resources={0, 100} 2026-01-04T02:25:47.268224Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-6-6 (6 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.268252Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-6-6 (6 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-01-04T02:25:47.268274Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-6-6 (6 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.268323Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.250000 to 1.500000 (insert task kqp-1-6-6 (6 by [1:468:2348])) 2026-01-04T02:25:47.268354Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 6. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-04T02:25:47.268439Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-7-7 (7 by [1:468:2348]) priority=0 resources={0, 100} 2026-01-04T02:25:47.268473Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-7-7 (7 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.268509Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-7-7 (7 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-01-04T02:25:47.268546Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-7-7 (7 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.268583Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.500000 to 1.750000 (insert task kqp-1-7-7 (7 by [1:468:2348])) 2026-01-04T02:25:47.268616Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 7. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-04T02:25:47.268718Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-8-8 (8 by [1:468:2348]) priority=0 resources={0, 100} 2026-01-04T02:25:47.268747Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-8-8 (8 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.268776Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-8-8 (8 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-01-04T02:25:47.268801Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-8-8 (8 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.268831Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 1.750000 to 2.000000 (insert task kqp-1-8-8 (8 by [1:468:2348])) 2026-01-04T02:25:47.268857Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 8. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-04T02:25:47.268953Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-9-9 (9 by [1:468:2348]) priority=0 resources={0, 100} 2026-01-04T02:25:47.268980Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-9-9 (9 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.269018Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-9-9 (9 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-01-04T02:25:47.269043Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-9-9 (9 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.269064Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 2.000000 to 2.250000 (insert task kqp-1-9-9 (9 by [1:468:2348])) 2026-01-04T02:25:47.269094Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 9. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-04T02:25:47.269216Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-1-1 (1 by [1:468:2348]) (release resources {0, 100}) 2026-01-04T02:25:47.269258Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 2.250000 to 2.000000 (remove task kqp-1-1-1 (1 by [1:468:2348])) 2026-01-04T02:25:47.269301Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0. |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> TInterconnectTest::TestCrossConnect [GOOD] >> TInterconnectTest::TestManyEventsWithReconnect >> TBackupCollectionTests::CreateAbsolutePath [GOOD] >> TBackupCollectionTests::Create ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::Reduce [GOOD] Test command err: 2026-01-04T02:25:47.808449Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:25:47.809097Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/002445/r3tmp/tmpzNrFV9/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:25:47.809718Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/002445/r3tmp/tmpzNrFV9/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/002445/r3tmp/tmpzNrFV9/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13470311957341601527 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:25:47.881324Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:25:47.881660Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:25:47.912309Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:470:2102] with ResourceBroker at [2:440:2101] 2026-01-04T02:25:47.912445Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:471:2103] 2026-01-04T02:25:47.912498Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:468:2348] with ResourceBroker at [1:439:2329] 2026-01-04T02:25:47.912556Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:469:2349] 2026-01-04T02:25:47.912697Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:25:47.912741Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-04T02:25:47.912784Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:25:47.912827Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-01-04T02:25:47.913029Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:47.940458Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767493547 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:47.940761Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:47.940846Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493547 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:47.941125Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:25:47.941310Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:25:47.941381Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:25:47.941412Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:47.941532Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493547 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:47.941729Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:25:47.941758Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:47.946240Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767493547 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:47.947049Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-04T02:25:47.947210Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:47.947697Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:47.948061Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:25:47.948352Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info 
from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:47.948572Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-04T02:25:47.948761Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-04T02:25:47.949047Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:25:47.957507Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-1-1 (1 by [1:468:2348]) priority=0 resources={0, 100} 2026-01-04T02:25:47.957607Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-1-1 (1 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.957707Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-01-04T02:25:47.957754Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.957824Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:468:2348])) 2026-01-04T02:25:47.958132Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-04T02:25:47.958407Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task kqp-1-1-1 (1 by [1:468:2348]) (priority=0 type=kqp_query resources={0, 30} resubmit=0) 2026-01-04T02:25:47.958456Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.958509Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.075000 (insert task kqp-1-1-1 (1 by [1:468:2348])) 2026-01-04T02:25:47.958582Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 1. Released resources, Memory: 70, Free Tier: 0, ExecutionUnits: 0. 
|83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> test_insert.py::TestInsert::test[read_data_during_bulk_upsert] [GOOD] >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TBackupCollectionTests::ParallelCreate [GOOD] >> TBackupCollectionTests::DropTwice |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |83.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow |83.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/arrow/ydb-core-kqp-ut-arrow >> TInterconnectTest::TestManyEventsWithReconnect [GOOD] >> TInterconnectTest::TestEventWithPayloadSerialization >> KqpRm::SnapshotSharingByExchanger [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::SimpleRdma [GOOD] Test command err: RandomSeed# 9887819287771136189 2026-01-04T02:25:08.329322Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 1 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-04T02:25:08.362692Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-04T02:25:08.362770Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 going to send TEvBlock {TabletId# 1 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-04T02:25:08.365744Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-04T02:25:08.395024Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-04T02:25:08.399467Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-04T02:25:48.991551Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2026-01-04T02:25:48.991666Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-04T02:25:49.069656Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest >> KqpRm::NodesMembershipByExchanger [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__ASYNC-pk_types7-all_types7-index7-pgdate--ASYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NotEnoughMemory [GOOD] Test command err: 2026-01-04T02:25:48.329331Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from 
IoContext->Setup PDiskId# 1000 2026-01-04T02:25:48.334032Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001fa8/r3tmp/tmp5WsuWX/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:25:48.334725Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001fa8/r3tmp/tmp5WsuWX/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001fa8/r3tmp/tmp5WsuWX/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9594861929996626501 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:25:48.417872Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:25:48.418209Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:25:48.480746Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:470:2102] with ResourceBroker at [2:440:2101] 2026-01-04T02:25:48.480901Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:471:2103] 2026-01-04T02:25:48.480960Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:468:2348] with ResourceBroker at [1:439:2329] 2026-01-04T02:25:48.481025Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:469:2349] 2026-01-04T02:25:48.481165Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:25:48.481214Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-01-04T02:25:48.481259Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:25:48.481298Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-04T02:25:48.481507Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:48.516921Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767493548 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:48.517227Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:48.517318Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493548 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:48.517604Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:25:48.517758Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:25:48.518065Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:25:48.518110Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:48.518221Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493548 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:48.518451Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:25:48.518483Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:48.518582Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767493548 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:48.519176Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-04T02:25:48.519362Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:48.519838Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, 
serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:48.520215Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:25:48.520526Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:48.520761Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-04T02:25:48.520970Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-04T02:25:48.521257Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> TBackupCollectionTests::Create [GOOD] >> TBackupCollectionTests::CreateTwice ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::NodesMembershipByExchanger [GOOD] Test command err: 2026-01-04T02:25:48.094313Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:25:48.094896Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f8e/r3tmp/tmpV8tTW2/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:25:48.095537Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f8e/r3tmp/tmpV8tTW2/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001f8e/r3tmp/tmpV8tTW2/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 12333982096769559472 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:25:48.250563Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:25:48.250959Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:25:48.369367Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:470:2102] with ResourceBroker at [2:440:2101] 2026-01-04T02:25:48.369512Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:471:2103] 2026-01-04T02:25:48.369570Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:468:2348] with ResourceBroker at [1:439:2329] 2026-01-04T02:25:48.369633Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:469:2349] 2026-01-04T02:25:48.369787Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:25:48.390093Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-04T02:25:48.390196Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:25:48.390248Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-01-04T02:25:48.390509Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:48.503706Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767493548 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:48.504048Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:48.504167Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493548 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:48.504501Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:25:48.504699Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:25:48.504768Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:25:48.504807Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:48.504926Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493548 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:48.505142Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:25:48.505176Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:48.505254Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767493548 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:48.518075Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-04T02:25:48.518345Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:48.518818Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:48.519188Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:25:48.519466Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info 
from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:48.519667Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-04T02:25:48.519849Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-04T02:25:48.520136Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:25:49.706984Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-04T02:25:49.707177Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-04T02:25:49.708236Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:50.112627Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SnapshotSharingByExchanger [GOOD] Test command err: 2026-01-04T02:25:45.437950Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:25:45.438495Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/002458/r3tmp/tmpkxAvRg/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:25:45.439075Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/002458/r3tmp/tmpkxAvRg/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/002458/r3tmp/tmpkxAvRg/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17934127359167721237 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:25:45.512476Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:25:45.512814Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:25:45.544083Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:470:2102] with ResourceBroker at [2:440:2101] 2026-01-04T02:25:45.544234Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:471:2103] 2026-01-04T02:25:45.544287Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:468:2348] with ResourceBroker at [1:439:2329] 2026-01-04T02:25:45.544340Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:469:2349] 2026-01-04T02:25:45.544487Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:25:45.544524Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-04T02:25:45.544563Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:25:45.544600Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 
2026-01-04T02:25:45.544789Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:45.567505Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767493545 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:45.567851Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:45.567954Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493545 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:45.568332Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:25:45.568519Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:25:45.568594Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:25:45.568625Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:45.568728Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493545 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:45.568959Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:25:45.568992Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:45.569071Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767493545 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:45.569670Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-04T02:25:45.578096Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:45.578655Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:45.579015Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:25:45.579281Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info 
from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:25:45.579503Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-04T02:25:45.579776Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-04T02:25:45.580021Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:25:46.866303Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-04T02:25:46.866435Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-04T02:25:46.866582Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-1-1 (1 by [1:468:2348]) priority=0 resources={0, 100} 2026-01-04T02:25:46.866665Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-1-1 (1 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:46.866719Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-01-04T02:25:46.866760Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:46.866805Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [1:468:2348])) 2026-01-04T02:25:46.867086Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-04T02:25:46.867218Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-2-1-2 (2 by [1:468:2348]) priority=0 resources={0, 100} 2026-01-04T02:25:46.867262Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-2-1-2 (2 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:46.867305Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-2-1-2 (2 by [1:468:2348]) from queue queue_kqp_resource_manager 2026-01-04T02:25:46.867359Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-2-1-2 (2 by [1:468:2348]) to queue queue_kqp_resource_manager 2026-01-04T02:25:46.867408Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-1-2 (2 by [1:468:2348])) 2026-01-04T02:25:46.867504Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 2, taskId: 1. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-04T02:25:46.867581Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:46.867719Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493546 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2026-01-04T02:25:46.868068Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:25:47.189887Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-04T02:25:47.190045Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-1-1 (1 by [2:470:2102]) priority=0 resources={0, 100} 2026-01-04T02:25:47.190113Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-1-1 (1 by [2:470:2102]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.190182Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-1-1 (1 by [2:470:2102]) from queue queue_kqp_resource_manager 2026-01-04T02:25:47.190237Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-1-1 (1 by [2:470:2102]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.190288Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-1-1 (1 by [2:470:2102])) 2026-01-04T02:25:47.190412Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 1. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-04T02:25:47.190505Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-2-2-2 (2 by [2:470:2102]) priority=0 resources={0, 100} 2026-01-04T02:25:47.190545Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-2-2-2 (2 by [2:470:2102]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.190599Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-2-2-2 (2 by [2:470:2102]) from queue queue_kqp_resource_manager 2026-01-04T02:25:47.190645Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-2-2-2 (2 by [2:470:2102]) to queue queue_kqp_resource_manager 2026-01-04T02:25:47.190689Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.500000 (insert task kqp-2-2-2 (2 by [2:470:2102])) 2026-01-04T02:25:47.190779Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 2, taskId: 2. 
Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-04T02:25:47.190915Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:47.191081Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767493547 AvailableComputeActors: 80 UsedMemory: 200 TotalMemory: 1000 Memory { Pool: 1 Available: 800 } ExecutionUnits: 80 2026-01-04T02:25:47.191422Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-04T02:25:47.509495Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-04T02:25:47.509615Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-1-1 (1 by [1:468:2348]) (release resources {0, 100}) 2026-01-04T02:25:47.509687Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350200 (remove task kqp-1-1-1 (1 by [1:468:2348])) 2026-01-04T02:25:47.509731Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200400 2026-01-04T02:25:47.509783Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2026-01-04T02:25:47.509854Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-2-1-2 (2 by [1:468:2348]) (release resources {0, 100}) 2026-01-04T02:25:47.509911Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350200 to 0.200400 (remove task kqp-2-1-2 (2 by [1:468:2348])) 2026-01-04T02:25:47.509963Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 2, taskId: 1. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2026-01-04T02:25:47.510064Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:47.510210Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767493548 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:47.510490Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:25:47.847461Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-04T02:25:47.847594Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-1-1 (1 by [2:470:2102]) (release resources {0, 100}) 2026-01-04T02:25:47.847654Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.500000 to 0.350200 (remove task kqp-1-1-1 (1 by [2:470:2102])) 2026-01-04T02:25:47.847702Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:582: Updated real resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.200400 2026-01-04T02:25:47.847754Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 1. 
Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2026-01-04T02:25:47.847805Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-2-2-2 (2 by [2:470:2102]) (release resources {0, 100}) 2026-01-04T02:25:47.847847Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.350200 to 0.200400 (remove task kqp-2-2-2 (2 by [2:470:2102])) 2026-01-04T02:25:47.847886Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 2, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 10. 2026-01-04T02:25:47.847948Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:25:47.848154Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767493549 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:25:47.848463Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-04T02:25:50.190151Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> AnalyzeColumnshard::AnalyzeEmptyTable |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TestShred::Run3CyclesForAllSupportedObjects [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> TBackupCollectionTests::CreateTwice [GOOD] >> TBackupCollectionTests::Drop >> TBackupCollectionTests::DropTwice [GOOD] >> TBackupCollectionTests::TableWithSystemColumns >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestEventWithPayloadSerialization [GOOD] Test command err: Starting iteration 0 Starting iteration 1 Starting iteration 2 Starting iteration 3 Starting iteration 4 Starting iteration 5 Starting iteration 6 Starting iteration 7 Starting iteration 8 Starting iteration 9 Starting iteration 10 Starting iteration 11 Starting iteration 12 Starting iteration 13 Starting iteration 14 Starting iteration 15 Starting iteration 16 Starting iteration 17 Starting iteration 18 Starting iteration 19 Starting iteration 20 Starting iteration 21 Starting iteration 22 Starting iteration 23 Starting iteration 24 Starting iteration 25 Starting iteration 26 Starting iteration 27 Starting iteration 28 Starting iteration 29 Starting iteration 30 Starting iteration 31 Starting iteration 32 Starting iteration 33 Starting iteration 34 Starting iteration 35 Starting iteration 36 Starting iteration 37 Starting iteration 38 Starting iteration 39 Starting iteration 40 Starting iteration 41 Starting iteration 42 Starting iteration 43 Starting iteration 44 Starting iteration 45 Starting iteration 46 Starting iteration 47 Starting iteration 48 Starting iteration 49 0 0 0 1 0 3 0 7 0 15 0 31 0 63 0 127 0 255 0 511 0 1023 0 2047 0 4095 0 8191 0 16383 0 32767 0 65535 1 0 1 1 1 3 1 7 1 15 1 31 1 63 1 127 1 255 1 511 1 
1023 1 2047 1 4095 1 8191 1 16383 1 32767 1 65535 3 0 3 1 3 3 3 7 3 15 3 31 3 63 3 127 3 255 3 511 3 1023 3 2047 3 4095 3 8191 3 16383 3 32767 3 65535 7 0 7 1 7 3 7 7 7 15 7 31 7 63 7 127 7 255 7 511 7 1023 7 2047 7 4095 7 8191 7 16383 7 32767 7 65535 15 0 15 1 15 3 15 7 15 15 15 31 15 63 15 127 15 255 15 511 15 1023 15 2047 15 4095 15 8191 15 16383 15 32767 15 65535 31 0 31 1 31 3 31 7 31 15 31 31 31 63 31 127 31 255 31 511 31 1023 31 2047 31 4095 31 8191 31 16383 31 32767 31 65535 63 0 63 1 63 3 63 7 63 15 63 31 63 63 63 127 63 255 63 511 63 1023 63 2047 63 4095 63 8191 63 16383 63 32767 63 65535 127 0 127 1 127 3 127 7 127 15 127 31 127 63 127 127 127 255 127 511 127 1023 127 2047 127 4095 127 8191 127 16383 127 32767 127 65535 255 0 255 1 255 3 255 7 255 15 255 31 255 63 255 127 255 255 255 511 255 1023 255 2047 255 4095 255 8191 255 16383 255 32767 255 65535 511 0 511 1 511 3 511 7 511 15 511 31 511 63 511 127 511 255 511 511 511 1023 511 2047 511 4095 511 8191 511 16383 511 32767 511 65535 1023 0 1023 1 1023 3 1023 7 1023 15 1023 31 1023 63 1023 127 1023 255 1023 511 1023 1023 1023 2047 1023 4095 1023 8191 1023 16383 1023 32767 1023 65535 2047 0 2047 1 2047 3 2047 7 2047 15 2047 31 2047 63 2047 127 2047 255 2047 511 2047 1023 2047 2047 2047 4095 2047 8191 2047 16383 2047 32767 2047 65535 4095 0 4095 1 4095 3 4095 7 4095 15 4095 31 4095 63 4095 127 4095 255 4095 511 4095 1023 4095 2047 4095 4095 4095 8191 4095 16383 4095 32767 4095 65535 8191 0 8191 1 8191 3 8191 7 8191 15 8191 31 8191 63 8191 127 8191 255 8191 511 8191 1023 8191 2047 8191 4095 8191 8191 8191 16383 8191 32767 8191 65535 16383 0 16383 1 16383 3 16383 7 16383 15 16383 31 16383 63 16383 127 16383 255 16383 511 16383 1023 16383 2047 16383 4095 16383 8191 16383 16383 16383 32767 16383 65535 32767 0 32767 1 32767 3 32767 7 32767 15 32767 31 32767 63 32767 127 32767 255 32767 511 32767 1023 32767 2047 32767 4095 32767 8191 32767 16383 32767 32767 32767 65535 65535 0 65535 1 65535 3 65535 7 65535 15 65535 31 65535 63 65535 127 65535 255 65535 511 65535 1023 65535 2047 65535 4095 65535 8191 65535 16383 65535 32767 65535 65535 |83.7%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution >> DataShardTxOrder::ReadWriteReorder |83.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTable >> TestShred::Run3CyclesForTopics [GOOD] >> TestShred::ManualLaunch3CyclesWithNotConsistentCountersInSchemeShardAndBSC [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve |83.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/olap/indexes/ydb-core-kqp-ut-olap-indexes |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/olap/indexes/ydb-core-kqp-ut-olap-indexes |83.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/olap/indexes/ydb-core-kqp-ut-olap-indexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::Run3CyclesForAllSupportedObjects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:25:26.871959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:25:26.872088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:26.872164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:25:26.872206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:25:26.872273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:25:26.872314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:25:26.872376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:26.872477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:25:26.873424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:25:26.874064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:25:27.018950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:25:27.019020Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:27.064724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:25:27.069716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:25:27.111724Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:25:27.136392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:25:27.136707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:25:27.137526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:25:27.137839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:25:27.155718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:27.156001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:25:27.157552Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:25:27.157634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:27.157780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:25:27.157883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:25:27.157976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:25:27.158146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:25:27.656797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.658600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.658826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.658975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.659067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.659142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.659226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.659301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.659421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.659561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.659651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.659768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.659853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: 
ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.659938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:27.660034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... schemeshard__root_shred_manager.cpp:591: TTxCompleteShredTenant Execute at schemeshard: 72057594046678944 2026-01-04T02:25:52.357278Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:312: [RootShredManager] [Finished] Shred completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 39] in# 77 ms, next wakeup# 593.923000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2026-01-04T02:25:52.357349Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:327: [RootShredManager] Shred in tenants is completed. Send request to BS controller 2026-01-04T02:25:52.359197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2026-01-04T02:25:52.359247Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2026-01-04T02:25:52.359461Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:702:2683], Recipient [2:694:2677]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2026-01-04T02:25:52.359526Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:52.359557Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:52.359611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:52.359645Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-04T02:25:52.359695Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-04T02:25:52.359746Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-04T02:25:52.798794Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:694:2677]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:52.798908Z node 2 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:52.798999Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:859:2812]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:52.799038Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:52.799103Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1441:3301]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:52.799143Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:52.799221Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:859:2812], Recipient [2:859:2812]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:52.799253Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:52.799339Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:1441:3301], Recipient [2:1441:3301]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:52.799366Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:52.799431Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:694:2677], Recipient [2:694:2677]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:52.799457Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:52.820188Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:694:2677]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:52.820249Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:52.820270Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2026-01-04T02:25:52.820581Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:702:2683], Recipient [2:694:2677]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2026-01-04T02:25:52.820619Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:52.820645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:52.820709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:52.820747Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-04T02:25:52.820796Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-04T02:25:52.820835Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-04T02:25:53.346652Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:694:2677]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:53.346724Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:53.346795Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:859:2812]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:53.346821Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:53.346873Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1441:3301]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:53.346898Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:53.346965Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:694:2677], Recipient [2:694:2677]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:53.346994Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:53.347077Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:859:2812], Recipient [2:859:2812]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:53.347102Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:53.347161Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:1441:3301], Recipient [2:1441:3301]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:53.347186Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:53.378136Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:694:2677]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:53.378210Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:53.378239Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2026-01-04T02:25:53.378508Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 
268637738, Sender [2:702:2683], Recipient [2:694:2677]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2026-01-04T02:25:53.378548Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:53.378578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:53.378643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:53.378675Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-04T02:25:53.378721Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.922000s, Timestamp# 1970-01-01T00:00:11.212000Z 2026-01-04T02:25:53.378751Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 3, duration# 2 s 2026-01-04T02:25:53.385726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-04T02:25:53.386399Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [2:4812:6096], Recipient [2:694:2677]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:53.386459Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:53.386498Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-04T02:25:53.386645Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [2:679:2668], Recipient [2:694:2677]: NKikimrScheme.TEvShredInfoRequest 2026-01-04T02:25:53.386680Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-04T02:25:53.386715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |83.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> GroupWriteTest::Simple [GOOD] >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 >> DataShardSnapshots::LockedWritesLimitedPerKey+UseSink [GOOD] >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |83.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> DataShardSnapshots::ShardRestartLockNotBrokenByUncommittedAfterRead-UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::ManualLaunch3CyclesWithNotConsistentCountersInSchemeShardAndBSC [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:25:24.511098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:25:24.511203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:24.511254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:25:24.511299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:25:24.511356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:25:24.511387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:25:24.511435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:24.511491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:25:24.512300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:25:24.512566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:25:24.614639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:25:24.614713Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:24.634811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:25:24.635042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:25:24.635191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:25:24.652429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:25:24.652656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:25:24.653330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:25:24.653574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:25:24.656256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:24.656431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:25:24.657547Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:25:24.657603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:24.657719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:25:24.657766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:25:24.657827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:25:24.658009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:25:25.068307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.077763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.079180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.079304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.079390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.079460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.079566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.079668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.079782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.079869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.079967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.080049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.080133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: 
ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.080228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:25.080320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... nt [2:698:2681]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 100 Completed: false Progress10k: 0 2026-01-04T02:25:53.429156Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:53.429202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:53.429283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:53.429331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:646: TTxCompleteShredBSC Unknown generation#100, Expected gen# 52 at schemestard: 72057594046678944 2026-01-04T02:25:53.429449Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 101 2026-01-04T02:25:53.429901Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:706:2687], Recipient [2:698:2681]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 101 Completed: false Progress10k: 0 2026-01-04T02:25:53.429952Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:53.429982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:53.430029Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:53.430066Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-04T02:25:53.432367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-04T02:25:53.432446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-04T02:25:53.432542Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-04T02:25:54.068556Z node 2 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:54.068663Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:54.068768Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:698:2681], Recipient [2:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:54.068807Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:54.079507Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:867:2817]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:54.079611Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:54.079692Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1574:3434]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:54.079723Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:54.079797Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:867:2817], Recipient [2:867:2817]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:54.079836Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:54.079920Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:1574:3434], Recipient [2:1574:3434]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:54.079948Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:54.152823Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:54.152924Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:54.152991Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 101 2026-01-04T02:25:54.153296Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:706:2687], Recipient [2:698:2681]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 101 Completed: false Progress10k: 5000 2026-01-04T02:25:54.153339Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:54.153375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:54.153457Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:54.153504Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-04T02:25:54.153583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-04T02:25:54.153651Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-04T02:25:54.598502Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:54.598599Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:54.598707Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:698:2681], Recipient [2:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:54.598748Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:54.611507Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:867:2817]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:54.611625Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:54.611714Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1574:3434]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:54.611743Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:54.611824Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:867:2817], Recipient [2:867:2817]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:54.611857Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:54.611934Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:1574:3434], Recipient [2:1574:3434]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:54.611963Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:54.692469Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:698:2681]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:54.692569Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:54.692604Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 101 2026-01-04T02:25:54.692888Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:706:2687], Recipient [2:698:2681]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 101 Completed: true Progress10k: 10000 2026-01-04T02:25:54.692928Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:54.692962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:54.693066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:54.693105Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-04T02:25:54.693167Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 101, duration# 2 s 2026-01-04T02:25:54.699100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-04T02:25:54.700017Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [2:4809:6093], Recipient [2:698:2681]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:54.700104Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:54.700174Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-04T02:25:54.700381Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [2:3960:5414], Recipient [2:698:2681]: NKikimrScheme.TEvShredInfoRequest 2026-01-04T02:25:54.700430Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-04T02:25:54.700479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite >> TBackupCollectionTests::Drop [GOOD] >> TBackupCollectionTests::BackupAbsentCollection |83.8%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_change_exchange/ydb-core-tx-datashard-ut_change_exchange |83.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::Simple [GOOD] Test command err: RandomSeed# 2581320160315102603 2026-01-04T02:25:16.109205Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 1 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-04T02:25:16.155909Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved 
TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-04T02:25:16.155987Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 1 going to send TEvBlock {TabletId# 1 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-04T02:25:16.159381Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-04T02:25:16.177294Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-04T02:25:16.182062Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-04T02:25:55.296317Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2026-01-04T02:25:55.296415Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 1 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-04T02:25:55.662568Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 1 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 1 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} |83.8%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_shred/unittest >> TestShred::Run3CyclesForTopics [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:25:28.291603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:25:28.291701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:28.291751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:25:28.291799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:25:28.291830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:25:28.291856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:25:28.291912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:28.291979Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:25:28.292825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:25:28.293096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:25:28.498816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:25:28.498915Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:28.535491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:25:28.536203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:25:28.536407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:25:28.586396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:25:28.586698Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:25:28.587522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:25:28.587833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:25:28.600601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:28.600829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:25:28.602334Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:25:28.602410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:28.602489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:25:28.602554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:25:28.602609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:25:28.602791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:25:29.547450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: 
false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:29.548755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:29.548918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:29.549065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:29.549143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:29.549211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:29.549297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:29.549398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:29.549522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:29.549607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:29.549683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:29.549769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:29.549893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:29.550007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:29.550118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
.cpp:312: [RootShredManager] [Finished] Shred completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 39] in# 16 ms, next wakeup# 593.984000s, rate# 0, in queue# 0 tenants, running# 0 tenants at schemeshard 72057594046678944 2026-01-04T02:25:52.203332Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:327: [RootShredManager] Shred in tenants is completed. Send request to BS controller 2026-01-04T02:25:52.222598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# false 2026-01-04T02:25:52.223456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:615: TTxCompleteShredTenant Complete at schemeshard: 72057594046678944, NeedSendRequestToBSC# true 2026-01-04T02:25:52.223504Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2026-01-04T02:25:52.223733Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:698:2680], Recipient [2:694:2677]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 0 2026-01-04T02:25:52.223782Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:52.223811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:52.223869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:52.223930Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 0% 2026-01-04T02:25:52.223992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-04T02:25:52.224044Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-04T02:25:53.040006Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:694:2677]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:53.040087Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:53.041572Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:863:2813]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:53.041656Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:53.041758Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1250:3132]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:53.041846Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:53.041963Z node 2 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:694:2677], Recipient [2:694:2677]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:53.041995Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:53.042135Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:863:2813], Recipient [2:863:2813]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:53.042176Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:53.042274Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:1250:3132], Recipient [2:1250:3132]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:53.042309Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:53.052697Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:694:2677]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:53.052777Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:53.052808Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2026-01-04T02:25:53.053066Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:698:2680], Recipient [2:694:2677]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: false Progress10k: 5000 2026-01-04T02:25:53.053111Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:53.053141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:53.053210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:53.053254Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:657: TTxCompleteShredBSC: Progress data shred in BSC 50% 2026-01-04T02:25:53.053311Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# true 2026-01-04T02:25:53.053362Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:348: [RootShredManager] ScheduleRequestToBSC: Interval# 1.000000s 2026-01-04T02:25:53.837983Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:694:2677]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:53.838073Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:53.838193Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:863:2813]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:53.838255Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:53.838339Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [2:1250:3132]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:53.838371Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:25:53.838444Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:863:2813], Recipient [2:863:2813]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:53.838480Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:53.838596Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:1250:3132], Recipient [2:1250:3132]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:53.838634Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:53.838707Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [2:694:2677], Recipient [2:694:2677]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:53.838740Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:25:53.850299Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125517, Sender [0:0:0], Recipient [2:694:2677]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:53.850395Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5472: StateWork, processing event TEvSchemeShard::TEvWakeupToRunShredBSC 2026-01-04T02:25:53.850431Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:354: [RootShredManager] SendRequestToBSC: Generation# 3 2026-01-04T02:25:53.850794Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 268637738, Sender [2:698:2680], Recipient [2:694:2677]: NKikimrBlobStorage.TEvControllerShredResponse CurrentGeneration: 3 Completed: true Progress10k: 10000 2026-01-04T02:25:53.850842Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5471: StateWork, processing event TEvBlobStorage::TEvControllerShredResponse 2026-01-04T02:25:53.850877Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8264: Handle TEvControllerShredResponse, at schemeshard: 72057594046678944 2026-01-04T02:25:53.850954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:639: TTxCompleteShredBSC Execute at schemeshard: 72057594046678944 2026-01-04T02:25:53.851008Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:653: TTxCompleteShredBSC: Data shred in BSC is completed 2026-01-04T02:25:53.851072Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:170: [RootShredManager] ScheduleShredWakeup: Interval# 0.982000s, Timestamp# 1970-01-01T00:00:11.152000Z 2026-01-04T02:25:53.851117Z 
node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:378: [RootShredManager] Complete: Generation# 3, duration# 2 s 2026-01-04T02:25:53.858626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__root_shred_manager.cpp:665: TTxCompleteShredBSC Complete at schemeshard: 72057594046678944, NeedScheduleRequestToBSC# false 2026-01-04T02:25:53.859404Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [2:2237:4033], Recipient [2:694:2677]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:53.859498Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:53.859552Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-04T02:25:53.859762Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125519, Sender [2:677:2666], Recipient [2:694:2677]: NKikimrScheme.TEvShredInfoRequest 2026-01-04T02:25:53.859812Z node 2 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5469: StateWork, processing event TEvSchemeShard::TEvShredInfoRequest 2026-01-04T02:25:53.859856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:8215: Handle TEvShredInfoRequest, at schemeshard: 72057594046678944 |83.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_shred/unittest >> DataShardTxOrder::ReadWriteReorder [GOOD] >> DataShardSnapshots::VolatileSnapshotRenameTimeout [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommit >> DataShardSnapshots::LockedWriteWithAsyncIndex-WithRestart+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty >> TBackupCollectionTests::TableWithSystemColumns [GOOD] >> TBackupCollectionTests::DropEmptyBackupCollection ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ReadWriteReorder [GOOD] Test command err: 2026-01-04T02:25:55.557287Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:25:55.578469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:25:55.578540Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:55.606410Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:25:55.606771Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:25:55.607043Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:25:55.687837Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:25:55.735419Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:25:55.735527Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:25:55.737317Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:25:55.737405Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: 
LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:25:55.737479Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:25:55.742363Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:25:55.744104Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:25:55.744220Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:25:55.923260Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:25:56.028995Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:25:56.029412Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:25:56.029645Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:25:56.029697Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:25:56.034745Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:25:56.034929Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:25:56.035364Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:25:56.035437Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:25:56.035933Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:25:56.036128Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:25:56.036460Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:25:56.036543Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:25:56.036661Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:25:56.036726Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:25:56.036808Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:25:56.036909Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:25:56.037007Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:25:56.037135Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:56.037197Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:56.037310Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at 
leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:25:56.097216Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\n\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:25:56.097428Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:25:56.097618Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:25:56.097888Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:25:56.098002Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:25:56.098070Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:25:56.098158Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:25:56.098234Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:25:56.098300Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:25:56.098367Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:25:56.098832Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:25:56.098896Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:25:56.098937Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:25:56.098982Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:25:56.099063Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:25:56.099110Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:25:56.099164Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:25:56.099227Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:25:56.099266Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:25:56.122700Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:25:56.122829Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:25:56.122891Z 
node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:25:56.123012Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:25:56.123105Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:25:56.123882Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:56.123953Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:56.124018Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:25:56.124194Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:25:56.124234Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:25:56.124423Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:25:56.124471Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:25:56.124521Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:25:56.124566Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:25:56.143965Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:25:56.144090Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:25:56.144467Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:25:56.144518Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:25:56.144613Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:25:56.144663Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:25:56.144700Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:25:56.144747Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:25:56.144810Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000001: ... 
6-01-04T02:25:57.573000Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:25:57.573258Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:238:2230], Recipient [1:238:2230]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:25:57.573306Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:25:57.573352Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:25:57.573396Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:25:57.573438Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:25:57.573471Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000005:12] in PlanQueue unit at 9437184 2026-01-04T02:25:57.573498Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit PlanQueue 2026-01-04T02:25:57.573533Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-04T02:25:57.573559Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit PlanQueue 2026-01-04T02:25:57.573585Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit LoadTxDetails 2026-01-04T02:25:57.573611Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit LoadTxDetails 2026-01-04T02:25:57.582697Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437184 loaded tx from db 1000005:12 keys extracted: 3 2026-01-04T02:25:57.582794Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-04T02:25:57.582833Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadTxDetails 2026-01-04T02:25:57.582866Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit FinalizeDataTxPlan 2026-01-04T02:25:57.582937Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit FinalizeDataTxPlan 2026-01-04T02:25:57.582997Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-04T02:25:57.583025Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit FinalizeDataTxPlan 2026-01-04T02:25:57.583050Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-04T02:25:57.583098Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit BuildAndWaitDependencies 2026-01-04T02:25:57.583163Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000005:12] is the new logically complete end at 9437184 2026-01-04T02:25:57.583195Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000005:12] is the new logically incomplete end at 9437184 2026-01-04T02:25:57.583222Z 
node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000005:12] at 9437184 2026-01-04T02:25:57.583345Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-04T02:25:57.583393Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-04T02:25:57.583421Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit BuildDataTxOutRS 2026-01-04T02:25:57.583448Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit BuildDataTxOutRS 2026-01-04T02:25:57.583504Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-04T02:25:57.583527Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit BuildDataTxOutRS 2026-01-04T02:25:57.583549Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit StoreAndSendOutRS 2026-01-04T02:25:57.583571Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit StoreAndSendOutRS 2026-01-04T02:25:57.583595Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-04T02:25:57.583616Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit StoreAndSendOutRS 2026-01-04T02:25:57.583635Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit PrepareDataTxInRS 2026-01-04T02:25:57.583685Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit PrepareDataTxInRS 2026-01-04T02:25:57.583735Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-04T02:25:57.583760Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit PrepareDataTxInRS 2026-01-04T02:25:57.583805Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit LoadAndWaitInRS 2026-01-04T02:25:57.583833Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit LoadAndWaitInRS 2026-01-04T02:25:57.583857Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-04T02:25:57.583884Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit LoadAndWaitInRS 2026-01-04T02:25:57.583906Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit BlockFailPoint 2026-01-04T02:25:57.583927Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit BlockFailPoint 2026-01-04T02:25:57.583950Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-04T02:25:57.583979Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit BlockFailPoint 2026-01-04T02:25:57.584014Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit ExecuteDataTx 2026-01-04T02:25:57.584038Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit ExecuteDataTx 2026-01-04T02:25:57.584540Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:12] at tablet 9437184 with status COMPLETE 2026-01-04T02:25:57.584626Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:12] at 9437184: {NSelectRow: 3, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 3, SelectRowBytes: 24, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-04T02:25:57.584689Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-04T02:25:57.584778Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit ExecuteDataTx 2026-01-04T02:25:57.584815Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit CompleteOperation 2026-01-04T02:25:57.584843Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit CompleteOperation 2026-01-04T02:25:57.585034Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is DelayComplete 2026-01-04T02:25:57.585092Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit CompleteOperation 2026-01-04T02:25:57.585126Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:12] at 9437184 to execution unit CompletedOperations 2026-01-04T02:25:57.585156Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:12] at 9437184 on unit CompletedOperations 2026-01-04T02:25:57.585195Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:12] at 9437184 is Executed 2026-01-04T02:25:57.585230Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:12] at 9437184 executing on unit CompletedOperations 2026-01-04T02:25:57.585268Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:12] at 9437184 has finished 2026-01-04T02:25:57.585300Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:25:57.585332Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:25:57.585364Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:25:57.585413Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:25:57.604705Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000005 txid# 12} 2026-01-04T02:25:57.604801Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000005} 2026-01-04T02:25:57.604866Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-04T02:25:57.604912Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete 
execution for [1000005:12] at 9437185 on unit CompleteOperation 2026-01-04T02:25:57.605021Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 12] from 9437185 at tablet 9437185 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:25:57.605096Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-01-04T02:25:57.606217Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 12} 2026-01-04T02:25:57.606271Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2026-01-04T02:25:57.606315Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:25:57.606348Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:12] at 9437184 on unit CompleteOperation 2026-01-04T02:25:57.606394Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 12] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-04T02:25:57.606435Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |83.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TLocksTest::Range_BrokenLock1 [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder >> KqpSnapshotIsolation::TReadOwnChangesOlap [GOOD] |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |83.8%| [LD] {RESULT} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/metadata/initializer/ut/ydb-services-metadata-initializer-ut |83.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_shred/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpResultSetFormats::ArrowFormat_Simple |83.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_shred/test-results/unittest/{meta.json ... results_accumulator.log} |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan |83.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan >> DataShardSnapshots::DelayedWriteReadableAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReplyAfterSplit >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility >> TBackupCollectionTests::BackupAbsentCollection [GOOD] >> TBackupCollectionTests::BackupDroppedCollection >> DataShardTxOrder::ZigZag_oo8_dirty |83.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_shred/test-results/unittest/{meta.json ... 
results_accumulator.log} |83.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scan/ydb-core-kqp-ut-scan >> GroupWriteTest::ByTableName [GOOD] >> TBackupCollectionTests::DropEmptyBackupCollection [GOOD] >> TBackupCollectionTests::DropNonExistentCollection >> KqpResultSetFormats::ValueFormat_Simple |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_scheme_load.py::TestSchemeLoad::test_multi[create_and_drop_tables] [GOOD] >> DataShardOutOfOrder::TestOutOfOrderLockLost |83.8%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::ByTableName [GOOD] Test command err: RandomSeed# 11416051829804846101 2026-01-04T02:25:06.785275Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 72058428954028033 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-04T02:25:06.884497Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-04T02:25:06.884606Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 1 going to send TEvBlock {TabletId# 72058428954028033 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-04T02:25:06.887709Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-04T02:25:06.941706Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-04T02:25:06.949847Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-04T02:26:01.959387Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2026-01-04T02:26:01.959489Z 1 00h01m30.010512s :BS_LOAD_TEST DEBUG: TabletId# 72058428954028033 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-04T02:26:02.013910Z 1 00h01m30.010512s :BS_LOAD_TEST INFO: TabletId# 72058428954028033 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 72058428954028033 RecordGeneration# 2 PerGenerationCounter# 32 Channel# 0 Status# OK} |83.8%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TReadOwnChangesOlap [GOOD] Test command err: Trying to start YDB, gRPC: 28162, MsgBus: 28197 2026-01-04T02:24:48.301386Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326730110018697:2235];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:48.301451Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:48.368743Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:24:48.438538Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:24:48.976061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:48.976199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:49.186417Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:24:49.232144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:49.246206Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326730110018466:2081] 1767493488200827 != 1767493488200830 2026-01-04T02:24:49.282527Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:49.325580Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:49.491391Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:24:49.817664Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:24:49.817682Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:24:49.817688Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:24:49.817767Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:24:51.314380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:53.295834Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591326730110018697:2235];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:53.295891Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:24:55.790987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7591326760174790444:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:55.791166Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:55.791506Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326760174790456:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:55.791594Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591326760174790457:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:55.791639Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:24:55.798174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:24:55.824664Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591326760174790460:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:24:55.992738Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591326760174790511:2547] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:24:56.628201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:56.891921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:59.798684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 65033, MsgBus: 20642 2026-01-04T02:25:04.782757Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:25:04.846174Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:25:05.101909Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:25:05.131202Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:05.142335Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:25:05.142418Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:25:05.146060Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591326799135283965:2081] 1767493504677296 != 1767493504677299 2026-01-04T02:25:05.204126Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:25:05.590488Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:25:05.590512Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:25:05.590519Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-04T02:25:05.590636Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:25:05.680392Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:25:05.742032Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:25:06.972449Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:25:06.979537Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:25:13.603095Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326837789990512:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:13.603206Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:13.603760Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326837789990539:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:13.603805Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591326837789990540:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:25:13.603863Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool d ... 10662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.176665Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.181550Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.181608Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.181625Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.199333Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.199393Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.199408Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.204801Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.204870Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.204887Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.223950Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.224010Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.224026Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.228729Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.228795Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.228810Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.233001Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.238343Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.238384Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.242368Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.242429Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.242445Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.257112Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.257176Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.257194Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.263283Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.263341Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.263357Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.273375Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.273436Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.273453Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.284552Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.284609Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.284654Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.311866Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.317329Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.317364Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.373561Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038050;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.373623Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038050;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.373637Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038050;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:25:50.532790Z node 3 
:KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3d5n5z9406ck3m57p4cg8y", SessionId: ydb://session/3?node_id=3&id=MmFmMzIwMjQtODQwODY2NTItOGRjYTQ2Y2MtOGY2OGVhNTg=, Slow query, duration: 20.389877s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b |83.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TBackupCollectionTests::DropNonExistentCollection [GOOD] >> TBackupCollectionTests::DropCollectionWithMultipleBackups >> TBackupCollectionTests::BackupDroppedCollection [GOOD] >> TBackupCollectionTests::BackupAbsentDirs >> KqpScanArrowFormat::AllTypesColumnsCellvec |83.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__ASYNC-pk_types7-all_types7-index7-pgdate--ASYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock1 [GOOD] Test command err: 2026-01-04T02:24:37.474515Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591326681769963138:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:37.479556Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:38.297715Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:24:38.454213Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591326681769962925:2081] 1767493477413687 != 1767493477413690 2026-01-04T02:24:38.466874Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:38.505624Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:38.518862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:38.518959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:38.544737Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:24:38.619365Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:39.356948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:39.370186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:24:39.407614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:24:39.600120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:40.032771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:40.183971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:43.789748Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:24:43.789944Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:24:43.941494Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:43.946014Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591326707045020639:2081] 1767493483663998 != 1767493483664001 2026-01-04T02:24:43.987269Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:43.987343Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:44.009655Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:44.066372Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:24:44.495116Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:44.509544Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:24:44.536958Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:44.699164Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:44.754622Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:24:44.853311Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:52.757716Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591326746927400318:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:24:52.757766Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:24:52.777666Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:24:52.941703Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:52.958443Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:52.958521Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:52.959718Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:24:52.986941Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:24:53.162818Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:24:53.194854Z node 3 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:24:53.202307Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:24:53.216387Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:24:53.221108Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:53.310956Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:53.383318Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:24:57.234125Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:24:57.234358Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:24:57.302641Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591326767296986784:2081] 1767493497107522 != 1767493497107525 2026-01-04T02:24:57.310526Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:57.314569Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:24:57.314652Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:24:57.319172Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Conn ... 
RN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:37.132289Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7591326939000341895:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:25:37.132650Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:25:37.198215Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:25:37.334040Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7591326939000341779:2081] 1767493537108998 != 1767493537109001 2026-01-04T02:25:37.409778Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:37.413350Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:25:37.413459Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:25:37.414109Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:25:37.418919Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:25:37.822143Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:25:37.855790Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:25:37.863311Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:38.018915Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:25:38.031047Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:38.123335Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:38.158194Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:25:43.566130Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7591326965240064333:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:25:43.566213Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:25:43.611536Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:25:43.906256Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7591326965240064305:2081] 1767493543565058 != 1767493543565061 2026-01-04T02:25:43.907373Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:43.925189Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:25:43.927025Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:25:43.927136Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:25:43.932647Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:25:44.408254Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:25:44.512403Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:25:44.521110Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:25:44.532733Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:25:44.539209Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:44.591086Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:25:44.647161Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:44.733828Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:50.434103Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:25:50.434315Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:25:50.606409Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:25:50.696552Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:50.698331Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7591326995739171399:2081] 1767493550250024 != 1767493550250027 2026-01-04T02:25:50.742895Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:25:50.744442Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:25:50.903864Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:25:51.201379Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:25:51.321346Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:25:51.555394Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:25:51.567667Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, 
at schemeshard: 72057594046644480 2026-01-04T02:25:51.588166Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:25:51.600047Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:51.802381Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:25:52.010955Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |83.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> KqpResultSetFormats::ArrowFormat_EmptyBatch |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> DataShardOutOfOrder::TestReadTableWriteConflict >> TBackupCollectionTests::BackupAbsentDirs [GOOD] >> TBackupCollectionTests::BackupNonIncrementalCollection |83.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |83.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_locks/ydb-core-tx-datashard-ut_locks >> KqpResultSetFormats::DefaultFormat >> DataShardTxOrder::RandomPointsAndRanges >> DataShardSnapshots::LockedWritesLimitedPerKey-UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink >> TBackupCollectionTests::BackupNonIncrementalCollection [GOOD] >> TBackupCollectionTests::DropCollectionWithFullBackup |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage |83.9%| [LD] {RESULT} $(B)/ydb/tests/fq/control_plane_storage/ydb-tests-fq-control_plane_storage >> KqpResultSetFormats::ArrowFormat_Simple [GOOD] >> KqpResultSetFormats::ArrowFormat_SmallChannelBufferSize >> TBackupCollectionTests::DropCollectionWithMultipleBackups [GOOD] >> TBackupCollectionTests::DropCollectionWithNestedTables |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |83.9%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_monitoring/ydb-core-tx-scheme_board-ut_monitoring >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail >> TBackupCollectionTests::DropCollectionWithFullBackup [GOOD] >> TBackupCollectionTests::DropCollectionWithIncrementalBackup >> 
DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate >> test_alter_tiering.py::TestAlterTiering::test[many_tables] [FAIL] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLockOutOfOrder [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW >> DataShardOutOfOrder::TestOutOfOrderLockLost [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead+UseSink [GOOD] >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink >> DataShardTxOrder::ImmediateBetweenOnline |83.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] |83.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] [GOOD] >> KqpResultSetFormats::ValueFormat_Simple [GOOD] >> KqpResultSetFormats::ValueFormat_SmallChannelBufferSize >> TBackupCollectionTests::DropCollectionWithIncrementalBackup [GOOD] >> TBackupCollectionTests::DropCollectionDuringActiveBackup >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] >> KqpResultSetFormats::ArrowFormat_SmallChannelBufferSize [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Arithmetic |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |83.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_write/ydb-core-tx-datashard-ut_write ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo8_dirty [GOOD] Test command err: 2026-01-04T02:26:02.712078Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:26:02.734017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:26:02.734084Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:02.780615Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:26:02.781007Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:26:02.781307Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:26:02.915736Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:26:02.935855Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:26:02.935983Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:26:02.946839Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:26:02.946955Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:26:02.947050Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:26:02.947598Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:26:02.949355Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:26:02.949483Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:26:03.145166Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:26:03.188678Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:26:03.188941Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:26:03.189098Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:26:03.189139Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:26:03.189214Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:26:03.189272Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:26:03.189520Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:03.189570Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:03.189937Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:26:03.190096Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:26:03.190184Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:26:03.190222Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:26:03.190263Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:26:03.190305Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:26:03.190338Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:26:03.190418Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:26:03.190473Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:26:03.190581Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:03.190614Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:03.190674Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:26:03.200359Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender 
[1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:26:03.200501Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:26:03.200637Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:26:03.200862Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:26:03.200917Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:26:03.201003Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:26:03.201090Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:26:03.201136Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:26:03.201182Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:26:03.201236Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:26:03.201568Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:26:03.201610Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:26:03.201646Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:26:03.201679Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:26:03.201736Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:26:03.201788Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:26:03.202277Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:26:03.202335Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:26:03.202366Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:26:03.215334Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:26:03.215430Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:26:03.215467Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:26:03.215517Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose 
transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:26:03.215578Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:26:03.216216Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:03.216284Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:03.216336Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:26:03.216472Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:26:03.216507Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:26:03.216660Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:26:03.216703Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:26:03.216741Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:26:03.216786Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:26:03.236428Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:26:03.236517Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:26:03.236824Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:03.236881Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:03.236942Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:26:03.237016Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:26:03.237053Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:26:03.237097Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:26:03.237152Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
ions 2026-01-04T02:26:15.569468Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:26:15.569743Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:351:2319], Recipient [2:351:2319]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:15.579448Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:15.579564Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437185 2026-01-04T02:26:15.579604Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:26:15.579634Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-01-04T02:26:15.579733Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000016:45] in PlanQueue unit at 9437185 2026-01-04T02:26:15.579770Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2026-01-04T02:26:15.579806Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:26:15.579836Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2026-01-04T02:26:15.579864Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2026-01-04T02:26:15.579894Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2026-01-04T02:26:15.580746Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2026-01-04T02:26:15.580797Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:26:15.580828Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2026-01-04T02:26:15.580874Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2026-01-04T02:26:15.580904Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2026-01-04T02:26:15.580945Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:26:15.580968Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2026-01-04T02:26:15.580989Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2026-01-04T02:26:15.581011Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2026-01-04T02:26:15.581067Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000016:45] is the new logically complete end at 9437185 2026-01-04T02:26:15.581107Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000016:45] is the new logically incomplete end at 9437185 2026-01-04T02:26:15.581140Z 
node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000016:45] at 9437185 2026-01-04T02:26:15.581176Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:26:15.581196Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2026-01-04T02:26:15.581218Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2026-01-04T02:26:15.581237Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2026-01-04T02:26:15.581305Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:26:15.581328Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2026-01-04T02:26:15.581348Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2026-01-04T02:26:15.581368Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2026-01-04T02:26:15.581393Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:26:15.581413Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2026-01-04T02:26:15.581435Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2026-01-04T02:26:15.581456Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2026-01-04T02:26:15.586050Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:26:15.586095Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2026-01-04T02:26:15.586148Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2026-01-04T02:26:15.586177Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2026-01-04T02:26:15.586200Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:26:15.586245Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2026-01-04T02:26:15.586269Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BlockFailPoint 2026-01-04T02:26:15.586288Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BlockFailPoint 2026-01-04T02:26:15.586310Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:26:15.586330Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BlockFailPoint 2026-01-04T02:26:15.586348Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2026-01-04T02:26:15.586369Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2026-01-04T02:26:15.586913Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2026-01-04T02:26:15.586973Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-04T02:26:15.587046Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:26:15.587077Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2026-01-04T02:26:15.587101Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2026-01-04T02:26:15.587145Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2026-01-04T02:26:15.587344Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is DelayComplete 2026-01-04T02:26:15.587376Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2026-01-04T02:26:15.587405Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2026-01-04T02:26:15.587430Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2026-01-04T02:26:15.587459Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:26:15.587480Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2026-01-04T02:26:15.587502Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000016:45] at 9437185 has finished 2026-01-04T02:26:15.587530Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:26:15.587554Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-01-04T02:26:15.587581Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2026-01-04T02:26:15.587607Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-01-04T02:26:15.604983Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2026-01-04T02:26:15.605053Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2026-01-04T02:26:15.605115Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-04T02:26:15.605153Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete 
execution for [1000016:45] at 9437185 on unit CompleteOperation 2026-01-04T02:26:15.605210Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:104:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-04T02:26:15.605259Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-01-04T02:26:15.605713Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2026-01-04T02:26:15.605763Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2026-01-04T02:26:15.605820Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:26:15.605849Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000016:45] at 9437184 on unit CompleteOperation 2026-01-04T02:26:15.605887Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:104:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-04T02:26:15.605918Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] >> DataShardTxOrder::RandomPoints_DelayRS >> TBackupCollectionTests::DropCollectionDuringActiveBackup [GOOD] >> TBackupCollectionTests::DropCollectionVerifyCDCCleanup >> TBackupCollectionTests::DropCollectionWithNestedTables [GOOD] >> TBackupCollectionTests::DropLargeBackupCollection |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |83.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_secret_reboots/ydb-core-tx-schemeshard-ut_secret_reboots >> DataShardSnapshots::UncommittedWriteRestartDuringCommit [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayData1 [GOOD] Test command err: 2026-01-04T02:25:57.266558Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:25:57.274789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:25:57.274855Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:57.285927Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:25:57.286327Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:25:57.286602Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:25:57.336371Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:25:57.379980Z 
node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:25:57.380107Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:25:57.386378Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:25:57.386502Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:25:57.386587Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:25:57.387102Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:25:57.388851Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:25:57.388971Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:25:57.528057Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:25:57.598507Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:25:57.598748Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:25:57.598895Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:25:57.598947Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:25:57.599008Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:25:57.599055Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:25:57.599339Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:25:57.599397Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:25:57.599754Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:25:57.599891Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:25:57.599981Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:25:57.600023Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:25:57.600062Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:25:57.600094Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:25:57.600165Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:25:57.600220Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:25:57.600269Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:25:57.600381Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:57.600423Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:57.600473Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:25:57.620666Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:25:57.620768Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:25:57.620897Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:25:57.621087Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:25:57.621135Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:25:57.621200Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:25:57.622310Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:25:57.625625Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:25:57.625705Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:25:57.625765Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:25:57.626205Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:25:57.626250Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:25:57.626292Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:25:57.626331Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:25:57.626392Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:25:57.626459Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:25:57.626503Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:25:57.626539Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:25:57.626585Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:25:57.647266Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:25:57.647377Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:25:57.647421Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:25:57.647472Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:25:57.647564Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:25:57.648255Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:57.648329Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:25:57.648378Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:25:57.648538Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:25:57.648583Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:25:57.648747Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:25:57.648801Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:25:57.648842Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:25:57.648890Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:25:57.654328Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:25:57.654438Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:25:57.654831Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:25:57.654875Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:25:57.654950Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:25:57.654994Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:25:57.655037Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue 
at 9437184 2026-01-04T02:25:57.655109Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:25:57.655151Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-04T02:26:18.076068Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:26:18.076092Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:508] at 9437184 on unit CompleteOperation 2026-01-04T02:26:18.076144Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 508] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:26:18.076191Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2026-01-04T02:26:18.076216Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:26:18.076327Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-04T02:26:18.076348Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-04T02:26:18.076368Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-04T02:26:18.076388Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:26:18.076424Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:509] at 9437184 on unit CompleteOperation 2026-01-04T02:26:18.076459Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 509] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:26:18.076550Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2026-01-04T02:26:18.076586Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:26:18.076701Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-04T02:26:18.076729Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:26:18.076753Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:510] at 9437184 on unit CompleteOperation 2026-01-04T02:26:18.076784Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 510] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:26:18.076819Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2026-01-04T02:26:18.076840Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:26:18.076956Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
9437184 2026-01-04T02:26:18.076995Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:511] at 9437184 on unit CompleteOperation 2026-01-04T02:26:18.077027Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 511] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:26:18.077070Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2026-01-04T02:26:18.077108Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:26:18.077216Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:26:18.077238Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:512] at 9437184 on unit CompleteOperation 2026-01-04T02:26:18.077269Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 512] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:26:18.077306Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2026-01-04T02:26:18.077335Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:26:18.077438Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:26:18.077467Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:516] at 9437184 on unit FinishPropose 2026-01-04T02:26:18.077540Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 516 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2026-01-04T02:26:18.077633Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:26:18.077793Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:26:18.083964Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:514] at 9437184 on unit CompleteOperation 2026-01-04T02:26:18.084085Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 514] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-04T02:26:18.084178Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2026-01-04T02:26:18.084230Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:26:18.084414Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:26:18.084447Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:515] at 9437184 on unit CompleteOperation 2026-01-04T02:26:18.084487Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 515] from 9437184 at 
tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:26:18.084517Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:26:18.084773Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:348:2315]: {TEvReadSet step# 1000005 txid# 506 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2026-01-04T02:26:18.084827Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:26:18.084888Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 506 2026-01-04T02:26:18.085344Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:348:2315]: {TEvReadSet step# 1000005 txid# 507 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2026-01-04T02:26:18.085392Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:26:18.085438Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 507 2026-01-04T02:26:18.085623Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:348:2315]: {TEvReadSet step# 1000005 txid# 508 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2026-01-04T02:26:18.085654Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:26:18.085681Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 508 2026-01-04T02:26:18.085843Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:348:2315]: {TEvReadSet step# 1000005 txid# 509 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2026-01-04T02:26:18.085875Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:26:18.085900Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 509 2026-01-04T02:26:18.086029Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:348:2315]: {TEvReadSet step# 1000005 txid# 510 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2026-01-04T02:26:18.086067Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:26:18.086091Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 510 2026-01-04T02:26:18.086235Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:348:2315]: {TEvReadSet step# 1000005 txid# 511 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2026-01-04T02:26:18.086264Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: 
StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:26:18.086300Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 511 2026-01-04T02:26:18.086452Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:348:2315]: {TEvReadSet step# 1000005 txid# 512 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2026-01-04T02:26:18.086493Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:26:18.086518Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 512 2026-01-04T02:26:18.086576Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:348:2315]: {TEvReadSet step# 1000005 txid# 514 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2026-01-04T02:26:18.086598Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:26:18.086620Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 514 expect 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 5 6 - 6 6 7 - - - - - - - - - - - - - - - - - - - - - - - - - - interm 5 6 - 6 6 - - - - - - - - - - - - - - - - - - - - - - - - - - - >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableWriteConflict [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] >> Cdc::KeysOnlyLog[PqRunner] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] >> DataShardSnapshots::PostMergeNotCompactedTooEarly [GOOD] >> DataShardSnapshots::PipelineAndMediatorRestoreRace >> KqpScanArrowFormat::AllTypesColumnsCellvec [GOOD] >> KqpScanArrowFormat::SingleKey >> KqpResultSetFormats::ArrowFormat_EmptyBatch [GOOD] >> KqpResultSetFormats::ArrowFormat_AllTypes+isOlap >> TBackupCollectionTests::DropCollectionVerifyCDCCleanup [GOOD] >> TBackupCollectionTests::DropCollectionRollbackOnFailure >> DataShardSnapshots::LockedWriteWithAsyncIndex+WithRestart+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink >> KqpResultSetFormats::ValueFormat_SmallChannelBufferSize [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Unspecified ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterStuckRW [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:26:03.084536Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:03.244741Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:26:03.245563Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:26:03.245644Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:26:03.685595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:03.685759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:03.954029Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493559457007 != 1767493559457010 2026-01-04T02:26:04.179992Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:04.291397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:04.417603Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:26:05.031526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:05.060093Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:05.192797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:05.718346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:06.101375Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:06.101546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1035:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:06.101643Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:06.126734Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1038:2873], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:06.127121Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:06.148857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:26:06.429612Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1039:2874], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:26:06.670276Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1096:2912] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... performing the first select { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } ... waiting for commit read sets ... captured readset ... captured readset ... performing an upsert ... performing the second select ... performing the third select ... performing the last upsert and commit 2026-01-04T02:26:11.781189Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NzE3ZWYxZjUtYjUwNGMwY2UtNWRiNGYzM2UtOTZiNzY1NTE=, ActorId: [1:1167:2960], ActorState: ExecuteState, LegacyTraceId: 01ke3d7517e6grxtepn0x2hsbb, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table-1`" issue_code: 2001 severity: 1 } trace_id# ... waiting for SysViewsRoster update finished 2026-01-04T02:26:16.849052Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:16.859807Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:26:16.864526Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:26:16.864827Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:26:16.865003Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:26:17.249940Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:17.250068Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:17.278732Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767493572612973 != 1767493572612977 2026-01-04T02:26:17.284346Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:17.362885Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:17.437141Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:26:17.720229Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:17.740186Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:17.844160Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:17.865170Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [2:877:2759], Recipient [2:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:26:17.866285Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [2:877:2759], Recipient [2:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:26:17.866587Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:886:2765] 2026-01-04T02:26:17.866825Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:26:17.914234Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [2:877:2759], Recipient [2:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:26:17.915037Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:26:17.915170Z node 2 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:26:17.916939Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:26:17.917023Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:26:17.917090Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:26:17.917444Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:26:17.917622Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:26:17.917712Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:901:2765] in generation 1 2026-01-04T02:26:17.928996Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestor ... # 4001 txid# 281474976715664 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2026-01-04T02:26:19.700980Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:26:19.701009Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715664 ... performing the first select 2026-01-04T02:26:20.710698Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1282:3041], Recipient [2:886:2765]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 2026-01-04T02:26:20.711213Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-04T02:26:20.711321Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2026-01-04T02:26:20.711424Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-04T02:26:20.711472Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2026-01-04T02:26:20.711514Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:26:20.711564Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:26:20.711630Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2026-01-04T02:26:20.711681Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-04T02:26:20.711714Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:26:20.711739Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 
2026-01-04T02:26:20.711767Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2026-01-04T02:26:20.711913Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-04T02:26:20.712242Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-04T02:26:20.712315Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v5000/18446744073709551615 2026-01-04T02:26:20.712375Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1282:3041], 0} after executionsCount# 1 2026-01-04T02:26:20.712426Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1282:3041], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:26:20.712518Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:1282:3041], 0} finished in read 2026-01-04T02:26:20.712605Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-04T02:26:20.712633Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T02:26:20.712662Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:26:20.712693Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:26:20.712742Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-04T02:26:20.712764Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:26:20.712789Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2026-01-04T02:26:20.712836Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T02:26:20.712956Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-04T02:26:20.713258Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1284:3042], Recipient [2:969:2824]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC KeysSize: 1 
2026-01-04T02:26:20.713582Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [2:70:2117], Recipient [2:886:2765]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 2 Status: STATUS_SUBSCRIBED 2026-01-04T02:26:20.713698Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-04T02:26:20.713752Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2026-01-04T02:26:20.719734Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-04T02:26:20.719812Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2026-01-04T02:26:20.719845Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-04T02:26:20.719880Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-04T02:26:20.719944Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2026-01-04T02:26:20.719983Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-04T02:26:20.720010Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-04T02:26:20.720035Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2026-01-04T02:26:20.720060Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2026-01-04T02:26:20.720222Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715665 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 2 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-04T02:26:20.720482Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037889 Acquired lock# 281474976715665, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-04T02:26:20.720535Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v5000/18446744073709551615 2026-01-04T02:26:20.720573Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[2:1284:3042], 0} after executionsCount# 1 2026-01-04T02:26:20.720612Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[2:1284:3042], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:26:20.720677Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[2:1284:3042], 0} finished in read 2026-01-04T02:26:20.720759Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 
2026-01-04T02:26:20.720788Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2026-01-04T02:26:20.720815Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2026-01-04T02:26:20.720844Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2026-01-04T02:26:20.720890Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-04T02:26:20.720913Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2026-01-04T02:26:20.720936Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037889 has finished 2026-01-04T02:26:20.720964Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-04T02:26:20.721046Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-04T02:26:20.721344Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [2:70:2117], Recipient [2:969:2824]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715665 LockNode: 2 Status: STATUS_SUBSCRIBED 2026-01-04T02:26:20.722394Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1282:3041], Recipient [2:886:2765]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:26:20.722469Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-04T02:26:20.722604Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1284:3042], Recipient [2:969:2824]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:26:20.722639Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |83.9%| [LD] {RESULT} $(B)/ydb/services/cms/ut/ydb-services-cms-ut |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/cms/ut/ydb-services-cms-ut >> KqpResultSetFormats::ArrowFormat_Types_Arithmetic [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Binary >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:26:07.916337Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:08.178557Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:26:08.179155Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:26:08.179228Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:26:08.501069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:08.501205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:08.578206Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493563252691 != 1767493563252694 2026-01-04T02:26:08.591967Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:08.643372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:08.740622Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:26:09.070235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:09.086429Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:09.251834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:09.292852Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:26:09.294043Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:26:09.294380Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-04T02:26:09.294635Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:26:09.334161Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:26:09.334963Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:26:09.335105Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:26:09.336822Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:26:09.336886Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:26:09.336942Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:26:09.337266Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:26:09.337409Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:26:09.337505Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:26:09.348356Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:26:09.372967Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:26:09.373190Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:26:09.373316Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:26:09.373362Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:26:09.373398Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:26:09.373430Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:09.373562Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:09.373603Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:09.373969Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:26:09.374062Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:26:09.374151Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:26:09.374248Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:26:09.374308Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:26:09.374351Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:26:09.374386Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:26:09.374425Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:26:09.374467Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-04T02:26:09.374592Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:09.374624Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:09.374665Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:26:09.375082Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-04T02:26:09.375132Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:26:09.375247Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:26:09.375471Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:26:09.375529Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:26:09.375612Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:26:09.375661Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:26:09.375705Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:26:09.375745Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T02:26:09.375786Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:26:09.376146Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:26:09.376194Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-04T02:26:09.376237Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:26:09.376274Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-04T02:26:09.376334Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-04T02:26:09.376357Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:26:09.376386Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-04T02:26:09.376411Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-04T02:26:09.376431Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-04T02:26:09.377624Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-04T02:26:09.377676Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:26:09.388452Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:26:09.388540Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474 ... UG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-04T02:26:20.749678Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-04T02:26:20.750008Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1089:2907];scan_id=1;tx_id=281474976715662;fline=kqp_scan_fetcher_actor.cpp:106;event=TEvTerminateFromCompute;sender=[2:1086:2904];info={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-04T02:26:20.750109Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1089:2907];scan_id=1;tx_id=281474976715662;fline=kqp_scan_compute_manager.h:321;event=abort_all_scanners;error_message=Send abort execution from compute actor, message: {
: Error: COMPUTE_STATE_FAILURE }; 2026-01-04T02:26:20.750515Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1082:2878] TxId: 281474976715662. Ctx: { TraceId: 01ke3d7c7be2b98wnn13mkv8sy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDZiOWRkNGItZTYyMDQ0NzYtZmUwMDhjM2ItZWZkMzE3ZTE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1086:2904] TaskId# 1 State# COMPUTE_STATE_FAILURE Stats# {CpuTimeUs: 366005 Tasks { TaskId: 1 CpuTimeUs: 255852 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 13 BuildCpuTimeUs: 255839 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-46vtxmsa7i" NodeId: 2 CreateTimeMs: 1767493579524 CurrentWaitInputTimeUs: 412754 UpdateTimeMs: 1767493580747 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:26:20.750623Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01ke3d7c7be2b98wnn13mkv8sy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDZiOWRkNGItZTYyMDQ0NzYtZmUwMDhjM2ItZWZkMzE3ZTE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1086:2904] 2026-01-04T02:26:20.750740Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1210} ActorId: [2:1082:2878] TxId: 281474976715662. Ctx: { TraceId: 01ke3d7c7be2b98wnn13mkv8sy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDZiOWRkNGItZTYyMDQ0NzYtZmUwMDhjM2ItZWZkMzE3ZTE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Task does not have the CA id yet or is already complete TaskId# 1 trace_id# 2026-01-04T02:26:20.750846Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1203} ActorId: [2:1082:2878] TxId: 281474976715662. Ctx: { TraceId: 01ke3d7c7be2b98wnn13mkv8sy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDZiOWRkNGItZTYyMDQ0NzYtZmUwMDhjM2ItZWZkMzE3ZTE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Aborting compute actor execution Issues# {
: Error: Terminate execution } ComputeActor# [2:1087:2905] TaskId# 2 trace_id# 2026-01-04T02:26:20.750950Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1203} ActorId: [2:1082:2878] TxId: 281474976715662. Ctx: { TraceId: 01ke3d7c7be2b98wnn13mkv8sy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDZiOWRkNGItZTYyMDQ0NzYtZmUwMDhjM2ItZWZkMzE3ZTE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Aborting compute actor execution Issues# {
: Error: Terminate execution } ComputeActor# [2:1088:2906] TaskId# 3 trace_id# 2026-01-04T02:26:20.751125Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [2:1082:2878] TxId: 281474976715662. Ctx: { TraceId: 01ke3d7c7be2b98wnn13mkv8sy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDZiOWRkNGItZTYyMDQ0NzYtZmUwMDhjM2ItZWZkMzE3ZTE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-04T02:26:20.751253Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 1970-01-01T00:00:04.000000Z, after 0.550000s 2026-01-04T02:26:20.751313Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:1087:2905], TxId: 281474976715662, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3d7c7be2b98wnn13mkv8sy. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=ZDZiOWRkNGItZTYyMDQ0NzYtZmUwMDhjM2ItZWZkMzE3ZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646735 2026-01-04T02:26:20.751400Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [2:1087:2905], TxId: 281474976715662, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3d7c7be2b98wnn13mkv8sy. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=ZDZiOWRkNGItZTYyMDQ0NzYtZmUwMDhjM2ItZWZkMzE3ZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [2:1082:2878], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2026-01-04T02:26:20.751515Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715662, task: 2. pass away 2026-01-04T02:26:20.751623Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-04T02:26:20.753572Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-04T02:26:20.753739Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:1088:2906], TxId: 281474976715662, task: 3. Ctx: { CheckpointId : . TraceId : 01ke3d7c7be2b98wnn13mkv8sy. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=ZDZiOWRkNGItZTYyMDQ0NzYtZmUwMDhjM2ItZWZkMzE3ZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646735 2026-01-04T02:26:20.753827Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [2:1088:2906], TxId: 281474976715662, task: 3. Ctx: { CheckpointId : . TraceId : 01ke3d7c7be2b98wnn13mkv8sy. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=ZDZiOWRkNGItZTYyMDQ0NzYtZmUwMDhjM2ItZWZkMzE3ZTE=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [2:1082:2878], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2026-01-04T02:26:20.753915Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715662, task: 3. pass away 2026-01-04T02:26:20.753985Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=3;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-04T02:26:20.756887Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 3. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-04T02:26:20.757345Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=ZDZiOWRkNGItZTYyMDQ0NzYtZmUwMDhjM2ItZWZkMzE3ZTE=, ActorId: [2:1055:2878], ActorState: ExecuteState, LegacyTraceId: 01ke3d7c7be2b98wnn13mkv8sy, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 }{ message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } trace_id# 2026-01-04T02:26:20.757790Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [2:68:2115] Handle TEvExecuteKqpTransaction 2026-01-04T02:26:20.757905Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [2:68:2115] TxId# 281474976715664 ProcessProposeKqpTransaction 2026-01-04T02:26:20.758504Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976715661] shutting down 2026-01-04T02:26:20.758593Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [2:68:2115] Handle TEvProposeTransaction 2026-01-04T02:26:20.758639Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [2:68:2115] TxId# 0 ProcessProposeTransaction 2026-01-04T02:26:20.758744Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:288: actor# [2:68:2115] Cookie# 0 userReqId# "" txid# 0 reqId# [2:1125:2938] SnapshotReq marker# P0 2026-01-04T02:26:20.759148Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:152: Actor# [2:1127:2938] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2026-01-04T02:26:20.759347Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715664. Resolved key sets: 0 2026-01-04T02:26:20.759408Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:272: Actor# [2:1127:2938] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2026-01-04T02:26:20.759530Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715664. Ctx: { TraceId: 01ke3d7c7be2b98wnn13mkv8sy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDZiOWRkNGItZTYyMDQ0NzYtZmUwMDhjM2ItZWZkMzE3ZTE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-01-04T02:26:20.759609Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [2:1124:2878] TxId: 281474976715664. Ctx: { TraceId: 01ke3d7c7be2b98wnn13mkv8sy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDZiOWRkNGItZTYyMDQ0NzYtZmUwMDhjM2ItZWZkMzE3ZTE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 0 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-04T02:26:20.759721Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [2:1124:2878] TxId: 281474976715664. Ctx: { TraceId: 01ke3d7c7be2b98wnn13mkv8sy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDZiOWRkNGItZTYyMDQ0NzYtZmUwMDhjM2ItZWZkMzE3ZTE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Terminate execution trace_id# 2026-01-04T02:26:20.759798Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [2:1124:2878] TxId: 281474976715664. Ctx: { TraceId: 01ke3d7c7be2b98wnn13mkv8sy, Database: , SessionId: ydb://session/3?node_id=2&id=ZDZiOWRkNGItZTYyMDQ0NzYtZmUwMDhjM2ItZWZkMzE3ZTE=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-04T02:26:20.759902Z node 2 :TX_PROXY DEBUG: snapshotreq.cpp:1451: Actor# [2:1125:2938] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2026-01-04T02:26:20.760200Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 3, sender: [2:830:2725], selfId: [2:66:2113], source: [2:1055:2878] 2026-01-04T02:26:20.760531Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1125:2938], Recipient [2:886:2766]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2026-01-04T02:26:20.761431Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=2&id=ZDZiOWRkNGItZTYyMDQ0NzYtZmUwMDhjM2ItZWZkMzE3ZTE=, workerId: [2:1055:2878], local sessions count: 0 Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } } YdbStatus: ABORTED ConsumedRu: 525 |83.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] >> Cdc::KeysOnlyLog[YdsRunner] |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |83.9%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/ydb-core-tx-tx_proxy-ut_schemereq >> KqpResultSetFormats::DefaultFormat [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Always >> TBackupCollectionTests::DropCollectionRollbackOnFailure [GOOD] >> TBackupCollectionTests::DropCollectionValidationCases |83.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |83.9%| [LD] {RESULT} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut |83.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/certificate_check/ut/ydb-core-security-certificate_check-ut >> DataShardOutOfOrder::TestShardRestartNoUndeterminedImmediate [GOOD] >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestUnprotectedReadsThenWriteVisibility [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:26:13.690526Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:13.691557Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:13.702427Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:685:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:26:14.167628Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:26:14.195235Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:26:14.196411Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2225], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:26:14.196754Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:26:14.197004Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:26:14.212445Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:26:14.212532Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:26:15.472473Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:15.656694Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:15.656828Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:15.657306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:15.657380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:15.723015Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:26:15.723742Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:15.724208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:15.940558Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:26:15.998181Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:26:17.084506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:17.183635Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:17.183772Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:17.618684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:17.743728Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [2:1587:2381], Recipient [2:1613:2394]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:26:17.749132Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [2:1587:2381], Recipient [2:1613:2394]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:26:17.749504Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1613:2394] 2026-01-04T02:26:17.749791Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:26:17.841600Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [2:1587:2381], Recipient [2:1613:2394]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:26:17.861424Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:26:17.863956Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:26:17.865855Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:26:17.865942Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:26:17.866045Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:26:17.866472Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:26:17.866881Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:26:17.866987Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:1637:2394] in generation 1 2026-01-04T02:26:17.873172Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:26:17.911579Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:26:17.911827Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:26:17.911947Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, 
actorId: [2:1641:2411] 2026-01-04T02:26:17.911986Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:26:17.912020Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:26:17.912055Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:17.913222Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1613:2394], Recipient [2:1613:2394]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:17.913308Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:17.913776Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:26:17.913895Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:26:17.914016Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:26:17.914074Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:26:17.914122Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:26:17.914172Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:26:17.914206Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:26:17.914245Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:26:17.914288Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:26:17.914392Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1610:2392], Recipient [2:1613:2394]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:17.914437Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:17.914492Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1596:3052], serverId# [2:1610:2392], sessionId# [0:0:0] 2026-01-04T02:26:17.915028Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:814:2460], Recipient [2:1610:2392] 2026-01-04T02:26:17.915090Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:26:17.915231Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:26:17.915491Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:26:17.915543Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:26:17.915644Z node 2 
:TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:26:17.915704Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:26:17.915750Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:26:17.915789Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T02:26:17.915835Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:26:17.916148Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 720751862240 ... iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T02:26:22.134718Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-04T02:26:22.136008Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:2002:2448], Recipient [2:1613:2394]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:26:22.136082Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2026-01-04T02:26:22.308910Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:2020:2449], Recipient [2:1613:2394]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2026-01-04T02:26:22.309358Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-04T02:26:22.309465Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-01-04T02:26:22.309561Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T02:26:22.309608Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-01-04T02:26:22.309654Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:26:22.309691Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:26:22.309746Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-01-04T02:26:22.309788Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T02:26:22.309853Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on 
unit BuildAndWaitDependencies 2026-01-04T02:26:22.309889Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T02:26:22.309916Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-01-04T02:26:22.310081Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } LockTxId: 281474976715662 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-04T02:26:22.310392Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715662, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-04T02:26:22.310450Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-04T02:26:22.310498Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:2020:2449], 0} after executionsCount# 1 2026-01-04T02:26:22.310554Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:2020:2449], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:26:22.310643Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:2020:2449], 0} finished in read 2026-01-04T02:26:22.310715Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T02:26:22.310744Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T02:26:22.310772Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:26:22.310803Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:26:22.310848Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T02:26:22.310894Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:26:22.310925Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-01-04T02:26:22.310968Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T02:26:22.311077Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-04T02:26:22.312053Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:2020:2449], Recipient [2:1613:2394]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:26:22.312136Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 
{ items { uint32_value: 1 } items { uint32_value: 1 } } 2026-01-04T02:26:22.703242Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:2044:2450], Recipient [2:1613:2394]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2026-01-04T02:26:22.703445Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-04T02:26:22.703540Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit CheckRead 2026-01-04T02:26:22.703650Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2026-01-04T02:26:22.703699Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit CheckRead 2026-01-04T02:26:22.703742Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:26:22.703783Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:26:22.703834Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:8] at 72075186224037888 2026-01-04T02:26:22.703876Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2026-01-04T02:26:22.703950Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:26:22.703979Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T02:26:22.704006Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit ExecuteRead 2026-01-04T02:26:22.704157Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } LockTxId: 281474976715666 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-04T02:26:22.704486Z node 2 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715666, counter# 1 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-04T02:26:22.704544Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/18446744073709551615 2026-01-04T02:26:22.704594Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:2044:2450], 0} after executionsCount# 1 2026-01-04T02:26:22.704653Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:2044:2450], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 
0 2026-01-04T02:26:22.704731Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:2044:2450], 0} finished in read 2026-01-04T02:26:22.704818Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2026-01-04T02:26:22.704848Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T02:26:22.704877Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:26:22.704909Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:26:22.704954Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 72075186224037888 is Executed 2026-01-04T02:26:22.704995Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:26:22.705031Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:8] at 72075186224037888 has finished 2026-01-04T02:26:22.705083Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T02:26:22.705194Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-04T02:26:22.706754Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:2044:2450], Recipient [2:1613:2394]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:26:22.706841Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-04T02:26:22.707222Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [2:260:2140], Recipient [2:1613:2394]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715666 LockNode: 1 Status: STATUS_SUBSCRIBED { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> Cdc::UuidExchange[PqRunner] >> TSchemeShardServerLess::StorageBilling [GOOD] >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] >> TBackupCollectionTests::DropCollectionValidationCases [GOOD] >> TBackupCollectionTests::DropCollectionVerifyLocalDatabaseCleanup >> TBackupCollectionTests::DropLargeBackupCollection [GOOD] >> TBackupCollectionTests::DropSpecificCollectionAmongMultiple |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD] |84.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBilling [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:24:40.411496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:24:40.411584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:40.411626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:24:40.411661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:24:40.411696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:24:40.411736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:24:40.411815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:24:40.411909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:24:40.412834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:24:40.413533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:24:40.535136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:24:40.535206Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:24:40.560312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:24:40.565099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:24:40.565326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:24:40.580071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:24:40.580379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:24:40.581302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:24:40.581615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:24:40.588800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:40.589053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:24:40.590464Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:24:40.590557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:24:40.590689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:24:40.590735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:24:40.590799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:24:40.590972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:24:40.815470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:40.816494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:40.816749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:40.816824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:40.816920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:40.816989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:40.817071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:40.817154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:40.817221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:40.817319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:40.817401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:40.817488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:40.817573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: 
ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:40.817655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:24:40.817739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... eBoard Send, to populator: [1:1102:3019], at schemeshard: 72075186233409549, txId: 107, path id: 32 2026-01-04T02:26:01.394660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2026-01-04T02:26:01.394735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72075186233409549 2026-01-04T02:26:01.394846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72075186233409549 2026-01-04T02:26:01.394904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 107:0, datashard: 72075186233409552, at schemeshard: 72075186233409549 2026-01-04T02:26:01.394953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 107:0 129 -> 240 2026-01-04T02:26:01.395743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409549, cookie: 107 2026-01-04T02:26:01.395846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409549, cookie: 107 2026-01-04T02:26:01.395882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2026-01-04T02:26:01.395921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 1], version: 12 2026-01-04T02:26:01.395963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 6 2026-01-04T02:26:01.396845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2026-01-04T02:26:01.397003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409549, msg: Owner: 72075186233409549 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409549, cookie: 107 2026-01-04T02:26:01.397033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409549, txId: 107 2026-01-04T02:26:01.397062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409549, txId: 107, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], version: 18446744073709551615 2026-01-04T02:26:01.397092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 4 2026-01-04T02:26:01.397158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2026-01-04T02:26:01.413296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72075186233409549 2026-01-04T02:26:01.413376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72075186233409549 2026-01-04T02:26:01.413696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-04T02:26:01.413937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-04T02:26:01.413976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-04T02:26:01.414022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-04T02:26:01.414053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-04T02:26:01.414138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2026-01-04T02:26:01.414236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1296:3176] message: TxId: 107 2026-01-04T02:26:01.414310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-04T02:26:01.414348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-01-04T02:26:01.414379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:0 2026-01-04T02:26:01.414483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 2 2026-01-04T02:26:01.416429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2026-01-04T02:26:01.416661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409549, cookie: 107 2026-01-04T02:26:01.420893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-04T02:26:01.420956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:3497:5329] TestWaitNotification: OK eventTxId 107 2026-01-04T02:26:01.448354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72075186233409549, message: Source { RawX1: 1268 RawX2: 4294970452 } TabletId: 72075186233409552 State: 4 2026-01-04T02:26:01.448465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409552, state: Offline, at schemeshard: 72075186233409549 2026-01-04T02:26:01.450923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72075186233409549 2026-01-04T02:26:01.451066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72075186233409549:4 hive 72057594037968897 at ss 72075186233409549 2026-01-04T02:26:01.451686Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409552 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72075186233409549 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409552 2026-01-04T02:26:01.459605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72075186233409549 ShardLocalIdx: 4, at schemeshard: 72075186233409549 2026-01-04T02:26:01.459970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 1 2026-01-04T02:26:01.463809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72075186233409549 2026-01-04T02:26:01.463925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72075186233409549, LocalPathId: 32], at schemeshard: 72075186233409549 2026-01-04T02:26:01.464027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72075186233409549, LocalPathId: 1] was 5 2026-01-04T02:26:01.475971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72075186233409549:4 2026-01-04T02:26:01.476067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72075186233409549:4 tabletId 72075186233409552 2026-01-04T02:26:01.476673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72075186233409549 2026-01-04T02:26:01.600750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-01-04T02:26:01.600864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: 
TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-04T02:26:01.600974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-04T02:26:01.601048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2026-01-04T02:26:01.601101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2026-01-04T02:26:01.601148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2026-01-04T02:26:01.601204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2026-01-04T02:26:01.601238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2026-01-04T02:26:01.601267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2026-01-04T02:26:01.670321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:26:01.670456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:83: TTxServerlessStorageBilling: nothing to bill, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], next retry at: 2020-09-18T18:06:00.000000Z 2026-01-04T02:26:01.670532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:26:26.662306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:26:26.662423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:83: TTxServerlessStorageBilling: nothing to bill, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], next retry at: 2020-09-18T18:07:00.000000Z 2026-01-04T02:26:26.662512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableSingleShardImmediate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:26:10.289788Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:10.426288Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:26:10.426770Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:26:10.426831Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:26:10.723622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:10.723762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:10.812110Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493566778293 != 1767493566778296 2026-01-04T02:26:10.825233Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:10.880615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:10.975494Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:26:11.514465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:11.532530Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:11.642580Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-04T02:26:11.642643Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:26:11.642742Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-04T02:26:11.762550Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 2 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T02:26:11.762650Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:26:11.763306Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T02:26:11.763418Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:26:11.763713Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:11.763842Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:26:11.763977Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:26:11.766001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:11.766550Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:26:11.767234Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:26:11.767293Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:26:11.814689Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:26:11.815902Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:26:11.816205Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:892:2768] 2026-01-04T02:26:11.816451Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:26:11.826196Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:879:2760], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:26:11.864817Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:26:11.865227Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:879:2760], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:26:11.865478Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:894:2770] 2026-01-04T02:26:11.865673Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:26:11.873924Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:879:2760], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:26:11.874607Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:26:11.874744Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:26:11.876370Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:26:11.876452Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:26:11.876537Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:26:11.876850Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:26:11.877062Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:26:11.877133Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:923:2768] in generation 1 2026-01-04T02:26:11.877508Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:26:11.877584Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:26:11.878837Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-04T02:26:11.878914Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-04T02:26:11.878970Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-04T02:26:11.879272Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:26:11.879356Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:26:11.879419Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:924:2770] in generation 1 2026-01-04T02:26:11.890470Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:26:11.944313Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:26:11.944507Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:26:11.944619Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:927:2789] 2026-01-04T02:26:11.944674Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:26:11.944706Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:26:11.944744Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:11.945030Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:892:2768], Recipient [1:892:2768]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:11.945099Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:11.945209Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:26:11.945251Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: 
Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:26:11.945335Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:26:11.945400Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:928:2790] 2026-01-04T02:26:11.945427Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:26:11.945460Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:26:11.945494Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:26:11.945824Z node 1 :TX_ ... eRestarts 2026-01-04T02:26:27.315308Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2026-01-04T02:26:27.315349Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-04T02:26:27.315385Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T02:26:27.315462Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:26:27.315493Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-04T02:26:27.315526Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:26:27.315557Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:26:27.315599Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-04T02:26:27.315622Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:26:27.315648Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-01-04T02:26:27.326465Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-04T02:26:27.326548Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T02:26:27.326608Z node 2 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-04T02:26:27.326715Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:27.328750Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [2:68:2115] Handle TEvProposeTransaction 2026-01-04T02:26:27.328813Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [2:68:2115] TxId# 281474976715661 ProcessProposeTransaction 2026-01-04T02:26:27.328905Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:272: actor# [2:68:2115] Cookie# 0 userReqId# "" txid# 281474976715661 
SEND to# [2:1059:2877] DataReq marker# P0 2026-01-04T02:26:27.329038Z node 2 :TX_PROXY DEBUG: datareq.cpp:1330: Actor# [2:1059:2877] Cookie# 0 txid# 281474976715661 HANDLE TDataReq marker# P1 2026-01-04T02:26:27.329274Z node 2 :TX_PROXY DEBUG: datareq.cpp:1467: Actor# [2:1059:2877] txid# 281474976715661 HANDLE EvNavigateKeySetResult TDataReq marker# P3b ErrorCount# 0 2026-01-04T02:26:27.329465Z node 2 :TX_PROXY DEBUG: datareq.cpp:1620: Actor# [2:1059:2877] txid# 281474976715661 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2026-01-04T02:26:27.329558Z node 2 :TX_PROXY DEBUG: datareq.cpp:1204: Actor# [2:1059:2877] txid# 281474976715661 SEND TEvProposeTransaction to datashard 72075186224037888 with read table request affected shards 1 followers disallowed marker# P4b 2026-01-04T02:26:27.329925Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [2:1059:2877], Recipient [2:886:2765]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCAN SourceDeprecated { RawX1: 1059 RawX2: 8589937469 } TxBody: " \0018\001B8\n\014\010\200\202\224\204\200\200\200\200\001\020&\022\t\010\001\022\003key\030\002\022\013\010\002\022\005value\030\002\032\016\n\006\001\000\000\000\000\200\022\000\030\001 \001 \001H\001R\022\t#\004\000\000\000\000\000\000\021=\013\000\000\002\000\000\000" TxId: 281474976715661 ExecLevel: 0 Flags: 8 2026-01-04T02:26:27.329980Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:26:27.415569Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:26:27.415823Z node 2 :TX_DATASHARD TRACE: key_validator.cpp:33: -- AddReadRange: [(Uint32 : NULL) ; ()] table: [72057594046644480:38:0] 2026-01-04T02:26:27.415924Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit CheckDataTx 2026-01-04T02:26:27.415989Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2026-01-04T02:26:27.416038Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit CheckDataTx 2026-01-04T02:26:27.416079Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:26:27.416125Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:26:27.416176Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-01-04T02:26:27.416250Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715661] at 72075186224037888 2026-01-04T02:26:27.416295Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2026-01-04T02:26:27.416322Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:26:27.416345Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit MakeScanSnapshot 2026-01-04T02:26:27.416372Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit MakeScanSnapshot 2026-01-04T02:26:27.416402Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2026-01-04T02:26:27.416425Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit MakeScanSnapshot 2026-01-04T02:26:27.416445Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit WaitForStreamClearance 2026-01-04T02:26:27.416468Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2026-01-04T02:26:27.416520Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:99: Requested stream clearance from [2:1059:2877] for [0:281474976715661] at 72075186224037888 2026-01-04T02:26:27.416552Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2026-01-04T02:26:27.416610Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:26:27.416753Z node 2 :TX_PROXY DEBUG: datareq.cpp:2504: Got clearance request, shard: 72075186224037888, txid: 281474976715661 2026-01-04T02:26:27.416837Z node 2 :TX_PROXY DEBUG: datareq.cpp:2513: Collected all clerance requests, txid: 281474976715661 2026-01-04T02:26:27.416883Z node 2 :TX_PROXY DEBUG: datareq.cpp:2968: Send stream clearance, shard: 72075186224037888, txid: 281474976715661, cleared: 1 2026-01-04T02:26:27.417017Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287942, Sender [2:1059:2877], Recipient [2:886:2765]: NKikimrTx.TEvStreamClearancePending TxId: 281474976715661 2026-01-04T02:26:27.417064Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3175: StateWork, processing event TEvTxProcessing::TEvStreamClearancePending 2026-01-04T02:26:27.417163Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287940, Sender [2:1059:2877], Recipient [2:886:2765]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2026-01-04T02:26:27.417199Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3174: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2026-01-04T02:26:27.417293Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:886:2765], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:27.417323Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:27.417387Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:26:27.417460Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:26:27.417517Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 2026-01-04T02:26:27.417560Z node 2 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit WaitForStreamClearance 2026-01-04T02:26:27.417608Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [0:281474976715661] at 72075186224037888 2026-01-04T02:26:27.417650Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Executed 2026-01-04T02:26:27.417699Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715661] at 72075186224037888 executing on unit WaitForStreamClearance 2026-01-04T02:26:27.417737Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715661] at 72075186224037888 to execution unit ReadTableScan 2026-01-04T02:26:27.417771Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715661] at 72075186224037888 on unit ReadTableScan 2026-01-04T02:26:27.418013Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715661] at 72075186224037888 is Continue 2026-01-04T02:26:27.418056Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:26:27.418094Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-01-04T02:26:27.418140Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:26:27.418175Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:26:27.418249Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:26:27.418732Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:1065:2882], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-01-04T02:26:27.418777Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> Cdc::KeysOnlyLog[PqRunner] [GOOD] >> Cdc::KeysOnlyLog[TopicRunner] >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Binary [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Unspecified >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Unspecified [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNoBarrierRestartImmediateLongTail [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:26:06.205677Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:06.416805Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:26:06.417290Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:26:06.417369Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:26:06.948999Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:06.949141Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:07.050407Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493557376075 != 1767493557376078 2026-01-04T02:26:07.062487Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:07.108047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:07.215092Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:26:07.574101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:07.592844Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:07.719897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:07.763911Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:26:07.765114Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:26:07.765464Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-04T02:26:07.765756Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:26:07.817960Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:26:07.818759Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:26:07.818917Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:26:07.820803Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:26:07.820894Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:26:07.820957Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:26:07.821388Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:26:07.821535Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:26:07.821637Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:26:07.832619Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:26:07.865485Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:26:07.865739Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:26:07.866248Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:26:07.866307Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:26:07.866354Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:26:07.866393Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:07.866575Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:07.866629Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:07.867057Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:26:07.867210Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:26:07.867324Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:26:07.867423Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:26:07.867480Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:26:07.867521Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:26:07.867563Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:26:07.867603Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:26:07.867663Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-04T02:26:07.867817Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:07.867856Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:07.867903Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:26:07.868435Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-04T02:26:07.868500Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:26:07.868628Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:26:07.868876Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:26:07.868953Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:26:07.869041Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:26:07.869094Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:26:07.869135Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:26:07.869188Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T02:26:07.869232Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:26:07.869640Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:26:07.869691Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-04T02:26:07.869728Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:26:07.869786Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-04T02:26:07.869969Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-04T02:26:07.870003Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:26:07.870039Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-04T02:26:07.870073Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-04T02:26:07.870102Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-04T02:26:07.871654Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-04T02:26:07.871719Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:26:07.882819Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:26:07.882917Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474 ... Shards nodes resolved SuccessNodes# 1 FailedNodes# 0 trace_id# 2026-01-04T02:26:27.941131Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:283} ActorId: [2:1426:3134] TxId: 281474976715671. Ctx: { TraceId: 01ke3d7mtdcphcntq5my9x1vvz, Database: , SessionId: ydb://session/3?node_id=2&id=Y2ExODM1Ny1kNjNlNzFmMy04YWIxMjNmYi04ZGU4NDFk, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Shards on nodes: node 2: [72075186224037888] trace_id# 2026-01-04T02:26:27.941228Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715671. Ctx: { TraceId: 01ke3d7mtdcphcntq5my9x1vvz, Database: , SessionId: ydb://session/3?node_id=2&id=Y2ExODM1Ny1kNjNlNzFmMy04YWIxMjNmYi04ZGU4NDFk, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-01-04T02:26:27.941586Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715671. Ctx: { TraceId: 01ke3d7mtdcphcntq5my9x1vvz, Database: , SessionId: ydb://session/3?node_id=2&id=Y2ExODM1Ny1kNjNlNzFmMy04YWIxMjNmYi04ZGU4NDFk, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [2:1430:3134] 2026-01-04T02:26:27.941671Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715671. Ctx: { TraceId: 01ke3d7mtdcphcntq5my9x1vvz, Database: , SessionId: ydb://session/3?node_id=2&id=Y2ExODM1Ny1kNjNlNzFmMy04YWIxMjNmYi04ZGU4NDFk, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:1430:3134], channels: 1 2026-01-04T02:26:27.941750Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [2:1426:3134] TxId: 281474976715671. Ctx: { TraceId: 01ke3d7mtdcphcntq5my9x1vvz, Database: , SessionId: ydb://session/3?node_id=2&id=Y2ExODM1Ny1kNjNlNzFmMy04YWIxMjNmYi04ZGU4NDFk, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-04T02:26:27.941836Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1426:3134] TxId: 281474976715671. Ctx: { TraceId: 01ke3d7mtdcphcntq5my9x1vvz, Database: , SessionId: ydb://session/3?node_id=2&id=Y2ExODM1Ny1kNjNlNzFmMy04YWIxMjNmYi04ZGU4NDFk, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1430:3134], trace_id# 2026-01-04T02:26:27.941908Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1426:3134] TxId: 281474976715671. 
Ctx: { TraceId: 01ke3d7mtdcphcntq5my9x1vvz, Database: , SessionId: ydb://session/3?node_id=2&id=Y2ExODM1Ny1kNjNlNzFmMy04YWIxMjNmYi04ZGU4NDFk, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1430:3134], trace_id# 2026-01-04T02:26:27.941977Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2350} ActorId: [2:1426:3134] TxId: 281474976715671. Ctx: { TraceId: 01ke3d7mtdcphcntq5my9x1vvz, Database: , SessionId: ydb://session/3?node_id=2&id=Y2ExODM1Ny1kNjNlNzFmMy04YWIxMjNmYi04ZGU4NDFk, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-04T02:26:27.942933Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1426:3134] TxId: 281474976715671. Ctx: { TraceId: 01ke3d7mtdcphcntq5my9x1vvz, Database: , SessionId: ydb://session/3?node_id=2&id=Y2ExODM1Ny1kNjNlNzFmMy04YWIxMjNmYi04ZGU4NDFk, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1430:3134] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-04T02:26:27.943022Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1426:3134] TxId: 281474976715671. Ctx: { TraceId: 01ke3d7mtdcphcntq5my9x1vvz, Database: , SessionId: ydb://session/3?node_id=2&id=Y2ExODM1Ny1kNjNlNzFmMy04YWIxMjNmYi04ZGU4NDFk, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1430:3134], trace_id# 2026-01-04T02:26:27.943089Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1426:3134] TxId: 281474976715671. Ctx: { TraceId: 01ke3d7mtdcphcntq5my9x1vvz, Database: , SessionId: ydb://session/3?node_id=2&id=Y2ExODM1Ny1kNjNlNzFmMy04YWIxMjNmYi04ZGU4NDFk, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1430:3134], trace_id# 2026-01-04T02:26:27.943618Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1432:3134], Recipient [2:1367:3101]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2026-01-04T02:26:27.943771Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-04T02:26:27.943838Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v5001/281474976715667 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v5001/18446744073709551615 2026-01-04T02:26:27.943888Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v6000/18446744073709551615 2026-01-04T02:26:27.943956Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2026-01-04T02:26:27.944051Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-04T02:26:27.944095Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2026-01-04T02:26:27.944156Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:26:27.944204Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:26:27.944259Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2026-01-04T02:26:27.944304Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-04T02:26:27.944335Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:26:27.944359Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T02:26:27.944384Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2026-01-04T02:26:27.944575Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-01-04T02:26:27.944813Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1432:3134], 0} after executionsCount# 1 2026-01-04T02:26:27.944881Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1432:3134], 0} sends rowCount# 1, bytes# 32, quota rows left# 32766, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:26:27.944971Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 
read iterator# {[2:1432:3134], 0} finished in read 2026-01-04T02:26:27.945047Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-04T02:26:27.945077Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T02:26:27.945103Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:26:27.945130Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:26:27.945184Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-04T02:26:27.945209Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:26:27.945236Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037888 has finished 2026-01-04T02:26:27.945287Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T02:26:27.946083Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1432:3134], Recipient [2:1367:3101]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:26:27.946150Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-04T02:26:27.946878Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1426:3134] TxId: 281474976715671. Ctx: { TraceId: 01ke3d7mtdcphcntq5my9x1vvz, Database: , SessionId: ydb://session/3?node_id=2&id=Y2ExODM1Ny1kNjNlNzFmMy04YWIxMjNmYi04ZGU4NDFk, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1430:3134] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1040 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 205 FinishTimeMs: 1767493587946 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 58 BuildCpuTimeUs: 147 HostName: "ghrun-46vtxmsa7i" NodeId: 2 StartTimeMs: 1767493587945 CreateTimeMs: 1767493587942 UpdateTimeMs: 1767493587946 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:26:27.947056Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715671. Ctx: { TraceId: 01ke3d7mtdcphcntq5my9x1vvz, Database: , SessionId: ydb://session/3?node_id=2&id=Y2ExODM1Ny1kNjNlNzFmMy04YWIxMjNmYi04ZGU4NDFk, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1430:3134] 2026-01-04T02:26:27.947276Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [2:1426:3134] TxId: 281474976715671. Ctx: { TraceId: 01ke3d7mtdcphcntq5my9x1vvz, Database: , SessionId: ydb://session/3?node_id=2&id=Y2ExODM1Ny1kNjNlNzFmMy04YWIxMjNmYi04ZGU4NDFk, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-04T02:26:27.947352Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [2:1426:3134] TxId: 281474976715671. 
Ctx: { TraceId: 01ke3d7mtdcphcntq5my9x1vvz, Database: , SessionId: ydb://session/3?node_id=2&id=Y2ExODM1Ny1kNjNlNzFmMy04YWIxMjNmYi04ZGU4NDFk, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.001040s ReadRows: 1 ReadBytes: 8 RequestUnits# 1 ForceFlag# true trace_id# { items { uint32_value: 7 } items { uint32_value: 4 } } |84.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartLockBrokenByUncommittedAfterRead-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:23:23.707248Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:23:24.136724Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:23:24.137542Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:23:24.137634Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:23:24.732122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:24.732252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:24.925465Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493395921392 != 1767493395921395 2026-01-04T02:23:24.973974Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:25.033323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:25.313998Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:23:26.046300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:26.293290Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=NWY2ZTA4Y2ItOGE0OTJiNjctODg1ZTRhMTktMTYwMDYwZjk=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# NWY2ZTA4Y2ItOGE0OTJiNjctODg1ZTRhMTktMTYwMDYwZjk= temp_dir_name# 0d654a04-413a-426c-a748-71afa3410605 trace_id# 2026-01-04T02:23:26.294288Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=NWY2ZTA4Y2ItOGE0OTJiNjctODg1ZTRhMTktMTYwMDYwZjk=, ActorId: [1:843:2738], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:23:26.294909Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:459} SessionId: ydb://session/3?node_id=1&id=NWY2ZTA4Y2ItOGE0OTJiNjctODg1ZTRhMTktMTYwMDYwZjk=, ActorId: [1:843:2738], ActorState: ReadyState, LegacyTraceId: 01ke3d23jp9p37txb30xebc8ec, Received request proxy_request_id# 3 prepared# false has_tx_control# false action# QUERY_ACTION_EXECUTE type# QUERY_TYPE_SQL_DDL text# CREATE TABLE `/Root/table1` (key int, value int, PRIMARY KEY (key)); rpc_actor# [0:0:0] database# database_id# /Root pool_id# default trace_id# 2026-01-04T02:23:28.090741Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:846:2741], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:28.090912Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:28.091354Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:864:2746], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:28.091446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:23:28.107676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:28.151019Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:885:2766], Recipient [1:893:2772]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:23:28.154172Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:885:2766], Recipient [1:893:2772]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:23:28.154502Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:893:2772] 2026-01-04T02:23:28.154784Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:23:28.166505Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:885:2766], Recipient [1:893:2772]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:23:28.211515Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:23:28.211655Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:23:28.213271Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:23:28.213352Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:23:28.213422Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:23:28.213875Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:23:28.214024Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:23:28.214126Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:909:2772] in generation 1 2026-01-04T02:23:28.214585Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:23:28.283168Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:23:28.283390Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:23:28.283505Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:911:2782] 2026-01-04T02:23:28.283544Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:23:28.283609Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:23:28.283654Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:23:28.283882Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:893:2772], Recipient [1:893:2772]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:23:28.283928Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:23:28.284303Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:23:28.284413Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:23:28.284489Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:23:28.284528Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:23:28.284580Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:23:28.284614Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:23:28.284648Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:23:28.284701Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:23:28.284746Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:23:28.285203Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:896:2773], Recipient [1:893:2772]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:23:28.285250Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:23:28.285347Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:892:2771], serverId# [1:896:2773], sessionId# [0:0:0] 2026-01-04T02:23:28.285457Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:896:2773] 2026-01-04T02:23:28.285495Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:23:28.285608Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:23:28.286266Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:23:28.286339Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:23:28.286428Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:23:28.286471Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:23:28.286529Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:23:28.286578Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2026-01-04T02:23:28.286617Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on ... 6:22.842537Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-04T02:26:22.842633Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:26:22.842888Z node 13 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-04T02:26:22.843042Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit CheckDataTx 2026-01-04T02:26:22.843110Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2026-01-04T02:26:22.843155Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CheckDataTx 2026-01-04T02:26:22.843195Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:26:22.843233Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:26:22.843280Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v600/0 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v600/18446744073709551615 ImmediateWriteEdgeReplied# v600/18446744073709551615 2026-01-04T02:26:22.843351Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715666] at 72075186224037888 2026-01-04T02:26:22.843396Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2026-01-04T02:26:22.843425Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:26:22.843455Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit BlockFailPoint 2026-01-04T02:26:22.843489Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit BlockFailPoint 2026-01-04T02:26:22.843522Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2026-01-04T02:26:22.843552Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit BlockFailPoint 2026-01-04T02:26:22.843582Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit ExecuteKqpDataTx 2026-01-04T02:26:22.843615Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit ExecuteKqpDataTx 2026-01-04T02:26:22.843710Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:269: Operation [0:281474976715666] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193432 2026-01-04T02:26:22.843895Z node 13 
:TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: false 2026-01-04T02:26:22.844017Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:521: add locks to result: 0 2026-01-04T02:26:22.844134Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2026-01-04T02:26:22.844167Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit ExecuteKqpDataTx 2026-01-04T02:26:22.844197Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:26:22.844230Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit FinishPropose 2026-01-04T02:26:22.844300Z node 13 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715666 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-04T02:26:22.844427Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is DelayComplete 2026-01-04T02:26:22.844463Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:26:22.844499Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715666] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:26:22.844536Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715666] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:26:22.844596Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715666] at 72075186224037888 is Executed 2026-01-04T02:26:22.844632Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715666] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:26:22.844671Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715666] at 72075186224037888 has finished 2026-01-04T02:26:22.844752Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:26:22.844794Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715666] at 72075186224037888 on unit FinishPropose 2026-01-04T02:26:22.844844Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:22.854269Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [13:70:2117], Recipient [13:1108:2925]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 13 Status: STATUS_NOT_FOUND 2026-01-04T02:26:23.255818Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [13:1174:2969], Recipient [13:1108:2925]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 
1001 RangesSize: 1 2026-01-04T02:26:23.256235Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-04T02:26:23.256369Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v600/0 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v600/18446744073709551615 ImmediateWriteEdgeReplied# v600/18446744073709551615 2026-01-04T02:26:23.256461Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v600/18446744073709551615 2026-01-04T02:26:23.256586Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2026-01-04T02:26:23.256778Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-04T02:26:23.256857Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2026-01-04T02:26:23.256934Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:26:23.257020Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:26:23.257090Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2026-01-04T02:26:23.257155Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-04T02:26:23.257195Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:26:23.257225Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T02:26:23.257255Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2026-01-04T02:26:23.257469Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-04T02:26:23.258094Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[13:1174:2969], 0} after executionsCount# 1 2026-01-04T02:26:23.258203Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[13:1174:2969], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:26:23.258373Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[13:1174:2969], 0} finished in read 2026-01-04T02:26:23.258499Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-04T02:26:23.258535Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T02:26:23.258565Z node 13 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:26:23.258599Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:26:23.258669Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-04T02:26:23.258699Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:26:23.258743Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037888 has finished 2026-01-04T02:26:23.258818Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T02:26:23.259028Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-04T02:26:23.260184Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [13:1174:2969], Recipient [13:1108:2925]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:26:23.260276Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 22 } } |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest >> TBackupCollectionTests::DropCollectionVerifyLocalDatabaseCleanup [GOOD] >> TBackupCollectionTests::DropCollectionDuringActiveOperation >> TBackupCollectionTests::DropSpecificCollectionAmongMultiple [GOOD] >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalBackup >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_2_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 2] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_3_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 3] >> KqpScanArrowFormat::SingleKey [GOOD] >> KqpScanArrowFormat::JoinWithParams ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesTwoServerlessDbs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:26:01.516603Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:01.681154Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:26:01.692505Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:26:01.692883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:26:01.693066Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:26:02.644877Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:02.885277Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:02.885407Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:02.936157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:03.024387Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.009738s 2026-01-04T02:26:03.065244Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:26:04.367763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:04.368581Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:26:04.368628Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:04.368663Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:04.369142Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:04.438847Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:05.251747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:26:13.238427Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:13.264629Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:26:13.424690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:13.424826Z 
node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:13.465542Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:26:13.473826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:13.975635Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:13.975767Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:13.977364Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:13.999287Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:14.000101Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:14.001313Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:14.001564Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:14.004939Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:14.005251Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:14.005458Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:14.005684Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:14.047144Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:15.048528Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:15.179338Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:26:15.179450Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:26:15.344110Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:26:15.345496Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:26:15.345702Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:26:15.345776Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:26:15.370012Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:26:15.370130Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:26:15.370189Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:26:15.370272Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: 
[72075186224037894] TTxInit::Complete 2026-01-04T02:26:15.371001Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:26:15.413481Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:26:15.413598Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2119:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:26:15.581984Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2156:2620] 2026-01-04T02:26:15.582843Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2156:2620], schemeshard id = 72075186224037897 2026-01-04T02:26:15.797343Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2246:2666] 2026-01-04T02:26:15.799486Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-04T02:26:15.814357Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2250:2669] Owner: [2:2249:2668]. Describe result: PathErrorUnknown 2026-01-04T02:26:15.814425Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2250:2669] Owner: [2:2249:2668]. Creating table 2026-01-04T02:26:15.814513Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2250:2669] Owner: [2:2249:2668]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:26:15.823342Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2303:2693], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:26:15.828174Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:15.837430Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2250:2669] Owner: [2:2249:2668]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:26:15.837579Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2250:2669] Owner: [2:2249:2668]. Subscribe on create table tx: 281474976720657 2026-01-04T02:26:15.856616Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2250:2669] Owner: [2:2249:2668]. 
Subscribe on tx: 281474976720657 registered 2026-01-04T02:26:16.149637Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:26:16.327677Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-04T02:26:17.279850Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2250:2669] Owner: [2:2249:2668]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-04T02:26:17.426790Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2250:2669] Owner: [2:2249:2668]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:26:17.426888Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2250:2669] Owner: [2:2249:2668]. Column diff is empty, finishing 2026-01-04T02:26:18.343790Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:18.383015Z ... PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:25.578612Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:25.579589Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4403:3828], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:25.579680Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:25.582152Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:4406:3831]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:26:25.582417Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:26:25.582649Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-01-04T02:26:25.582735Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:4409:3834] 2026-01-04T02:26:25.582820Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:4409:3834] 2026-01-04T02:26:25.589859Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4410:3921] 2026-01-04T02:26:25.590336Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:4409:3834], server id = [2:4410:3921], tablet id = 72075186224037894, status = OK 2026-01-04T02:26:25.590624Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4410:3921], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:26:25.590716Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-04T02:26:25.591158Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-04T02:26:25.591246Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:4406:3831], StatRequests.size() = 1 2026-01-04T02:26:25.639003Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:26:25.639502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4414:3838], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:25.639660Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:25.646477Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4418:3842], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:25.646630Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:25.646838Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:4421:3845], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:25.665339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:26:25.914423Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-04T02:26:25.914523Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:26:25.965335Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:4409:3834], schemeshard count = 1 2026-01-04T02:26:26.375663Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:4423:3847], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715664 completed, doublechecking } 2026-01-04T02:26:26.579756Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:4535:3918] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:26:26.613154Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:4558:3934]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:26:26.624994Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:26:26.625113Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:4558:3934], StatRequests.size() = 1 2026-01-04T02:26:26.928052Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72075186224037905, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:27.763912Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:4902:3997]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:26:27.764252Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:26:27.764749Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 1, schemeshard count = 1, urgent = 0 2026-01-04T02:26:27.764803Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-04T02:26:27.765113Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-04T02:26:27.765196Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [1:4902:3997], StatRequests.size() = 1 2026-01-04T02:26:27.865900Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:26:27.928318Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:4911:4006]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:26:27.928580Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-04T02:26:27.928633Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [1:4911:4006], StatRequests.size() = 1 2026-01-04T02:26:28.264045Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4955:4171]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-04T02:26:28.276673Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:26:28.276762Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:26:28.277108Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 
] 2026-01-04T02:26:28.277165Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-04T02:26:28.277215Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:26:28.321436Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-04T02:26:28.341295Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-04T02:26:28.341655Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-01-04T02:26:28.342178Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-04T02:26:28.342575Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-04T02:26:28.343052Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4983:4187]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-04T02:26:28.350016Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:26:28.350096Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:26:28.350550Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:26:28.350610Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-04T02:26:28.350666Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:26:28.350916Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037905, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-04T02:26:28.355666Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2026-01-04T02:26:28.356035Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 2026-01-04T02:26:28.356748Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 4 ], RowsCount[ 0 ] 2026-01-04T02:26:28.356966Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> Cdc::UuidExchange[PqRunner] [GOOD] >> Cdc::UuidExchange[YdsRunner] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_Always [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_String >> DataShardSnapshots::PipelineAndMediatorRestoreRace [GOOD] >> DataShardSnapshots::ShardRestartLockBasic |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/metadata/initializer/ut/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite >> DataShardSnapshots::DelayedWriteReplyAfterSplit [GOOD] >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot >> TMonitoringTests::InvalidActorId >> TBackupCollectionTests::DropCollectionDuringActiveOperation [GOOD] >> TBackupCollectionTests::ConcurrentDropProtectionTest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> Cdc::KeysOnlyLog[YdsRunner] [GOOD] >> Cdc::NewAndOldImagesLog[PqRunner] >> TMonitoringTests::InvalidActorId [GOOD] >> DataShardWrite::UpsertImmediate |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::InvalidActorId [GOOD] |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest >> DataShardWrite::UpsertWithDefaults >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Unspecified [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite [GOOD] >> TBackupCollectionTests::ConcurrentDropProtectionTest [GOOD] >> TBackupCollectionTests::BackupServiceDirectoryValidation >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] >> DataShardSnapshots::UncommittedWriteRestartDuringCommitThenBulkErase [GOOD] >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly [GOOD] >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly_Multistatement >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalBackup [GOOD] >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalDrop |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed+EvWrite [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:26:20.505255Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:20.636616Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:26:20.637174Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:26:20.637240Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:26:21.147070Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:21.147188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:21.425101Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493573077401 != 1767493573077404 2026-01-04T02:26:21.444605Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:21.499167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:21.636638Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:26:22.029213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:22.043206Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:22.172867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:22.572491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:22.881211Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1114:2915], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:22.881431Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1124:2920], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:22.881594Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:22.883030Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1130:2925], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:22.883255Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:22.906877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:26:23.172354Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1128:2923], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:26:23.323341Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1186:2962] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ... waiting for commit read sets ... sending immediate upsert ... waiting for immediate propose ... immediate upsert is blocked 2026-01-04T02:26:25.768945Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting;tx_id=281474976715666; 2026-01-04T02:26:25.783428Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:880: SelfId: [1:1382:3015], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [1:1263:3015]Got OVERLOADED for table `/Root/table-1`. ShardID=72075186224037889, Sink=[1:1382:3015]. Ignored this error.{
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } 2026-01-04T02:26:25.784073Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:1375:3015], SessionActorId: [1:1263:3015], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 . sessionActorId=[1:1263:3015]. 2026-01-04T02:26:25.784429Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=YmE0N2RiZmQtOTU0YzgyOWItOTJlNjQzZTktNmU2Y2RjNjQ=, ActorId: [1:1263:3015], ActorState: ExecuteState, LegacyTraceId: 01ke3d7jq24r4f3ec17qj2zt2w, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:1376:3015] from: [1:1375:3015] trace_id# 2026-01-04T02:26:25.785266Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:1376:3015] TxId: 281474976715665. Ctx: { TraceId: 01ke3d7jq24r4f3ec17qj2zt2w, Database: , SessionId: ydb://session/3?node_id=1&id=YmE0N2RiZmQtOTU0YzgyOWItOTJlNjQzZTktNmU2Y2RjNjQ=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# OVERLOADED Issues# {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`., code: 2006 subissue: {
: Error: Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting, code: 2006 } } trace_id# 2026-01-04T02:26:25.788929Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YmE0N2RiZmQtOTU0YzgyOWItOTJlNjQzZTktNmU2Y2RjNjQ=, ActorId: [1:1263:3015], ActorState: ExecuteState, LegacyTraceId: 01ke3d7jq24r4f3ec17qj2zt2w, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037889 is overloaded. Table `/Root/table-1`." issue_code: 2006 severity: 1 issues { message: "Rejecting immediate write tx 281474976715666 because datashard 72075186224037889 is restarting" issue_code: 2006 severity: 1 } } trace_id# 2026-01-04T02:26:25.789360Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1288: SelfId: [1:1348:3017], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [1:1265:3017]TEvDeliveryProblem was received from tablet: 72075186224037889 2026-01-04T02:26:25.789471Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:1338:3017], SessionActorId: [1:1265:3017], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 . sessionActorId=[1:1265:3017]. 2026-01-04T02:26:25.790464Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=ODA5NjJkNi01ZTI1MWM1MC1kYzU5M2YwMi01NjMzY2EyOQ==, ActorId: [1:1265:3017], ActorState: ExecuteState, LegacyTraceId: 01ke3d7jfq372yrchqkphtp31h, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [1:1339:3017] from: [1:1338:3017] trace_id# 2026-01-04T02:26:25.791288Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:1339:3017] TxId: 281474976715664. Ctx: { TraceId: 01ke3d7jfq372yrchqkphtp31h, Database: , SessionId: ydb://session/3?node_id=1&id=ODA5NjJkNi01ZTI1MWM1MC1kYzU5M2YwMi01NjMzY2EyOQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# UNDETERMINED Issues# {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889., code: 2026 } trace_id# 2026-01-04T02:26:25.791754Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ODA5NjJkNi01ZTI1MWM1MC1kYzU5M2YwMi01NjMzY2EyOQ==, ActorId: [1:1265:3017], ActorState: ExecuteState, LegacyTraceId: 01ke3d7jfq372yrchqkphtp31h, Create QueryResponse for error on request, msg: status# UNDETERMINED issues# { message: "State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037889." issue_code: 2026 severity: 1 } trace_id# { items { uint32_value: 1 } items { uint32_value: 1 } } ... waiting for SysViewsRoster update finished 2026-01-04T02:26:32.238024Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:32.254858Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:26:32.259628Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:26:32.259976Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:26:32.260201Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:26:32.560524Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:32.560673Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:32.599218Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767493586947324 != 1767493586947328 2026-01-04T02:26:32.605517Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:32.657930Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:32.732202Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:26:33.056527Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:33.073991Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:33.181978Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:33.494576Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===== UPSERT initial rows 2026-01-04T02:26:33.853177Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:33.853311Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1035:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:33.853441Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:33.856459Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1039:2874], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:33.856658Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:33.863462Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:26:34.071547Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1038:2873], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:26:34.122209Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1096:2912] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ===== Begin SELECT { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets ... captured readset ... captured readset ===== restarting tablet 2026-01-04T02:26:35.376089Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1288: SelfId: [2:1223:2953], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [2:1160:2953]TEvDeliveryProblem was received from tablet: 72075186224037888 ===== Waiting for commit response ===== Last SELECT { items { uint32_value: 3 } items { uint32_value: 2 } } |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> Cdc::KeysOnlyLog[TopicRunner] [GOOD] >> Cdc::KeysOnlyLogDebezium >> TCertificateCheckerTest::CheckSubjectDns >> KqpResultSetFormats::ArrowFormat_Types_String [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Time >> Cdc::UuidExchange[YdsRunner] [GOOD] >> Cdc::UuidExchange[TopicRunner] >> TBackupCollectionTests::BackupServiceDirectoryValidation [GOOD] >> TBackupCollectionTests::BackupWithIndexes >> DataShardWrite::ExecSQLUpsertImmediate+EvWrite [GOOD] >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithPendingVolatileCommit-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:23:21.629390Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:23:22.027843Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:23:22.028390Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:23:22.028459Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:23:22.943362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:22.943516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:23.204521Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493390427641 != 1767493390427644 2026-01-04T02:23:23.244642Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:23.298542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:23.444891Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:23:23.807246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:23.826600Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:23.948494Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-04T02:23:23.948569Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:23:23.948710Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-04T02:23:24.364799Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T02:23:24.364915Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:23:24.365626Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T02:23:24.365736Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:23:24.366096Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:23:24.366277Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:23:24.366474Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:23:24.366825Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:23:24.380272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:24.381472Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:23:24.381584Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:23:24.444967Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:23:24.456239Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:23:24.456638Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:23:24.456997Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:23:24.618214Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:23:24.619100Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:23:24.619228Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:23:24.621135Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:23:24.621239Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:23:24.621313Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:23:24.626041Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:23:24.626300Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:23:24.626434Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:23:24.627034Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:23:24.753606Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:23:24.753862Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:23:24.754008Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:23:24.762601Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:23:24.762670Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:23:24.762731Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:23:24.762942Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:23:24.763007Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:23:24.763497Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:23:24.763609Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:23:24.763696Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:23:24.763736Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:23:24.763793Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:23:24.763832Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:23:24.763868Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:23:24.763905Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:23:24.763945Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:23:24.764110Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:23:24.764161Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:23:24.764238Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:23:24.764691Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-04T02:23:24.764738Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:23:24.764845Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:23:24.765098Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:23:24.765152Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:23:24.765237Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:23:24.765292Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status ... 75186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2026-01-04T02:26:36.001568Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 0 Seqno# 0 Flags# 7} 2026-01-04T02:26:36.001614Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-04T02:26:36.001709Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [13:971:2825], Recipient [13:885:2765]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-04T02:26:36.001741Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-04T02:26:36.001774Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037888 source 72075186224037889 dest 72075186224037888 producer 72075186224037889 txId 281474976715663 2026-01-04T02:26:36.001843Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037889 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-04T02:26:36.002067Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:714: Complete [4001 : 281474976715663] from 72075186224037888 at tablet 72075186224037888 send result to client [13:1175:2962], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:26:36.002467Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [13:885:2765], Recipient [13:971:2825]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-04T02:26:36.002512Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-04T02:26:36.002545Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2026-01-04T02:26:36.002606Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 4001 txid# 
281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-04T02:26:36.002723Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:714: Complete [4001 : 281474976715663] from 72075186224037889 at tablet 72075186224037889 send result to client [13:1175:2962], exec latency: 0 ms, propose latency: 0 ms TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037888 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037888 CpuTimeUsec: 12847 } LocksBrokenAsBreaker: 0 } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-1" WriteRows: 1 WriteBytes: 8 } } } CommitVersion { Step: 4001 TxId: 281474976715663 } 2026-01-04T02:26:36.003676Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:36.004049Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 TEvProposeTransactionResult: TxKind: TX_KIND_DATA Origin: 72075186224037889 Status: COMPLETE TxId: 281474976715663 TxResult: "" ExecLatency: 0 ProposeLatency: 0 TxStats { PerShardStats { ShardId: 72075186224037889 CpuTimeUsec: 1066 } LocksBrokenAsBreaker: 0 } ComputeActorStats { Tasks { Tables { TablePath: "/Root/table-2" WriteRows: 1 WriteBytes: 8 } } } CommitVersion { Step: 4001 TxId: 281474976715663 } 2026-01-04T02:26:36.005075Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:36.008679Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-04T02:26:36.008930Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [13:885:2765], Recipient [13:971:2825]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2026-01-04T02:26:36.009052Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:26:36.009146Z node 13 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715663 2026-01-04T02:26:36.016457Z node 13 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-04T02:26:36.016940Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [13:971:2825], Recipient [13:885:2765]: {TEvReadSet step# 4001 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2026-01-04T02:26:36.017045Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:26:36.017136Z node 13 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2026-01-04T02:26:36.302337Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [13:68:2115] Handle TEvExecuteKqpTransaction 2026-01-04T02:26:36.302455Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [13:68:2115] TxId# 281474976715667 
ProcessProposeKqpTransaction TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 2026-01-04T02:26:36.312948Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [13:1287:3070], Recipient [13:885:2765]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-04T02:26:36.313235Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-04T02:26:36.313368Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v4001/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v5000/18446744073709551615 ImmediateWriteEdge# v5001/0 ImmediateWriteEdgeReplied# v5001/0 2026-01-04T02:26:36.313462Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v5001/18446744073709551615 2026-01-04T02:26:36.313613Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-01-04T02:26:36.313939Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T02:26:36.314043Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-01-04T02:26:36.314132Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:26:36.314201Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:26:36.314283Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-01-04T02:26:36.314364Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T02:26:36.314402Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:26:36.314431Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T02:26:36.314462Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-01-04T02:26:36.314656Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Columns: 3 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-04T02:26:36.315206Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[13:1287:3070], 0} after executionsCount# 1 2026-01-04T02:26:36.315321Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[13:1287:3070], 0} sends rowCount# 2, bytes# 96, quota rows left# 999, quota bytes 
left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:26:36.315453Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[13:1287:3070], 0} finished in read 2026-01-04T02:26:36.315567Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T02:26:36.315599Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T02:26:36.315629Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:26:36.315661Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:26:36.315729Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T02:26:36.315759Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:26:36.315798Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-01-04T02:26:36.315874Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T02:26:36.316071Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-04T02:26:36.317183Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [13:1287:3070], Recipient [13:885:2765]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:26:36.317261Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 2 } items { uint32_value: 22 } } |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> DataShardWrite::UpsertWithDefaults [GOOD] >> DataShardWrite::WriteImmediateBadRequest >> DataShardWrite::UpsertImmediate [GOOD] >> DataShardWrite::UpsertImmediateManyColumns >> TCertificateCheckerTest::CheckSubjectDns [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> Cdc::NewAndOldImagesLog[PqRunner] [GOOD] >> Cdc::NewAndOldImagesLog[YdsRunner] >> KqpResultSetFormats::ArrowFormat_AllTypes+isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_AllTypes-isOlap >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test[read_update_write_load] [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> 
test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] [GOOD] |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateCheckerTest::CheckSubjectDns [GOOD] |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> DataShardSnapshots::ShardRestartLockBasic [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTable >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly_Multistatement >> TBackupCollectionTests::VerifyCdcStreamCleanupInIncrementalDrop [GOOD] >> TBackupCollectionTests::DropErrorRecoveryTest >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_6_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 6] [GOOD] >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] >> KqpScanArrowFormat::JoinWithParams [GOOD] >> KqpScanArrowInChanels::AggregateCountStar >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_7_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 7] |84.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |84.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator |84.0%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/ydb-core-tx-replication-controller-ut_dst_creator >> TBackupCollectionTests::BackupWithIndexes [GOOD] >> TBackupCollectionTests::BackupWithIndexesOmit |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::ClientCertAuthorizationParamsMatch [GOOD] >> Cdc::UuidExchange[TopicRunner] [GOOD] >> Cdc::UpdatesLog[PqRunner] |84.0%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates >> KqpResultSetFormats::ValueFormat_SchemaInclusionMode_FirstOnly_Multistatement [GOOD] >> KqpScanArrowFormat::AggregateCountStar |84.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] >> Cdc::KeysOnlyLogDebezium [GOOD] >> Cdc::DocApi[PqRunner] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit+UseSink [GOOD] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink >> DataShardWrite::ExecSQLUpsertImmediate-EvWrite [GOOD] >> DataShardWrite::IncrementImmediate >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 |84.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/certificate_check/ut/unittest >> TCertificateAuthUtilsTest::GenerateAndVerifyCertificates [GOOD] |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/security/certificate_check/ut/unittest >> DataShardWrite::UpsertImmediateManyColumns [GOOD] >> DataShardWrite::UpsertPrepared+Volatile >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_8_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 8] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_9_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 9] >> 
KqpResultSetFormats::ArrowFormat_Types_Time [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_1 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 |84.1%| [TA] $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |84.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TBackupCollectionTests::BackupWithIndexesOmit [GOOD] >> TBackupCollectionTests::BackupWithIndexesDefault >> Cdc::NewAndOldImagesLog[YdsRunner] [GOOD] >> Cdc::NewAndOldImagesLog[TopicRunner] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 >> DataShardWrite::WriteImmediateBadRequest [GOOD] >> DataShardWrite::WriteImmediateSeveralOperations >> TBackupCollectionTests::DropErrorRecoveryTest [GOOD] >> TBackupCollectionTests::RestorePathStatePersistenceAcrossRestart >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_4_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 4] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_5_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 5] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 >> KqpResultSetFormats::ArrowFormat_AllTypes-isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_LargeTable >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_FirstOnly_Multistatement [GOOD] >> KqpResultSetFormats::ArrowFormat_Stress >> Cdc::UpdatesLog[PqRunner] [GOOD] >> Cdc::UpdatesLog[YdsRunner] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 >> TBackupCollectionTests::BackupWithIndexesDefault [GOOD] >> TBackupCollectionTests::CdcStreamRotationDuringIncrementalBackups >> DataShardWrite::IncrementImmediate [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile >> DataShardWrite::UpsertPrepared+Volatile [GOOD] >> DataShardWrite::UpsertPrepared-Volatile >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 >> DataShardSnapshots::ShardRestartAfterDropTable [GOOD] >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::UncommittedChangesRenameTable-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:23:20.357008Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:23:20.635861Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.018623s 2026-01-04T02:23:20.720859Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:23:20.721303Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:23:20.721374Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:23:21.856931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:21.857070Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:21.952916Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493394447532 != 1767493394447535 2026-01-04T02:23:21.973463Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:22.026325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:22.141056Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:23:22.596304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:22.610499Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:22.725453Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-04T02:23:22.725518Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:23:22.725740Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-04T02:23:22.991813Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T02:23:22.991918Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:23:22.992610Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T02:23:22.992707Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:23:22.993045Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:23:22.993198Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:23:22.993373Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:23:22.993689Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:23:22.995816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:22.996915Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:23:22.997008Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:23:23.047888Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:23:23.048915Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:23:23.051238Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:23:23.051497Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:23:23.190157Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:23:23.191060Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:23:23.191177Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:23:23.203472Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:23:23.203570Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:23:23.203652Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:23:23.204053Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:23:23.204221Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:23:23.204321Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:23:23.217545Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:23:23.320670Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:23:23.320895Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:23:23.321014Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:23:23.321055Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:23:23.321090Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:23:23.321139Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:23:23.321318Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:23:23.321371Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:23:23.321762Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:23:23.329986Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:23:23.330275Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:23:23.330350Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:23:23.330399Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:23:23.330433Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:23:23.330469Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:23:23.330502Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:23:23.330548Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:23:23.330747Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:23:23.330817Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:23:23.330874Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:23:23.331561Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-04T02:23:23.331613Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:23:23.331711Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:23:23.331973Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:23:23.332040Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:23:23.332157Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 ... efault, DatabaseId: /Root, IsStreamingQuery: 0}. ExecuteDatashardTransaction trace_id_verbosity# 0 trace_id# 2026-01-04T02:26:47.658043Z node 13 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [13:1187:2877] TxId: 281474976715665. Ctx: { TraceId: 01ke3d883934d77a546qptz35k, Database: , SessionId: ydb://session/3?node_id=13&id=OWMyOWE5MDQtY2RmZmU1NzEtY2ZjYzA0YTctZGViMTM5Njc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 0 read_only# true datashard_txs# 1 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-04T02:26:47.658101Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:146} ActorId: [13:1187:2877] TxId: 281474976715665. Ctx: { TraceId: 01ke3d883934d77a546qptz35k, Database: , SessionId: ydb://session/3?node_id=13&id=OWMyOWE5MDQtY2RmZmU1NzEtY2ZjYzA0YTctZGViMTM5Njc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Datashard not finished yet actor_state# WaitResolveState shard_id# 72075186224037888 state# Executing trace_id# 2026-01-04T02:26:47.658154Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [13:1187:2877] TxId: 281474976715665. Ctx: { TraceId: 01ke3d883934d77a546qptz35k, Database: , SessionId: ydb://session/3?node_id=13&id=OWMyOWE5MDQtY2RmZmU1NzEtY2ZjYzA0YTctZGViMTM5Njc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 0 compute actor(s) and 1 datashard(s): DS 72075186224037888 (Executing), trace_id# 2026-01-04T02:26:47.658201Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2350} ActorId: [13:1187:2877] TxId: 281474976715665. Ctx: { TraceId: 01ke3d883934d77a546qptz35k, Database: , SessionId: ydb://session/3?node_id=13&id=OWMyOWE5MDQtY2RmZmU1NzEtY2ZjYzA0YTctZGViMTM5Njc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-04T02:26:47.658480Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [13:1187:2877], Recipient [13:1156:2961]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1187 RawX2: 55834577725 } TxBody: " \0018\001j3\010\001\032\'\n#\t\215\023\000\000\000\000\001\000\021\000\000\001\000\000\020\000\001\030\001 \000)\000\001\205\000\000\000\000\0010&8\001 \003\"\006\020\0020\000@\n\220\001\000" TxId: 281474976715665 ExecLevel: 0 Flags: 8 2026-01-04T02:26:47.658524Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:26:47.658655Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [13:1156:2961], Recipient [13:1156:2961]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2026-01-04T02:26:47.658692Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-04T02:26:47.658768Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:26:47.658957Z node 13 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-04T02:26:47.659051Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit CheckDataTx 2026-01-04T02:26:47.659104Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2026-01-04T02:26:47.659137Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CheckDataTx 2026-01-04T02:26:47.659169Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:26:47.659201Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:26:47.659245Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v600/281474976715663 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v500/18446744073709551615 ImmediateWriteEdgeReplied# v1000/18446744073709551615 2026-01-04T02:26:47.659298Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715665] at 72075186224037888 2026-01-04T02:26:47.659336Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2026-01-04T02:26:47.659369Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:26:47.659399Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit BlockFailPoint 2026-01-04T02:26:47.659430Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit BlockFailPoint 
2026-01-04T02:26:47.659463Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2026-01-04T02:26:47.659491Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit BlockFailPoint 2026-01-04T02:26:47.659518Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit ExecuteKqpDataTx 2026-01-04T02:26:47.659550Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit ExecuteKqpDataTx 2026-01-04T02:26:47.659625Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:269: Operation [0:281474976715665] (execute_kqp_data_tx) at 72075186224037888 set memory limit 4193432 2026-01-04T02:26:47.659776Z node 13 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true 2026-01-04T02:26:47.659882Z node 13 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:521: add locks to result: 0 2026-01-04T02:26:47.659972Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2026-01-04T02:26:47.660005Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit ExecuteKqpDataTx 2026-01-04T02:26:47.660037Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:26:47.660069Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit FinishPropose 2026-01-04T02:26:47.660145Z node 13 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715665 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-04T02:26:47.660265Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is DelayComplete 2026-01-04T02:26:47.660300Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:26:47.660335Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715665] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:26:47.660366Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715665] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:26:47.660423Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715665] at 72075186224037888 is Executed 2026-01-04T02:26:47.660451Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715665] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:26:47.660479Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715665] at 72075186224037888 has finished 2026-01-04T02:26:47.660546Z node 13 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:26:47.660580Z node 13 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [0:281474976715665] at 72075186224037888 on unit FinishPropose 2026-01-04T02:26:47.660622Z node 13 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:47.660807Z node 13 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:1413} ActorId: [13:1187:2877] TxId: 281474976715665. Ctx: { TraceId: 01ke3d883934d77a546qptz35k, Database: , SessionId: ydb://session/3?node_id=13&id=OWMyOWE5MDQtY2RmZmU1NzEtY2ZjYzA0YTctZGViMTM5Njc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got propose result shard_id# 72075186224037888 status# COMPLETE error# trace_id# 2026-01-04T02:26:47.660974Z node 13 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [13:1187:2877] TxId: 281474976715665. Ctx: { TraceId: 01ke3d883934d77a546qptz35k, Database: , SessionId: ydb://session/3?node_id=13&id=OWMyOWE5MDQtY2RmZmU1NzEtY2ZjYzA0YTctZGViMTM5Njc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-04T02:26:47.661092Z node 13 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [13:1187:2877] TxId: 281474976715665. Ctx: { TraceId: 01ke3d883934d77a546qptz35k, Database: , SessionId: ydb://session/3?node_id=13&id=OWMyOWE5MDQtY2RmZmU1NzEtY2ZjYzA0YTctZGViMTM5Njc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-04T02:26:47.661262Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=13&id=OWMyOWE5MDQtY2RmZmU1NzEtY2ZjYzA0YTctZGViMTM5Njc=, ActorId: [13:1043:2877], ActorState: CleanupState, LegacyTraceId: 01ke3d883934d77a546qptz35k, EndCleanup is_final# false trace_id# 2026-01-04T02:26:47.661486Z node 13 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:2837} SessionId: ydb://session/3?node_id=13&id=OWMyOWE5MDQtY2RmZmU1NzEtY2ZjYzA0YTctZGViMTM5Njc=, ActorId: [13:1043:2877], ActorState: CleanupState, LegacyTraceId: 01ke3d883934d77a546qptz35k, Sent query response back to proxy proxy_request_id# 8 proxy_id# [13:66:2113] trace_id# 2026-01-04T02:26:47.938952Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [13:1196:2987], Recipient [13:1156:2961]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:47.939114Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:47.939236Z node 13 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [13:1195:2986], serverId# [13:1196:2987], sessionId# [0:0:0] 2026-01-04T02:26:47.939546Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553224, Sender [13:828:2723], Recipient [13:1156:2961]: NKikimr::TEvDataShard::TEvGetOpenTxs |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 >> DataShardWrite::WriteImmediateSeveralOperations [GOOD] >> DataShardWrite::UpsertPreparedManyTables-Volatile >> Cdc::UpdatesLog[YdsRunner] [GOOD] >> Cdc::UpdatesLog[TopicRunner] >> Cdc::NewAndOldImagesLog[TopicRunner] [GOOD] >> Cdc::NewAndOldImagesLogDebezium >> TraverseColumnShard::TraverseColumnTable 
[GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 >> TBackupCollectionTests::RestorePathStatePersistenceAcrossRestart [GOOD] >> TBackupCollectionTests::IncrementalBackupOperation >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 |84.1%| [TA] {RESULT} $(B)/ydb/core/security/certificate_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardWrite::UpsertPrepared-Volatile [GOOD] >> DataShardWrite::UpsertPreparedManyTables+Volatile >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-25 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-5 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |84.1%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut/ydb-core-keyvalue-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 >> KqpResultSetFormats::ArrowFormat_Types_Optional_1 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:26:00.310048Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:00.424665Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:26:00.433491Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:26:00.433897Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:26:00.434057Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:26:01.086391Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:01.199175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:01.199290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:01.253044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:01.372623Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:26:02.909473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:02.919249Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:26:02.919331Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:02.919377Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:02.920031Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:03.025005Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:03.731478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:26:09.665479Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:09.676699Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:26:09.681528Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:26:09.772062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:26:09.772211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:09.816754Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:26:09.819451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:10.197578Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:10.210777Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:10.211529Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:10.212352Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:10.212611Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:10.212836Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:10.213051Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:10.213271Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:10.213373Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:10.548085Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:26:10.562430Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:10.562552Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:10.596405Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:11.333365Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:11.633164Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:26:11.633305Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:26:11.735653Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:26:11.735999Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:26:11.737005Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:26:11.737101Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:26:11.737157Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:26:11.737218Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:26:11.737279Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:26:11.737387Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:26:11.738448Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:26:12.061383Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2223:2620] 2026-01-04T02:26:12.061990Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:26:12.064638Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Describe result: PathErrorUnknown 2026-01-04T02:26:12.064704Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Creating table 2026-01-04T02:26:12.064809Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:26:12.125658Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2627], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:26:12.154587Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2234:2625] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:26:12.204736Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:26:12.403408Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:26:12.454863Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:26:12.455016Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2302:2660], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:26:12.541052Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2328:2683] 2026-01-04T02:26:12.542842Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2328:2683], schemeshard id = 72075186224037897 2026-01-04T02:26:13.734402Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:26:13.736676Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2594:2886], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:26:13.747353Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:13.754986Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:26:13.755115Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on create table tx: 28 ... 003Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5505:4739], server id = [2:5506:4740], tablet id = 72075186224037894, status = OK 2026-01-04T02:26:51.042085Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5506:4740] 2026-01-04T02:26:51.042223Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5506:4740], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:26:51.042296Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:26:51.042894Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:26:51.043004Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:5503:4737], StatRequests.size() = 1 2026-01-04T02:26:51.043104Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:26:51.689216Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5493:4727], ActorId: [2:5494:4728], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OGVjNDM1NDctNDQyOTBhZi1hYTVmZjkwYy0zNmVkMDFkMg==, TxId: 2026-01-04T02:26:51.689301Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5493:4727], ActorId: [2:5494:4728], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGVjNDM1NDctNDQyOTBhZi1hYTVmZjkwYy0zNmVkMDFkMg==, TxId: 2026-01-04T02:26:51.689824Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5492:4726], ActorId: [2:5493:4727], Got response [2:5494:4728] SUCCESS 2026-01-04T02:26:51.690384Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:26:51.714035Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-01-04T02:26:51.714121Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. 
No ActorId to send reply. 2026-01-04T02:26:51.838724Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-04T02:26:51.838833Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:26:51.949909Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5505:4739], schemeshard count = 1 2026-01-04T02:26:52.674892Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-04T02:26:52.674995Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-01-04T02:26:52.675038Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:26:52.722658Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-04T02:26:52.783618Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-04T02:26:52.784351Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-04T02:26:52.784451Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-04T02:26:52.786785Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-04T02:26:52.827672Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-04T02:26:52.838548Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-04T02:26:52.840074Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5589:4784], server id = [2:5593:4788], tablet id = 72075186224037899, status = OK 2026-01-04T02:26:52.840658Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5589:4784], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:26:52.840905Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5590:4785], server id = [2:5594:4789], tablet id = 72075186224037900, status = OK 2026-01-04T02:26:52.840974Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5590:4785], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:26:52.846272Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5591:4786], server id = [2:5595:4790], tablet id = 72075186224037901, status = OK 2026-01-04T02:26:52.846390Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5591:4786], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:26:52.847047Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5592:4787], server id = [2:5596:4791], tablet id = 72075186224037902, status = OK 2026-01-04T02:26:52.847122Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5592:4787], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:26:52.870333Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse 
TabletId: 72075186224037899 2026-01-04T02:26:52.871562Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5589:4784], server id = [2:5593:4788], tablet id = 72075186224037899 2026-01-04T02:26:52.871620Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:26:52.873302Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-04T02:26:52.873700Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5590:4785], server id = [2:5594:4789], tablet id = 72075186224037900 2026-01-04T02:26:52.873733Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:26:52.883793Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-04T02:26:52.884091Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5592:4787], server id = [2:5596:4791], tablet id = 72075186224037902 2026-01-04T02:26:52.884138Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:26:52.884475Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-04T02:26:52.884526Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-04T02:26:52.884769Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5591:4786], server id = [2:5595:4790], tablet id = 72075186224037901 2026-01-04T02:26:52.884798Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:26:52.884899Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-04T02:26:52.885069Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-04T02:26:52.885560Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5609:4800], ActorId: [2:5610:4801], Starting query actor #1 [2:5611:4802] 2026-01-04T02:26:52.885636Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5610:4801], ActorId: [2:5611:4802], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:26:52.889056Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5610:4801], ActorId: [2:5611:4802], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZDMyOTU2YmYtYmJlZmQ1MDMtYzgwZDI3NS0yZmIyNTEwOQ==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:26:53.411943Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5620:4811]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:26:53.429294Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:26:53.429378Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5620:4811], StatRequests.size() = 1 2026-01-04T02:26:53.970657Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5610:4801], ActorId: [2:5611:4802], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDMyOTU2YmYtYmJlZmQ1MDMtYzgwZDI3NS0yZmIyNTEwOQ==, TxId: 2026-01-04T02:26:53.970752Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5610:4801], ActorId: [2:5611:4802], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDMyOTU2YmYtYmJlZmQ1MDMtYzgwZDI3NS0yZmIyNTEwOQ==, TxId: 2026-01-04T02:26:53.971204Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5609:4800], ActorId: [2:5610:4801], Got response [2:5611:4802] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-04T02:26:53.971547Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5634:4817]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-04T02:26:53.972036Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:26:53.972095Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:26:53.972337Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:26:53.973257Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:26:53.973309Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:26:53.973363Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:26:53.992140Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> DataShardSnapshots::DelayedWriteReadableAfterSplitAndReboot [GOOD] >> DataShardSnapshots::BrokenLockChangesDontLeak >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 >> KqpResultSetFormats::ArrowFormat_Stress [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_1 >> KqpScanArrowFormat::AggregateCountStar [GOOD] >> KqpScanArrowFormat::AllTypesColumns >> KqpScanArrowInChanels::AggregateCountStar [GOOD] >> KqpScanArrowInChanels::AllTypesColumns >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut |84.1%| [LD] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/ydb-core-persqueue-dread_cache_service-ut >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut |84.1%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/ut/ydb-core-kesus-tablet-ut >> Cdc::UpdatesLog[TopicRunner] [GOOD] >> Cdc::VirtualTimestamps[PqRunner] >> Cdc::DocApi[PqRunner] [GOOD] >> Cdc::DocApi[YdsRunner] |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |84.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/s3/ydb-core-kqp-ut-federated_query-s3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootColumnshard [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:25:56.420371Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:25:56.677443Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:25:56.693680Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:25:56.694077Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:25:56.694245Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:25:57.569867Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:57.747617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:25:57.747749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:25:57.817822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:25:57.944368Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:25:59.766751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:25:59.767700Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:25:59.767749Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:25:59.767783Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:25:59.768286Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:25:59.882532Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:01.180967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:26:07.317497Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:07.329295Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:26:07.338916Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:26:07.426463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:26:07.426603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:07.472351Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:26:07.483136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:07.892144Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:07.892278Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:07.894224Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:07.894995Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:07.895730Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:07.896736Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:07.896978Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:07.897074Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:07.897269Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:07.897495Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:07.897646Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:07.932708Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:08.295727Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:08.354530Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:26:08.354643Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:26:08.386382Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:26:08.391940Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:26:08.392230Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:26:08.392302Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:26:08.392362Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:26:08.392416Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:26:08.392472Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:26:08.392539Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:26:08.393163Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:26:08.527433Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2174:2614] 2026-01-04T02:26:08.528647Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:26:08.528782Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2179:2619], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:26:08.536534Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:26:08.565408Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2237:2660] 2026-01-04T02:26:08.568841Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2237:2660], schemeshard id = 72075186224037897 2026-01-04T02:26:08.588495Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2190:2628] Owner: [2:2188:2626]. Describe result: PathErrorUnknown 2026-01-04T02:26:08.588562Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2190:2628] Owner: [2:2188:2626]. Creating table 2026-01-04T02:26:08.588643Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2190:2628] Owner: [2:2188:2626]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:26:08.677639Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2298:2686], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:26:08.725913Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:08.764335Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2190:2628] Owner: [2:2188:2626]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:26:08.764529Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2190:2628] Owner: [2:2188:2626]. Subscribe on create table tx: 281474976720657 2026-01-04T02:26:08.823389Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2190:2628] Owner: [2:2188:2626]. 
Subscribe on tx: 281474976720657 registered 2026-01-04T02:26:09.058543Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:26:09.078147Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:26:09.911051Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2190:2628] Owner: [2:2188:2626]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-04T02:26:10.061688Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2190:2628] Owner: [2:2188:2626]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:26:10.061790Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2190:2628] Owner: [2:2188:2626]. Column diff is empty, finishing 2026-01-04T02:26:11.077548Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... ATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:26:55.808546Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5442:4712], schemeshard count = 1 2026-01-04T02:26:56.867117Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-04T02:26:56.867203Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-01-04T02:26:56.867244Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:26:56.873229Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-04T02:26:56.914838Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-04T02:26:56.915655Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-04T02:26:56.915772Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-04T02:26:56.922663Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2026-01-04T02:26:56.976547Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-04T02:26:56.980882Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-04T02:26:56.989088Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5524:4755], server id = [2:5528:4759], tablet id = 72075186224037899, status = OK 2026-01-04T02:26:56.991064Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5524:4755], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:26:56.992458Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5525:4756], server id = [2:5529:4760], tablet id = 72075186224037900, status = OK 2026-01-04T02:26:56.992569Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5525:4756], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:26:56.994830Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5526:4757], server id = [2:5530:4761], tablet id = 72075186224037901, status = OK 2026-01-04T02:26:56.994938Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5526:4757], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:26:56.998242Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5527:4758], server id = [2:5531:4762], tablet id = 72075186224037902, status = OK 2026-01-04T02:26:56.998339Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5527:4758], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:26:57.004487Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-04T02:26:57.005105Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5524:4755], server id = [2:5528:4759], tablet id = 72075186224037899 2026-01-04T02:26:57.005163Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:26:57.006788Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-04T02:26:57.007664Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5525:4756], server id = [2:5529:4760], tablet id = 72075186224037900 2026-01-04T02:26:57.007706Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:26:57.008213Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-04T02:26:57.008592Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5526:4757], server id = [2:5530:4761], tablet id = 72075186224037901 2026-01-04T02:26:57.008625Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:26:57.008934Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-04T02:26:57.008992Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-04T02:26:57.009236Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5527:4758], server id = 
[2:5531:4762], tablet id = 72075186224037902 2026-01-04T02:26:57.009267Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:26:57.009381Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-04T02:26:57.009563Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-04T02:26:57.010096Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5544:4771], ActorId: [2:5545:4772], Starting query actor #1 [2:5546:4773] 2026-01-04T02:26:57.010177Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5545:4772], ActorId: [2:5546:4773], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:26:57.013368Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5545:4772], ActorId: [2:5546:4773], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=OTQwZjUzZDQtZGE4ZDU0MGUtOThmMDNjOWYtNTRlZjUzYWQ=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:26:57.135204Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5555:4782]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:26:57.135650Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:26:57.135743Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5555:4782], StatRequests.size() = 1 2026-01-04T02:26:57.322345Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5545:4772], ActorId: [2:5546:4773], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=OTQwZjUzZDQtZGE4ZDU0MGUtOThmMDNjOWYtNTRlZjUzYWQ=, TxId: 2026-01-04T02:26:57.322428Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5545:4772], ActorId: [2:5546:4773], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTQwZjUzZDQtZGE4ZDU0MGUtOThmMDNjOWYtNTRlZjUzYWQ=, TxId: 2026-01-04T02:26:57.322831Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5544:4771], ActorId: [2:5545:4772], Got response [2:5546:4773] SUCCESS ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-04T02:26:57.323429Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:26:57.324933Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2830:3036];ev=NActors::IEventHandle;fline=columnshard_impl.cpp:984;event=tablet_die; 2026-01-04T02:26:57.375194Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:26:57.375269Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. 
No ActorId to send reply. 2026-01-04T02:26:57.590694Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:5576:4792];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=20; 2026-01-04T02:26:57.604180Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 DefaultCompression { } UpsertIndexes { Id: 3 Name: "cms_key" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "COUNT_MIN_SKETCH" CountMinSketch { ColumnIds: 1 } } Options { SchemeNeedActualization: false } ; 2026-01-04T02:26:57.604741Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=2;to_version=3;diff=Version: 3 DefaultCompression { } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } } ; 2026-01-04T02:26:57.605109Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:composite_init/tables_manager;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 DefaultCompression { } UpsertIndexes { Id: 4 Name: "cms_value" StorageId: "__DEFAULT" InheritPortionStorage: false ClassName: "COUNT_MIN_SKETCH" CountMinSketch { ColumnIds: 2 } } Options { SchemeNeedActualization: false CompactionPlannerConstructor { ClassName: "l-buckets" WeightKff: 1 LBuckets { } } } ; 2026-01-04T02:26:57.962953Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5694:4885]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-04T02:26:57.963504Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:26:57.963580Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:26:57.964027Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:26:57.964096Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:26:57.964186Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:26:57.971802Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 
|84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |84.1%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join |84.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/join/ydb-core-kqp-ut-join ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeReqDistribution [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:26:00.301536Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:00.492426Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:26:00.513478Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:26:00.513985Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:26:00.514188Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:26:01.727240Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:01.990604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:01.990737Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:02.048094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:02.182674Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:26:03.216380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:03.217625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:26:03.217681Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:03.217715Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:03.226471Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:03.333446Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:04.265629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:26:09.331563Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:09.363186Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:26:09.378609Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:26:09.509870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:26:09.510017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:09.551019Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:26:09.562658Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:09.921946Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:09.922079Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:09.923850Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:09.924621Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:09.925228Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:09.926302Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:09.926534Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:09.926779Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:09.927054Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:09.927238Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:09.927372Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:09.962028Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:10.617380Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:10.806627Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:26:10.806750Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:26:10.846534Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:26:10.849156Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:26:10.849403Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:26:10.849463Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:26:10.849528Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:26:10.849585Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:26:10.849640Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:26:10.849696Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:26:10.850657Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:26:10.857838Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1900:2453] 2026-01-04T02:26:10.876942Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:26:10.905822Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2106:2578] Owner: [2:2105:2577]. Describe result: PathErrorUnknown 2026-01-04T02:26:10.905895Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2106:2578] Owner: [2:2105:2577]. Creating table 2026-01-04T02:26:10.905983Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2106:2578] Owner: [2:2105:2577]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:26:10.926965Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:26:10.927102Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2136:2594], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:26:10.959141Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2177:2626] 2026-01-04T02:26:10.960147Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2177:2626], schemeshard id = 72075186224037897 2026-01-04T02:26:10.973231Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2193:2640], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:26:11.035052Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:11.076258Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2106:2578] Owner: [2:2105:2577]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:26:11.076709Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2106:2578] Owner: [2:2105:2577]. Subscribe on create table tx: 281474976720657 2026-01-04T02:26:11.098473Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2106:2578] Owner: [2:2105:2577]. 
Subscribe on tx: 281474976720657 registered 2026-01-04T02:26:11.698527Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:26:11.771153Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:26:12.390694Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2106:2578] Owner: [2:2105:2577]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-04T02:26:12.530355Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2106:2578] Owner: [2:2105:2577]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:26:12.530467Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2106:2578] Owner: [2:2105:2577]. Column diff is empty, finishing 2026-01-04T02:26:13.422476Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... node 2 :STATISTICS DEBUG: tx_init.cpp:69: [72075186224037894] Loaded traversal table owner id: 72075186224037897 2026-01-04T02:26:56.755205Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2026-01-04T02:26:56.755248Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1767493616598994 2026-01-04T02:26:56.755289Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-01-04T02:26:56.755334Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-01-04T02:26:56.755446Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-01-04T02:26:56.755513Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:26:56.755630Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-01-04T02:26:56.755702Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:26:56.755770Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:26:56.755844Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:26:56.756000Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:26:56.757267Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:26:56.757485Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5558:4766] Owner: [2:5557:4765]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:26:56.757546Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5558:4766] Owner: [2:5557:4765]. 
Column diff is empty, finishing 2026-01-04T02:26:56.757948Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-04T02:26:56.758024Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-04T02:26:56.759216Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-04T02:26:56.759306Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-04T02:26:56.762326Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-04T02:26:56.784518Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5567:4773] 2026-01-04T02:26:56.784813Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5567:4773], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-01-04T02:26:56.784951Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5523:4745], server id = [2:5567:4773], tablet id = 72075186224037894, status = OK 2026-01-04T02:26:56.785106Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5568:4774] 2026-01-04T02:26:56.785283Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5568:4774], schemeshard id = 72075186224037897 2026-01-04T02:26:56.843464Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-04T02:26:56.843753Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-04T02:26:56.844630Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5573:4779], server id = [2:5577:4783], tablet id = 72075186224037899, status = OK 2026-01-04T02:26:56.845018Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5573:4779], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:26:56.845422Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5574:4780], server id = [2:5578:4784], tablet id = 72075186224037900, status = OK 2026-01-04T02:26:56.845494Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5574:4780], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:26:56.847178Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5575:4781], server id = [2:5579:4785], tablet id = 72075186224037901, status = OK 2026-01-04T02:26:56.847247Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5575:4781], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:26:56.847718Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5576:4782], server id = [2:5580:4786], tablet id = 72075186224037902, status = OK 2026-01-04T02:26:56.847767Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5576:4782], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:26:56.854744Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 
2026-01-04T02:26:56.855516Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5573:4779], server id = [2:5577:4783], tablet id = 72075186224037899 2026-01-04T02:26:56.855571Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:26:56.856102Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-04T02:26:56.856490Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5574:4780], server id = [2:5578:4784], tablet id = 72075186224037900 2026-01-04T02:26:56.856516Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:26:56.857507Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-04T02:26:56.857874Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5575:4781], server id = [2:5579:4785], tablet id = 72075186224037901 2026-01-04T02:26:56.857913Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:26:56.858066Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-04T02:26:56.858128Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-04T02:26:56.858420Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-04T02:26:56.858565Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-04T02:26:56.858790Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5593:4795], ActorId: [2:5594:4796], Starting query actor #1 [2:5595:4797] 2026-01-04T02:26:56.858860Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5594:4796], ActorId: [2:5595:4797], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:26:56.862197Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5576:4782], server id = [2:5580:4786], tablet id = 72075186224037902 2026-01-04T02:26:56.862242Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:26:56.863041Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5594:4796], ActorId: [2:5595:4797], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=N2NlMjJjNTEtYzg1NmFlYWMtOTgyOGM2My03Y2JkM2E5YQ==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:26:57.004027Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5604:4806]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:26:57.004335Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:26:57.004388Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5604:4806], StatRequests.size() = 1 2026-01-04T02:26:57.196239Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5594:4796], ActorId: [2:5595:4797], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=N2NlMjJjNTEtYzg1NmFlYWMtOTgyOGM2My03Y2JkM2E5YQ==, TxId: 2026-01-04T02:26:57.196359Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5594:4796], ActorId: [2:5595:4797], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2NlMjJjNTEtYzg1NmFlYWMtOTgyOGM2My03Y2JkM2E5YQ==, TxId: 2026-01-04T02:26:57.196917Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5593:4795], ActorId: [2:5594:4796], Got response [2:5595:4797] SUCCESS 2026-01-04T02:26:57.197514Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:26:57.239799Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:26:57.239865Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-01-04T02:26:57.400544Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5624:4814]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-04T02:26:57.400932Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:26:57.400989Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:26:57.401318Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:26:57.401375Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:26:57.401443Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:26:57.419694Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |84.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> DataShardWrite::UpsertPreparedManyTables+Volatile [GOOD] >> DataShardWrite::UpsertNoLocksArbiter |84.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut |84.2%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/with_offset_ranges_mode_ut >> TBackupCollectionTests::CdcStreamRotationDuringIncrementalBackups [GOOD] >> TBackupCollectionTests::DropCollectionAfterIncrementalRestore >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 >> Cdc::NewAndOldImagesLogDebezium [GOOD] >> Cdc::OldImageLogDebezium >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 >> DataShardWrite::UpsertPreparedManyTables-Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache+Volatile >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 >> KqpResultSetFormats::ArrowFormat_Types_List_1 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_EmptyList >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeResolve [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:26:04.631342Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:04.775058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:26:04.785610Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:26:04.786108Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:26:04.786311Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:26:05.479727Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:05.613696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:05.613790Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:05.676921Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:05.765253Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:26:07.147217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:07.148708Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:26:07.148767Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:07.148801Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:07.149366Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:07.253380Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:08.096050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:26:18.052839Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:18.063747Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:26:18.068522Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:26:18.130839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:26:18.130973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:18.192099Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:26:18.199245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:18.498687Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:18.499508Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:18.500314Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:18.507787Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:18.508356Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:18.508596Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:18.508877Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:18.509158Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:18.509280Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:18.853288Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:26:18.883477Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:18.883610Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:18.899565Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:19.068405Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:19.151041Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:26:19.151161Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:26:19.197503Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:26:19.197749Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:26:19.198145Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:26:19.198230Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:26:19.198296Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:26:19.198370Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:26:19.198434Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:26:19.198499Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:26:19.199030Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:26:19.371281Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2223:2620] 2026-01-04T02:26:19.371844Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:26:19.375135Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Describe result: PathErrorUnknown 2026-01-04T02:26:19.375203Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Creating table 2026-01-04T02:26:19.375299Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:26:19.380142Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2627], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:26:19.384300Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2234:2625] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:26:19.390133Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:26:19.481164Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:26:19.481305Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2297:2658], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:26:19.500280Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2322:2680] 2026-01-04T02:26:19.500921Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2322:2680], schemeshard id = 72075186224037897 2026-01-04T02:26:19.577300Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:26:20.342284Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:26:20.345536Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2594:2887], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:26:20.349680Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:20.353879Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:26:20.353997Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on create table tx: 28 ... ner id: 72075186224037897 2026-01-04T02:27:00.153564Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2026-01-04T02:27:00.153618Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1767493619906995 2026-01-04T02:27:00.153660Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-01-04T02:27:00.153697Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-01-04T02:27:00.153788Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-01-04T02:27:00.153905Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:27:00.154010Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-01-04T02:27:00.154096Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:27:00.154189Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:27:00.154275Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:27:00.154471Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:27:00.155943Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:27:00.157276Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-04T02:27:00.157385Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-04T02:27:00.157509Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5635:4801] Owner: [2:5634:4800]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:27:00.157574Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5635:4801] Owner: [2:5634:4800]. Column diff is empty, finishing ... 
blocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR cookie 0 2026-01-04T02:27:00.195702Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5644:4808] 2026-01-04T02:27:00.195871Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5600:4780], server id = [2:5644:4808], tablet id = 72075186224037894, status = OK 2026-01-04T02:27:00.196215Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5644:4808], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-01-04T02:27:00.196504Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5645:4809] 2026-01-04T02:27:00.196662Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5645:4809], schemeshard id = 72075186224037897 ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to ... unblocking NKikimr::TEvTxProxySchemeCache::TEvResolveKeySetResult from to STATISTICS_AGGREGATOR ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse 2026-01-04T02:27:00.322937Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-04T02:27:00.323056Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-04T02:27:00.324752Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-04T02:27:00.341069Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-04T02:27:00.341393Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-04T02:27:00.342744Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5652:4816], server id = [2:5656:4820], tablet id = 72075186224037899, status = OK 2026-01-04T02:27:00.343270Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5652:4816], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:27:00.343723Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5653:4817], server id = [2:5657:4821], tablet id = 72075186224037900, status = OK 2026-01-04T02:27:00.343787Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5653:4817], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:27:00.344628Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5654:4818], server id = [2:5658:4822], tablet id = 72075186224037901, status = OK 2026-01-04T02:27:00.344686Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5654:4818], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:27:00.345437Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5655:4819], server id = [2:5659:4823], tablet id = 72075186224037902, status = OK 2026-01-04T02:27:00.345493Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5655:4819], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:27:00.371707Z node 2 
:STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-04T02:27:00.377967Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5652:4816], server id = [2:5656:4820], tablet id = 72075186224037899 2026-01-04T02:27:00.378055Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:27:00.379693Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-04T02:27:00.380422Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5653:4817], server id = [2:5657:4821], tablet id = 72075186224037900 2026-01-04T02:27:00.380469Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:27:00.381293Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-04T02:27:00.381647Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5654:4818], server id = [2:5658:4822], tablet id = 72075186224037901 2026-01-04T02:27:00.381683Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:27:00.382592Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-04T02:27:00.382659Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-04T02:27:00.383057Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-04T02:27:00.383237Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-04T02:27:00.383553Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5672:4832], ActorId: [2:5673:4833], Starting query actor #1 [2:5674:4834] 2026-01-04T02:27:00.383647Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5673:4833], ActorId: [2:5674:4834], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:27:00.391803Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5655:4819], server id = [2:5659:4823], tablet id = 72075186224037902 2026-01-04T02:27:00.391868Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:27:00.398908Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5673:4833], ActorId: [2:5674:4834], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZmUzZGE0MTgtN2QyODlhYWItOTA1NDIzMmUtYWIxZDczNzA=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:27:00.558805Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5683:4843]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:27:00.559322Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:27:00.559408Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5683:4843], StatRequests.size() = 1 2026-01-04T02:27:00.995702Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5673:4833], ActorId: [2:5674:4834], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZmUzZGE0MTgtN2QyODlhYWItOTA1NDIzMmUtYWIxZDczNzA=, TxId: 2026-01-04T02:27:01.002355Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5673:4833], ActorId: [2:5674:4834], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmUzZGE0MTgtN2QyODlhYWItOTA1NDIzMmUtYWIxZDczNzA=, TxId: 2026-01-04T02:27:01.003344Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5672:4832], ActorId: [2:5673:4833], Got response [2:5674:4834] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-04T02:27:01.003770Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5697:4849]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-04T02:27:01.004427Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:27:01.004836Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:27:01.004897Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:27:01.006042Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:27:01.009968Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:27:01.010096Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:27:01.019713Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> Cdc::VirtualTimestamps[PqRunner] [GOOD] >> Cdc::VirtualTimestamps[YdsRunner] >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] >> TBackupCollectionTests::IncrementalBackupOperation [GOOD] >> TBackupCollectionTests::EmptyIncrementalBackupRace |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |84.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/ydb-core-blobstorage-dsproxy-ut_fat >> alter_compression.py::TestAllCompression::test_all_supported_compression[lz4_compression-COMPRESSION = "lz4"] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_10_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 10] >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_compression-COMPRESSION = "zstd"] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::ShardRestartAfterDropTableAndAbort [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:23:25.863357Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:23:26.106735Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:23:26.108753Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:23:26.108830Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:23:26.448438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:26.448614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:26.553626Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493395339693 != 1767493395339696 2026-01-04T02:23:26.569672Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:26.622610Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:26.729043Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:23:27.148521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:27.163790Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:27.273575Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-04T02:23:27.273676Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:23:27.275114Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-04T02:23:27.511128Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T02:23:27.511257Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:23:27.512074Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T02:23:27.512225Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:23:27.512625Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:23:27.512793Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:23:27.512947Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:23:27.515698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:27.516338Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:23:27.517249Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:23:27.517330Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:23:27.581979Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:23:27.583284Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:23:27.583637Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-04T02:23:27.583991Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:23:27.734697Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:23:27.735601Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:23:27.735783Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:23:27.749040Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:23:27.749200Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:23:27.749270Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:23:27.749786Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:23:27.750016Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:23:27.750147Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:23:27.750772Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:23:27.855857Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:23:27.856150Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:23:27.856310Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:23:27.856373Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:23:27.856418Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:23:27.856469Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:23:27.856664Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:23:27.856722Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:23:27.857167Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:23:27.857301Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:23:27.857411Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:23:27.857456Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:23:27.857501Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:23:27.857560Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:23:27.857606Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:23:27.857646Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:23:27.857702Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:23:27.857879Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:23:27.857922Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:23:27.858018Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:23:27.858543Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-04T02:23:27.858605Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:23:27.858744Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:23:27.859059Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:23:27.859133Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:23:27.859235Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:23:27.859293Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status ... [14:1068:2877] trace_id# 2026-01-04T02:27:03.379623Z node 14 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [14:1201:2877] TxId: 281474976715665. Ctx: { TraceId: 01ke3d8qdp8mdz7tten89yrpam, Database: , SessionId: ydb://session/3?node_id=14&id=M2Y1NDNhZjctZjE3OGIwMTEtY2E3MjM1ZTYtMWIzZmZhNTg=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# UNAVAILABLE Issues# {
: Error: Wrong shard state. Table `/Root/table`., code: 2005 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } } trace_id# 2026-01-04T02:27:03.380289Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=14&id=M2Y1NDNhZjctZjE3OGIwMTEtY2E3MjM1ZTYtMWIzZmZhNTg=, ActorId: [14:1047:2877], ActorState: ExecuteState, LegacyTraceId: 01ke3d8qdp8mdz7tten89yrpam, Create QueryResponse for error on request, msg: status# UNAVAILABLE issues# { message: "Wrong shard state. Table `/Root/table`." issue_code: 2005 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state)" issue_code: 2029 severity: 1 } } trace_id# 2026-01-04T02:27:03.381475Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [14:1068:2877], Recipient [14:894:2772]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true } Op: Rollback } 2026-01-04T02:27:03.381524Z node 14 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2026-01-04T02:27:03.381618Z node 14 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=0; 2026-01-04T02:27:03.381663Z node 14 :TX_DATASHARD NOTICE: datashard.cpp:3149: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) ... 
blocking NKikimr::NLongTxService::TEvLongTxService::TEvLockStatus from LONG_TX_SERVICE to TX_DATASHARD_ACTOR cookie 0 2026-01-04T02:27:03.383925Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 65543, Sender [14:828:2723], Recipient [14:894:2772]: NActors::TEvents::TEvPoison 2026-01-04T02:27:03.384892Z node 14 :TX_DATASHARD INFO: datashard.cpp:189: OnDetach: 72075186224037888 2026-01-04T02:27:03.384999Z node 14 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-04T02:27:03.488703Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [14:1206:3007], Recipient [14:1208:3008]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:27:03.498751Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [14:1206:3007], Recipient [14:1208:3008]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:27:03.499013Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828684, Sender [14:1206:3007], Recipient [14:1208:3008]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:27:03.503729Z node 14 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [14:1208:3008] 2026-01-04T02:27:03.504195Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:27:03.514486Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:27:03.516444Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:27:03.519895Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:27:03.520037Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:27:03.520155Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:27:03.520899Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:27:03.521351Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:27:03.521447Z node 14 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:27:03.521546Z node 14 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state PreOffline tabletId 72075186224037888 2026-01-04T02:27:03.521753Z node 14 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 1 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:27:03.521893Z node 14 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast PreOffline tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:27:03.522112Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [14:1222:3015] 2026-01-04T02:27:03.522207Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:27:03.522291Z node 14 :TX_DATASHARD INFO: datashard.cpp:1292: Cannot activate change sender: at tablet: 72075186224037888, state: PreOffline, queue size: 0 2026-01-04T02:27:03.522362Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:27:03.522807Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [14:70:2117], Recipient [14:1208:3008]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 14 Status: STATUS_NOT_FOUND 2026-01-04T02:27:03.523248Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [14:1208:3008], Recipient [14:1208:3008]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:27:03.523300Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:27:03.523506Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435075, Sender [14:1208:3008], Recipient [14:1208:3008]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressResendReadSet 2026-01-04T02:27:03.523551Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3181: StateWork, processing event TEvPrivate::TEvProgressResendReadSet 2026-01-04T02:27:03.524092Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [14:26:2073], Recipient [14:1208:3008]: {TEvRegisterTabletResult TabletId# 72075186224037888 Entry# 700} 2026-01-04T02:27:03.524163Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-01-04T02:27:03.524243Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 700 2026-01-04T02:27:03.524322Z node 14 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:27:03.526858Z node 14 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:27:03.527002Z node 14 :TX_DATASHARD INFO: datashard__progress_tx.cpp:21: Progress tx at non-ready tablet 72075186224037888 state 5 2026-01-04T02:27:03.527238Z node 14 :TX_DATASHARD DEBUG: datashard__progress_resend_rs.cpp:14: Start TTxProgressResendRS at tablet 72075186224037888 2026-01-04T02:27:03.527338Z node 14 :TX_DATASHARD INFO: datashard.cpp:4131: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2026-01-04T02:27:03.527436Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715663 2026-01-04T02:27:03.528154Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [14:26:2073], Recipient [14:1208:3008]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 500 NextReadStep# 700 ReadStep# 700 } 2026-01-04T02:27:03.528220Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2026-01-04T02:27:03.528306Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 500 next step 700 2026-01-04T02:27:03.528493Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [14:1208:3008], Recipient [14:1108:2920]: {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 
ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2026-01-04T02:27:03.528533Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-04T02:27:03.528609Z node 14 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715663 2026-01-04T02:27:03.528749Z node 14 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2026-01-04T02:27:03.528847Z node 14 :TX_DATASHARD NOTICE: datashard_pipeline.cpp:734: Outdated readset for 600:281474976715663 at 72075186224037889 2026-01-04T02:27:03.528930Z node 14 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-04T02:27:03.529030Z node 14 :TX_DATASHARD DEBUG: datashard__readset.cpp:99: Send RS Ack at 72075186224037889 {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 138 Seqno# 1 Flags# 0} 2026-01-04T02:27:03.529217Z node 14 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:27:03.529386Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [14:1108:2920], Recipient [14:1208:3008]: {TEvReadSet step# 600 txid# 281474976715663 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 1} 2026-01-04T02:27:03.529460Z node 14 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:27:03.529545Z node 14 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715663 2026-01-04T02:27:03.529646Z node 14 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:27:03.691247Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |84.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream_reboots/ydb-core-tx-schemeshard-ut_cdc_stream_reboots >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-7 [GOOD] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |84.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge/ydb-core-tx-schemeshard-ut_split_merge >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::LockedWriteWithAsyncIndexAndVolatileCommit-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:23:27.162143Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:23:27.408420Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:23:27.408903Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:23:27.408971Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:23:28.095186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:28.095967Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:28.211637Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493398517806 != 1767493398517809 2026-01-04T02:23:28.242327Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:28.303555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:28.436894Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:23:28.873674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:28.891653Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:29.008273Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-04T02:23:29.008350Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:23:29.008467Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-04T02:23:29.278340Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T02:23:29.278468Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:23:29.279713Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T02:23:29.279829Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:23:29.280316Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:23:29.280489Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:23:29.280627Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:23:29.280987Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:23:29.282774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:29.285488Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:23:29.285619Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:23:29.343340Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:23:29.344421Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:23:29.344747Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:23:29.345006Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:23:29.435529Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:23:29.436394Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:23:29.436515Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:23:29.438336Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:23:29.438439Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:23:29.438505Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:23:29.438945Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:23:29.439115Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:23:29.439219Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:23:29.439834Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:23:29.513499Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:23:29.513747Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:23:29.513882Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:23:29.513939Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:23:29.513988Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:23:29.514025Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:23:29.514186Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:23:29.514243Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:23:29.514625Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:23:29.514745Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:23:29.514823Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:23:29.514882Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:23:29.514929Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:23:29.514975Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:23:29.515014Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:23:29.515049Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:23:29.515095Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:23:29.515238Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:23:29.515280Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:23:29.515325Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:23:29.515754Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-04T02:23:29.515803Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:23:29.515929Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:23:29.516218Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:23:29.516280Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:23:29.516368Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:23:29.518741Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status ... node 13 :TX_DATASHARD INFO: datashard_change_sending.cpp:335: TTxRemoveChangeRecords Complete: removed# 3, left# 0, at tablet# 72075186224037890 2026-01-04T02:27:01.233491Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [13:1266:3068]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-04T02:27:03.404100Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [13:68:2115] Handle TEvExecuteKqpTransaction 2026-01-04T02:27:03.404244Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [13:68:2115] TxId# 281474976715671 ProcessProposeKqpTransaction TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 40 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2026-01-04T02:27:03.409567Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [13:1847:3545], Recipient [13:993:2849]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 40 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2026-01-04T02:27:03.422467Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-04T02:27:03.422650Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037889 CompleteEdge# v9001/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-04T02:27:03.422757Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037889 changed HEAD read to non-repeatable v10000/18446744073709551615 2026-01-04T02:27:03.422894Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2026-01-04T02:27:03.423164Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-04T02:27:03.423259Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2026-01-04T02:27:03.423369Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-04T02:27:03.423454Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-04T02:27:03.423526Z node 13 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037889 2026-01-04T02:27:03.423602Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-04T02:27:03.423640Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-04T02:27:03.423666Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2026-01-04T02:27:03.423694Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2026-01-04T02:27:03.423908Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 40 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-01-04T02:27:03.424423Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[13:1847:3545], 0} after executionsCount# 1 2026-01-04T02:27:03.424550Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[13:1847:3545], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2026-01-04T02:27:03.424694Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[13:1847:3545], 0} finished in read 2026-01-04T02:27:03.424822Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-04T02:27:03.424858Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2026-01-04T02:27:03.424889Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2026-01-04T02:27:03.424921Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2026-01-04T02:27:03.424975Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-04T02:27:03.425001Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2026-01-04T02:27:03.425039Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037889 has finished 2026-01-04T02:27:03.425119Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-04T02:27:03.425338Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-04T02:27:03.443234Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [13:1847:3545], Recipient [13:993:2849]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:27:03.443357Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } }, { items { uint32_value: 2 } items { uint32_value: 21 } } 
2026-01-04T02:27:05.051419Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [13:68:2115] Handle TEvExecuteKqpTransaction 2026-01-04T02:27:05.051538Z node 13 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [13:68:2115] TxId# 281474976715672 ProcessProposeKqpTransaction TEvRead: ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 43 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false 2026-01-04T02:27:05.060898Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [13:1879:3571], Recipient [13:1266:3068]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 43 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 3 2026-01-04T02:27:05.061143Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2026-01-04T02:27:05.061238Z node 13 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037891 CompleteEdge# v9001/281474976715670 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-04T02:27:05.061315Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037891 changed HEAD read to non-repeatable v10000/18446744073709551615 2026-01-04T02:27:05.061434Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit CheckRead 2026-01-04T02:27:05.061626Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2026-01-04T02:27:05.061716Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit CheckRead 2026-01-04T02:27:05.061792Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037891 to execution unit BuildAndWaitDependencies 2026-01-04T02:27:05.066261Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit BuildAndWaitDependencies 2026-01-04T02:27:05.066376Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037891 2026-01-04T02:27:05.066461Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2026-01-04T02:27:05.066506Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit BuildAndWaitDependencies 2026-01-04T02:27:05.066569Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037891 to execution unit ExecuteRead 2026-01-04T02:27:05.066605Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit ExecuteRead 2026-01-04T02:27:05.066834Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037891 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 43 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-01-04T02:27:05.067410Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037891 Complete read# {[13:1879:3571], 0} after executionsCount# 1 2026-01-04T02:27:05.067536Z node 13 :TX_DATASHARD TRACE: 
datashard__read_iterator.cpp:2370: 72075186224037891 read iterator# {[13:1879:3571], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2026-01-04T02:27:05.067674Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037891 read iterator# {[13:1879:3571], 0} finished in read 2026-01-04T02:27:05.067809Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2026-01-04T02:27:05.067849Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit ExecuteRead 2026-01-04T02:27:05.067886Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037891 to execution unit CompletedOperations 2026-01-04T02:27:05.067922Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037891 on unit CompletedOperations 2026-01-04T02:27:05.067979Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037891 is Executed 2026-01-04T02:27:05.068006Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037891 executing on unit CompletedOperations 2026-01-04T02:27:05.068047Z node 13 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037891 has finished 2026-01-04T02:27:05.068140Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2026-01-04T02:27:05.068357Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 2026-01-04T02:27:05.074173Z node 13 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [13:1879:3571], Recipient [13:1266:3068]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:27:05.074303Z node 13 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037891 ReadCancel: { ReadId: 0 } { items { uint32_value: 10 } items { uint32_value: 110 } }, { items { uint32_value: 20 } items { uint32_value: 210 } } |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest >> KqpResultSetFormats::ArrowFormat_Types_Optional_2 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_3 >> TBackupCollectionTests::DropCollectionAfterIncrementalRestore [GOOD] >> TBackupCollectionTests::BackupRestoreCoveringIndex >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite-Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile >> DataShardWrite::UpsertNoLocksArbiter [GOOD] >> DataShardWrite::UpsertLostPrepareArbiter |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names |84.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_system_names/ydb-core-tx-schemeshard-ut_system_names >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 >> TGRpcCmsTest::RemoveWithAnotherTokenTest |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console |84.2%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut_console/ydb-core-cms-console-ut_console ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot/unittest >> DataShardSnapshots::BrokenLockChangesDontLeak [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:23:23.936331Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:23:24.139071Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:23:24.139524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:23:24.139579Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:23:24.582846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:23:24.583027Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:23:24.750834Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493396516657 != 1767493396516660 2026-01-04T02:23:24.766718Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:23:24.819099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:23:24.948768Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:23:25.600026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:23:25.635838Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:23:25.760309Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-04T02:23:25.760386Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:23:25.760506Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-04T02:23:26.194816Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T02:23:26.194914Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:23:26.195945Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T02:23:26.196065Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:23:26.196958Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:23:26.197124Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:23:26.197257Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:23:26.197580Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:23:26.199237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:23:26.200275Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:23:26.200363Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:23:26.245608Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:23:26.247630Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:23:26.247948Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:23:26.248277Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:23:26.362151Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:23:26.362992Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:23:26.363110Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:23:26.364830Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:23:26.364920Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:23:26.364971Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:23:26.366020Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:23:26.366168Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:23:26.366305Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:23:26.366884Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:23:26.412812Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:23:26.413021Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:23:26.413968Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:23:26.414034Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:23:26.414074Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:23:26.414114Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:23:26.414278Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:23:26.414339Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:23:26.414796Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:23:26.414894Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:23:26.414975Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:23:26.415029Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:23:26.415101Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:23:26.415150Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:23:26.415187Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:23:26.415219Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:23:26.415265Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:23:26.415415Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:23:26.415450Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:23:26.415497Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:23:26.415932Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-04T02:23:26.415974Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:23:26.416123Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:23:26.416407Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:23:26.416461Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:23:26.416543Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:23:26.416589Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status ... ode 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:27:07.112158Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:27:07.112251Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-04T02:27:07.112282Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:27:07.112329Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2026-01-04T02:27:07.112445Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-04T02:27:07.112536Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:5] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T02:27:07.112625Z node 16 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 5 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_LOCKS_BROKEN 2026-01-04T02:27:07.112864Z node 16 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 5 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2026-01-04T02:27:07.112997Z node 16 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:27:07.113401Z node 16 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [16:1110:2874], Table: `/Root/table` ([72057594046644480:38:1]), SessionActorId: [16:1045:2874]Got LOCKS BROKEN for table `/Root/table`. ShardID=72075186224037888, Sink=[16:1110:2874].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2026-01-04T02:27:07.113716Z node 16 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [16:1103:2874], SessionActorId: [16:1045:2874], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[16:1045:2874]. 2026-01-04T02:27:07.114361Z node 16 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=16&id=ZmRjYzgyNGUtZDlkNDQ2YS1mYzE2YmJkNy1lMGM2NmZhOA==, ActorId: [16:1045:2874], ActorState: ExecuteState, LegacyTraceId: 01ke3d8twkdpgwezmqxp77y1t8, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [16:1104:2874] from: [16:1103:2874] trace_id# 2026-01-04T02:27:07.114636Z node 16 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [16:1104:2874] TxId: 281474976715663. Ctx: { TraceId: 01ke3d8twkdpgwezmqxp77y1t8, Database: , SessionId: ydb://session/3?node_id=16&id=ZmRjYzgyNGUtZDlkNDQ2YS1mYzE2YmJkNy1lMGM2NmZhOA==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/table`., code: 2001 subissue: {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } } trace_id# 2026-01-04T02:27:07.115378Z node 16 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=16&id=ZmRjYzgyNGUtZDlkNDQ2YS1mYzE2YmJkNy1lMGM2NmZhOA==, ActorId: [16:1045:2874], ActorState: ExecuteState, LegacyTraceId: 01ke3d8twkdpgwezmqxp77y1t8, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } } trace_id# 2026-01-04T02:27:07.116844Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [16:1103:2874], Recipient [16:893:2772]: NKikimrDataEvents.TEvWrite TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 } Op: Rollback } 2026-01-04T02:27:07.116892Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2026-01-04T02:27:07.117011Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [16:893:2772], Recipient [16:893:2772]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2026-01-04T02:27:07.117043Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-04T02:27:07.117102Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2026-01-04T02:27:07.117214Z node 16 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 } Op: Rollback } 2026-01-04T02:27:07.117321Z node 16 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-04T02:27:07.117419Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2026-01-04T02:27:07.117457Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-04T02:27:07.117480Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2026-01-04T02:27:07.117505Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:27:07.117536Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:27:07.117590Z node 16 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v500/18446744073709551615 ImmediateWriteEdge# v501/0 ImmediateWriteEdgeReplied# v501/0 2026-01-04T02:27:07.117680Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2026-01-04T02:27:07.117722Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 
2026-01-04T02:27:07.117756Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:27:07.117826Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2026-01-04T02:27:07.117862Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2026-01-04T02:27:07.117892Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-04T02:27:07.117920Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2026-01-04T02:27:07.117948Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2026-01-04T02:27:07.117972Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2026-01-04T02:27:07.117998Z node 16 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037888 2026-01-04T02:27:07.118101Z node 16 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 2026-01-04T02:27:07.118169Z node 16 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:6] at 72075186224037888 2026-01-04T02:27:07.118270Z node 16 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-04T02:27:07.118372Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:27:07.118410Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2026-01-04T02:27:07.118503Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-04T02:27:07.118584Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T02:27:07.118627Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is DelayComplete 2026-01-04T02:27:07.118657Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-04T02:27:07.118695Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:27:07.118729Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:27:07.118786Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-04T02:27:07.118835Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:27:07.118881Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished 2026-01-04T02:27:07.118955Z node 16 :TX_DATASHARD TRACE: datashard__write.cpp:150: 
TTxWrite complete: at tablet# 72075186224037888 2026-01-04T02:27:07.118992Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T02:27:07.119049Z node 16 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-04T02:27:07.119115Z node 16 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:27:07.121580Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [16:70:2117], Recipient [16:893:2772]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 16 Status: STATUS_NOT_FOUND 2026-01-04T02:27:07.125872Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [16:1118:2924], Recipient [16:893:2772]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:27:07.125994Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:27:07.126093Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [16:1117:2923], serverId# [16:1118:2924], sessionId# [0:0:0] 2026-01-04T02:27:07.126387Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553224, Sender [16:828:2723], Recipient [16:893:2772]: NKikimr::TEvDataShard::TEvGetOpenTxs |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 >> KqpScanArrowInChanels::AllTypesColumns [GOOD] >> KqpScanArrowInChanels::SingleKey >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 >> DataShardWrite::UpsertPreparedNoTxCache+Volatile [GOOD] >> DataShardWrite::UpsertPreparedNoTxCache-Volatile >> TGRpcCmsTest::DisabledTxTest >> KqpResultSetFormats::ArrowFormat_Types_EmptyList [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Dict_1 >> KqpResultSetFormats::ArrowFormat_LargeTable [GOOD] >> KqpResultSetFormats::ArrowFormat_LargeTable_Limit >> Cdc::VirtualTimestamps[YdsRunner] [GOOD] >> Cdc::VirtualTimestamps[TopicRunner] >> KqpScanArrowFormat::AllTypesColumns [GOOD] >> KqpScanArrowFormat::AggregateByColumn |84.2%| [TA] $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... results_accumulator.log} |84.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 >> Cdc::OldImageLogDebezium [GOOD] >> Cdc::NewImageLogDebezium >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 >> DstCreator::WithSyncIndexAndIntermediateDir >> TBackupCollectionTests::EmptyIncrementalBackupRace [GOOD] >> TBackupCollectionTests::SingleTableWithGlobalSyncIndex >> TBackupCollectionTests::BackupRestoreCoveringIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_read_update_write_load.py::TestReadUpdateWriteLoad::test_multi[read_update_write_load] [GOOD] Test command err: Was written: 0.0 MiB, Speed: 0.0 MiB/s Step 1. only write Write: 10% 3551 30% 3551 50% 3551 90% 3551 99% 3551 ms Write: 10% 5797 30% 5797 50% 5797 90% 5797 99% 5797 ms Write: 10% 9952 30% 9952 50% 9952 90% 9952 99% 9952 ms Write: 10% 16410 30% 16410 50% 16410 90% 16410 99% 16410 ms Write: 10% 18076 30% 18076 50% 18076 90% 18076 99% 18076 ms Write: 10% 28984 30% 28984 50% 28984 90% 28984 99% 28984 ms Write: 10% 24655 30% 24655 50% 24655 90% 24655 99% 24655 ms Write: 10% 23741 30% 23741 50% 23741 90% 23741 99% 23741 ms Write: 10% 29211 30% 29211 50% 29211 90% 29211 99% 29211 ms Write: 10% 27726 30% 27726 50% 27726 90% 27726 99% 27726 ms Write: 10% 30253 30% 30253 50% 30253 90% 30253 99% 30253 ms Write: 10% 24086 30% 24086 50% 24086 90% 24086 99% 24086 ms Write: 10% 25064 30% 25064 50% 25064 90% 25064 99% 25064 ms Write: 10% 26164 30% 26164 50% 26164 90% 26164 99% 26164 ms Write: 10% 28298 30% 28298 50% 28298 90% 28298 99% 28298 ms Write: 10% 25124 30% 25124 50% 25124 90% 25124 99% 25124 ms Write: 10% 17601 30% 17601 50% 17601 90% 17601 99% 17601 ms Write: 10% 23480 30% 23480 50% 23480 90% 23480 99% 23480 ms Write: 10% 27761 30% 27761 50% 27761 90% 27761 99% 27761 ms Write: 10% 11487 30% 11487 50% 11487 90% 11487 99% 11487 ms Write: 10% 19158 30% 19158 50% 19158 90% 19158 99% 19158 ms Write: 10% 20431 30% 20431 50% 20431 90% 20431 99% 20431 ms Write: 10% 16600 30% 16600 50% 16600 90% 16600 99% 16600 ms Write: 10% 18099 30% 18099 50% 18099 90% 18099 99% 18099 ms Write: 10% 7596 30% 7596 50% 7596 90% 7596 99% 7596 ms Write: 10% 24567 30% 24567 50% 24567 90% 24567 99% 24567 ms Write: 10% 13115 30% 13115 50% 13115 90% 13115 99% 13115 ms Write: 10% 21398 30% 21398 50% 21398 90% 21398 99% 21398 ms Write: 10% 7292 30% 7292 50% 7292 90% 7292 99% 7292 ms Write: 10% 13476 30% 13476 50% 13476 90% 13476 99% 13476 ms Write: 10% 7328 30% 7328 50% 7328 90% 7328 99% 7328 ms Write: 10% 4408 30% 4408 50% 4408 90% 4408 99% 4408 ms Write: 10% 5298 30% 5298 50% 5298 90% 5298 99% 5298 ms Write: 10% 12483 30% 12483 50% 12483 90% 12483 99% 12483 ms Write: 10% 8798 30% 8798 50% 8798 90% 8798 99% 8798 ms Write: 10% 3560 30% 3560 50% 3560 90% 3560 99% 3560 ms Write: 10% 11714 30% 11714 50% 11714 90% 11714 99% 11714 ms Write: 10% 10867 30% 10867 50% 10867 90% 10867 99% 10867 ms Write: 10% 3399 30% 3399 50% 3399 90% 3399 99% 3399 ms Write: 10% 10087 30% 10087 50% 10087 90% 10087 99% 10087 ms Write: 10% 4798 30% 4798 50% 4798 90% 4798 99% 4798 ms Write: 10% 11025 30% 11025 50% 11025 90% 11025 99% 11025 ms Write: 10% 10194 30% 10194 50% 10194 90% 10194 99% 10194 ms Write: 10% 4874 30% 4874 50% 4874 90% 4874 99% 4874 ms Write: 10% 3510 30% 3510 50% 3510 90% 3510 99% 3510 ms Write: 10% 9018 30% 9018 
50% 9018 90% 9018 99% 9018 ms Write: 10% 14633 30% 14633 50% 14633 90% 14633 99% 14633 ms Write: 10% 5313 30% 5313 50% 5313 90% 5313 99% 5313 ms Write: 10% 19716 30% 19716 50% 19716 90% 19716 99% 19716 ms Write: 10% 12893 30% 12893 50% 12893 90% 12893 99% 12893 ms Write: 10% 14152 30% 14152 50% 14152 90% 14152 99% 14152 ms Write: 10% 5144 30% 5144 50% 5144 90% 5144 99% 5144 ms Write: 10% 3158 30% 3158 50% 3158 90% 3158 99% 3158 ms Write: 10% 4896 30% 4896 50% 4896 90% 4896 99% 4896 ms Write: 10% 6186 30% 6186 50% 6186 90% 6186 99% 6186 ms Write: 10% 8719 30% 8719 50% 8719 90% 8719 99% 8719 ms Write: 10% 4340 30% 4340 50% 4340 90% 4340 99% 4340 ms Write: 10% 7447 30% 7447 50% 7447 90% 7447 99% 7447 ms Write: 10% 5796 30% 5796 50% 5796 90% 5796 99% 5796 ms Write: 10% 5842 30% 5842 50% 5842 90% 5842 99% 5842 ms Write: 10% 4940 30% 4940 50% 4940 90% 4940 99% 4940 ms Write: 10% 7301 30% 7301 50% 7301 90% 7301 99% 7301 ms Write: 10% 5337 30% 5337 50% 5337 90% 5337 99% 5337 ms Write: 10% 7400 30% 7400 50% 7400 90% 7400 99% 7400 ms Step 2. read write Write: 10% 11855 30% 11855 50% 11855 90% 11855 99% 11855 ms Write: 10% 10945 30% 10945 50% 10945 90% 10945 99% 10945 ms Write: 10% 11615 30% 11615 50% 11615 90% 11615 99% 11615 ms Write: 10% 10712 30% 10712 50% 10712 90% 10712 99% 10712 ms Write: 10% 12214 30% 12214 50% 12214 90% 12214 99% 12214 ms Write: 10% 19171 30% 19171 50% 19171 90% 19171 99% 19171 ms Was written: 12.5 MiB, Speed: 0.20833333333333334 MiB/s Write: 10% 19628 30% 19628 50% 19628 90% 19628 99% 19628 ms Write: 10% 24494 30% 24494 50% 24494 90% 24494 99% 24494 ms Write: 10% 23610 30% 23610 50% 23610 90% 23610 99% 23610 ms Write: 10% 22931 30% 22931 50% 22931 90% 22931 99% 22931 ms Write: 10% 22715 30% 22715 50% 22715 90% 22715 99% 22715 ms Write: 10% 24474 30% 24474 50% 24474 90% 24474 99% 24474 ms Write: 10% 23039 30% 23039 50% 23039 90% 23039 99% 23039 ms Write: 10% 22508 30% 22508 50% 22508 90% 22508 99% 22508 ms Write: 10% 14692 30% 14692 50% 14692 90% 14692 99% 14692 ms Write: 10% 23311 30% 23311 50% 23311 90% 23311 99% 23311 ms Write: 10% 19862 30% 19862 50% 19862 90% 19862 99% 19862 ms Write: 10% 21799 30% 21799 50% 21799 90% 21799 99% 21799 ms Write: 10% 20042 30% 20042 50% 20042 90% 20042 99% 20042 ms Write: 10% 18844 30% 18844 50% 18844 90% 18844 99% 18844 ms Write: 10% 14476 30% 14476 50% 14476 90% 14476 99% 14476 ms Write: 10% 16821 30% 16821 50% 16821 90% 16821 99% 16821 ms Write: 10% 17041 30% 17041 50% 17041 90% 17041 99% 17041 ms Write: 10% 16865 30% 16865 50% 16865 90% 16865 99% 16865 ms Write: 10% 9999 30% 9999 50% 9999 90% 9999 99% 9999 ms Write: 10% 20005 30% 20005 50% 20005 90% 20005 99% 20005 ms Write: 10% 15915 30% 15915 50% 15915 90% 15915 99% 15915 ms Write: 10% 19961 30% 19961 50% 19961 90% 19961 99% 19961 ms Write: 10% 6970 30% 6970 50% 6970 90% 6970 99% 6970 ms Write: 10% 6195 30% 6195 50% 6195 90% 6195 99% 6195 ms Write: 10% 14769 30% 14769 50% 14769 90% 14769 99% 14769 ms Write: 10% 5856 30% 5856 50% 5856 90% 5856 99% 5856 ms Write: 10% 13363 30% 13363 50% 13363 90% 13363 99% 13363 ms Write: 10% 6333 30% 6333 50% 6333 90% 6333 99% 6333 ms Write: 10% 17951 30% 17951 50% 17951 90% 17951 99% 17951 ms Write: 10% 10856 30% 10856 50% 10856 90% 10856 99% 10856 ms Write: 10% 13557 30% 13557 50% 13557 90% 13557 99% 13557 ms Write: 10% 5096 30% 5096 50% 5096 90% 5096 99% 5096 ms Write: 10% 9376 30% 9376 50% 9376 90% 9376 99% 9376 ms Write: 10% 16220 30% 16220 50% 16220 90% 16220 99% 16220 ms Write: 10% 6034 30% 6034 50% 6034 90% 6034 99% 6034 ms Write: 10% 3684 30% 
3684 50% 3684 90% 3684 99% 3684 ms Write: 10% 4168 30% 4168 50% 4168 90% 4168 99% 4168 ms Write: 10% 4600 30% 4600 50% 4600 90% 4600 99% 4600 ms Write: 10% 14553 30% 14553 50% 14553 90% 14553 99% 14553 ms Write: 10% 3294 30% 3294 50% 3294 90% 3294 99% 3294 ms Write: 10% 8727 30% 8727 50% 8727 90% 8727 99% 8727 ms Write: 10% 7256 30% 7256 50% 7256 90% 7256 99% 7256 ms Write: 10% 7652 30% 7652 50% 7652 90% 7652 99% 7652 ms Write: 10% 9994 30% 9994 50% 9994 90% 9994 99% 9994 ms Write: 10% 13889 30% 13889 50% 13889 90% 13889 99% 13889 ms Write: 10% 5002 30% 5002 50% 5002 90% 5002 99% 5002 ms Write: 10% 7104 30% 7104 50% 7104 90% 7104 99% 7104 ms Write: 10% 9774 30% 9774 50% 9774 90% 9774 99% 9774 ms Write: 10% 13723 30% 13723 50% 13723 90% 13723 99% 13723 ms Write: 10% 6693 30% 6693 50% 6693 90% 6693 99% 6693 ms Write: 10% 3006 30% 3006 50% 3006 90% 3006 99% 3006 ms Write: 10% 10570 30% 10570 50% 10570 90% 10570 99% 10570 ms Write: 10% 7668 30% 7668 50% 7668 90% 7668 99% 7668 ms Write: 10% 3555 30% 3555 50% 3555 90% 3555 99% 3555 ms Write: 10% 3631 30% 3631 50% 3631 90% 3631 99% 3631 ms Write: 10% 2684 30% 2684 50% 2684 90% 2684 99% 2684 ms Write: 10% 8039 30% 8039 50% 8039 90% 8039 99% 8039 ms Write: 10% 3625 30% 3625 50% 3625 90% 3625 99% 3625 ms Read: 10% 3982 30% 5003 50% 6023 90% 25101 99% 29394 ms Step 3. write modify Write: 10% 5253 30% 5253 50% 5253 90% 5253 99% 5253 ms Write: 10% 7676 30% 7676 50% 7676 90% 7676 99% 7676 ms Write: 10% 7503 30% 7503 50% 7503 90% 7503 99% 7503 ms Write: 10% 12833 30% 12833 50% 12833 90% 12833 99% 12833 ms Write: 10% 12004 30% 12004 50% 12004 90% 12004 99% 12004 ms Write: 10% 11409 30% 11409 50% 11409 90% 11409 99% 11409 ms Write: 10% 20133 30% 20133 50% 20133 90% 20133 99% 20133 ms Write: 10% 20930 30% 20930 50% 20930 90% 20930 99% 20930 ms Write: 10% 21533 30% 21533 50% 21533 90% 21533 99% 21533 ms Write: 10% 22435 30% 22435 50% 22435 90% 22435 99% 22435 ms Write: 10% 19640 30% 19640 50% 19640 90% 19640 99% 19640 ms Write: 10% 16779 30% 16779 50% 16779 90% 16779 99% 16779 ms Write: 10% 20140 30% 20140 50% 20140 90% 20140 99% 20140 ms Write: 10% 21984 30% 21984 50% 21984 90% 21984 99% 21984 ms Write: 10% 20363 30% 20363 50% 20363 90% 20363 99% 20363 ms Write: 10% 20428 30% 20428 50% 20428 90% 20428 99% 20428 ms Write: 10% 5074 30% 5074 50% 5074 90% 5074 99% 5074 ms Write: 10% 21580 30% 21580 50% 21580 90% 21580 99% 21580 ms Write: 10% 19836 30% 19836 50% 19836 90% 19836 99% 19836 ms Write: 10% 20028 30% 20028 50% 20028 90% 20028 99% 20028 ms Write: 10% 15034 30% 15034 50% 15034 90% 15034 99% 15034 ms Write: 10% 14179 30% 14179 50% 14179 90% 14179 99% 14179 ms Write: 10% 19441 30% 19441 50% 19441 90% 19441 99% 19441 ms Write: 10% 21048 30% 21048 50% 21048 90% 21048 99% 21048 ms Write: 10% 15493 30% 15493 50% 15493 90% 15493 99% 15493 ms Write: 10% 20224 30% 20224 50% 20224 90% 20224 99% 20224 ms Write: 10% 16286 30% 16286 50% 16286 90% 16286 99% 16286 ms Write: 10% 18278 30% 18278 50% 18278 90% 18278 99% 18278 ms Write: 10% 4701 30% 4701 50% 4701 90% 4701 99% 4701 ms Write: 10% 17440 30% 17440 50% 17440 90% 17440 99% 17440 ms Write: 10% 18331 30% 18331 50% 18331 90% 18331 99% 18331 ms Write: 10% 16480 30% 16480 50% 16480 90% 16480 99% 16480 ms Write: 10% 18518 30% 18518 50% 18518 90% 18518 99% 18518 ms Write: 10% 15342 30% 15342 50% 15342 90% 15342 99% 15342 ms Write: 10% 18307 30% 18307 50% 18307 90% 18307 99% 18307 ms Write: 10% 14225 30% 14225 50% 14225 90% 14225 99% 14225 ms Write: 10% 8619 30% 8619 50% 8619 90% 8619 99% 8619 ms Write: 10% 18182 30% 
18182 50% 18182 90% 18182 99% 18182 ms Write: 10% 13420 30% 13420 50% 13420 90% 13420 99% 13420 ms Write: 10% 4149 30% 4149 50% 4149 90% 4149 99% 4149 ms Write: 10% 20300 30% 20300 50% 20300 90% 20300 99% 20300 ms Write: 10% 7985 30% 7985 50% 7985 90% 7985 99% 7985 ms Write: 10% 7971 30% 7971 50% 7971 90% 7971 99% 7971 ms Write: 10% 12146 30% 12146 50% 12146 90% 12146 99% 12146 ms Write: 10% 12115 30% 12115 50% 12115 90% 12115 99% 12115 ms Write: 10% 12950 30% 12950 50% 12950 90% 12950 99% 12950 ms Write: 10% 4729 30% 4729 50% 4729 90% 4729 99% 4729 ms Write: 10% 5287 30% 5287 50% 5287 90% 5287 99% 5287 ms Write: 10% 13795 30% 13795 50% 13795 90% 13795 99% 13795 ms Write: 10% 14730 30% 14730 50% 14730 90% 14730 99% 14730 ms Write: 10% 14165 30% 14165 50% 14165 90% 14165 99% 14165 ms Write: 10% 21055 30% 21055 50% 21055 90% 21055 99% 21055 ms Write: 10% 8208 30% 8208 50% 8208 90% 8208 99% 8208 ms Write: 10% 12925 30% 12925 50% 12925 90% 12925 99% 12925 ms Write: 10% 5193 30% 5193 50% 5193 90% 5193 99% 5193 ms Write: 10% 4611 30% 4611 50% 4611 90% 4611 99% 4611 ms Write: 10% 14774 30% 14774 50% 14774 90% 14774 99% 14774 ms Write: 10% 5646 30% 5646 50% 5646 90% 5646 99% 5646 ms Write: 10% 8460 30% 8460 50% 8460 90% 8460 99% 8460 ms Write: 10% 4986 30% 4986 50% 4986 90% 4986 99% 4986 ms Write: 10% 4802 30% 4802 50% 4802 90% 4802 99% 4802 ms Write: 10% 4120 30% 4120 50% 4120 90% 4120 99% 4120 ms Write: 10% 7872 30% 7872 50% 7872 90% 7872 99% 7872 ms Write: 10% 5099 30% 5099 50% 5099 90% 5099 99% 5099 ms Update: 10% 1693 30% 1693 50% 1693 90% 1693 99% 1693 ms Step 4. read modify write Write: 10% 4577 30% 4577 50% 4577 90% 4577 99% 4577 ms Was written: 22.8515625 MiB, Speed: 0.17252604166666666 MiB/s Write: 10% 7115 30% 7115 50% 7115 90% 7115 99% 7115 ms Write: 10% 8120 30% 8120 50% 8120 90% 8120 99% 8120 ms Write: 10% 12400 30% 12400 50% 12400 90% 12400 99% 12400 ms Write: 10% 13788 30% 13788 50% 13788 90% 13788 99% 13788 ms Write: 10% 15236 30% 15236 50% 15236 90% 15236 99% 15236 ms Write: 10% 17995 30% 17995 50% 17995 90% 17995 99% 17995 ms Write: 10% 20833 30% 20833 50% 20833 90% 20833 99% 20833 ms Write: 10% 21262 30% 21262 50% 21262 90% 21262 99% 21262 ms Write: 10% 20525 30% 20525 50% 20525 90% 20525 99% 20525 ms Write: 10% 22638 30% 22638 50% 22638 90% 22638 99% 22638 ms Write: 10% 20192 30% 20192 50% 20192 90% 20192 99% 20192 ms Write: 10% 22730 30% 22730 50% 22730 90% 22730 99% 22730 ms Write: 10% 23027 30% 23027 50% 23027 90% 23027 99% 23027 ms Write: 10% 23063 30% 23063 50% 23063 90% 23063 99% 23063 ms Write: 10% 21747 30% 21747 50% 21747 90% 21747 99% 21747 ms Write: 10% 18535 30% 18535 50% 18535 90% 18535 99% 18535 ms Write: 10% 16732 30% 16732 50% 16732 90% 16732 99% 16732 ms Write: 10% 20968 30% 20968 50% 20968 90% 20968 99% 20968 ms Write: 10% 21030 30% 21030 50% 21030 90% 21030 99% 21030 ms Write: 10% 17344 30% 17344 50% 17344 90% 17344 99% 17344 ms Write: 10% 17704 30% 17704 50% 17704 90% 17704 99% 17704 ms Write: 10% 15796 30% 15796 50% 15796 90% 15796 99% 15796 ms Write: 10% 10044 30% 10044 50% 10044 90% 10044 99% 10044 ms Write: 10% 15290 30% 15290 50% 15290 90% 15290 99% 15290 ms Write: 10% 15418 30% 15418 50% 15418 90% 15418 99% 15418 ms Write: 10% 9986 30% 9986 50% 9986 90% 9986 99% 9986 ms Write: 10% 6880 30% 6880 50% 6880 90% 6880 99% 6880 ms Write: 10% 7133 30% 7133 50% 7133 90% 7133 99% 7133 ms Write: 10% 14236 30% 14236 50% 14236 90% 14236 99% 14236 ms Write: 10% 10841 30% 10841 50% 10841 90% 10841 99% 10841 ms Write: 10% 7304 30% 7304 50% 7304 90% 7304 99% 7304 ms 
Write: 10% 12953 30% 12953 50% 12953 90% 12953 99% 12953 ms Write: 10% 12956 30% 12956 50% 12956 90% 12956 99% 12956 ms Write: 10% 12053 30% 12053 50% 12053 90% 12053 99% 12053 ms Write: 10% 12274 30% 12274 50% 12274 90% 12274 99% 12274 ms Write: 10% 11838 30% 11838 50% 11838 90% 11838 99% 11838 ms Write: 10% 8036 30% 8036 50% 8036 90% 8036 99% 8036 ms Write: 10% 11934 30% 11934 50% 11934 90% 11934 99% 11934 ms Write: 10% 17050 30% 17050 50% 17050 90% 17050 99% 17050 ms Write: 10% 13572 30% 13572 50% 13572 90% 13572 99% 13572 ms Write: 10% 8360 30% 8360 50% 8360 90% 8360 99% 8360 ms Write: 10% 4726 30% 4726 50% 4726 90% 4726 99% 4726 ms Write: 10% 4761 30% 4761 50% 4761 90% 4761 99% 4761 ms Write: 10% 4757 30% 4757 50% 4757 90% 4757 99% 4757 ms Write: 10% 4324 30% 4324 50% 4324 90% 4324 99% 4324 ms Write: 10% 8656 30% 8656 50% 8656 90% 8656 99% 8656 ms Write: 10% 7214 30% 7214 50% 7214 90% 7214 99% 7214 ms Write: 10% 15288 30% 15288 50% 15288 90% 15288 99% 15288 ms Write: 10% 6934 30% 6934 50% 6934 90% 6934 99% 6934 ms Write: 10% 4118 30% 4118 50% 4118 90% 4118 99% 4118 ms Write: 10% 4138 30% 4138 50% 4138 90% 4138 99% 4138 ms Write: 10% 6634 30% 6634 50% 6634 90% 6634 99% 6634 ms Write: 10% 10905 30% 10905 50% 10905 90% 10905 99% 10905 ms Write: 10% 6233 30% 6233 50% 6233 90% 6233 99% 6233 ms Write: 10% 12369 30% 12369 50% 12369 90% 12369 99% 12369 ms Write: 10% 10894 30% 10894 50% 10894 90% 10894 99% 10894 ms Write: 10% 6554 30% 6554 50% 6554 90% 6554 99% 6554 ms Write: 10% 7077 30% 7077 50% 7077 90% 7077 99% 7077 ms Write: 10% 9684 30% 9684 50% 9684 90% 9684 99% 9684 ms Write: 10% 5779 30% 5779 50% 5779 90% 5779 99% 5779 ms Write: 10% 7217 30% 7217 50% 7217 90% 7217 99% 7217 ms Write: 10% 9281 30% 9281 50% 9281 90% 9281 99% 9281 ms Write: 10% 5364 30% 5364 50% 5364 90% 5364 99% 5364 ms Read: 10% 4940 30% 5081 50% 5223 90% 25616 99% 30204 ms Update: 10% 346 30% 1039 50% 1731 90% 3116 99% 3428 ms >> DstCreator::SameOwner |84.2%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-4 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 >> DataShardWrite::ExecSQLUpsertPrepared-EvWrite+Volatile [GOOD] >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 >> KqpResultSetFormats::ArrowFormat_Types_Optional_3 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Optional_4 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 >> DataShardWrite::UpsertLostPrepareArbiter [GOOD] >> DataShardWrite::UpsertBrokenLockArbiter >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 >> DstCreator::ColumnsSizeMismatch >> TGRpcCmsTest::DisabledTxTest [GOOD] >> 
VDiskIOTest::HugeBlobIOCount [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_12_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 12] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_13_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 13] >> Cdc::DocApi[YdsRunner] [GOOD] >> Cdc::DocApi[TopicRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::BackupRestoreCoveringIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:25:46.457511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:25:46.457609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:46.457647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:25:46.457683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:25:46.457718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:25:46.457765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:25:46.458199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:46.458308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:25:46.459110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:25:46.459376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:25:46.573002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:25:46.573072Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:46.596283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:25:46.600701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:25:46.600903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:25:46.634625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:25:46.634836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:25:46.635486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:25:46.640735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:25:46.654538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:46.654778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:25:46.656285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:25:46.656348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:46.656484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:25:46.656526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:25:46.656568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:25:46.656711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:25:46.867123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.868026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.868164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.868252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.868347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.868425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.868498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.868561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.868646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.868745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.868827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 
281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.868899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.868968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.869073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.869156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
p:84: NIncrRestoreState::TConfigurePartsAtTable operationId: 281474976710694:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:27:14.891876Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:27:14.898817Z node 25 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:135: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 108 2026-01-04T02:27:14.901221Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:27:14.901311Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710693:0 from tablet: 72057594046678944 to tablet: 72075186233409554 cookie: 72057594046678944:9 msg type: 269549568 2026-01-04T02:27:14.901474Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710693, partId: 0, tablet: 72075186233409554 2026-01-04T02:27:14.901618Z node 25 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:135: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 108 2026-01-04T02:27:14.902326Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:27:14.902373Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710694:0 from tablet: 72057594046678944 to tablet: 72075186233409555 cookie: 72057594046678944:12 msg type: 269549568 2026-01-04T02:27:14.902458Z node 25 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710694, partId: 0, tablet: 72075186233409555 TestWaitNotification: OK eventTxId 108 2026-01-04T02:27:14.903719Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [25:2585:4203], Recipient [25:127:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/CoverTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-04T02:27:14.903826Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:27:14.904026Z node 25 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CoverTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:27:14.904503Z node 25 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/CoverTable" took 488us result status StatusSuccess 2026-01-04T02:27:14.914021Z node 25 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CoverTable" PathDescription { Self { Name: "CoverTable" PathId: 65 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000045 ParentPathId: 1 PathState: EPathStateIncomingIncrementalRestore Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "CoverTable" Columns { Name: "id" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "name" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "age" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "ega" Type: "Uint64" TypeId: 4 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "id" KeyColumnIds: 1 TableIndexes { Name: "idx_name_age" LocalPathId: 66 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "name" KeyColumnNames: "age" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "ega" DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 55 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 65 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:27:14.915486Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [25:2586:4204], Recipient [25:127:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/CoverTable/idx_name_age" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2026-01-04T02:27:14.915592Z node 25 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:27:14.915799Z node 25 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CoverTable/idx_name_age" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:27:14.916280Z node 25 :SCHEMESHARD_DESCRIBE INFO: 
schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/CoverTable/idx_name_age" took 486us result status StatusSuccess 2026-01-04T02:27:14.917388Z node 25 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CoverTable/idx_name_age" PathDescription { Self { Name: "idx_name_age" PathId: 66 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 108 CreateStep: 5000045 ParentPathId: 65 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 67 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 108 CreateStep: 5000045 ParentPathId: 66 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 55 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "idx_name_age" LocalPathId: 66 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "name" KeyColumnNames: "age" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataColumnNames: "ega" DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 
100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } FollowerGroups { } } } } } PathId: 66 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DisabledTxTest [GOOD] Test command err: 2026-01-04T02:27:13.345521Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327352606634324:2267];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:13.346148Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:27:13.848035Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:27:13.907501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:27:13.907592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:27:13.946591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:27:14.164508Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:14.177902Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:27:14.230217Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.005799s 
2026-01-04T02:27:14.371196Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:27:14.426528Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:27:14.426554Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:27:14.426565Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:27:14.426637Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:27:14.935289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:27:15.205349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 281474976715658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) 2026-01-04T02:27:15.317449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |84.2%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest >> KqpResultSetFormats::ArrowFormat_Types_Dict_1 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Dict_2 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 |84.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_snapshot/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBackupCollectionTests::SingleTableWithGlobalSyncIndex [GOOD] >> TBackupCollectionTests::SingleTableWithMultipleGlobalSyncIndexes >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |84.2%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut_ftol/ydb-core-blobstorage-dsproxy-ut_ftol >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 >> DataShardWrite::ExecSQLUpsertPrepared+EvWrite+Volatile [GOOD] >> DataShardWrite::InsertImmediate >> DstCreator::ExistingDst >> alter_compression.py::TestAlterCompression::test_availability_data |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |84.2%| [LD] {RESULT} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/client/server/ut/ydb-core-client-server-ut >> KqpResultSetFormats::ArrowFormat_Types_Optional_4 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_2 >> DstCreator::ColumnsSizeMismatch [GOOD] >> DstCreator::ColumnTypeMismatch >> DataShardWrite::UpsertPreparedNoTxCache-Volatile [GOOD] >> DataShardWrite::WriteCommitVersion >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_alter_tiering.py::TestAlterTiering::test_multi[many_tables] [GOOD] Test command err: Thread 0 failed |84.2%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] >> DstCreator::SameOwner [GOOD] >> DstCreator::SamePartitionCount >> DataShardWrite::UpsertBrokenLockArbiter [GOOD] >> DataShardWrite::UpsertNoLocksArbiterRestart >> Cdc::NewImageLogDebezium [GOOD] >> Cdc::NaN[PqRunner] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 >> Cdc::VirtualTimestamps[TopicRunner] [GOOD] >> Cdc::Write[PqRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndexAndIntermediateDir [GOOD] Test command err: 2026-01-04T02:27:15.120369Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327359911005635:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:15.120793Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:27:15.695506Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:27:15.698627Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2026-01-04T02:27:15.698714Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:27:15.835061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:27:15.864346Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:15.901484Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327359911005411:2081] 1767493635080549 != 1767493635080552 2026-01-04T02:27:16.042765Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:27:16.118770Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:27:16.654381Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:27:16.654401Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:27:16.654408Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:27:16.654472Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:27:17.485778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:27:17.492462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:27:17.496393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:27:18.397685Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-04T02:27:18.397707Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:27:18.398299Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle 
NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:27:20.121790Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591327359911005635:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:20.121893Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:27:22.670135Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767493638176, tx_id: 281474976710658 } } } 2026-01-04T02:27:22.670602Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-04T02:27:22.672859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:27:22.688434Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-01-04T02:27:22.688462Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-01-04T02:27:22.784364Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-01-04T02:27:22.955968Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dir/Replicated" PathDescription { Self { Name: "Replicated" PathId: 42 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767493642803 ParentPathId: 41 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 43 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompa ... ImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false 
IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 44 PathOwnerId: 72057594046644480 } 2026-01-04T02:27:22.979923Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 44] Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 44 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767493642803 ParentPathId: 43 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } 
Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037905 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 
10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Dir/Replicated/index_by_value/indexImplTable" |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> TKeyValueTest::TestObtainLockNewApi >> KqpScanArrowInChanels::SingleKey [GOOD] >> KqpScanArrowInChanels::AggregateByColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_vdisk_io/unittest >> VDiskIOTest::HugeBlobIOCount [GOOD] Test command err: Total puts# 5000 Total vdisk writes# 10093 Total log vdisk writes# 5084 Total chunk vdisk writes# 5009 Log vdisk writes per put# 1.0168 Chunk vdisk writes per put# 1.0018 VDisk writes per put# 2.0186 DeviceWrites 10091 WriteLog requests 5084 WriteHugeUser requests 5000 |84.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_vdisk_io/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_18_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 18] [FAIL] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_19_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 19] >> TBackupCollectionTests::SingleTableWithMultipleGlobalSyncIndexes [GOOD] >> TBackupCollectionTests::TableWithMixedIndexTypes >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::RemoveWithAnotherTokenTest [GOOD] Test command err: 2026-01-04T02:27:11.942319Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.009756s 2026-01-04T02:27:11.987766Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639248 Duration# 0.008196s 2026-01-04T02:27:11.990373Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327344713393399:2200];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:11.990460Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:27:12.083411Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:27:12.816640Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:27:12.906513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:27:12.906611Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:27:12.920553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:27:13.018828Z node 1 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:27:13.157225Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:13.198290Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.125807s 2026-01-04T02:27:13.198356Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.125898s 2026-01-04T02:27:13.219250Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:27:13.532853Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:27:13.532886Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:27:13.532918Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:27:13.533128Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:27:14.275381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:27:15.024150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:27:15.107148Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7591327361893263657:2303], Recipient [1:7591327349008361084:2199]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\000" PeerName: "ipv6:[::1]:47526" } 2026-01-04T02:27:15.107197Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2026-01-04T02:27:15.107216Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:27:15.107226Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:27:15.107395Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\000" PeerName: "ipv6:[::1]:47526" 2026-01-04T02:27:15.107594Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1767493635106261) 2026-01-04T02:27:15.222174Z node 1 
:CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1767493635106261 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-01-04T02:27:15.222407Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2026-01-04T02:27:15.234643Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-01-04T02:27:15.235688Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767493635106261&action=1" } } } 2026-01-04T02:27:15.235846Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:27:15.235930Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-04T02:27:15.236067Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-04T02:27:15.236534Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-01-04T02:27:15.236642Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-01-04T02:27:15.247276Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7591327361893263666:2304], Recipient [1:7591327349008361084:2199]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767493635106261&action=1" } UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\000" } 2026-01-04T02:27:15.247307Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-04T02:27:15.247557Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767493635106261&action=1" } } 2026-01-04T02:27:15.262064Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-01-04T02:27:15.262134Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with 
NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-04T02:27:15.262212Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7591327361893263662:2199], Recipient [1:7591327349008361084:2199]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-04T02:27:15.262252Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-01-04T02:27:15.262269Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:27:15.262277Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:27:15.262328Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-01-04T02:27:15.262345Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2026-01-04T02:27:15.262427Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-01-04T02:27:15.282826Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-04T02:27:15.282880Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:27:15.282894Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:27:15.282901Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:27:15.282965Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-01-04T02:27:15.282987Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1767493635106261 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-04T02:27:15.290825Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-04T02:27:15.341294Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:27:15.341408Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2026-01-04T02:27:15.341420Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2026-01-04T02:27:15.361894Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\016user-1@builtin\022\030\022\026\n\024all-users@well-known\032\016user-1@builtin\"\007Builtin*\017**** (E3DE7296)0\00 ... 
cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:7 2026-01-04T02:27:17.719515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:4 2026-01-04T02:27:17.719572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:10 2026-01-04T02:27:17.721525Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710663 2026-01-04T02:27:17.721545Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-01-04T02:27:17.721596Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-04T02:27:17.721695Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7591327370483199032:2199], Recipient [1:7591327349008361084:2199]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-04T02:27:17.721732Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2026-01-04T02:27:17.721747Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:27:17.721756Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:27:17.721786Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2026-01-04T02:27:17.721823Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1767493637641786 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2026-01-04T02:27:17.721892Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1767493637641786 issue=AccessDenied: Access denied for request 2026-01-04T02:27:17.730728Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2026-01-04T02:27:17.730810Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2105: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2026-01-04T02:27:17.730823Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:27:17.731457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:9 2026-01-04T02:27:17.731511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:6 2026-01-04T02:27:17.731547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:3 2026-01-04T02:27:17.731586Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:8 2026-01-04T02:27:17.731631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:5 2026-01-04T02:27:17.731982Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7591327349008360924:2204], Recipient [1:7591327349008361084:2199]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-04T02:27:17.731996Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-04T02:27:17.732020Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:27:17.732033Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:27:17.732064Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2026-01-04T02:27:17.732083Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1767493637641786 errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2026-01-04T02:27:17.754504Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-04T02:27:17.754558Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:27:17.754597Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-04T02:27:17.754729Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-04T02:27:17.755439Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 39 } ItemConfigGeneration: 2 } } Success: true ConfigTxSeqNo: 10 2026-01-04T02:27:17.755510Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 2 } } } 2026-01-04T02:27:17.775513Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-01-04T02:27:17.790715Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 11 2026-01-04T02:27:17.790809Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7591327370483199125:2199], Recipient [1:7591327349008361084:2199]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2026-01-04T02:27:17.790875Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event 
TEvPrivate::TEvPoolDeleted 2026-01-04T02:27:17.790894Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:27:17.790903Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:27:17.790942Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2026-01-04T02:27:17.790958Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2026-01-04T02:27:17.809682Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7591327370483199145:2398], Recipient [1:7591327349008361084:2199]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767493637641786&action=2" } UserToken: "" } 2026-01-04T02:27:17.809711Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-04T02:27:17.809888Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767493637641786&action=2" } } 2026-01-04T02:27:17.827821Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-04T02:27:17.827855Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:27:17.827862Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:27:17.827868Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:27:17.827938Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1767493637641786 2026-01-04T02:27:17.827954Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1767493637641786 issue=AccessDenied: Access denied for request 2026-01-04T02:27:17.829387Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2949: Remove tenant /Root/users/user-1 from database txid=1767493637641786 issue=AccessDenied: Access denied for request 2026-01-04T02:27:17.829420Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2954: Remove pool /Root/users/user-1:hdd from database 2026-01-04T02:27:17.833392Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3074: Add tenant removal info for /Root/users/user-1 txid=1767493637641786 code=SUCCESS errorcode=UNAUTHORIZED issue=AccessDenied: Access denied for request 2026-01-04T02:27:17.828146Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639260 Duration# 0.008886s 2026-01-04T02:27:17.842952Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2026-01-04T02:27:17.842998Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:27:17.847270Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.014507s 2026-01-04T02:27:17.883593Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7591327370483199153:2400], Recipient [1:7591327349008361084:2199]: 
NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767493637641786&action=2" } UserToken: "" } 2026-01-04T02:27:17.883619Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-04T02:27:17.883766Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767493637641786&action=2" ready: true status: SUCCESS } } 2026-01-04T02:27:17.904753Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-04T02:27:17.904899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-04T02:27:20.453265Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591327360964532182:2192];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:20.453347Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=timeout; |84.2%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest >> TKesusTest::TestAttachNewSessions |84.2%| [TA] $(B)/ydb/core/blobstorage/ut_vdisk_io/test-results/unittest/{meta.json ... results_accumulator.log} |84.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_vdisk_io/test-results/unittest/{meta.json ... results_accumulator.log} |84.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |84.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache |84.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_vdisk_io/test-results/unittest/{meta.json ... 
results_accumulator.log} |84.3%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/ydb-core-tx-scheme_board-ut_cache >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 >> DstCreator::ExistingDst [GOOD] >> DstCreator::EmptyReplicationConfig >> DstCreator::ColumnTypeMismatch [GOOD] >> KqpScanArrowFormat::AggregateByColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumn >> DataShardWrite::InsertImmediate [GOOD] >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace >> TKesusTest::TestAttachNewSessions [GOOD] >> TKesusTest::TestAttachMissingSession >> KqpResultSetFormats::ArrowFormat_Types_Dict_2 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 >> TKesusTest::TestAttachMissingSession [GOOD] >> TKesusTest::TestAttachOldGeneration >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 >> TKesusTest::TestAttachOldGeneration [GOOD] >> TKesusTest::TestAttachFastPath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ColumnTypeMismatch [GOOD] Test command err: 2026-01-04T02:27:20.246493Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327382485451948:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:20.247848Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:27:20.597910Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:27:20.621737Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:27:20.621992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:27:20.702181Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:20.705999Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327382485451735:2081] 1767493640225544 != 1767493640225547 2026-01-04T02:27:20.709389Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:27:20.785685Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:27:21.106479Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:27:21.106500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:27:21.106513Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:27:21.106607Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:27:21.210097Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:27:21.694655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:27:21.711649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:27:21.715303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:27:21.986121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:27:22.083607Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-04T02:27:22.083629Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:27:22.084298Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:27:24.940158Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767493641879, tx_id: 281474976710658 } } } 2026-01-04T02:27:24.940531Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-04T02:27:25.019113Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] 
Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-04T02:27:25.022460Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767493642110 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "extra" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { 
MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-04T02:27:25.022758Z node 1 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Columns size mismatch: expected: 2, got: 3 2026-01-04T02:27:25.234044Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591327382485451948:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:25.234109Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:27:26.038940Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591327407534069020:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:26.039184Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:27:26.105610Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:27:26.281903Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
LookupError, path: Root/.metadata/script_executions 2026-01-04T02:27:26.329630Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:27:26.329699Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:27:26.366186Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:26.370083Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591327407534068918:2081] 1767493646020925 != 1767493646020928 2026-01-04T02:27:26.403323Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:27:26.686741Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:27:26.998424Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:27:26.998454Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:27:26.998460Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:27:26.998535Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:27:27.045982Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:27:27.376761Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:27:27.383542Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:27:27.386462Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:27:27.461781Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:27:27.519813Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 
72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-04T02:27:27.519836Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:27:27.520381Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:27:30.381218Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767493647493, tx_id: 281474976710658 } } } 2026-01-04T02:27:30.381519Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-04T02:27:30.383129Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-04T02:27:30.384355Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767493647556 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 
ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-04T02:27:30.384650Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Column type mismatch: name: value, expected: Utf8, got: Uint32 |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> TKesusTest::TestAttachFastPath [GOOD] >> TKesusTest::TestAttachFastPathBlocked >> DataShardWrite::UpsertNoLocksArbiterRestart [GOOD] >> 
DataShardWrite::UpsertLostPrepareArbiterRestart >> KqpResultSetFormats::ArrowFormat_Types_List_2 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_List_3 >> KqpResultSetFormats::ArrowFormat_LargeTable_Limit [GOOD] >> KqpResultSetFormats::ArrowFormat_Returning+isOlap >> DstCreator::SamePartitionCount [GOOD] >> TKesusTest::TestAttachFastPathBlocked [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |84.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base_reboots/ydb-core-tx-schemeshard-ut_base_reboots >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAttachFastPathBlocked [GOOD] Test command err: 2026-01-04T02:27:31.334676Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:27:31.334793Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:27:31.354438Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:27:31.354548Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:27:31.382276Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:27:31.382800Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:134:2159], cookie=16516147359558347688, session=0, seqNo=0) 2026-01-04T02:27:31.382976Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:27:31.405471Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:134:2159], cookie=16516147359558347688, session=1) 2026-01-04T02:27:31.405834Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:134:2159], cookie=9403931995674937152, session=0, seqNo=0) 2026-01-04T02:27:31.405984Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-04T02:27:31.420320Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:134:2159], cookie=9403931995674937152, session=2) 2026-01-04T02:27:31.993867Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:27:31.993994Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:27:32.010299Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:27:32.010433Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:27:32.046609Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:27:32.047173Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2161], cookie=2959852714248475550, 
session=1, seqNo=0) 2026-01-04T02:27:32.066440Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2161], cookie=2959852714248475550, session=1) 2026-01-04T02:27:32.650514Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:27:32.650622Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:27:32.669035Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:27:32.669577Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:27:32.710092Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:27:32.710970Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=16298081978189536785, session=0, seqNo=0) 2026-01-04T02:27:32.711120Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:27:32.736123Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=16298081978189536785, session=1) 2026-01-04T02:27:33.554273Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:27:33.554396Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:27:33.597015Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:27:33.597623Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:27:33.636682Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:27:33.637065Z node 4 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[4:137:2161], cookie=13527104050656543407, path="") 2026-01-04T02:27:33.655052Z node 4 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[4:137:2161], cookie=13527104050656543407, status=SUCCESS) 2026-01-04T02:27:33.656029Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:146:2168], cookie=5679179877885855207, session=0, seqNo=0) 2026-01-04T02:27:33.656180Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:27:33.672051Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:146:2168], cookie=5679179877885855207, session=1) 2026-01-04T02:27:33.672856Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:147:2169], cookie=111, session=0, seqNo=0) 2026-01-04T02:27:33.672999Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-04T02:27:33.673193Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:262: [72057594037927937] Fast-path attach session=1 to sender=[4:147:2169], cookie=222, seqNo=0 2026-01-04T02:27:33.685636Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:147:2169], cookie=111, session=2) 2026-01-04T02:27:34.593658Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 
2026-01-04T02:27:34.593774Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:27:34.618871Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:27:34.618992Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:27:34.635321Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:27:34.635665Z node 5 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[5:135:2159], cookie=12147774736350703146, path="") 2026-01-04T02:27:34.668752Z node 5 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[5:135:2159], cookie=12147774736350703146, status=SUCCESS) 2026-01-04T02:27:34.669939Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:144:2166], cookie=3888335732682304746, session=0, seqNo=0) 2026-01-04T02:27:34.670112Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:27:34.684079Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:144:2166], cookie=3888335732682304746, session=1) 2026-01-04T02:27:34.685004Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:144:2166], cookie=123, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-04T02:27:34.685200Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-04T02:27:34.685302Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-04T02:27:34.685727Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:145:2167], cookie=111, session=0, seqNo=0) 2026-01-04T02:27:34.685852Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-04T02:27:34.686007Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:145:2167], cookie=222, session=1, seqNo=0) 2026-01-04T02:27:34.699950Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:144:2166], cookie=123) 2026-01-04T02:27:34.700044Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:145:2167], cookie=111, session=2) 2026-01-04T02:27:34.700099Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:145:2167], cookie=222, session=1) |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types12-all_types12-index12---] [GOOD] >> DstCreator::EmptyReplicationConfig [GOOD] |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut |84.3%| [LD] {RESULT} $(B)/ydb/core/fq/libs/common/ut/ydb-core-fq-libs-common-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-11 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 ------- [TM] 
{asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::SamePartitionCount [GOOD] Test command err: 2026-01-04T02:27:16.337449Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327363368222053:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:16.342340Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:27:16.976217Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:27:17.078541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:27:17.078632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:27:17.153894Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327363368221882:2081] 1767493636282502 != 1767493636282505 2026-01-04T02:27:17.157161Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:17.175399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:27:17.290253Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:27:17.345900Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:27:17.910609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:27:17.910639Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:27:17.910648Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:27:17.910722Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:27:18.694649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:27:18.712122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:27:18.717797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:27:19.029652Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-04T02:27:19.029674Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:27:19.030259Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:27:21.333266Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591327363368222053:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:21.333342Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:27:25.451337Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767493638883, tx_id: 281474976715659 } } } 2026-01-04T02:27:25.451692Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-04T02:27:25.453260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:27:25.454616Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715660} 2026-01-04T02:27:25.454642Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715660 2026-01-04T02:27:25.526657Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715660 2026-01-04T02:27:25.526684Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-04T02:27:27.138764Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591327413908708644:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:27.138890Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:27:27.156524Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:27:27.398366Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:27.398583Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:27:27.402044Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591327413908708603:2081] 1767493647105459 != 1767493647105462 2026-01-04T02:27:27.439923Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:27:27.440012Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:27:27.504008Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:27:27.604280Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:27:28.145985Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:27:28.394418Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:27:28.394439Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:27:28.394445Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:27:28.394538Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:27:28.856567Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:27:28.864770Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:27:28.994672Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params 
{ Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-04T02:27:28.994692Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:27:28.995134Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:27:32.119171Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591327413908708644:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:32.119238Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:27:33.511115Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767493649012, tx_id: 281474976710658 } } } 2026-01-04T02:27:33.511573Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-04T02:27:33.513368Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:27:33.519640Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710659} 2026-01-04T02:27:33.519666Z node 2 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976710659 2026-01-04T02:27:33.622438Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710659 2026-01-04T02:27:33.622475Z node 2 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] >> TBlobStorageProxyTest::TestBlock |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |84.3%| [LD] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/dsproxy/ut/ydb-core-blobstorage-dsproxy-ut >> TBackupCollectionTests::TableWithMixedIndexTypes [GOOD] >> TBackupCollectionTests::MultipleTablesWithIndexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::EmptyReplicationConfig [GOOD] Test command err: 2026-01-04T02:27:25.185151Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327402181173381:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:25.185251Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:27:25.510072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:27:25.685529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:27:25.685658Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:27:25.713968Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327402181173324:2081] 1767493645181317 != 1767493645181320 2026-01-04T02:27:25.736347Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:25.758393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:27:25.767753Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:27:26.213826Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:27:26.458712Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:27:26.458746Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:27:26.458753Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:27:26.458829Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:27:26.931644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:27:26.939087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:27:26.942821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:27:27.165094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:27:27.212861Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-04T02:27:27.212882Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:27:27.214720Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:27:30.123909Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767493647080, tx_id: 281474976715658 } } } 2026-01-04T02:27:30.124308Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-04T02:27:30.169842Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-04T02:27:30.173428Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1767493647248 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 
10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-04T02:27:30.173743Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-04T02:27:30.189961Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591327402181173381:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:30.190072Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:27:31.316260Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:27:31.317047Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591327427656606739:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:31.317086Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:27:31.473920Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:27:31.474786Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:27:31.474859Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:27:31.489989Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591327427656606527:2081] 1767493651283456 != 1767493651283459 2026-01-04T02:27:31.513641Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:31.522985Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:27:31.766679Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:27:31.980887Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:27:31.980903Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:27:31.980908Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:27:31.980987Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:27:32.277708Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:27:32.326197Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:27:32.332795Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:27:32.336051Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:27:32.403917Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:27:32.458501Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-04T02:27:32.458524Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:27:32.459104Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:27:35.466437Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767493652435, tx_id: 281474976710658 } } } 2026-01-04T02:27:35.466732Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-04T02:27:35.468214Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-04T02:27:35.469188Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767493652477 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 
Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-04T02:27:35.469366Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Empty replication config >> DataShardWrite::ImmediateAndPlannedCommittedOpsRace [GOOD] >> DataShardWrite::DoubleWriteUncommittedThenDoubleReadWithCommit |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |84.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/ydb-core-tx-schemeshard-ut_user_attributes >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_NONE [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS [GOOD] >> TModificationsValidatorTests::TestIsValidationRequired_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_RemoveItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsSameScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_TENANTS_AND_NODE_TYPES [GOOD] >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TBlobStorageProxyTest::TestBlock [GOOD] >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-49 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline [GOOD] Test command err: 2026-01-04T02:26:14.426174Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:26:14.427989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:26:14.428045Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:14.437959Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:26:14.438370Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:26:14.438655Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:26:14.488879Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:26:14.497311Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:26:14.497426Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:26:14.499232Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:26:14.499313Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:26:14.499392Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:26:14.499843Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:26:14.501348Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:26:14.501458Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:26:14.623840Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:26:14.663628Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: 
Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:26:14.663838Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:26:14.663971Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:26:14.664012Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:26:14.664065Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:26:14.664106Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:26:14.664339Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:14.664388Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:14.664671Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:26:14.664767Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:26:14.664842Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:26:14.664880Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:26:14.664914Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:26:14.664968Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:26:14.665006Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:26:14.665050Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:26:14.665106Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:26:14.665199Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:14.665233Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:14.665296Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:26:14.679160Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:26:14.679254Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 
2026-01-04T02:26:14.679374Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:26:14.679551Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:26:14.679606Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:26:14.679662Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:26:14.679723Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:26:14.679776Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:26:14.679810Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:26:14.679841Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:26:14.680155Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:26:14.680193Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:26:14.680225Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:26:14.680256Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:26:14.680305Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:26:14.680356Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:26:14.680389Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:26:14.680433Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:26:14.680462Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:26:14.698454Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:26:14.698534Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:26:14.698575Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:26:14.698632Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:26:14.698705Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:26:14.699290Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:14.699351Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:14.699393Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:26:14.699527Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:26:14.699562Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:26:14.699685Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:26:14.699724Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:26:14.699769Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:26:14.745930Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:26:14.754104Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:26:14.754199Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:26:14.754494Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:14.754536Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:14.754597Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:26:14.754640Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:26:14.754696Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:26:14.754744Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:26:14.754780Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
02:27:38.606508Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437184 on unit StoreAndSendOutRS 2026-01-04T02:27:38.606545Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 50 at 9437184 from 9437184 to 9437186 txId 152 2026-01-04T02:27:38.606607Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:27:38.606643Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2026-01-04T02:27:38.606679Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:27:38.606721Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-04T02:27:38.701163Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:27:38.701406Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:27:38.701447Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2026-01-04T02:27:38.703140Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:27:38.703228Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-04T02:27:38.703282Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:27:38.704281Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-04T02:27:38.704339Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:27:38.704405Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2026-01-04T02:27:38.704661Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-04T02:27:38.704716Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:27:38.704749Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2026-01-04T02:27:38.704813Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [1:238:2230], Recipient [1:460:2402]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-01-04T02:27:38.705542Z node 
1 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-04T02:27:38.705582Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2026-01-04T02:27:38.705740Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-01-04T02:27:38.705792Z node 1 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2026-01-04T02:27:38.705900Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-04T02:27:38.706075Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-04T02:27:38.706108Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:27:38.706152Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-04T02:27:38.706251Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:460:2402], Recipient [1:460:2402]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:27:38.706291Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:27:38.706342Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2026-01-04T02:27:38.706379Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:27:38.706416Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2026-01-04T02:27:38.706445Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2026-01-04T02:27:38.706478Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-04T02:27:38.706508Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2026-01-04T02:27:38.706548Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit BlockFailPoint 2026-01-04T02:27:38.706583Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit BlockFailPoint 2026-01-04T02:27:38.706607Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-04T02:27:38.706627Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit BlockFailPoint 2026-01-04T02:27:38.706664Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2026-01-04T02:27:38.706686Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on 
unit ExecuteDataTx 2026-01-04T02:27:38.707211Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2026-01-04T02:27:38.707264Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-04T02:27:38.707313Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2026-01-04T02:27:38.707343Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2026-01-04T02:27:38.707377Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2026-01-04T02:27:38.707412Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2026-01-04T02:27:38.707602Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is DelayComplete 2026-01-04T02:27:38.707628Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2026-01-04T02:27:38.707655Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2026-01-04T02:27:38.707682Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2026-01-04T02:27:38.707717Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-04T02:27:38.707745Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2026-01-04T02:27:38.707785Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:152] at 9437186 has finished 2026-01-04T02:27:38.707819Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:27:38.809925Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-01-04T02:27:38.810012Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2026-01-04T02:27:38.810050Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2026-01-04T02:27:38.810467Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-04T02:27:38.810520Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:27:38.810562Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2026-01-04T02:27:38.830636Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
9437186 2026-01-04T02:27:38.830701Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-01-04T02:27:38.830766Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-04T02:27:38.830848Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-04T02:27:38.830885Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:27:38.831213Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-04T02:27:38.831262Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:27:38.831295Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_AddItems_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsExpandScope_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_NONE [GOOD] >> TModificationsValidatorTests::TestIndexAndModificationsShrink_ModifyItemsNarrowScope_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS [GOOD] >> 
TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] >> Cdc::Write[PqRunner] [GOOD] >> Cdc::Write[YdsRunner] |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mon/ut/ydb-core-mon-ut |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mon/ut/ydb-core-mon-ut |84.3%| [LD] {RESULT} $(B)/ydb/core/mon/ut/ydb-core-mon-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainUnaffected_TENANTS_AND_NODE_TYPES [GOOD] |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest |84.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestBatchedPutRequestDoesNotContainAHugeBlob [GOOD] |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 >> KqpScanArrowInChanels::AggregateByColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumn >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::UpsertLostPrepareArbiterRestart [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:26:39.208373Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:39.371048Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:26:39.371568Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:26:39.371629Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:26:39.762296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:39.762430Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:39.874570Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493595042056 != 1767493595042059 2026-01-04T02:26:39.897372Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:39.955313Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:40.087705Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:26:40.494598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:40.518504Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:40.653287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:40.699513Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:26:40.700596Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:26:40.700907Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-04T02:26:40.701184Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:26:40.766415Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:26:40.767790Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:26:40.767952Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:26:40.769879Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:26:40.769980Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:26:40.770035Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:26:40.770448Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:26:40.770599Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:26:40.770709Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:26:40.781567Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:26:40.831690Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:26:40.831927Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:26:40.832080Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:26:40.832144Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:26:40.832205Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:26:40.832245Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:40.832432Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:40.832495Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:40.832921Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:26:40.833041Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:26:40.833132Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:26:40.833178Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:26:40.833226Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:26:40.833266Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:26:40.833308Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:26:40.833348Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:26:40.833416Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-04T02:26:40.833554Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:40.833607Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:40.833674Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:26:40.834121Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-04T02:26:40.834192Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:26:40.834310Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:26:40.834583Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:26:40.834646Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:26:40.834749Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:26:40.834810Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:26:40.834852Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:26:40.834910Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T02:26:40.834948Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:26:40.835326Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:26:40.835385Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-04T02:26:40.835426Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:26:40.835464Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-04T02:26:40.835529Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-04T02:26:40.835569Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:26:40.835607Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-04T02:26:40.835642Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-04T02:26:40.835681Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-04T02:26:40.837619Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-04T02:26:40.837701Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:26:40.848583Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:26:40.848717Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474 ... 189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:27:41.966590Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037890, clientId# [10:1167:2975], serverId# [10:1168:2976], sessionId# [0:0:0] 2026-01-04T02:27:41.966719Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553169, Sender [10:1166:2974], Recipient [10:943:2797]: NKikimrTxDataShard.TEvGetInfoRequest 2026-01-04T02:27:41.969795Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [10:1171:2979], Recipient [10:943:2797]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:27:41.969881Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:27:41.969923Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037890, clientId# [10:1170:2978], serverId# [10:1171:2979], sessionId# [0:0:0] 2026-01-04T02:27:41.970182Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [10:1169:2977], Recipient [10:943:2797]: NKikimrTxDataShard.TEvRead ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2026-01-04T02:27:41.970327Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2026-01-04T02:27:41.970371Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037890 CompleteEdge# v2001/1000001 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-04T02:27:41.970415Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037890 changed HEAD read to non-repeatable v5000/18446744073709551615 2026-01-04T02:27:41.970462Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit CheckRead 2026-01-04T02:27:41.970541Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2026-01-04T02:27:41.970571Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit CheckRead 2026-01-04T02:27:41.970597Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037890 to execution unit BuildAndWaitDependencies 2026-01-04T02:27:41.970624Z node 10 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit BuildAndWaitDependencies 2026-01-04T02:27:41.970672Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037890 2026-01-04T02:27:41.970703Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2026-01-04T02:27:41.970727Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit BuildAndWaitDependencies 2026-01-04T02:27:41.970750Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037890 to execution unit ExecuteRead 2026-01-04T02:27:41.970777Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit ExecuteRead 2026-01-04T02:27:41.970860Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037890 Execute read# 1, request: { ReadId: 1002 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2026-01-04T02:27:41.971054Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037890 Complete read# {[10:1169:2977], 1002} after executionsCount# 1 2026-01-04T02:27:41.971104Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037890 read iterator# {[10:1169:2977], 1002} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:27:41.971171Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037890 read iterator# {[10:1169:2977], 1002} finished in read 2026-01-04T02:27:41.971221Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2026-01-04T02:27:41.971247Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit ExecuteRead 2026-01-04T02:27:41.971270Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037890 to execution unit CompletedOperations 2026-01-04T02:27:41.971295Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037890 on unit CompletedOperations 2026-01-04T02:27:41.971338Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037890 is Executed 2026-01-04T02:27:41.971360Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037890 executing on unit CompletedOperations 2026-01-04T02:27:41.971383Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037890 has finished 2026-01-04T02:27:41.971412Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2026-01-04T02:27:41.971491Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2026-01-04T02:27:41.972223Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [10:1174:2982], Recipient [10:936:2792]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:27:41.972274Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2026-01-04T02:27:41.973212Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037891, clientId# [10:1173:2981], serverId# [10:1174:2982], sessionId# [0:0:0] 2026-01-04T02:27:41.973441Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553169, Sender [10:1172:2980], Recipient [10:936:2792]: NKikimrTxDataShard.TEvGetInfoRequest 2026-01-04T02:27:41.974426Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [10:1177:2985], Recipient [10:936:2792]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:27:41.974471Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:27:41.974507Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037891, clientId# [10:1176:2984], serverId# [10:1177:2985], sessionId# [0:0:0] 2026-01-04T02:27:41.974689Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [10:1175:2983], Recipient [10:936:2792]: NKikimrTxDataShard.TEvRead ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2026-01-04T02:27:41.974828Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037891, FollowerId 0 2026-01-04T02:27:41.974873Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037891 CompleteEdge# v2000/281474976715657 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-04T02:27:41.974909Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037891 changed HEAD read to non-repeatable v5000/18446744073709551615 2026-01-04T02:27:41.974958Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit CheckRead 2026-01-04T02:27:41.975032Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2026-01-04T02:27:41.975061Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit CheckRead 2026-01-04T02:27:41.975106Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037891 to execution unit BuildAndWaitDependencies 2026-01-04T02:27:41.975138Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit BuildAndWaitDependencies 2026-01-04T02:27:41.975188Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037891 2026-01-04T02:27:41.975221Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2026-01-04T02:27:41.975247Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit BuildAndWaitDependencies 2026-01-04T02:27:41.975272Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037891 to execution unit ExecuteRead 2026-01-04T02:27:41.975299Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit ExecuteRead 2026-01-04T02:27:41.975380Z node 10 :TX_DATASHARD TRACE: 
datashard__read_iterator.cpp:1748: 72075186224037891 Execute read# 1, request: { ReadId: 1003 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2026-01-04T02:27:41.975516Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037891 Complete read# {[10:1175:2983], 1003} after executionsCount# 1 2026-01-04T02:27:41.975561Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037891 read iterator# {[10:1175:2983], 1003} sends rowCount# 0, bytes# 0, quota rows left# 18446744073709551615, quota bytes left# 18446744073709551615, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:27:41.975620Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037891 read iterator# {[10:1175:2983], 1003} finished in read 2026-01-04T02:27:41.975672Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2026-01-04T02:27:41.975696Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit ExecuteRead 2026-01-04T02:27:41.975721Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037891 to execution unit CompletedOperations 2026-01-04T02:27:41.975748Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037891 on unit CompletedOperations 2026-01-04T02:27:41.975795Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037891 is Executed 2026-01-04T02:27:41.975821Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037891 executing on unit CompletedOperations 2026-01-04T02:27:41.975869Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037891 has finished 2026-01-04T02:27:41.975899Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037891 2026-01-04T02:27:41.975977Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037891 |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_Types_EmptyDict [GOOD] Test command err: Trying to start YDB, gRPC: 7903, MsgBus: 8225 2026-01-04T02:26:00.909544Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327036681455088:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:00.919760Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:26:01.290701Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:26:01.332373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:01.332484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2026-01-04T02:26:01.414592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:01.542749Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:01.602068Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:26:01.628660Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:26:01.628690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:01.628697Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:01.628787Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:01.923887Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:02.138840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:02.158129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:26:02.297561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:02.694269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:03.113289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:03.303830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:05.913955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591327036681455088:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:05.914070Z node 1 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:26:06.015042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327062451260699:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:06.015169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:06.015743Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327062451260709:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:06.015800Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:06.616275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:06.757942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:06.872575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:06.974798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:07.070960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:07.120616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:07.182711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:07.253580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:07.406293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327066746228880:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:07.406420Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:07.406829Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327066746228885:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:07.406870Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327066746228886:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:07.407150Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:07.411573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:26:07.433531Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591327066746228889:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:26:07.508488Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591327066746228942:3782] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=360;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;compo ... -01-04T02:27:22.683863Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:27:22.870234Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:27:22.957464Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:27:22.957588Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:27:22.976670Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:22.996177Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:27:23.117621Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:27:23.308551Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:27:23.308584Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:27:23.308596Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:27:23.308712Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:27:23.721982Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:27:24.299697Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:27:24.309246Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:27:27.685426Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7591327389598573383:2175];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:27.685511Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:27:29.444291Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591327419663345202:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:27:29.444449Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:27:29.444942Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591327419663345214:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:27:29.445006Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591327419663345215:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:27:29.445194Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:27:29.454280Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:27:29.475654Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7591327419663345218:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:27:29.581581Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7591327419663345269:2544] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=344;columns=1; Trying to start YDB, gRPC: 6607, MsgBus: 3621 2026-01-04T02:27:32.115068Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7591327435235667333:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:32.115144Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:27:32.181948Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:27:32.302213Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:27:32.302369Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:27:32.303405Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:32.306165Z node 13 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [13:7591327435235667305:2081] 1767493652113376 != 1767493652113379 2026-01-04T02:27:32.324119Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:27:32.484386Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:27:32.542673Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:27:32.542702Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:27:32.542716Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:27:32.542844Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:27:33.122534Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:27:33.616510Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:27:33.632787Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:27:37.177612Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7591327435235667333:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:37.178617Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:27:40.091386Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7591327469595406572:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:27:40.091584Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:27:40.092554Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7591327469595406585:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:27:40.092615Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7591327469595406584:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:27:40.092681Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:27:40.101436Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:27:40.134786Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7591327469595406588:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:27:40.230736Z node 13 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [13:7591327469595406641:2547] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=120;columns=1; |84.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest >> Cdc::DocApi[TopicRunner] [GOOD] >> Cdc::HugeKey[PqRunner] |84.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |84.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut |84.3%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut/ydb-core-sys_view-ut >> DataShardWrite::WriteCommitVersion [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateBeforeCommit |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |84.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr_reboots/ydb-core-tx-schemeshard-ut_rtmr_reboots >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TBackupCollectionTests::MultipleTablesWithIndexes [GOOD] >> TBackupCollectionTests::IncrementalBackupWithIndexes >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |84.4%| [LD] {RESULT} $(B)/ydb/core/cms/ut/ydb-core-cms-ut |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut/ydb-core-cms-ut >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-7 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName >> KqpResultSetFormats::ArrowFormat_Types_List_3 [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Tuple >> TNetClassifierUpdaterTest::TestGetUpdatesFromHttpServer [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |84.4%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build_reboots/ydb-core-tx-schemeshard-ut_index_build_reboots >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> Cdc::NaN[PqRunner] [GOOD] >> Cdc::NaN[YdsRunner] |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme |84.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/scheme/ydb-core-kqp-ut-scheme >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetReadSessionsInfo request" ErrorCode: BAD_REQUEST } |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-13 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> 
TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-50 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching >> KqpResultSetFormats::ArrowFormat_Returning+isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_Returning-isOlap >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TBackupCollectionTests::IncrementalBackupWithIndexes [GOOD] >> TBackupCollectionTests::OmitIndexesFlag >> Cdc::Write[YdsRunner] [GOOD] >> Cdc::Write[TopicRunner] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> 
TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2026-01-04T02:27:49.072058Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:27:49.079745Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:27:49.080121Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-04T02:27:49.080204Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:49.080279Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-04T02:27:49.081029Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:260:2254], now have 1 active actors on pipe 2026-01-04T02:27:49.081134Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:27:49.100928Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:27:49.101136Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:49.102191Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:27:49.102405Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:27:49.102871Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:27:49.103298Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:268:2225] 2026-01-04T02:27:49.106105Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:27:49.106173Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2026-01-04T02:27:49.106247Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2225] 2026-01-04T02:27:49.106298Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:27:49.106356Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:27:49.106407Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. Count 0 2026-01-04T02:27:49.106447Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:27:49.106495Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:49.106529Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:27:49.106743Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:49.106780Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-04T02:27:49.106895Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:27:49.107171Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:27:49.108251Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:271:2259], now have 1 active actors on pipe 2026-01-04T02:27:49.208114Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:27:49.213387Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:27:49.213876Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-04T02:27:49.213948Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:49.214006Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-04T02:27:49.214883Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:399:2355], now have 1 active actors on pipe 2026-01-04T02:27:49.214957Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:27:49.218589Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-04T02:27:49.218735Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:49.219575Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 2 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-04T02:27:49.219746Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:27:49.220202Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:27:49.220431Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:407:2326] 2026-01-04T02:27:49.223252Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:27:49.223315Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-04T02:27:49.223375Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:407:2326] 2026-01-04T02:27:49.223422Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:27:49.223514Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:27:49.223551Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2026-01-04T02:27:49.223587Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-04T02:27:49.223624Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:49.223658Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T02:27:49.223729Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:49.223772Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-04T02:27:49.223871Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:27:49.224109Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-04T02:27:49.224694Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:410:2360], now have 1 active actors on pipe 2026-01-04T02:27:49.226461Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:416:2363], now have 1 active actors on pipe 2026-01-04T02:27:49.226846Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:418:2364], now have 1 active actors on pipe 2026-01-04T02:27:49.227032Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [2:416:2363] destroyed 2026-01-04T02:27:49.227407Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928139] server disconnected, pipe [2:418:2364] destroyed 2026-01-04T02:27:50.432905Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:27:50.436673Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:27:50.437055Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-04T02:27:50.437139Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:50.437198Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-04T02:27:50.437953Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [3:258:2252], now have 1 active actors on pipe 2026-01-04T02:27:50.438101Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:27:50.440623Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 3 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:27:50.44 ... -04T02:27:50.677858Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. 
Count 0 2026-01-04T02:27:50.678150Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2026-01-04T02:27:50.678766Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [3:469:2400], now have 1 active actors on pipe 2026-01-04T02:27:50.732062Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:27:50.743464Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:27:50.743845Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-04T02:27:50.743924Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:50.743986Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-04T02:27:50.744813Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:518:2436], now have 1 active actors on pipe 2026-01-04T02:27:50.744891Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:27:50.756561Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-04T02:27:50.756722Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:50.757645Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 6 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 6 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-04T02:27:50.757819Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:27:50.758258Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:27:50.758505Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:526:2407] 2026-01-04T02:27:50.770535Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:27:50.770670Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-01-04T02:27:50.770726Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:526:2407] 2026-01-04T02:27:50.770787Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:27:50.770858Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:27:50.770898Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-04T02:27:50.770934Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-04T02:27:50.770972Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:50.771006Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T02:27:50.771046Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:50.771081Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-04T02:27:50.771183Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:27:50.771465Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-04T02:27:50.771998Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:529:2441], now have 1 active actors on pipe 2026-01-04T02:27:50.773444Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [3:535:2444], now have 1 active actors on pipe 2026-01-04T02:27:50.773588Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [3:536:2445], now have 1 active actors on pipe 2026-01-04T02:27:50.773632Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:537:2445], now have 1 active actors on pipe 2026-01-04T02:27:50.786820Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:542:2449], now have 1 active actors on pipe 2026-01-04T02:27:50.878533Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:27:50.881422Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:27:50.882775Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:50.882847Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-04T02:27:50.882986Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:27:50.883330Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:27:50.883582Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:598:2452] 2026-01-04T02:27:50.885516Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-01-04T02:27:50.891748Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-01-04T02:27:50.892089Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-01-04T02:27:50.892225Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000002 to m0000000003 2026-01-04T02:27:50.892579Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-01-04T02:27:50.892665Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2026-01-04T02:27:50.892913Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-01-04T02:27:50.892978Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-04T02:27:50.893021Z node 3 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:27:50.893063Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-04T02:27:50.893172Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-01-04T02:27:50.893416Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-01-04T02:27:50.893461Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:27:50.893503Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-01-04T02:27:50.893570Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:598:2452] 2026-01-04T02:27:50.893629Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:27:50.893713Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:27:50.893752Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-04T02:27:50.893790Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-04T02:27:50.897995Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:50.898059Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T02:27:50.898109Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:50.898143Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-04T02:27:50.898261Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:27:50.898534Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-04T02:27:50.899237Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [3:536:2445] destroyed 2026-01-04T02:27:50.899312Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [3:535:2444] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 3 ErrorCode: OK } PartitionLocation { Partition: 2 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 3 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> 
TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw |84.4%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/ydb-core-tx-columnshard-ut_rw ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnNoClientSpecified [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "No clientId specified in CmdGetReadSessionsInfo" ErrorCode: BAD_REQUEST } |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> DataShardWrite::WriteUniqueRowsInsertDuplicateBeforeCommit [GOOD] >> DataShardWrite::WriteUniqueRowsInsertDuplicateAtCommit >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2026-01-04T02:27:51.882186Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:27:51.891815Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:27:51.892314Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-04T02:27:51.892384Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:51.892468Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-04T02:27:51.893245Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:260:2254], now have 1 active actors on pipe 2026-01-04T02:27:51.893362Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:27:51.941188Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:27:51.941376Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:51.944036Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:27:51.944287Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:27:51.944838Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:27:51.945342Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:268:2225] 2026-01-04T02:27:51.957024Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:27:51.957121Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-04T02:27:51.957193Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2225] 2026-01-04T02:27:51.957250Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:27:51.957319Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:27:51.957362Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-04T02:27:51.957398Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:27:51.957452Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:51.957487Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:27:51.957540Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:51.957588Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-04T02:27:51.957719Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:27:51.958037Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:27:51.958589Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:271:2259], now have 1 active actors on pipe 2026-01-04T02:27:52.045680Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:27:52.052823Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:27:52.053251Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928137] no config, start with empty partitions and default config 2026-01-04T02:27:52.053310Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:52.053371Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928137] doesn't have tx writes info 2026-01-04T02:27:52.054269Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [2:399:2355], now have 1 active actors on pipe 2026-01-04T02:27:52.054339Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:27:52.064278Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:27:52.064448Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:52.065282Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928137] Config applied version 2 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:27:52.065447Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 
2026-01-04T02:27:52.065905Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:27:52.066157Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [2:407:2326] 2026-01-04T02:27:52.075337Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:27:52.075428Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2026-01-04T02:27:52.075486Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:407:2326] 2026-01-04T02:27:52.075540Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:27:52.075613Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:27:52.075653Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928137][Partition][0][StateIdle] Process pending events. Count 0 2026-01-04T02:27:52.075688Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:27:52.075725Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:52.075757Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:27:52.075816Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:52.075858Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928137][Partition][0][StateIdle] Try persist 2026-01-04T02:27:52.075962Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:27:52.076274Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:27:52.076863Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [2:410:2360], now have 1 active actors on pipe 2026-01-04T02:27:52.136692Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:27:52.148245Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:27:52.148669Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-01-04T02:27:52.148740Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:52.148811Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928138] doesn't have tx writes info 2026-01-04T02:27:52.149920Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [2:459:2396], now have 1 active actors on pipe 2026-01-04T02:27:52.150013Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:27:52.159365Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-04T02:27:52.159555Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:52.160801Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928138] Config applied version 3 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { ... 37928138][Partition][1][StateIdle] Try persist 2026-01-04T02:27:54.160687Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:27:54.160990Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2026-01-04T02:27:54.161638Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [4:471:2401], now have 1 active actors on pipe 2026-01-04T02:27:54.209201Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:27:54.228704Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:27:54.229201Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-04T02:27:54.229260Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:54.229315Z node 4 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-04T02:27:54.230054Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:520:2437], now have 1 active actors on pipe 2026-01-04T02:27:54.230214Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:27:54.239319Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-04T02:27:54.239490Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:54.240214Z node 4 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 12 actor [4:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-04T02:27:54.240411Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:27:54.240826Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:27:54.241075Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:528:2408] 2026-01-04T02:27:54.246723Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:27:54.246814Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-04T02:27:54.246874Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:528:2408] 2026-01-04T02:27:54.246938Z node 4 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:27:54.247012Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:27:54.247067Z node 4 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2026-01-04T02:27:54.247119Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-04T02:27:54.247164Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:54.247208Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T02:27:54.247261Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:54.247305Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-04T02:27:54.247426Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:27:54.247702Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-04T02:27:54.248315Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:531:2442], now have 1 active actors on pipe 2026-01-04T02:27:54.249598Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [4:537:2445], now have 1 active actors on pipe 2026-01-04T02:27:54.250345Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [4:538:2446], now have 1 active actors on pipe 2026-01-04T02:27:54.250576Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:539:2446], now have 1 active actors on pipe 2026-01-04T02:27:54.263004Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:547:2453], now have 1 active actors on pipe 2026-01-04T02:27:54.307829Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:27:54.309899Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:27:54.310826Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:54.310890Z node 4 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-04T02:27:54.311028Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:27:54.311347Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:27:54.311575Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:603:2456] 2026-01-04T02:27:54.314041Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-01-04T02:27:54.315335Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-01-04T02:27:54.315643Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-01-04T02:27:54.315781Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. 
From m0000000002 to m0000000003 2026-01-04T02:27:54.316020Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-01-04T02:27:54.316103Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2026-01-04T02:27:54.316317Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-01-04T02:27:54.316364Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-04T02:27:54.316411Z node 4 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:27:54.316463Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-04T02:27:54.316573Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-01-04T02:27:54.316766Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-01-04T02:27:54.316816Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:27:54.316862Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-04T02:27:54.316908Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:603:2456] 2026-01-04T02:27:54.316973Z node 4 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:27:54.317036Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:27:54.317084Z node 4 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-04T02:27:54.317127Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-04T02:27:54.317169Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:54.317211Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T02:27:54.317250Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:54.317292Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-04T02:27:54.317392Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-01-04T02:27:54.317576Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-04T02:27:54.318959Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [4:538:2446] destroyed 2026-01-04T02:27:54.319052Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [4:537:2445] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionOffsetsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 StartOffset: 0 EndOffset: 0 ErrorCode: OK WriteTimestampEstimateMS: 0 } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "partition is not ready yet" } ErrorCode: OK } } } |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_all_types-pk_types12-all_types12-index12---] [GOOD] |84.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> TCacheTest::MigrationLostMessage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: 2026-01-04T02:27:53.511255Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:27:53.515148Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:27:53.515500Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-04T02:27:53.515636Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:53.515723Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-04T02:27:53.516561Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [1:258:2252], now have 1 active actors on pipe 2026-01-04T02:27:53.516665Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:27:53.540853Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:27:53.541034Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:53.542215Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [1:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:27:53.542414Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:27:53.542813Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:27:53.543218Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [1:266:2223] 2026-01-04T02:27:53.546181Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:27:53.546245Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-04T02:27:53.546287Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [1:266:2223] 2026-01-04T02:27:53.546336Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:27:53.546395Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:27:53.546455Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-04T02:27:53.546501Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:27:53.546546Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:53.546580Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:27:53.546639Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:53.546677Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-04T02:27:53.546785Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:27:53.547048Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:27:53.547575Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [1:269:2257], now have 1 active actors on pipe 2026-01-04T02:27:53.616758Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:27:53.620812Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:27:53.621180Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928137] no config, start with empty partitions and default config 2026-01-04T02:27:53.621273Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:53.621360Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928137] doesn't have tx writes info 2026-01-04T02:27:53.622177Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [1:399:2355], now have 1 active actors on pipe 2026-01-04T02:27:53.622300Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:27:53.624868Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:27:53.625004Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:53.625898Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928137] Config applied version 2 actor [1:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:27:53.626029Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 
2026-01-04T02:27:53.626338Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:27:53.626573Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [1:407:2326] 2026-01-04T02:27:53.628822Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:27:53.628888Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2026-01-04T02:27:53.628955Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [1:407:2326] 2026-01-04T02:27:53.629005Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:27:53.629066Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:27:53.629105Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928137][Partition][0][StateIdle] Process pending events. Count 0 2026-01-04T02:27:53.629140Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:27:53.629177Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:53.629212Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:27:53.629250Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:53.629290Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928137][Partition][0][StateIdle] Try persist 2026-01-04T02:27:53.629373Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:27:53.629594Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:27:53.630143Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [1:410:2360], now have 1 active actors on pipe 2026-01-04T02:27:53.673920Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:27:53.692721Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:27:53.692995Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-01-04T02:27:53.693067Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:53.693110Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928138] doesn't have tx writes info 2026-01-04T02:27:53.693660Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [1:459:2396], now have 1 active actors on pipe 2026-01-04T02:27:53.693756Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:27:53.697306Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-04T02:27:53.697461Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:53.698486Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928138] Config applied version 3 actor [1:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-04T02:27:53.698634Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initializ ... 
0000, To tx_18446744073709551615 2026-01-04T02:27:55.917592Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-04T02:27:55.917660Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:55.917721Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-04T02:27:55.918545Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:519:2437], now have 1 active actors on pipe 2026-01-04T02:27:55.918627Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:27:55.921387Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-04T02:27:55.921508Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:55.922143Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 12 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 12 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-04T02:27:55.922293Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:27:55.922617Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:27:55.922833Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:527:2408] 2026-01-04T02:27:55.925226Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:27:55.925289Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-04T02:27:55.925334Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [3:527:2408] 2026-01-04T02:27:55.925388Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:27:55.925449Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:27:55.925489Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2026-01-04T02:27:55.925531Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-04T02:27:55.925568Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:55.925604Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T02:27:55.925649Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:55.925685Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-04T02:27:55.925788Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:27:55.926056Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-04T02:27:55.926501Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:530:2442], now have 1 active actors on pipe 2026-01-04T02:27:55.928069Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [3:537:2445], now have 1 active actors on pipe 2026-01-04T02:27:55.928673Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [3:540:2446], now have 1 active actors on pipe 2026-01-04T02:27:55.928754Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [3:539:2446], now have 1 active actors on pipe 2026-01-04T02:27:55.928810Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:541:2446], now have 1 active actors on pipe 2026-01-04T02:27:55.929471Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:554:2457], now have 1 active actors on pipe 2026-01-04T02:27:55.964829Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:27:55.967303Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:27:55.968149Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:27:55.968212Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-04T02:27:55.968335Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:27:55.968669Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:27:55.968881Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:610:2460] 2026-01-04T02:27:55.976753Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-01-04T02:27:55.978189Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-01-04T02:27:55.978533Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-01-04T02:27:55.978650Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000002 to m0000000003 2026-01-04T02:27:55.986280Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-01-04T02:27:55.986406Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2026-01-04T02:27:55.986788Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-01-04T02:27:55.986856Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-04T02:27:55.986908Z node 3 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:27:55.986948Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-04T02:27:55.987055Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-01-04T02:27:55.987284Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-01-04T02:27:55.987330Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:27:55.987374Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-01-04T02:27:55.987421Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:610:2460] 2026-01-04T02:27:55.987478Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:27:55.987540Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:27:55.987582Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-04T02:27:55.987616Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-04T02:27:55.987654Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:55.987689Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T02:27:55.987730Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:27:55.987766Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-04T02:27:55.987855Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:27:55.988033Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-04T02:27:55.988810Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [3:537:2445] destroyed 2026-01-04T02:27:55.988868Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928137] server disconnected, pipe [3:539:2446] destroyed 2026-01-04T02:27:55.988943Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [3:540:2446] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 3 ErrorCode: OK } PartitionResult { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } ErrorCode: OK } } } |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 >> IcebergClusterProcessor::ValidateDdlCreationForHadoopWithS3 [GOOD] >> IcebergClusterProcessor::ValidateConfigurationWithoutWarehouse [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 [FAIL] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 >> TBackupCollectionTests::OmitIndexesFlag [GOOD] >> TBackupCollectionTests::IndexCdcStreamCountRotation >> KqpScanArrowFormat::AggregateNoColumn [GOOD] >> KqpScanArrowFormat::AggregateNoColumnNoRemaps >> KqpResultSetFormats::ArrowFormat_Types_Tuple [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Struct >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateConfigurationWithoutWarehouse [GOOD] |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest >> TCacheTest::Recreate >> Cdc::HugeKey[PqRunner] [GOOD] >> Cdc::HugeKey[YdsRunner] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 >> Cache::Test1 [GOOD] >> Cache::Test2 [GOOD] >> Cache::Test3 [GOOD] >> TCacheTest::MigrationLostMessage [GOOD] >> TCacheTest::MigrationUndo >> TCacheTest::Recreate [GOOD] >> TCacheTest::SysLocks >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test3 [GOOD] |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest >> ActorPage::InvalidTokenForbidden |84.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TCacheTest::MigrationUndo [GOOD] >> TCacheTest::SysLocks [GOOD] >> ActorHandler::NoValidGroupForbidden >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationUndo [GOOD] Test command err: 2026-01-04T02:27:57.148624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:27:57.148689Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... HARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/compile_cache_queries', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.377192Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720667 2026-01-04T02:28:01.377255Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_request_units_one_minute', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.377298Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720668 2026-01-04T02:28:01.377363Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_read_bytes_one_minute', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.377395Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720669 2026-01-04T02:28:01.377451Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720670 2026-01-04T02:28:01.377514Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/resource_pool_classifiers', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.379034Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/nodes', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.379166Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720671 2026-01-04T02:28:01.379310Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at 
schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_cpu_time_one_hour', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.379434Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720672 2026-01-04T02:28:01.379493Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_by_tli_one_minute', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.379664Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720673 2026-01-04T02:28:01.379771Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720674 2026-01-04T02:28:01.379831Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_cpu_time_one_minute', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.380022Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720675 2026-01-04T02:28:01.380083Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_by_tli_one_hour', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.380172Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720676 2026-01-04T02:28:01.380228Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_request_units_one_hour', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.380473Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720677 2026-01-04T02:28:01.380559Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_one_hour', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.380592Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/pg_tables', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.380674Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720678 2026-01-04T02:28:01.380729Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_duration_one_minute', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.380777Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720679 2026-01-04T02:28:01.380806Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_group_members', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.380862Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720680 2026-01-04T02:28:01.380909Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_users', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.380948Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720681 2026-01-04T02:28:01.380980Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720682 2026-01-04T02:28:01.381011Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/resource_pools', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.381037Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/streaming_queries', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.381079Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720683 2026-01-04T02:28:01.381107Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720684 2026-01-04T02:28:01.381130Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_permissions', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.381179Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720685 2026-01-04T02:28:01.381211Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_one_minute', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.381238Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_effective_permissions', reason: Schema is in ReadOnly mode 2026-01-04T02:28:01.381337Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720686 2026-01-04T02:28:01.381418Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 
72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_groups', reason: Schema is in ReadOnly mode TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2026-01-04T02:28:01.385393Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard__operation_upgrade_subdomain.cpp:1466: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2026-01-04T02:28:01.385570Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp:601) 2026-01-04T02:28:01.388556Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6292: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2026-01-04T02:28:01.391786Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6292: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 Forgetting tablet 72075186233409549 2026-01-04T02:28:01.400176Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6292: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2026-01-04T02:28:01.401116Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6292: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-04T02:28:01.402928Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 Leader for TabletID 72057594046678944 is [2:408:2397] sender: [2:736:2067] recipient: [2:49:2096] Leader for TabletID 72057594046678944 is [2:408:2397] sender: [2:739:2067] recipient: [2:738:2629] Leader for TabletID 72057594046678944 is [2:740:2630] sender: [2:741:2067] recipient: [2:738:2629] 2026-01-04T02:28:01.473949Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:28:01.474021Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |84.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> KqpResultSetFormats::ArrowFormat_Returning-isOlap [GOOD] >> KqpResultSetFormats::ArrowFormat_ColumnOrder |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |84.4%| [LD] {RESULT} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/health_check/ut/ydb-core-health_check-ut |84.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |84.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots |84.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume_reboots/ydb-core-tx-schemeshard-ut_bsvolume_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::SysLocks [GOOD] Test command err: 2026-01-04T02:27:59.471243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to 
console configs 2026-01-04T02:27:59.471321Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: 
minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at 
step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... tep: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: 
advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... 
waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-04T02:28:02.369702Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 >> ActorHandler::OptionsNoContent |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |84.5%| [LD] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/balance_coverage/ut/ydb-core-tx-balance_coverage-ut >> DataShardWrite::WriteUniqueRowsInsertDuplicateAtCommit [GOOD] >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |84.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_subdomain_reboots/ydb-core-tx-schemeshard-ut_subdomain_reboots >> ActorHandler::HttpOk >> MonPage::HttpOk >> TBackupCollectionTests::IndexCdcStreamCountRotation [GOOD] >> TBackupCollectionTests::StreamRotationSafetyWithUserStreams >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] >> TCmsTest::DynamicConfig >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-33 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-6 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-39 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 >> Cdc::Write[TopicRunner] [GOOD] >> Cdc::UpdateStream >> TCmsTest::RequestReplaceDevices >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching-EnablePersistentPartitionStats >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DisableWritesToDatabase-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:27:56.062124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:27:56.083691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:27:56.083762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:27:56.083822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:27:56.083863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:27:56.083909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:27:56.083976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:27:56.084119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:27:56.091182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:27:56.091557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:27:56.698188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:27:56.698259Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:56.716774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:27:56.720082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:27:56.720330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:27:56.796775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:27:56.815677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:27:56.841298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:27:56.878175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:27:56.928812Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: 
TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:27:56.973606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:27:57.080380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:27:57.080477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:27:57.080570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:27:57.080628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:27:57.080747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:27:57.085539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:27:58.177150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.214577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.214815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.214899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.214997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:27:58.215060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.215136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.215200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.215298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.215362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.215439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.215549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.215613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.215712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.215796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... lete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:28:07.095028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:28:07.096935Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:28:07.097001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:28:07.097230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:28:07.097440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:07.097483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-04T02:28:07.097529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-04T02:28:07.098076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:28:07.098138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2026-01-04T02:28:07.098253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:28:07.098322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 
72057594046678944 2026-01-04T02:28:07.098370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2026-01-04T02:28:07.099397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:28:07.099553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:28:07.099595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:28:07.099632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 2026-01-04T02:28:07.099676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:28:07.108053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:28:07.108227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:28:07.108271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:28:07.108308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-04T02:28:07.108346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:28:07.108467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-04T02:28:07.118342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:28:07.118426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:28:07.118891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:28:07.119113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:28:07.119156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone 
TxId: 103 ready parts: 1/1 2026-01-04T02:28:07.119191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:28:07.119223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:28:07.119276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-04T02:28:07.119360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:813:2780] message: TxId: 103 2026-01-04T02:28:07.119438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:28:07.119492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:28:07.119589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:28:07.119706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:28:07.120271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:28:07.120312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:28:07.120973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:28:07.122612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:28:07.131231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:07.131305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 0, path id: 38 2026-01-04T02:28:07.131397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:28:07.131433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1067:3000] 2026-01-04T02:28:07.132325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2026-01-04T02:28:07.133472Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:28:07.133757Z node 1 :SCHEMESHARD_DESCRIBE 
INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 292us result status StatusSuccess 2026-01-04T02:28:07.136923Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "unquoted_storage_pool" Kind: "unquoted_storage_pool_kind" } StoragePools { Name: "quoted_storage_pool" Kind: "quoted_storage_pool_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "unquoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "quoted_storage_pool_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { storage_quotas { unit_kind: "quoted_storage_pool_kind" data_size_hard_quota: 1 } } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 >> TCmsTest::DynamicConfig [GOOD] >> TCmsTest::DisabledEvictVDisks >> KqpResultSetFormats::ArrowFormat_Types_Struct [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Variant >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-52 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 >> Cdc::NaN[YdsRunner] [GOOD] >> Cdc::NaN[TopicRunner] >> ActorPage::InvalidTokenForbidden [GOOD] >> ActorPage::NoUseAuthOk >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 >> 
TKeyValueTest::TestObtainLockNewApi [GOOD] >> TKeyValueTest::TestReadRequestInFlightLimit >> ActorHandler::NoValidGroupForbidden [GOOD] >> ActorHandler::NoUseAuthOk >> TKeyValueTest::TestReadRequestInFlightLimit [GOOD] >> listing_paging.py::TestListingPaging::test_listing_paging_monitoring [GOOD] >> TCmsTest::RequestReplaceDevices [GOOD] >> TCmsTest::RequestReplacePDiskDoesntBreakGroup >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure [GOOD] >> KqpScanArrowInChanels::AggregateNoColumn [GOOD] >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps >> TCmsTest::RequestReplaceDevicePDisk >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_15_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 15] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_16_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 16] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestReadRequestInFlightLimit [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:84:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:87:2057] recipient: [7:86:2117] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:89:2057] recipient: [7:86:2117] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:88:2118] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:204:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:84:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:87:2057] recipient: [8:86:2117] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:89:2057] recipient: [8:86:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:88:2118] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:204:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2119] Leader for TabletID 72057594037927937 is [9:90:2120] sender: [9:91:2057] recipient: [9:88:2119] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:90:2120] Leader for TabletID 72057594037927937 is [9:90:2120] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2119] Leader for TabletID 72057594037927937 is [10:90:2120] sender: [10:91:2057] recipient: [10:88:2119] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2120] Leader for TabletID 72057594037927937 is [10:90:2120] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2121] Leader for TabletID 72057594037927937 is [11:92:2122] sender: [11:93:2057] recipient: [11:90:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:92:2122] Leader for TabletID 72057594037927937 is [11:92:2122] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2121] Leader for TabletID 72057594037927937 is [12:92:2122] sender: [12:93:2057] recipient: [12:90:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:92:2122] Leader for TabletID 72057594037927937 is [12:92:2122] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:90:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:93:2057] recipient: [13:92:2123] Leader for TabletID 72057594037927937 is [13:94:2124] sender: [13:95:2057] recipient: [13:92:2123] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:94:2124] Leader for TabletID 72057594037927937 is [13:94:2124] sender: [13:210:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:90:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:93:2057] recipient: [14:92:2123] Leader for TabletID 72057594037927937 is [14:94:2124] sender: [14:95:2057] recipient: [14:92:2123] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:94:2124] Leader for TabletID 72057594037927937 is [14:94:2124] sender: [14:210:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:91:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:94:2057] recipient: [15:93:2123] Leader for TabletID 72057594037927937 is [15:95:2124] sender: [15:96:2057] recipient: [15:93:2123] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! 
new actor is[15:95:2124] Leader for TabletID 72057594037927937 is [15:95:2124] sender: [15:211:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:93:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:96:2057] recipient: [16:95:2125] Leader for TabletID 72057594037927937 is [16:97:2126] sender: [16:98:2057] recipient: [16:95:2125] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:97:2126] Leader for TabletID 72057594037927937 is [16:97:2126] sender: [16:213:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:93:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:96:2057] recipient: [17:95:2125] Leader for TabletID 72057594037927937 is [17:97:2126] sender: [17:98:2057] recipient: [17:95:2125] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:97:2126] Leader for TabletID 72057594037927937 is [17:97:2126] sender: [17:213:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:52:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:52:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:94:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:97:2057] recipient: [18:96:2125] Leader for TabletID 72057594037927937 is [18:98:2126] sender: [18:99:2057] recipient: [18:96:2125] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! 
new actor is[18:98:2126] Leader for TabletID 72057594037927937 is [18:98:2126] sender: [18:214:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-40 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 >> TCmsTest::RequestReplacePDiskDoesntBreakGroup [GOOD] >> TCmsTest::RequestReplacePDiskConsecutiveWithDone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::VolatileAndNonVolatileWritePlanStepCommitFailure [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:26:38.853167Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:39.066342Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:26:39.066928Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:26:39.066993Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:26:39.372823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:39.372947Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:39.445074Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493595991587 != 1767493595991590 2026-01-04T02:26:39.459816Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:39.506409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:39.604608Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:26:40.072290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:40.101155Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:40.222616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:40.265069Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:26:40.266243Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:26:40.266608Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:26:40.266944Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:26:40.311740Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:26:40.312499Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:26:40.312617Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:26:40.314327Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:26:40.314409Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:26:40.314457Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:26:40.314794Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:26:40.314934Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:26:40.315015Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:26:40.325851Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:26:40.369140Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:26:40.369343Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:26:40.369464Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:26:40.369514Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:26:40.369556Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:26:40.369614Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:40.369755Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:40.369975Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:40.370430Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:26:40.370560Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:26:40.370665Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:26:40.370704Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:26:40.370767Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:26:40.370828Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:26:40.370871Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:26:40.370920Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:26:40.370971Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-04T02:26:40.371139Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:40.371183Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:40.371264Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:26:40.371712Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-04T02:26:40.371775Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:26:40.371897Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:26:40.372240Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:26:40.372304Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:26:40.372415Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:26:40.372474Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:26:40.372513Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:26:40.372564Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T02:26:40.372611Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:26:40.372966Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:26:40.373023Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-04T02:26:40.373072Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:26:40.373114Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-04T02:26:40.373164Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-04T02:26:40.373194Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:26:40.373226Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-04T02:26:40.373261Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-04T02:26:40.373284Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-04T02:26:40.374805Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-04T02:26:40.374870Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:26:40.386373Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:26:40.386469Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474 ... CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:28:12.426527Z node 10 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-04T02:28:12.426648Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:921:2784], Recipient [10:1152:2957]: {TEvReadSet step# 3001 txid# 1234567890012 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 2} 2026-01-04T02:28:12.426679Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:12.426708Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890012 ... 
validating table 2026-01-04T02:28:12.621085Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [10:1204:2996], Recipient [10:1152:2957]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:28:12.621189Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:28:12.621254Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [10:1203:2995], serverId# [10:1204:2996], sessionId# [0:0:0] 2026-01-04T02:28:12.637141Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [10:1210:2999], Recipient [10:1152:2957]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-04T02:28:12.637333Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-04T02:28:12.637434Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2026-01-04T02:28:12.637562Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-04T02:28:12.637626Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2026-01-04T02:28:12.637702Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:28:12.637753Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:28:12.640185Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2026-01-04T02:28:12.640285Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-04T02:28:12.640331Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:28:12.640358Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T02:28:12.640387Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2026-01-04T02:28:12.640587Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-04T02:28:12.640954Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3001/18446744073709551615 2026-01-04T02:28:12.641026Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[10:1210:2999], 0} after executionsCount# 1 2026-01-04T02:28:12.641091Z node 10 :TX_DATASHARD TRACE: 
datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[10:1210:2999], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:28:12.641221Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[10:1210:2999], 0} finished in read 2026-01-04T02:28:12.641321Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-04T02:28:12.641347Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T02:28:12.641376Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:28:12.641402Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:28:12.641454Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-04T02:28:12.641477Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:28:12.641508Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037888 has finished 2026-01-04T02:28:12.641557Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T02:28:12.641733Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-04T02:28:12.642902Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [10:1210:2999], Recipient [10:921:2784]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2026-01-04T02:28:12.643091Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [10:1210:2999], Recipient [10:1152:2957]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:28:12.643175Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-04T02:28:12.643460Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-04T02:28:12.643523Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2026-01-04T02:28:12.643583Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-04T02:28:12.643607Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2026-01-04T02:28:12.643631Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-04T02:28:12.643672Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit 
BuildAndWaitDependencies 2026-01-04T02:28:12.643744Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2026-01-04T02:28:12.643779Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-04T02:28:12.643816Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-04T02:28:12.643842Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2026-01-04T02:28:12.643866Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2026-01-04T02:28:12.643959Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3001 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2026-01-04T02:28:12.644205Z node 10 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v3001/18446744073709551615 2026-01-04T02:28:12.644250Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[10:1210:2999], 1} after executionsCount# 1 2026-01-04T02:28:12.644286Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[10:1210:2999], 1} sends rowCount# 3, bytes# 96, quota rows left# 996, quota bytes left# 5242784, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:28:12.644344Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[10:1210:2999], 1} finished in read 2026-01-04T02:28:12.644401Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-04T02:28:12.644437Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2026-01-04T02:28:12.644461Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2026-01-04T02:28:12.644486Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2026-01-04T02:28:12.644527Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-04T02:28:12.644548Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2026-01-04T02:28:12.644570Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037889 has finished 2026-01-04T02:28:12.644595Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-04T02:28:12.644667Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-04T02:28:12.645376Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [10:1210:2999], Recipient 
[10:921:2784]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-04T02:28:12.645428Z node 10 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 2 } items { int32_value: 1004 } }, { items { int32_value: 11 } items { int32_value: 1002 } }, { items { int32_value: 12 } items { int32_value: 1003 } }, { items { int32_value: 13 } items { int32_value: 1004 } } |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest >> ActorHandler::HttpOk [GOOD] >> ActorHandler::InvalidTokenForbidden >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-37 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-26 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] >> Cdc::HugeKey[YdsRunner] [GOOD] >> Cdc::HugeKey[TopicRunner] >> TCmsTest::RequestReplaceDevicePDisk [GOOD] >> TCmsTest::RequestReplaceDevicePDiskByPath >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |84.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_table_writer/ydb-core-tx-replication-service-ut_table_writer >> ActorPage::NoUseAuthOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:27:56.058728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:27:56.082313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:27:56.082394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:27:56.082456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:27:56.082498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:27:56.082532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:27:56.082601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:27:56.082680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:27:56.090694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:27:56.091061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:27:56.731117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:27:56.731191Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:56.757210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:27:56.758258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:27:56.758497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:27:56.801257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:27:56.817266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:27:56.845921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:27:56.880371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:27:56.928078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:27:56.973230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:27:57.076742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:27:57.076861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:27:57.076958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:27:57.077016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:27:57.077189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:27:57.085085Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:27:58.242265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.243472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.243653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.243736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.243810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.243880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.243971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.244038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:27:58.244174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.244271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.244348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.244447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.244526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.244628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.244698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
16.618097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:28:16.618898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:28:16.618965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:28:16.619176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:28:16.619360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:16.619406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-04T02:28:16.619458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-04T02:28:16.619570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:28:16.619630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2026-01-04T02:28:16.619733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:28:16.619780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2026-01-04T02:28:16.619835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2026-01-04T02:28:16.621524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:28:16.621646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:28:16.621693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:28:16.621739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 10 2026-01-04T02:28:16.621788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 
2026-01-04T02:28:16.624462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:28:16.624560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:28:16.624594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:28:16.624649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-04T02:28:16.624688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:28:16.624781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-04T02:28:16.627438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:28:16.627506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:28:16.627916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:28:16.628150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:28:16.628193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:28:16.628234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:28:16.628270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:28:16.628324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-04T02:28:16.628398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:815:2782] message: TxId: 103 2026-01-04T02:28:16.628453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:28:16.628493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:28:16.628529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:28:16.628635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 39] was 2 2026-01-04T02:28:16.629146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:28:16.629185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:28:16.629995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:28:16.631835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:28:16.633362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:16.633423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 0, path id: 38 2026-01-04T02:28:16.633546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:28:16.633609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1747:3672] 2026-01-04T02:28:16.634593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 11 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2026-01-04T02:28:16.639133Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:28:16.639404Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 303us result status StatusSuccess 2026-01-04T02:28:16.639949Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" 
} PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_20_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 20] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_21_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 21] >> KqpResultSetFormats::ArrowFormat_ColumnOrder [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_None >> TCmsTest::RequestRestartServicesReject >> DataShardWrite::DoubleWriteUncommittedThenDoubleReadWithCommit [GOOD] >> DataShardWrite::DistributedInsertWithoutLocks-Volatile >> TBackupCollectionTests::StreamRotationSafetyWithUserStreams [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot_Dirty [GOOD] Test command err: 2026-01-04T02:25:59.513627Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:25:59.526451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:25:59.526521Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:59.547781Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:25:59.548229Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:25:59.548557Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:25:59.722307Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:25:59.799602Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:25:59.799731Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:25:59.816466Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: 
LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:25:59.816595Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:25:59.816681Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:25:59.817264Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:25:59.819097Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:25:59.819405Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:26:00.013912Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:26:00.146696Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:26:00.146956Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:26:00.147081Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:26:00.147136Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:26:00.147193Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:26:00.147241Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:26:00.147501Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:00.147568Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:00.147918Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:26:00.148044Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:26:00.148138Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:26:00.148195Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:26:00.148238Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:26:00.148283Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:26:00.148324Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:26:00.148373Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:26:00.148416Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:26:00.148523Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:00.148562Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:00.148614Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:26:00.184946Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:26:00.185042Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:26:00.185177Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:26:00.185389Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:26:00.185445Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:26:00.185498Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:26:00.185567Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:26:00.185628Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:26:00.185671Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:26:00.185707Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:26:00.202299Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:26:00.202388Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:26:00.202431Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:26:00.202475Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:26:00.202546Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:26:00.202606Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:26:00.202673Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:26:00.202731Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:26:00.202770Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:26:00.216032Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:26:00.216141Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:26:00.216178Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:26:00.216238Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:26:00.216329Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:26:00.216918Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:00.216989Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:00.217031Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:26:00.217252Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:26:00.217290Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:26:00.217607Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:26:00.217660Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:26:00.217703Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:26:00.217751Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:26:00.221765Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:26:00.255543Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:26:00.255939Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:00.255996Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:00.256078Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:26:00.256142Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:26:00.256207Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:26:00.256262Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:26:00.256300Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
2:347:2314]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2026-01-04T02:28:13.264874Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.264903Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2026-01-04T02:28:13.264998Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2026-01-04T02:28:13.265031Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.265061Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2026-01-04T02:28:13.265122Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2026-01-04T02:28:13.265155Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.265184Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2026-01-04T02:28:13.265271Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2026-01-04T02:28:13.265303Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.265333Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2026-01-04T02:28:13.265414Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2026-01-04T02:28:13.265444Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.265473Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2026-01-04T02:28:13.265561Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2026-01-04T02:28:13.265595Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.265627Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2026-01-04T02:28:13.265718Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], 
Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2026-01-04T02:28:13.265749Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.265778Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2026-01-04T02:28:13.284309Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2026-01-04T02:28:13.284410Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.284457Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2026-01-04T02:28:13.284606Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2026-01-04T02:28:13.284645Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.284677Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2026-01-04T02:28:13.284779Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2026-01-04T02:28:13.284814Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.284844Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2026-01-04T02:28:13.284929Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2026-01-04T02:28:13.284960Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.284992Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2026-01-04T02:28:13.285078Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2026-01-04T02:28:13.285109Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.285139Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2026-01-04T02:28:13.285222Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender 
[32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2026-01-04T02:28:13.285256Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.285285Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2026-01-04T02:28:13.285372Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2026-01-04T02:28:13.285406Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.285436Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2026-01-04T02:28:13.285516Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2026-01-04T02:28:13.285547Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.285574Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2026-01-04T02:28:13.285653Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2026-01-04T02:28:13.285685Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.285716Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2026-01-04T02:28:13.285797Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2026-01-04T02:28:13.285843Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.285873Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2026-01-04T02:28:13.285960Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2026-01-04T02:28:13.285993Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.286023Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2026-01-04T02:28:13.286102Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 
269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2026-01-04T02:28:13.286134Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.286163Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2026-01-04T02:28:13.286240Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:347:2314]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-01-04T02:28:13.286270Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:13.286298Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 31 31 30 26 17 31 28 31 17 28 13 24 15 23 29 22 24 17 14 23 23 24 9 9 23 14 24 14 24 17 - - actual 31 31 30 26 17 31 28 31 17 28 13 24 15 23 29 22 24 17 14 23 23 24 9 9 23 14 24 14 24 17 - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |84.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move_reboots/ydb-core-tx-schemeshard-ut_move_reboots >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 >> ActorHandler::NoUseAuthOk [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 >> TCmsTest::RequestReplacePDiskConsecutiveWithDone [GOOD] >> TCmsTest::RequestReplaceManyDevicesOnOneNode >> ActorHandler::OptionsNoContent [GOOD] >> ActorPage::HttpOk ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorPage::NoUseAuthOk [GOOD] Test command err: 2026-01-04T02:28:01.555867Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327557570865831:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:28:01.556858Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:28:02.153980Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:02.162960Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:28:02.163081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:02.285531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2026-01-04T02:28:02.580900Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:28:02.737511Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:02.773961Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:28:03.085681Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:28:03.085705Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:28:03.085712Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:28:03.085787Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:28:03.506382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:28:03.641304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:28:03.659471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:28:03.679960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:28:10.618498Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591327596605012915:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:28:10.618576Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:28:10.720192Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:10.862349Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591327596605012891:2081] 1767493690602325 != 1767493690602328 2026-01-04T02:28:10.875009Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:28:10.875107Z node 2 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:10.876555Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:28:10.894300Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-04T02:28:10.894328Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-04T02:28:10.940995Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:11.025927Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:11.182697Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:28:11.182721Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:28:11.182729Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:28:11.182816Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:28:11.607106Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:28:11.629852Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:28:11.670105Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:28:11.675334Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:28:11.680640Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:28:11.715773Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:28:11.727488Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] >> TCmsTest::RequestReplaceDevicePDiskByPath [GOOD] >> 
TCmsTest::RequestReplaceDeviceTwiceWithNoVDisks >> MonPage::HttpOk [GOOD] >> MonPage::OptionsNoContent |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tests/tools/fqrun/fqrun |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/tools/fqrun/fqrun |84.5%| [LD] {RESULT} $(B)/ydb/tests/tools/fqrun/fqrun >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 >> Cdc::UpdateStream [GOOD] >> Cdc::UpdateShardCount >> TCmsTest::RequestRestartServicesReject [GOOD] >> TCmsTest::RequestRestartServicesPartial >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsAndTags [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/pqtablet/partition/mlp/ut/unittest >> TMLPConsumerTests::ReloadPQTablet 2026-01-04 02:27:59,798 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-04 02:28:00,080 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 326621 58.8M 58.6M 32.8M test_tool run_ut @/home/runner/.ya/build/build_root/c5zo/0017ef/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/unittest/testing_out_stuff/test_tool.args 327813 889M 894M 843M └─ ydb-core-persqueue-pqtablet-partition-mlp-ut --trace-path-append /home/runner/.ya/build/build_root/c5zo/0017ef/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/ Test command err: 2026-01-04T02:18:01.710140Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591324981698816515:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:01.710745Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:18:01.753936Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:18:01.924885Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:18:01.991183Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:18:01.991289Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:18:01.995275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:18:02.004385Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:18:02.005617Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591324981698816485:2081] 1767493081708368 != 1767493081708371 2026-01-04T02:18:02.137033Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-04T02:18:02.157609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/0017ef/r3tmp/yandexJUTS2D.tmp 2026-01-04T02:18:02.157633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/0017ef/r3tmp/yandexJUTS2D.tmp 2026-01-04T02:18:02.157845Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/0017ef/r3tmp/yandexJUTS2D.tmp 2026-01-04T02:18:02.157956Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:18:02.188548Z INFO: TTestServer started on Port 64478 GrpcPort 65040 PQClient connected to localhost:65040 2026-01-04T02:18:02.368508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:18:02.450963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:02.717766Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:18:04.762476Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324994583719409:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:04.762627Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:04.763102Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324994583719424:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:04.763158Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:04.763654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591324994583719419:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:18:04.782517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:18:04.803337Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591324994583719426:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-04T02:18:05.012532Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591324994583719490:2644] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:18:05.038584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:05.078166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:18:05.188251Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591324998878686802:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:18:05.190807Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YzM0ZmU5YzQtZDFhZGMxYmMtNDAxZjBhMzQtYzYyMzM0OTU=, ActorId: [1:7591324994583719407:2330], ActorState: ExecuteState, LegacyTraceId: 01ke3cr9jafcwswz5as75wacgt, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:18:05.193021Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:18:05.194927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7591324998878687088:2823] 2026-01-04T02:18:06.710406Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591324981698816515:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:18:06.710490Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-04T02:18:11.541987Z :TODO INFO: TTopicSdkTestSetup started 2026-01-04T02:18:11.554728Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-04T02:18:11.567285Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7591325024648491105:2933] connected; active server actors: 1 2026-01-04T02:18:11.567752Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-01-04T02:18:11.585049Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-04T02:18:11.585263Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-04T02:18:11.587634Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:18:11.588185Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:18:11.588344Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-04T02:18:11.588383Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:18:11.588415Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72075186224037892] doesn't have tx writes info 2026-01-04T02:18:11.588523Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037892] server connected, pipe [1:7591325024648491102:2930], now have 1 active actors on pipe 20 ... ndle TEvents::TEvWakeup 2026-01-04T02:26:49.369818Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:26:50.373384Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:26:51.374120Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:26:52.374682Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:26:53.374989Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:26:54.375650Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:26:55.382034Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:26:56.382070Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:26:57.386137Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:26:58.390503Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:26:59.394143Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:00.396872Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:01.395587Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:02.395874Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:03.397982Z node 1 :PQ_MLP_CONSUMER DEBUG: 
mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:04.401977Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:05.402285Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:06.406150Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:07.410192Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:08.410559Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:09.413414Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:10.417133Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:11.418459Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:12.423228Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:13.426033Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:14.429058Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:15.430052Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:16.438212Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:17.438223Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:18.441456Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:19.442144Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:20.444517Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:21.446046Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:22.446920Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:23.457947Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:24.455789Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:25.454346Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:26.457980Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:27.458603Z node 1 
:PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:28.461911Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:29.462694Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:30.466270Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:31.466452Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:32.470509Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:33.470625Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:34.471739Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:35.477998Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:36.478186Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:37.481928Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:38.478847Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:39.479852Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:40.485294Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:41.487784Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:42.488383Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:43.491452Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:44.495033Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:45.494279Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:46.495983Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:47.497957Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:48.500701Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:49.505821Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:50.509284Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 
2026-01-04T02:27:51.510018Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:52.513968Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:53.513076Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:54.514135Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:55.514695Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:56.514883Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:57.530164Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:58.532526Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup 2026-01-04T02:27:59.533365Z node 1 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:832: [72075186224037894][0][MLP][mlp-consumer] Handle TEvents::TEvWakeup Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/c5zo/0017ef/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/c5zo/0017ef/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/unittest/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007D86651AF900 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:27:56.058437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: 
BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:27:56.080701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:27:56.080804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:27:56.080868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:27:56.080935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:27:56.080967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:27:56.081032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:27:56.081136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:27:56.090037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:27:56.090489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:27:56.593965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:27:56.594076Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:56.694616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:27:56.695364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:27:56.695551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:27:56.804259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:27:56.816554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:27:56.846155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:27:56.878644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:27:56.934769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:27:56.973790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:27:57.079575Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:27:57.079686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:27:57.079772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:27:57.079827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:27:57.079950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:27:57.085790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:27:58.219381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.220560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.220719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.220794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.220857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.233611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.233860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.233951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.234025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.234093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.234155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.234259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.234366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 
281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.234488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:27:58.234670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... 00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, 
schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 
0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 2026-01-04T02:28:20.234564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 0 2026-01-04T02:28:20.234789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-01-04T02:28:20.234860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Store, is column=0, is olap=1, RowCount 100000, DataSize 1263320 2026-01-04T02:28:20.234943Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:451: OLAP store contains 1 tables. 
2026-01-04T02:28:20.235009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:473: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007D8665195500, stats written 1 2026-01-04T02:28:20.235691Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Store" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:28:20.235992Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Store" took 341us result status StatusSuccess 2026-01-04T02:28:20.236718Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Store" PathDescription { Self { Name: "Store" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } ChildrenExist: true } Children { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 162 LastUpdateTime: 162 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 162304 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "Store" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" 
Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/pqtablet/partition/mlp/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 |84.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |84.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login |84.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |84.5%| [TM] {RESULT} ydb/core/persqueue/pqtablet/partition/mlp/ut/unittest |84.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/ydb-core-tx-schemeshard-ut_login ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorHandler::NoUseAuthOk [GOOD] Test command err: 2026-01-04T02:28:03.291561Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327566991860643:2166];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:28:03.291694Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:28:03.925177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:28:03.933308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:03.952306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:28:04.087426Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:04.100386Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327566991860501:2081] 1767493683191541 != 1767493683191544 2026-01-04T02:28:04.146968Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:04.326082Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:28:04.421343Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:28:04.502511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:28:04.502533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:28:04.502541Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:28:04.502618Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:28:05.081646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:28:05.231383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:28:05.282079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:28:05.288210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-04T02:28:12.113614Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:28:12.249941Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:28:12.257942Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:12.318119Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:12.349969Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591327604992562859:2081] 1767493692032857 != 1767493692032860 2026-01-04T02:28:12.363397Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:28:12.363494Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:12.373592Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:28:12.566208Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:28:12.668597Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:28:12.668622Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:28:12.673160Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-04T02:28:12.673312Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:28:12.971550Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:28:12.982605Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:28:13.051974Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:28:13.068513Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:28:13.074027Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:28:13.110385Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TBackupCollectionTests::StreamRotationSafetyWithUserStreams [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:25:46.475223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:25:46.475321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:46.475358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:25:46.475395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:25:46.475433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:25:46.475481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:25:46.475558Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:25:46.475653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:25:46.476471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:25:46.476776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:25:46.603263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:25:46.603340Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:46.615420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:25:46.618842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:25:46.619007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:25:46.626230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:25:46.626474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:25:46.627095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:25:46.627351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:25:46.630760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:46.630956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:25:46.634091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:25:46.634172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:25:46.634293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:25:46.634342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:25:46.634384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:25:46.634529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:25:46.997305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.998513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.998666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.998737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.998838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.998900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.998977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.999052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.999132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.999224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.999322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.999411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.999482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.999546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:25:46.999646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
eason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 57] was 2 2026-01-04T02:28:18.270143Z node 24 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710759:2 2026-01-04T02:28:18.270163Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710759:2 2026-01-04T02:28:18.270219Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 59] was 1 2026-01-04T02:28:18.270611Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435084, Sender [24:129:2154], Recipient [24:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2026-01-04T02:28:18.270650Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5439: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2026-01-04T02:28:18.270698Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:28:18.270734Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 59], at schemeshard: 72057594046678944 2026-01-04T02:28:18.270797Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 57] was 1 2026-01-04T02:28:18.270837Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 57], at schemeshard: 72057594046678944 2026-01-04T02:28:18.270876Z node 24 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 7 2026-01-04T02:28:18.273170Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:28:18.273278Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [24:3102:4570] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976710759 at schemeshard: 72057594046678944 2026-01-04T02:28:18.273654Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435106, Sender [24:3102:4570], Recipient [24:129:2154]: NKikimr::NSchemeShard::TEvPrivate::TEvContinuousBackupCleanerResult 2026-01-04T02:28:18.273694Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5475: StateWork, processing event TEvPrivate::TEvContinuousBackupCleanerResult 2026-01-04T02:28:18.273799Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:28:18.275169Z node 24 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:28:18.278337Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:28:18.290877Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, 
received event# 269877764, Sender [24:3169:4636], Recipient [24:129:2154]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:28:18.290977Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:28:18.291009Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 2026-01-04T02:28:18.654501Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [24:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:28:18.654572Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:28:18.655258Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [24:129:2154], Recipient [24:129:2154]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:28:18.655309Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:28:18.702738Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [24:3189:4656], Recipient [24:129:2154]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-04T02:28:18.702812Z node 24 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:28:18.702928Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:28:18.703260Z node 24 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 313us result status StatusSuccess 2026-01-04T02:28:18.704452Z node 24 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table1" PathDescription { Self { Name: "Table1" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 11 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 
CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } } TableSchemaVersion: 11 IsBackup: false CdcStreams { Name: "000000000000000A_continuousBackupImpl" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 46 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } CdcStreams { Name: "09700101000000Z_continuousBackupImpl" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 48 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } CdcStreams { Name: "19700101000019Z_continuousBackupImpl" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046678944 LocalId: 61 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } CdcStreams { Name: "_continuousBackupImpl" Mode: ECdcStreamModeKeysOnly PathId { OwnerId: 72057594046678944 LocalId: 50 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatJson VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 56 PathsLimit: 10000 ShardsInside: 13 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup_collection/unittest >> TCmsTest::RequestReplaceDeviceTwiceWithNoVDisks [GOOD] >> TCmsTest::RequestReplaceBrokenDevices >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-27 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 >> TCmsTest::RequestRestartServicesPartial [GOOD] >> TCmsTest::RequestRestartServicesRejectSecond >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 >> KqpScanArrowFormat::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowFormat::AggregateWithFunction |84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |84.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_volatile/ydb-core-tx-datashard-ut_volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceManyDevicesOnOneNode [GOOD] Test command err: 2026-01-04T02:28:09.470054Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.008200s 2026-01-04T02:28:09.535198Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.005376s 2026-01-04T02:28:13.954017Z node 9 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008150s 2026-01-04T02:28:17.133971Z node 20 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.012251s 2026-01-04T02:28:21.630360Z node 36 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.010404s 2026-01-04T02:28:21.662038Z node 37 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008194s >> 
TCmsTest::RequestReplaceBrokenDevices [GOOD] |84.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_3_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 3] [GOOD] >> KqpResultSetFormats::ArrowFormat_Types_Variant [GOOD] >> TCmsTest::RequestRestartServicesRejectSecond [GOOD] >> TCmsTest::RequestRestartServicesWrongHost |84.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} |84.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_backup_collection/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 |84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |84.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index_build/ydb-core-tx-schemeshard-ut_index_build |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestReplaceBrokenDevices [GOOD] |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> ActorHandler::InvalidTokenForbidden [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-47 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 >> Cdc::NaN[TopicRunner] [GOOD] >> Cdc::RacyRebootAndSplitWithTxInflight >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus >> TCmsTest::DisabledEvictVDisks [GOOD] >> TCmsTest::EmergencyDuringRollingRestart >> TCmsTest::RequestRestartServicesWrongHost [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_7_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 7] [GOOD] >> TColumnShardTestReadWrite::ReadStale ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_Types_Variant [GOOD] Test command err: Trying to start YDB, gRPC: 2528, MsgBus: 14139 2026-01-04T02:26:09.257781Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327075899069338:2243];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:09.265403Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:26:09.407711Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:10.056140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:10.056254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:10.205068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:10.228757Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:26:10.245540Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:10.279164Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327075899069134:2081] 1767493569247617 != 1767493569247620 2026-01-04T02:26:10.325247Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:10.537665Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:26:10.537788Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:26:10.537794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:10.537817Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:10.537897Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:11.627642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:11.819037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:12.533453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:12.971529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:13.148668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:14.262064Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591327075899069338:2243];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:14.265182Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:26:19.007246Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327114553776730:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:19.007391Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:19.007781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327118848744036:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:19.007824Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:19.408615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:19.448070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:19.499068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:19.582037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:19.632761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:19.727108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:19.802748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:19.880001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:20.039539Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327123143712218:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:20.039632Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:20.040238Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327123143712223:2498], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:20.040295Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327123143712224:2499], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:20.040342Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:20.047863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:26:20.077447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-04T02:26:20.078279Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591327123143712227:2500], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:26:20.181417Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591327123143712278:3790] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EP ... 02:27:59.866042Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:59.870245Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:7591327548131503358:2081] 1767493679411096 != 1767493679411099 2026-01-04T02:27:59.917580Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:27:59.925860Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:00.013958Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:28:00.306713Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:28:00.306743Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:28:00.306756Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:28:00.306901Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:28:00.361476Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:28:00.455362Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:28:01.776419Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:28:01.791433Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:28:04.418104Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7591327548131503387:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:28:04.418228Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:28:07.794297Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591327582491242633:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:28:07.794447Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:28:07.794951Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591327582491242645:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:28:07.795023Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591327582491242646:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:28:07.795128Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:28:07.800346Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:28:07.829027Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7591327582491242649:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:28:07.911051Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7591327582491242700:2549] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=264;columns=1; Trying to start YDB, gRPC: 1614, MsgBus: 12582 2026-01-04T02:28:09.702881Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7591327593423544814:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:28:09.702979Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:28:09.936997Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:10.016180Z node 13 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [13:7591327593423544788:2081] 1767493689696499 != 1767493689696502 2026-01-04T02:28:10.104839Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:10.110473Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:28:10.110597Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:10.188817Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:28:10.189921Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:10.406841Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:28:10.406886Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:28:10.406898Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:28:10.407025Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:28:10.716390Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:28:10.766203Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:28:12.218602Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:28:14.700850Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7591327593423544814:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:28:14.700964Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:28:25.106548Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:28:25.106582Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:25.170235Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7591327662143022443:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:28:25.170460Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:28:25.171339Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7591327662143022464:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:28:25.171418Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7591327662143022465:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:28:25.171937Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:28:25.180941Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:28:25.206799Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7591327662143022468:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:28:25.310685Z node 13 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [13:7591327662143022519:2566] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=264;columns=1; |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes >> TColumnShardTestReadWrite::CompactionInGranule_PKString >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesWrongHost [GOOD] Test command err: 2026-01-04T02:28:22.803806Z node 9 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.005970s 2026-01-04T02:28:22.814069Z node 15 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.005769s |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-28 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 >> KqpResultSetFormats::ArrowFormat_Compression_None [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_ZSTD >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-36 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 |84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/query_replay/ydb_query_replay >> TColumnShardTestReadWrite::ReadStale [GOOD] |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/query_replay/ydb_query_replay |84.6%| [LD] {RESULT} $(B)/ydb/tools/query_replay/ydb_query_replay >> KqpScanArrowInChanels::AggregateNoColumnNoRemaps [GOOD] >> KqpScanArrowInChanels::AggregateWithFunction >> Cdc::HugeKey[TopicRunner] [GOOD] >> Cdc::HugeKeyDebezium >> TCmsTest::EmergencyDuringRollingRestart [GOOD] >> TCmsTest::DisableMaintenance >> Cdc::UpdateShardCount [GOOD] >> Cdc::UpdateRetentionPeriod ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorHandler::InvalidTokenForbidden [GOOD] Test command err: 2026-01-04T02:28:06.659270Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327580947427467:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:28:06.659532Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:28:07.490090Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2026-01-04T02:28:07.523993Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:28:07.524078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:07.671246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:28:07.764189Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:07.775659Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:28:07.806025Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327580947427232:2081] 1767493686591482 != 1767493686591485 2026-01-04T02:28:07.833949Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:28:08.142109Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:28:08.142133Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:28:08.142152Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:28:08.142226Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:28:08.917542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:28:08.928657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:28:09.052974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:28:09.062860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:28:16.546995Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591327621489719365:2193];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:28:16.547111Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:28:16.654378Z node 2 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:16.839059Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:28:16.839298Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:16.852881Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:17.105970Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591327621489719210:2081] 1767493696511506 != 1767493696511509 2026-01-04T02:28:17.139298Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:17.251896Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:28:17.351063Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:17.566141Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:28:17.742898Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:28:17.742937Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:28:17.742944Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:28:17.743052Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:28:18.070015Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:28:19.347574Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:28:19.358493Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:28:19.426465Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:28:19.450150Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:28:19.458415Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestEmpty [GOOD] |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |84.6%| [LD] {RESULT} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet/ut/ydb-core-tablet-ut >> DataShardWrite::DistributedInsertWithoutLocks-Volatile [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadStale [GOOD] Test command err: 2026-01-04T02:28:31.657322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:28:31.690998Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:28:31.691239Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:28:31.698638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:28:31.698919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:28:31.699150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:28:31.699304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:28:31.699427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:28:31.699552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:28:31.699658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:28:31.699764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:28:31.699887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:28:31.700005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:28:31.700164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:28:31.700295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:28:31.700415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:28:31.732256Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:28:31.732422Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:28:31.732475Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:28:31.732670Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:28:31.732844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:28:31.732932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:28:31.732985Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:28:31.733078Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:28:31.733149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:28:31.733201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:28:31.733241Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:28:31.733439Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:28:31.733508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:28:31.733553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:28:31.733585Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:28:31.733704Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:28:31.733763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:28:31.733828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:28:31.733858Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:28:31.733933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:28:31.733992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:28:31.734028Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:28:31.734075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:28:31.734116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:28:31.734149Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:28:31.734387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:28:31.734453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:28:31.734508Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:28:31.734697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:28:31.734763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:28:31.734795Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:28:31.734841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:28:31.734898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:28:31.734938Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:28:31.734991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:28:31.735030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:28:31.735060Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:28:31.735192Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:28:31.735245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
:TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=2;operation_id=1; 2026-01-04T02:28:32.557696Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:197;event=remove_operation;operation_id=1; 2026-01-04T02:28:32.558182Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:257;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:28:32.558258Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:28:32.558342Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:28:32.558402Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:28:32.558480Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:28:32.576618Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:28:32.576738Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:28:32.576794Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:28:32.576929Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:28:32.577413Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 1 version: {1767493352629:max} readable: {1767493712629:max} at tablet 9437184 2026-01-04T02:28:32.589481Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 1 at tablet 9437184 2026-01-04T02:28:32.591501Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1767493352629:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=constructor.cpp:17;event=overriden_columns;ids=1,2,3,4,5,6,7,8,9,10,4294967040,4294967041,4294967042,4294967043; 
2026-01-04T02:28:32.594029Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1767493352629:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 5 } Columns { Id: 6 } Columns { Id: 7 } Columns { Id: 8 } Columns { Id: 9 } Columns { Id: 10 } Columns { Id: 4294967040 } Columns { Id: 4294967041 } Columns { Id: 4294967042 } Columns { Id: 4294967043 } } } ; 2026-01-04T02:28:32.594198Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1767493352629:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } Columns { Id: 3 } Columns { Id: 4 } Columns { Id: 5 } Columns { Id: 6 } Columns { Id: 7 } Columns { Id: 8 } Columns { Id: 9 } Columns { Id: 10 } Columns { Id: 4294967040 } Columns { Id: 4294967041 } Columns { Id: 4294967042 } Columns { Id: 4294967043 } } } ; 2026-01-04T02:28:32.596433Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1767493352629:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4},{"from":6},{"from":8},{"from":10},{"from":12},{"from":14},{"from":16},{"from":18},{"from":20},{"from":22},{"from":24},{"from":26},{"from":28}]},{"owner_id":18,"inputs":[{"from":29}]},{"owner_id":2,"inputs":[{"from":29}]},{"owner_id":20,"inputs":[{"from":29}]},{"owner_id":4,"inputs":[{"from":29}]},{"owner_id":22,"inputs":[{"from":29}]},{"owner_id":6,"inputs":[{"from":29}]},{"owner_id":24,"inputs":[{"from":29}]},{"owner_id":8,"inputs":[{"from":29}]},{"owner_id":26,"inputs":[{"from":29}]},{"owner_id":10,"inputs":[{"from":29}]},{"owner_id":28,"inputs":[{"from":29}]},{"owner_id":29,"inputs":[{"from":30}]},{"owner_id":12,"inputs":[{"from":29}]},{"owner_id":30,"inputs":[]},{"owner_id":14,"inputs":[{"from":29}]},{"owner_id":16,"inputs":[{"from":29}]}],"nodes":{"8":{"p":{"i":"4","p":{"address":{"name":"uid","id":4}},"o":"4","t":"AssembleOriginalData"},"w":33,"id":8},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":33,"id":2},"18":{"p":{"i":"9","p":{"address":{"name":"saved_at","id":9}},"o":"9","t":"AssembleOriginalData"},"w":33,"id":18},"0":{"p":{"i":"1,2,3,4,5,6,7,8,9,10,4294967040,4294967041,4294967042,4294967043","t":"Projection"},"w":462,"id":0},"4":{"p":{"i":"2","p":{"address":{"name":"resource_type","id":2}},"o":"2","t":"AssembleOriginalData"},"w":33,"id":4},"20":{"p":{"i":"10","p":{"address":{"name":"request_id","id":10}},"o":"10","t":"AssembleOriginalData"},"w":33,"id":20},"16":{"p":{"i":"8","p":{"address":{"name":"ingested_at","id":8}},"o":"8","t":"AssembleOriginalData"},"w":33,"id":16},"24":{"p":{"i":"4294967041","p":{"address":{"name":"_yql_tx_id","id":4294967041}},"o":"4294967041","t":"AssembleOriginalData"},"w":33,"id":24},"14":{"p":{"i":"7","p":{"address":{"name":"json_payload","id":7}},"o":"7","t":"AssembleOriginalData"},"w":33,"id":14},"10":{"p":{"i":"5","p":{"address":{"name":"level","id":5}},"o":"5","t":"AssembleOriginalData"},"w":33,"id":10},"29":{"p":{"i":"0","p":{"data":[{"name":"_yql_plan_step","id":4294967040},{"name":"_yql_tx_id","id":4294967041},{"name":"timestam
p","id":1},{"name":"_yql_write_id","id":4294967042},{"name":"resource_type","id":2},{"name":"_yql_delete_flag","id":4294967043},{"name":"resource_id","id":3},{"name":"uid","id":4},{"name":"level","id":5},{"name":"message","id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10}]},"o":"4294967040,4294967041,1,4294967042,2,4294967043,3,4,5,6,7,8,9,10","t":"FetchOriginalData"},"w":28,"id":29},"6":{"p":{"i":"3","p":{"address":{"name":"resource_id","id":3}},"o":"3","t":"AssembleOriginalData"},"w":33,"id":6},"30":{"p":{"p":{"data":[{"name":"_yql_plan_step","id":4294967040},{"name":"_yql_tx_id","id":4294967041},{"name":"timestamp","id":1},{"name":"_yql_write_id","id":4294967042},{"name":"resource_type","id":2},{"name":"_yql_delete_flag","id":4294967043},{"name":"resource_id","id":3},{"name":"uid","id":4},{"name":"level","id":5},{"name":"message","id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10}]},"o":"0","t":"ReserveMemory"},"w":0,"id":30},"22":{"p":{"i":"4294967040","p":{"address":{"name":"_yql_plan_step","id":4294967040}},"o":"4294967040","t":"AssembleOriginalData"},"w":33,"id":22},"12":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":33,"id":12},"28":{"p":{"i":"4294967043","p":{"address":{"name":"_yql_delete_flag","id":4294967043}},"o":"4294967043","t":"AssembleOriginalData"},"w":33,"id":28},"26":{"p":{"i":"4294967042","p":{"address":{"name":"_yql_write_id","id":4294967042}},"o":"4294967042","t":"AssembleOriginalData"},"w":33,"id":26}}}; 2026-01-04T02:28:32.602632Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tx_id=18446744073709551615;scan_id=1;gen=0;table=test_olap_table;snapshot={1767493352629:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build metadata;details=Snapshot too old: {1767493352629:max}. CS min read snapshot: {1767493412629:max}. 
now: 2026-01-04T02:28:32.602550Z; 2026-01-04T02:28:32.623922Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767493352629:max} readable: {1767493712629:max} at tablet 9437184 2026-01-04T02:28:32.640023Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-04T02:28:32.640309Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767493352629:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2026-01-04T02:28:32.640413Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767493352629:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 6 } } } ; 2026-01-04T02:28:32.641229Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767493352629:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[{"from":6}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"timestamp","id":1},{"name":"message","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1},{"name":"message","id":6}]},"o":"1,6","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,6","t":"Projection"},"w":18,"id":0}}}; 2026-01-04T02:28:32.643310Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767493352629:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build metadata;details=Snapshot too old: {1767493352629:max}. CS min read snapshot: {1767493412629:max}. 
now: 2026-01-04T02:28:32.643140Z; |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 >> Backup::ProposeBackup >> MonPage::OptionsNoContent [GOOD] >> ActorPage::HttpOk [GOOD] |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestOneSplit [GOOD] |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 |84.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithMergeBack [GOOD] |84.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |84.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |84.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes |84.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/indexes/ydb-core-kqp-ut-indexes >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 >> TCmsTest::DisableMaintenance [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorPage::HttpOk [GOOD] Test command err: 2026-01-04T02:28:05.514968Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327574031249338:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:28:05.515123Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:28:05.597323Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:28:06.408192Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:06.529934Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:28:06.557950Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:28:06.647638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:28:06.647744Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:06.661600Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:28:07.004857Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:28:07.028922Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327574031249230:2081] 1767493685474289 != 1767493685474292 2026-01-04T02:28:07.040130Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:07.326413Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:28:07.326436Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:28:07.326442Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:28:07.326516Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:28:07.943593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:28:08.192410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:28:08.234945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:28:20.916596Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591327641553961810:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:28:20.916665Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:28:21.093913Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:21.305992Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:21.331722Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were 
not loaded 2026-01-04T02:28:21.336219Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591327641553961787:2081] 1767493700914409 != 1767493700914412 2026-01-04T02:28:21.353617Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:28:21.353714Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:21.442458Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:28:21.720628Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:28:21.720669Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:28:21.720680Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:28:21.720764Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:28:21.833012Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:28:21.942889Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:28:23.146279Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:28:23.220445Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:28:23.230487Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DistributedInsertWithoutLocks-Volatile [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:26:37.057828Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:37.181121Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:26:37.181594Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:26:37.181649Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:26:37.470892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:37.471012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:37.542396Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493594279049 != 1767493594279052 2026-01-04T02:26:37.558017Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:37.604344Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:37.721259Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:26:38.058421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:38.074425Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:38.208729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:38.260449Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:26:38.261465Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:26:38.261773Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:26:38.262097Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:26:38.309226Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:26:38.310078Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:26:38.310192Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:26:38.311941Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:26:38.312019Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:26:38.312085Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:26:38.312473Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:26:38.312636Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:26:38.312744Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:26:38.323904Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:26:38.372953Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:26:38.373183Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:26:38.373308Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:26:38.373343Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:26:38.373381Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:26:38.373432Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:38.373596Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:38.373678Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:38.374078Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:26:38.374179Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:26:38.374270Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:26:38.374311Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:26:38.374400Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:26:38.374432Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:26:38.374465Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:26:38.374515Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:26:38.374556Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-04T02:26:38.374698Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:38.374732Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:38.374777Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:26:38.375147Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-04T02:26:38.375185Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:26:38.375286Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:26:38.375554Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:26:38.375604Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:26:38.375708Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:26:38.375771Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:26:38.375822Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:26:38.375865Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T02:26:38.375901Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:26:38.376248Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:26:38.376287Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-04T02:26:38.376318Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:26:38.376351Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-04T02:26:38.376395Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-04T02:26:38.376438Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:26:38.376470Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-04T02:26:38.376524Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-04T02:26:38.376552Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-04T02:26:38.378009Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-04T02:26:38.378064Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:26:38.392547Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:26:38.392633Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474 ... :2784], Recipient [12:918:2782]: {TEvReadSet step# 3000 txid# 1234567890011 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 2} 2026-01-04T02:28:34.239092Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:34.239154Z node 12 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890011 2026-01-04T02:28:34.239245Z node 12 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:28:34.239485Z node 12 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-04T02:28:34.239584Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [12:918:2782], Recipient [12:922:2784]: {TEvReadSet step# 3000 txid# 1234567890011 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 2} 2026-01-04T02:28:34.239619Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:34.239647Z node 12 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 1234567890011 2026-01-04T02:28:34.462598Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [12:1168:2969], Recipient [12:918:2782]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-04T02:28:34.462844Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-04T02:28:34.462953Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2026-01-04T02:28:34.463081Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-04T02:28:34.463143Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2026-01-04T02:28:34.463199Z node 12 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:28:34.463245Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:28:34.463317Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2026-01-04T02:28:34.463370Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-04T02:28:34.463396Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:28:34.463420Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T02:28:34.463446Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2026-01-04T02:28:34.463628Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-04T02:28:34.463994Z node 12 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-04T02:28:34.464068Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[12:1168:2969], 0} after executionsCount# 1 2026-01-04T02:28:34.464162Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[12:1168:2969], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:28:34.464280Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[12:1168:2969], 0} finished in read 2026-01-04T02:28:34.464367Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-04T02:28:34.464395Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T02:28:34.464420Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:28:34.464446Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:28:34.464495Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-04T02:28:34.464548Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:28:34.464581Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037888 has finished 2026-01-04T02:28:34.464635Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T02:28:34.464766Z node 12 
:TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-04T02:28:34.465761Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [12:1168:2969], Recipient [12:918:2782]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:28:34.468254Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-04T02:28:34.468511Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [12:1168:2969], Recipient [12:922:2784]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2026-01-04T02:28:34.468799Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-04T02:28:34.468869Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2026-01-04T02:28:34.468940Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-04T02:28:34.468969Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2026-01-04T02:28:34.469005Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-04T02:28:34.469063Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-04T02:28:34.469125Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037889 2026-01-04T02:28:34.469156Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-04T02:28:34.469188Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-04T02:28:34.469211Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2026-01-04T02:28:34.469234Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2026-01-04T02:28:34.469327Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2026-01-04T02:28:34.469538Z node 12 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-04T02:28:34.469579Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[12:1168:2969], 1} after executionsCount# 1 2026-01-04T02:28:34.469641Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[12:1168:2969], 1} sends rowCount# 2, bytes# 64, quota 
rows left# 997, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:28:34.469717Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[12:1168:2969], 1} finished in read 2026-01-04T02:28:34.469769Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-04T02:28:34.469793Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2026-01-04T02:28:34.469838Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2026-01-04T02:28:34.469865Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2026-01-04T02:28:34.469911Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-04T02:28:34.469933Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2026-01-04T02:28:34.469966Z node 12 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037889 has finished 2026-01-04T02:28:34.469999Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-04T02:28:34.470077Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-04T02:28:34.470873Z node 12 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [12:1168:2969], Recipient [12:922:2784]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-04T02:28:34.470929Z node 12 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 2 } items { int32_value: 1003 } }, { items { int32_value: 11 } items { int32_value: 1002 } }, { items { int32_value: 12 } items { int32_value: 1004 } } |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest >> Backup::ProposeBackup [GOOD] >> EvWrite::AbortInTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> MonPage::OptionsNoContent [GOOD] Test command err: 2026-01-04T02:28:07.768053Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:28:07.768165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:07.900382Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:07.900537Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:28:07.983777Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327577929962950:2081] 1767493686916339 != 1767493686916342 2026-01-04T02:28:07.987203Z node 1 
:IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:07.999428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:28:08.105818Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:28:08.117887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:28:08.368549Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:28:08.368576Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:28:08.368583Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:28:08.368660Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:28:09.177185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:28:09.184705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:28:09.323776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:28:09.356875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:28:09.382311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:28:21.246027Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:21.246201Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:28:21.530062Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:21.568196Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2026-01-04T02:28:21.568730Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:21.599345Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591327644165877590:2081] 1767493701118517 != 1767493701118520 2026-01-04T02:28:21.628950Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:21.638015Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:28:22.038022Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:28:22.038044Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:28:22.038053Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:28:22.038151Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:28:22.120917Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:28:22.197564Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:28:22.959341Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:28:23.179134Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:28:23.196617Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:28:23.247687Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/solomon/reading/py3test >> listing_paging.py::TestListingPaging::test_listing_paging_monitoring [GOOD] >> LocalTableWriter::ConsistentWrite |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest |84.7%| [TM] {BAZEL_UPLOAD} ydb/tests/solomon/reading/py3test >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-29 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::DisableMaintenance [GOOD] Test command err: 2026-01-04T02:28:10.514672Z node 2 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvGetConfigRequest { }, response# NKikimr::NCms::TEvCms::TEvGetConfigResponse { Status { Code: OK } Config { DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 60 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 FaultyPDisksThresholdPerNode: 0 PileRatio: 50 StateStorageSelfHealConfig { Enable: false NodeGoodStateLimit: 10 NodePrettyGoodStateLimit: 7 NodeBadStateLimit: 10 WaitForConfigStep: 60000000 RelaxTime: 600000000 PileupReplicas: false OverrideReplicasInRingCount: 0 OverrideRingsCount: 0 ReplicasSpecificVolume: 200 } InitialDeploymentGracePeriod: 600000000 } DisableMaintenance: false } } 2026-01-04T02:28:10.515168Z node 2 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2026-01-04T02:28:10.546682Z node 2 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-01-04T02:28:10.546872Z node 2 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:00Z 2026-01-04T02:28:10.548990Z node 2 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 2 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 3 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 4 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" 
Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 5 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-6-6" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 6 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-7-7" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 7 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-8-8" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 8 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-9-9" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 9 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120110512 } } 2026-01-04T02:28:10.549792Z node 2 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-2-2" State: UP Timestamp: 120110512 } Timestamp: 120110512 
NodeId: 2 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-3-3" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 3 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-4-4" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 4 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-5-5" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 5 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-6-6" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 6 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-7-7" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 7 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-8-8" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 8 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { 
Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-9-9" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 9 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120110512 } 2026-01-04T02:28:10.559993Z node 2 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 120.083512s 2026-01-04T02:28:10.560055Z node 2 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-04T02:28:10.560215Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2026-01-04T02:28:10.560281Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2026-01-04T02:28:10.560309Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2026-01-04T02:28:10.560334Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2026-01-04T02:28 ... 6: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-04T02:28:22.612761Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-04T02:28:22.612831Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-04T02:28:22.612885Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-04T02:28:22.612991Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-04T02:28:22.613074Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 240110 2026-01-04T02:28:22.613137Z node 2 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 
2026-01-04T02:28:22.613549Z node 2 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 2:2, status# FAULTY, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 3 StateLimit# 1, dry run# 0 2026-01-04T02:28:22.613658Z node 2 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-01-04T02:28:22.622179Z node 2 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2026-01-04T02:28:22.622719Z node 2 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 2 2026-01-04T02:28:22.622829Z node 2 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 2:2 2026-01-04T02:28:22.644681Z node 2 :CMS DEBUG: cms_tx_log_and_send.cpp:27: TTxLogAndSend Complete 2026-01-04T02:28:22.677037Z node 2 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-01-04T02:28:22.677158Z node 2 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-01-04T02:28:22.677247Z node 2 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:04:00Z 2026-01-04T02:28:22.689159Z node 2 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 2 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-04T02:28:22.689313Z node 2 :CMS DEBUG: cms.cpp:397: Checking action: Type: RESTART_SERVICES Host: "2" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 2 has not yet been completed" } 2026-01-04T02:28:22.689388Z node 2 :CMS DEBUG: cms.cpp:416: Result: ERROR (reason: Evict vdisks is disabled in Sentinel (self heal)) 2026-01-04T02:28:22.689564Z node 2 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2026-01-04T02:28:22.689774Z node 2 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-04T02:28:22.706745Z node 2 :CMS DEBUG: cms_tx_store_permissions.cpp:141: TTxStorePermissions complete 2026-01-04T02:28:22.707024Z node 2 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ERROR Reason: "Evict vdisks is disabled in Sentinel (self heal)" } RequestId: "user-r-1" } 2026-01-04T02:28:22.707699Z node 2 :CMS DEBUG: cms_tx_update_config.cpp:23: TTxUpdateConfig Execute 2026-01-04T02:28:22.723159Z node 2 :CMS DEBUG: cms_tx_update_config.cpp:37: TTxUpdateConfig Complete 2026-01-04T02:28:22.723453Z node 2 :CMS DEBUG: cms_tx_update_config.cpp:44: Updated config: DefaultRetryTime: 300000000 DefaultPermissionDuration: 300000000 TenantLimits { DisabledNodesRatioLimit: 0 } ClusterLimits { DisabledNodesRatioLimit: 0 } InfoCollectionTimeout: 15000000 LogConfig { DefaultLevel: ENABLED TTL: 1209600000000 } SentinelConfig { Enable: true UpdateConfigInterval: 3600000000 RetryUpdateConfig: 60000000 UpdateStateInterval: 
60000000 UpdateStateTimeout: 45000000 RetryChangeStatus: 10000000 ChangeStatusRetries: 5 DefaultStateLimit: 1 DataCenterRatio: 50 RoomRatio: 70 RackRatio: 90 DryRun: false EvictVDisksStatus: FAULTY GoodStateLimit: 5 FaultyPDisksThresholdPerNode: 0 PileRatio: 50 StateStorageSelfHealConfig { Enable: false NodeGoodStateLimit: 10 NodePrettyGoodStateLimit: 7 NodeBadStateLimit: 10 WaitForConfigStep: 60000000 RelaxTime: 600000000 PileupReplicas: false OverrideReplicasInRingCount: 0 OverrideRingsCount: 0 ReplicasSpecificVolume: 200 } InitialDeploymentGracePeriod: 600000000 } DisableMaintenance: false 2026-01-04T02:28:28.529790Z node 2 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-04T02:28:28.529925Z node 2 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-04T02:28:28.530165Z node 2 :CMS DEBUG: cms.cpp:1178: Running CleanupWalleTasks 2026-01-04T02:28:28.530501Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2026-01-04T02:28:28.530571Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2026-01-04T02:28:28.530610Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2026-01-04T02:28:28.530651Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2026-01-04T02:28:28.530683Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2026-01-04T02:28:28.530709Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2026-01-04T02:28:28.530734Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2026-01-04T02:28:28.530766Z node 2 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-01-04T02:28:28.531022Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 2 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-2.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-04T02:28:28.531715Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 3 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-3.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-04T02:28:28.531937Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-04T02:28:28.532025Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 
2026-01-04T02:28:28.532104Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-04T02:28:28.532176Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-04T02:28:28.532231Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-04T02:28:28.532301Z node 2 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 300110 2026-01-04T02:28:28.532365Z node 2 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-04T02:28:28.532625Z node 2 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 2:2, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2026-01-04T02:28:28.532704Z node 2 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-01-04T02:28:28.532984Z node 2 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2026-01-04T02:28:28.533232Z node 2 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 3 2026-01-04T02:28:28.533280Z node 2 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 2:2 2026-01-04T02:28:30.994002Z node 14 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008108s 2026-01-04T02:28:31.066006Z node 10 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.008209s 2026-01-04T02:28:31.079083Z node 17 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639240 Duration# 0.005320s 2026-01-04T02:28:34.576732Z node 30 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.031466s 2026-01-04T02:28:34.966117Z node 20 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.009930s |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> LocalTableWriter::WriteTable >> EvWrite::AbortInTransaction [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 >> IndexBuildTest::RejectsCreate >> VectorIndexBuildTest::CreateAndDrop >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_9_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 9] [GOOD] >> Cdc::RacyRebootAndSplitWithTxInflight [GOOD] >> 
Cdc::RacyActivateAndEnqueue |84.7%| [TA] $(B)/ydb/tests/solomon/reading/test-results/py3test/{meta.json ... results_accumulator.log} >> DataShardVolatile::DistributedWriteThenDropTable |84.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/solomon/reading/test-results/py3test/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 >> TWebLoginService::AuditLogEmptySIDsLoginSuccess >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] |84.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query |84.7%| [TA] {RESULT} $(B)/ydb/tests/solomon/reading/test-results/py3test/{meta.json ... results_accumulator.log} |84.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/query/ydb-core-kqp-ut-query >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::AbortInTransaction [GOOD] Test command err: 2026-01-04T02:28:36.872194Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:28:36.906299Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:28:36.906580Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:28:36.915399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:28:36.915643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:28:36.915901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:28:36.916038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:28:36.916186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:28:36.916283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:28:36.916388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:28:36.916507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:28:36.916624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:28:36.916745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:28:36.916857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:28:36.916969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:28:36.917083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:28:36.962642Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:28:36.962871Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:28:36.962933Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:28:36.963131Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:28:36.963310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:28:36.963395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:28:36.963441Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:28:36.963564Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:28:36.963640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:28:36.963687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:28:36.963721Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:28:36.963912Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:28:36.963980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:28:36.964023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:28:36.964055Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:28:36.964174Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:28:36.964237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:28:36.964306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:28:36.964345Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:28:36.964408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:28:36.964455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:28:36.964485Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:28:36.964535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:28:36.964579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:28:36.964610Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:28:36.964867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:28:36.964932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:28:36.964970Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:28:36.965096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:28:36.965145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:28:36.965177Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:28:36.965243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:28:36.965304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:28:36.965350Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:28:36.965403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:28:36.965450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:28:36.974211Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:28:36.974487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:28:36.974568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[5] execute at tablet 9437184 2026-01-04T02:28:40.619473Z node 2 :TX_COLUMNSHARD ERROR: ctor_logger.h:56: TxPlanStep[5] Ignore old txIds [112] for step 1767493720782 last planned step 1767493720782 at tablet 9437184 2026-01-04T02:28:40.634768Z node 2 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tx_id=0;lock_id=222;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=2;operation_id=1; 2026-01-04T02:28:40.634851Z node 2 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tx_id=0;lock_id=222;fline=manager.cpp:197;event=remove_operation;operation_id=1; 2026-01-04T02:28:40.634936Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[5] complete at tablet 9437184 2026-01-04T02:28:40.635387Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767493720782:max} readable: {1767493720782:max} at tablet 9437184 2026-01-04T02:28:40.635543Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-04T02:28:40.635811Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767493720782:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2026-01-04T02:28:40.635908Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767493720782:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } Columns { Id: 2 } } } ; 2026-01-04T02:28:40.636842Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767493720782:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":4,"inputs":[{"from":5}]},{"owner_id":5,"inputs":[{"from":6}]},{"owner_id":6,"inputs":[]}],"nodes":{"2":{"p":{"i":"1","p":{"address":{"name":"key","id":1}},"o":"1","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"0","p":{"data":[{"name":"key","id":1},{"name":"field","id":2}]},"o":"1,2","t":"FetchOriginalData"},"w":4,"id":5},"4":{"p":{"i":"2","p":{"address":{"name":"field","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"1,2","t":"Projection"},"w":18,"id":0}}}; 2026-01-04T02:28:40.639091Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767493720782:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-04T02:28:40.656324Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[2:112:2142];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767493720782:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[2:184:2196];trace_detailed=; 2026-01-04T02:28:40.657323Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1,2;column_names=field,key;);; 2026-01-04T02:28:40.657567Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; 2026-01-04T02:28:40.662094Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:28:40.662461Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:28:40.662778Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:184:2196];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:28:40.663002Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:184:2196];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:28:40.663208Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:184:2196];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:28:40.663458Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:184:2196] finished for tablet 9437184 2026-01-04T02:28:40.664086Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:184:2196];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:183:2195];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.006},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.007}],"full":{"a":4874703,"name":"_full_task","f":4874703,"d_finished":0,"c":0,"l":4882294,"d":7591},"events":[{"name":"bootstrap","f":4875198,"d_finished":6027,"c":1,"l":4881225,"d":6027},{"a":4881437,"name":"ack","f":4881437,"d_finished":0,"c":0,"l":4882294,"d":857},{"a":4881407,"name":"processing","f":4881407,"d_finished":0,"c":0,"l":4882294,"d":887},{"name":"ProduceResults","f":4876475,"d_finished":5166,"c":2,"l":4881941,"d":5166},{"a":4881949,"name":"Finish","f":4881949,"d_finished":0,"c":0,"l":4882294,"d":345}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:28:40.664211Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:184:2196];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:183:2195];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:28:40.664747Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:184:2196];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:183:2195];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.006},{"events":["l_ProduceResults","f_Finish"],"t":0.007},{"events":["l_ack","l_processing","l_Finish"],"t":0.008}],"full":{"a":4874703,"name":"_full_task","f":4874703,"d_finished":0,"c":0,"l":4882958,"d":8255},"events":[{"name":"bootstrap","f":4875198,"d_finished":6027,"c":1,"l":4881225,"d":6027},{"a":4881437,"name":"ack","f":4881437,"d_finished":0,"c":0,"l":4882958,"d":1521},{"a":4881407,"name":"processing","f":4881407,"d_finished":0,"c":0,"l":4882958,"d":1551},{"name":"ProduceResults","f":4876475,"d_finished":5166,"c":2,"l":4881941,"d":5166},{"a":4881949,"name":"Finish","f":4881949,"d_finished":0,"c":0,"l":4882958,"d":1009}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:28:40.664851Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:184:2196];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:28:40.639054Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-04T02:28:40.664906Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:184:2196];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:28:40.665050Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:184:2196];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:49;event=insert_to_cache;key=string;records=0;size=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> THealthCheckTest::YellowGroupIssueWhenPartialGroupStatus [GOOD] >> THealthCheckTest::TestTabletIsDead >> VectorIndexBuildTest::RecreatedColumns >> TWebLoginService::AuditLogEmptySIDsLoginSuccess [GOOD] >> TWebLoginService::AuditLogAdminLoginSuccess |84.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut |84.7%| [LD] {RESULT} 
$(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mon/audit/ut/ydb-core-mon-audit-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 >> Cdc::UpdateRetentionPeriod [GOOD] >> Cdc::SupportedTypes >> test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 >> TSchemeShardLoginTest::DisableBuiltinAuthMechanism [GOOD] >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup >> TWebLoginService::AuditLogAdminLoginSuccess [GOOD] >> TWebLoginService::AuditLogLdapLoginBadPassword ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandaloneExoticTypes [GOOD] Test command err: 2026-01-04T02:28:31.763331Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:28:31.801662Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:28:31.801937Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:28:31.809409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:28:31.809712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:28:31.810015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:28:31.810139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:28:31.810278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:28:31.810374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:28:31.810532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:28:31.810669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:28:31.810785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:28:31.810905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:28:31.811042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:28:31.811167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:28:31.811271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:28:31.841734Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:28:31.841881Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:28:31.841963Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:28:31.842147Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:28:31.842289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:28:31.842389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:28:31.842437Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:28:31.842528Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:28:31.842584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:28:31.842636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:28:31.842694Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:28:31.842885Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:28:31.842947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:28:31.842998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:28:31.843034Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:28:31.843130Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:28:31.843182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:28:31.843222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:28:31.843251Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:28:31.843302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:28:31.843330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:28:31.843351Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:28:31.843382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:28:31.843412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:28:31.843432Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:28:31.843602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:28:31.843684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:28:31.843732Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:28:31.843900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:28:31.843936Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:28:31.843956Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:28:31.843989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:28:31.844017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:28:31.844047Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:28:31.844084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:28:31.844403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:28:31.844441Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:28:31.844560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:28:31.844593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
lude":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts
":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{
"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"p
k":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; 2026-01-04T02:28:42.603449Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> IndexBuildTest::RejectsCreate [GOOD] >> IndexBuildTest::RejectsCreateUniq |84.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut |84.7%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut >> Cdc::HugeKeyDebezium [GOOD] >> Cdc::Drop[PqRunner] |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/ydb-core-kqp-ut-federated_query-generic_ut >> LocalTableWriter::ConsistentWrite [GOOD] >> CrossShardUniqIndexValidationTest::Validation [GOOD] >> FulltextIndexBuildTest::Basic >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_5_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 5] [GOOD] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_3_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 3] [GOOD] |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test |84.7%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test |84.7%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> LocalTableWriter::WriteTable [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-30 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 |84.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing |84.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/ydb-core-tx-datashard-ut_object_storage_listing >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 [FAIL] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 >> TSchemeShardLoginTest::FailedLoginUserUnderNameOfGroup [GOOD] >> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters >> TWebLoginService::AuditLogLdapLoginBadPassword [GOOD] >> TWebLoginService::AuditLogLdapLoginBadBind >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ConsistentWrite [GOOD] Test command err: 2026-01-04T02:28:40.048666Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327723464103091:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:28:40.048749Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:28:40.144487Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:28:40.881491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:28:40.881596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:40.895218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:28:41.226919Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:41.262749Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327719169135558:2081] 1767493719969029 != 1767493719969032 2026-01-04T02:28:41.262835Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:41.262931Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:28:41.340101Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:28:41.559211Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:28:42.050329Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:28:42.050366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:28:42.050375Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:28:42.050444Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:28:43.045709Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:28:43.070480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:28:43.614015Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handshake: worker# [1:7591327736349005591:2498] 2026-01-04T02:28:43.614353Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:28:43.614597Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-04T02:28:43.614633Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Send handshake: worker# [1:7591327736349005591:2498] 2026-01-04T02:28:43.630040Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 
1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-04T02:28:43.647672Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2026-01-04T02:28:43.647867Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 },{ Order: 2 BodySize: 48 },{ Order: 3 BodySize: 48 }] } 2026-01-04T02:28:43.648022Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591327736349005686:2559] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-04T02:28:43.648058Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:28:43.648162Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591327736349005686:2559] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 2 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 3 Group: 0 Step: 3 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-01-04T02:28:43.665974Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591327736349005686:2559] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-04T02:28:43.666066Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:28:43.666139Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } 2026-01-04T02:28:43.666766Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-04T02:28:43.667196Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ 
Codec: RAW Data: 49b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-04T02:28:43.667686Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } VersionTxIds { Version { Step: 30 TxId: 0 } TxId: 3 } 2026-01-04T02:28:43.667790Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 5 BodySize: 49 },{ Order: 6 BodySize: 49 },{ Order: 7 BodySize: 49 },{ Order: 8 BodySize: 49 }] } 2026-01-04T02:28:43.667916Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591327736349005686:2559] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 6 Group: 0 Step: 12 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 7 Group: 0 Step: 21 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 8 Group: 0 Step: 22 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2026-01-04T02:28:43.674406Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591327736349005686:2559] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-04T02:28:43.674467Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:28:43.674507Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [5,6,7,8] } 2026-01-04T02:28:43.674988Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-04T02:28:43.675167Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 9 BodySize: 49 },{ Order: 10 BodySize: 49 }] } 2026-01-04T02:28:43.675262Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591327736349005686:2559] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 9 Group: 0 Step: 13 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 10 Group: 0 Step: 23 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2026-01-04T02:28:43.677183Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591327736349005686:2559] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-04T02:28:43.677243Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:28:43.677293Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [9,10] } 2026-01-04T02:28:43.677844Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327736349005683:2559] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 19b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest >> KqpResultSetFormats::ArrowFormat_Compression_ZSTD [GOOD] >> KqpResultSetFormats::ArrowFormat_Compression_LZ4_FRAME >> TResourceBroker::TestQueueWithConfigure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WriteTable [GOOD] Test command err: 2026-01-04T02:28:40.799422Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327725821718337:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:28:40.799477Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:28:41.541143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:28:41.541238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:41.541937Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:41.586575Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:28:41.839903Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:28:41.857974Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327725821718195:2081] 1767493720701393 
!= 1767493720701396 2026-01-04T02:28:41.929548Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:41.953921Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:28:42.634526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:28:42.634578Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:28:42.634592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:28:42.634677Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:28:43.277326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:28:43.299949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:28:43.526805Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327738706621010:2552] Handshake: worker# [1:7591327738706620920:2492] 2026-01-04T02:28:43.527165Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327738706621010:2552] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:28:43.527405Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327738706621010:2552] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : 
NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-04T02:28:43.527440Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327738706621010:2552] Send handshake: worker# [1:7591327738706620920:2492] 2026-01-04T02:28:43.534104Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327738706621010:2552] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 35b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 23b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-04T02:28:43.535258Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327738706621010:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 },{ Order: 2 BodySize: 35 },{ Order: 3 BodySize: 23 }] } 2026-01-04T02:28:43.535427Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591327738706621013:2552] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-04T02:28:43.535465Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327738706621010:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:28:43.535548Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591327738706621013:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 35b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 23b }] } 2026-01-04T02:28:43.539851Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591327738706621013:2552] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-04T02:28:43.539908Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327738706621010:2552] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:28:43.539955Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591327738706621010:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } |84.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_7_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 7] [GOOD] |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest |84.7%| 
[TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> TResourceBroker::TestQueueWithConfigure [GOOD] >> TResourceBroker::TestOverusageDifferentResources >> TWebLoginService::AuditLogLdapLoginBadBind [GOOD] >> TWebLoginService::AuditLogCreateModifyUser >> IndexBuildTest::RejectsCreateUniq [GOOD] >> IndexBuildTest::RejectsDropIndex >> DataShardVolatile::DistributedWriteThenDropTable [GOOD] >> DataShardVolatile::DistributedWriteThenImmediateUpsert >> IndexBuildTest::CancellationNotEnoughRetries >> TResourceBrokerConfig::UpdateQueues [GOOD] >> TResourceBrokerConfig::DefaultConfig >> TResourceBrokerConfig::DefaultConfig [GOOD] >> TResourceBroker::TestOverusageDifferentResources [GOOD] >> FulltextIndexBuildTest::Basic [GOOD] >> FulltextIndexBuildTest::FlatRelevance >> TWebLoginService::AuditLogCreateModifyUser [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 >> VectorIndexBuildTest::CreateAndDrop [GOOD] >> VectorIndexBuildTest::Metering_CommonDB >> TSchemeShardLoginTest::ChangeAcceptablePasswordParameters [GOOD] >> TSchemeShardLoginTest::ChangeAccountLockoutParameters >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestOverusageDifferentResources [GOOD] Test command err: 2026-01-04T02:28:51.359685Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'compaction1\' uses unknown queue \'queue_default1\'" 2026-01-04T02:28:51.359885Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'unknown\' is required" 2026-01-04T02:28:51.360077Z node 1 :RESOURCE_BROKER ERROR: resource_broker.cpp:1240: Configure result: Success: false Message: "task \'unknown\' uses unknown queue \'queue_default\'" |84.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |84.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |84.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots |84.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sysview_reboots/ydb-core-tx-schemeshard-ut_sysview_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::DefaultConfig [GOOD] Test command err: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 1073741824 } } Queues { Name: "queue_cs_general" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: "queue_scan" Weight: 100 
Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 10737418240 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_system_tablet_backup" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: "queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: "queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: "queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 5000000 } Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: "queue_statistics_scan" DefaultDuration: 600000000 } Tasks { Name: "system_tablet_backup" QueueName: "queue_system_tablet_backup" DefaultDuration: 60000000 } ResourceLimit { Cpu: 256 Memory: 17179869184 } Total queues cpu: 90 |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::UpdateTasks [GOOD] >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] >> KqpScanArrowInChanels::AggregateWithFunction [GOOD] >> KqpScanArrowInChanels::AggregateEmptySum >> 
KqpScanArrowFormat::AggregateWithFunction [GOOD] >> KqpScanArrowFormat::AggregateEmptySum ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogCreateModifyUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:28:44.354682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:28:44.354793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:28:44.354850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:28:44.354896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:28:44.354940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:28:44.354975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:28:44.355064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:28:44.355149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:28:44.356065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:28:44.356410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:28:44.766587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:28:44.766664Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:44.807338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:28:44.815971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:28:44.816252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:28:44.848835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:28:44.849115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:28:44.849869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been 
configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:28:44.850155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:28:44.866706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:44.866935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:28:44.868328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:28:44.868406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:44.868580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:28:44.868652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:28:44.868703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:28:44.868897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:28:44.911243Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:241:2058] recipient: [1:15:2062] 2026-01-04T02:28:45.347918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:28:45.348223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:28:45.348429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:28:45.348476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:28:45.348760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:28:45.348828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:28:45.351523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: 
TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:28:45.351783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:28:45.352030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:28:45.352159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:28:45.352210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:28:45.352271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:28:45.354569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:28:45.354622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:28:45.354660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:28:45.356504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:28:45.356576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:28:45.356636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:28:45.356706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:28:45.360962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:28:45.363317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:28:45.363523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:28:45.364667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:28:45.364815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:28:45.364868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:28:45.365181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:28:45.365239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:28:45.365436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:28:45.365535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:28:45.368341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:28:45.368408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... ration: MODIFY USER, path: /MyRoot 2026-01-04T02:28:52.455268Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:28:52.455317Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:28:52.455557Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:52.455619Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:210:2210], at schemeshard: 72057594046678944, txId: 105, path id: 1 2026-01-04T02:28:52.456307Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:28:52.456430Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:28:52.456477Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:28:52.456522Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-04T02:28:52.456580Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove 
publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:28:52.456710Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-01-04T02:28:52.459120Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 AUDIT LOG buffer(6): 2026-01-04T02:28:52.365732Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T02:28:52.396291Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2026-01-04T02:28:52.415422Z: component=schemeshard, tx_id=102, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2026-01-04T02:28:52.427204Z: component=schemeshard, tx_id=103, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[blocking] 2026-01-04T02:28:52.439035Z: component=schemeshard, tx_id=104, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[unblocking] 2026-01-04T02:28:52.451355Z: component=schemeshard, tx_id=105, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] AUDIT LOG checked line: 2026-01-04T02:28:52.451355Z: component=schemeshard, tx_id=105, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2026-01-04T02:28:52.462875Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "user1" Password: "password1" CanLogin: false } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:28:52.468563Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-01-04T02:28:52.468733Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-04T02:28:52.468778Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-04T02:28:52.468828Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 
1/1 2026-01-04T02:28:52.468870Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-04T02:28:52.468941Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:28:52.469012Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2026-01-04T02:28:52.469058Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-04T02:28:52.469103Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-01-04T02:28:52.469144Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2026-01-04T02:28:52.469222Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-04T02:28:52.472747Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:28:52.472915Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY USER, path: /MyRoot 2026-01-04T02:28:52.473155Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:28:52.473212Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:28:52.473432Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:52.473481Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:210:2210], at schemeshard: 72057594046678944, txId: 106, path id: 1 2026-01-04T02:28:52.474180Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2026-01-04T02:28:52.474304Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 106 2026-01-04T02:28:52.474364Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2026-01-04T02:28:52.474408Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-04T02:28:52.474479Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:28:52.474604Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2026-01-04T02:28:52.477561Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 AUDIT LOG buffer(7): 2026-01-04T02:28:52.365732Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T02:28:52.396291Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2026-01-04T02:28:52.415422Z: component=schemeshard, tx_id=102, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2026-01-04T02:28:52.427204Z: component=schemeshard, tx_id=103, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[blocking] 2026-01-04T02:28:52.439035Z: component=schemeshard, tx_id=104, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[unblocking] 2026-01-04T02:28:52.451355Z: component=schemeshard, tx_id=105, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password] 2026-01-04T02:28:52.468411Z: component=schemeshard, tx_id=106, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password, blocking] AUDIT LOG checked line: 2026-01-04T02:28:52.468411Z: component=schemeshard, tx_id=106, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=MODIFY USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1, login_user_change=[password, blocking] |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfProducer [GOOD] >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer >> IndexBuildTest::RejectsDropIndex [GOOD] >> IndexBuildTest::RejectsDropIndexUniq |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerConfig::UpdateResourceLimit [GOOD] |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TabletState::SeqNoSubscribeOutOfOrder >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 >> 
TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] >> TabletState::SeqNoSubscribeOutOfOrder [GOOD] >> Cdc::RacyActivateAndEnqueue [GOOD] >> Cdc::RacyCreateAndSend >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodesAndRebootOfConsumer [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [3:166:2058] recipient: [3:164:2140] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [3:166:2058] recipient: [3:164:2140] Leader for TabletID 9437184 is [3:172:2144] sender: [3:173:2058] recipient: [3:164:2140] Leader for TabletID 9437185 is [0:0:0] sender: [4:174:2049] recipient: [4:167:2097] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [4:174:2049] recipient: [4:167:2097] Leader for TabletID 9437185 is [4:188:2100] sender: [4:189:2049] recipient: [4:167:2097] Leader for TabletID 9437184 is [3:172:2144] sender: [3:216:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:188:2100] sender: [3:218:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:188:2100] sender: [4:220:2049] recipient: [4:45:2053] Leader for TabletID 9437185 is [4:188:2100] sender: [3:223:2058] recipient: [3:15:2062] Leader for TabletID 9437185 is [4:188:2100] sender: [4:221:2049] recipient: [4:161:2096] Leader for TabletID 9437185 is [4:188:2100] sender: [4:226:2049] recipient: [4:225:2113] Leader for TabletID 9437185 is [4:227:2114] sender: [4:228:2049] recipient: [4:225:2113] Leader for TabletID 9437185 is [4:227:2114] sender: [3:258:2058] recipient: [3:15:2062] |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TabletState::SeqNoSubscribeOutOfOrder [GOOD] |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TBlockBlobStorageTest::DelayedErrorsNotIgnored >> TTabletCountersPercentile::SingleBucket [GOOD] >> TTabletCountersPercentile::StartFromZero >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-31 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 >> TTabletCountersPercentile::StartFromZero [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 >> TFlatMetrics::MaximumValue3 [GOOD] >> TFlatMetrics::MaximumValue4 [GOOD] |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::StartFromZero [GOOD] |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed >> VectorIndexBuildTest::RecreatedColumns [GOOD] >> VectorIndexBuildTest::SimpleDuplicates-Overlap-false >> FulltextIndexBuildTest::FlatRelevance [GOOD] >> IndexBuildTest::BaseCase >> TAuditTest::AuditDisabledWithoutAppData [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamed [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue4 [GOOD] |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |84.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |84.8%| [LD] {RESULT} 
$(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |84.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap/ydb-core-tx-schemeshard-ut_olap |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::AuditDisabledWithoutAppData [GOOD] >> IndexBuildTest::RejectsDropIndexUniq [GOOD] >> IndexBuildTest::RejectsCancelUniq >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> TBlockBlobStorageTest::DelayedErrorsNotIgnored [GOOD] >> TFlatMetrics::DecayingAverageAvg [GOOD] >> Cdc::Drop[PqRunner] [GOOD] >> Cdc::Drop[YdsRunner] >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedNoOverflowCheck [GOOD] |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::DecayingAverageAvg [GOOD] Test command err: ... waiting for all block results ... passing block result OK for [1:105:2138] ... blocking block result NO_GROUP for [1:106:2138] ... blocking block result NO_GROUP for [1:107:2138] ... blocking block result NO_GROUP for [1:108:2138] |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 >> DataShardVolatile::DistributedWriteThenImmediateUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenSplit >> VectorIndexBuildTest::Metering_CommonDB [GOOD] >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-false >> GenericFederatedQuery::ClickHouseManagedSelectAll |84.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |84.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql |84.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/yql/ydb-core-kqp-ut-yql >> KqpQuery::SelectWhereInSubquery |84.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |84.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut |84.8%| [LD] {RESULT} $(B)/ydb/core/external_sources/s3/ut/ydb-core-external_sources-s3-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 >> AnalyzeColumnshard::AnalyzeEmptyTable [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_5_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 5] [GOOD] |84.8%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 [FAIL] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 >> ObjectStorageListingTest::ListingNoFilter |84.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing >> GenericFederatedQuery::IcebergHiveSaSelectAll >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-32 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-false [GOOD] >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS [GOOD] Test command err: 2026-01-04T02:26:19.327951Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:26:19.329588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:26:19.329629Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:19.339509Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:26:19.339809Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:26:19.340053Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:26:19.389051Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:26:19.396350Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:26:19.396438Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:26:19.398032Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:26:19.398103Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:26:19.398168Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:26:19.398550Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:26:19.400031Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:26:19.400110Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:26:19.478518Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:26:19.530487Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:26:19.530689Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast 
registration request in state WaitScheme: missing processing params 2026-01-04T02:26:19.530786Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:26:19.530836Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:26:19.530917Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:26:19.530958Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:26:19.531180Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:19.531251Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:19.531555Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:26:19.531657Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:26:19.531728Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:26:19.531765Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:26:19.531800Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:26:19.531832Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:26:19.531882Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:26:19.531932Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:26:19.531974Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:26:19.532063Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:19.532096Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:19.532164Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:26:19.539586Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:26:19.539677Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:26:19.539804Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 
2026-01-04T02:26:19.539973Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:26:19.540020Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:26:19.540069Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:26:19.540168Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:26:19.540213Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:26:19.540269Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:26:19.540309Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:26:19.540601Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:26:19.540633Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:26:19.540660Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:26:19.540684Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:26:19.540723Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:26:19.540755Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:26:19.540780Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:26:19.540806Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:26:19.540832Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:26:19.570158Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:26:19.570262Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:26:19.570299Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:26:19.570340Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:26:19.570398Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:26:19.570984Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:19.571069Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:19.571109Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:26:19.571237Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:26:19.571273Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:26:19.571413Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:26:19.571463Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:26:19.571496Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:26:19.571541Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:26:19.583135Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:26:19.583226Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:26:19.583502Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:19.583553Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:19.583619Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:26:19.583658Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:26:19.583690Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:26:19.583742Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:26:19.583788Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
349:2316]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2026-01-04T02:28:59.319433Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.319466Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2026-01-04T02:28:59.319556Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2026-01-04T02:28:59.319592Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.319628Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2026-01-04T02:28:59.319725Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2026-01-04T02:28:59.319759Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.319794Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2026-01-04T02:28:59.319883Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2026-01-04T02:28:59.319917Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.319971Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2026-01-04T02:28:59.320058Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2026-01-04T02:28:59.320094Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.320127Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2026-01-04T02:28:59.320235Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2026-01-04T02:28:59.320270Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.320304Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2026-01-04T02:28:59.320396Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], 
Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2026-01-04T02:28:59.320436Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.320470Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2026-01-04T02:28:59.320584Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2026-01-04T02:28:59.320620Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.320653Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2026-01-04T02:28:59.320716Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2026-01-04T02:28:59.320748Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.320780Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2026-01-04T02:28:59.320872Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2026-01-04T02:28:59.320905Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.320937Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2026-01-04T02:28:59.321027Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2026-01-04T02:28:59.321061Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.321098Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2026-01-04T02:28:59.321216Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2026-01-04T02:28:59.321250Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.321285Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2026-01-04T02:28:59.321350Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender 
[32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2026-01-04T02:28:59.321382Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.321416Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2026-01-04T02:28:59.321535Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2026-01-04T02:28:59.321572Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.321605Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2026-01-04T02:28:59.321672Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2026-01-04T02:28:59.321705Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.321739Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2026-01-04T02:28:59.329615Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2026-01-04T02:28:59.329722Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.329774Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2026-01-04T02:28:59.330060Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2026-01-04T02:28:59.330111Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.330148Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2026-01-04T02:28:59.330251Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2026-01-04T02:28:59.330289Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.330324Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2026-01-04T02:28:59.330423Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 
269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2026-01-04T02:28:59.330457Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.330490Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2026-01-04T02:28:59.330584Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-01-04T02:28:59.330622Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:28:59.330657Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 29 23 30 30 31 29 31 31 28 25 31 29 31 25 23 25 27 25 29 24 24 24 29 26 25 24 25 25 25 - - - actual 29 23 30 30 31 29 31 31 28 25 31 29 31 25 23 25 27 25 29 24 24 24 29 26 25 24 25 25 25 - - - interm 1 5 4 4 4 5 4 3 6 6 5 5 6 3 - 3 - 6 6 - 6 - 5 6 - - - - - - - - ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeEmptyTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:25:59.849398Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:00.413642Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:26:00.470701Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:26:00.489315Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:26:00.489622Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:26:02.005598Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:02.175751Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:02.175880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:02.234982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:02.397817Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:26:03.432806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:03.434186Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:26:03.434245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:03.434281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:03.434748Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:03.535610Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:04.717956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:26:12.385034Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:12.395849Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:26:12.401530Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:26:12.456838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:26:12.457003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:12.506193Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:26:12.512560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:12.764577Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:12.765526Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:12.766541Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:12.767532Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:12.767922Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:12.768212Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:12.768473Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:12.768740Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:12.768883Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:26:12.995891Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:26:13.007631Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:13.007765Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:13.023651Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:13.277704Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:13.350616Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:26:13.350733Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:26:13.383495Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:26:13.383790Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:26:13.384084Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:26:13.384164Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:26:13.384221Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:26:13.384284Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:26:13.384348Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:26:13.384408Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:26:13.385148Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:26:13.587499Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2223:2620] 2026-01-04T02:26:13.588130Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:26:13.591738Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Describe result: PathErrorUnknown 2026-01-04T02:26:13.591815Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Creating table 2026-01-04T02:26:13.591921Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:26:13.600856Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2627], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:26:13.614807Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2234:2625] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:26:13.624551Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:26:13.727328Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:26:13.747721Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:26:13.747864Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2302:2660], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:26:13.771421Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2328:2683] 2026-01-04T02:26:13.772977Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2328:2683], schemeshard id = 72075186224037897 2026-01-04T02:26:14.183535Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:26:14.187825Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2594:2886], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:26:14.199347Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:14.208628Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:26:14.208849Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on create table tx: 28 ... hemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:26:30.970654Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:33.536210Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:26:33.536289Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:49.289917Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:26:49.290011Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:26:49.290069Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:26:49.290108Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-04T02:26:52.248681Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-04T02:26:52.248774Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:26:52.249124Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-04T02:26:52.271290Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:26:52.475953Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-04T02:26:52.480356Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4465:2468], ActorId: [2:4475:4017], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:26:52.524645Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4465:2468], ActorId: [2:4475:4017], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-04T02:26:52.524844Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4465:2468], ActorId: [2:4475:4017], Start read next stream part 2026-01-04T02:26:52.561836Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4488:4023], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:52.562020Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4499:4028], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:52.562212Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:52.563475Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4503:4032], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:52.563578Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:52.573599Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4520:4037], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:26:52.577628Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720659:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:26:52.748415Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4502:4031], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720659 completed, doublechecking } 2026-01-04T02:26:52.958603Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4591:4081], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:26:53.079515Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4590:4080] txid# 281474976720660, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:26:53.876805Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4612:4094]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:26:53.877098Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:26:53.877530Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4614:4096] 2026-01-04T02:26:53.877621Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4614:4096] 2026-01-04T02:26:53.878604Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4615:4097] 2026-01-04T02:26:53.878810Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4614:4096], server id = [2:4615:4097], tablet id = 72075186224037894, status = OK 2026-01-04T02:26:53.878899Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4615:4097], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:26:53.878972Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:26:53.879175Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:26:53.879276Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4612:4094], StatRequests.size() = 1 2026-01-04T02:26:53.879391Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:29:00.033307Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4465:2468], ActorId: [2:4475:4017], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:29:00.033621Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4465:2468], ActorId: [2:4475:4017], Start read next stream part 2026-01-04T02:29:00.034428Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3d8czc8a91snd66qcndykr", SessionId: ydb://session/3?node_id=2&id=Njg4MTQ3YTItNThlOTlkNjQtNTAzMTk0NDEtOTM1ZDY4MTI=, Slow query, duration: 127.480851s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:29:00.036317Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4800:4209], ActorId: [2:4801:4210], Starting query actor #1 [2:4802:4211] 2026-01-04T02:29:00.036393Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4801:4210], ActorId: [2:4802:4211], 
Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:29:00.039837Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32310, txId: 18446744073709551615] shutting down 2026-01-04T02:29:00.040599Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4801:4210], ActorId: [2:4802:4211], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZTMyYjUzY2ItNzk4YTljNmUtYzkyZTRjY2EtOWI5YzkwY2Y=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:29:00.043426Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4465:2468], ActorId: [2:4475:4017], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:29:00.043504Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4465:2468], ActorId: [2:4475:4017], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmU5MzkwNzYtN2JjOGFlZjctNDhjMWYwMjQtMmI1ZDhlOTA=, TxId: 2026-01-04T02:29:00.135448Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4819:4225]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:29:00.135783Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:29:00.135852Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4819:4225], StatRequests.size() = 1 2026-01-04T02:29:00.316039Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4801:4210], ActorId: [2:4802:4211], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTMyYjUzY2ItNzk4YTljNmUtYzkyZTRjY2EtOWI5YzkwY2Y=, TxId: 2026-01-04T02:29:00.316157Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4801:4210], ActorId: [2:4802:4211], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTMyYjUzY2ItNzk4YTljNmUtYzkyZTRjY2EtOWI5YzkwY2Y=, TxId: 2026-01-04T02:29:00.316583Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4800:4209], ActorId: [2:4801:4210], Got response [2:4802:4211] SUCCESS 2026-01-04T02:29:00.316904Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:29:00.335884Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:29:00.336002Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3542:3449] |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> THealthCheckTest::TestTabletIsDead [GOOD] >> THealthCheckTest::TestStoppedTabletIsNotDead |84.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |84.8%| [LD] {RESULT} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |84.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tiering/ut/ydb-core-tx-tiering-ut |84.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |84.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_scan/ydb-core-tx-datashard-ut_kqp_scan >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |84.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/ydb-core-tx-schemeshard-ut_bsvolume >> DataShardVolatile::DistributedWriteThenSplit [GOOD] >> DataShardVolatile::DistributedWriteThenReadIterator >> Cdc::RacyCreateAndSend [GOOD] >> Cdc::RacySplitAndDropTable >> Cdc::SupportedTypes [GOOD] >> Cdc::StringEscaping |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-3 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 >> IndexBuildTest::RejectsCancelUniq [GOOD] >> IndexBuildTest::RejectsOnCrossShardDuplicatesUniq |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TSchemeShardLoginTest::ChangeAccountLockoutParameters [GOOD] >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_9_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 9] [GOOD] |84.9%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> Cdc::Drop[YdsRunner] [GOOD] >> 
Cdc::Drop[TopicRunner] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_10_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 10] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_11_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 11] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 >> THealthCheckTest::TestStoppedTabletIsNotDead [GOOD] >> THealthCheckTest::TestTabletsInUnresolvaleDatabase |84.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |84.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_split_merge_reboots/ydb-core-tx-schemeshard-ut_split_merge_reboots >> ObjectStorageListingTest::FilterListing [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 >> TOlapNaming::CreateColumnTableExtraSymbolsOk >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 >> KqpQuery::SelectWhereInSubquery [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 >> ObjectStorageListingTest::ListingNoFilter [GOOD] >> VectorIndexBuildTest::DescriptionIsPersisted-prefixed-true [GOOD] >> VectorIndexBuildTest::CancelBroken >> KqpYql::NonStrictDml ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::FilterListing [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:29:10.521775Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:29:11.034559Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:29:11.036039Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:29:11.036110Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:29:11.800338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:11.800504Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:11.889854Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493745956352 != 1767493745956355 2026-01-04T02:29:11.908392Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:11.959302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:12.071621Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:29:12.398556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:12.413686Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:12.524486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:12.570770Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:29:12.571081Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:29:12.630519Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:29:12.630684Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:29:12.632656Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:29:12.632763Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:29:12.632823Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:29:12.633276Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:29:12.633462Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:29:12.633579Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:29:12.644772Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:29:12.677643Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:29:12.678013Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:29:12.678185Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:29:12.678231Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:29:12.678269Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:29:12.678317Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:29:12.678917Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:29:12.679029Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:29:12.679131Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:29:12.679191Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:29:12.679256Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:29:12.679318Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:29:12.679445Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:29:12.679983Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:29:12.680338Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:29:12.680433Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:29:12.682461Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:29:12.694182Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:29:12.694334Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:29:12.821189Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-04T02:29:12.826642Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:29:12.826744Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:29:12.827221Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:29:12.827297Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:29:12.827354Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:29:12.827680Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:29:12.827864Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:29:12.828109Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:29:12.828182Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:29:12.831073Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:29:12.831616Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:29:12.834574Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:29:12.834640Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:29:12.835608Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:29:12.835687Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:29:12.837179Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:29:12.837233Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:29:12.837303Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:29:12.837373Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:29:12.837428Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:29:12.837523Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:29:12.843278Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:29:12.845141Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:29:12.845216Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:29:12.846288Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:29:12.858592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:12.858738Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:12.859195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:12.860244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:12.860513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:12.865452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:12.873332Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:29:13.005696Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:29:13.009704Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:29:13.099265Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:29:13.557426Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1054:2873], serverId# [1:1055:2874], sessionId# [0:0:0] 2026-01-04T02:29:13.558101Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-04T02:29:13.558433Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-01-04T02:29:13.569713Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:29:13.576127Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1062:2880], serverId# [1:1063:2881], sessionId# [0:0:0] 2026-01-04T02:29:13.576499Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-04T02:29:13.576763Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 1 2026-01-04T02:29:13.577045Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [1:1062:2880], serverId# [1:1063:2881], sessionId# [0:0:0] 2026-01-04T02:29:13.579619Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1068:2886], serverId# [1:1069:2887], sessionId# [0:0:0] 2026-01-04T02:29:13.579861Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-04T02:29:13.580112Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 1 common prefixes: 1 2026-01-04T02:29:13.580344Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [1:1068:2886], serverId# [1:1069:2887], sessionId# [0:0:0] |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> TOlap::StoreStatsQuota >> KqpScanArrowInChanels::AggregateEmptySum [GOOD] >> KqpScanArrowInChanels::JoinWithParams >> KqpResultSetFormats::ArrowFormat_Compression_LZ4_FRAME [GOOD] >> KqpResultSetFormats::ArrowFormat_Multistatement >> TOlap::CreateTableWithNullableKeysNotAllowed >> KqpScanArrowFormat::AggregateEmptySum [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 >> DataShardVolatile::DistributedWriteThenReadIterator [GOOD] >> 
DataShardVolatile::DistributedWriteThenReadIteratorStream >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueries |84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |84.9%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ydb-core-persqueue-ut >> TNetClassifierUpdaterTest::TestFiltrationByNetboxCustomFieldsOnly [GOOD] >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> ObjectStorageListingTest::ListingNoFilter [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:29:11.201051Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:29:11.476933Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:29:11.477442Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:29:11.477512Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:29:12.335727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:12.335883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:12.725380Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493745635251 != 1767493745635254 2026-01-04T02:29:12.823773Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:12.939202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:13.116904Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:29:13.878178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:13.893314Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:14.023793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:14.081151Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:29:14.081404Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:29:14.156040Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:29:14.156197Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:29:14.162248Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:29:14.162367Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:29:14.162440Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:29:14.162906Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:29:14.163102Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:29:14.163221Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:29:14.174412Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:29:14.285393Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:29:14.285626Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:29:14.285742Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:29:14.285778Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:29:14.289821Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:29:14.289927Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:29:14.290493Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:29:14.290589Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:29:14.290666Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:29:14.290714Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:29:14.290780Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:29:14.290839Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:29:14.290960Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:29:14.291413Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:29:14.291696Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:29:14.291784Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:29:14.293458Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:29:14.306425Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:29:14.306551Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:29:14.444087Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-04T02:29:14.479159Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:29:14.479263Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:29:14.479774Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:29:14.479878Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:29:14.479933Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:29:14.480247Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:29:14.480434Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:29:14.480700Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:29:14.480769Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:29:14.497905Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:29:14.498492Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:29:14.502252Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:29:14.502327Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:29:14.503322Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:29:14.503426Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:29:14.504882Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:29:14.504934Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:29:14.504999Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:29:14.505067Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:29:14.505123Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:29:14.505223Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:29:14.510517Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:29:14.512515Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:29:14.512602Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:29:14.513639Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:29:14.524267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:14.524425Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:14.524874Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:14.532796Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:14.533311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:14.538565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:14.546369Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:29:14.695800Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:29:14.713624Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:29:14.870386Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:29:15.872644Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1054:2873], serverId# [1:1055:2874], sessionId# [0:0:0] 2026-01-04T02:29:15.873171Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-04T02:29:15.873365Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-01-04T02:29:15.888525Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:29:15.894313Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1062:2880], serverId# [1:1063:2881], sessionId# [0:0:0] 2026-01-04T02:29:15.894601Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test/")), end at key ((type:2, value:"d\0\0\0") (type:4608, value:"/test0")) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-04T02:29:15.896021Z node 1 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 3 common prefixes: 2 2026-01-04T02:29:15.896292Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [1:1062:2880], serverId# [1:1063:2881], sessionId# [0:0:0] |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |84.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_external_blobs/ydb-core-tx-datashard-ut_external_blobs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::CheckThatLockedOutParametersIsRestoredFromLocalDb [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:28:45.303065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:28:45.303203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:28:45.304602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: 
StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:28:45.304662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:28:45.304729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:28:45.304761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:28:45.304838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:28:45.304909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:28:45.305876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:28:45.306224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:28:45.486861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:28:45.486953Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:45.523314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:28:45.527695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:28:45.527918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:28:45.538260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:28:45.538538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:28:45.539284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:28:45.539558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:28:45.548446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:45.548660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:28:45.550170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:28:45.550257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:45.550420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:28:45.550470Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:28:45.550521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:28:45.550658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:28:46.007424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.008455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.008580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.008668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.008762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.008822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.008901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.008982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.009074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.009565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.009647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.009717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.009799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.009899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.009982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 29:15.629357Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-04T02:29:15.629384Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-04T02:29:15.629582Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.629742Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.629991Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.630148Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.630261Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.630424Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.630834Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.631023Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.631494Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.631601Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.631878Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.632069Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.632142Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.632274Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.632614Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.632728Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 
72057594046678944 2026-01-04T02:29:15.633219Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.633590Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.633696Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.633764Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.634120Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.634207Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.634297Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:15.646270Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:29:15.674276Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:29:15.674422Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:15.675250Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:29:15.675336Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:29:15.675423Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:29:15.683135Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:788:2756] sender: [5:846:2058] recipient: [5:15:2062] 2026-01-04T02:29:15.753527Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:29:15.753625Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-01-04T02:29:15.803442Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: User user1 login denied: too many failed password attempts, at schemeshard: 72057594046678944 2026-01-04T02:29:15.803643Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:29:15.803695Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:29:15.803950Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2026-01-04T02:29:15.804001Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:839:2796], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-04T02:29:15.804853Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 0 2026-01-04T02:29:17.805716Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:29:17.811932Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:29:17.825518Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:29:17.826136Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2026-01-04T02:29:17.826652Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:29:17.826838Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 222us result status StatusSuccess 2026-01-04T02:29:17.827385Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 4 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAui1r61A0dq4x6kag3EpR\nVBRgfEpMuEHLiqfDJSiQxMRaZ9Dobv6tN2e2/U34S1OAAYnctjQ/Om0714skVWD6\nGyerJZmBNnIKD+Cg7Da5LhvV3urkX2o/Pmg8wA27nsagPwz03yoPjFExKIfd/14G\n8v7oUbUb3bicNuSjv1G4hpOPLyoWOAqx+R7udqx1+IJWff8i8YquA7ro3EC7q4ov\nVI+nB3XRAPelexUQfhwhj7rVuM727kOTtPww3tE2LjOdOAuTj79+KiJTKcsVm666\n5qwHPa1BuhfhoJdN64i8MDgNL02CE9l0xyM8K/CuM1/90q/PXBwuvs2Vq820+XqU\nqwIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1767580152762 } PublicKeys { KeyId: 2 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA1lx9xdeXyQekeIrsqXhe\nqQEmny3tlAW1wloEAQyotaXfCG1JHc/26B9qitu0SlEbJA5zYenqN0wSWjhN6bTM\nN3nmBLwcQR7r2U+2z/c8vw+fvJ6NK+N6pL6IJfuAngMmKv8+lNOK0ghcXHO6JQ2T\nubPI+T4t9hcYm/nDsRgltpmPYy9/sZFJlEdA/GffSgBkXDI1+yxfXoYM3Swdl7Uh\nth/4/EhihNcHjEsANv9g+hJQye/X7sKxWag6DwsjrckpnXEAklHlaJyYm2el7BaG\n80Wtj1bnWvUvh5CFS6wLempPRH0Fnta6OdKda6sfDyccEyytoyoZCPP+bUaM3+wU\nsQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1767580153353 } PublicKeys { KeyId: 3 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAschMKLiiZH/uusvrBgr1\nXAsnJbQxMCBZKFXAxm6ZMhmK5Xih40Cs3lfoyGeMZxVQq4lxmcn/JuzPCNSSAfrt\n/MHnBkWUV6kGLfiwImmr+zZUMqqoJwUxJBxKH6kaI8KOfr40V/yYVlu3A7wlSVFv\nkR1lTnWVtqQx/UJHOJROowbxKo9g+usyZHvVidoQXCWy6TzSvCxpOnOi4uYkg53Y\nTQnZurQCn32Rn0K834D2/E6dQrNj+5o1XiwLA0W6/VioGEqx9eVmMXPIVIPrGodt\nxfZIfBbsRa3cjsR9J3JevEjQD0k+/ZxOcmgC+ExI/DCTCpccAv+PpZcl25OxR3oM\nuQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1767580155800 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-10 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 >> VectorIndexBuildTest::CancelBroken [GOOD] >> VectorIndexBuildTest::CreateBuildProposeReject >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 >> GenericFederatedQuery::ClickHouseManagedSelectAll [GOOD] >> GenericFederatedQuery::ClickHouseManagedSelectConstant >> VectorIndexBuildTest::SimpleDuplicates-Overlap-false [GOOD] >> VectorIndexBuildTest::SimpleDuplicates-Overlap-true >> Cdc::StringEscaping [GOOD] >> Cdc::SplitTopicPartition_TopicAutoPartitioning >> KqpYql::TableUseBeforeCreate >> KqpYql::InsertCVList-useSink >> IndexBuildTest::RejectsOnCrossShardDuplicatesUniq [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowFormat::AggregateEmptySum [GOOD] Test command err: Trying to start YDB, gRPC: 26732, MsgBus: 24353 2026-01-04T02:26:03.274666Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327052787484335:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:03.274806Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:26:04.130333Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:04.130436Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:04.279477Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:26:04.351494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:04.379388Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:04.385526Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:04.397932Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327052787484309:2081] 1767493563264958 != 1767493563264961 2026-01-04T02:26:04.641983Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:26:04.750351Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:26:04.750372Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:04.750378Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:04.750470Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:05.886785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:06.041088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:06.317894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:06.628194Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:06.783896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:08.278399Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591327052787484335:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:08.278476Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:26:10.211337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327082852257267:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:10.211423Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:10.211830Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327082852257277:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:10.211862Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:11.030293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:11.100268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:11.274024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:11.363542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:11.427771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:11.517995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:11.612772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:11.744755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:11.864387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327087147225472:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:11.864492Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:11.865003Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327087147225477:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:11.865051Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327087147225478:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:11.865086Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:11.870889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:26:11.883686Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591327087147225481:2496], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:26:11.968221Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591327087147225532:3785] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 9725, MsgBus: 25354 2026-01-04T02:26:15.918346Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591327104674409690:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:15.925778Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_sn ... =cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:28:54.814036Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:54.826197Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:28:54.826339Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:54.837295Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:28:55.022649Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:55.026270Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:7591327784023421069:2081] 1767493734415262 != 1767493734415265 2026-01-04T02:28:55.117939Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:28:55.438129Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:28:55.439318Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:28:55.439338Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:28:55.439355Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:28:55.439505Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:28:55.504946Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:28:58.455866Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:28:58.481831Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:28:58.765649Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:28:59.468322Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7591327784023421107:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:28:59.472283Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:28:59.498987Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:28:59.758623Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:07.014091Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591327839857997855:2412], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:07.014270Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:07.014974Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591327839857997865:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:07.015055Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:07.236981Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:07.346365Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:07.446467Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:07.528523Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:07.755163Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:07.895214Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:08.002354Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:08.151216Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:08.458107Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591327844152966041:2497], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:08.458228Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:08.458899Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591327844152966046:2500], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:08.458986Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591327844152966047:2501], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:08.459489Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:08.468191Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:08.508445Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7591327844152966050:2502], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:29:08.577700Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7591327844152966103:3804] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:29:09.826627Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:29:09.826663Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:14.799601Z node 12 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493754005, txId: 281474976710673] shutting down |84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat |84.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest |84.9%| [LD] {RESULT} $(B)/ydb/core/mind/ut_fat/ydb-core-mind-ut_fat >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-8 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 |84.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |84.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut |85.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/ydb-core-blobstorage-storagepoolmon-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::RejectsOnCrossShardDuplicatesUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:28:42.954040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:28:42.954262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:28:42.954339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:28:42.954425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:28:42.954468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:28:42.954518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2026-01-04T02:28:42.954584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:28:42.954692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:28:42.955560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:28:42.955900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:28:43.317894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:28:43.317967Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:43.355119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:28:43.359330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:28:43.359569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:28:43.375071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:28:43.375375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:28:43.376175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:28:43.376475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:28:43.387764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:43.388052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:28:43.389444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:28:43.389509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:43.389614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:28:43.389662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:28:43.389726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:28:43.389904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:28:43.683662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.690012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.690218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.690311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.690376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.690437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.690505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.690602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.690700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.690771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.690839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.690907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.690979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.691079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.691153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
DROP LOCK, path: /MyRoot/ServerLessDB/Table/test_index/indexImplTable 2026-01-04T02:29:21.686493Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7145: Handle: TEvModifySchemeTransactionResult: txId# 281474976725762, status# StatusPathDoesNotExist 2026-01-04T02:29:21.686586Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7147: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" TxId: 281474976725762 SchemeshardId: 72075186233409549 2026-01-04T02:29:21.686716Z node 6 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3152: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, id# 107, cookie: 107, record: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" TxId: 281474976725762 SchemeshardId: 72075186233409549, status: StatusPathDoesNotExist 2026-01-04T02:29:21.686891Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3157: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvModifySchemeTransactionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Rejection_Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: Duplicate key found: (index1=500, index2=500), SubscribersCount: 1, CreateSender: [6:1555:3426], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725762, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 2, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, cookie: 107, record: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" TxId: 281474976725762 SchemeshardId: 72075186233409549, status: StatusPathDoesNotExist 2026-01-04T02:29:21.687105Z node 6 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-01-04T02:29:21.690373Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Rejected 2026-01-04T02:29:21.690557Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxBuildProgress: Execute: 107 Rejected TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Rejected, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: Duplicate key found: (index1=500, index2=500); At Rejection_Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/Table/test_index', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760, SubscribersCount: 1, CreateSender: [6:1555:3426], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725762, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 2, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:29:21.690631Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 107, subscribers count# 1 2026-01-04T02:29:21.690816Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-04T02:29:21.690871Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [6:1570:3441] TestWaitNotification: OK eventTxId 107 2026-01-04T02:29:21.691718Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 107 2026-01-04T02:29:21.692100Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 107 Issues { message: "Duplicate key found: (index1=500, index2=500); At Rejection_Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 107 Issues { message: "Duplicate key found: (index1=500, index2=500); At Rejection_Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/Table/test_index\', error: path has been deleted (id: [OwnerId: 72075186233409549, 
LocalPathId: 33], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 600, drop txId: 281474976725760" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2026-01-04T02:29:21.693066Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-04T02:29:21.693370Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 308us result status StatusSuccess 2026-01-04T02:29:21.706700Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index1" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "index2" Type: "Uint32" TypeId: 2 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 
MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2026-01-04T02:29:21.707991Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__forget.cpp:18: TIndexBuilder::TXTYPE_FORGET_INDEX_BUILD: DoExecute TxId: 108 DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 107 2026-01-04T02:29:21.708398Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index_tx_base.h:102: TIndexBuilder::TXTYPE_FORGET_INDEX_BUILD: Reply TxId: 108 Status: SUCCESS BUILDINDEX RESPONSE Forget: NKikimrIndexBuilder.TEvForgetResponse TxId: 108 Status: SUCCESS 2026-01-04T02:29:21.718474Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__list.cpp:23: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" PageSize: 100 PageToken: "" 2026-01-04T02:29:21.718625Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_LIST_INDEX_BUILD: Reply Status: SUCCESS NextPageToken: "0" BUILDINDEX RESPONSE LIST: NKikimrIndexBuilder.TEvListResponse Status: SUCCESS NextPageToken: "0" >> IndexBuildTest::BaseCase [GOOD] >> IndexBuildTest::BaseCaseUniq |85.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-34 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 >> VectorIndexBuildTest::CreateBuildProposeReject [GOOD] >> KqpScan::ScanRetryRead >> Cdc::RacySplitAndDropTable [GOOD] >> Cdc::RenameTable >> Cdc::Drop[TopicRunner] [GOOD] >> Cdc::DropColumn >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] [FAIL] >> DataShardVolatile::DistributedWriteThenReadIteratorStream [GOOD] >> DataShardVolatile::DistributedWriteThenScanQuery >> TOlap::CreateTableWithNullableKeysNotAllowed [GOOD] >> TOlap::CreateTableWithNullableKeys >> KqpYql::NonStrictDml [GOOD] >> KqpYql::PgIntPrimaryKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::CreateBuildProposeReject [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:28:42.948692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:28:42.948783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:28:42.948818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:28:42.948857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:28:42.948893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:28:42.948921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:28:42.949013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:28:42.949110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:28:42.958286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:28:42.958714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:28:43.086130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:28:43.086195Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:43.103110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:28:43.103347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:28:43.103562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:28:43.129052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:28:43.129308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:28:43.130055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:28:43.130354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:28:43.133739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:43.133976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:28:43.135229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:28:43.135297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:43.135401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:28:43.135441Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:28:43.135542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:28:43.135743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:28:43.340458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.341518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.341673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.341750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.341839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.341934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.342019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.342100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.342173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.342244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.342328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.342421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.342523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.343051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:43.343162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1174:3121], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710762, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710763, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000044, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 2 UploadBytes: 42 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 300, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:29:24.736822Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710766:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710766 msg type: 269090816 2026-01-04T02:29:24.736945Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710766, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:29:24.737169Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710766, at schemeshard: 72057594046678944 2026-01-04T02:29:24.737220Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710766, ready parts: 0/1, is published: true 2026-01-04T02:29:24.737272Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710766, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Add transaction: 281474976710766 at step: 5000047 FAKE_COORDINATOR: advance: minStep5000047 State->FrontStep: 5000046 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710766 at step: 5000047 2026-01-04T02:29:24.737553Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000047, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:29:24.737691Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710766 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 25769805936 } } Step: 5000047 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:29:24.737759Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710766:0 HandleReply TEvOperationPlan: step# 5000047 2026-01-04T02:29:24.737830Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 
281474976710766:0 128 -> 240 2026-01-04T02:29:24.740736Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710766:0, at schemeshard: 72057594046678944 2026-01-04T02:29:24.740818Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710766:0 ProgressState 2026-01-04T02:29:24.740931Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710766:0 progress is 1/1 2026-01-04T02:29:24.740985Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710766 ready parts: 1/1 2026-01-04T02:29:24.741037Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710766:0 progress is 1/1 2026-01-04T02:29:24.741078Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710766 ready parts: 1/1 2026-01-04T02:29:24.741117Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710766, ready parts: 1/1, is published: true 2026-01-04T02:29:24.741199Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:128:2153] message: TxId: 281474976710766 2026-01-04T02:29:24.741253Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710766 ready parts: 1/1 2026-01-04T02:29:24.741290Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710766:0 2026-01-04T02:29:24.741331Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710766:0 2026-01-04T02:29:24.741417Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 FAKE_COORDINATOR: Erasing txId 281474976710766 2026-01-04T02:29:24.743477Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710766 2026-01-04T02:29:24.743563Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710766 2026-01-04T02:29:24.743633Z node 6 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 103, txId# 281474976710766 2026-01-04T02:29:24.743792Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1174:3121], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710762, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710763, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000044, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, 
DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 2 UploadBytes: 42 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 300, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710766 2026-01-04T02:29:24.745334Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking 2026-01-04T02:29:24.745441Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1174:3121], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710762, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710763, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000044, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 2 UploadBytes: 42 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 300, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:29:24.745484Z node 6 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-04T02:29:24.747024Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Done 2026-01-04T02:29:24.747155Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Done TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1174:3121], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710762, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710763, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000044, ApplyTxId: 281474976710765, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710766, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 2 UploadBytes: 42 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 300, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:29:24.747205Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TIndexBuildInfo SendNotifications: : id# 103, subscribers count# 1 2026-01-04T02:29:24.747378Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:29:24.747433Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [6:1175:3122] TestWaitNotification: OK eventTxId 103 2026-01-04T02:29:24.748247Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 103 2026-01-04T02:29:24.748603Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 103 Issues { message: "TShardStatus { ShardIdx: 72057594046678944:1 Status: INVALID UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/vectors" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 103 Issues { message: "TShardStatus { ShardIdx: 72057594046678944:1 Status: INVALID UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/vectors" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } >> TOlapNaming::CreateColumnTableExtraSymbolsOk [GOOD] >> TOlapNaming::CreateColumnStoreOk |85.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest |85.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |85.0%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/federated_query/large_results/ydb-core-kqp-ut-federated_query-large_results |85.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |85.0%| [LD] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/keyvalue/ut_trace/ydb-core-keyvalue-ut_trace >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 >> KqpScan::RemoteShardScan |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |85.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TOlapNaming::CreateColumnStoreOk [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 >> GenericFederatedQuery::ClickHouseManagedSelectConstant [GOOD] >> GenericFederatedQuery::ClickHouseSelectCount >> GenericFederatedQuery::IcebergHiveSaSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectConstant |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |85.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> KqpQuery::TableSink_ReplaceDataShardDataQuery+UseSink [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnStoreOk [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:29:15.864496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:29:15.864609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:29:15.864677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:29:15.864716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:29:15.864750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:29:15.864782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:29:15.864843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:29:15.864907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:29:15.865845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:29:15.866163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:29:15.982606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:29:15.982664Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:15.995158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:29:15.995366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:29:15.995560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:29:16.004887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:29:16.005140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:29:16.005821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:29:16.006156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:29:16.014044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:16.014284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:29:16.015513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:29:16.015587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:16.015750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:29:16.015795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2026-01-04T02:29:16.015906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:29:16.016114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:29:16.159102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:16.160098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:16.160301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:16.160372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:16.160435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:16.160526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:16.160597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:29:16.160701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:16.160782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:16.160851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:16.160946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:16.161009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:16.161063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:16.161169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:16.161262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... Count reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:29:28.505527Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:29:28.509054Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:29:28.509117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:29:28.509358Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:29:28.509516Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:28.509557Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-04T02:29:28.509606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000038 2026-01-04T02:29:28.510050Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:29:28.510110Z node 2 :FLAT_TX_SCHEMESHARD INFO: create_store.cpp:246: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 2026-01-04T02:29:28.510178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: create_store.cpp:269: TCreateOlapStore TProposedWaitParts operationId# 101:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-01-04T02:29:28.511005Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:29:28.511106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:29:28.511146Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:29:28.511187Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 
2026-01-04T02:29:28.511228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:29:28.515473Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:29:28.515561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:29:28.515591Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:29:28.515620Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:29:28.515654Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:29:28.515744Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-01-04T02:29:28.518595Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 101:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-01-04T02:29:28.519321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:29:28.521906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:29:28.539244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2026-01-04T02:29:28.539329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-04T02:29:28.539470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 FAKE_COORDINATOR: Erasing txId 101 2026-01-04T02:29:28.547014Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:29:28.547244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:29:28.547290Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:29:28.547420Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:29:28.547460Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:29:28.547499Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:29:28.547529Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:29:28.547564Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-04T02:29:28.547638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:749:2725] message: TxId: 101 2026-01-04T02:29:28.547682Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:29:28.547712Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:29:28.547739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:29:28.547862Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:29:28.550210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:29:28.550283Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:750:2726] TestWaitNotification: OK eventTxId 101 2026-01-04T02:29:28.550698Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:29:28.550897Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore" took 220us result status StatusSuccess 2026-01-04T02:29:28.551415Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore" PathDescription { Self { Name: "OlapStore" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnStoreVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 
ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "OlapStore" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |85.0%| [LD] {RESULT} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/driver_lib/run/ut/ydb-core-driver_lib-run-ut |85.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |85.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 >> KqpYql::InsertCVList-useSink [GOOD] >> KqpYql::InsertIgnore >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 >> KqpScripting::StreamExecuteYqlScriptSeveralQueries [GOOD] >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-9 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 >> KqpYql::PgIntPrimaryKey [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 >> KqpYql::TableUseBeforeCreate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString [GOOD] Test command err: 2026-01-04T02:28:31.826190Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:28:31.852656Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:28:31.852903Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:28:31.859025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:28:31.859330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:28:31.859504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:28:31.859576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:28:31.859638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:28:31.859766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:28:31.859872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:28:31.860004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:28:31.860126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:28:31.860235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:28:31.860319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:28:31.860444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:28:31.860524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:28:31.883798Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:28:31.883948Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:28:31.884019Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:28:31.884219Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:28:31.884362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:28:31.884440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:28:31.884473Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:28:31.884560Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:28:31.884620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:28:31.884669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:28:31.884712Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:28:31.884885Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:28:31.884930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:28:31.884967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:28:31.884985Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:28:31.885048Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:28:31.885082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:28:31.885104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:28:31.885125Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:28:31.885157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:28:31.885182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:28:31.885216Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:28:31.885252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:28:31.885281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:28:31.885299Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:28:31.885494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:28:31.885543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:28:31.885576Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:28:31.885730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:28:31.885762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:28:31.885784Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:28:31.885842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:28:31.885875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:28:31.885919Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:28:31.885964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:28:31.885999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:28:31.886020Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:28:31.886109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:28:31.886143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... tage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=10; 2026-01-04T02:29:27.654895Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=894; 2026-01-04T02:29:27.654954Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=64727; 2026-01-04T02:29:27.654995Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=64833; 2026-01-04T02:29:27.655051Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=10; 2026-01-04T02:29:27.655392Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=295; 2026-01-04T02:29:27.655441Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=65571; 2026-01-04T02:29:27.655612Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=98; 2026-01-04T02:29:27.655723Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=65; 2026-01-04T02:29:27.656190Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=419; 2026-01-04T02:29:27.656617Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=371; 2026-01-04T02:29:27.676565Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=19841; 2026-01-04T02:29:27.694360Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=17667; 2026-01-04T02:29:27.694477Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=13; 2026-01-04T02:29:27.694528Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2026-01-04T02:29:27.694563Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-04T02:29:27.694630Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=39; 2026-01-04T02:29:27.694669Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2026-01-04T02:29:27.694783Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=79; 2026-01-04T02:29:27.694830Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-04T02:29:27.694903Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=28; 2026-01-04T02:29:27.694993Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=50; 2026-01-04T02:29:27.695065Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=40; 2026-01-04T02:29:27.695122Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=112732; 2026-01-04T02:29:27.695255Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=58229640;raw_bytes=56100060;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted 
{blob_bytes=23768;raw_bytes=20000;count=1;records=200} inactive {blob_bytes=348800;raw_bytes=16800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:29:27.695362Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:29:27.695412Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:29:27.695469Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:29:27.695506Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:29:27.695681Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:29:27.695741Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:29:27.695778Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:29:27.695816Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:29:27.695873Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767493414282;tx_id=18446744073709551615;;current_snapshot_ts=1767493713294; 2026-01-04T02:29:27.695911Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:29:27.695949Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:29:27.695981Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:29:27.696091Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:29:27.696291Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.053000s; 2026-01-04T02:29:27.703576Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:29:27.704378Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:29:27.704448Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:29:27.704550Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:29:27.704607Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:29:27.704682Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767493414282;tx_id=18446744073709551615;;current_snapshot_ts=1767493713294; 2026-01-04T02:29:27.704733Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:29:27.704805Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:29:27.704854Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:29:27.704946Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-04T02:29:27.705003Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:29:27.706501Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.157000s; 2026-01-04T02:29:27.706564Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |85.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-35 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |85.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> DataShardVolatile::DistributedWriteThenScanQuery [GOOD] >> DataShardVolatile::DistributedWriteWithAsyncIndex >> TBSV::ShardsNotLeftInShardsToDelete ------- 
[TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::PgIntPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 6483, MsgBus: 8016 2026-01-04T02:29:16.834352Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327878473108092:2251];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:16.834447Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:29:16.910299Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:29:17.316830Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:29:17.392550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:17.392673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:17.442644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:17.583548Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327878473107878:2081] 1767493756809009 != 1767493756809012 2026-01-04T02:29:17.584919Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:17.620745Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:29:17.714502Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:29:17.714523Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:29:17.714529Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:29:17.714633Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:29:17.821951Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:18.315374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:18.330614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:29:18.438695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:18.787252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:19.065699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:19.228732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:21.820366Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591327878473108092:2251];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:21.820432Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:29:22.102443Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327904242913525:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:22.102657Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:22.103347Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327904242913535:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:22.103403Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:22.433847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:22.471456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:22.499489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:22.531523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:22.565509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:22.610473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:22.659101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:22.728763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:22.820300Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327904242914409:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:22.820370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:22.820447Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327904242914414:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:22.820652Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327904242914416:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:22.820674Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:22.824560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:22.855677Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591327904242914417:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:29:22.972076Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591327904242914471:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 25976, MsgBus: 5210 2026-01-04T02:29:26.718228Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591327923909033784:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:26.718292Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:29:26.741879Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:29:26.791564Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:26.827247Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:26.827341Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:26.831041Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:26.934499Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:29:26.934532Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:29:26.934540Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:29:26.934622Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:29:27.036060Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:29:27.464911Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:27.727329Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:30.149513Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591327941088903817:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:30.149634Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:30.150652Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591327941088903827:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:30.150729Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:30.173960Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:30.236037Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591327941088903919:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:30.236126Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:30.236327Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591327941088903924:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:30.236379Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591327941088903925:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:30.236440Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:30.240104Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:30.252527Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591327941088903928:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:29:30.338287Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591327941088903979:2593] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:29:31.723530Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591327923909033784:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:31.723891Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |85.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> Cdc::SplitTopicPartition_TopicAutoPartitioning [GOOD] >> Cdc::ShouldDeliverChangesOnSplitMerge >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 >> TBSV::CleanupDroppedVolumesOnRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableUseBeforeCreate [GOOD] Test command err: Trying to start YDB, gRPC: 21090, MsgBus: 25826 2026-01-04T02:29:22.345321Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327906456081402:2212];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:22.345523Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:29:23.169788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:23.176420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:23.214644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:23.352885Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:29:23.478335Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:23.485874Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327906456081227:2081] 1767493762289313 != 1767493762289316 2026-01-04T02:29:23.518080Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:23.563034Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:29:23.739181Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:29:23.739204Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to 
initialize from file: (empty maybe) 2026-01-04T02:29:23.739212Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:29:23.739356Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:29:24.722051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:24.841253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:25.157025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:25.576353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:26.025692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:27.346134Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591327906456081402:2212];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:27.346205Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:29:28.710925Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327932225886891:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:28.711263Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:28.717947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327932225886901:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:28.718058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:29.244280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:29.318982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:29.365963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:29.439376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:29.475570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:29.529951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:29.565861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:29.626636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:29.718996Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327936520855073:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:29.719106Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:29.719364Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327936520855079:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:29.719405Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327936520855078:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:29.719438Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:29.723406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:29.739899Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591327936520855082:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:29:29.820683Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591327936520855133:3787] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:3:13: Error: At function: KiReadTable!
:3:13: Error: Cannot find table 'db.[/Root/NewTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 |85.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> THealthCheckTest::TestTabletsInUnresolvaleDatabase [GOOD] >> THealthCheckTest::UnknowPDiskState >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-14 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 >> GenericFederatedQuery::ClickHouseSelectCount [GOOD] >> GenericFederatedQuery::ClickHouseFilterPushdown >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] |85.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |85.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_13_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 13] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_14_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 14] >> GenericFederatedQuery::IcebergHiveSaSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveSaSelectCount >> TOlap::CreateTableWithNullableKeys [GOOD] >> TOlap::CustomDefaultPresets >> Cdc::DropColumn [GOOD] >> Cdc::DropIndex >> Cdc::RenameTable [GOOD] >> Cdc::ResolvedTimestamps ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShardsNotLeftInShardsToDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:29:35.850045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:29:35.850163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:29:35.850206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:29:35.850248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:29:35.850287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:29:35.850318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:29:35.850422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:29:35.850500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-04T02:29:35.851426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:29:35.851779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:29:36.034756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:29:36.034829Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:36.091242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:29:36.104324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:29:36.104655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:29:36.128679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:29:36.128960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:29:36.129703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:29:36.130076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:29:36.154980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:36.155297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:29:36.156757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:29:36.156845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:36.156998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:29:36.157062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:29:36.157128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:29:36.157308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:29:36.166521Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:241:2058] recipient: [1:15:2062] 2026-01-04T02:29:36.349138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:29:36.349402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:29:36.349713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:29:36.349770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:29:36.350150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:29:36.350236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:36.354962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:29:36.355249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:29:36.355531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:29:36.355613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:29:36.355656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:29:36.355725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:29:36.358281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:29:36.358350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:29:36.358390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:29:36.363904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:29:36.363980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:29:36.364043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:29:36.364133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:29:36.367962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:29:36.373073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:29:36.373335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:29:36.374666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:29:36.374856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:29:36.374925Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:29:36.375232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:29:36.375302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:29:36.375472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:29:36.375554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:29:36.383554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:29:36.383625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
d: 72057594046678944, LocalPathId: 2] was 2 2026-01-04T02:29:36.628000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:29:36.628044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:29:36.628084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:29:36.628134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:29:36.628215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:29:36.628277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-04T02:29:36.628333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-04T02:29:36.628395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:29:36.628431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:29:36.628463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:29:36.628577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-04T02:29:36.628626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-04T02:29:36.628676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-04T02:29:36.628707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2026-01-04T02:29:36.632270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:29:36.632338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-04T02:29:36.634860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:29:36.634937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-04T02:29:36.635923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 2, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-04T02:29:36.636015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:29:36.636048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 
72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:29:36.636232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:29:36.636289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:29:36.636468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-04T02:29:36.636638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:36.636693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:208:2209], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-04T02:29:36.636757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:208:2209], at schemeshard: 72057594046678944, txId: 102, path id: 2 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:29:36.637371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:29:36.637480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:29:36.637556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:29:36.637602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2026-01-04T02:29:36.637646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-04T02:29:36.646493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:29:36.646578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:29:36.646688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:29:36.647123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:29:36.647205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:29:36.647254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:29:36.647289Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-04T02:29:36.647330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:29:36.647401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:29:36.648420Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 2026-01-04T02:29:36.648634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-04T02:29:36.649229Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 2026-01-04T02:29:36.662217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-04T02:29:36.664357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:29:36.675453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:29:36.675928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:29:36.682973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:29:36.683144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:29:36.683533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:29:36.683587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:29:36.684009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:29:36.684162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:29:36.684215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:394:2373] TestWaitNotification: OK eventTxId 102 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2026-01-04T02:29:36.684639Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2026-01-04T02:29:36.684737Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 { Type { Kind: Struct Struct { Member { Name: "ShardsToDelete" Type { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "List" Type { Kind: List List { Item { Kind: Struct Struct { Member { Name: "ShardIdx" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } Member { Name: "Truncated" Type { Kind: Data Data { Scheme: 6 } } } } } } } } } } Value { Struct { Optional { Struct { } Struct { Bool: false } } } } } >> TBSV::ShouldLimitBlockStoreVolumeDropRate |85.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] [FAIL] >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] |85.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |85.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut |85.0%| [LD] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/ydb-core-blobstorage-vdisk-syncer-ut >> ExternalBlobsMultipleChannels::WithCompaction >> KqpScanArrowInChanels::JoinWithParams [GOOD] >> KqpResultSetFormats::ArrowFormat_Multistatement [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::CleanupDroppedVolumesOnRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:29:36.574026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:29:36.574129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:29:36.574172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 
2026-01-04T02:29:36.574208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:29:36.574259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:29:36.574290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:29:36.574389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:29:36.574490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:29:36.575355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:29:36.575727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:29:36.820425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:29:36.820495Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:36.847799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:29:36.852439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:29:36.852737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:29:36.870529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:29:36.870805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:29:36.871575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:29:36.871904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:29:36.881110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:36.881369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:29:36.882760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:29:36.882854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:36.883017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:29:36.883076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:29:36.883146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:29:36.883340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:29:36.903820Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:241:2058] recipient: [1:15:2062] 2026-01-04T02:29:37.229301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:29:37.229565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:29:37.233968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:29:37.234068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:29:37.234385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:29:37.234491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:37.241985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:29:37.242273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:29:37.242567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:29:37.242653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:29:37.242694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:29:37.242760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:29:37.250870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:29:37.250960Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:29:37.251013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:29:37.259828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:29:37.259924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:29:37.259980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:29:37.260040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:29:37.268774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:29:37.271722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:29:37.271951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:29:37.273253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:29:37.273418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:29:37.273491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:29:37.273794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:29:37.273883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:29:37.274049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:29:37.274133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing 
txId 1 2026-01-04T02:29:37.276836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:29:37.276908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... [1:15:2062] 2026-01-04T02:29:38.367062Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:29:38.367318Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 305us result status StatusPathDoesNotExist 2026-01-04T02:29:38.367548Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:29:38.368958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 Leader for TabletID 72057594046678944 is [1:404:2380] sender: [1:473:2058] recipient: [1:106:2140] Leader for TabletID 72057594046678944 is [1:404:2380] sender: [1:476:2058] recipient: [1:475:2434] Leader for TabletID 72057594046678944 is [1:477:2435] sender: [1:478:2058] recipient: [1:475:2434] 2026-01-04T02:29:38.465748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:29:38.465920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:29:38.465974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:29:38.466015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:29:38.466054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:29:38.466087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:29:38.466199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2026-01-04T02:29:38.466284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:29:38.467187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:29:38.467489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:29:38.492418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:29:38.503058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:29:38.503471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:29:38.503934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:29:38.504000Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:38.504130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:29:38.504945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1418: TTxInit for Paths, read records: 1, at schemeshard: 72057594046678944 2026-01-04T02:29:38.505084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1492: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.505182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1518: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.505633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1820: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.505723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:452: [RootShredManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberShredTenantsInRunning# 0 2026-01-04T02:29:38.506037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.506141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.506295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.506403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.506509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.506701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.507017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.507252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, 
read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.507676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.507778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.508003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.508132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.508232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.508330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.508600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.508683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.508858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.509150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.509500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.509587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.509796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.509868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.509920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:38.526090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:29:38.528771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:29:38.528863Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:38.528982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:29:38.529043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:29:38.529092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-01-04T02:29:38.537961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:477:2435] sender: [1:539:2058] recipient: [1:15:2062] 2026-01-04T02:29:38.575162Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:29:38.575456Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/BSVolume" took 317us result status StatusPathDoesNotExist 2026-01-04T02:29:38.575636Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |85.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |85.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut |85.1%| [LD] {RESULT} $(B)/ydb/core/statistics/database/ut/ydb-core-statistics-database-ut >> TPQTest::TestPartitionTotalQuota >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] [FAIL] |85.1%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpScanArrowInChanels::JoinWithParams [GOOD] Test command err: Trying to start YDB, gRPC: 26691, MsgBus: 25543 2026-01-04T02:26:05.761633Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327058544843954:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:05.761749Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:26:06.478944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2026-01-04T02:26:06.479053Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:06.616868Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:26:06.682870Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:06.685136Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327058544843734:2081] 1767493565725477 != 1767493565725480 2026-01-04T02:26:06.695147Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:06.722244Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:06.842629Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:26:07.010438Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:26:07.010462Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:07.010468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:07.010557Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:08.038828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:08.201330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:08.592671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:09.023467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:09.178310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:10.761881Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591327058544843954:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:10.761943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:26:15.038799Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327101494518618:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:15.038918Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:15.039272Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327101494518628:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:15.039307Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:15.597652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:15.670899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:15.711421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:15.769000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:15.828063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:15.921125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:16.016847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:16.129106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:16.374551Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327105789486802:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:16.374658Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:16.375569Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327105789486808:2499], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:16.375725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327105789486807:2498], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:16.375957Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:16.380144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:26:16.417922Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591327105789486811:2500], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:26:16.520189Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591327105789486862:3790] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:26:20.010448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:29:17.905919Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:29:18.011847Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:18.012014Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:18.117948Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:7591327884342495154:2081] 1767493757377890 != 1767493757377893 2026-01-04T02:29:18.178615Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:18.198774Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:18.307284Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:29:18.426089Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:18.494766Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:29:18.494801Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:29:18.494817Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:29:18.494941Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:29:20.564779Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:20.604745Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:20.883652Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:21.857998Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:22.179213Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:22.417625Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7591327884342495180:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:22.417716Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:29:30.089990Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591327940177071961:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:30.090140Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:30.090622Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591327940177071970:2416], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:30.090701Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:30.306652Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:30.374382Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:30.480561Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:30.555734Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:30.630872Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:30.741492Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:30.859419Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:31.085459Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:31.259473Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591327944472040150:2503], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:31.259649Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:31.260123Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591327944472040155:2506], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:31.260206Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591327944472040156:2507], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:31.260261Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:31.268069Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:31.289495Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7591327944472040159:2508], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:29:31.352034Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7591327944472040210:3822] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:29:32.790671Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:29:32.790714Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:36.853007Z node 12 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493776846, txId: 281474976710673] shutting down 2026-01-04T02:29:37.480241Z node 12 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493777504, txId: 281474976710675] shutting down |85.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |85.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_index/ydb-core-tx-schemeshard-ut_index |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut |85.1%| [LD] {RESULT} $(B)/ydb/services/dynamic_config/ut/ydb-services-dynamic_config-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TBSV::ShouldLimitBlockStoreVolumeDropRate [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:29:39.270386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:29:39.270477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:29:39.270520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:29:39.270555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:29:39.270600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:29:39.270631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing 
config: type TxMergeTablePartition, limit 10000 2026-01-04T02:29:39.270716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:29:39.270815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:29:39.271640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:29:39.271959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:29:39.487940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:29:39.488004Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:39.511820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:29:39.516057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:29:39.516410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:29:39.537081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:29:39.537362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:29:39.538120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:29:39.538458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:29:39.546631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:39.546915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:29:39.548274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:29:39.548371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:39.548528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:29:39.548586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:29:39.548660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:29:39.548839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:29:39.853671Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:39.854858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:39.855014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:39.855115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:39.855211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:39.855288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:39.855354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:39.855423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:39.855493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:39.855578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:39.855674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:39.855749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:39.855829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:39.855921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:39.856020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000064, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:29:42.449943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000064 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:29:42.449998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_bsv.cpp:40: TDropBlockStoreVolume TPropose, operationId: 129:0 HandleReply TEvOperationPlan, step: 5000064, at schemeshard: 72057594046678944 2026-01-04T02:29:42.450125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 2 2026-01-04T02:29:42.450286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-01-04T02:29:42.450327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-04T02:29:42.450373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-01-04T02:29:42.450421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-04T02:29:42.450486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:29:42.450544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 1 2026-01-04T02:29:42.450591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2026-01-04T02:29:42.450641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-04T02:29:42.450682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 129:0 2026-01-04T02:29:42.450717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 129:0 2026-01-04T02:29:42.450839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 2 2026-01-04T02:29:42.450883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2026-01-04T02:29:42.450917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 57 2026-01-04T02:29:42.450949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 49], 18446744073709551615 2026-01-04T02:29:42.452110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:24 2026-01-04T02:29:42.452181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe 
to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2026-01-04T02:29:42.452285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:23 2026-01-04T02:29:42.452330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2026-01-04T02:29:42.454182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 2, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-04T02:29:42.454270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:24 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:29:42.454300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:23 hive 72057594037968897 at ss 72057594046678944 FAKE_COORDINATOR: Erasing txId 129 2026-01-04T02:29:42.454530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:29:42.454578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:29:42.454771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 49] 2026-01-04T02:29:42.454902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:42.454933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:208:2209], at schemeshard: 72057594046678944, txId: 129, path id: 1 2026-01-04T02:29:42.454973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:208:2209], at schemeshard: 72057594046678944, txId: 129, path id: 49 2026-01-04T02:29:42.455841Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 49 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:29:42.455933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 49 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:29:42.455972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2026-01-04T02:29:42.456037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 49], version: 18446744073709551615 2026-01-04T02:29:42.456078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 49] was 1 2026-01-04T02:29:42.456486Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:29:42.456529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 49], at schemeshard: 72057594046678944 2026-01-04T02:29:42.456606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:29:42.456836Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 24 TxId_Deprecated: 0 2026-01-04T02:29:42.457013Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 23 TxId_Deprecated: 0 2026-01-04T02:29:42.457129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 57 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:29:42.457215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 57 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:29:42.457246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2026-01-04T02:29:42.457274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 57 2026-01-04T02:29:42.457300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:29:42.457367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2026-01-04T02:29:42.457595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 24, at schemeshard: 72057594046678944 2026-01-04T02:29:42.459891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 23, at schemeshard: 72057594046678944 2026-01-04T02:29:42.463717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-01-04T02:29:42.464222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:29:42.466680Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-01-04T02:29:42.466792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:24 2026-01-04T02:29:42.466906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:23 TestModificationResult got TxId: 129, wait until txId: 129 TestWaitNotification wait txId: 129 2026-01-04T02:29:42.467550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 129: send EvNotifyTxCompletion 2026-01-04T02:29:42.467596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 129 2026-01-04T02:29:42.468297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2026-01-04T02:29:42.468410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2026-01-04T02:29:42.468451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:2071:3940] TestWaitNotification: OK eventTxId 129 >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 [FAIL] |85.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::ReducedSizeClassCalcTest [GOOD] >> KqpQuery::TableSink_ReplaceDataShardDataQuery-UseSink [GOOD] >> KqpQuery::TableSinkWithSubquery |85.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |85.1%| [LD] {RESULT} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/conveyor_composite/ut/ydb-core-tx-conveyor_composite-ut >> KqpScan::ScanRetryRead [GOOD] >> KqpScan::ScanRetryReadRanges |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TBlobStorageStoragePoolMonTest::SizeClassCalcTest [GOOD] |85.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication |85.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/ydb-core-tx-schemeshard-ut_replication >> TOlap::CustomDefaultPresets [GOOD] >> 
TKeyValueTracingTest::ReadSmall >> KqpYql::InsertIgnore [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptSeveralQueriesComplex [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 [FAIL] Test command err: Trying to start YDB, gRPC: 18016, MsgBus: 32646 2026-01-04T02:29:19.016528Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327891222448032:2214];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:19.016598Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:29:19.952367Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:29:19.976899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:19.988848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:20.024779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:20.217737Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:20.308814Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:29:20.325730Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327886927480545:2081] 1767493758931786 != 1767493758931789 2026-01-04T02:29:20.404627Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:20.657230Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:29:20.657258Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:29:20.657267Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:29:20.657377Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:29:20.904013Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:29:22.293667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:22.314311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:29:22.441320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:22.742203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:23.092621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:23.232553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:24.013993Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591327891222448032:2214];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:24.014079Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:29:27.478923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327925582188107:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:27.479058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:27.485966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327925582188117:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:27.486099Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:27.871864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:27.919650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:28.020501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:28.068581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:28.142546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:28.215927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:28.265237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:28.332243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:28.481918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327929877156307:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:28.482047Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:28.482426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327929877156312:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:28.482502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327929877156313:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:28.482668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:28.486620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:28.508670Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591327929877156316:2497], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:29:28.580163Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591327929877156367:3789] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 65170, MsgBus: 10891 2026-01-04T02:29:32.897847Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591327949555466128:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:32.897960Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:29:32.912950Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:29:33.021625Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:33.023900Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591327949555466090:2081] 1767493772896017 != 1767493772896020 2026-01-04T02:29:33.035933Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:33.036025Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:33.061417Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:33.195581Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:29:33.253893Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:29:33.253918Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:29:33.253933Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:29:33.254021Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:29:33.906137Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:34.306777Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:34.332199Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:34.520587Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:34.837193Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:34.925437Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:37.643911Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591327971030304449:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:37.643999Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:37.644384Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591327971030304459:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:37.644426Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:37.888662Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:37.901929Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591327949555466128:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:37.902017Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:29:37.976321Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:38.132802Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:38.300449Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:38.412393Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:38.526732Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:38.626918Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:38.735189Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:38.953903Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591327975325272641:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:38.954020Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:38.954440Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591327975325272646:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:38.954501Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591327975325272647:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:38.954627Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:38.959365Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:39.009757Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591327975325272650:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:29:39.087644Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591327979620239997:3776] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Result: [[[[101u]]];[[[102u]]];[[[103u]]];[[[104u]]];[[[105u]]]] |85.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest |85.1%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CustomDefaultPresets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:29:18.096867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:29:18.096987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:29:18.097064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:29:18.097120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:29:18.097176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:29:18.097209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:29:18.097298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:29:18.097394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:29:18.098717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:29:18.099084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:29:18.205260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:29:18.205337Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:18.221773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: 
TxInitSchema.Complete 2026-01-04T02:29:18.222069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:29:18.222272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:29:18.231590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:29:18.231850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:29:18.232691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:29:18.233030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:29:18.236463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:18.236685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:29:18.238033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:29:18.238111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:18.238252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:29:18.238311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:29:18.238439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:29:18.238650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:29:18.406970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.408105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.408284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: 
false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.408364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.408449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.408552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.408625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.408705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.408807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.408878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.408980Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.409049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.409112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.409213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.409337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
MESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:212:2213], at schemeshard: 72057594046678944, txId: 102, path id: 39 2026-01-04T02:29:45.849240Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:29:45.849295Z node 3 :FLAT_TX_SCHEMESHARD INFO: create_table.cpp:461: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-04T02:29:45.849371Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: create_table.cpp:487: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-01-04T02:29:45.850536Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:29:45.886097Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:29:45.886193Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:29:45.886248Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-04T02:29:45.886310Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:29:45.887468Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:29:45.887568Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:29:45.887602Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:29:45.887637Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-04T02:29:45.887674Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:29:45.887907Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-04T02:29:45.899037Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-01-04T02:29:45.899146Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:39 msg type: 268697639 2026-01-04T02:29:45.899249Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72057594037968897 2026-01-04T02:29:45.900100Z node 3 :HIVE INFO: tablet_helpers.cpp:1623: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 102 TxPartId: 0 2026-01-04T02:29:45.907263Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6412: Update tablets object reply, message: Status: OK TxId: 102 TxPartId: 0, at schemeshard: 72057594046678944 2026-01-04T02:29:45.907492Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 102 TxPartId: 0 2026-01-04T02:29:45.908041Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:29:45.908720Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:29:45.911230Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:29:45.924595Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 2026-01-04T02:29:45.924678Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:29:45.924813Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:29:45.927061Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:29:45.927267Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:29:45.927315Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:29:45.927480Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:29:45.927534Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:29:45.927576Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:29:45.927616Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:29:45.927659Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:29:45.927739Z node 
3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:752:2729] message: TxId: 102 2026-01-04T02:29:45.927804Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:29:45.927852Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:29:45.927890Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:29:45.928047Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:29:45.930392Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:29:45.930461Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [3:814:2783] TestWaitNotification: OK eventTxId 102 2026-01-04T02:29:45.931080Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/OlapStore/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:29:45.931387Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/OlapStore/ColumnTable" took 336us result status StatusSuccess 2026-01-04T02:29:45.985440Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/OlapStore/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 
MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 38 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> GenericFederatedQuery::IcebergHiveSaSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveSaFilterPushdown >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] >> TKeyValueTracingTest::WriteSmall >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] >> AutoConfig::GetServicePoolsWith1CPU [GOOD] >> TKeyValueTracingTest::ReadSmall [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertIgnore [GOOD] Test command err: Trying to start YDB, gRPC: 6542, MsgBus: 14065 2026-01-04T02:29:22.190481Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327905011834798:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:22.190665Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:29:22.518958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:22.519076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:22.556496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:22.577521Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:29:22.590622Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:22.783674Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:29:22.783701Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:29:22.783725Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:29:22.783805Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-04T02:29:22.804637Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:29:23.198506Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:23.568185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:23.578551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:29:23.660697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:23.912789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:24.198111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:24.294273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:26.522062Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327922191705815:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:26.522195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:26.522757Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327922191705825:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:26.522797Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:27.066463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:27.155121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:27.186012Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591327905011834798:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:27.186092Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:29:27.293604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:27.388787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:27.483488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:27.582822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:27.677361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:27.828587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:28.032475Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327930781641299:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:28.032578Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:28.033072Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327930781641304:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:28.033113Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327930781641305:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:28.033239Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:28.042155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:28.071313Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591327930781641308:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:29:28.139475Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591327930781641359:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:29:31.131119Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7591327943666543583:2543], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dd7f634xkz9b71k6pbqyz. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MTQwMTRhZTAtNDAyYTYyMjMtNTFjMzYyNDUtMTI5N ... yResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Duplicated keys found." issue_code: 2012 severity: 1 } trace_id#
: Error: Execution, code: 1060
: Error: Duplicated keys found., code: 2012 Trying to start YDB, gRPC: 30855, MsgBus: 22870 2026-01-04T02:29:32.394091Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591327950199881147:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:32.394137Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:29:32.422123Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:29:32.483566Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:32.524966Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:32.525061Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:32.528085Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:32.625983Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:29:32.686125Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:29:32.686143Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:29:32.686153Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:29:32.686216Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:29:33.398037Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:33.747545Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:33.752901Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:29:33.766839Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:33.883764Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:34.285380Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:34.493165Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:37.395145Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591327950199881147:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:37.395212Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:29:41.607526Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591327988854588674:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:41.607626Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:41.608124Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591327988854588684:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:41.608187Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:41.789412Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:41.858339Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:41.908573Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:41.986567Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:42.062215Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:42.139622Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:42.237496Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:42.434992Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:42.595613Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591327993149556863:2495], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:42.595722Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:42.596241Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591327993149556868:2498], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:42.596290Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591327993149556869:2499], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:42.596327Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:42.601991Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:42.642462Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591327993149556872:2500], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:29:42.717367Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591327993149556923:3792] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Table intent determination, code: 1040
:3:35: Error: INSERT OR IGNORE is not yet supported for Kikimr. |85.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> IndexBuildTest::BaseCaseUniq [GOOD] >> IndexBuildTest::CancelBuild |85.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith4AndMoreCPUs [GOOD] |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |85.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |85.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_vector_index_build_reboots/tx-schemeshard-ut_vector_index_build_reboots |85.1%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest >> DataShardVolatile::DistributedWriteWithAsyncIndex [GOOD] >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit |85.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith1CPU [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadSmall [GOOD] >> THealthCheckTest::UnknowPDiskState [GOOD] >> THealthCheckTest::TestSystemStateRetriesAfterReceivingResponse |85.1%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest |85.1%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [GOOD] >> AutoConfig::GetASPoolsWith2CPUs [GOOD] >> AutoConfig::GetASPoolsWith3CPUs [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 >> XdsBootstrapConfigInitializer::CanNotSetGrpcXdsBootstrapConfigEnvIfVariableAlreadySet [GOOD] |85.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteSmall [GOOD] |85.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith3CPUs [GOOD] |85.1%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest |85.2%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsith1CPU [GOOD] >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] >> XdsBootstrapConfigInitializer::CanNotSetEnvIfXdsBootstrapConfigIsAbsent [GOOD] |85.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |85.1%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_read_iterator/ydb-core-tx-datashard-ut_read_iterator >> KqpScan::RemoteShardScan [GOOD] >> KqpScan::ScanDuringSplit >> Cdc::ShouldDeliverChangesOnSplitMerge [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 [FAIL] >> Cdc::ShouldBreakLocksOnConcurrentMoveTable [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentMoveIndex >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 >> Cdc::DropIndex [GOOD] >> Cdc::InitialScan >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 |85.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> XdsBootstrapConfigInitializer::CanNotSetGrpcXdsBootstrapConfigEnvIfVariableAlreadySet [GOOD] |85.2%| [TS] {asan, default-linux-x86_64, release} 
ydb/core/driver_lib/run/ut/unittest >> XdsBootstrapConfigInitializer::CanNotSetEnvIfXdsBootstrapConfigIsAbsent [GOOD] |85.2%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime [GOOD] Test command err: 2026-01-04T02:28:37.830047Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:28:37.863233Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:28:37.863511Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:28:37.870760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:28:37.871022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:28:37.871277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:28:37.871392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:28:37.871511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:28:37.871618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:28:37.871726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:28:37.871839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:28:37.871945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:28:37.872065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:28:37.872214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 
2026-01-04T02:28:37.872319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:28:37.872441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:28:37.918802Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:28:37.918940Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:28:37.919003Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:28:37.919210Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:28:37.919402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:28:37.919487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:28:37.919533Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:28:37.919655Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:28:37.919729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:28:37.919779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:28:37.919823Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:28:37.920024Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:28:37.920093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:28:37.920134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:28:37.920185Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:28:37.920296Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:28:37.920369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:28:37.920422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:28:37.920461Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:28:37.920526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:28:37.920562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:28:37.920597Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:28:37.920635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:28:37.920673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:28:37.920720Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:28:37.920959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:28:37.921043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:28:37.921086Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:28:37.921218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:28:37.921261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:28:37.921289Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:28:37.921362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:28:37.921408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:28:37.921451Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:28:37.921508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:28:37.921554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:28:37.921622Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:28:37.921770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:28:37.921862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
me=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=10; 2026-01-04T02:29:47.297914Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1034; 2026-01-04T02:29:47.298044Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=157486; 2026-01-04T02:29:47.298097Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=157613; 2026-01-04T02:29:47.298173Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=16; 2026-01-04T02:29:47.298599Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=376; 2026-01-04T02:29:47.298644Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=158595; 2026-01-04T02:29:47.298801Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=107; 2026-01-04T02:29:47.298928Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=80; 2026-01-04T02:29:47.299338Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=361; 2026-01-04T02:29:47.299709Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=315; 2026-01-04T02:29:47.332767Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=32975; 2026-01-04T02:29:47.365448Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=32541; 2026-01-04T02:29:47.365574Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=13; 2026-01-04T02:29:47.365625Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2026-01-04T02:29:47.365668Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-04T02:29:47.365741Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=39; 2026-01-04T02:29:47.365784Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-04T02:29:47.370035Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=96; 2026-01-04T02:29:47.370140Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=11; 2026-01-04T02:29:47.370213Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=34; 2026-01-04T02:29:47.370304Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=56; 2026-01-04T02:29:47.370380Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=44; 2026-01-04T02:29:47.370413Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=239628; 2026-01-04T02:29:47.370543Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:29:47.370653Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:29:47.370708Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:29:47.370770Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:29:47.370813Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:29:47.371029Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:29:47.371082Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:29:47.371120Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2026-01-04T02:29:47.371161Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:29:47.371221Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767493420237;tx_id=18446744073709551615;;current_snapshot_ts=1767493719302; 2026-01-04T02:29:47.371270Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:29:47.371309Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:29:47.371344Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:29:47.371424Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:29:47.371606Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.077000s; 2026-01-04T02:29:47.387487Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:29:47.387784Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:29:47.387835Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:29:47.387908Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:29:47.387950Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:29:47.388015Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767493420237;tx_id=18446744073709551615;;current_snapshot_ts=1767493719302; 2026-01-04T02:29:47.388063Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:29:47.388105Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:29:47.388144Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:29:47.388232Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-04T02:29:47.388278Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:29:47.389186Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.046000s; 2026-01-04T02:29:47.389225Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |85.2%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest |85.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetASPoolsWith4AndMoreCPUs [GOOD] |85.2%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest |85.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnvWithSomeNumberOfXdsServers >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnvWithSomeNumberOfXdsServers [GOOD] >> TSyncBrokerTests::ShouldProcessAfterRelease >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 >> TSyncBrokerTests::ShouldProcessAfterRelease [GOOD] >> TSyncBrokerTests::ShouldReleaseInQueue >> KqpQuery::TableSinkWithSubquery [GOOD] >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] |85.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnvWithSomeNumberOfXdsServers [GOOD] |85.2%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 >> StatisticsSaveLoad::Simple |85.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots |85.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_shred_reboots/ydb-core-tx-schemeshard-ut_shred_reboots 
>> TGRpcConsoleTest::SimpleConfigTest [GOOD] >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseInQueue [GOOD] Test command err: 2026-01-04T02:29:54.106094Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2026-01-04T02:29:54.106229Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# [1:6:2053] active# 1 waiting# 1 2026-01-04T02:29:54.106292Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:111: ProcessQueue(), token sent: VDisk actor id# [0:1:2] actor id# [1:6:2053] active# 0 waiting# 1 2026-01-04T02:29:54.106329Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:122: ProcessQueue() done: active# 1 waiting# 0 2026-01-04T02:29:54.106396Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:142: TEvReleaseSyncToken, token released: VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2026-01-04T02:29:54.274070Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 1 waiting# 0 2026-01-04T02:29:54.274211Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# [2:6:2053] active# 1 waiting# 1 2026-01-04T02:29:54.274266Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:161: TEvReleaseSyncToken, removed from queue: VDisk actor id# [0:1:2] actor id# [2:6:2053] active# 1 waiting# 0 |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |85.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> Cdc::ResolvedTimestamps [GOOD] >> Cdc::ResolvedTimestampsMultiplePartitions |85.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> IndexBuildTest::CancelBuild [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::TableSinkWithSubquery [GOOD] Test command err: Trying to start YDB, gRPC: 31044, MsgBus: 26960 2026-01-04T02:29:02.721960Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327821600307187:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:02.722108Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:29:02.831299Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:29:03.450666Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:29:03.468927Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:03.469032Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:03.552455Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:03.616204Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327821600307163:2081] 1767493742693258 != 1767493742693261 2026-01-04T02:29:03.616954Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:03.757918Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:29:03.771364Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:04.021064Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:29:04.021087Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:29:04.022601Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:29:04.022733Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:29:05.061326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:05.076607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:29:05.181534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:05.458352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:05.704707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:05.863464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:07.722792Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591327821600307187:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:07.722881Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:29:09.464694Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327851665080126:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:09.464858Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:09.465792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327851665080136:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:09.465911Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:10.085407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:10.156692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:10.200658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:10.266811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:10.357337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:10.453598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:10.513782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:10.623129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:10.822137Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327855960048324:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:10.822223Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:10.822305Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327855960048329:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:10.825951Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327855960048331:2494], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:10.826030Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:10.829561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:10.858080Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591327855960048332:2495], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:29:10.959260Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591327855960048386:3787] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: E ... etch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:35.645580Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:35.693092Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:36.099150Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:36.575102Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591327966698439583:2438], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:36.575187Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:36.575463Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591327966698439588:2441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:36.575502Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591327966698439589:2442], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:36.575807Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:36.580443Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:36.628203Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591327966698439592:2443], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-04T02:29:36.701233Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591327966698439645:3418] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 30410, MsgBus: 10380 2026-01-04T02:29:44.560726Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591327998443851184:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:44.562140Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:29:44.741986Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:29:44.817286Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:44.817382Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:44.829572Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:44.839149Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:44.840542Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591327998443851061:2081] 1767493784518168 != 1767493784518171 2026-01-04T02:29:45.025789Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:29:45.082478Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:29:45.082503Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:29:45.082513Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:29:45.082602Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:29:45.464429Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:46.434488Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:49.542961Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591327998443851184:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:49.543055Z node 4 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:29:51.480168Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591328028508623033:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:51.480320Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:51.486013Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591328028508623043:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:51.486176Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:51.523091Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:51.636337Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:51.770010Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591328028508623210:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:51.770168Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:51.770812Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591328028508623215:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:51.770866Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591328028508623216:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:51.771198Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:51.776005Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:51.804939Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591328028508623219:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-04T02:29:51.890060Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591328028508623270:2654] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:29:52.440058Z node 4 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037888 Cancelled read: {[4:7591328032803590624:2361], 0} 2026-01-04T02:29:52.701450Z node 4 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037889 Cancelled read: {[4:7591328032803590666:2379], 0} |85.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest >> TGRpcConsoleTest::SimpleConfigTest [GOOD] |85.2%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |85.2%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest >> TReplicationTests::Create |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |85.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/tools/stress_tool/ydb_stress_tool >> TReplicationTests::Disabled |85.2%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tools/stress_tool/ydb_stress_tool |85.2%| [LD] {RESULT} $(B)/ydb/tools/stress_tool/ydb_stress_tool |85.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |85.2%| [LD] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/lib/ydb_cli/topic/ut/ydb-public-lib-ydb_cli-topic-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancelBuild [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:28:49.226790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:28:49.226898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:28:49.226937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:28:49.226972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:28:49.227011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:28:49.227059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:28:49.227148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:28:49.227248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:28:49.228128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:28:49.228468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:28:49.448947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:28:49.449009Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:49.464911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:28:49.469190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:28:49.469405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:28:49.488742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:28:49.489016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:28:49.489743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:28:49.490023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:28:49.493295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:49.493483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:28:49.494865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:28:49.494937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:49.495052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:28:49.495109Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to 
make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:28:49.495181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:28:49.495366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:28:49.719886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:49.720906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:49.721073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:49.721171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:49.721246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:49.721329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:49.721407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:49.721499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:49.721596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:49.721685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:49.721771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:49.723274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:49.723405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:49.723504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:28:49.723595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... on IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-01-04T02:29:55.628887Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [5:134:2157] message: TxId: 281474976710760 2026-01-04T02:29:55.634011Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-04T02:29:55.634100Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-01-04T02:29:55.634136Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710760:0 2026-01-04T02:29:55.634224Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 13 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-01-04T02:29:55.637266Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-01-04T02:29:55.637360Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710760 2026-01-04T02:29:55.637441Z node 5 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2026-01-04T02:29:55.637583Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [5:1563:3426], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-01-04T02:29:55.655278Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking 2026-01-04T02:29:55.655465Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 
Cancellation_Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [5:1563:3426], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:29:55.655541Z node 5 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2026-01-04T02:29:55.658111Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled 2026-01-04T02:29:55.658379Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: index1, IndexColumn: index, State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [5:1563:3426], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:29:55.658450Z node 5 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2026-01-04T02:29:55.658669Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:29:55.658726Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [5:1662:3514] TestWaitNotification: OK eventTxId 102 2026-01-04T02:29:55.669431Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2026-01-04T02:29:55.669763Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: 
TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2026-01-04T02:29:55.675452Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:29:55.675824Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 2.87ms result status StatusSuccess 2026-01-04T02:29:55.676363Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 
MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:29:55.679137Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:29:55.697033Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 17.9ms result status StatusPathDoesNotExist 2026-01-04T02:29:55.697314Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/index1\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 5000041, drop txId: 281474976710759" Path: "/MyRoot/Table/index1" PathId: 39 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |85.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |85.2%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |85.2%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] [FAIL] Test command err: Thread 1 failed |85.2%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |85.2%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/arrow/unittest >> KqpResultSetFormats::ArrowFormat_SchemaInclusionMode_Always [GOOD] Test command err: Trying to start YDB, gRPC: 7953, MsgBus: 18364 2026-01-04T02:26:06.408729Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327064515953199:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:06.408905Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:26:07.011818Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-01-04T02:26:07.049370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:07.049474Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:07.257950Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327064515952986:2081] 1767493566363302 != 1767493566363305 2026-01-04T02:26:07.288070Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:07.292908Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:07.304197Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:26:07.393443Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:07.647047Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:26:07.647080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:07.647088Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:07.647199Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:08.709466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:08.723456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:26:08.885397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:09.226565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:09.492324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:09.646578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:11.409164Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591327064515953199:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:11.409225Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:26:15.644310Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327103170660574:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:15.644409Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:15.645199Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327103170660584:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:15.645247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:16.830307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:16.912789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:17.014195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:17.103080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:17.212174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:17.355151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:17.459446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:17.554237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:17.706595Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327111760596060:2498], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:17.706685Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:17.707133Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327111760596065:2501], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:17.707179Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591327111760596066:2502], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:17.707302Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:26:17.711697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:26:17.736457Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591327111760596069:2503], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:26:17.820181Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591327111760596120:3796] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=280;columns=3; Trying to start YDB, gRPC: 9705, MsgBus: 11508 2026-01-04T02:2 ... KLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591327947397884475:2496], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:32.160108Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:32.170514Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591327947397884480:2499], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:32.170672Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591327947397884481:2500], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:32.170780Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:32.183979Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:32.220697Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7591327947397884486:2503], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:29:32.298640Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7591327947397884551:3826] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:29:33.544392Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:29:33.544422Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=360;columns=3; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=248;columns=2; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; Trying to start YDB, gRPC: 9432, MsgBus: 3610 2026-01-04T02:29:40.432450Z node 13 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[13:7591327981864794811:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:40.432529Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:29:40.551947Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:29:40.782041Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:29:40.893669Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:40.898036Z node 13 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:40.911822Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:40.922160Z node 13 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [13:7591327981864794709:2081] 1767493780389365 != 1767493780389368 2026-01-04T02:29:41.074186Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:41.325929Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:29:41.420062Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:29:41.420089Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:29:41.420103Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:29:41.420243Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:29:41.488991Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:43.607841Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:43.622818Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:29:43.655447Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:45.434191Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7591327981864794811:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:45.434304Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:29:52.501371Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7591328033404403645:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:52.501549Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:52.502550Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7591328033404403662:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:52.502702Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:52.503335Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7591328033404403668:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:52.513489Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:52.579077Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7591328033404403670:2382], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:29:52.644210Z node 13 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [13:7591328033404403723:2877] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1232;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1232;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1936;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=528;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=1680;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=896;columns=4; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=448;columns=4; |85.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |85.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/arrow/unittest |85.2%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |85.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |85.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |85.2%| [LD] {RESULT} $(B)/ydb/services/persqueue_v1/ut/ydb-services-persqueue_v1-ut |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |85.3%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest >> alter_compression.py::TestAlterCompression::test_availability_data [GOOD] >> DataShardVolatile::DistributedWriteThenLateWriteReadCommit [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink >> TReplicationTests::Create [GOOD] >> TReplicationTests::CreateDropRecreate >> TReplicationTests::Disabled [GOOD] |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |85.3%| [TA] $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} >> TReplicationTests::CreateSequential |85.3%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |85.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ExtBlobsMultipleColumns [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:29:43.974150Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:29:44.103474Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:29:44.103949Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:29:44.104008Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:29:44.424573Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:44.424753Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:44.558451Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493780884565 != 1767493780884568 2026-01-04T02:29:44.572133Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:44.623265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:44.768222Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:29:45.263818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:45.286789Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:45.420972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:45.892119Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:960:2816], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:45.892321Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:45.892439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:970:2821], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:45.893458Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:976:2826], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:45.893684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:45.898680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:46.050342Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:974:2824], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:29:46.120269Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1032:2863] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/dynamic_config/ut/unittest |85.3%| [TM] {BAZEL_UPLOAD} ydb/services/dynamic_config/ut/unittest |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> THealthCheckTest::TestSystemStateRetriesAfterReceivingResponse [GOOD] >> THealthCheckTest::TestStateStorageYellow |85.3%| [TA] $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> TReplicationTests::CreateDropRecreate [GOOD] >> TReplicationTests::ConsistencyLevel >> TReplicationTests::CreateSequential [GOOD] >> TReplicationTests::CreateInParallel >> StatisticsSaveLoad::Delete |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> VectorIndexBuildTest::SimpleDuplicates-Overlap-true [GOOD] >> VectorIndexBuildTest::PrefixedDuplicates >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-38 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 >> KqpScan::ScanRetryReadRanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithCompaction [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:29:44.912412Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:29:45.045390Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:29:45.045868Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:29:45.045940Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:29:45.355199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:45.355335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:45.445671Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493779512210 != 1767493779512213 2026-01-04T02:29:45.458913Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:45.506876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:45.600615Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:29:45.978824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:45.995194Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:46.143323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:46.519066Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:960:2816], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:46.519236Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:46.519352Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:970:2821], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:46.520452Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:976:2826], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:46.520636Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:46.525583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:46.679666Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:974:2824], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:29:46.830164Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1032:2863] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:01.281429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> TEvLocalSyncDataTests::SqueezeBlocks3 [GOOD] >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 >> TQuorumTrackerTests::Erasure4Plus2BlockNotIncludingMyFailDomain_8_2 [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] >> TSyncNeighborsTests::SerDes3 [GOOD] >> TReplicationTests::CreateInParallel [GOOD] >> TReplicationTests::CreateWithoutCredentials >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 >> TSyncBrokerTests::ShouldReturnTokensWithSameVDiskId [GOOD] >> TSyncNeighborsTests::SerDes1 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 >> TReplicationTests::ConsistencyLevel [GOOD] >> TReplicationTests::CommitInterval >> TSyncNeighborsTests::SerDes2 [GOOD] |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_4_2 [GOOD] |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::Erasure4Plus2BlockIncludingMyFailDomain_8_2 [GOOD] |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes3 [GOOD] |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-12 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes1 [GOOD] Test command err: 2026-01-04T02:30:05.015100Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2026-01-04T02:30:05.015239Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:51: TEvQuerySyncToken, token sent (1): VDisk actor id# [0:1:1] actor id# [1:6:2053] active# 1 waiting# 0 |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest |85.3%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncNeighborsTests::SerDes2 [GOOD] >> TSyncBrokerTests::ShouldEnqueue >> TSyncBrokerTests::ShouldEnqueue [GOOD] >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId >> TReplicationTests::CreateWithoutCredentials [GOOD] >> TReplicationTests::SecureMode |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanRetryReadRanges [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:29:33.915968Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:29:33.917293Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:29:33.931130Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:685:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:29:34.182645Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:29:34.209577Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:29:34.211104Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2225], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:29:34.211456Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:29:34.211758Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:29:34.228345Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:29:34.228449Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:29:35.157665Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:35.331861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:35.331997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:35.332605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:35.332711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:35.398969Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:29:35.399616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:35.400014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:35.592353Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:29:35.652902Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:29:36.745041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:36.810835Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:36.811001Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:37.270266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:38.232840Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1724:3111], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:38.232999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1735:3116], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:38.233488Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:38.243366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1740:3121], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:38.243977Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:38.260613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:38.959807Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1738:3119], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:29:39.198487Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1832:3185] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } -- nodeId: 2 -- EvScan [1:1912:3238] -> [2:1867:2433] -- EvScanData from [2:1917:2440]: pass -- EvStreamData: {"ResultSet":{"columns":[{"name":"column0","type":{"optional_type":{"item":{"type_id":4}}}}],"rows":[{"items":[{"uint64_value":596400}]}],"format":1},"SeqNo":1,"QueryResultIndex":0,"ChannelId":3,"VirtualTimestamp":{"Step":3000,"TxId":281474976715661},"Finished":true} 2026-01-04T02:29:43.655521Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976715661] shutting down ... waiting for SysViewsRoster update finished 2026-01-04T02:29:56.099568Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:29:56.100687Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:29:56.101215Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:680:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:29:56.113583Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:29:56.114385Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:29:56.117768Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:685:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:29:56.118488Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:29:56.118703Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:29:56.121235Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:29:56.121517Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:29:56.523030Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:56.665730Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:56.665901Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:56.666962Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:56.667156Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:56.722350Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-04T02:29:56.723291Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:56.723720Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:56.840801Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:29:56.893328Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:29:58.004993Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:58.118703Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:58.119988Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:58.536845Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:59.377581Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1729:3115], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:59.377718Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1740:3120], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:59.377914Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:59.379100Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1744:3124], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:59.379188Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:29:59.384739Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:29:59.977203Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:1743:3123], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:30:00.075788Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1837:3189] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } -- nodeId: 4 -- EvScan [3:1917:3242] -> [4:1872:2433] -- EvScanData from [4:1921:2440]: pass -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}],"rows":[{"items":[{"uint32_value":2},{"uint32_value":22}]},{"items":[{"uint32_value":21},{"uint32_value":2121}]},{"items":[{"uint32_value":22},{"uint32_value":2222}]},{"items":[{"uint32_value":23},{"uint32_value":2323}]},{"items":[{"uint32_value":24},{"uint32_value":2424}]},{"items":[{"uint32_value":25},{"uint32_value":2525}]},{"items":[{"uint32_value":26},{"uint32_value":2626}]},{"items":[{"uint32_value":27},{"uint32_value":2727}]},{"items":[{"uint32_value":28},{"uint32_value":2828}]},{"items":[{"uint32_value":29},{"uint32_value":2929}]},{"items":[{"uint32_value":40},{"uint32_value":4040}]},{"items":[{"uint32_value":41},{"uint32_value":4141}]},{"items":[{"uint32_value":42},{"uint32_value":4242}]},{"items":[{"uint32_value":43},{"uint32_value":4343}]},{"items":[{"uint32_value":44},{"uint32_value":4444}]},{"items":[{"uint32_value":45},{"uint32_value":4545}]},{"items":[{"uint32_value":46},{"uint32_value":4646}]},{"items":[{"uint32_value":47},{"uint32_value":4747}]},{"items":[{"uint32_value":48},{"uint32_value":4848}]},{"items":[{"uint32_value":49},{"uint32_value":4949}]},{"items":[{"uint32_value":50},{"uint32_value":5050}]}],"format":1},"SeqNo":1,"QueryResultIndex":0,"ChannelId":2,"VirtualTimestamp":{"Step":3000,"TxId":281474976710661},"Finished":false} -- EvStreamData: {"ResultSet":{"columns":[{"name":"key","type":{"optional_type":{"item":{"type_id":2}}}},{"name":"value","type":{"optional_type":{"item":{"type_id":2}}}}],"format":1},"SeqNo":2,"QueryResultIndex":0,"ChannelId":2,"VirtualTimestamp":{"Step":3000,"TxId":281474976710661},"Finished":true} 2026-01-04T02:30:01.938838Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976710661] shutting down |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_scan/unittest >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] >> TQuorumTrackerTests::ErasureNoneNeverHasQuorum_4_1 [GOOD] >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks1 [GOOD] >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] >> Cdc::InitialScan [GOOD] >> Cdc::InitialScan_WithTopicSchemeTx >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 >> TReplicationTests::CommitInterval [GOOD] >> TReplicationTests::Alter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldEnqueueWithSameVDiskId [GOOD] Test command err: 2026-01-04T02:30:06.301224Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 
2026-01-04T02:30:06.301345Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# [1:6:2053] active# 1 waiting# 1 2026-01-04T02:30:06.461884Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 1 waiting# 0 2026-01-04T02:30:06.462053Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:94: TEvQuerySyncToken, enqueued (2): VDisk actor id# [0:1:2] actor id# [2:6:2053] active# 1 waiting# 1 2026-01-04T02:30:06.462121Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:82: TEvQuerySyncToken, enqueued (1): VDisk actor id# [0:1:2] actor id# [2:7:2054] active# 1 waiting# 1 |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReturnToken >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] >> TSyncBrokerTests::ShouldReturnToken [GOOD] >> TSyncBrokerTests::ShouldReleaseToken |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TEvLocalSyncDataTests::SqueezeBlocks2 [GOOD] |85.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TQuorumTrackerTests::ErasureMirror3IncludingMyFailDomain_5_2 [GOOD] |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseToken [GOOD] |85.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TKeyValueTracingTest::ReadHuge |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |85.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl >> Cdc::ResolvedTimestampsMultiplePartitions [GOOD] >> Cdc::ResolvedTimestampsVolatileOutOfOrder |85.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |85.3%| [TA] {RESULT} $(B)/ydb/services/dynamic_config/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.3%| [LD] {RESULT} $(B)/ydb/core/sys_view/ut_large/ydb-core-sys_view-ut_large |85.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/ydb-core-tx-schemeshard-ut_ttl |85.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/arrow/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TReplicationTests::SecureMode [GOOD] >> TReplicationTests::Describe |85.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith3CPUs [GOOD] |85.4%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest >> TKeyValueTracingTest::WriteHuge >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/syncer/ut/unittest >> TSyncBrokerTests::ShouldReleaseToken [GOOD] Test command err: 2026-01-04T02:30:07.649891Z node 1 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [1:5:2052] active# 1 waiting# 0 2026-01-04T02:30:07.774114Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:66: TEvQuerySyncToken, token sent (2): VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 1 waiting# 0 2026-01-04T02:30:07.774281Z node 2 :BS_SYNCER DEBUG: blobstorage_syncer_broker.cpp:142: TEvReleaseSyncToken, token released: VDisk actor id# [0:1:1] actor id# [2:5:2052] active# 0 waiting# 0 |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/syncer/ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TReplicationTests::Alter [GOOD] |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TReplicationTests::CannotAddReplicationConfig |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TReplicationTests::Describe [GOOD] >> TReplicationTests::CreateReplicatedTable >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly |85.4%| [TA] $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> AutoConfig::GetServicePoolsWith2CPUs [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentMoveIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentDropIndex >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-62 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnv |85.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |85.4%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnv [GOOD] >> KqpScan::ScanDuringSplit [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TKeyValueTracingTest::ReadHuge [GOOD] >> TPQTest::TestPartitionTotalQuota [GOOD] >> TPQTest::TestPartitionPerConsumerQuota >> TKeyValueTracingTest::WriteHuge [GOOD] |85.4%| [TS] {asan, default-linux-x86_64, release} ydb/core/driver_lib/run/ut/unittest >> XdsBootstrapConfigInitializer::CanSetGrpcXdsBootstrapConfigEnv [GOOD] |85.4%| [TS] {BAZEL_UPLOAD} ydb/core/driver_lib/run/ut/unittest >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_19_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 19] [FAIL] |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::ReadHuge [GOOD] |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview |85.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/syncer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/sysview/ydb-core-kqp-ut-sysview >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut_trace/unittest >> TKeyValueTracingTest::WriteHuge [GOOD] |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut_trace/unittest >> TReplicationTests::CannotAddReplicationConfig [GOOD] >> TReplicationTests::CannotSetAsyncReplicaAttribute |85.4%| [TA] $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.4%| [TA] $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |85.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp_Reboot [GOOD] Test command err: 2026-01-04T02:28:36.475577Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:28:36.513798Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:28:36.514252Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:28:36.521699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:28:36.522146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:28:36.522413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:28:36.522551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:28:36.522709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:28:36.522792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:28:36.522883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:28:36.522958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:28:36.523047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:28:36.523180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:28:36.523277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:28:36.523395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:28:36.523489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:28:36.551019Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:28:36.551168Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:28:36.551213Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:28:36.551374Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:28:36.551508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:28:36.551608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:28:36.551661Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:28:36.551765Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:28:36.551844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:28:36.551897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:28:36.551943Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:28:36.552131Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:28:36.552224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:28:36.552256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-01-04T02:28:36.552280Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:28:36.552350Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:28:36.552405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:28:36.552445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:28:36.552465Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:28:36.552500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:28:36.552525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:28:36.552547Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:28:36.552578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:28:36.552620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:28:36.552645Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:28:36.552815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:28:36.552852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:28:36.552881Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:28:36.553002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:28:36.553049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:28:36.553073Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:28:36.553112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:28:36.553146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:28:36.553183Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:28:36.553219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:28:36.553252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:28:36.553287Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:28:36.553441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:28:36.553494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ge_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=13; 2026-01-04T02:30:06.064549Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=899; 2026-01-04T02:30:06.064604Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=77824; 2026-01-04T02:30:06.064645Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=77945; 2026-01-04T02:30:06.064697Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=11; 2026-01-04T02:30:06.064994Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=259; 2026-01-04T02:30:06.065040Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=78768; 2026-01-04T02:30:06.065210Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=115; 2026-01-04T02:30:06.065321Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=68; 2026-01-04T02:30:06.065692Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=324; 2026-01-04T02:30:06.066191Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=448; 2026-01-04T02:30:06.089417Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=23129; 2026-01-04T02:30:06.109602Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=20044; 2026-01-04T02:30:06.109728Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2026-01-04T02:30:06.109789Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=13; 2026-01-04T02:30:06.109858Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-04T02:30:06.109953Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=51; 2026-01-04T02:30:06.110002Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-04T02:30:06.110100Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=56; 2026-01-04T02:30:06.110151Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-04T02:30:06.110225Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=33; 2026-01-04T02:30:06.110323Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=56; 2026-01-04T02:30:06.110409Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=49; 2026-01-04T02:30:06.110451Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=133996; 2026-01-04T02:30:06.110596Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:30:06.110718Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:30:06.110780Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:30:06.110853Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:30:06.110901Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:30:06.111119Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:30:06.111193Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:30:06.111232Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2026-01-04T02:30:06.111285Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:30:06.111354Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767493420597;tx_id=18446744073709551615;;current_snapshot_ts=1767493717998; 2026-01-04T02:30:06.111400Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:30:06.111446Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:30:06.111485Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:30:06.111583Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:30:06.111790Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.008000s; 2026-01-04T02:30:06.116407Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:30:06.116694Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:30:06.116754Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:30:06.116834Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:30:06.116887Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:30:06.116953Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767493420597;tx_id=18446744073709551615;;current_snapshot_ts=1767493717998; 2026-01-04T02:30:06.117000Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:30:06.117048Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:30:06.117093Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:30:06.117172Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-04T02:30:06.117227Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:30:06.119018Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.136000s; 2026-01-04T02:30:06.119069Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAlterCompression::test_availability_data [GOOD] |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 >> StatisticsSaveLoad::Simple [GOOD] |85.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |85.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |85.4%| [TA] {RESULT} $(B)/ydb/core/driver_lib/run/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.5%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut_trace/test-results/unittest/{meta.json ... 
results_accumulator.log} |85.5%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/ydb-core-mind-bscontroller-ut_bscontroller |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 [FAIL] |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest >> TReplicationTests::CannotSetAsyncReplicaAttribute [GOOD] >> TReplicationTests::AlterReplicatedTable >> TReplicationTests::CreateReplicatedTable [GOOD] >> TReplicationTests::DropReplicationWithInvalidCredentials >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:30:00.125482Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:00.234310Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:30:00.244279Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:30:00.244723Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:30:00.244874Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:30:00.767126Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:00.895319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:00.895483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:00.948678Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:01.031533Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:30:01.896293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:01.897755Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:01.897848Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:01.897887Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:01.898402Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:02.002369Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:02.900672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:30:08.230464Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:08.240650Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:30:08.245350Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:30:08.330688Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:30:08.330816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:08.375592Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:30:08.378127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:08.663332Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:08.664217Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:08.665137Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:08.666613Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:08.666935Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:08.667207Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:08.667401Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:08.667657Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:08.667780Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:08.883039Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:30:08.911594Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:08.911739Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:08.929464Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:09.178472Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:09.286748Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:30:09.286897Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:30:09.335374Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:30:09.335698Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:30:09.335951Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:30:09.336020Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:30:09.336081Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:30:09.336137Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:30:09.336212Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:30:09.336295Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:30:09.343030Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:30:09.560139Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2223:2620] 2026-01-04T02:30:09.560699Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:30:09.563164Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Describe result: PathErrorUnknown 2026-01-04T02:30:09.563234Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Creating table 2026-01-04T02:30:09.563335Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:30:09.573472Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2627], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:30:09.582732Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2234:2625] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:30:09.607043Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:30:09.693174Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:30:09.721457Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:30:09.721582Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2302:2660], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:30:09.739060Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2328:2683] 2026-01-04T02:30:09.740497Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2328:2683], schemeshard id = 72075186224037897 2026-01-04T02:30:10.494358Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:30:10.496851Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2594:2886], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:30:10.501129Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:10.516218Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:30:10.516362Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on create table tx: 281474976720658 2026-01-04T02:30:10.531871Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on tx: 281474976720658 registered 2026-01-04T02:30:10.859156Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Request: create. Transaction completed: 281474976720658. Doublechecking... 2026-01-04T02:30:11.014558Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:30:11.014654Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Column diff is empty, finishing 2026-01-04T02:30:11.768704Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:11.771710Z node 1 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [1:2788:3376] Owner: [1:2787:3375]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:30:11.771803Z node 1 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [1:2788:3376] Owner: [1:2787:3375]. Column diff is empty, finishing 2026-01-04T02:30:11.772553Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2797:3379], ActorId: [1:2798:3380], Starting query actor #1 [1:2799:3381] 2026-01-04T02:30:11.772644Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2798:3380], ActorId: [1:2799:3381], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:30:11.828140Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2798:3380], ActorId: [1:2799:3381], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=MjBhNGQ2MWUtZjhkNDVjYTYtNjg1YThhMWUtMjdlZmUyYzg=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:30:12.516681Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2819:3395]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:30:12.516940Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:30:12.517044Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:2821:3397] 2026-01-04T02:30:12.517124Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:2821:3397] 2026-01-04T02:30:12.517799Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2822:3007] 2026-01-04T02:30:12.518504Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:2821:3397], server id = [2:2822:3007], tablet id = 72075186224037894, status = OK 2026-01-04T02:30:12.518963Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:2822:3007], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:30:12.519094Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-04T02:30:12.519504Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-04T02:30:12.519610Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:2819:3395], StatRequests.size() = 1 2026-01-04T02:30:12.660788Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:30:12.728288Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2798:3380], ActorId: [1:2799:3381], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=MjBhNGQ2MWUtZjhkNDVjYTYtNjg1YThhMWUtMjdlZmUyYzg=, TxId: 2026-01-04T02:30:12.728395Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2798:3380], ActorId: [1:2799:3381], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=MjBhNGQ2MWUtZjhkNDVjYTYtNjg1YThhMWUtMjdlZmUyYzg=, TxId: 2026-01-04T02:30:12.728747Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2797:3379], ActorId: [1:2798:3380], Got response [1:2799:3381] SUCCESS 2026-01-04T02:30:12.733150Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2839:3402], ActorId: [1:2840:3403], Starting query actor #1 [1:2841:3404] 2026-01-04T02:30:12.733279Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: 
[TQueryBase] OwnerId: [1:2840:3403], ActorId: [1:2841:3404], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:30:12.737418Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2840:3403], ActorId: [1:2841:3404], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=ZTk1NjY3MjktZjc4MmM3ZTAtOTYxZDE1M2YtNGQwZGQ1YjM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2026-01-04T02:30:12.793574Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2850:3413]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:30:12.793845Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:30:12.793914Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:2850:3413], StatRequests.size() = 1 2026-01-04T02:30:13.005049Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2840:3403], ActorId: [1:2841:3404], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZTk1NjY3MjktZjc4MmM3ZTAtOTYxZDE1M2YtNGQwZGQ1YjM=, TxId: 01ke3degq90dzjgs1npas4przv 2026-01-04T02:30:13.005221Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2840:3403], ActorId: [1:2841:3404], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZTk1NjY3MjktZjc4MmM3ZTAtOTYxZDE1M2YtNGQwZGQ1YjM=, TxId: 01ke3degq90dzjgs1npas4przv 2026-01-04T02:30:13.005699Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2839:3402], ActorId: [1:2840:3403], Got response [1:2841:3404] SUCCESS 2026-01-04T02:30:13.009086Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2872:3423], ActorId: [1:2873:3424], Starting query actor #1 [1:2874:3425] 2026-01-04T02:30:13.009177Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2873:3424], ActorId: [1:2874:3425], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:30:13.014475Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2873:3424], ActorId: [1:2874:3425], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=YjNkMGY3YmEtMWY3NjhmZmEtMjYxZGM4NzYtYWJmOTY0MGY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2026-01-04T02:30:13.095411Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2873:3424], ActorId: [1:2874:3425], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=YjNkMGY3YmEtMWY3NjhmZmEtMjYxZGM4NzYtYWJmOTY0MGY=, TxId: 01ke3degrqbvz4b5qqbze3df8p 2026-01-04T02:30:13.095565Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2873:3424], ActorId: [1:2874:3425], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=YjNkMGY3YmEtMWY3NjhmZmEtMjYxZGM4NzYtYWJmOTY0MGY=, TxId: 01ke3degrqbvz4b5qqbze3df8p 2026-01-04T02:30:13.095938Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2872:3423], ActorId: [1:2873:3424], Got response [1:2874:3425] SUCCESS |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TReplicationTests::AlterReplicatedTable [GOOD] >> TReplicationTests::AlterReplicatedIndexTable |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TReplicationTests::DropReplicationWithInvalidCredentials [GOOD] >> TReplicationTests::DropReplicationWithUnknownSecret >> TSchemeShardTTLTests::BuildIndexShouldSucceed |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8_Reboot [GOOD] Test command err: 2026-01-04T02:28:34.664075Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:28:34.704104Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:28:34.704394Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:28:34.713121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:28:34.713380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:28:34.713884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:28:34.714072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:28:34.714207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:28:34.714315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:28:34.714432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:28:34.714545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:28:34.714645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:28:34.714781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:28:34.714922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:28:34.715026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:28:34.715126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:28:34.747903Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:28:34.748071Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:28:34.748135Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:28:34.748358Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:28:34.748538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:28:34.748631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:28:34.748676Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:28:34.748763Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:28:34.748819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:28:34.748862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:28:34.748900Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:28:34.749089Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:28:34.749153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:28:34.749199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:28:34.749232Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:28:34.749329Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:28:34.749383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:28:34.749434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:28:34.749476Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:28:34.749531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:28:34.749573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:28:34.749606Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-04T02:28:34.749654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:28:34.749692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:28:34.749744Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:28:34.750028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:28:34.750088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:28:34.750125Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:28:34.750235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:28:34.750294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:28:34.750331Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:28:34.750380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:28:34.750420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:28:34.750475Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:28:34.750533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:28:34.750573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:28:34.750604Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:28:34.750778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-01-04T02:28:34.750844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... e=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=13; 2026-01-04T02:30:10.585590Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1396; 2026-01-04T02:30:10.585679Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=103504; 2026-01-04T02:30:10.585749Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=103654; 2026-01-04T02:30:10.590003Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=4142; 2026-01-04T02:30:10.590588Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=464; 2026-01-04T02:30:10.590663Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=109065; 2026-01-04T02:30:10.590871Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=133; 2026-01-04T02:30:10.591029Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=83; 2026-01-04T02:30:10.591521Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=403; 2026-01-04T02:30:10.591941Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=353; 2026-01-04T02:30:10.647297Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=55243; 2026-01-04T02:30:10.699541Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=52101; 2026-01-04T02:30:10.699685Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2026-01-04T02:30:10.699751Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-04T02:30:10.699795Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-04T02:30:10.699875Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=44; 2026-01-04T02:30:10.699920Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-04T02:30:10.700001Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=48; 2026-01-04T02:30:10.700043Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-04T02:30:10.700109Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=33; 2026-01-04T02:30:10.700220Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=73; 2026-01-04T02:30:10.700336Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=61; 2026-01-04T02:30:10.700393Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=227874; 2026-01-04T02:30:10.700534Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=58229640;raw_bytes=56100060;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23768;raw_bytes=20000;count=1;records=200} inactive {blob_bytes=348800;raw_bytes=16800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:30:10.700645Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:30:10.700704Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:30:10.700770Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:30:10.700819Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:30:10.701011Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:30:10.701080Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:30:10.701117Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:30:10.701163Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:30:10.701230Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767493418800;tx_id=18446744073709551615;;current_snapshot_ts=1767493716190; 2026-01-04T02:30:10.701272Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:30:10.701315Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:30:10.701355Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:30:10.701445Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:30:10.701665Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.199000s; 2026-01-04T02:30:10.720988Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:30:10.721290Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:30:10.721359Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:30:10.721452Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:30:10.721531Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:30:10.721611Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767493418800;tx_id=18446744073709551615;;current_snapshot_ts=1767493716190; 2026-01-04T02:30:10.721672Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:30:10.721738Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:30:10.721900Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:30:10.722009Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-04T02:30:10.722076Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:30:10.722866Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.149000s; 2026-01-04T02:30:10.722941Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] >> Cdc::InitialScan_WithTopicSchemeTx [GOOD] >> Cdc::InitialScan_TopicAutoPartitioning >> KqpSystemView::Join >> KqpSystemView::QuerySessionsOrderByDesc |85.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 >> TReplicationTests::AlterReplicatedIndexTable [GOOD] >> 
TReplicationTests::CopyReplicatedTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:30:16.727840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:30:16.727959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:16.728005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:30:16.728041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:30:16.728083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:30:16.728114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:30:16.728181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:16.728279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:30:16.729165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:30:16.729489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:30:16.829929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:30:16.829997Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:16.844259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:30:16.844503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:30:16.844668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:30:16.852892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:30:16.853142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:30:16.853892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2026-01-04T02:30:16.854230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:30:16.857354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:16.857561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:30:16.858881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:16.858948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:16.859040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:30:16.859096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:30:16.859126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:30:16.859325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:30:17.000927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:17.005713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:17.005917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:17.006026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:17.006094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:17.006175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:17.006255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:17.006310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:17.006390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:17.006463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:17.006535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:17.006612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:17.006822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:17.007109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:17.007203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... shard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-04T02:30:18.476743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:30:18.476826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:30:18.476883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:30:18.476923Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-04T02:30:18.476966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:30:18.477063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-04T02:30:18.494745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:30:18.515340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 
ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1134 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-04T02:30:18.515406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-04T02:30:18.515566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1134 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-04T02:30:18.515708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1134 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:30:18.516396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 734 RawX2: 4294970012 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:30:18.516432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-04T02:30:18.516541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 734 RawX2: 4294970012 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:30:18.516584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:30:18.516663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 734 RawX2: 4294970012 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:30:18.516720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:18.516759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:18.516785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-04T02:30:18.516813Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-04T02:30:18.520814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:18.523151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:18.523435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:18.523481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:30:18.523562Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:30:18.523587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:30:18.523613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:30:18.523638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:30:18.523678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:30:18.523733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:783:2750] message: TxId: 102 2026-01-04T02:30:18.523772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:30:18.523806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:30:18.523835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:30:18.523937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:30:18.529745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:30:18.529821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:867:2826] TestWaitNotification: OK eventTxId 102 2026-01-04T02:30:18.530363Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:30:18.530636Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 282us result status StatusSuccess 2026-01-04T02:30:18.531177Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 
SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] >> KqpSystemView::PartitionStatsRange1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnBeforeEpochTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:30:18.266887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:30:18.267000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:18.267043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:30:18.267082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:30:18.267121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:30:18.267151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:30:18.267204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:18.267289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:30:18.268200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:30:18.268506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:30:18.378913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:30:18.378984Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:18.404868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:30:18.405109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:30:18.405274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:30:18.418918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:30:18.419206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:30:18.420063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:18.420427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:30:18.423784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:18.423991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:30:18.425372Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:18.425446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:18.425569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:30:18.425627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:30:18.425673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:30:18.425950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:30:18.606713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.607906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.608050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.608169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.608265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.608357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.608462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.608531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.608622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.608724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.608802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.608907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.609000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: 
ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.609093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.609205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 2:30:19.451158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:30:19.451225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:30:19.452957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:19.453003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:30:19.453050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:30:19.454594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:19.454630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:19.454670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:19.454708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:30:19.454827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:30:19.455983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:30:19.456101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at 
step: 5000037 2026-01-04T02:30:19.456357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:19.456446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:30:19.456488Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:19.456710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:30:19.456765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:19.456881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:30:19.456934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:30:19.458492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:19.458539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:30:19.458696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:19.458733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-04T02:30:19.459035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:19.459073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-04T02:30:19.459155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:30:19.459183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:30:19.459214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:30:19.459242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:30:19.459270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:30:19.459304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:30:19.459341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:30:19.459402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:30:19.459460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:30:19.459499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-04T02:30:19.459534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-04T02:30:19.459901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:30:19.459980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:30:19.460013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:30:19.460052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:30:19.460106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:30:19.460187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:30:19.462776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:30:19.463164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1767493819.464045 475399 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2026-01-04T02:30:19.464368Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:669:2659] Bootstrap 2026-01-04T02:30:19.465334Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:669:2659] Become StateWork (SchemeCache [1:674:2664]) 2026-01-04T02:30:19.467902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } 
KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:30:19.468185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:30:19.468283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3153600000 Tiers { ApplyAfterSeconds: 3153600000 Delete { } } } }, at schemeshard: 72057594046678944 2026-01-04T02:30:19.468626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL should be less than 1767493819 seconds (20457 days, 56 years). The ttl behaviour is undefined before 1970., at schemeshard: 72057594046678944 2026-01-04T02:30:19.469442Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:669:2659] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:30:19.471928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL should be less than 1767493819 seconds (20457 days, 56 years). The ttl behaviour is undefined before 1970." TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:30:19.472127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL should be less than 1767493819 seconds (20457 days, 56 years). 
The ttl behaviour is undefined before 1970., operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-01-04T02:30:19.472442Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> KqpSysColV1::SelectRange >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] >> KqpSysColV1::StreamSelectRowById ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiers [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:30:18.211199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:30:18.211280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:18.211309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:30:18.211335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:30:18.211363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:30:18.211386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:30:18.211419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:18.211484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:30:18.212103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:30:18.212367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:30:18.294375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:30:18.294434Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:18.309933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:30:18.310182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:30:18.310799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: 
UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:30:18.354482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:30:18.354767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:30:18.355533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:18.355841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:30:18.368135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:18.368372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:30:18.369639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:18.369705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:18.369860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:30:18.369916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:30:18.369966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:30:18.370181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:30:18.596343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.597366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.597503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.597593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.597683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.597783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.597881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.597954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.598043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.598135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.598232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.598313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.598406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.598508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.598613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
d: 72057594046678944, LocalPathId: 1], version: 8 2026-01-04T02:30:19.798435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:30:19.799077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:30:19.799164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:30:19.799201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:30:19.799227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:30:19.799260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:30:19.799332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-01-04T02:30:19.799944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1588 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-04T02:30:19.800075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-04T02:30:19.800268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1588 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-04T02:30:19.800390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1588 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-04T02:30:19.801940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 707 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 
2026-01-04T02:30:19.801996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-04T02:30:19.802143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 707 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:30:19.802199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:30:19.802289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 707 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:30:19.802356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:19.802412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:30:19.802459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:30:19.802528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-04T02:30:19.807704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:30:19.807863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:30:19.808038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:30:19.808372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:30:19.808701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:30:19.808752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:30:19.808858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:30:19.808902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:30:19.808947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:30:19.808992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:30:19.809038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-04T02:30:19.809117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:735:2713] message: TxId: 101 2026-01-04T02:30:19.809168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:30:19.809222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:30:19.809254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:30:19.809418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:30:19.811087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:30:19.811144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:736:2714] TestWaitNotification: OK eventTxId 101 2026-01-04T02:30:19.811607Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:30:19.811865Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 271us result status StatusSuccess 2026-01-04T02:30:19.812358Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::DropReplicationWithUnknownSecret [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:29:58.553965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:29:58.554055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:29:58.554097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:29:58.554163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:29:58.554219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:29:58.554251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:29:58.554304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:29:58.554392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:29:58.555232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:29:58.555570Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:29:58.790119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:29:58.790187Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:58.821327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:29:58.835025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:29:58.835283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:29:58.844375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:29:58.844716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:29:58.845450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:29:58.845746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:29:58.862656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:58.862945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:29:58.864361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:29:58.864435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:58.864558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:29:58.864606Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:29:58.864647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:29:58.864793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:29:59.109143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:59.110472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: 
EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:59.110617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:59.110732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:59.110804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:59.110866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:59.110963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:59.111080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:59.111184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:59.111274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:59.111346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:59.111450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:59.111524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:59.111589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:59.111681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
SHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:30:19.348575Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:30:19.348630Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-04T02:30:19.348695Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:30:19.348755Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:30:19.348802Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:30:19.348970Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:30:19.349031Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-04T02:30:19.349087Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-04T02:30:19.349136Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-04T02:30:19.350001Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274137603, Sender [10:210:2211], Recipient [10:127:2152]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 10 } 2026-01-04T02:30:19.350053Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5332: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2026-01-04T02:30:19.350126Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:30:19.350210Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:30:19.350250Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:30:19.350307Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-04T02:30:19.350373Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:30:19.350485Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:30:19.351154Z node 10 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 274137603, Sender [10:210:2211], Recipient [10:127:2152]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 } 2026-01-04T02:30:19.351199Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5332: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2026-01-04T02:30:19.351259Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:30:19.351338Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:30:19.351370Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:30:19.351403Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-04T02:30:19.351439Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:30:19.351536Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:30:19.351594Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:30:19.351983Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435084, Sender [10:127:2152], Recipient [10:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2026-01-04T02:30:19.352031Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5439: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2026-01-04T02:30:19.352105Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:30:19.352178Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:30:19.352287Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:30:19.355253Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:30:19.356830Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:30:19.356878Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at 
tablet# 72057594046678944 2026-01-04T02:30:19.356982Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:30:19.357017Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:30:19.357316Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:30:19.357657Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:30:19.357751Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:30:19.358223Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [10:845:2810], Recipient [10:127:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:30:19.358297Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:30:19.358351Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-04T02:30:19.358511Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [10:792:2757], Recipient [10:127:2152]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2026-01-04T02:30:19.358552Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-04T02:30:19.358656Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:30:19.358776Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:30:19.358828Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [10:843:2808] 2026-01-04T02:30:19.359050Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [10:845:2810], Recipient [10:127:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:30:19.359102Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:30:19.359151Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2026-01-04T02:30:19.359612Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [10:846:2811], Recipient [10:127:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-04T02:30:19.359682Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:30:19.359804Z node 10 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Replication" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:30:19.360038Z node 10 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Replication" took 239us result status StatusPathDoesNotExist 2026-01-04T02:30:19.360250Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Replication\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Replication" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> KqpSystemView::Sessions-EnableRealSystemViewPaths >> BsControllerConfig::MergeIntersectingBoxes |85.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_replication/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_ColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:30:18.648806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:30:18.648925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:18.648984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:30:18.649020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:30:18.649056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:30:18.649148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:30:18.649217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:18.649284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:30:18.650159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:30:18.650512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:30:18.786476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:30:18.786538Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:18.826931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:30:18.831403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:30:18.831591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:30:18.862431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:30:18.862695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:30:18.863417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:18.863680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:30:18.878536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:18.878743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:30:18.880059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:18.880123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:18.880298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:30:18.880346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:30:18.880389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:30:18.880549Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:30:19.081651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.090992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.091195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.091291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.091391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.091471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.091552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.091622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.091716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.091811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.091884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.091950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.092014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.092106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.092254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
HARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:19.884725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:30:19.884857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:30:19.886706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:30:19.886891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-04T02:30:19.887315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:19.887435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:30:19.887475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:19.887779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:30:19.887848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:19.888012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:30:19.888085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:30:19.890073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:19.890121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:30:19.890310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:19.890356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: 
TTxPublishToSchemeBoard Send, to populator: [1:208:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:30:19.890739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:19.890782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-04T02:30:19.890881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:30:19.890915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:30:19.890948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:30:19.890981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:30:19.891017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:30:19.891076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:30:19.891114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:30:19.891140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:30:19.891216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:30:19.891265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-04T02:30:19.891307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-04T02:30:19.892076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:30:19.892181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:30:19.892223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:30:19.892277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:30:19.892323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:30:19.892408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at 
schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:30:19.898701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:30:19.899324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1767493819.900637 475447 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 101 2026-01-04T02:30:19.901125Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:673:2663] Bootstrap 2026-01-04T02:30:19.902340Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:673:2663] Become StateWork (SchemeCache [1:678:2668]) 2026-01-04T02:30:19.905288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:30:19.905680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:30:19.906224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2026-01-04T02:30:19.907432Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:673:2663] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:30:19.910598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:30:19.910816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2026-01-04T02:30:19.911437Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 W0000 00:00:1767493819.911900 475447 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TColumnTableDescription: 11:43: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2026-01-04T02:30:19.914753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" } 
TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:30:19.915058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:19.915263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, at schemeshard: 72057594046678944 2026-01-04T02:30:19.917419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "Type \'DyNumber\' specified for column \'modified_at\' is not supported" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:30:19.917598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'DyNumber' specified for column 'modified_at' is not supported, operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] >> BsControllerConfig::ExtendByCreatingSeparateBox >> VectorIndexBuildTest::PrefixedDuplicates [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:30:18.361982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:30:18.362091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:18.362135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:30:18.362176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:30:18.362216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:30:18.362274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:30:18.362329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-04T02:30:18.362400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:30:18.363305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:30:18.363615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:30:18.502477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:30:18.502541Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:18.518129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:30:18.526322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:30:18.526537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:30:18.543047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:30:18.543356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:30:18.544134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:18.544444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:30:18.547696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:18.547890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:30:18.549310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:18.549382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:18.549520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:30:18.549569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:30:18.549623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:30:18.549838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:30:18.825965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.826660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.826735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.826786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.826888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.827000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.827076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.827148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.827266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.827379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.827455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.827515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.827585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.827680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:18.827784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
8Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000042, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:20.532042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000042 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:30:20.532103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000042 2026-01-04T02:30:20.532147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710760:0 128 -> 240 2026-01-04T02:30:20.534702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-01-04T02:30:20.534763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2026-01-04T02:30:20.534848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-04T02:30:20.534890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-04T02:30:20.534927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-04T02:30:20.534953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-04T02:30:20.534996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-01-04T02:30:20.535064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:126:2151] message: TxId: 281474976710760 2026-01-04T02:30:20.535112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-04T02:30:20.535141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-01-04T02:30:20.535172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710760:0 2026-01-04T02:30:20.535240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-01-04T02:30:20.537663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-01-04T02:30:20.537726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710760 2026-01-04T02:30:20.537835Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2026-01-04T02:30:20.537965Z node 1 :BUILD_INDEX DEBUG: 
schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:793:2763], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-01-04T02:30:20.541482Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2026-01-04T02:30:20.541647Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:793:2763], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:30:20.541718Z node 1 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-04T02:30:20.543731Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2026-01-04T02:30:20.543884Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: 
[1:793:2763], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:30:20.543960Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2026-01-04T02:30:20.544138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:30:20.544197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:884:2843] TestWaitNotification: OK eventTxId 102 2026-01-04T02:30:20.544922Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:30:20.545212Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 413us result status StatusSuccess 2026-01-04T02:30:20.545815Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 
1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> BsControllerConfig::DeleteStoragePool >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccessOnSimultaneousAddColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:30:19.465531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:30:19.465627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:19.465671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:30:19.465703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:30:19.465751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:30:19.465787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:30:19.466071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:19.466148Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:30:19.467024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:30:19.467367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:30:19.573620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:30:19.573679Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:19.591105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:30:19.591308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:30:19.591469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:30:19.599765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:30:19.599992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:30:19.600781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:19.601036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:30:19.603986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:19.604171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:30:19.605357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:19.605411Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:19.605516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:30:19.605576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:30:19.605628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:30:19.605832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:30:19.789241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: 
false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.790550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.790690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.790816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.790896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.790977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.791043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.791106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.791214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.791288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.791365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.791456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.791548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.791651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:19.791734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
2:30:20.923286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:20.923343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-04T02:30:20.924132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:30:20.924274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:30:20.924323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:30:20.924365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-04T02:30:20.924430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:30:20.924520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-04T02:30:20.925228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1313 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-04T02:30:20.925273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:30:20.925419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1313 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-04T02:30:20.925616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1313 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:30:20.926393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at 
schemeshard: 72057594046678944, message: Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:30:20.926440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:30:20.926583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:30:20.926655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:30:20.926767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:30:20.926835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:20.926877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:20.926929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:30:20.926970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-04T02:30:20.930917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:30:20.931065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:20.931214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:20.931542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:20.931600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:30:20.931708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:30:20.931753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:30:20.931790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:30:20.931820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 
102 ready parts: 1/1 2026-01-04T02:30:20.931859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:30:20.931936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:743:2721] message: TxId: 102 2026-01-04T02:30:20.932005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:30:20.932044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:30:20.932077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:30:20.932230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:30:20.936348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:30:20.936411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:802:2772] TestWaitNotification: OK eventTxId 102 2026-01-04T02:30:20.936944Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:30:20.937253Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 276us result status StatusSuccess 2026-01-04T02:30:20.937932Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TReplicationTests::CopyReplicatedTable [GOOD] >> BsControllerConfig::ManyPDisksRestarts >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 |85.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |85.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_trace/ydb-core-tx-datashard-ut_trace >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-16 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 >> TPQTest::TestPartitionPerConsumerQuota [GOOD] >> TPQTest::TestPartitionWriteQuota >> BsControllerConfig::SoleCommandRollback >> StatisticsSaveLoad::Delete [GOOD] >> BsControllerConfig::AddDriveSerial >> BsControllerConfig::OverlayMap >> DataShardVolatile::TwoAppendsMustBeVolatile+UseSink [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_replication/unittest >> TReplicationTests::CopyReplicatedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:29:58.422192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:29:58.422289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:29:58.422327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:29:58.422380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:29:58.422438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:29:58.422469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:29:58.422524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:29:58.422603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:29:58.423476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:29:58.423798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:29:58.666554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:29:58.666622Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:58.707083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:29:58.711758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:29:58.711988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:29:58.718204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:29:58.718480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:29:58.719197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:29:58.719468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:29:58.722917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:58.723152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:29:58.724460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:29:58.724528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:58.724644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2026-01-04T02:29:58.724687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:29:58.724727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:29:58.724862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:29:58.960743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:58.961779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:58.962757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:58.962895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:58.962966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:58.963046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:58.963163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:58.963268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:58.963354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:58.963442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:58.963511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:58.963588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:58.963675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:58.963737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:58.963817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... T_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 822 RawX2: 42949675749 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:30:22.097119Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-04T02:30:22.097294Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 822 RawX2: 42949675749 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:30:22.097380Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:30:22.097535Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 822 RawX2: 42949675749 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:30:22.097637Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:22.097707Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1090: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged CollectSchemaChanged: false 2026-01-04T02:30:22.104689Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:22.106100Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:22.125941Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 717 RawX2: 42949675663 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:30:22.126016Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:30:22.126170Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 717 RawX2: 42949675663 } Origin: 
72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:30:22.126233Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:30:22.126334Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 717 RawX2: 42949675663 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:30:22.126401Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:22.126454Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:22.126515Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:30:22.126563Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-04T02:30:22.126594Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-04T02:30:22.137503Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:22.138237Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:22.138325Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2026-01-04T02:30:22.138424Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2026-01-04T02:30:22.138501Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2026-01-04T02:30:22.138625Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2026-01-04T02:30:22.138686Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 240 -> 240 2026-01-04T02:30:22.142610Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:22.142696Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:30:22.142894Z node 10 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:30:22.142948Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:30:22.143006Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:30:22.143056Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:30:22.143109Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:30:22.143218Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [10:745:2723] message: TxId: 102 2026-01-04T02:30:22.143291Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:30:22.143346Z node 10 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:30:22.143398Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:30:22.143596Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:30:22.143652Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:30:22.147291Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:30:22.147395Z node 10 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [10:850:2809] TestWaitNotification: OK eventTxId 102 2026-01-04T02:30:22.148299Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/CopyTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:30:22.148623Z node 10 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/CopyTable" took 368us result status StatusSuccess 2026-01-04T02:30:22.149214Z node 10 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/CopyTable" PathDescription { Self { Name: "CopyTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "CopyTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false 
} KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_replication/unittest >> BsControllerConfig::SoleCommandRollback [GOOD] >> BsControllerConfig::UnsupportedCommandError >> BsControllerConfig::Basic >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 >> BsControllerConfig::OverlayMap [GOOD] >> BsControllerConfig::OverlayMapCrossReferences >> BsControllerConfig::UnsupportedCommandError [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck >> BsControllerConfig::SelectAllGroups >> BsControllerConfig::PDiskCreate >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 >> KqpSystemView::QuerySessionsOrderByDesc [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::UnsupportedCommandError [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:26:2057] recipient: [1:23:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:26:2057] recipient: [1:23:2068] Leader for TabletID 72057594037932033 is [1:28:2070] sender: [1:29:2057] recipient: [1:23:2068] 2026-01-04T02:30:23.455235Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:23.456312Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 
2026-01-04T02:30:23.456647Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:23.457234Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:23.459302Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:23.459562Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:23.459592Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:23.459795Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:23.470473Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:23.470657Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:23.470834Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:23.470963Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:23.471091Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:23.471202Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:28:2070] sender: [1:49:2057] recipient: [1:11:2058] 2026-01-04T02:30:23.482830Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:23.482981Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:23.507069Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:23.507205Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:23.507277Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:23.507351Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:23.507475Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:23.507546Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:23.507596Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:23.507677Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:23.518859Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:23.519005Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:23.530358Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:23.530501Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:23.531873Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:23.531923Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:23.532101Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:23.532148Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:23.557147Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { UpdateSettings { PDiskSpaceColorBorder: CYAN } } } Status { Success: true } Success: true 2026-01-04T02:30:23.572282Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { UpdateSettings { PDiskSpaceColorBorder: YELLOW } } Rollback: true } Status { Success: true } ErrorDescription: "transaction rollback" RollbackSuccess: true 2026-01-04T02:30:23.599106Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } Status { Success: true BaseConfig { Node { NodeId: 1 PhysicalLocation: "\n\001\000\000\000\024\001\000\000\000\036\001\000\000\000(\001\000\000\000" HostKey { Fqdn: "::1" IcPort: 12001 } Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } } Settings { DefaultMaxSlots: 16 EnableSelfHeal: true EnableDonorMode: true ScrubPeriodicitySeconds: 2592000 PDiskSpaceMarginPromille: 150 GroupReserveMin: 0 GroupReservePartPPM: 0 MaxScrubbedDisksAtOnce: 4294967295 PDiskSpaceColorBorder: CYAN EnableGroupLayoutSanitizer: false AllowMultipleRealmsOccupation: true UseSelfHealLocalPolicy: false TryToRelocateBrokenDisksLocallyFirst: false } } } Success: true ConfigTxSeqNo: 1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [2:26:2057] recipient: [2:24:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [2:26:2057] recipient: [2:24:2068] Leader for TabletID 72057594037932033 is [2:28:2070] sender: [2:29:2057] recipient: [2:24:2068] 2026-01-04T02:30:23.840497Z node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:23.841494Z node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:23.841777Z node 2 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:23.850121Z 
node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:23.851506Z node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:23.851719Z node 2 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:23.851746Z node 2 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:23.851934Z node 2 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:23.877562Z node 2 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:23.877680Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:23.877784Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:23.877936Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:23.878059Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:23.878159Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [2:28:2070] sender: [2:49:2057] recipient: [2:11:2058] 2026-01-04T02:30:23.889530Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:23.889735Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:23.914293Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:23.914454Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:23.914537Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:23.914625Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:23.914762Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:23.914833Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:23.914912Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:23.914971Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:23.925646Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:23.925775Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:23.939763Z node 2 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEna ... 16 EnableSelfHeal: true EnableDonorMode: true ScrubPeriodicitySeconds: 2592000 PDiskSpaceMarginPromille: 150 GroupReserveMin: 0 GroupReservePartPPM: 0 MaxScrubbedDisksAtOnce: 4294967295 PDiskSpaceColorBorder: CYAN EnableGroupLayoutSanitizer: false AllowMultipleRealmsOccupation: true UseSelfHealLocalPolicy: false TryToRelocateBrokenDisksLocallyFirst: false } } } Success: true ConfigTxSeqNo: 1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [3:26:2057] recipient: [3:23:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [3:26:2057] recipient: [3:23:2068] Leader for TabletID 72057594037932033 is [3:28:2070] sender: [3:29:2057] recipient: [3:23:2068] 2026-01-04T02:30:24.265645Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:24.266785Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:24.267035Z node 3 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:24.267651Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:24.268835Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:24.269041Z node 3 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:24.269071Z node 3 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:24.269236Z node 3 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:24.278714Z node 3 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:24.278836Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:24.278935Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:24.279030Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:24.279161Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:24.279240Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [3:28:2070] sender: [3:49:2057] recipient: [3:11:2058] 2026-01-04T02:30:24.292184Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue 
Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:24.292301Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:24.315535Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:24.315684Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:24.315770Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:24.315876Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:24.315979Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:24.316029Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:24.316065Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:24.316126Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:24.328699Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:24.328839Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:24.342323Z node 3 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:24.342456Z node 3 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:24.343842Z node 3 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:24.343903Z node 3 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:24.344106Z node 3 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:24.344149Z node 3 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:24.344747Z node 3 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { } } Status { ErrorDescription: "unsupported command 0" } ErrorDescription: "unsupported command 0" Leader for TabletID 72057594037932033 is [0:0:0] sender: [4:26:2057] recipient: [4:23:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [4:26:2057] recipient: [4:23:2068] Leader for TabletID 72057594037932033 is [4:28:2070] sender: [4:29:2057] recipient: 
[4:23:2068] 2026-01-04T02:30:24.568744Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:24.569385Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:24.569565Z node 4 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:24.570460Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:24.570867Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:24.571000Z node 4 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:24.571019Z node 4 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:24.571139Z node 4 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:24.580228Z node 4 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:24.580380Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:24.580498Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:24.580653Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:24.580763Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:24.580880Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [4:28:2070] sender: [4:49:2057] recipient: [4:11:2058] 2026-01-04T02:30:24.592330Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:24.592499Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:24.618391Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:24.618528Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:24.618603Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:24.618698Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:24.618842Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:24.618927Z node 4 :BS_CONTROLLER DEBUG: 
{BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:24.618969Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:24.619019Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:24.630302Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:24.630428Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:24.642317Z node 4 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:24.642437Z node 4 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:24.643703Z node 4 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:24.643767Z node 4 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:24.643964Z node 4 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:24.644007Z node 4 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:24.644663Z node 4 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { } } Status { ErrorDescription: "unsupported command 0" } ErrorDescription: "unsupported command 0" |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::Delete [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:30:07.926101Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:08.293328Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:30:08.303771Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:30:08.304233Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:30:08.304438Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:30:09.264336Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:09.465069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:09.465226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:09.527959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:09.682334Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:30:11.068315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:11.069465Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:11.069531Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:11.069572Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:11.074707Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:11.146476Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:11.931678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:30:17.268667Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:17.278123Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:30:17.288445Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:30:17.325455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:30:17.325579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:17.366108Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:30:17.374733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:17.771961Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:17.772752Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:17.784327Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:17.785252Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:17.785556Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:17.785747Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:17.785974Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:17.786165Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:17.786267Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:17.925390Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:30:17.987558Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:17.987701Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:18.003688Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:18.229192Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:18.368276Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:30:18.368410Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:30:18.465254Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:30:18.465596Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:30:18.474159Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:30:18.474270Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:30:18.474342Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:30:18.474407Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:30:18.474466Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:30:18.474546Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:30:18.475682Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:30:18.703272Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2223:2620] 2026-01-04T02:30:18.703787Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:30:18.709967Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Describe result: PathErrorUnknown 2026-01-04T02:30:18.710042Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Creating table 2026-01-04T02:30:18.710172Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:30:18.714924Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2627], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:30:18.718923Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2234:2625] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:30:18.726432Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:30:18.831721Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:30:18.855522Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:30:18.855653Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2302:2660], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:30:18.879101Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2328:2683] 2026-01-04T02:30:18.880543Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2328:2683], schemeshard id = 72075186224037897 2026-01-04T02:30:19.494488Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:30:19.497180Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2594:2886], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:30:19.502380Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:19.507321Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:30:19.507433Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on create table tx: 281474976720658 2026-01-04T02:30:19.531254Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on tx: 281474976720658 registered 2026-01-04T02:30:19.854109Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Request: create. Transaction completed: 281474976720658. Doublechecking... 2026-01-04T02:30:20.023819Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:30:20.023980Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Column diff is empty, finishing 2026-01-04T02:30:20.669690Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:20.672924Z node 1 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [1:2786:3374] Owner: [1:2785:3373]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:30:20.673023Z node 1 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [1:2786:3374] Owner: [1:2785:3373]. Column diff is empty, finishing 2026-01-04T02:30:20.673584Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2795:3377], ActorId: [1:2796:3378], Starting query actor #1 [1:2797:3379] 2026-01-04T02:30:20.673661Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2796:3378], ActorId: [1:2797:3379], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:30:20.754482Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2796:3378], ActorId: [1:2797:3379], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=ZWVmMjMzYzgtZTg2YjlmZmYtYzg2ZGM3ZGUtM2MzNTQzNzc=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:30:21.772601Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:2817:3393]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:30:21.772912Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:30:21.773015Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:2819:3395] 2026-01-04T02:30:21.773107Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:2819:3395] 2026-01-04T02:30:21.773850Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2820:3007] 2026-01-04T02:30:21.774262Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:2819:3395], server id = [2:2820:3007], tablet id = 72075186224037894, status = OK 2026-01-04T02:30:21.774525Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:2820:3007], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:30:21.774617Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-04T02:30:21.774897Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-04T02:30:21.774991Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:2817:3393], StatRequests.size() = 1 2026-01-04T02:30:21.905563Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:30:21.967502Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2796:3378], ActorId: [1:2797:3379], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZWVmMjMzYzgtZTg2YjlmZmYtYzg2ZGM3ZGUtM2MzNTQzNzc=, TxId: 2026-01-04T02:30:21.967603Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2796:3378], ActorId: [1:2797:3379], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=ZWVmMjMzYzgtZTg2YjlmZmYtYzg2ZGM3ZGUtM2MzNTQzNzc=, TxId: 2026-01-04T02:30:21.967956Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2795:3377], ActorId: [1:2796:3378], Got response [1:2797:3379] SUCCESS 2026-01-04T02:30:21.969228Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2837:3400], ActorId: [1:2838:3401], Starting query actor #1 [1:2839:3402] 2026-01-04T02:30:21.969305Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: 
[TQueryBase] OwnerId: [1:2838:3401], ActorId: [1:2839:3402], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:30:21.972734Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2838:3401], ActorId: [1:2839:3402], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=YWY3YzY3ZDYtNWI1NDQzMmItZDdlZDA1YzctNWQyODZjYWY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2026-01-04T02:30:22.079237Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:2848:3411]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:30:22.079504Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:30:22.079566Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:2848:3411], StatRequests.size() = 1 2026-01-04T02:30:22.252443Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2838:3401], ActorId: [1:2839:3402], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=YWY3YzY3ZDYtNWI1NDQzMmItZDdlZDA1YzctNWQyODZjYWY=, TxId: 2026-01-04T02:30:22.252547Z node 1 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [1:2838:3401], ActorId: [1:2839:3402], Finish with SUCCESS, SessionId: ydb://session/3?node_id=1&id=YWY3YzY3ZDYtNWI1NDQzMmItZDdlZDA1YzctNWQyODZjYWY=, TxId: 2026-01-04T02:30:22.253083Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2837:3400], ActorId: [1:2838:3401], Got response [1:2839:3402] SUCCESS 2026-01-04T02:30:22.254050Z node 1 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [1:2870:3415], ActorId: [1:2871:3416], Starting query actor #1 [1:2872:3417] 2026-01-04T02:30:22.254128Z node 1 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [1:2871:3416], ActorId: [1:2872:3417], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:30:22.257649Z node 1 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [1:2871:3416], ActorId: [1:2872:3417], RunDataQuery with SessionId: ydb://session/3?node_id=1&id=ZTRiMDFmNjItZWE4NjUwYTgtMjU3OGIyOTgtN2Q3Y2UyMzI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_type AS Uint32; DECLARE $column_tag AS Uint32; SELECT data FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id AND stat_type = $stat_type AND column_tag = $column_tag; 2026-01-04T02:30:22.286647Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:2881:3426]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:30:22.286896Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:30:22.286950Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [1:2881:3426], StatRequests.size() = 1 2026-01-04T02:30:22.428264Z node 1 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [1:2871:3416], ActorId: [1:2872:3417], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=1&id=ZTRiMDFmNjItZWE4NjUwYTgtMjU3OGIyOTgtN2Q3Y2UyMzI=, TxId: 01ke3desy3bn70xxt80jtkj8k5 2026-01-04T02:30:22.428455Z node 1 :STATISTICS WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [1:2871:3416], ActorId: [1:2872:3417], Finish with BAD_REQUEST, Issues: {
: Error: No data }, SessionId: ydb://session/3?node_id=1&id=ZTRiMDFmNjItZWE4NjUwYTgtMjU3OGIyOTgtN2Q3Y2UyMzI=, TxId: 01ke3desy3bn70xxt80jtkj8k5 2026-01-04T02:30:22.428786Z node 1 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [1:2870:3415], ActorId: [1:2871:3416], Got response [1:2872:3417] BAD_REQUEST >> StatisticsSaveLoad::ForbidAccess [GOOD] |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 >> KqpSystemView::Sessions+EnableRealSystemViewPaths ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QuerySessionsOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 61979, MsgBus: 62889 2026-01-04T02:30:19.642111Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328150345942671:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:19.642339Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:19.994888Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:20.020674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:20.021891Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:20.109463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:20.111548Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:20.112023Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328150345942460:2081] 1767493819599799 != 1767493819599802 2026-01-04T02:30:20.306542Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:20.341881Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:20.341913Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:20.341924Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:20.341999Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:20.632229Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:20.983646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:21.026307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:30:21.041875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:30:24.114701Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328171820779906:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:24.114853Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:24.115343Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328171820779918:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:24.115397Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328171820779919:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:24.115539Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:24.120116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:24.144665Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328171820779922:2347], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:30:24.305441Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328171820779973:2580] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:24.641935Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328150345942671:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:24.642030Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> BsControllerConfig::SelectAllGroups [GOOD] >> BsControllerConfig::SoleCommandErrorWhenCombined >> ExternalIndex::Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TNetClassifierUpdaterTest::TestFiltrationByNetboxTags [GOOD] Test command err: 2026-01-04T02:27:39.649512Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327461943131802:2222];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:39.649596Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:27:40.213374Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:27:40.235296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:27:40.235378Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:27:40.277297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:27:40.284572Z node 1 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#26,[::1]:27553) connection closed with error: Connection refused 2026-01-04T02:27:40.291250Z node 1 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-04T02:27:40.310381Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:40.401408Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:27:40.662725Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:27:44.863707Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591327483006279283:2060];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:44.887698Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:27:45.082964Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:27:45.227904Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:27:45.228023Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:27:45.243360Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:45.254880Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591327483006279264:2081] 1767493664860464 != 1767493664860467 2026-01-04T02:27:45.329977Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:27:45.340286Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:27:45.363614Z node 2 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#28,[::1]:4788) connection closed with error: Connection refused 2026-01-04T02:27:45.387804Z node 2 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-04T02:27:45.757108Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:27:45.889538Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:27:49.080854Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591327507820854893:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:49.130464Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:27:49.336949Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:27:49.343587Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:27:49.343686Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:27:49.349294Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:49.381741Z node 3 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#26,[::1]:9871) connection closed with error: Connection refused 2026-01-04T02:27:49.384116Z node 3 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 
2026-01-04T02:27:49.384577Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:27:49.538022Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:27:50.080826Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:27:54.250894Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591327529719426051:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:54.250932Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:27:54.338140Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:27:54.403777Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:27:54.403855Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:27:54.406662Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:54.417200Z node 4 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#28,[::1]:1104) connection closed with error: Connection refused 2026-01-04T02:27:54.417548Z node 4 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-04T02:27:54.437664Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:27:54.531900Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:27:55.287830Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:27:58.906141Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:27:58.906204Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:27:59.169911Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:27:59.258758Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:27:59.258846Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-04T02:27:59.259964Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:59.314308Z node 5 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#30,[::1]:29095) connection closed with error: Connection refused 2026-01-04T02:27:59.353769Z node 5 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-04T02:27:59.429609Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:27:59.709373Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:27:59.868261Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:28:04.203923Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:28:04.204921Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7591327570985599976:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:28:04.205614Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:28:04.348527Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:28:04.349276Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:04.353959Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7591327570985599861:2081] 1767493684160335 != 1767493684160338 2026-01-04T02:28:04.367204Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:28:04.367283Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:04.390600Z node 6 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#32,[ ... 
s 2026-01-04T02:29:28.721963Z node 18 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:38.564805Z node 19 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[19:7591327974793571574:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:38.564868Z node 19 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:29:38.716349Z node 19 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:29:39.018281Z node 19 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [19:7591327974793571547:2081] 1767493778544887 != 1767493778544890 2026-01-04T02:29:39.065047Z node 19 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:29:39.224097Z node 19 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#30,[::1]:24623) connection closed with error: Connection refused 2026-01-04T02:29:39.224731Z node 19 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-04T02:29:39.225222Z node 19 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:39.225329Z node 19 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:39.231749Z node 19 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:39.314335Z node 19 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:39.413904Z node 19 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:29:39.623629Z node 19 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:40.822522Z node 19 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:29:50.435441Z node 20 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7591328024166835219:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:50.435501Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:29:50.531304Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
dc-1/.metadata/script_executions 2026-01-04T02:29:50.706142Z node 20 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [20:7591328024166835185:2081] 1767493790396904 != 1767493790396907 2026-01-04T02:29:50.736532Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:29:50.745432Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:50.746954Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:50.747065Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:50.750668Z node 20 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#32,[::1]:64280) connection closed with error: Connection refused 2026-01-04T02:29:50.756870Z node 20 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-04T02:29:50.758481Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:51.173269Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:29:51.395212Z node 20 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:59.233440Z node 21 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[21:7591328065483114760:2191];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:59.233522Z node 21 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:29:59.349897Z node 21 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:29:59.595355Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:59.595492Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:59.595775Z node 21 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:59.639713Z node 21 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#34,[::1]:12210) connection closed with error: Connection refused 2026-01-04T02:29:59.646544Z node 21 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-04T02:29:59.689380Z node 21 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:29:59.712628Z node 21 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-01-04T02:30:00.106976Z node 21 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:30:00.259528Z node 21 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:08.980501Z node 22 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[22:7591328105456361531:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:08.980577Z node 22 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:09.157157Z node 22 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:30:09.429959Z node 22 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:30:09.451459Z node 22 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:09.453067Z node 22 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [22:7591328105456361507:2081] 1767493808961567 != 1767493808961570 2026-01-04T02:30:09.474819Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:09.474951Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:09.509404Z node 22 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#36,[::1]:8767) connection closed with error: Connection refused 2026-01-04T02:30:09.511832Z node 22 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-04T02:30:09.512277Z node 22 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:09.909949Z node 22 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:30:10.006156Z node 22 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:18.018132Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:30:18.018277Z node 23 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:30:18.260880Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2026-01-04T02:30:18.261008Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:18.273069Z node 23 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [23:7591328140608433708:2081] 1767493817884960 != 1767493817884963 2026-01-04T02:30:18.277761Z node 23 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:18.339985Z node 23 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(23, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:18.346387Z node 23 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#38,[::1]:16435) connection closed with error: Connection refused 2026-01-04T02:30:18.349587Z node 23 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-04T02:30:18.353997Z node 23 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:30:18.950080Z node 23 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> Cdc::ResolvedTimestampsVolatileOutOfOrder [GOOD] >> Cdc::SequentialSplitMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/database/ut/unittest >> StatisticsSaveLoad::ForbidAccess [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:30:08.060659Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:08.179747Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:30:08.188339Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:30:08.188739Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:30:08.188891Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:30:09.273777Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:09.497393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:09.497562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:09.556094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:09.668568Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:30:10.692669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:10.693688Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:10.693754Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:10.693791Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:10.694208Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:10.798311Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:11.603210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:30:16.903572Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:16.913486Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:30:16.918670Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:30:16.976092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:30:16.976251Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:17.014229Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:30:17.020942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:17.303697Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:17.305090Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:17.305928Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:17.306772Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:17.307034Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:17.307252Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:17.307495Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:17.307717Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:17.307817Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:30:17.469730Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:30:17.519453Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:17.519586Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:17.536036Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:17.809842Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:17.914583Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:30:17.914702Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:30:17.962308Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:30:17.962589Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:30:17.962798Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:30:17.962868Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:30:17.962928Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:30:17.962980Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:30:17.963029Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:30:17.963077Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:30:17.963801Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:30:18.134900Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2223:2620] 2026-01-04T02:30:18.135462Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:30:18.137737Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Describe result: PathErrorUnknown 2026-01-04T02:30:18.137844Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Creating table 2026-01-04T02:30:18.137937Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:30:18.141897Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2627], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:30:18.145918Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2234:2625] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:30:18.150101Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:30:18.236629Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:30:18.253943Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:30:18.254054Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2302:2660], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:30:18.269340Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2328:2683] 2026-01-04T02:30:18.270377Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2328:2683], schemeshard id = 72075186224037897 2026-01-04T02:30:18.842319Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:30:18.844759Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2594:2886], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:30:18.854564Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:18.866059Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:30:18.866197Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on create table tx: 281474976720658 2026-01-04T02:30:18.895939Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on tx: 281474976720658 registered 2026-01-04T02:30:19.229178Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Request: create. Transaction completed: 281474976720658. Doublechecking... 2026-01-04T02:30:19.367450Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:30:19.367548Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Column diff is empty, finishing 2026-01-04T02:30:20.091134Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:20.405644Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2789:3377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:20.405922Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:20.406487Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2807:3382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:20.406603Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:20.428921Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:21.332789Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3096:3434], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:21.512566Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:21.513277Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3100:3437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:21.513400Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:21.519440Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3103:3440]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:30:21.519847Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:30:21.519980Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3105:3442] 2026-01-04T02:30:21.520083Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3105:3442] 2026-01-04T02:30:21.520861Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3106:3200] 2026-01-04T02:30:21.521167Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3105:3442], server id = [2:3106:3200], tablet id = 72075186224037894, status = OK 2026-01-04T02:30:21.521411Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3106:3200], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:30:21.521495Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-04T02:30:21.521754Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-04T02:30:21.528271Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3103:3440], StatRequests.size() = 1 2026-01-04T02:30:21.572001Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:30:21.572455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3110:3446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:21.572641Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:21.573338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3114:3450], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:21.573488Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:21.573582Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3117:3453], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:21.581640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:21.844590Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-04T02:30:21.844680Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:30:21.925965Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3105:3442], schemeshard count = 1 2026-01-04T02:30:22.394977Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3119:3455], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-04T02:30:22.611989Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3225:3520] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:22.634986Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3248:3536]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:30:22.635192Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:30:22.635244Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3248:3536], StatRequests.size() = 1 2026-01-04T02:30:22.946048Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:3327:3566], for# user@builtin, access# DescribeSchema 2026-01-04T02:30:22.946141Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:3327:3566], for# user@builtin, access# DescribeSchema 2026-01-04T02:30:22.959147Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:3317:3562], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:17: Error: At function: KiReadTable!
:2:17: Error: Cannot find table 'db.[/Root/Database/.metadata/_statistics]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:30:22.961967Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YjE1OThiNzYtODRkY2ZhMS0xOGE4NDZmOC1mNjAyMzEzYg==, ActorId: [1:3308:3554], ActorState: ExecuteState, LegacyTraceId: 01ke3detdh6tqgnnk8w91cmtya, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 17 } message: "At function: KiReadTable!" end_position { row: 2 column: 17 } severity: 1 issues { position { row: 2 column: 17 } message: "Cannot find table \'db.[/Root/Database/.metadata/_statistics]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/database/ut/unittest >> Cdc::ShouldBreakLocksOnConcurrentDropIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentCancelBuildIndex >> BsControllerConfig::SoleCommandErrorWhenCombined [GOOD] >> BsControllerConfig::AddDriveSerial [GOOD] >> BsControllerConfig::AddDriveSerialMassive |85.6%| [TA] $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-64 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 >> BsControllerConfig::Basic [GOOD] >> BsControllerConfig::CommandRollbackWhenAlone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::ClickHouseFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 15061, MsgBus: 19523 2026-01-04T02:29:02.292455Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327820624828460:2236];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:02.292521Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:29:03.023304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:03.023431Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:03.060324Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:03.171895Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:29:03.245944Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327820624828249:2081] 1767493742246926 != 1767493742246929 2026-01-04T02:29:03.265586Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
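The statistics unit-test log above exercises the lazy creation of /Root/Database/.metadata/_statistics: the updater proposes ESchemeOpCreateTable, receives a transient "Schemeshard not available" rejection, re-proposes, subscribes to the transaction, and finally treats "Table already exists" plus an empty column diff as success. The sketch below shows that create-then-doublecheck shape in isolation; Status, propose() and schema_matches() are illustrative stand-ins, not YDB's actual table_creator API.

    #include <chrono>
    #include <iostream>
    #include <thread>

    // Possible outcomes of a single "create table" proposal (illustrative only).
    enum class Status { Ok, AlreadyExists, Unavailable };

    // Stand-in for proposing ESchemeOpCreateTable; a real implementation would
    // talk to the schemeshard. Here the first attempt fails transiently.
    Status propose(int attempt) {
        return attempt == 0 ? Status::Unavailable : Status::AlreadyExists;
    }

    // Stand-in for describing the table afterwards ("Doublechecking..." in the log):
    // returns true when the existing schema matches the request (empty column diff).
    bool schema_matches() { return true; }

    bool ensure_table(int max_attempts) {
        for (int attempt = 0; attempt < max_attempts; ++attempt) {
            switch (propose(attempt)) {
                case Status::Ok:
                case Status::AlreadyExists:
                    // Either way, verify the table really has the expected columns.
                    return schema_matches();
                case Status::Unavailable:
                    // Transient error ("Schemeshard not available"): back off and retry.
                    std::this_thread::sleep_for(std::chrono::milliseconds(100 * (attempt + 1)));
                    break;
            }
        }
        return false;
    }

    int main() {
        std::cout << (ensure_table(5) ? "table ready\n" : "giving up\n");
    }

The later SCHEME_ERROR on the same path is a separate issue: the query runs as user@builtin, which is denied DescribeSchema on the path, so compilation cannot resolve the table at all.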
2026-01-04T02:29:03.342384Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:03.421904Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:29:03.610633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:29:03.610659Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:29:03.610665Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:29:03.610740Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:29:04.719833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:29:04.720261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:29:04.727169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:04.727960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:29:04.730804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767493744779, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:29:04.743675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-04T02:29:04.743740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2026-01-04T02:29:04.744028Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7591327824919796080:2251] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-04T02:29:04.744504Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591327820624828217:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:29:04.744648Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591327820624828220:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:29:04.744686Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591327820624828223:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:29:04.744995Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: 
[main][1:7591327820624828703:2213][/Root] Path was updated to new version: owner# [1:7591327820624828539:2120], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:29:04.745389Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7591327824919796080:2251] Ack update: ack to# [1:7591327820624828606:2148], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-04T02:29:04.745612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2026-01-04T02:29:04.746503Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591327824919796190:2289][/Root] Path was updated to new version: owner# [1:7591327824919796184:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:29:04.746704Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591327824919796191:2290][/Root] Path was updated to new version: owner# [1:7591327824919796185:2285], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:29:04.750536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:29:07.294406Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591327820624828460:2236];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:07.294509Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:29:07.386996Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-04T02:29:07.394678Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/c5zo/002cca/r3tmp/spilling-tmp-runner/node_1_3b87eba4-933b1d61-343fedc8-9a346e7e, actor: [1:7591327842099665582:2308] 2026-01-04T02:29:07.394913Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/c5zo/002cca/r3tmp/spilling-tmp-runner E0104 02:29:07.404475319 459721 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:29:07.404673810 
459721 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:29:07.427686Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3dce45e9cqe05wf39tzh71", Request has 18444976579962.123975s seconds to be completed E0104 02:29:07.432453791 459721 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:29:07.432671777 459721 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:29:07.432672Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3dce45e9cqe05wf39tzh71", Created new session, sessionId: ydb://session/3?node_id=1&id=MmU2MjcwOTEtOTAwOGI3YzQtZjRiZTcwMTYtMTExNjNlYzY=, workerId: [1:7591327842099665604:2329], database: /Root, longSession: 1, local sessions count: 1 2026-01-04T02:29:07.432991Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3dce45e9cqe05wf39tzh71 2026-01-04T02:29:07.433104Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:29:07.433148Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-04T02:29:07.433192Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-04T02:29:07.440234Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591327842099665606:2500][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7591327820624828539:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:29:07.440336Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591327842099665607:2501][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7591327820624828539:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:29:07.447261Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591327842099665608:2502][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7591327820624828539:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:29:07.447839Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0104 02:29:07.463154199 459721 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:29:07.463309910 459721 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:29:07.464881Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0104 02:29:07.478794749 459721 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:29:07.478914632 459721 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:29:07.4 ... pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } GENERIC-CONNECTOR-MOCK Actual: data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } table: "example_1" type_mapping_settings { date_time_format: STRING_FORMAT } DescribeTable result. GRpcStatusCode: 0 schema { columns { name: "colDate" type { optional_type { item { type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } E0104 02:30:24.999004212 476060 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:30:24.999196794 476060 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:30:25.009947Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } Call ListSplits. selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2026-01-04T02:30:25.255165Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [9:7591328176075762984:2342] TxId: 281474976710663. Ctx: { TraceId: 01ke3dew6431h3kq964m12yx32, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NmUyYmQzNmItYjdmZWM0MDctMTUwYTU5MDktYmY3ZWNkOTI=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. 
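The E0104 lines from dns_resolver_ares.cc ("no server name supplied in dns URI") and channel.cc ("the target uri is not valid: dns:///") mean gRPC was handed a target with an empty host, so no channel can be built; that appears to be why the row dispatcher's "Create coordination node" attempts here end in CLIENT_INTERNAL_ERROR. A syntactically valid gRPC target needs at least a host, normally host:port. A minimal sketch with the standard gRPC C++ API follows; the endpoint is a placeholder, not the one the test intended.

    #include <grpcpp/grpcpp.h>
    #include <iostream>

    int main() {
        // "dns:///" alone is rejected by the resolver; the authority may be empty,
        // but the path must carry a host (and usually a port), e.g. dns:///host:port.
        const std::string target = "dns:///localhost:2136";  // placeholder endpoint
        auto channel = grpc::CreateChannel(target, grpc::InsecureChannelCredentials());
        std::cout << "channel state: " << channel->GetState(/*try_to_connect=*/false) << '\n';
        return 0;
    }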
splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: CLICKHOUSE endpoint { host: "rc1a-d6dv17lv47v5mcop.db.yandex.net" port: 8443 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: HTTP } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-04T02:30:25.262663Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01ke3dew6431h3kq964m12yx32, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NmUyYmQzNmItYjdmZWM0MDctMTUwYTU5MDktYmY3ZWNkOTI=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7591328176075762988:2357] 2026-01-04T02:30:25.263556Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01ke3dew6431h3kq964m12yx32, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NmUyYmQzNmItYjdmZWM0MDctMTUwYTU5MDktYmY3ZWNkOTI=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7591328176075762989:2358] 2026-01-04T02:30:25.264872Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3dew6431h3kq964m12yx32", Forwarded response to sender actor, requestId: 4, sender: [9:7591328171780795581:2341], selfId: [9:7591328150305958367:2266], source: [9:7591328171780795582:2342] 2026-01-04T02:30:25.266449Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=NmUyYmQzNmItYjdmZWM0MDctMTUwYTU5MDktYmY3ZWNkOTI=, workerId: [9:7591328171780795582:2342], local sessions count: 0 |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> KqpSystemView::ReadSuccess >> KqpSystemView::NodesRange1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::SoleCommandErrorWhenCombined [GOOD] Test command err: 2026-01-04T02:30:25.246941Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:25.248216Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:25.248620Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:25.249248Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:25.250905Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:25.251159Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:25.251188Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:25.251470Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:25.261288Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:25.261415Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:25.261576Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:25.261673Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:25.261824Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:25.261904Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:25.561785Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.227446s 2026-01-04T02:30:25.561953Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.227644s Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:26:2057] recipient: [11:24:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:26:2057] recipient: [11:24:2068] Leader for TabletID 72057594037932033 is [11:28:2070] sender: [11:29:2057] recipient: [11:24:2068] 
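In the ClickHouseFilterPushdown test above, the WHERE predicate is not evaluated inside YDB: the ListSplits and ReadSplits requests sent to the connector mock carry it as filter_typed { comparison { operation: EQ ... } }, and FILTERING_OPTIONAL lets the external source apply it. The right-hand side, typed_value { type { type_id: DATE } value { uint32_value: 20326 } }, uses the usual days-since-1970-01-01 encoding of a Date value; the short C++20 program below (an illustration, not part of the test) converts that literal back to a calendar date.

    #include <chrono>
    #include <cstdint>
    #include <iostream>

    int main() {
        // DATE values in the connector protocol are a day count since the Unix epoch.
        std::uint32_t days_since_epoch = 20326;  // literal pushed down in the filter above
        std::chrono::sys_days d{std::chrono::days{days_since_epoch}};
        std::chrono::year_month_day ymd{d};
        std::cout << int(ymd.year()) << '-' << unsigned(ymd.month()) << '-'
                  << unsigned(ymd.day()) << '\n';  // prints 2025-8-26
        return 0;
    }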
2026-01-04T02:30:27.270149Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:27.270971Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:27.271169Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:27.272957Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:27.273423Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:27.273540Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:27.273588Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:27.273781Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:27.283726Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:27.283857Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:27.283961Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:27.284116Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:27.284231Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:27.284317Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:28:2070] sender: [11:49:2057] recipient: [11:11:2058] 2026-01-04T02:30:27.295536Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:27.295707Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:27.316887Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:27.317022Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:27.317158Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:27.317251Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:27.317347Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:27.317389Z node 11 :BS_CONTROLLER 
DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:27.317426Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:27.317470Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:27.328502Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:27.328654Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:27.340303Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:27.340445Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:27.341837Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:27.341896Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:27.342145Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:27.342192Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:27.342804Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { } } Command { EnableSelfHeal { } } Command { QueryBaseConfig { } } } Status { Success: true } Status { ErrorDescription: "command must be sole" } ErrorDescription: "command must be sole" Leader for TabletID 72057594037932033 is [0:0:0] sender: [12:26:2057] recipient: [12:23:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [12:26:2057] recipient: [12:23:2068] Leader for TabletID 72057594037932033 is [12:28:2070] sender: [12:29:2057] recipient: [12:23:2068] 2026-01-04T02:30:27.590384Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:27.591749Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:27.592003Z node 12 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:27.593067Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:27.593382Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:27.593551Z node 12 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:27.593569Z node 12 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:27.593688Z node 12 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:27.601317Z node 12 :BS_CONTROLLER DEBUG: 
{BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:27.601440Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:27.601531Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:27.601602Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:27.601655Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:27.601702Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [12:28:2070] sender: [12:49:2057] recipient: [12:11:2058] 2026-01-04T02:30:27.613109Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:27.613268Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:27.634598Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:27.634715Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:27.634808Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:27.634880Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:27.634996Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:27.635062Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:27.635109Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:27.635160Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:27.646248Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:27.646374Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:27.657964Z node 12 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:27.658095Z node 12 
:BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:27.659482Z node 12 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:27.659536Z node 12 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:27.659712Z node 12 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:27.659770Z node 12 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:27.660501Z node 12 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { } } Command { EnableSelfHeal { } } Command { QueryBaseConfig { } } } Status { Success: true } Status { ErrorDescription: "command must be sole" } ErrorDescription: "command must be sole" |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::CommandRollbackWhenAlone [GOOD] >> Cdc::InitialScan_TopicAutoPartitioning [GOOD] >> Cdc::InitialScanDebezium >> BsControllerConfig::PDiskCreate [GOOD] >> BsControllerConfig::ReassignGroupDisk >> BsControllerConfig::OverlayMapCrossReferences [GOOD] >> KqpSystemView::PartitionStatsRange1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::CommandRollbackWhenAlone [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:212:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:212:2077] Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:227:2066] recipient: [1:212:2077] 2026-01-04T02:30:24.516556Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:24.517683Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:24.518153Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:24.518798Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:24.520390Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:24.520636Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:24.520664Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:24.520871Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:24.530088Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:24.530211Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:24.530376Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:24.530475Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:24.530569Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:24.530644Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:247:2066] recipient: [1:20:2067] 2026-01-04T02:30:24.542194Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:24.542348Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:24.572662Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:24.572789Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:24.572884Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:24.572983Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:24.573093Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:24.573162Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:24.573210Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:24.573259Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:24.585899Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:24.586050Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:24.597951Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:24.598125Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:24.600133Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:24.600240Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:24.600519Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:24.600586Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:24.621368Z node 1 
:BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:203:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:203:2077] Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:227:2066] recipient: [11:203:2077] 2026-01-04T02:30:26.640454Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:26.641376Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:26.641623Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:26.642997Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:26.643414Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:26.643651Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:26.643678Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:26.643904Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:26.653656Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:26.653786Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:26.653910Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:26.654010Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:26.654132Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:26.654205Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:247:2066] recipient: [11:20:2067] 2026-01-04T02:30:26.666131Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:26.666319Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:26.694413Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:26.694552Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:26.694622Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:26.694706Z node 11 :BS_CONTROLLER DEBUG: 
{BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:26.694830Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:26.694910Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:26.694977Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:26.695043Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:26.709015Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:26.709158Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:26.719982Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:26.720116Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:26.721556Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:26.721608Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:26.721824Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:26.721870Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:26.722539Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {} Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:26:2057] recipient: [21:23:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [21:26:2057] recipient: [21:23:2068] Leader for TabletID 72057594037932033 is [21:28:2070] sender: [21:29:2057] recipient: [21:23:2068] 2026-01-04T02:30:28.338258Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:28.339285Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:28.339539Z node 21 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:28.340864Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:28.341460Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:28.341659Z node 21 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:28.341696Z node 21 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 
2026-01-04T02:30:28.341955Z node 21 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:28.353600Z node 21 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:28.353760Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:28.366370Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:28.366605Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:28.366735Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:28.366831Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [21:28:2070] sender: [21:49:2057] recipient: [21:11:2058] 2026-01-04T02:30:28.378307Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:28.378446Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:28.399905Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:28.400033Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:28.400243Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:28.400375Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:28.400490Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:28.400541Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:28.400592Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:28.400655Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:28.411331Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:28.411463Z node 21 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:28.422166Z node 21 :BS_CONTROLLER DEBUG: 
{BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:28.422321Z node 21 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:28.423612Z node 21 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:28.423660Z node 21 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:28.423840Z node 21 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:28.423882Z node 21 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:28.428058Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenAlone" } } Rollback: true } Status { Success: true } ErrorDescription: "transaction rollback" RollbackSuccess: true Leader for TabletID 72057594037932033 is [0:0:0] sender: [22:26:2057] recipient: [22:24:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [22:26:2057] recipient: [22:24:2068] Leader for TabletID 72057594037932033 is [22:28:2070] sender: [22:29:2057] recipient: [22:24:2068] 2026-01-04T02:30:28.643690Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:28.644735Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:28.644969Z node 22 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:28.646579Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:28.647045Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:28.647152Z node 22 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:28.647196Z node 22 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:28.647366Z node 22 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:28.661733Z node 22 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:28.661910Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:28.662055Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:28.662165Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:28.662261Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:28.662331Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [22:28:2070] sender: [22:49:2057] recipient: [22:11:2058] 2026-01-04T02:30:28.674216Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} 
Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:28.674448Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:28.698328Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:28.698460Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:28.698553Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:28.698646Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:28.698788Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:28.698858Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:28.698896Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:28.698940Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:28.709638Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:28.709790Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:28.722325Z node 22 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:28.722456Z node 22 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:28.723832Z node 22 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:28.723891Z node 22 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:28.724109Z node 22 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:28.724178Z node 22 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:28.724861Z node 22 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenAlone" } } Rollback: true } Status { Success: true } ErrorDescription: "transaction rollback" RollbackSuccess: true >> KqpSysColV1::StreamSelectRowById [GOOD] |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> KqpSysColV1::SelectRange [GOOD] >> 
KqpSysColV0::InnerJoinSelect >> KqpSysColV1::InnerJoinSelectAsterisk >> KqpSysColV1::StreamSelectRowAsterisk |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::OverlayMapCrossReferences [GOOD] |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-41 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 >> KqpSysColV0::SelectRowById >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 15468, MsgBus: 4491 2026-01-04T02:30:20.552177Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.024489s 2026-01-04T02:30:20.572752Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328153395512002:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:20.572858Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:20.960877Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:20.987579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:20.987722Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:21.000006Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:21.098856Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:21.226067Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:21.282497Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:21.282520Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:21.282538Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:21.282613Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:21.578300Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:22.274840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:22.282171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:30:22.359245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:22.601224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:22.817636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:22.900759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:25.204992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328174870350237:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:25.205096Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:25.205480Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328174870350246:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:25.205528Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:25.567018Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328153395512002:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:25.567108Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:25.856899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:25.896655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:25.942768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:25.986758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.036197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.097205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.149109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.207961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.297623Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328179165318422:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.297713Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.298088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328179165318428:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.298146Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328179165318427:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.298185Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.304104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:26.317058Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328179165318431:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:30:26.396453Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328179165318482:3777] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:28.455223Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493828442, txId: 281474976715673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 13607, MsgBus: 11763 2026-01-04T02:30:21.173653Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328159730355750:2195];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:21.174421Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:21.721102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:21.810723Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:21.812809Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:21.906438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:22.010277Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:22.029902Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:22.175018Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:22.222649Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:22.222669Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:22.222680Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:22.222774Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:22.855531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:22.867407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:30:22.957903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:23.231679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:23.408668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:23.489786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:25.775725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328176910226659:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:25.775845Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:25.777483Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328176910226669:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:25.777537Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.135907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.174707Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328159730355750:2195];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:26.174850Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:26.188090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.259892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.292763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.340390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.389561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.453398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.523208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.617560Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328181205194837:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.617651Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.618271Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328181205194842:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.618327Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328181205194843:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.618387Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.623293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:26.670840Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328181205194846:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:30:26.747943Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328181205194899:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:28.761768Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493828786, txId: 281474976710673] shutting down |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 12842, MsgBus: 6897 2026-01-04T02:30:21.018291Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328157175961313:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:21.018362Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:21.070692Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:21.645876Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:21.680394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:21.680510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:21.761336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:21.917239Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:21.922097Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328152880993965:2081] 1767493821007434 != 1767493821007437 2026-01-04T02:30:21.924259Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:22.037932Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:22.126941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:22.126964Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:22.126972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:22.127052Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:22.779489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:22.849028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:23.140693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:23.467250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:23.596374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.025946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328157175961313:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:26.026032Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:26.105526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328178650799621:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.105629Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.110382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328178650799630:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.110468Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.518685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.558548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.606539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.652532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.695964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.744058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.782525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.835804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:26.966586Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328178650800509:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.966678Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.966975Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328178650800515:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.967018Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.967035Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328178650800514:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.971857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:26.985974Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328178650800518:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:30:27.072287Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328182945767867:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSystemView::PartitionStatsSimple |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> ExternalIndex::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:25:28.295565Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:25:28.436661Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:25:28.445091Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:25:28.445152Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/cs_index/external;error=incorrect path status: LookupError; 2026-01-04T02:25:28.445296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:25:28.445745Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:317:2361], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:25:28.917231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:25:28.917378Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:25:29.034799Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493525057095 != 1767493525057099 2026-01-04T02:25:29.053780Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:25:29.104438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:25:29.224455Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:25:29.938927Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [1:69:2116] Handle TEvGetProxyServicesRequest 2026-01-04T02:25:29.939335Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [1:69:2116] Handle TEvGetProxyServicesRequest 2026-01-04T02:25:29.958199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:25:29.959436Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:25:29.959489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:25:29.959524Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:25:29.959780Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:25:29.978864Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:25:30.159039Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:69:2116] Handle TEvProposeTransaction 2026-01-04T02:25:30.159118Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:69:2116] TxId# 281474976710657 ProcessProposeTransaction 2026-01-04T02:25:30.159226Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:69:2116] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:907:2769] 2026-01-04T02:25:30.256439Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:907:2769] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateColumnStore CreateColumnStore { Name: "olapStore" ColumnShardCount: 4 SchemaPresets { Name: "default" Schema { Columns { Name: "timestamp" Type: "Timestamp" NotNull: true } Columns { Name: "resource_id" Type: "Utf8" DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Name: "uid" Type: "Utf8" NotNull: true StorageId: "__MEMORY" } Columns { Name: "level" Type: "Int32" } Columns { Name: "message" Type: "Utf8" StorageId: "__MEMORY" } Columns { Name: 
"new_column1" Type: "Uint64" } Columns { Name: "json_payload" Type: "JsonDocument" } KeyColumnNames: "timestamp" KeyColumnNames: "uid" } } } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T02:25:30.256543Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:907:2769] txid# 281474976710657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:25:30.257129Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:907:2769] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T02:25:30.257236Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:907:2769] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:25:30.257539Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:907:2769] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:25:30.257701Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:907:2769] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:25:30.257865Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:907:2769] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-04T02:25:30.261292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2026-01-04T02:25:30.261935Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:907:2769] txid# 281474976710657 HANDLE EvClientConnected 2026-01-04T02:25:30.262669Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:907:2769] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-04T02:25:30.262759Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:907:2769] txid# 281474976710657 SEND to# [1:906:2768] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-04T02:25:30.344844Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;self_id=[1:966:2815];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:25:30.376486Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;self_id=[1:966:2815];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:25:30.376782Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 72075186224037888 2026-01-04T02:25:30.384190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:966:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:25:30.384500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:966:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:25:30.384726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:966:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:25:30.384861Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:966:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:25:30.384985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:966:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:25:30.385111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:966:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:25:30.385217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:966:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:25:30.385341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:966:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:25:30.385459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:966:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:25:30.385555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:966:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:25:30.385687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:966:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:25:30.385794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:966:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:25:30.385923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:966:2815];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:25:30.412458Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 72075186224037888 2026-01-04T02:25:30.412865Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:25:30.412925Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:25:30.413112Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
... olumn: 1 } message: "Cannot find table \'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 2026-01-04T02:29:48.457639Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-04T02:29:48.457720Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976711216 ProcessProposeKqpTransaction 2026-01-04T02:29:48.468938Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-04T02:29:48.469002Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976711217 ProcessProposeKqpTransaction 2026-01-04T02:29:48.595398Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-04T02:29:48.595459Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976711218 ProcessProposeKqpTransaction 2026-01-04T02:29:48.606769Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-04T02:29:48.606831Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976711219 ProcessProposeKqpTransaction 2026-01-04T02:29:48.803363Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:966:2815];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-01-04T02:29:48.803535Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:968:2817];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-01-04T02:29:48.803601Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:972:2820];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-01-04T02:29:48.803659Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:977:2824];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple`;EXPECTATION=0 2026-01-04T02:29:58.937008Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:11793:10569], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:29:58.940500Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YWYyZjdhN2EtZWM2NThmMGMtMmE5NzcxMTAtMTM2YTA0NTU=, ActorId: [1:11789:10566], ActorState: ExecuteState, LegacyTraceId: 01ke3de2zvew0pyfkfqqmm55g8, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 2026-01-04T02:29:59.659673Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-04T02:29:59.659752Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976711220 ProcessProposeKqpTransaction 2026-01-04T02:29:59.671291Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-04T02:29:59.671359Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976711221 ProcessProposeKqpTransaction 2026-01-04T02:29:59.934670Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-04T02:29:59.934727Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976711222 ProcessProposeKqpTransaction 2026-01-04T02:29:59.944698Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-04T02:29:59.944762Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976711223 ProcessProposeKqpTransaction 2026-01-04T02:30:00.130892Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:966:2815];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-01-04T02:30:00.131041Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:968:2817];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-01-04T02:30:00.131103Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:972:2820];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-01-04T02:30:00.131164Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:977:2824];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/Root/olapStore/olapTable/ext_index_simple1`;EXPECTATION=0 2026-01-04T02:30:11.208535Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-04T02:30:11.208647Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976711224 ProcessProposeKqpTransaction REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/external`;RESULT=;EXPECTATION=1 2026-01-04T02:30:12.136278Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-04T02:30:12.136371Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976711225 ProcessProposeKqpTransaction 2026-01-04T02:30:12.203543Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-04T02:30:12.203627Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976711226 ProcessProposeKqpTransaction 2026-01-04T02:30:12.793148Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-04T02:30:12.793235Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976711227 
ProcessProposeKqpTransaction 2026-01-04T02:30:12.837273Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-04T02:30:12.837361Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976711228 ProcessProposeKqpTransaction 2026-01-04T02:30:13.099752Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:966:2815];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-01-04T02:30:13.099945Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:968:2817];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-01-04T02:30:13.100019Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:972:2820];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-01-04T02:30:13.100087Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:977:2824];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/cs_index/external`;EXPECTATION=1 2026-01-04T02:30:24.045447Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-04T02:30:24.045550Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976711229 ProcessProposeKqpTransaction REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 2026-01-04T02:30:24.703123Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-04T02:30:24.703188Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976711230 ProcessProposeKqpTransaction 2026-01-04T02:30:24.713967Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-04T02:30:24.714031Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976711231 ProcessProposeKqpTransaction 2026-01-04T02:30:24.897390Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-04T02:30:24.897489Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976711232 ProcessProposeKqpTransaction 2026-01-04T02:30:24.910968Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [1:69:2116] Handle TEvExecuteKqpTransaction 2026-01-04T02:30:24.911073Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [1:69:2116] TxId# 281474976711233 ProcessProposeKqpTransaction 2026-01-04T02:30:25.120381Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:966:2815];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-01-04T02:30:25.120559Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:968:2817];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-01-04T02:30:25.120640Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:972:2820];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 
2026-01-04T02:30:25.120722Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:977:2824];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |85.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_16_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 16] [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_17_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 17] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 >> BsControllerConfig::AddDriveSerialMassive [GOOD] >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-53 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 >> KqpSysColV1::StreamInnerJoinTables >> KqpSystemView::Sessions-EnableRealSystemViewPaths [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::AddDriveSerialMassive [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:212:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:212:2077] Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:227:2066] recipient: [1:212:2077] 2026-01-04T02:30:24.042235Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:24.043385Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:24.049935Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:24.050748Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:24.052608Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:24.052897Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:24.052946Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:24.053234Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:24.080791Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:24.090094Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:24.090325Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:24.090455Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:24.090548Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:24.090627Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:247:2066] recipient: [1:20:2067] 2026-01-04T02:30:24.104709Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:24.104853Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:24.144898Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:24.145048Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:24.145166Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:24.145248Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:24.145361Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:24.145430Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:24.145479Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:24.145544Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:24.156507Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:24.156651Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:24.167907Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:24.168048Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:24.169484Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:24.169551Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:24.169848Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:24.169931Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:24.189377Z node 1 
:BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2026-01-04T02:30:24.190778Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2026-01-04T02:30:24.191506Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:203:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:203:2077] Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:227:2066] recipient: [11:203:2077] 2026-01-04T02:30:26.083716Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:26.084833Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:26.085131Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:26.086540Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:26.086951Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:26.087223Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:26.087271Z node 11 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:26.087494Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:26.097104Z node 11 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:26.097249Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:26.097358Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:26.097451Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:26.097552Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:26.097609Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:247:2066] recipient: [11:20:2067] 2026-01-04T02:30:26.109030Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:26.109164Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:26.136418Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 
2026-01-04T02:30:26.136591Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:26.136772Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:26.136894Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:26.137034Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:26.137105Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:26.137150Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:26.137242Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:26.148051Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:26.148265Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:26.159119Z node 11 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:26.159252Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:26.160468Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:26.160505Z node 11 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:26.160654Z node 11 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:26.160706Z node 11 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:26.161210Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_123" BoxId: 1 } } } 2026-01-04T02:30:26.162313Z node 11 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# ... 
ommand { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2026-01-04T02:30:28.290829Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2026-01-04T02:30:28.291441Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2026-01-04T02:30:28.292117Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2026-01-04T02:30:28.292778Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2026-01-04T02:30:28.293457Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2026-01-04T02:30:28.294257Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2026-01-04T02:30:28.294856Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2026-01-04T02:30:28.295559Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2026-01-04T02:30:28.296178Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2026-01-04T02:30:28.297167Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2026-01-04T02:30:28.297915Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2026-01-04T02:30:28.298585Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2026-01-04T02:30:28.299303Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2026-01-04T02:30:28.300055Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:224:2066] recipient: [31:198:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [31:224:2066] recipient: [31:198:2077] Leader for TabletID 72057594037932033 is [31:226:2079] sender: [31:227:2066] recipient: [31:198:2077] 2026-01-04T02:30:30.196898Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:30.197984Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:30.198298Z node 31 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:30.199796Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 
2026-01-04T02:30:30.200234Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:30.200447Z node 31 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:30.200475Z node 31 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:30.200718Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:30.211420Z node 31 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:30.211559Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:30.211687Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:30.211823Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:30.211923Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:30.212038Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [31:226:2079] sender: [31:247:2066] recipient: [31:20:2067] 2026-01-04T02:30:30.223368Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:30.223516Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:30.250405Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:30.250538Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:30.250634Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:30.250723Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:30.250845Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:30.250907Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:30.250960Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:30.251014Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:30.262208Z node 31 
:BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:30.262350Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:30.277413Z node 31 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:30.277571Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:30.279040Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:30.279096Z node 31 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:30.279318Z node 31 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:30.279408Z node 31 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:30.280344Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_0" BoxId: 1 } } } 2026-01-04T02:30:30.281499Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_1" BoxId: 1 } } } 2026-01-04T02:30:30.286649Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_2" BoxId: 1 } } } 2026-01-04T02:30:30.287444Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_3" BoxId: 1 } } } 2026-01-04T02:30:30.288230Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_4" BoxId: 1 } } } 2026-01-04T02:30:30.288966Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_5" BoxId: 1 } } } 2026-01-04T02:30:30.289641Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_6" BoxId: 1 } } } 2026-01-04T02:30:30.292255Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_7" BoxId: 1 } } } 2026-01-04T02:30:30.292985Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_8" BoxId: 1 } } } 2026-01-04T02:30:30.293909Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { AddDriveSerial { Serial: "SN_9" BoxId: 1 } } } 2026-01-04T02:30:30.294729Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_0" } } } 2026-01-04T02:30:30.295624Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_1" } } } 2026-01-04T02:30:30.296363Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest 
Request# {Command { RemoveDriveSerial { Serial: "SN_2" } } } 2026-01-04T02:30:30.297168Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_3" } } } 2026-01-04T02:30:30.297905Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_4" } } } 2026-01-04T02:30:30.298826Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_5" } } } 2026-01-04T02:30:30.299606Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_6" } } } 2026-01-04T02:30:30.300448Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_7" } } } 2026-01-04T02:30:30.301220Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_8" } } } 2026-01-04T02:30:30.302017Z node 31 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { RemoveDriveSerial { Serial: "SN_9" } } } |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> KqpSystemView::NodesSimple >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-15 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 >> KqpSystemView::CompileCacheCheckWarnings+EnableCompileCacheView ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanAfterSplitSlowMetaRead [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:29:39.000470Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:29:39.014903Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:29:39.019380Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:685:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:29:39.321442Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:29:39.330784Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:29:39.332073Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2225], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:29:39.342321Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:29:39.342749Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:29:39.345058Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:29:39.345119Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:29:40.195058Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:40.324517Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:40.324651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:40.325122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:40.325193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:40.407713Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:29:40.408352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:40.408752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:40.581247Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:29:40.624852Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:29:41.569944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:41.675700Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:41.675825Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:42.101388Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:224:2183] Handle TEvProposeTransaction 2026-01-04T02:29:42.101461Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:224:2183] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:29:42.101583Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:224:2183] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1562:3022] 2026-01-04T02:29:42.372148Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:1562:3022] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 7 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T02:29:42.372280Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:1562:3022] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:29:42.373301Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:1562:3022] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T02:29:42.373428Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:1562:3022] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:29:42.374376Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:1562:3022] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:29:42.374557Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:1562:3022] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:29:42.374723Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:1562:3022] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:29:42.375134Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:1562:3022] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:29:42.377304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:29:42.387356Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:1562:3022] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:29:42.387481Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:1562:3022] txid# 281474976715657 SEND to# [1:1490:2980] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:29:42.496780Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:1639:3079] 2026-01-04T02:29:42.497074Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:29:42.573357Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:29:42.573858Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:29:42.575613Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-04T02:29:42.575703Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-04T02:29:42.575770Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-04T02:29:42.576228Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:29:42.576737Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:29:42.576824Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:1727:3079] in generation 1 2026-01-04T02:29:42.584507Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [1:1641:3080] 2026-01-04T02:29:42.584766Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:29:42.593556Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037892 actor [1:1644:3083] 2026-01-04T02:29:42.593791Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:29:42.662531Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:29:42.662650Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:29:42.662718Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:29:42.664089Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2026-01-04T02:29:42.664187Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037894 2026-01-04T02:29:42.664239Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037894 2026-01-04T02:29:42.664561Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:29:42.664624Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:29:42.666030Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037892 2026-01-04T02:29:42.666100Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037892 2026-01-04T02:29:42.666142Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037892 2026-01-04T02:29:42.666437Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:29:42.666817Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:29:42.666933Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037894 persisting started state actor id [1:1768:3080] in generation 1 2026-01-04T02:29:42.667095Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:29:42.667134Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037892 persisting started state actor id [1:1769:3083] in generation 1 2026-01-04T02:29:42.684793Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1733:2406] 2026-01-04T02:29:42.685055Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:29:42.761590Z node 2 :TX_DATASHARD INFO ... zUtMjcwN2U0OGEtNTA5MDAwMmEtZjcyNGRlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send TEvStreamData Recipient# [5:1940:3244] SeqNo# 1 Rows# 1 trace_id# 2026-01-04T02:30:30.904323Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [5:2174:3244] TxId: 281474976710665. Ctx: { TraceId: 01ke3deyy0amty6r8p328y0t03, Database: , SessionId: ydb://session/3?node_id=5&id=Y2YyZjMxNzUtMjcwN2U0OGEtNTA5MDAwMmEtZjcyNGRlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:2184:3383] TaskId# 4 State# COMPUTE_STATE_EXECUTING Stats# {CpuTimeUs: 419885 Tasks { TaskId: 4 StageId: 2 CpuTimeUs: 418487 FinishTimeMs: 1767493830891 InputRows: 1 InputBytes: 6 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 123 BuildCpuTimeUs: 418364 HostName: "ghrun-46vtxmsa7i" NodeId: 5 StartTimeMs: 1767493830891 CreateTimeMs: 1767493829208 CurrentWaitOutputTimeUs: 54 UpdateTimeMs: 1767493830892 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:30:30.904438Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [5:2174:3244] TxId: 281474976710665. Ctx: { TraceId: 01ke3deyy0amty6r8p328y0t03, Database: , SessionId: ydb://session/3?node_id=5&id=Y2YyZjMxNzUtMjcwN2U0OGEtNTA5MDAwMmEtZjcyNGRlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [5:2183:3382], CA [5:2184:3383], trace_id# 2026-01-04T02:30:30.904952Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [5:2174:3244] TxId: 281474976710665. Ctx: { TraceId: 01ke3deyy0amty6r8p328y0t03, Database: , SessionId: ydb://session/3?node_id=5&id=Y2YyZjMxNzUtMjcwN2U0OGEtNTA5MDAwMmEtZjcyNGRlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:2183:3382] TaskId# 3 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 80216 DurationUs: 87000 Tasks { TaskId: 3 StageId: 1 CpuTimeUs: 74501 FinishTimeMs: 1767493830898 InputRows: 2 InputBytes: 12 OutputRows: 1 OutputBytes: 6 ComputeCpuTimeUs: 293 BuildCpuTimeUs: 74208 HostName: "ghrun-46vtxmsa7i" NodeId: 5 StartTimeMs: 1767493830811 CreateTimeMs: 1767493829134 UpdateTimeMs: 1767493830898 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:30:30.905027Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710665. Ctx: { TraceId: 01ke3deyy0amty6r8p328y0t03, Database: , SessionId: ydb://session/3?node_id=5&id=Y2YyZjMxNzUtMjcwN2U0OGEtNTA5MDAwMmEtZjcyNGRlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Compute actor has finished execution: [5:2183:3382] 2026-01-04T02:30:30.905085Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [5:2174:3244] TxId: 281474976710665. Ctx: { TraceId: 01ke3deyy0amty6r8p328y0t03, Database: , SessionId: ydb://session/3?node_id=5&id=Y2YyZjMxNzUtMjcwN2U0OGEtNTA5MDAwMmEtZjcyNGRlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [5:2184:3383], trace_id# 2026-01-04T02:30:30.905709Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 4000 2026-01-04T02:30:30.905838Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037890: waitStep# 4000 readStep# 4000 observedStep# 4000 ... response 271646822 NKikimr::NKqp::TEvKqpExecuter::TEvStreamData NKikimrKqp.TEvExecuterStreamData ResultSet { columns { name: "column0" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint64_value: 596400 } } format: FORMAT_VALUE } SeqNo: 1 QueryResultIndex: 0 ChannelId: 4 VirtualTimestamp { Step: 3500 TxId: 281474976710663 } Finished: true 2026-01-04T02:30:30.906593Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:549} ActorId: [5:2174:3244] TxId: 281474976710665. Ctx: { TraceId: 01ke3deyy0amty6r8p328y0t03, Database: , SessionId: ydb://session/3?node_id=5&id=Y2YyZjMxNzUtMjcwN2U0OGEtNTA5MDAwMmEtZjcyNGRlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send ack ChannelId# 4 SeqNo# 1 Enough# false FreeSpace# 100 Recipient# [5:2188:3383] trace_id# 2026-01-04T02:30:30.906717Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_channels.cpp:179: TxId: 281474976710665, task: 4. Received channel data ack for channelId: 4, seqNo: 1, lastSentSeqNo: 1, freeSpace: 100, early finish: 0 2026-01-04T02:30:30.906840Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_channels.cpp:207: TxId: 281474976710665, task: 4. PeerState, peerState:(freeSpace:100;inFlightBytes:0;inFlightCount:0;), sentSeqNo: 1, ackSeqNo: 1 2026-01-04T02:30:30.906891Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_channels.cpp:220: TxId: 281474976710665, task: 4. Resume compute actor 2026-01-04T02:30:30.907037Z node 5 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [5:2184:3383], TxId: 281474976710665, task: 4. Ctx: { CheckpointId : . TraceId : 01ke3deyy0amty6r8p328y0t03. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=Y2YyZjMxNzUtMjcwN2U0OGEtNTA5MDAwMmEtZjcyNGRlNQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:30:30.907086Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:2007: SelfId: [5:2184:3383], TxId: 281474976710665, task: 4. Ctx: { CheckpointId : . TraceId : 01ke3deyy0amty6r8p328y0t03. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=Y2YyZjMxNzUtMjcwN2U0OGEtNTA5MDAwMmEtZjcyNGRlNQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Poll inputs 2026-01-04T02:30:30.907122Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:2022: SelfId: [5:2184:3383], TxId: 281474976710665, task: 4. Ctx: { CheckpointId : . TraceId : 01ke3deyy0amty6r8p328y0t03. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=Y2YyZjMxNzUtMjcwN2U0OGEtNTA5MDAwMmEtZjcyNGRlNQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
Poll sources 2026-01-04T02:30:30.907179Z node 5 :KQP_COMPUTE TRACE: dq_sync_compute_actor_base.h:37: SelfId: [5:2184:3383], TxId: 281474976710665, task: 4. Ctx: { CheckpointId : . TraceId : 01ke3deyy0amty6r8p328y0t03. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=Y2YyZjMxNzUtMjcwN2U0OGEtNTA5MDAwMmEtZjcyNGRlNQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Resume execution, run status: Finished 2026-01-04T02:30:30.907215Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:405: SelfId: [5:2184:3383], TxId: 281474976710665, task: 4. Ctx: { CheckpointId : . TraceId : 01ke3deyy0amty6r8p328y0t03. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=Y2YyZjMxNzUtMjcwN2U0OGEtNTA5MDAwMmEtZjcyNGRlNQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. ProcessOutputsState.Inflight: 0 2026-01-04T02:30:30.907251Z node 5 :KQP_COMPUTE TRACE: dq_compute_actor_impl.h:445: SelfId: [5:2184:3383], TxId: 281474976710665, task: 4. Ctx: { CheckpointId : . TraceId : 01ke3deyy0amty6r8p328y0t03. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=Y2YyZjMxNzUtMjcwN2U0OGEtNTA5MDAwMmEtZjcyNGRlNQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Do not drain channelId: 4, finished 2026-01-04T02:30:30.907304Z node 5 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710665, task: 4. Tasks execution finished, don't wait for ack delivery in input channelId: 3, seqNo: [1] 2026-01-04T02:30:30.907338Z node 5 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710665, task: 4. Tasks execution finished 2026-01-04T02:30:30.907371Z node 5 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [5:2184:3383], TxId: 281474976710665, task: 4. Ctx: { CheckpointId : . TraceId : 01ke3deyy0amty6r8p328y0t03. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=Y2YyZjMxNzUtMjcwN2U0OGEtNTA5MDAwMmEtZjcyNGRlNQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-04T02:30:30.907475Z node 5 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710665, task: 4. pass away 2026-01-04T02:30:30.907582Z node 5 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710665;task_id=4;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-04T02:30:30.911192Z node 5 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710665, taskId: 4. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-04T02:30:30.911911Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [5:2174:3244] TxId: 281474976710665. Ctx: { TraceId: 01ke3deyy0amty6r8p328y0t03, Database: , SessionId: ydb://session/3?node_id=5&id=Y2YyZjMxNzUtMjcwN2U0OGEtNTA5MDAwMmEtZjcyNGRlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [5:2184:3383] TaskId# 4 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 420986 DurationUs: 16000 Tasks { TaskId: 4 StageId: 2 CpuTimeUs: 418498 FinishTimeMs: 1767493830907 InputRows: 1 InputBytes: 6 OutputRows: 1 OutputBytes: 6 ResultRows: 1 ResultBytes: 6 ComputeCpuTimeUs: 134 BuildCpuTimeUs: 418364 HostName: "ghrun-46vtxmsa7i" NodeId: 5 StartTimeMs: 1767493830891 CreateTimeMs: 1767493829208 UpdateTimeMs: 1767493830907 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:30:30.912002Z node 5 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710665. Ctx: { TraceId: 01ke3deyy0amty6r8p328y0t03, Database: , SessionId: ydb://session/3?node_id=5&id=Y2YyZjMxNzUtMjcwN2U0OGEtNTA5MDAwMmEtZjcyNGRlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [5:2184:3383] 2026-01-04T02:30:30.912254Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [5:2174:3244] TxId: 281474976710665. Ctx: { TraceId: 01ke3deyy0amty6r8p328y0t03, Database: , SessionId: ydb://session/3?node_id=5&id=Y2YyZjMxNzUtMjcwN2U0OGEtNTA5MDAwMmEtZjcyNGRlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-04T02:30:30.912355Z node 5 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1505} ActorId: [5:2174:3244] TxId: 281474976710665. Ctx: { TraceId: 01ke3deyy0amty6r8p328y0t03, Database: , SessionId: ydb://session/3?node_id=5&id=Y2YyZjMxNzUtMjcwN2U0OGEtNTA5MDAwMmEtZjcyNGRlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-01-04T02:30:30.912430Z node 5 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [5:2174:3244] TxId: 281474976710665. Ctx: { TraceId: 01ke3deyy0amty6r8p328y0t03, Database: , SessionId: ydb://session/3?node_id=5&id=Y2YyZjMxNzUtMjcwN2U0OGEtNTA5MDAwMmEtZjcyNGRlNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.012840s ReadRows: 100 ReadBytes: 800 RequestUnits# 100 ForceFlag# true trace_id# 2026-01-04T02:30:30.912572Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 4000 2026-01-04T02:30:30.912635Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037889: waitStep# 4000 readStep# 4000 observedStep# 4000 ... 
response 271646721 NKikimr::NKqp::NPrivateEvents::TEvQueryResponse NKikimrKqp.TEvQueryResponse Response { TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 911 |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_scan/unittest >> TDataShardTrace::TestTraceDistributedSelect >> BsControllerConfig::ReassignGroupDisk [GOOD] >> TDataShardTrace::TestTraceDistributedSelectViaReadActors >> TDataShardTrace::TestTraceDistributedUpsert+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions-EnableRealSystemViewPaths [GOOD] Test command err: Trying to start YDB, gRPC: 27633, MsgBus: 10284 2026-01-04T02:30:21.312576Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328157650637225:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:21.312632Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:21.931670Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:21.960933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:21.976679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:21.980148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:22.085197Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:22.088073Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328157650637101:2081] 1767493821281463 != 1767493821281466 2026-01-04T02:30:22.127472Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:22.164526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:22.164554Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:22.164563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:22.164681Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:22.315826Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:22.717773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:22.751565Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:30:22.764113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:30:26.312117Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328157650637225:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:26.312215Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 1 2026-01-04T02:30:26.962802Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328179125474542:2476], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.962957Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.963422Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328179125474554:2479], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.963473Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328179125474555:2480], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.963597Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:26.969676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:26.985922Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328179125474558:2481], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:30:27.145452Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328183420441906:2356] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 5], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ydb-cpp-sdk/dev 2026-01-04T02:30:31.983459Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493831973, txId: 281474976710673] shutting down |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ReassignGroupDisk [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:212:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:224:2066] recipient: [1:212:2077] Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:227:2066] recipient: [1:212:2077] 2026-01-04T02:30:25.418408Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:25.419465Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:25.419732Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:25.420369Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:25.421919Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:25.422160Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:25.422186Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:25.422386Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:25.432514Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:25.432638Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:25.432839Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:25.432937Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:25.433035Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:25.433104Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:226:2079] sender: [1:248:2066] recipient: [1:20:2067] 2026-01-04T02:30:25.445109Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:25.445275Z node 1 :BS_CONTROLLER DEBUG: 
{BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:25.473444Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:25.473606Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:25.473721Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:25.473827Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:25.473970Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:25.474027Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:25.474067Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:25.474127Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:25.485211Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:25.485385Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:25.497662Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:25.497845Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:25.499278Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:25.499339Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:25.499563Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:25.499625Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:25.514625Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" 
IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } } } Command { QueryBaseConfig { } } } 2026-01-04T02:30:25.515304Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2026-01-04T02:30:25.515355Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2026-01-04T02:30:25.515381Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2026-01-04T02:30:25.515404Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2026-01-04T02:30:25.515456Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2026-01-04T02:30:25.515484Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2026-01-04T02:30:25.515552Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2026-01-04T02:30:25.515576Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2026-01-04T02:30:25.515604Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2026-01-04T02:30:25.515626Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2026-01-04T02:30:25.515647Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2026-01-04T02:30:25.515670Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2026-01-04T02:30:25.515692Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2026-01-04T02:30:25.515730Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2026-01-04T02:30:25.515777Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2026-01-04T02:30:25.515813Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2026-01-04T02:30:25.515839Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2026-01-04T02:30:25.515868Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2026-01-04T02:30:25.515891Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2026-01-04T02:30:25.515912Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2026-01-04T02:30:25.515934Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2026-01-04T02:30:25.529598Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2026-01-04T02:30:25.529712Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2026-01-04T02:30:25.529741Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2026-01-04T02:30:25.529764Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2026-01-04T02:30:25.529827Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2026-01-04T02:30:25.529861Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2026-01-04T02:30:25.529884Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2026-01-04T02:30:25.529907Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2026-01-04T02:30:25.529929Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:203:2077] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [11:224:2066] recipient: [11:203:2077] Leader for TabletID 72057594037932033 is [11:226:2079] sender: [11:227:2066] recipient: [11:203:2077] 2026-01-04T02:30:27.520460Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:27.521537Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:27.521847Z node 11 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:27.523249Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:27.523638Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:27.523946Z node 11 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNo ... 
Port: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2026-01-04T02:30:29.677827Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 21:1000 Path# /dev/disk 2026-01-04T02:30:29.677877Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 22:1000 Path# /dev/disk 2026-01-04T02:30:29.677905Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 23:1000 Path# /dev/disk 2026-01-04T02:30:29.677927Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 24:1000 Path# /dev/disk 2026-01-04T02:30:29.677977Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 25:1000 Path# /dev/disk 2026-01-04T02:30:29.678001Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 26:1000 Path# /dev/disk 2026-01-04T02:30:29.678023Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 27:1000 Path# /dev/disk 2026-01-04T02:30:29.678045Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 28:1000 Path# /dev/disk 2026-01-04T02:30:29.678066Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 29:1000 Path# /dev/disk 2026-01-04T02:30:29.678088Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 30:1000 Path# /dev/disk 2026-01-04T02:30:29.678128Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 31:1000 Path# /dev/disk 2026-01-04T02:30:29.678151Z node 21 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 32:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2026-01-04T02:30:29.703617Z node 21 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" FailReason: kHostNotFound FailParam { NodeId: 1 } } ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" ConfigTxSeqNo: 1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [33:290:2068] recipient: [33:267:2079] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [33:290:2068] recipient: [33:267:2079] Leader for TabletID 72057594037932033 is [33:292:2081] sender: [33:293:2068] recipient: [33:267:2079] 2026-01-04T02:30:32.014914Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:32.016049Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 
2026-01-04T02:30:32.016381Z node 33 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:32.018026Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:32.018245Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:32.018389Z node 33 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:32.018419Z node 33 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:32.018634Z node 33 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:32.028748Z node 33 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:32.028906Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:32.029033Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:32.029219Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:32.029326Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:32.029399Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [33:292:2081] sender: [33:314:2068] recipient: [33:22:2069] 2026-01-04T02:30:32.044078Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:32.044268Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:32.070514Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:32.070648Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:32.070725Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:32.070805Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:32.070917Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:32.070969Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:32.071017Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:32.071077Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:32.081970Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:32.082137Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:32.094943Z node 33 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:32.095309Z node 33 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:32.096674Z node 33 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:32.096733Z node 33 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:32.096929Z node 33 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:32.096987Z node 33 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:32.097978Z node 33 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "/dev/disk" } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 2 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 2 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 8 PDiskFilter { Property { Type: ROT } } } } } 2026-01-04T02:30:32.098502Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 33:1000 Path# /dev/disk 2026-01-04T02:30:32.098561Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 34:1000 Path# /dev/disk 2026-01-04T02:30:32.098600Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 35:1000 Path# /dev/disk 2026-01-04T02:30:32.098627Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 36:1000 Path# /dev/disk 2026-01-04T02:30:32.098651Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 37:1000 Path# /dev/disk 2026-01-04T02:30:32.098686Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 38:1000 Path# /dev/disk 2026-01-04T02:30:32.098718Z node 33 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 39:1000 Path# /dev/disk 2026-01-04T02:30:32.098743Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 40:1000 Path# /dev/disk 2026-01-04T02:30:32.098769Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 41:1000 Path# /dev/disk 2026-01-04T02:30:32.098803Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 42:1000 Path# /dev/disk 2026-01-04T02:30:32.098833Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 43:1000 Path# /dev/disk 2026-01-04T02:30:32.099463Z node 33 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 44:1000 Path# /dev/disk Response# Status { Success: true } Status { Success: true } Status { Success: true } Success: true ConfigTxSeqNo: 1 2026-01-04T02:30:32.126184Z node 33 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { UpdateDriveStatus { HostKey { NodeId: 1 } Path: "/dev/disk" Status: INACTIVE } } } Response# Status { ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" FailReason: kHostNotFound FailParam { NodeId: 1 } } ErrorDescription: "Host not found NodeId# 1 HostKey# NodeId: 1\n incorrect" ConfigTxSeqNo: 1 |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-65 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 >> TDataShardTrace::TestTraceWriteImmediateOnShard |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_19_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 19] [FAIL] |85.6%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> KqpSystemView::Join [GOOD] >> KqpSystemView::ReadSuccess [GOOD] |85.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |85.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... 
results_accumulator.log} >> GenericFederatedQuery::IcebergHiveSaFilterPushdown [GOOD] >> TDataShardTrace::TestTraceDistributedUpsert-UseSink >> SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant >> Cdc::SequentialSplitMerge [GOOD] >> Cdc::MustNotLoseSchemaSnapshot >> KqpSystemView::PartitionStatsOrderByDesc ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Join [GOOD] Test command err: Trying to start YDB, gRPC: 20719, MsgBus: 62847 2026-01-04T02:30:19.468952Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328152070953598:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:19.479064Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:19.548780Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:20.027247Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:20.035675Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:20.035787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:20.099665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:20.210600Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:20.214024Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328152070953554:2081] 1767493819459668 != 1767493819459671 2026-01-04T02:30:20.238828Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:20.385007Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:20.385030Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:20.385038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:20.385119Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:20.438048Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:21.194141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:21.203673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:30:21.292457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:21.617308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:21.853989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:21.937482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:24.398793Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328173545791917:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:24.399179Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:24.403005Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328173545791927:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:24.403092Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:24.466738Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328152070953598:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:24.466836Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:24.748921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:24.836172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:24.896977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:24.989430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:25.068346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:25.150845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:25.207641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:25.285291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:25.424356Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328177840760105:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:25.424441Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:25.424803Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328177840760111:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:25.424841Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:25.424844Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328177840760110:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:25.432386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:25.448347Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328177840760114:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:30:25.536176Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328177840760169:3782] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:27.477049Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493827466, txId: 281474976710673] shutting down waiting... 2026-01-04T02:30:28.717018Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493828693, txId: 281474976710675] shutting down waiting... 2026-01-04T02:30:29.951749Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493829941, txId: 281474976710677] shutting down waiting... 2026-01-04T02:30:31.260708Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493831244, txId: 281474976710679] shutting down waiting... 2026-01-04T02:30:32.508638Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493832478, txId: 281474976710681] shutting down waiting... 2026-01-04T02:30:33.708650Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493833698, txId: 281474976710683] shutting down waiting... 2026-01-04T02:30:34.900814Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493834892, txId: 281474976710685] shutting down 2026-01-04T02:30:35.022355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:30:35.022383Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:35.335296Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493835321, txId: 281474976710687] shutting down |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |85.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::ReadSuccess [GOOD] Test command err: Trying to start YDB, gRPC: 19482, MsgBus: 25727 2026-01-04T02:30:28.589651Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328188163616023:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:28.589909Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:28.901275Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2026-01-04T02:30:28.929334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:28.929460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:28.969080Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328188163615912:2081] 1767493828582907 != 1767493828582910 2026-01-04T02:30:28.973665Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:28.979253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:29.094332Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:29.094359Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:29.094367Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:29.094455Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:29.143176Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:29.595127Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:29.669357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:29.687597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:30:29.744571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:29.951017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:30.172449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:30.278363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:32.296591Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328205343486968:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:32.296752Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:32.305986Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328205343486978:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:32.306124Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:32.684927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:32.738964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:32.776140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:32.811068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:32.865626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:32.924097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:32.977223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:33.035558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:33.131243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328209638455148:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:33.131310Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328209638455153:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:33.131357Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:33.131958Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328209638455156:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:33.132005Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:33.135847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:33.152613Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328209638455155:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:30:33.257123Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328209638455210:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:33.589902Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328188163616023:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:33.589983Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:35.331927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:30:35.544618Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493835531, txId: 281474976715674] shutting down |85.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions+EnableRealSystemViewPaths [GOOD] |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut |85.7%| [TA] {RESULT} $(B)/ydb/core/statistics/database/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSysColV0::InnerJoinSelect [GOOD] >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] >> Cdc::InitialScanDebezium [GOOD] >> Cdc::InitialScanRacyCompleteAndRequest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 >> KqpSystemView::NodesRange2 >> KqpSystemView::PartitionStatsSimple [GOOD] >> KqpSysColV0::SelectRowById [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-42 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-21 [GOOD] >> BsControllerConfig::MergeIntersectingBoxes [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-18 [GOOD] >> DataShardVolatile::TwoAppendsMustBeVolatile-UseSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-54 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-66 [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink >> KqpSystemView::FailResolve >> KqpSystemView::PartitionStatsFollower >> BsControllerConfig::DeleteStoragePool [GOOD] >> KqpSystemView::PartitionStatsParametricRanges >> Cdc::ShouldBreakLocksOnConcurrentCancelBuildIndex [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-67 [GOOD] >> KqpSysColV1::StreamInnerJoinTables [GOOD] >> BsControllerConfig::ExtendByCreatingSeparateBox [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-16 [GOOD] >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 >> BsControllerConfig::CommandRollbackWhenCombined >> BsControllerConfig::ExtendBoxAndStoragePool >> Cdc::ShouldBreakLocksOnConcurrentDropStream >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 >> BsControllerConfig::MoveGroups >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 >> BsControllerConfig::CommandRollbackWhenCombined [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 27372, MsgBus: 27782 2026-01-04T02:30:30.075609Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328197114600679:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:30.075916Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:30.133530Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:30.422081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:30.422221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-04T02:30:30.448538Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:30.521516Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:30.522431Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328197114600644:2081] 1767493830064507 != 1767493830064510 2026-01-04T02:30:30.525881Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:30.622922Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:30.622955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:30.622976Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:30.623073Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:30.736513Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:31.103867Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:31.240039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:31.255477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:30:31.331286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:31.564860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:31.751956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:31.839076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.041987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328214294471715:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:34.042090Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:34.042540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328214294471725:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:34.042583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:34.558263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.631984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.669445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.732480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.770020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.808428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.843632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.886562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.976468Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328214294472598:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:34.976530Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:34.976729Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328214294472604:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:34.976773Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:34.976785Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328214294472603:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:34.979895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:34.989481Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328214294472607:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:30:35.078711Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328197114600679:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:35.078801Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:35.087446Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328218589439956:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::Sessions+EnableRealSystemViewPaths [GOOD] Test command err: Trying to start YDB, gRPC: 27873, MsgBus: 6364 2026-01-04T02:30:27.238448Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328184638543338:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:27.238588Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:27.677098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:27.697097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:27.697206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:27.789494Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328184638543312:2081] 1767493827234148 != 1767493827234151 2026-01-04T02:30:27.814454Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:27.819498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:28.046942Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:28.096532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:28.096555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:28.096563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:28.096645Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:28.284225Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:28.574069Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:28.583692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:30:28.590966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 1 2026-01-04T02:30:32.089385Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328206113380986:2474], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:32.089521Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:32.090032Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328206113380998:2477], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:32.090071Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328206113380999:2478], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:32.090205Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:32.102801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:32.117134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-04T02:30:32.119290Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328206113381002:2479], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:30:32.243523Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328184638543338:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:32.243594Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:32.244949Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328206113381055:2586] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ydb-cpp-sdk/dev 2026-01-04T02:30:37.624855Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493837617, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 22897, MsgBus: 27822 2026-01-04T02:30:31.487542Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328203534031975:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:31.487589Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:31.793110Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:31.811255Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:31.811384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:31.977164Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:31.982017Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328203534031951:2081] 1767493831484978 != 1767493831484981 2026-01-04T02:30:32.006500Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:32.154424Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:32.270504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:32.270524Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:32.270544Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:32.270627Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-04T02:30:32.513880Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:32.847792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:32.861866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:30:32.938989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:33.137472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:33.311908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:33.396113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:35.267131Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328220713903008:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:35.267298Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:35.273976Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328220713903018:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:35.274083Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:35.682936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:35.738761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:35.810986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:35.871412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:35.934398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:36.001402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:36.065991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:36.160223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:36.320142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328225008871190:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:36.320259Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:36.320578Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328225008871195:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:36.320598Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328225008871196:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:36.320650Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:36.324533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:36.341324Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328225008871199:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:30:36.418138Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328225008871250:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:36.489099Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328203534031975:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:36.489161Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:38.507864Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493838494, txId: 281474976715673] shutting down >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] >> TDataShardTrace::TestTraceDistributedSelect [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 64760, MsgBus: 14523 2026-01-04T02:30:30.238786Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328196927072322:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:30.238919Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:30.319378Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:30.614198Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:30.615344Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:30.615547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:30.618126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:30.761197Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:30.905356Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:31.244652Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:31.244676Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty 
maybe) 2026-01-04T02:30:31.244690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:31.244802Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:31.254928Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:31.814390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:31.910199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:32.115620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:32.306054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:32.399647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.172477Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328214106943347:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:34.172585Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:34.174093Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328214106943357:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:34.174161Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:34.490411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.543213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.631847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.672096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.723611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.788365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.854921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.922120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:35.034960Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328218401911527:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:35.035042Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:35.035413Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328218401911531:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:35.035466Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328218401911533:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:35.035513Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:35.039621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:35.055962Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328218401911536:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:30:35.151602Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328218401911589:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:35.241531Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328196927072322:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:35.245052Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:37.142592Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493837172, txId: 281474976710673] shutting down |85.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 7116, MsgBus: 18340 2026-01-04T02:30:30.143053Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328199098686443:2183];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:30.143488Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:30.177128Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:30.484805Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:30.484935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:30.544050Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:30.561872Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:30.564727Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:30.567969Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328199098686280:2081] 1767493830101033 != 1767493830101036 2026-01-04T02:30:30.760287Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:30.760315Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:30.760332Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:30.760421Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:30.800552Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:31.149958Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:31.482903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:31.493873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:30:31.557670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:31.717671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:31.907301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:32.043207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.164365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328216278557345:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:34.164482Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:34.165353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328216278557355:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:34.165409Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:34.826503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.872628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.914994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.978244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:35.007067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:35.046179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:35.085508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:35.138269Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328199098686443:2183];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:35.138401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:35.142629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:35.228823Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328220573525528:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:35.228939Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:35.229360Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328220573525533:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:35.229408Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328220573525534:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:35.229590Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:35.234307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:35.245338Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328220573525537:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:30:35.315829Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328220573525590:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |85.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move >> KqpSystemView::CompileCacheCheckWarnings+EnableCompileCacheView [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-66 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 |85.6%| [LD] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/ydb-core-fq-libs-result_formatter-ut >> KqpSystemView::NodesRange1 [GOOD] >> KqpSystemView::PartitionStatsOrderByDesc [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-true [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-22 [GOOD] >> KqpSystemView::NodesSimple [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-70 [GOOD] >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-43 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-67 [GOOD] >> Cdc::MustNotLoseSchemaSnapshot [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-55 [GOOD] >> KqpSystemView::FailResolve [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 >> Cdc::InitialScanRacyCompleteAndRequest [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-17 [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant [GOOD] >> KqpSystemView::PartitionStatsParametricRanges [GOOD] >> KqpSysColV1::UpdateAndDelete >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 |85.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/ydb-core-tx-schemeshard-ut_move |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 5760, MsgBus: 13783 2026-01-04T02:30:32.658605Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328207164308861:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:32.658658Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:33.093891Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:33.117513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:33.117612Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:33.183971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:33.187450Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:33.191953Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328207164308814:2081] 1767493832656004 != 1767493832656007 2026-01-04T02:30:33.310384Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:33.310406Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:33.310414Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:33.310515Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:33.392821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:33.678392Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:33.911310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:33.926618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:30:33.976762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.231048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.517582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.618980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:36.869992Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328224344179890:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:36.870135Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:36.870701Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328224344179900:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:36.870753Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:37.195004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:37.232546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:37.264677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:37.366167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:37.414315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:37.458777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:37.496885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:37.541278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:37.649430Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328228639148065:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:37.649495Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:37.649540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328228639148070:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:37.649749Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328228639148072:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:37.649817Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:37.653506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:37.660453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328207164308861:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:37.660499Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:37.669663Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328228639148073:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:30:37.763970Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328228639148125:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:39.942115Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493839944, txId: 281474976715673] shutting down [[[108u];["One"];[8];["Value5"];[108u];["One"];#;["Value31"]]] |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |85.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_replication/test-results/unittest/{meta.json ... results_accumulator.log} |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx >> Cdc::InitialScanUpdatedRows >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck >> VectorIndexBuildTest::Shard_Build_Error >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-68 [GOOD] >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_11_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 11] [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 >> VectorIndexBuildTest::Shard_Build_Error [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelectViaReadActors [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:30:37.129410Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:37.254847Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:30:37.255306Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:30:37.255368Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:30:37.545753Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:37.545898Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:37.624092Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493834386922 != 1767493834386925 2026-01-04T02:30:37.636230Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:37.680288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:37.784280Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:30:38.105613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:38.119343Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:38.229155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:40.388864Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1129:2961], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:40.389013Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1139:2966], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:40.389116Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:40.390192Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1144:2970], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:40.390348Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:40.394960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:40.426559Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-04T02:30:40.583169Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1143:2969], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:30:40.668215Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1207:3013] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedSelect [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:30:37.266241Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:37.405253Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:30:37.405731Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:30:37.405793Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:30:37.697059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:37.697183Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:37.770798Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493834124588 != 1767493834124591 2026-01-04T02:30:37.783576Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:37.830910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:37.922644Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:30:38.263647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:38.278686Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:38.392123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:40.499755Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1129:2961], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:40.499874Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1139:2966], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:40.499960Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:40.500872Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1144:2970], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:40.501011Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:40.505862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:40.532253Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-04T02:30:40.686466Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1143:2969], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:30:40.769537Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1207:3013] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 61719, MsgBus: 19371 2026-01-04T02:30:37.563000Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328227872468150:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:37.563120Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:37.868193Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:37.871103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:37.871213Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:37.979796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:38.008572Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:38.138349Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:38.152070Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:38.152086Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:38.152108Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:38.152201Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:38.577220Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:38.667623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:38.676788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:30:38.730502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:38.886428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:39.042520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:39.114329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:41.090404Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328245052339157:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:41.090512Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:41.090764Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328245052339167:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:41.090820Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:41.389343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:41.430472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:41.460643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:41.494483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:41.542318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:41.598433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:41.635160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:41.671316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:41.744480Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328245052340037:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:41.744579Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:41.745116Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328245052340042:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:41.745152Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328245052340043:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:41.745214Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:41.748661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:41.763513Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328245052340046:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:30:41.868305Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328245052340097:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:42.564653Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328227872468150:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:42.564774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert+UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:30:38.080958Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:38.212306Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:30:38.212814Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:30:38.212883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:30:38.559076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:38.559229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:38.659604Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493834586701 != 1767493834586704 2026-01-04T02:30:38.674434Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:38.723180Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:38.833263Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:30:39.130719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:39.149040Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:39.264768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:41.357038Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1129:2961], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:41.357180Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1139:2966], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:41.357274Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:41.358282Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1144:2970], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:41.358440Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:41.365104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:41.398677Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-04T02:30:41.561129Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1143:2969], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:30:41.643230Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1207:3013] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (DataExecuter -> [(WaitForTableResolve) , (ComputeActor -> [(ForwardWriteActor)]) , (RunTasks) , (WaitTasks) , (Commit -> [(Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)]) , (Datashard.WriteTransaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWriteResult)])])])]) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheCheckWarnings+EnableCompileCacheView [GOOD] Test command err: Trying to start YDB, gRPC: 13408, MsgBus: 12125 2026-01-04T02:30:34.149132Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328214875711013:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:34.149715Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:34.764227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:34.764323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:34.765935Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:34.790887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:34.973237Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:34.976129Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328214875710797:2081] 1767493834105530 != 1767493834105533 2026-01-04T02:30:34.992454Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check 
script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:35.090290Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:35.090320Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:35.090341Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:35.090432Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:35.096706Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:35.689473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:35.825597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:36.209402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:36.436779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:36.535149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:38.574886Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328232055581852:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:38.575005Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:38.575357Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328232055581862:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:38.575396Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:38.923902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:38.989170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:39.023243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:39.054079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:39.087240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:39.122507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:39.148666Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328214875711013:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:39.148732Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:39.166912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:39.221220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:39.324205Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328236350550032:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:39.324309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:39.324682Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328236350550038:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:39.324715Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328236350550037:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:39.324724Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:39.328730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:39.361787Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328236350550041:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:30:39.461575Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328236350550092:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:41.189687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceWriteImmediateOnShard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:30:38.665318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:38.781254Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:30:38.781678Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:30:38.781740Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:30:39.085066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:39.085232Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:39.195886Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493835700865 != 1767493835700868 2026-01-04T02:30:39.218832Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:39.271193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:39.380467Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:30:39.697324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:39.717581Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:39.823829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_trace/unittest >> TDataShardTrace::TestTraceDistributedUpsert-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:30:40.534931Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:40.667479Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:30:40.667989Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:30:40.668058Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:30:40.980499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:40.980632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:41.063667Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493837315144 != 1767493837315147 2026-01-04T02:30:41.087101Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:41.138946Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:41.224210Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:30:41.537045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:41.551932Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:41.670731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:43.809005Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1129:2961], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:43.809125Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1139:2966], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:43.809202Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:43.811926Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1144:2970], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:43.812123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:43.817229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:43.847380Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-04T02:30:44.000512Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1143:2969], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:30:44.079142Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1207:3013] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trace: (Session.query.QUERY_ACTION_EXECUTE -> [(CompileService -> [(CompileActor)]) , (LiteralExecuter) , (DataExecuter -> [(WaitForTableResolve) , (RunTasks) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)]) , (Datashard.Transaction -> [(Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendWithConfirmedReadOnlyLease) , (Tablet.Transaction -> [(Tablet.Transaction.Execute -> [(Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit) , (Datashard.Unit)]) , (Tablet.WriteLog -> [(Tablet.WriteLog.LogEntry)]) , (Tablet.Transaction.Complete)]) , (Datashard.SendResult)])])]) |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt32 [GOOD] Test command err: 2026-01-04T02:28:37.942993Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:28:38.042702Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:28:38.042986Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:28:38.059923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:28:38.060221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:28:38.060489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:28:38.060638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:28:38.060768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:28:38.060900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:28:38.061036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:28:38.061154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:28:38.061289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:28:38.061415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:28:38.061551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:28:38.061674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:28:38.061801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:28:38.130899Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:28:38.131071Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:28:38.131137Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:28:38.131353Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:28:38.131526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:28:38.131601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:28:38.131653Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:28:38.131841Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:28:38.131921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:28:38.131981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:28:38.132020Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:28:38.132250Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:28:38.132333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:28:38.132378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:28:38.132414Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:28:38.132518Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:28:38.132578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:28:38.132623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:28:38.132657Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:28:38.132729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:28:38.132783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:28:38.132813Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:28:38.132855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:28:38.132893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:28:38.132930Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:28:38.133146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:28:38.133210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:28:38.133260Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:28:38.133386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:28:38.133428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:28:38.133462Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:28:38.133513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:28:38.133571Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:28:38.133612Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:28:38.133658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:28:38.133695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:28:38.133724Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:28:38.134419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:28:38.134501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ory_size=286;data_size=262;sum=25914;count=92;size_of_portion=192; 2026-01-04T02:30:41.449658Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=7650; 2026-01-04T02:30:41.449747Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=11; 2026-01-04T02:30:41.450838Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1027; 2026-01-04T02:30:41.450903Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=9117; 2026-01-04T02:30:41.450956Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=9271; 2026-01-04T02:30:41.451026Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=14; 2026-01-04T02:30:41.451121Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=51; 2026-01-04T02:30:41.451175Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=10046; 2026-01-04T02:30:41.451394Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=151; 2026-01-04T02:30:41.451568Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=118; 2026-01-04T02:30:41.451802Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=173; 2026-01-04T02:30:41.451994Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=149; 2026-01-04T02:30:41.455562Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=3507; 2026-01-04T02:30:41.458581Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2925; 2026-01-04T02:30:41.458673Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2026-01-04T02:30:41.458726Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2026-01-04T02:30:41.458767Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-04T02:30:41.458858Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=57; 2026-01-04T02:30:41.458920Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-04T02:30:41.459035Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=74; 2026-01-04T02:30:41.459077Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-04T02:30:41.459158Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=47; 2026-01-04T02:30:41.459246Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=54; 2026-01-04T02:30:41.459358Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=72; 2026-01-04T02:30:41.459411Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=29324; 2026-01-04T02:30:41.459584Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110400112;raw_bytes=155849958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:30:41.459721Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:30:41.459879Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:30:41.459970Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2345:4325];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:30:41.460029Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:30:41.460214Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:30:41.460309Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:30:41.460367Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:30:41.460421Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:30:41.460519Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:30:41.460580Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:30:41.460627Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:30:41.460747Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:30:41.460979Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.137000s; 2026-01-04T02:30:41.463097Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:30:41.463681Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:30:41.463753Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:30:41.463868Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:30:41.463936Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:30:41.464011Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:30:41.464080Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:30:41.464130Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:30:41.464249Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-04T02:30:41.464324Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:30:41.465483Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.127000s; 2026-01-04T02:30:41.465541Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange1 [GOOD] Test command err: Trying to start YDB, gRPC: 61698, MsgBus: 11053 2026-01-04T02:30:29.013092Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328188088118681:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:29.017650Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:29.057584Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328193048746616:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:29.057652Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:29.169602Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591328192777991811:2159];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:29.170092Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:29.230741Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591328193298459355:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:29.230787Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:29.894342Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:29.914135Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:29.902051Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:30:29.906585Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:29.937864Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:29.971665Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:30.036200Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:30.066124Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:30:30.072423Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:30.106330Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:30.109049Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:30.109113Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:30:30.135362Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:30.169349Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:30.172369Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:30:30.193027Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:30.194267Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:30.203076Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:30.230170Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:30:30.346014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:30.346156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:30.353102Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:30.353170Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:30.353403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:30.353443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:30.353573Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:30.353612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:30.429652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:30.438142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:30.507142Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:30.514330Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:30.533899Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:30.541793Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:30.554014Z node 1 :HIVE WARN: hive_impl.cpp:814: 
HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:30:30.554059Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-01-04T02:30:30.554112Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-04T02:30:30.554128Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-04T02:30:30.554700Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:30.555264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:30.555406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:30.555568Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:30.555660Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:30.556697Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:30.661000Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:30.869928Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:31.262521Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:31.262553Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:31.262564Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:31.262652Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:32.752803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:33.005214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:33.442891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:33.764719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:33.909614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:34.065936Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591328193048746616:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:34.066022Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:34.147570Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328188088118681:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:34.159375Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:34.165904Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591328192777991811:2159];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:34.166357Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:34.235296Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591328193298459355:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:34.235357Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:37.431060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328226742826561:2355], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:37.431183Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:37.433260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328226742826571:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:37.433322Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:37.880737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:37.966519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:38.026000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:38.125784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:38.269746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:38.381929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:38.485279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:38.581365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:38.840497Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328231037794850:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:38.840590Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:38.840670Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328231037794855:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:38.840858Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328231037794857:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:38.840903Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:38.845285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:38.877016Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328231037794858:2407], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:30:38.948271Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328231037794935:4635] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:41.036508Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493841003, txId: 281474976715673] shutting down >> DataShardVolatile::VolatileCommitOnBlobStorageFailure+UseSink [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesSimple [GOOD] Test command err: Trying to start YDB, gRPC: 20190, MsgBus: 25539 2026-01-04T02:30:33.792509Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328210313994385:2215];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:33.792596Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:33.822270Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:33.856726Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328208980181188:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:33.871347Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:33.881842Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591328212367458246:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:33.881919Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:33.955471Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:34.317727Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:34.323502Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:34.352491Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:34.481955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:34.482113Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:34.493276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:34.493939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:34.495377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:34.495440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:34.519392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:34.519583Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-04T02:30:34.523708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:34.528696Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:30:34.565285Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:34.700504Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:34.740093Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:34.746435Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:34.755735Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:34.847173Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:34.940572Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:34.957927Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:34.986214Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:34.986237Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:34.986243Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from 
file: (empty maybe) 2026-01-04T02:30:34.986353Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:35.632899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976725657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:35.738006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:36.436470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:37.074026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:37.237836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:38.793968Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328210313994385:2215];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:38.794046Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:38.857288Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591328208980181188:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:38.857368Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:38.873174Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591328212367458246:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:38.873296Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:39.595312Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328236083800283:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:39.595415Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:39.595733Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328236083800293:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:39.595774Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:39.929961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:39.997988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:40.066273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:40.148191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:40.219323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:40.286480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:40.392968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:40.469138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:40.583792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328240378768692:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:40.583896Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:40.584231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328240378768697:2417], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:40.584271Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328240378768698:2418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:40.584311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:40.625071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976725670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:40.648072Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328240378768701:2419], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976725670 completed, doublechecking } 2026-01-04T02:30:40.745054Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328240378768784:4748] txid# 281474976725671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:42.856902Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493842841, txId: 281474976725673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 63251, MsgBus: 8504 2026-01-04T02:30:31.444408Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328203080924234:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:31.449717Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:31.477516Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:31.798395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:31.798502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:31.871249Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:31.907746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:32.013945Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328203080924130:2081] 1767493831404360 != 1767493831404363 2026-01-04T02:30:32.023553Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:32.164522Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:32.238381Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:32.238418Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:32.238426Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:32.238523Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:32.459486Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:32.849340Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:32.863467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:30:32.929657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:33.070930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:33.275277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:33.336566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:35.379403Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328220260795186:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:35.379535Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:35.380175Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328220260795196:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:35.380246Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:35.747672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:35.786628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:35.854547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:35.927898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:36.031876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:36.175420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:36.252879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:36.351752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:36.435531Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328203080924234:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:36.435579Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:36.471840Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328224555763372:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:36.471931Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:36.472212Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328224555763377:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:36.472249Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328224555763378:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:36.472345Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:36.475903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:36.489863Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328224555763381:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:30:36.598741Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328224555763435:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_trace/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::CommandRollbackWhenCombined [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2964:2117] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2964:2117] Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3067:2106] recipient: [1:2964:2117] 2026-01-04T02:30:23.269204Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:23.270762Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:23.271161Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:23.274363Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:23.275294Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:23.275605Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:23.275632Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:23.275952Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:23.288352Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:23.288507Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:23.288657Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:23.288802Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:23.288912Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:23.288995Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3088:2106] recipient: [1:60:2107] 2026-01-04T02:30:23.301759Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:23.301951Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:23.374701Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:23.374885Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:23.374970Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:23.375064Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:23.375274Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:23.375380Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:23.375457Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:23.375518Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:23.386340Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:23.386496Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:23.397280Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:23.397431Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:23.398843Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:23.398892Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:23.399103Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:23.399156Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:23.416995Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { 
Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } } } } 2026-01-04T02:30:23.426918Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2026-01-04T02:30:23.427056Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2026-01-04T02:30:23.427093Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2026-01-04T02:30:23.427122Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2026-01-04T02:30:23.427149Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2026-01-04T02:30:23.427182Z node 1 :BS_CONTROLLER NOTICE: 
{BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2026-01-04T02:30:23.427213Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2026-01-04T02:30:23.427262Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2026-01-04T02:30:23.427294Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2026-01-04T02:30:23.427345Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2026-01-04T02:30:23.427375Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2026-01-04T02:30:23.427402Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2026-01-04T02:30:23.427429Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2026-01-04T02:30:23.427456Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2026-01-04T02:30:23.427488Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2026-01-04T02:30:23.427533Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2026-01-04T02:30:23.427570Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2026-01-04T02:30:23.427599Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2026-01-04T02:30:23.427625Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2026-01-04T02:30:23.427666Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2026-01-04T02:30:23.427707Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1002 Path# /dev/disk3 2026-01-04T02:30:23.427734Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1000 Path# /dev/disk ... 
UG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:40.737990Z node 101 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:40.738319Z node 101 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:40.738368Z node 101 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:40.738568Z node 101 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:40.749453Z node 101 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:40.749650Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:40.749768Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:40.749911Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:40.750010Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:40.750095Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [101:28:2070] sender: [101:49:2057] recipient: [101:11:2058] 2026-01-04T02:30:40.761538Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:40.761694Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:40.783945Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:40.784073Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:40.784146Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:40.784242Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:40.784418Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:40.784493Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:40.784541Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:40.784635Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:40.795324Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:40.795479Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:40.806463Z node 101 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:40.806588Z node 101 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:40.808746Z node 101 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:40.808905Z node 101 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:40.809157Z node 101 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:40.809204Z node 101 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:40.810049Z node 101 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { ReadHostConfig { } } Command { DefineHostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenCombined" } } Command { ReadHostConfig { } } Rollback: true } Status { Success: true } Status { Success: true } Status { Success: true HostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenCombined" ItemConfigGeneration: 1 } } ErrorDescription: "transaction rollback" RollbackSuccess: true 2026-01-04T02:30:40.810989Z node 101 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { ReadHostConfig { } } } Status { Success: true } Success: true ConfigTxSeqNo: 1 Leader for TabletID 72057594037932033 is [0:0:0] sender: [102:26:2057] recipient: [102:24:2068] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [102:26:2057] recipient: [102:24:2068] Leader for TabletID 72057594037932033 is [102:28:2070] sender: [102:29:2057] recipient: [102:24:2068] 2026-01-04T02:30:41.065423Z node 102 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:41.066600Z node 102 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:41.066873Z node 102 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:41.068636Z node 102 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:41.069097Z node 102 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:41.069238Z node 102 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:41.069278Z node 102 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:41.069515Z node 102 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:41.081346Z node 102 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} 
TTxInitScheme Complete 2026-01-04T02:30:41.081483Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:41.081697Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:41.081892Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:41.082213Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:41.082404Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [102:28:2070] sender: [102:49:2057] recipient: [102:11:2058] 2026-01-04T02:30:41.093952Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:41.094117Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:41.116865Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:41.117029Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:41.117109Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:41.117181Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:41.117322Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:41.117420Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:41.117465Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:41.117531Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:41.128268Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:41.128395Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:41.142298Z node 102 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:41.142435Z node 102 :BS_CONTROLLER 
DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:41.144032Z node 102 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:41.144085Z node 102 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:41.144358Z node 102 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:41.144403Z node 102 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:41.145104Z node 102 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { ReadHostConfig { } } Command { DefineHostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenCombined" } } Command { ReadHostConfig { } } Rollback: true } Status { Success: true } Status { Success: true } Status { Success: true HostConfig { HostConfigId: 1 Name: "TestCommandRollbackWhenCombined" ItemConfigGeneration: 1 } } ErrorDescription: "transaction rollback" RollbackSuccess: true 2026-01-04T02:30:41.147448Z node 102 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { ReadHostConfig { } } } Status { Success: true } Success: true ConfigTxSeqNo: 1 |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveSaFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 18437, MsgBus: 25090 2026-01-04T02:29:06.035704Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327836948280620:2211];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:06.035773Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:29:06.876709Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:29:06.923906Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:06.924006Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:07.048220Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:07.093503Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:29:07.152245Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:29:07.191811Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:07.194418Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327832653313151:2081] 1767493745988185 != 1767493745988188 2026-01-04T02:29:07.474618Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:29:07.474640Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:29:07.474646Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:29:07.474740Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:29:08.444656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:29:08.445051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:29:08.454866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:29:08.455528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:29:08.457369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767493748503, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:29:08.478639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-04T02:29:08.478698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2026-01-04T02:29:08.479003Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7591327836948280975:2247] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-04T02:29:08.479420Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591327832653313119:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:29:08.479501Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591327832653313122:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:29:08.479543Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591327832653313125:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:29:08.479730Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591327836948280901:2208][/Root] Path was updated to new version: owner# [1:7591327836948280735:2119], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] 
AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:29:08.480032Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7591327836948280975:2247] Ack update: ack to# [1:7591327836948280802:2148], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-04T02:29:08.480191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2026-01-04T02:29:08.482106Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591327841243248418:2289][/Root] Path was updated to new version: owner# [1:7591327841243248412:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:29:08.482421Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591327841243248419:2290][/Root] Path was updated to new version: owner# [1:7591327841243248413:2285], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:29:08.482982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:29:11.038323Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591327836948280620:2211];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:29:11.038407Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:29:13.637358Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-04T02:29:13.655872Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3dchs7644010xsd2m0e5jw", Request has 18444976579955.895797s seconds to be completed 2026-01-04T02:29:13.662707Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/c5zo/002c97/r3tmp/spilling-tmp-runner/node_1_181b3ae3-911abfd7-ea646bb0-cb980364, actor: [1:7591327867013052383:2312] 2026-01-04T02:29:13.662910Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/c5zo/002c97/r3tmp/spilling-tmp-runner 2026-01-04T02:29:13.665797Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3dchs7644010xsd2m0e5jw", Created new session, sessionId: ydb://session/3?node_id=1&id=YzY3NDg5ZmUtODQ4ZWU0MDctNjE0M2Y2OGUtYTY3YTBmYw==, workerId: [1:7591327867013052401:2329], database: /Root, longSession: 1, local sessions count: 1 E0104 02:29:13.667345696 460377 dns_resolver_ares.cc:452] no server name supplied in dns URI 
E0104 02:29:13.667492444 460377 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E0104 02:29:13.669391286 460377 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:29:13.669516933 460377 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:29:13.666091Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3dchs7644010xsd2m0e5jw 2026-01-04T02:29:13.678076Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:29:13.678181Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-04T02:29:13.678210Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-04T02:29:13.695776Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591327867013052402:2507][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7591327836948280735:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:29:13.696186Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591327867013052403:2508][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7591327836948280735:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:29:13.740319Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-04T02:29:13.741647Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591327867013052422:2510][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7591327836948280735:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0104 02:29:13.746517002 460376 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:29:13.746680173 460376 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:29:13.754345Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-04T02:29:13.770934Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3dcpxa8rv7j3e1hd39ja6p, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzY3NDg5ZmUtODQ4ZWU0MDctNjE0M2Y2OGUtYTY3YTBmYw==, PoolId: , DatabaseId: , IsStreamingQuery: 0 ... credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 E0104 02:30:35.891017600 479720 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:30:35.891233799 479720 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:30:35.893104Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-04T02:30:35.957411Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [9:7591328217514261728:2339] TxId: 281474976710662. Ctx: { TraceId: 01ke3df6mwbkm8dzcd9yrzbd8v, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YTQ2MzAyOWMtNTNjNzE1ZDctNDQwZTFhNzUtNTliZTdmYWQ=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-04T02:30:35.972284Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. 
Ctx: { TraceId: 01ke3df6mwbkm8dzcd9yrzbd8v, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YTQ2MzAyOWMtNTNjNzE1ZDctNDQwZTFhNzUtNTliZTdmYWQ=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7591328217514261732:2354] 2026-01-04T02:30:35.972880Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01ke3df6mwbkm8dzcd9yrzbd8v, Database: /Root, SessionId: ydb://session/3?node_id=9&id=YTQ2MzAyOWMtNTNjNzE1ZDctNDQwZTFhNzUtNTliZTdmYWQ=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7591328217514261733:2355] 2026-01-04T02:30:35.976124Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3df6mwbkm8dzcd9yrzbd8v", Forwarded response to sender actor, requestId: 4, sender: [9:7591328217514261625:2338], selfId: [9:7591328191744457134:2266], source: [9:7591328217514261626:2339] 2026-01-04T02:30:35.978009Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=YTQ2MzAyOWMtNTNjNzE1ZDctNDQwZTFhNzUtNTliZTdmYWQ=, workerId: [9:7591328217514261626:2339], local sessions count: 0 |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest |85.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailResolve [GOOD] Test command err: Trying to start YDB, gRPC: 20814, MsgBus: 1244 2026-01-04T02:30:40.715182Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328242419367679:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:40.715348Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:41.063781Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:41.064067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:41.064340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:41.066914Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:41.172966Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:41.174934Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328242419367640:2081] 1767493840712034 != 1767493840712037 2026-01-04T02:30:41.269547Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:41.269999Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:41.270011Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:41.270023Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:41.270095Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:41.746117Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:41.781152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:41.861572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:42.181207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:42.406095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:42.489167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:44.316074Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328259599238704:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:44.316197Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:44.316539Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328259599238714:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:44.316588Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:44.675673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:44.711839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:44.762117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:44.801230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:44.845832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:44.880423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:44.909832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:44.948852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:45.020415Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328263894206882:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:45.020526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:45.020725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328263894206887:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:45.021069Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328263894206889:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:45.021128Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:45.024013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:45.033276Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328263894206890:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:30:45.103817Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328263894206942:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:45.715249Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328242419367679:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:45.715320Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:46.527947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:30:46.664221Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:7591328268189174636:4049], for# user0@builtin, access# SelectRow 2026-01-04T02:30:46.664472Z node 1 :KQP_EXECUTER ERROR: kqp_table_resolver.cpp:274: TxId: 281474976710675. Error resolving keys for entry: { TableId: [OwnerId: 72057594046644480, LocalPathId: 30] Access: 1 SyncVersion: false Status: AccessDenied Kind: KindUnknown PartitionsCount: 0 DomainInfo From: (Uint64 : NULL, Uint64 : NULL, Uint64 : NULL, Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-01-04T02:30:46.694218Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NWViM2JlOGItZTY0YjEyOTEtNzI5NzQ3OWEtMTMzZmZmZWQ=, ActorId: [1:7591328268189174609:2535], ActorState: ExecuteState, LegacyTraceId: 01ke3dfhgj1k2vg4catq5v5rs3, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Failed to resolve table `/Root/.sys/partition_stats` status: AccessDenied." issue_code: 2028 severity: 1 }{ message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } trace_id# 2026-01-04T02:30:46.694566Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493846661, txId: 281474976710674] shutting down |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheQueriesOrderByDesc >> KqpSysColV1::StreamSelectRange |85.8%| [TA] $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |85.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... results_accumulator.log} |85.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_trace/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsParametricRanges [GOOD] Test command err: Trying to start YDB, gRPC: 14011, MsgBus: 11276 2026-01-04T02:30:40.883014Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328240335223620:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:40.883241Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:40.942699Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:41.253837Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:41.263068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:41.263208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:41.265637Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:41.364379Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:41.366595Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328240335223596:2081] 1767493840875347 != 1767493840875350 2026-01-04T02:30:41.444221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:41.448823Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:41.448846Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:41.448878Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:41.448987Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:41.903654Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:41.999527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:42.008761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:30:42.086635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:42.248709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:42.420178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:42.503789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:44.446845Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328257515094654:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:44.446986Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:44.447351Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328257515094664:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:44.447403Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:44.763698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:44.809225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:44.857307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:44.891122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:44.918548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:44.958094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:45.034193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:45.076526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:45.164476Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328261810062835:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:45.164559Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:45.164632Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328261810062840:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:45.164735Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328261810062842:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:45.164772Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:45.169031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:45.182252Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328261810062844:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:30:45.264790Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328261810062895:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:45.885773Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328240335223620:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:45.885885Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:47.134449Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493847122, txId: 281474976710673] shutting down |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |85.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |85.8%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/data_integrity/ydb-core-kqp-ut-data_integrity >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-67 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 >> ResultFormatter::StructWithNoFields [GOOD] >> ResultFormatter::StructTypeNameAsString [GOOD] >> ResultFormatter::Void [GOOD] >> ResultFormatter::VariantTuple [GOOD] >> ResultFormatter::EmptyResultSet [GOOD] >> ResultFormatter::EmptyList [GOOD] >> ResultFormatter::EmptyTuple [GOOD] >> KqpSysColV0::UpdateAndDelete >> KqpSystemView::PartitionStatsRange3 >> ResultFormatter::Primitive [GOOD] >> ResultFormatter::Struct [GOOD] >> KqpSysColV0::InnerJoinSelectAsterisk >> KqpSysColV0::SelectRange >> KqpSystemView::NodesRange2 [GOOD] >> KqpSysColV1::SelectRowById >> KqpSysColV0::SelectRowAsterisk >> Cdc::ShouldBreakLocksOnConcurrentDropStream [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildSyncIndex >> KqpSystemView::FailNavigate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::Shard_Build_Error [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:28:45.812585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:28:45.812692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:28:45.812737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:28:45.812771Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:28:45.812808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:28:45.812837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:28:45.812889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:28:45.812982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:28:45.813950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:28:45.814239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:28:45.978565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:28:45.978632Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:46.011915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:28:46.016400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:28:46.016619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:28:46.045135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:28:46.045412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:28:46.046152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:28:46.046401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:28:46.049237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:46.049426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:28:46.050729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:28:46.050797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:46.050910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:28:46.050960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, 
domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:28:46.051032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:28:46.051166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:28:46.239804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.240803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.240956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.241044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.241159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.241228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.241288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.241352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.241443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.241509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.241592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.241664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.241763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.241946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:46.242024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... Z node 6 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186233409549 2026-01-04T02:30:47.500506Z node 6 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186233409549 Forgetting tablet 72075186233409549 2026-01-04T02:30:47.502817Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-01-04T02:30:47.503061Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-04T02:30:47.503439Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 1 Forgetting tablet 72075186233409548 2026-01-04T02:30:47.504118Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268829696, Sender [6:846:2813], Recipient [6:864:2826]: NKikimr::TEvTablet::TEvTabletDead 2026-01-04T02:30:47.504627Z node 6 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186233409548 2026-01-04T02:30:47.504734Z node 6 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186233409548 2026-01-04T02:30:47.506162Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-04T02:30:47.506383Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:30:47.507033Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:30:47.507098Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 42], at schemeshard: 72057594046678944 2026-01-04T02:30:47.507190Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:30:47.507238Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-04T02:30:47.507267Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:30:47.510272Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-04T02:30:47.510348Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-04T02:30:47.510605Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:30:47.510649Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409548 2026-01-04T02:30:47.510845Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:30:47.522510Z node 6 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186233409547 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:30:47.522622Z node 6 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186233409547 Initiating switch from PreOffline to Offline state 2026-01-04T02:30:47.526071Z node 6 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186233409547 Reporting state Offline to schemeshard 72057594046678944 2026-01-04T02:30:47.526252Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [6:843:2812], Recipient [6:859:2823]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-04T02:30:47.526454Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 859 RawX2: 25769806599 } TabletId: 72075186233409547 State: 4 2026-01-04T02:30:47.526510Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186233409547, state: Offline, at schemeshard: 72057594046678944 2026-01-04T02:30:47.526769Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877760, Sender [6:1391:3310], Recipient [6:859:2823]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594046678944 Status: OK ServerId: [6:1392:3311] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2026-01-04T02:30:47.526800Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3186: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-04T02:30:47.528077Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-04T02:30:47.528143Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:30:47.528267Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552133, Sender [6:1300:3231], Recipient [6:859:2823]: NKikimrTxDataShard.TEvStateChangedResult TabletId: 72057594046678944 State: 4 2026-01-04T02:30:47.528304Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3158: StateWork, processing event TEvDataShard::TEvStateChangedResult 2026-01-04T02:30:47.528332Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186233409547 state Offline 2026-01-04T02:30:47.528513Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877763, Sender [6:1391:3310], Recipient [6:859:2823]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594046678944 ClientId: 
[6:1391:3310] ServerId: [6:1392:3311] } 2026-01-04T02:30:47.528543Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-04T02:30:47.528756Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-04T02:30:47.528888Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-04T02:30:47.529117Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 Forgetting tablet 72075186233409547 2026-01-04T02:30:47.529383Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268829696, Sender [6:843:2812], Recipient [6:859:2823]: NKikimr::TEvTablet::TEvTabletDead 2026-01-04T02:30:47.529703Z node 6 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186233409547 2026-01-04T02:30:47.529777Z node 6 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186233409547 2026-01-04T02:30:47.531055Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:30:47.531092Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-04T02:30:47.531157Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:30:47.531198Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-04T02:30:47.531227Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:30:47.533418Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-04T02:30:47.533464Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409547 2026-01-04T02:30:47.533619Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:30:47.565038Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2026-01-04T02:30:47.565492Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 Issues { message: "One of the shards report BUILD_ERROR
: Error: Shard or requested range is empty\n
: Error: Datashard test fail\n at Filling stage, process has to be canceled, shardId: 72075186233409546, shardIdx: 72057594046678944:1" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 72057594046678944:1 Status: BUILD_ERROR UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n
: Error: Datashard test fail\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 360 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/vectors" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 Issues { message: "One of the shards report BUILD_ERROR
: Error: Shard or requested range is empty\n
: Error: Datashard test fail\n at Filling stage, process has to be canceled, shardId: 72075186233409546, shardIdx: 72057594046678944:1" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 72057594046678944:1 Status: BUILD_ERROR UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n
: Error: Datashard test fail\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 360 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/vectors" index { name: "index1" index_columns: "embedding" global_vector_kmeans_tree_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 } >> KqpSystemView::PartitionStatsRange2 |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::StructTypeNameAsString [GOOD] |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantTuple [GOOD] |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::EmptyTuple [GOOD] >> TSchemeShardMoveTest::Replace >> TSchemeShardMoveTest::Reject >> TSchemeShardMoveTest::TwoTables >> TSchemeShardMoveTest::Chain |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |85.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Struct [GOOD] >> TSchemeShardMoveTest::MoveIndex |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest >> TSchemeShardMoveTest::MoveMigratedTable >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 1690, MsgBus: 12387 2026-01-04T02:30:39.654374Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328237227038708:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:39.654448Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:39.718376Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328236085705595:2164];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:39.718467Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:39.723882Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591328237452340459:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:39.724063Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:39.741128Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591328236352220690:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:39.741275Z node 4 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:39.767463Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591328234694300104:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:39.767559Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:40.198381Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:40.224872Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:40.229826Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:40.235593Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:40.275147Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:40.370567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:40.370708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:40.375502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:40.375643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:40.375777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:40.375812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:40.376330Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:40.376366Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:40.377297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:40.377340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:40.427173Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 
2026-01-04T02:30:40.450633Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:40.450828Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-04T02:30:40.472495Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:30:40.472525Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-01-04T02:30:40.472783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:40.474204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:40.474387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:40.474529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:40.549843Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:40.553911Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:40.570750Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:40.582151Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:40.587528Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:40.586846Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:40.668528Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:40.728437Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:40.757369Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:40.815754Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:40.841847Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:41.006865Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will 
use file: (empty maybe) 2026-01-04T02:30:41.006892Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:41.006929Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:41.007010Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:42.075830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976735657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:42.267763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:42.597331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:42.810204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:42.884610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:44.654399Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328237227038708:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:44.654482Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:44.719501Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591328236085705595:2164];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:44.719570Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:44.725911Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591328237452340459:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:44.725996Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 
2026-01-04T02:30:44.741470Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591328236352220690:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:44.741536Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:44.765025Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591328234694300104:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:44.765090Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:45.281319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328262996844598:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:45.281456Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:45.281817Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328262996844608:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:45.281901Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:45.606122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:45.661988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:45.714917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:45.776692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:45.816988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:45.891928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:45.960600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:46.015120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:46.149560Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328267291812862:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:46.149659Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:46.149715Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328267291812867:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:46.149765Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328267291812869:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:46.149786Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:46.152848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976735670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:46.168990Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328267291812871:2400], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976735670 completed, doublechecking } 2026-01-04T02:30:46.240012Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328267291812946:4545] txid# 281474976735671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:48.042837Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493848030, txId: 281474976735673] shutting down |85.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-71 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 >> TSchemeShardMoveTest::MoveIndex [GOOD] >> TSchemeShardMoveTest::MoveIndexDoesNonExisted >> TSchemeShardMoveTest::ResetCachedPath >> TSchemeShardMoveTest::MoveTableForBackup >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 >> ResultFormatter::List [GOOD] >> ResultFormatter::Null [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-44 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 >> TSchemeShardMoveTest::Boot >> TSchemeShardMoveTest::MoveIndexSameDst |85.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |85.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots |85.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table_reboots/ydb-core-tx-schemeshard-ut_external_table_reboots >> TSchemeShardMoveTest::TwoTables [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 >> TSchemeShardMoveTest::Replace [GOOD] >> TSchemeShardMoveTest::ReplaceVectorIndex >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-56 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Null [GOOD] |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] >> TSchemeShardMoveTest::MoveMigratedTable [GOOD] >> TSchemeShardMoveTest::MoveOldTableWithIndex |85.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |85.9%| [LD] {RESULT} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/ydb_convert/ut/ydb-core-ydb_convert-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::TwoTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:30:51.897593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue 
configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:30:51.897692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:51.897739Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:30:51.897774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:30:51.897841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:30:51.897880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:30:51.897930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:51.897997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:30:51.898969Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:30:51.899326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:30:52.014595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:30:52.014660Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:52.041513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:30:52.041762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:30:52.042027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:30:52.050674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:30:52.050880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:30:52.051649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:52.051916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:30:52.054971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:52.055173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:30:52.056416Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:52.056480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:52.056582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:30:52.056627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:30:52.056668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:30:52.056847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.064116Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:30:52.258224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:30:52.258473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.258677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:30:52.258723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:30:52.258939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:30:52.259011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:52.263269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:52.263481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:30:52.263815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.263873Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: 
TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:30:52.263930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:30:52.263977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:30:52.272264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.272347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:30:52.272400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:30:52.274509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.274564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.274620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:52.274671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:30:52.278622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:30:52.284394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:30:52.284630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:30:52.285760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:52.285940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:30:52.286007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:52.286295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:30:52.286350Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:52.288770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:30:52.288906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:30:52.291408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:52.291459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 8285Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:30:54.178507Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table1" took 223us result status StatusPathDoesNotExist 2026-01-04T02:30:54.178752Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:30:54.179331Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:30:54.179565Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove1" took 242us result status StatusSuccess 2026-01-04T02:30:54.180045Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove1" PathDescription { Self { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: 
"TableMove1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:30:54.180880Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:30:54.181087Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table2" took 180us result status StatusPathDoesNotExist 2026-01-04T02:30:54.181227Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:30:54.181749Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:30:54.281360Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove2" took 99.5ms result status StatusSuccess 2026-01-04T02:30:54.281851Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove2" PathDescription { Self { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TableMove2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:30:54.282767Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:30:54.282946Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 199us result status StatusSuccess 2026-01-04T02:30:54.283410Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 13 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "TableMove1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "TableMove2" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> ResultFormatter::FormatEmptySchema [GOOD] >> ResultFormatter::FormatNonEmptySchema [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-18 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 >> TSchemeShardMoveTest::MoveIndexSameDst [GOOD] >> TSchemeShardMoveTest::MoveIntoBuildingIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIndexDoesNonExisted [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:30:52.123852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, 
RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:30:52.123945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:52.123985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:30:52.124038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:30:52.124085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:30:52.124118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:30:52.124207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:52.124272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:30:52.125120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:30:52.125428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:30:52.226883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:30:52.226934Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:52.244282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:30:52.248680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:30:52.248899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:30:52.254414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:30:52.254670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:30:52.257326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:52.257633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:30:52.261740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:52.261964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:30:52.263269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2026-01-04T02:30:52.263339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:52.263436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:30:52.263480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:30:52.263522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:30:52.263659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.270864Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:241:2058] recipient: [1:15:2062] 2026-01-04T02:30:52.412507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:30:52.412746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.412960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:30:52.413019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:30:52.413274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:30:52.413364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:52.415563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:52.415763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:30:52.415946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.416001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 
2026-01-04T02:30:52.416048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:30:52.416099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:30:52.418179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.418240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:30:52.418279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:30:52.420090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.420135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.420214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:52.420257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:30:52.423361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:30:52.425350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:30:52.425549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:30:52.426689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:52.426821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:30:52.426869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:52.427262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:30:52.427331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose 
HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:52.427491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:30:52.427579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:30:52.430462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:52.430523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:30:54.549340Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:30:54.549572Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Sync" took 251us result status StatusSuccess 2026-01-04T02:30:54.550297Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Sync" PathDescription { Self { Name: "Sync" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 3 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Sync" LocalPathId: 3 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" 
LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:30:54.550941Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:30:54.551154Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Async" took 229us result status StatusSuccess 2026-01-04T02:30:54.551862Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/Async" PathDescription { Self { Name: "Async" PathId: 5 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 5 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Async" LocalPathId: 5 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 
InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 5 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Boot [GOOD] >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly >> TSchemeShardMoveTest::MoveTableForBackup [GOOD] >> TSchemeShardMoveTest::MoveTableWithSequence |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::FormatNonEmptySchema [GOOD] |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest >> TSchemeShardMoveTest::ResetCachedPath [GOOD] >> Cdc::MustNotLoseSchemaSnapshotWithVolatileTx [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAlterTable >> KqpSysColV1::StreamSelectRange [GOOD] >> KqpSysColV1::UpdateAndDelete [GOOD] >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] >> ResultFormatter::Utf8WithQuotes [GOOD] >> ResultFormatter::VariantStruct [GOOD] >> ResultFormatter::Tuple [GOOD] >> DataShardVolatile::VolatileCommitOnBlobStorageFailure-UseSink [GOOD] >> ResultFormatter::Tagged [GOOD] >> DataShardVolatile::VolatileTxAbortedOnSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ResetCachedPath [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 
72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:30:54.149774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:30:54.149982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:54.150032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:30:54.150068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:30:54.150111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:30:54.150142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:30:54.150195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:54.150264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:30:54.151178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:30:54.151511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:30:54.249968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:30:54.250038Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:54.266231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:30:54.266452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:30:54.266650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:30:54.280789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:30:54.281018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:30:54.281860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:54.282139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:30:54.285329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: 
TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:54.285530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:30:54.286866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:54.286934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:54.287045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:30:54.287093Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:30:54.287137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:30:54.287302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:30:54.446853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.447823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.447947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.448022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.448117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:30:54.448241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.448320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.448414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.448513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.448611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.448682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.448752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.448815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.448916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.449024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 44 2026-01-04T02:30:56.231006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2026-01-04T02:30:56.231165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409549 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 1 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:30:56.236225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2026-01-04T02:30:56.239711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72075186233409549 for txId: 105 at step: 5000040 2026-01-04T02:30:56.240263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:56.240401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:30:56.240464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_table.cpp:373: TAlterTable TPropose operationId# 105:0 HandleReply TEvOperationPlan, operationId: 105:0, stepId: 5000040, at schemeshard: 72057594046678944 2026-01-04T02:30:56.240783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 129 2026-01-04T02:30:56.240961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: 
IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000040 2026-01-04T02:30:56.257625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:56.257683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:30:56.257983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:56.258040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-04T02:30:56.258779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:30:56.258865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 105:0 ProgressState at tablet: 72057594046678944 2026-01-04T02:30:56.266604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:30:56.266749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 8 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:30:56.266793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:30:56.266845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 8 2026-01-04T02:30:56.266896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:30:56.267028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 105 2026-01-04T02:30:56.268574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000040 OrderId: 105 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1361 } } CommitVersion { Step: 5000040 TxId: 105 } 2026-01-04T02:30:56.268624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2026-01-04T02:30:56.268847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 
72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000040 OrderId: 105 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1361 } } CommitVersion { Step: 5000040 TxId: 105 } 2026-01-04T02:30:56.268993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 105 Step: 5000040 OrderId: 105 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1361 } } CommitVersion { Step: 5000040 TxId: 105 } debug: NTableState::TProposedWaitParts operationId# 105:0 2026-01-04T02:30:56.275201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 1084 RawX2: 4294970319 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2026-01-04T02:30:56.275262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 105, tablet: 72075186233409549, partId: 0 2026-01-04T02:30:56.275440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 105:0, at schemeshard: 72057594046678944, message: Source { RawX1: 1084 RawX2: 4294970319 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2026-01-04T02:30:56.275507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:30:56.275597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 105:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 1084 RawX2: 4294970319 } Origin: 72075186233409549 State: 2 TxId: 105 Step: 0 Generation: 2 2026-01-04T02:30:56.275686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 105:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:56.275724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:30:56.275782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 105:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-01-04T02:30:56.275845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 129 -> 240 2026-01-04T02:30:56.277088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:30:56.279265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:30:56.279434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:30:56.279723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:30:56.279793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-04T02:30:56.279903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-04T02:30:56.279941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:30:56.279999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-04T02:30:56.280035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:30:56.280076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: true 2026-01-04T02:30:56.280180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:743:2721] message: TxId: 105 2026-01-04T02:30:56.280233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:30:56.280300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-04T02:30:56.280355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-04T02:30:56.280484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:30:56.286910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:30:56.286977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:1249:3169] TestWaitNotification: OK eventTxId 105 |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> Cdc::InitialScanUpdatedRows [GOOD] >> Cdc::InitialScanAndLimits |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::VariantStruct [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveOldTableWithIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:30:52.507225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:30:52.507326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:52.507367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:30:52.507403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:30:52.507439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:30:52.507471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:30:52.507536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:52.507606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:30:52.508464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:30:52.508764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:30:52.613239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:30:52.613306Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:52.634203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:30:52.634431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:30:52.634647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:30:52.643269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:30:52.643487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:30:52.644285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:52.644603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:30:52.647699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:52.647921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:30:52.649136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:52.649195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2026-01-04T02:30:52.649304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:30:52.649345Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:30:52.649387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:30:52.649566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.767099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:52.767854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:52.767942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:52.767995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:52.768053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:52.768110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:52.768148Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:52.768217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:52.768291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:52.768353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:52.768387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:52.768427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:52.768459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:52.768517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:52.768572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... hemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:30:56.579301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:2 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 731 RawX2: 8589937305 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:30:56.579380Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:2, shardIdx: 72057594046678944:2, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:56.579422Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:2, at schemeshard: 72057594046678944 2026-01-04T02:30:56.579466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:30:56.579513Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:2 129 -> 240 2026-01-04T02:30:56.580140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 738 RawX2: 8589937310 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:30:56.580202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-04T02:30:56.580313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 738 RawX2: 8589937310 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:30:56.580359Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:30:56.580429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 738 RawX2: 8589937310 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:30:56.580477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged 
accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:56.580509Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:56.580537Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-04T02:30:56.580569Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-04T02:30:56.584999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2026-01-04T02:30:56.585169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:56.586489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:2, at schemeshard: 72057594046678944 2026-01-04T02:30:56.586848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:2, at schemeshard: 72057594046678944 2026-01-04T02:30:56.586902Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 102:2 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:30:56.586962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 102:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 40], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-04T02:30:56.587104Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:2 progress is 2/3 2026-01-04T02:30:56.587144Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2026-01-04T02:30:56.587187Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:2 progress is 2/3 2026-01-04T02:30:56.587225Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/3 2026-01-04T02:30:56.587280Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/3, is published: true 2026-01-04T02:30:56.587488Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:56.587772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:30:56.587811Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:30:56.587848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 102:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 38], TargetPathId: 
[OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-04T02:30:56.587930Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 3/3 2026-01-04T02:30:56.587962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2026-01-04T02:30:56.587992Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 3/3 2026-01-04T02:30:56.588017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2026-01-04T02:30:56.588044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/3, is published: true 2026-01-04T02:30:56.588111Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:786:2752] message: TxId: 102 2026-01-04T02:30:56.588156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/3 2026-01-04T02:30:56.588227Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:30:56.588264Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:30:56.588408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 4 2026-01-04T02:30:56.588450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:30:56.588490Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:1 2026-01-04T02:30:56.588511Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:1 2026-01-04T02:30:56.588544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 3 2026-01-04T02:30:56.588568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:30:56.588593Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:2 2026-01-04T02:30:56.588616Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:2 2026-01-04T02:30:56.588659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-01-04T02:30:56.588683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:30:56.589088Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:30:56.589141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-04T02:30:56.589213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:30:56.589252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-04T02:30:56.589310Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:30:56.589346Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:30:56.589383Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:30:56.595781Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:30:56.595866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:886:2844] 2026-01-04T02:30:56.596018Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> KqpSystemView::CompileCacheQueriesOrderByDesc [GOOD] >> TSchemeShardMoveTest::Chain [GOOD] >> TSchemeShardMoveTest::Index >> THealthCheckTest::TestStateStorageYellow [GOOD] >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] >> ResultFormatter::Optional [GOOD] >> ResultFormatter::Pg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamSelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 62752, MsgBus: 15608 2026-01-04T02:30:49.044773Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328279886235033:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:49.045174Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:49.329926Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:49.349385Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:49.349544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-04T02:30:49.430107Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328279886235007:2081] 1767493849043813 != 1767493849043816 2026-01-04T02:30:49.442588Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:49.447862Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:49.537890Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:49.558424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:49.558450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:49.558459Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:49.558529Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:50.070389Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:50.093041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:50.164984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:50.352968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:50.525627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:50.617005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.040272Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328297066106069:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:53.040400Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:53.041005Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328297066106079:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:53.041064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:53.448134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.508535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.567174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.617074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.665279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.732020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.770899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.818953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.902416Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328297066106952:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:53.902494Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:53.902650Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328297066106957:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:53.902682Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328297066106959:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:53.902776Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:53.906699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:53.920128Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328297066106961:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:30:54.007973Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328301361074310:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:54.045094Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328279886235033:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:54.045155Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:55.910284Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493855939, txId: 281474976710673] shutting down |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Tagged [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 27423, MsgBus: 4987 2026-01-04T02:30:47.175699Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328272263223717:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:47.175827Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:47.367136Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:47.377240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:47.377362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:47.425457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:47.469988Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:47.470024Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328272263223690:2081] 1767493847174090 != 1767493847174093 2026-01-04T02:30:47.529552Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:47.532173Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:47.532216Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:47.532236Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: 
failed to initialize from file: (empty maybe) 2026-01-04T02:30:47.532341Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:47.914289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:47.967099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:48.076937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:48.196823Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:48.248316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:48.315773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:50.412328Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328285148127462:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:50.412475Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:50.412885Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328285148127472:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:50.412929Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:50.733092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:50.784703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:50.836426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:50.880261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:50.913386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:51.000585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:51.061336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:51.125168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:51.275147Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328289443095639:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:51.275232Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:51.280675Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328289443095644:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:51.280785Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328289443095645:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:51.280832Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:51.290805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:51.318059Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328289443095648:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:30:51.379519Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328289443095699:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:52.176329Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328272263223717:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:52.176427Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> ResultFormatter::Pg [GOOD] >> ResultFormatter::EmptyDict [GOOD] >> ResultFormatter::Dict [GOOD] >> ResultFormatter::Decimal [GOOD] |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowAsterisk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveTableWithSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:30:54.165440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:30:54.165523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:54.165562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:30:54.165597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:30:54.165629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:30:54.165665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:30:54.165713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:54.165771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:30:54.166635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:30:54.166932Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:30:54.274166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:30:54.274244Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:54.291486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:30:54.291737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:30:54.291976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:30:54.306366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:30:54.306576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:30:54.307341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:54.307621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:30:54.310471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:54.310653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:30:54.311565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:54.311611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:54.311705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:30:54.311750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:30:54.311787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:30:54.311921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:30:54.445366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.446359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: 
EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.446491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.446589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.446662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.446743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.446812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.446906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.446989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.447075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.447137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.447198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.447269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.447369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.447457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
EvNotifyTxCompletionResult to actorId: [2:779:2746] message: TxId: 102 2026-01-04T02:30:57.564950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2026-01-04T02:30:57.564994Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:30:57.565025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:30:57.565172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-01-04T02:30:57.565211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:30:57.565250Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:1 2026-01-04T02:30:57.565271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:1 2026-01-04T02:30:57.565314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-04T02:30:57.565339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:30:57.565743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:30:57.565796Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-04T02:30:57.565897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:30:57.565941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:30:57.565979Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:30:57.575237Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:30:57.575299Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:878:2831] 2026-01-04T02:30:57.575670Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2026-01-04T02:30:57.579504Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at 
schemeshard: 72057594046678944 2026-01-04T02:30:57.579727Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/myseq" took 255us result status StatusPathDoesNotExist 2026-01-04T02:30:57.579876Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/myseq\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table/myseq" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:30:57.580251Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:30:57.580394Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 159us result status StatusPathDoesNotExist 2026-01-04T02:30:57.580512Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Table" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:30:57.580842Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:30:57.581031Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove" took 203us result status StatusSuccess 2026-01-04T02:30:57.581460Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove" PathDescription { Self { Name: "TableMove" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: true } Table { Name: "TableMove" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 DefaultFromSequence: "myseq" NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 IsBackup: false Sequences { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:30:57.582047Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/myseq" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:30:57.582211Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove/myseq" took 181us result status StatusSuccess 2026-01-04T02:30:57.582489Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/myseq" PathDescription { Self { Name: "myseq" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSequence CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SequenceVersion: 2 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SequenceDescription { Name: "myseq" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 2 SequenceShard: 72075186233409546 MinValue: 1 MaxValue: 9223372036854775807 StartValue: 1 Cache: 1 Increment: 1 Cycle: false DataType: "Int64" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::MoveIntoBuildingIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:30:54.568759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:30:54.568852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:54.568900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:30:54.568938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:30:54.568978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:30:54.569008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:30:54.569066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:54.569141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:30:54.570124Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:30:54.570451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:30:54.678893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:30:54.678975Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:54.695691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:30:54.695935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:30:54.696159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:30:54.707857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:30:54.708083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:30:54.708960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:54.709255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:30:54.712417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:54.712648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:30:54.713921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:54.713986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:54.714115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:30:54.714164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:30:54.714217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:30:54.714391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:30:54.721417Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:30:54.858312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2026-01-04T02:30:54.858614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:54.858824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:30:54.858886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:30:54.859154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:30:54.859231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:54.861845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:54.862145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:30:54.862406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:54.862487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:30:54.862531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:30:54.862571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:30:54.871025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:54.871114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:30:54.871169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:30:54.873495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:54.873567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:54.873630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:54.873719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2026-01-04T02:30:54.877513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:30:54.879998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:30:54.880232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:30:54.881396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:54.881550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:30:54.881628Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:54.881965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:30:54.882024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:54.882221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:30:54.882311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:30:54.884609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:54.884675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
ep: 5000042 2026-01-04T02:30:57.621669Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000042, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:57.621762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 8589936752 } } Step: 5000042 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:30:57.621835Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000042 2026-01-04T02:30:57.621886Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710760:0 128 -> 240 2026-01-04T02:30:57.623786Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-01-04T02:30:57.623842Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2026-01-04T02:30:57.623927Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-04T02:30:57.623961Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-04T02:30:57.623999Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-04T02:30:57.624031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-04T02:30:57.624066Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-01-04T02:30:57.624137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:128:2152] message: TxId: 281474976710760 2026-01-04T02:30:57.624195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-04T02:30:57.624230Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-01-04T02:30:57.624272Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710760:0 2026-01-04T02:30:57.624336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-01-04T02:30:57.626480Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-01-04T02:30:57.626540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710760 2026-01-04T02:30:57.626603Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2026-01-04T02:30:57.626717Z node 2 :BUILD_INDEX 
DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:857:2815], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-01-04T02:30:57.628234Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2026-01-04T02:30:57.628361Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:857:2815], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:30:57.628442Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-04T02:30:57.629819Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2026-01-04T02:30:57.629937Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Sync, IndexColumn: value0, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:857:2815], AlterMainTableTxId: 0, 
AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:30:57.629998Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2026-01-04T02:30:57.630126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:30:57.630172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:1038:2984] TestWaitNotification: OK eventTxId 102 2026-01-04T02:30:57.630761Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:30:57.631033Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 314us result status StatusSuccess 2026-01-04T02:30:57.631576Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "SomeIndex" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableIndexes { Name: "Sync" LocalPathId: 41 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpSystemView::FailNavigate [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-68 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 >> KqpSysColV0::SelectRange [GOOD] >> KqpSystemView::PartitionStatsRange3 [GOOD] >> KqpSystemView::PartitionStatsRange2 [GOOD] >> TSchemeShardMoveTest::Reject [GOOD] >> TSchemeShardMoveTest::OneTable |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest >> KqpSysColV1::SelectRowById [GOOD] |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Pg [GOOD] |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-StrictAclCheck [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::AsyncIndexWithSyncInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:30:54.559566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:30:54.559649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:54.559694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:30:54.559726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: 
OperationsProcessing config: using default configuration 2026-01-04T02:30:54.559788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:30:54.559816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:30:54.559867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:54.559924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:30:54.560774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:30:54.561069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:30:54.663159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:30:54.663224Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:54.677655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:30:54.677959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:30:54.678150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:30:54.687384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:30:54.687613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:30:54.688362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:54.688602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:30:54.691958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:54.692193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:30:54.693376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:54.693438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:54.693544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:30:54.693584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2026-01-04T02:30:54.693622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:30:54.693775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:30:54.871833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.872810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.872930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.873020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.873109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.873204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.873275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:30:54.873464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.873567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.873657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.873716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.873782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.873865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.873963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:30:54.874066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 5186233409546 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-04T02:30:57.788905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:2, shardIdx: 72057594046678944:2, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:57.788948Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:2, at schemeshard: 72057594046678944 2026-01-04T02:30:57.788986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:2, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:30:57.789033Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:2 129 -> 240 2026-01-04T02:30:57.790128Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 730 RawX2: 8589937303 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-04T02:30:57.790178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409547, partId: 0 2026-01-04T02:30:57.790292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 730 RawX2: 8589937303 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-04T02:30:57.790334Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:30:57.790402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 730 RawX2: 8589937303 } Origin: 72075186233409547 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-04T02:30:57.790457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:57.790490Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:30:57.790518Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-04T02:30:57.790548Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2026-01-04T02:30:57.793628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: 
TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2026-01-04T02:30:57.795711Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:30:57.798998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:2, at schemeshard: 72057594046678944 2026-01-04T02:30:57.799468Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:2, at schemeshard: 72057594046678944 2026-01-04T02:30:57.799524Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 103:2 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:30:57.799575Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 103:2 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 40], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-04T02:30:57.799699Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:2 progress is 2/3 2026-01-04T02:30:57.799757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2026-01-04T02:30:57.799798Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:2 progress is 2/3 2026-01-04T02:30:57.799833Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 2/3 2026-01-04T02:30:57.799897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 2/3, is published: true 2026-01-04T02:30:57.800304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:30:57.800619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:30:57.800660Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:30:57.800695Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 103:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 38], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-04T02:30:57.800771Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 3/3 2026-01-04T02:30:57.800799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2026-01-04T02:30:57.800839Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 3/3 2026-01-04T02:30:57.800871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2026-01-04T02:30:57.800899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 3/3, is published: true 
2026-01-04T02:30:57.800938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 3/3 2026-01-04T02:30:57.800983Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:30:57.801035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:30:57.801172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 4 2026-01-04T02:30:57.801220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:30:57.801263Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:1 2026-01-04T02:30:57.801287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:1 2026-01-04T02:30:57.801320Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 3 2026-01-04T02:30:57.801345Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:30:57.801367Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:2 2026-01-04T02:30:57.801388Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:2 2026-01-04T02:30:57.801438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-01-04T02:30:57.801462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:30:57.804051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:30:57.804135Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-04T02:30:57.804253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:30:57.804305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-04T02:30:57.804376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:30:57.804417Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:30:57.804466Z node 
2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:30:57.818270Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [2:674:2663] Handle TEvGetProxyServicesRequest 2026-01-04T02:30:57.818534Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 3 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 103 2026-01-04T02:30:57.872214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:30:57.872278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:30:57.872752Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:30:57.872865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:30:57.872910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:1086:2968] TestWaitNotification: OK eventTxId 103 |85.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/result_formatter/ut/unittest >> ResultFormatter::Decimal [GOOD] |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/result_formatter/ut/unittest >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-24 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheQueriesOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 64048, MsgBus: 63134 2026-01-04T02:30:49.073274Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328281030363819:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:49.073770Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:49.349954Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:49.356775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:49.356926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:49.434034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:49.460967Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328281030363780:2081] 1767493849071186 != 1767493849071189 2026-01-04T02:30:49.482826Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-04T02:30:49.590036Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:49.598689Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:49.598715Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:49.598724Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:49.598828Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:50.085475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:50.086129Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:50.108581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:30:50.156733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:50.328381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:50.520979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:50.601230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.118453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328298210234851:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:53.118589Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:53.119068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328298210234863:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:53.119121Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:53.466291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.503407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.561251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.633930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.673370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.705737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.739470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.784969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.862433Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328298210235731:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:53.862501Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:53.862865Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328298210235737:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:53.862929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328298210235735:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:53.863052Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:53.867477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:53.883133Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328298210235740:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:30:53.946672Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328298210235791:3785] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:54.073383Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328281030363819:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:54.073498Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> BsControllerConfig::MoveGroups [GOOD] >> TPQTest::TestPartitionWriteQuota [GOOD] >> TPQTest::TestPartitionedBlobFails |85.9%| [TA] $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange3 [GOOD] Test command err: Trying to start YDB, gRPC: 25338, MsgBus: 23242 2026-01-04T02:30:50.849112Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328285776163576:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:50.849168Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:51.298749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:51.298885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:51.364950Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:51.390896Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328285776163544:2081] 1767493850848223 != 1767493850848226 2026-01-04T02:30:51.397679Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:51.405022Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:51.609918Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:51.831485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:51.831512Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:51.831521Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:51.831614Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:51.874894Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:52.355129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:52.360108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:30:52.406986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:52.580861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:52.764314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:52.850406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:54.920760Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328302956034599:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:54.920864Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:54.921269Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328302956034609:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:54.921317Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.356390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.399728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.437537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.472044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.521747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.597522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.646134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.712699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.808048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328307251002775:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.808124Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.808240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328307251002780:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.808617Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328307251002782:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.808711Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.812278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:55.826621Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328307251002783:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:30:55.849976Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328285776163576:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:55.850190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:55.908602Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328307251002839:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:57.884353Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493857862, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 7918, MsgBus: 23565 2026-01-04T02:30:51.015211Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328288025665027:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:51.015641Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:51.180119Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:51.612563Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:51.661402Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:51.666964Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:51.667059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:51.681882Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:51.871902Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:51.871921Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:51.871929Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:51.872044Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:51.879885Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-04T02:30:52.032368Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:52.357580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:52.367522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:30:52.415437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:52.625622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:52.840778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:52.912945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:54.903539Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328300910568755:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:54.903630Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:54.903930Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328300910568765:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:54.903959Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.274814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.325400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.360460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.385595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.449254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.491020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.524463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.569333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.645015Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328305205536934:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.645115Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.645345Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328305205536939:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.645406Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328305205536940:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.645665Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.648736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:55.660532Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328305205536943:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:30:55.746666Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328305205536994:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:56.015722Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328288025665027:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:56.015798Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |85.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |85.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |85.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRange2 [GOOD] Test command err: Trying to start YDB, gRPC: 1562, MsgBus: 16756 2026-01-04T02:30:51.089330Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328290053289912:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:51.089424Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:51.104918Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:51.544423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:51.556896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:51.565979Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:51.622963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:51.710077Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328290053289779:2081] 1767493851027258 != 1767493851027261 2026-01-04T02:30:51.712806Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:51.762572Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:51.982225Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:51.982243Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:51.982251Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:51.982346Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:52.102159Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:52.642468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:52.743333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:52.996294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.202602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.311441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.132513Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328307233160831:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.132649Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.138393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328307233160841:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.145638Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.534382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.577479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.614121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.653863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.698114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.735212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.789694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.840802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.934832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328307233161719:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.934929Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.935285Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328307233161724:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.935335Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328307233161725:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.935447Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.938780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:55.954059Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328307233161728:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:30:56.023022Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328311528129075:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:56.084608Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328290053289912:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:56.084681Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:58.073075Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493858029, txId: 281474976715673] shutting down |85.9%| [TA] {RESULT} $(B)/ydb/core/fq/libs/result_formatter/ut/test-results/unittest/{meta.json ... results_accumulator.log} |85.9%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/slow/ydb-public-sdk-cpp-src-client-topic-ut-slow |85.9%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::FailNavigate [GOOD] Test command err: Trying to start YDB, gRPC: 62432, MsgBus: 24316 2026-01-04T02:30:51.098154Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328290199815952:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:51.098363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:51.122906Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:51.542472Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:51.665058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:51.665184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:51.667379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:51.803724Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:51.869887Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328290199815841:2081] 1767493851048835 != 1767493851048838 2026-01-04T02:30:51.960155Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-04T02:30:52.030378Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:52.033681Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:52.033706Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:52.033715Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:52.033883Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:52.757905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:52.830621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.069218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.271234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.372310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.162959Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328307379686896:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.163071Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.163404Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328307379686906:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.163449Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.444767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.481673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.529059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.566001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.608770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.673223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.703322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.753380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.854413Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328307379687780:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.854510Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.854804Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328307379687786:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.854872Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328307379687785:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.854919Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.858484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:55.870337Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328307379687789:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:30:55.967565Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328307379687840:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:56.073920Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328290199815952:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:56.073988Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:57.785986Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:7591328315969622763:3976], for# user0@builtin, access# DescribeSchema 2026-01-04T02:30:57.786020Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:7591328315969622763:3976], for# user0@builtin, access# DescribeSchema 2026-01-04T02:30:57.799117Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328315969622753:2540], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/.sys/partition_stats]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:30:57.799539Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YWE1NzZkMGQtYmM5ZTI1NGMtOWQyZjAwZC0zZmM1M2I0MQ==, ActorId: [1:7591328315969622746:2536], ActorState: ExecuteState, LegacyTraceId: 01ke3dfwe504wrqyt363ecjaas, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 13 } message: "At function: KiReadTable!" end_position { row: 2 column: 13 } severity: 1 issues { position { row: 2 column: 13 } message: "Cannot find table \'db.[/Root/.sys/partition_stats]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 13 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# |85.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::SelectRange [GOOD] Test command err: Trying to start YDB, gRPC: 28000, MsgBus: 7910 2026-01-04T02:30:51.015323Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328284879158840:2207];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:51.015400Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:51.048937Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:30:51.334068Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:51.499259Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:51.510045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:51.521254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:51.695723Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:51.697988Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328284879158670:2081] 1767493850951871 != 1767493850951874 2026-01-04T02:30:51.722763Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:51.930739Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:51.930761Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-04T02:30:51.930770Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:51.930865Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:52.017928Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:52.428786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:52.528838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:52.746067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:52.985056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.106167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.172386Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328306353997022:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.172530Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.175023Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328306353997032:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.175165Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.493581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.537781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.575070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.612858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.656982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.732708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.778722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.830051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.909662Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328306353997898:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.909779Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.910072Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328306353997903:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.910121Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328306353997904:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.910257Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.915196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:55.928487Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328306353997907:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:30:55.972738Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328284879158840:2207];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:55.972827Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:56.017768Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328310648965254:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> KqpSysColV1::StreamInnerJoinSelect |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowById [GOOD] Test command err: Trying to start YDB, gRPC: 7988, MsgBus: 24296 2026-01-04T02:30:51.021938Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328287140896562:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:51.025153Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:51.409891Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:51.491566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:51.491690Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:51.586010Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328282845929163:2081] 1767493851002794 != 1767493851002797 2026-01-04T02:30:51.609380Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:51.617736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:51.661924Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:51.938776Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:51.938802Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:51.938815Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:51.938953Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-04T02:30:52.024641Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:52.573451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:52.601329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:30:52.673459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:52.876972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.171207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.272583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.327826Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328304320767525:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.327933Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.328321Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328304320767535:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.328393Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.640735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.690894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.722600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.773634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.806395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.848759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.886000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.928450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:56.009505Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328287140896562:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:56.009642Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:30:56.023412Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328308615735703:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:56.023514Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328308615735708:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:56.023520Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:56.023746Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328308615735710:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:56.023793Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:56.032947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:56.058893Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328308615735711:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:30:56.124570Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328308615735765:3776] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestStateStorageYellow [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:28:41.190400Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:28:41.196974Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:28:41.200885Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:685:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:28:41.320390Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:28:41.326559Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:28:41.327512Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2225], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:28:41.327765Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:28:41.327958Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:28:41.329869Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:28:41.329926Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:28:41.854808Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:42.024781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:28:42.024931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:42.025399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:28:42.025471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:42.092692Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:28:42.093325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:28:42.093688Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:28:42.271598Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:28:42.323233Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:28:43.684582Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:28:43.684640Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:28:43.684671Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:28:43.685160Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-04T02:28:53.451992Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:28:53.452727Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:28:53.455410Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:766:2348], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:28:53.468051Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:28:53.470151Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:385:2229], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:28:53.470646Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:28:53.470718Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:28:53.472780Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:28:53.472917Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:28:54.377572Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:54.613411Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:28:54.613574Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:54.614665Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:28:54.614767Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:54.672743Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-04T02:28:54.682403Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:28:54.683208Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:28:54.794997Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:28:54.836779Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:29:04.410631Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:29:04.413776Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-04T02:29:04.431066Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-04T02:29:04.433121Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:29:04.433203Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:29:04.433261Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:29:04.438832Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:29:04.449030Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 4 2026-01-04T02:29:04.449458Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-04T02:29:04.522373Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:29:04.522551Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:29:04.573243Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-01-04T02:29:04.574235Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:29:04.954831Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 5 2026-01-04T02:29:04.955617Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connected -> Disconnected self_check_result: EMERGENCY issue_log { id: "YELLOW-7932-1231c6b1-3" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 3 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-4" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 4 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-5" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 5 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-3" reason: "YELLOW-7932-1231c6b1-4" reason: "YELLOW-7932-1231c6b1-5" type: "COMPUTE" level: 2 } issue_log { id: "RED-b954-1231c6b1-PersQueue" status: RED message: "Tablets are dead" location { compute { tablet { type: "PersQueue" id: "72075186224037888" count: 1 } } database { name: "/Root" } node { } } type: "TABLET" level: 4 } issue_log { id: "RED-3c4a-1231c6b1" status: RED message: "Compute has issues with tablets" location { 
database { name: "/Root" } } reason: "RED-b954-1231c6b1-PersQueue" type: "COMPUTE" level: 2 } issue_log { id: "R ... 1-04T02:30:50.167527Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:50.167599Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:50.167858Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:50.167927Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:50.168204Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:50.168275Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:50.168522Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:50.168590Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:50.168845Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:50.168914Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:50.258656Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2026-01-04T02:30:50.259187Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 16 Cookie 16 2026-01-04T02:30:50.259417Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 17 Cookie 17 2026-01-04T02:30:50.259576Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 18 Cookie 18 2026-01-04T02:30:50.259705Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 19 Cookie 19 2026-01-04T02:30:50.259818Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 20 Cookie 20 2026-01-04T02:30:50.259930Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 21 Cookie 21 2026-01-04T02:30:50.260047Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 22 Cookie 22 2026-01-04T02:30:50.260436Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:50.261043Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:50.261475Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:50.261707Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:50.262039Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2026-01-04T02:30:50.262246Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(19, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:50.262432Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:50.262591Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(22, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:50.262739Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(21, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:50.321402Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:50.368993Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:50.388360Z node 18 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:50.423146Z node 19 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:50.490613Z node 17 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:50.532250Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:50.618014Z node 22 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:50.636260Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:50.667845Z node 21 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:30:52.210507Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:52.210589Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:52.210645Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:52.211679Z node 14 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:52.298495Z node 17 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:52.298667Z node 18 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:52.298744Z node 19 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:52.298812Z node 20 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:52.298876Z node 21 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:52.299312Z node 22 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:52.303198Z node 15 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:52.303311Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:52.303383Z node 16 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; self_check_result: EMERGENCY issue_log { id: "YELLOW-7932-1231c6b1-17" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 17 host: "::1" port: 12004 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-18" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 18 host: "::1" port: 12005 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-19" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 19 host: "::1" port: 12006 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-20" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 20 host: "::1" port: 12007 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-21" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 21 host: "::1" port: 12008 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-22" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 22 host: "::1" port: 12009 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: 
"YELLOW-7932-1231c6b1-17" reason: "YELLOW-7932-1231c6b1-18" reason: "YELLOW-7932-1231c6b1-19" reason: "YELLOW-7932-1231c6b1-20" reason: "YELLOW-7932-1231c6b1-21" reason: "YELLOW-7932-1231c6b1-22" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } issue_log { id: "YELLOW-1da1" status: YELLOW message: "Multiple rings have unavailable replicas" type: "STATE_STORAGE" level: 1 } issue_log { id: "RED-ccd4-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-b30b-1-14" type: "STATE_STORAGE_RING" level: 2 listed: 3 count: 3 } issue_log { id: "RED-b30b-1-14" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 14 } } } } type: "STATE_STORAGE_NODE" level: 3 listed: 3 count: 3 } issue_log { id: "YELLOW-59f1" status: YELLOW message: "Multiple rings have unavailable replicas" type: "SCHEME_BOARD" level: 1 } issue_log { id: "RED-9bdc-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-106b-1-14" type: "SCHEME_BOARD_RING" level: 2 listed: 3 count: 3 } issue_log { id: "RED-106b-1-14" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 14 } } } } type: "SCHEME_BOARD_NODE" level: 3 listed: 3 count: 3 } issue_log { id: "YELLOW-7532" status: YELLOW message: "Multiple rings have unavailable replicas" type: "BOARD" level: 1 } issue_log { id: "RED-aaf7-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-0632-1-14" type: "BOARD_RING" level: 2 listed: 3 count: 3 } issue_log { id: "RED-0632-1-14" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 14 } } } } type: "BOARD_NODE" level: 3 listed: 3 count: 3 } location { id: 14 host: "::1" port: 12001 } |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::UpdateAndDelete [GOOD] |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest |86.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |86.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |86.0%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |86.0%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MoveGroups [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2964:2117] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2964:2117] Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3067:2106] recipient: [1:2964:2117] 2026-01-04T02:30:22.375477Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:22.376666Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:22.377051Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:22.380294Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:22.381197Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:22.381538Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:22.381573Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:22.382239Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:22.391988Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:22.392132Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:22.392330Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:22.392498Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:22.392597Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:22.392671Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3088:2106] recipient: [1:60:2107] 2026-01-04T02:30:22.406138Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:22.406379Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:22.454271Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:22.454391Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:22.454461Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:22.454553Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:22.454687Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:22.454771Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:22.454837Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:22.454902Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 
2026-01-04T02:30:22.466155Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:22.466288Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:22.477075Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:22.477223Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:22.478636Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:22.478687Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:22.478881Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:22.478928Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:22.497471Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" 
IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 150 PDiskFilter { Property { Type: ROT } } } } } 2026-01-04T02:30:22.499035Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2026-01-04T02:30:22.499103Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2026-01-04T02:30:22.499128Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2026-01-04T02:30:22.499157Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2026-01-04T02:30:22.499181Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2026-01-04T02:30:22.499207Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2026-01-04T02:30:22.499230Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2026-01-04T02:30:22.499272Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2026-01-04T02:30:22.499299Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2026-01-04T02:30:22.499333Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2026-01-04T02:30:22.499377Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2026-01-04T02:30:22.499408Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2026-01-04T02:30:22.499440Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2026-01-04T02:30:22.499462Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2026-01-04T02:30:22.499486Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2026-01-04T02:30:22.499525Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2026-01-04T02:30:22.499619Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1001 Path# 
/dev/disk2 2026-01-04T02:30:22.499644Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2026-01-04T02:30:22.499668Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2026-01-04T02:30:22.499691Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2026-01-04T02:30:22.499734Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:3 ... 8:1000 Path# /dev/disk1 2026-01-04T02:30:50.503175Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 178:1001 Path# /dev/disk2 2026-01-04T02:30:50.503232Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 178:1002 Path# /dev/disk3 2026-01-04T02:30:50.503278Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 179:1000 Path# /dev/disk1 2026-01-04T02:30:50.503336Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 179:1001 Path# /dev/disk2 2026-01-04T02:30:50.503382Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 179:1002 Path# /dev/disk3 2026-01-04T02:30:50.503425Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 180:1000 Path# /dev/disk1 2026-01-04T02:30:50.503488Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 180:1001 Path# /dev/disk2 2026-01-04T02:30:50.503542Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 180:1002 Path# /dev/disk3 2026-01-04T02:30:50.503595Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 181:1000 Path# /dev/disk1 2026-01-04T02:30:50.503651Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 181:1001 Path# /dev/disk2 2026-01-04T02:30:50.503704Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 181:1002 Path# /dev/disk3 2026-01-04T02:30:50.503759Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 182:1000 Path# /dev/disk1 2026-01-04T02:30:50.503803Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 182:1001 Path# /dev/disk2 2026-01-04T02:30:50.503860Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 182:1002 Path# /dev/disk3 2026-01-04T02:30:50.503911Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 183:1000 Path# /dev/disk1 2026-01-04T02:30:50.503966Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 183:1001 Path# /dev/disk2 2026-01-04T02:30:50.504024Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 183:1002 Path# /dev/disk3 2026-01-04T02:30:50.504077Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 184:1000 Path# /dev/disk1 2026-01-04T02:30:50.504133Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 184:1001 Path# /dev/disk2 2026-01-04T02:30:50.504203Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 184:1002 Path# /dev/disk3 2026-01-04T02:30:50.504254Z node 
151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 185:1000 Path# /dev/disk1 2026-01-04T02:30:50.504300Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 185:1001 Path# /dev/disk2 2026-01-04T02:30:50.504332Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 185:1002 Path# /dev/disk3 2026-01-04T02:30:50.504390Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 186:1000 Path# /dev/disk1 2026-01-04T02:30:50.504444Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 186:1001 Path# /dev/disk2 2026-01-04T02:30:50.504491Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 186:1002 Path# /dev/disk3 2026-01-04T02:30:50.504536Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 187:1000 Path# /dev/disk1 2026-01-04T02:30:50.504578Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 187:1001 Path# /dev/disk2 2026-01-04T02:30:50.504621Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 187:1002 Path# /dev/disk3 2026-01-04T02:30:50.504664Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 188:1000 Path# /dev/disk1 2026-01-04T02:30:50.504708Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 188:1001 Path# /dev/disk2 2026-01-04T02:30:50.504753Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 188:1002 Path# /dev/disk3 2026-01-04T02:30:50.504812Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 189:1000 Path# /dev/disk1 2026-01-04T02:30:50.504856Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 189:1001 Path# /dev/disk2 2026-01-04T02:30:50.504902Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 189:1002 Path# /dev/disk3 2026-01-04T02:30:50.504944Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 190:1000 Path# /dev/disk1 2026-01-04T02:30:50.504987Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 190:1001 Path# /dev/disk2 2026-01-04T02:30:50.505035Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 190:1002 Path# /dev/disk3 2026-01-04T02:30:50.505080Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 191:1000 Path# /dev/disk1 2026-01-04T02:30:50.505126Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 191:1001 Path# /dev/disk2 2026-01-04T02:30:50.505169Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 191:1002 Path# /dev/disk3 2026-01-04T02:30:50.505197Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 192:1000 Path# /dev/disk1 2026-01-04T02:30:50.505237Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 192:1001 Path# /dev/disk2 2026-01-04T02:30:50.505279Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 192:1002 Path# /dev/disk3 2026-01-04T02:30:50.505329Z node 
151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 193:1000 Path# /dev/disk1 2026-01-04T02:30:50.505376Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 193:1001 Path# /dev/disk2 2026-01-04T02:30:50.505426Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 193:1002 Path# /dev/disk3 2026-01-04T02:30:50.505475Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 194:1000 Path# /dev/disk1 2026-01-04T02:30:50.505538Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 194:1001 Path# /dev/disk2 2026-01-04T02:30:50.505585Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 194:1002 Path# /dev/disk3 2026-01-04T02:30:50.505633Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 195:1000 Path# /dev/disk1 2026-01-04T02:30:50.505664Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 195:1001 Path# /dev/disk2 2026-01-04T02:30:50.505709Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 195:1002 Path# /dev/disk3 2026-01-04T02:30:50.505755Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 196:1000 Path# /dev/disk1 2026-01-04T02:30:50.505801Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 196:1001 Path# /dev/disk2 2026-01-04T02:30:50.507129Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 196:1002 Path# /dev/disk3 2026-01-04T02:30:50.507181Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 197:1000 Path# /dev/disk1 2026-01-04T02:30:50.507229Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 197:1001 Path# /dev/disk2 2026-01-04T02:30:50.507293Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 197:1002 Path# /dev/disk3 2026-01-04T02:30:50.507346Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 198:1000 Path# /dev/disk1 2026-01-04T02:30:50.507397Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 198:1001 Path# /dev/disk2 2026-01-04T02:30:50.507458Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 198:1002 Path# /dev/disk3 2026-01-04T02:30:50.507505Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 199:1000 Path# /dev/disk1 2026-01-04T02:30:50.507565Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 199:1001 Path# /dev/disk2 2026-01-04T02:30:50.507621Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 199:1002 Path# /dev/disk3 2026-01-04T02:30:50.507649Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 200:1000 Path# /dev/disk1 2026-01-04T02:30:50.507689Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 200:1001 Path# /dev/disk2 2026-01-04T02:30:50.507733Z node 151 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 200:1002 Path# /dev/disk3 2026-01-04T02:30:50.890265Z node 
151 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.394112s 2026-01-04T02:30:50.890513Z node 151 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.394392s 2026-01-04T02:30:50.928140Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2026-01-04T02:30:51.027355Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 1 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 1 ExplicitGroupId: 2147483748 } } } 2026-01-04T02:30:51.061699Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2026-01-04T02:30:51.189546Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 2 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 2 ExplicitGroupId: 2147483749 } } } 2026-01-04T02:30:51.207459Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } 2026-01-04T02:30:51.306994Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { MoveGroups { BoxId: 1 OriginStoragePoolId: 2 OriginStoragePoolGeneration: 3 TargetStoragePoolId: 1 TargetStoragePoolGeneration: 3 } } } 2026-01-04T02:30:51.326256Z node 151 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { QueryBaseConfig { } } } |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest |86.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ut/ydb-core-security-ut |86.0%| [LD] {RESULT} $(B)/ydb/core/security/ut/ydb-core-security-ut |86.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ut/ydb-core-security-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 9577, MsgBus: 14431 2026-01-04T02:30:50.969036Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328283447846535:2139];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:50.969072Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:51.390040Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:51.398455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:51.398569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:51.506435Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-04T02:30:51.627496Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328283447846424:2081] 1767493850960653 != 1767493850960656 2026-01-04T02:30:51.633241Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:51.677789Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:51.867033Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:51.867058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:51.867066Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:51.867166Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:51.988780Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:52.488779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:52.498683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:30:52.625024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:52.788938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.048971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:53.141399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.053276Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328304922684777:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool 
info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.053461Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.058202Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328304922684787:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.058299Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.445595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.500523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.534249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.577569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.611505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.652104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.688765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.728577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.827924Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328304922685659:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.828014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.828340Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328304922685664:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.828382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328304922685665:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.828424Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.832552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:55.850371Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328304922685668:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:30:55.950415Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328304922685719:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:55.971043Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328283447846535:2139];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:55.974714Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 >> CellsFromTupleTest::CellsFromTupleSuccess [GOOD] >> CellsFromTupleTest::CellsFromTupleSuccessPg >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] |86.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> CellsFromTupleTest::CellsFromTupleSuccessPg [GOOD] >> CellsFromTupleTest::CellsFromTupleFails [GOOD] >> CellsFromTupleTest::CellsFromTupleFailsPg [GOOD] >> CompressionTests::Zstd [GOOD] >> CompressionTests::Unsupported [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] |86.0%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink >> ConvertYdbPermissionNameToACLAttrs::TestEqualGranularAndDeprecatedAcl [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty >> ConvertMiniKQLValueToYdbValueTest::SimpleBool [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalString [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty [GOOD] >> ConvertMiniKQLValueToYdbValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::List [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalOptionalEmpty2 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::List [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] >> TSchemeShardMoveTest::Index [GOOD] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_11_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 11] [GOOD] |86.0%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> ConvertMiniKQLValueToYdbValueTest::Void [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Struct [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Tuple [GOOD] >> ConvertMiniKQLValueToYdbValueTest::Variant [GOOD] >> ConvertTableDescription::StorageSettings |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::DecimalType [GOOD] |86.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest >> 
ConvertTableDescription::StorageSettings [GOOD] >> ConvertTableDescription::ColumnFamilies [GOOD] >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] |86.1%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-69 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 >> ConvertMiniKQLTypeToYdbTypeTest::SimpleType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDate [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Optional [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::List ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::UpdateAndDelete [GOOD] Test command err: Trying to start YDB, gRPC: 8034, MsgBus: 13867 2026-01-04T02:30:50.807939Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328283401498984:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:50.808748Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:51.185561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:51.185686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:51.209659Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:51.220468Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:51.232344Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:51.237867Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328283401498924:2081] 1767493850798706 != 1767493850798709 2026-01-04T02:30:51.438595Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:51.518432Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:51.518452Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:51.518463Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:51.518536Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:51.818531Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:52.230427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:52.238403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:30:52.291049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:52.433251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:52.631199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:52.710875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:54.702661Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328300581369982:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:54.702756Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:54.703164Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328300581369992:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:54.703235Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.110377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.153984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.206731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.262414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.301669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.378505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.413912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.461112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:30:55.563961Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328304876338159:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.564083Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.564422Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328304876338165:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.564475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328304876338164:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.564882Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:55.569188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:30:55.586516Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328304876338168:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:30:55.685025Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328304876338221:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:55.808069Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328283401498984:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:55.808176Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::Dict [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-57 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 >> ConvertMiniKQLTypeToYdbTypeTest::List [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Struct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Dict [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-45 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 |86.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::Index [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:30:51.888131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:30:51.888241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:51.888291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:30:51.888326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:30:51.888365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:30:51.888397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:30:51.888455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:51.888506Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:30:51.889109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:30:51.889359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:30:51.971503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:30:51.971579Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:51.986894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:30:51.991010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:30:51.991244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:30:51.997592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:30:51.997847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:30:51.998569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:51.998839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:30:52.005394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:52.005606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:30:52.006973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:52.007055Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:52.007163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:30:52.007210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:30:52.007258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:30:52.007406Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.014391Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:241:2058] recipient: [1:15:2062] 2026-01-04T02:30:52.145269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:30:52.145490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.145652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:30:52.145699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:30:52.145996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:30:52.146063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:52.150918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:52.151171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:30:52.151410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.151467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:30:52.151513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:30:52.151568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:30:52.156458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.156544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:30:52.156603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:30:52.159870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.159950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.160038Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:52.160114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:30:52.163843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:30:52.166354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:30:52.166592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:30:52.167749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:52.167909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:30:52.167957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:52.168316Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:30:52.168389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:52.168610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:30:52.168720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:30:52.175480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:52.175541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
7594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:31:01.940522Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Sync" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:31:01.940917Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Sync" took 433us result status StatusSuccess 2026-01-04T02:31:01.941997Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Sync" PathDescription { Self { Name: "Sync" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 11 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 10 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: 
"!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Sync" LocalPathId: 10 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value0" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value0" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 10 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:31:01.946917Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableMove/Async" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:31:02.043261Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableMove/Async" took 96.3ms result status StatusSuccess 2026-01-04T02:31:02.044381Z node 2 :SCHEMESHARD_DESCRIBE 
DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableMove/Async" PathDescription { Self { Name: "Async" PathId: 8 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 7 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 8 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeAsyncIndexImplTable Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 5 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "Async" LocalPathId: 8 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value1" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 
100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 8 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildSyncIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildAsyncIndex >> ConvertMiniKQLValueToYdbValueTest::SimpleInt32 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleInt64 [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDate [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzDateTime [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleTzTimeStamp [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleDecimal [GOOD] >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] |86.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::Dict [GOOD] |86.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/ydb-core-tx-schemeshard-ut_auditsettings |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32 [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDate [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzDateTime [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleTzTimeStamp [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleInt32TypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> TSchemeShardMoveTest::ReplaceVectorIndex [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzDateTime [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::TTzTimeStamp [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::UuidType [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantTuple [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::VariantStruct [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Void [GOOD] >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbPermissionNameToACLAttrs::SimpleConvertGood [GOOD] |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest >> TSchemeShardMoveTest::OneTable [GOOD] >> 
ConvertYdbValueToMiniKQLValueTest::Void [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleUuidTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Struct >> ConvertYdbValueToMiniKQLValueTest::Struct [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Tuple [GOOD] >> ConvertYdbValueToMiniKQLValueTest::Variant [GOOD] >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 >> ConvertYdbValueToMiniKQLValueTest::SimpleBool [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleBoolTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimal [GOOD] >> ConvertYdbValueToMiniKQLValueTest::SimpleDecimalTypeMissmatch [GOOD] >> ConvertYdbValueToMiniKQLValueTest::OptionalString [GOOD] >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::PgType [GOOD] |86.1%| [LD] {RESULT} $(B)/ydb/library/table_creator/ut/ydb-library-table_creator-ut |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |86.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLValueToYdbValueTest::SimpleUuid [GOOD] |86.1%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertMiniKQLTypeToYdbTypeTest::Tuple [GOOD] |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::SimpleUuid [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-19 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 |86.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |86.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |86.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/ydb-core-tx-schemeshard-ut_external_table |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::VariantIndexUnderflow [GOOD] |86.1%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |86.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/ydb_convert/ut/unittest >> ConvertYdbValueToMiniKQLValueTest::PgValue [GOOD] |86.1%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock-UseSink |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |86.1%| [LD] {RESULT} 
$(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/persqueue_cluster_discovery/ut/ydb-services-persqueue_cluster_discovery-ut |86.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |86.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |86.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration |86.1%| [LD] {RESULT} $(B)/ydb/services/fq/ut_integration/ydb-services-fq-ut_integration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::OneTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:30:51.813249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:30:51.813337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:51.813377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:30:51.813412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:30:51.813467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:30:51.813534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:30:51.813605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:51.813686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:30:51.814712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:30:51.815050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:30:51.918518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:30:51.918578Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:51.943991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:30:51.949339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:30:51.949600Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:30:51.971286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:30:51.971530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:30:51.972327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:51.972611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:30:51.984249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:51.984450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:30:51.985984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:51.986065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:51.986168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:30:51.986211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:30:51.986255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:30:51.986398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.000388Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:241:2058] recipient: [1:15:2062] 2026-01-04T02:30:52.185324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:30:52.185555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.185928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:30:52.186012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:30:52.186263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose 
status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:30:52.186334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:52.190623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:52.190857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:30:52.191079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.191132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:30:52.191175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:30:52.191231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:30:52.193607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.193669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:30:52.193709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:30:52.203380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.203458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.203543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:52.203641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:30:52.208840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:30:52.211195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:30:52.211418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, 
partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:30:52.212612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:52.212776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:30:52.212820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:52.213190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:30:52.213267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:52.213463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:30:52.213573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:30:52.215839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:52.215874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
ecute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-04T02:31:03.354841Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2026-01-04T02:31:03.354991Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-04T02:31:03.355042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 108:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:31:03.355109Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 129 -> 240 2026-01-04T02:31:03.356123Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2026-01-04T02:31:03.356261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 23 PathOwnerId: 72057594046678944, cookie: 108 2026-01-04T02:31:03.356311Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2026-01-04T02:31:03.356364Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 23 2026-01-04T02:31:03.356414Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:31:03.357556Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2026-01-04T02:31:03.357646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 4 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 108 2026-01-04T02:31:03.357692Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2026-01-04T02:31:03.357733Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 4], version: 18446744073709551615 2026-01-04T02:31:03.357770Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 4 2026-01-04T02:31:03.357926Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2026-01-04T02:31:03.360994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-04T02:31:03.361065Z 
node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 108:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:31:03.361386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 3 2026-01-04T02:31:03.361532Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-04T02:31:03.361579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-04T02:31:03.361627Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-04T02:31:03.361666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-04T02:31:03.361708Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2026-01-04T02:31:03.361822Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:342:2319] message: TxId: 108 2026-01-04T02:31:03.361880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-04T02:31:03.361926Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2026-01-04T02:31:03.361968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 108:0 2026-01-04T02:31:03.362092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 2 2026-01-04T02:31:03.364805Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-01-04T02:31:03.365181Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-01-04T02:31:03.403533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-04T02:31:03.403630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:841:2796] TestWaitNotification: OK eventTxId 108 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted 2026-01-04T02:31:03.404514Z node 2 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2026-01-04T02:31:03.404599Z node 2 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 Deleted tabletId 72075186233409547 2026-01-04T02:31:03.429879Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046678944, message: Source { RawX1: 314 RawX2: 8589936891 } TabletId: 72075186233409546 State: 4 2026-01-04T02:31:03.430037Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 
72075186233409546, state: Offline, at schemeshard: 72057594046678944 2026-01-04T02:31:03.433587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-04T02:31:03.433735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:31:03.434330Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-04T02:31:03.434550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-04T02:31:03.434897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 4] was 1 Forgetting tablet 72075186233409546 2026-01-04T02:31:03.437689Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:31:03.437757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 4], at schemeshard: 72057594046678944 2026-01-04T02:31:03.438126Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:31:03.442216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:31:03.442334Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-04T02:31:03.442445Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 Deleted tabletId 72075186233409546 2026-01-04T02:31:03.443478Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:31:03.443727Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 280us result status StatusSuccess 2026-01-04T02:31:03.444255Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 23 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 23 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 21 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_move/unittest >> TSchemeShardMoveTest::ReplaceVectorIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:30:51.693109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:30:51.693206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:51.693248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:30:51.693301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:30:51.693346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:30:51.693379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:30:51.693466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:30:51.693536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:30:51.694475Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:30:51.694865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:30:51.795317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:30:51.795383Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:51.809733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:30:51.814706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:30:51.814934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:30:51.820808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:30:51.821070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:30:51.821916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:51.822192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:30:51.826808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:51.827037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:30:51.828405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:51.828489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:30:51.828597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:30:51.828651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:30:51.828696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:30:51.828846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:30:51.837105Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:241:2058] recipient: [1:15:2062] 2026-01-04T02:30:51.999801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2026-01-04T02:30:52.000053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.000281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:30:52.000343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:30:52.000596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:30:52.000668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:52.005236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:52.005455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:30:52.005675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.005742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:30:52.005827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:30:52.005892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:30:52.010834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.010909Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:30:52.010957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:30:52.013536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.013613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:30:52.013683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:52.013744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2026-01-04T02:30:52.022783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:30:52.025319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:30:52.025552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:30:52.026796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:30:52.026989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:30:52.027039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:52.027388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:30:52.027460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:30:52.027667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:30:52.027778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:30:52.030354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:30:52.030404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
26-01-04T02:31:03.070760Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 65], version: 18446744073709551615 2026-01-04T02:31:03.070801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 65] was 4 2026-01-04T02:31:03.071408Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 66 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-01-04T02:31:03.071481Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 66 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-01-04T02:31:03.071519Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2026-01-04T02:31:03.071557Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 66], version: 18446744073709551615 2026-01-04T02:31:03.071597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 66] was 3 2026-01-04T02:31:03.072158Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 67 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-01-04T02:31:03.072265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 67 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-01-04T02:31:03.072348Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2026-01-04T02:31:03.072378Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 67], version: 18446744073709551615 2026-01-04T02:31:03.072409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 67] was 5 2026-01-04T02:31:03.072476Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 4/6, is published: true 2026-01-04T02:31:03.073619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:4, at schemeshard: 72057594046678944 2026-01-04T02:31:03.073675Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 107:4 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:31:03.076815Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount 
reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 67] was 4 2026-01-04T02:31:03.076960Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:4 progress is 5/6 2026-01-04T02:31:03.077010Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 5/6 2026-01-04T02:31:03.077049Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:4 progress is 5/6 2026-01-04T02:31:03.077081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 5/6 2026-01-04T02:31:03.077134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 5/6, is published: true 2026-01-04T02:31:03.077635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-04T02:31:03.077694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-04T02:31:03.077755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-04T02:31:03.078221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:2, at schemeshard: 72057594046678944 2026-01-04T02:31:03.078274Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 107:2 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:31:03.078502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 65] was 3 2026-01-04T02:31:03.078594Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:2 progress is 6/6 2026-01-04T02:31:03.078623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 6/6 2026-01-04T02:31:03.078671Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:2 progress is 6/6 2026-01-04T02:31:03.078704Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 6/6 2026-01-04T02:31:03.078736Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 6/6, is published: true 2026-01-04T02:31:03.078833Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:740:2717] message: TxId: 107 2026-01-04T02:31:03.078897Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 6/6 2026-01-04T02:31:03.078978Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-01-04T02:31:03.079030Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:0 2026-01-04T02:31:03.079160Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate 
target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-04T02:31:03.079207Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:1 2026-01-04T02:31:03.079233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:1 2026-01-04T02:31:03.079266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 64] was 4 2026-01-04T02:31:03.079289Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:2 2026-01-04T02:31:03.079308Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:2 2026-01-04T02:31:03.079357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 65] was 2 2026-01-04T02:31:03.079385Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:3 2026-01-04T02:31:03.079405Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:3 2026-01-04T02:31:03.079447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 66] was 2 2026-01-04T02:31:03.079478Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:4 2026-01-04T02:31:03.079510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:4 2026-01-04T02:31:03.079570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 67] was 3 2026-01-04T02:31:03.079603Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:5 2026-01-04T02:31:03.079625Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:5 2026-01-04T02:31:03.079706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 68] was 1 2026-01-04T02:31:03.080855Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:31:03.080920Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 68], at schemeshard: 72057594046678944 2026-01-04T02:31:03.081006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 67] was 2 2026-01-04T02:31:03.097686Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-04T02:31:03.097935Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-04T02:31:03.098007Z 
node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-04T02:31:03.098078Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-04T02:31:03.107292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-04T02:31:03.107478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-04T02:31:03.107739Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-04T02:31:03.107798Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [2:4121:5870] 2026-01-04T02:31:03.109876Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 |86.1%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |86.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/ydb_convert/ut/unittest >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] |86.1%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |86.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_move/unittest |86.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |86.1%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |86.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |86.1%| [TA] $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |86.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.2%| [TA] {RESULT} $(B)/ydb/core/ydb_convert/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |86.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::ExtendBoxAndStoragePool [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2964:2117] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:3064:2106] recipient: [1:2964:2117] Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3067:2106] recipient: [1:2964:2117] 2026-01-04T02:30:22.436181Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:22.437053Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:22.437362Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:22.439009Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:22.439613Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:22.439863Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:22.439897Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:22.440138Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:22.447917Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:22.448024Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:22.448143Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:22.448250Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:22.448321Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:22.448371Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:3066:2119] sender: [1:3088:2106] recipient: [1:60:2107] 2026-01-04T02:30:22.459756Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:22.459889Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:22.501190Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:22.501332Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 
2026-01-04T02:30:22.501410Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:22.501480Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:22.501705Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:22.501823Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:22.501865Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:22.501913Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:22.513268Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:22.513402Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:22.524213Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:22.524394Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:22.525795Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:22.525873Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:22.526067Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:22.526112Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:22.542486Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" 
IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 60 PDiskFilter { Property { Type: ROT } } } } Command { QueryBaseConfig { } } } 2026-01-04T02:30:22.543333Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# /dev/disk1 2026-01-04T02:30:22.543395Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# /dev/disk2 2026-01-04T02:30:22.543420Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# /dev/disk3 2026-01-04T02:30:22.543444Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1000 Path# /dev/disk1 2026-01-04T02:30:22.543467Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1001 Path# /dev/disk2 2026-01-04T02:30:22.543491Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 2:1002 Path# /dev/disk3 2026-01-04T02:30:22.543514Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1000 Path# /dev/disk1 2026-01-04T02:30:22.543555Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1001 Path# /dev/disk2 2026-01-04T02:30:22.543599Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 3:1002 Path# /dev/disk3 2026-01-04T02:30:22.543630Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1000 Path# /dev/disk1 2026-01-04T02:30:22.543657Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1001 Path# /dev/disk2 2026-01-04T02:30:22.543681Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 4:1002 Path# /dev/disk3 2026-01-04T02:30:22.543702Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1000 Path# /dev/disk1 2026-01-04T02:30:22.543725Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1001 Path# /dev/disk2 2026-01-04T02:30:22.543754Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 5:1002 Path# /dev/disk3 2026-01-04T02:30:22.543793Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1000 Path# /dev/disk1 2026-01-04T02:30:22.543833Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1001 Path# /dev/disk2 2026-01-04T02:30:22.543866Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 6:1002 Path# /dev/disk3 2026-01-04T02:30:22.543897Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1000 Path# /dev/disk1 2026-01-04T02:30:22.543937Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 7:1001 Path# /dev/disk2 2026-01-04T02:30:22.543961Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk 
PDiskId# 7:1002 Path# /dev/disk3 2026-01-04T02:30:22.543982Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1000 Path# /dev/disk1 2026-01-04T02:30:22.544004Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1001 Path# /dev/disk2 2026-01-04T02:30:22.544044Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 8:1002 Path# /dev/disk3 2026-01-04T02:30:22.544068Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1000 Path# /dev/disk1 2026-01-04T02:30:22.544090Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1001 Path# /dev/disk2 2026-01-04T02:30:22.544113Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 9:1002 Path# /dev/disk3 2026-01-04T02:30:22.544135Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1000 Path# /dev/disk1 2026-01-04T02:30:22.544175Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1001 Path# /dev/disk2 2026-01-04T02:30:22.544223Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 10:1002 Path# /dev/disk3 2026-01-04T02:30:22.544253Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 11:1000 Path# /dev/disk1 2026-01-04T02:30:22.544281Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 11:1001 Path# /dev/disk2 2026-01-04T02:30:22.544306Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Cr ... R NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 204:1000 Path# /dev/disk1 2026-01-04T02:30:54.204871Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 204:1001 Path# /dev/disk2 2026-01-04T02:30:54.204894Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 204:1002 Path# /dev/disk3 2026-01-04T02:30:54.204920Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 205:1000 Path# /dev/disk1 2026-01-04T02:30:54.204944Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 205:1001 Path# /dev/disk2 2026-01-04T02:30:54.204969Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 205:1002 Path# /dev/disk3 2026-01-04T02:30:54.204993Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 206:1000 Path# /dev/disk1 2026-01-04T02:30:54.205017Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 206:1001 Path# /dev/disk2 2026-01-04T02:30:54.205042Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 206:1002 Path# /dev/disk3 2026-01-04T02:30:54.205066Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 207:1000 Path# /dev/disk1 2026-01-04T02:30:54.205091Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 207:1001 Path# /dev/disk2 2026-01-04T02:30:54.205116Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 207:1002 Path# /dev/disk3 2026-01-04T02:30:54.205140Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create 
new pdisk PDiskId# 208:1000 Path# /dev/disk1 2026-01-04T02:30:54.205165Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 208:1001 Path# /dev/disk2 2026-01-04T02:30:54.205190Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 208:1002 Path# /dev/disk3 2026-01-04T02:30:54.205220Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 209:1000 Path# /dev/disk1 2026-01-04T02:30:54.205244Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 209:1001 Path# /dev/disk2 2026-01-04T02:30:54.205270Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 209:1002 Path# /dev/disk3 2026-01-04T02:30:54.205294Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 210:1000 Path# /dev/disk1 2026-01-04T02:30:54.205319Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 210:1001 Path# /dev/disk2 2026-01-04T02:30:54.205343Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 210:1002 Path# /dev/disk3 2026-01-04T02:30:54.568119Z node 161 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.368865s 2026-01-04T02:30:54.568338Z node 161 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.369118s 2026-01-04T02:30:54.613766Z node 161 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 4 Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" SharedWithOs: true } Drive { Path: "/dev/disk3" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "first box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12028 } 
HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 4 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 4 } ItemConfigGeneration: 1 } } Command { DefineStoragePool { BoxId: 1 StoragePoolId: 1 Name: "first storage pool" ErasureSpecies: "block-4-2" VDiskKind: "Default" NumGroups: 180 PDiskFilter { Property { Type: ROT } } ItemConfigGeneration: 1 } } Command { QueryBaseConfig { } } } 2026-01-04T02:30:54.615850Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 211:1000 Path# /dev/disk1 2026-01-04T02:30:54.615917Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 211:1001 Path# /dev/disk2 2026-01-04T02:30:54.615948Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 211:1002 Path# /dev/disk3 2026-01-04T02:30:54.615975Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 212:1000 Path# /dev/disk1 2026-01-04T02:30:54.616005Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 212:1001 Path# /dev/disk2 2026-01-04T02:30:54.616050Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 212:1002 Path# /dev/disk3 2026-01-04T02:30:54.616076Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 213:1000 Path# /dev/disk1 2026-01-04T02:30:54.616102Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 213:1001 Path# /dev/disk2 2026-01-04T02:30:54.616127Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 213:1002 Path# /dev/disk3 2026-01-04T02:30:54.616151Z node 161 
:BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 214:1000 Path# /dev/disk1 2026-01-04T02:30:54.616195Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 214:1001 Path# /dev/disk2 2026-01-04T02:30:54.616238Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 214:1002 Path# /dev/disk3 2026-01-04T02:30:54.616266Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 215:1000 Path# /dev/disk1 2026-01-04T02:30:54.616306Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 215:1001 Path# /dev/disk2 2026-01-04T02:30:54.616350Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 215:1002 Path# /dev/disk3 2026-01-04T02:30:54.616377Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 216:1000 Path# /dev/disk1 2026-01-04T02:30:54.616404Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 216:1001 Path# /dev/disk2 2026-01-04T02:30:54.616440Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 216:1002 Path# /dev/disk3 2026-01-04T02:30:54.616475Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 217:1000 Path# /dev/disk1 2026-01-04T02:30:54.616502Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 217:1001 Path# /dev/disk2 2026-01-04T02:30:54.616527Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 217:1002 Path# /dev/disk3 2026-01-04T02:30:54.616551Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 218:1000 Path# /dev/disk1 2026-01-04T02:30:54.616585Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 218:1001 Path# /dev/disk2 2026-01-04T02:30:54.616630Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 218:1002 Path# /dev/disk3 2026-01-04T02:30:54.616664Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 219:1000 Path# /dev/disk1 2026-01-04T02:30:54.616694Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 219:1001 Path# /dev/disk2 2026-01-04T02:30:54.616736Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 219:1002 Path# /dev/disk3 2026-01-04T02:30:54.616763Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 220:1000 Path# /dev/disk1 2026-01-04T02:30:54.616797Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 220:1001 Path# /dev/disk2 2026-01-04T02:30:54.616831Z node 161 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 220:1002 Path# /dev/disk3 |86.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest |86.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |86.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |86.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |86.2%| [TS] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/data_integrity/unittest |86.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} |86.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |86.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |86.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |86.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} |86.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock+UseSink |86.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |86.2%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> DataShardVolatile::VolatileTxAbortedOnSplit [GOOD] >> DataShardVolatile::VolatileTxAbortedOnDrop >> TTicketParserTest::LoginGood >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 >> TTicketParserTest::NebiusAuthenticationUnavailable >> AuthorizeRequestToAccessService::CanAuthorizeYdbInAccessService >> TTicketParserTest::AuthorizationRetryError |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TTicketParserTest::CanGetErrorIfAppropriateLoginProviderIsAbsent >> TTicketParserTest::AuthenticationWithUserAccount >> TTicketParserTest::BulkAuthorizationRetryError >> TTicketParserTest::TicketFromCertificateCheckIssuerGood >> TTicketParserTest::LoginRefreshGroupsWithError |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.2%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest >> Cdc::ShouldBreakLocksOnConcurrentAlterTable [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAddIndex |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.2%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |86.2%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |86.2%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.2%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.2%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |86.2%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |86.2%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 >> KqpSystemView::PartitionStatsFollower [GOOD] >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists >> GroupWriteTest::WriteHardRateDispatcher [GOOD] >> TExternalTableTest::ReplaceExternalTableIfNotExists >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 >> Yq_1::DescribeConnection >> Yq_1::DeleteConnections >> Yq_1::CreateConnection_With_Existing_Name >> KqpSysColV1::StreamInnerJoinSelect [GOOD] ------- 
[TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsFollower [GOOD] Test command err: Trying to start YDB, gRPC: 12973, MsgBus: 28004 2026-01-04T02:30:40.779231Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328239886323140:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:30:40.779376Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:30:41.116610Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:30:41.125528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:30:41.125721Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:30:41.159732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:30:41.256035Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:30:41.308100Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:30:41.308127Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:30:41.308138Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:30:41.308243Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:30:41.348482Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:30:41.759590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:30:41.786555Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:30:42.124361Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7591328239886323433:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:30:42.124405Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:30:42.124460Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7591328239886323433:2146], Recipient [1:7591328239886323433:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:30:42.124473Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:30:43.125704Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7591328239886323433:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:30:43.125743Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:30:43.125795Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7591328239886323433:2146], Recipient [1:7591328239886323433:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:30:43.125823Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:30:44.126426Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7591328239886323433:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:30:44.126473Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:30:44.126544Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7591328239886323433:2146], Recipient [1:7591328239886323433:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:30:44.126558Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:30:44.292814Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328257066193151:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:44.292991Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:44.293679Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328257066193161:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:44.293728Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:30:44.526574Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:7591328257066193178:2509], Recipient [1:7591328239886323433:2146]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:30:44.526613Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:30:44.526636Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-04T02:30:44.526672Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [1:7591328257066193174:2506], Recipient [1:7591328239886323433:2146]: {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-04T02:30:44.526683Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-04T02:30:44.641240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-04T02:30:44.641707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, at schemeshard: 72057594046644480 2026-01-04T02:30:44.641891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /Root/Followers, opId: 281474976710658:0, schema: Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false, at schemeshard: 72057594046644480 2026-01-04T02:30:44.642468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 1], parent name: Root, child name: Followers, child id: [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2026-01-04T02:30:44.642530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 0 2026-01-04T02:30:44.642554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710658:0 type: TxCreateTable target path: [OwnerId: 72057594046644480, LocalPathId: 38] source path: 2026-01-04T02:30:44.642606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 
72057594046644480, LocalPathId: 38] was 1 2026-01-04T02:30:44.642737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new path created for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-04T02:30:44.642756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710658:0 1 -> 2 2026-01-04T02:30:44.642788Z node 1 :SYSTEM_VIEWS TRACE: partition_stats.cpp:83: TEvSysView::TEvSetPartitioning: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 38] path /Root/Followers ShardIndices size 1 2026-01-04T02:30:44.643478Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_create_table.cpp:766: TCreateTable Propose creating new table opId# 281474976710658:0 path# /Root/Followers pathId# [OwnerId: 72057594046644480, LocalPathId: 38] schemeshard# 72057594046644480 tx# WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "Followers" Columns { Name: "Key" Type: "Uint64" NotNull: false } Columns { Name: "Value" Type: "String" NotNull: false } KeyColumnNames: "Key" PartitionConfig { ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } FollowerGroups { FollowerCount: 3 RequireAllDataCenters: false } } Temporary: false } FailOnExist: false 2026-01-04T02:30:44.643613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:30:44.643645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB ... 
rdId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=2, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], pathId map=Followers, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-04T02:31:04.874870Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037888, followerId 2 2026-01-04T02:31:04.874910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:393: Do not want to split tablet 72075186224037888 by the CPU load from the follower ID 2, reason: SplitByLoadNotEnabledForTable 2026-01-04T02:31:04.874970Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T02:31:04.875262Z node 1 :SYSTEM_VIEWS TRACE: partition_stats.cpp:152: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 38] shardIdx 72057594046644480 1 followerId 0 stats DataSize: 800 RowCount: 4 IndexSize: 0 CPUCores: 0.001045 TabletId: 72075186224037888 NodeId: 1 StartTime: 1767493844653 AccessTime: 1767493845324 UpdateTime: 1767493845167 InFlightTxCount: 0 RowUpdates: 4 RowDeletes: 0 RowReads: 1 RangeReads: 0 RangeReadRows: 0 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 0 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-04T02:31:04.875376Z node 1 :SYSTEM_VIEWS TRACE: partition_stats.cpp:152: TEvSysView::TEvSendPartitionStats: domainKey [OwnerId: 72057594046644480, LocalPathId: 1] pathId [OwnerId: 72057594046644480, LocalPathId: 38] shardIdx 72057594046644480 1 followerId 2 stats DataSize: 0 RowCount: 0 IndexSize: 0 CPUCores: 0.000139 TabletId: 72075186224037888 NodeId: 1 StartTime: 1767493844770 AccessTime: 0 UpdateTime: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 1 RangeReadRows: 2 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 ByKeyFilterSize: 0 FollowerId: 2 LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-04T02:31:04.882050Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [1:7591328239886323433:2146]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-04T02:31:04.882099Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-04T02:31:04.882116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-01-04T02:31:05.005054Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:595: Handle TEvPrivate::TEvProcessInterval: service id# [1:7591328239886323067:2070], interval end# 2026-01-04T02:31:05.000000Z, event interval end# 2026-01-04T02:31:05.000000Z 2026-01-04T02:31:05.005102Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:408: Rotate logs: service id# [1:7591328239886323067:2070], query logs count# 0, processor ids count# 0, processor id to database count# 0 2026-01-04T02:31:05.006024Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:595: Handle TEvPrivate::TEvProcessInterval: service id# [1:7591328239886323127:2078], interval end# 2026-01-04T02:31:05.000000Z, event interval end# 2026-01-04T02:31:05.000000Z 2026-01-04T02:31:05.006059Z node 1 :SYSTEM_VIEWS DEBUG: sysview_service.cpp:408: Rotate logs: service id# 
[1:7591328239886323127:2078], query logs count# 1, processor ids count# 1, processor id to database count# 0 2026-01-04T02:31:05.165590Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7591328239886323433:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:31:05.165648Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:31:05.165696Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7591328239886323433:2146], Recipient [1:7591328239886323433:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:31:05.165713Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:31:06.166020Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7591328239886323433:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:31:06.166066Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:31:06.166122Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7591328239886323433:2146], Recipient [1:7591328239886323433:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:31:06.166142Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:31:06.827597Z node 1 :SYSTEM_VIEWS INFO: sysview_service.cpp:888: Navigate by database succeeded: service id# [1:7591328239886323127:2078], database# /Root, no sysview processor 2026-01-04T02:31:07.167087Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7591328239886323433:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:31:07.167131Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:31:07.167181Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7591328239886323433:2146], Recipient [1:7591328239886323433:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:31:07.167198Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime ... 
SELECT from partition_stats for /Root/Followers , attempt 2 2026-01-04T02:31:08.170200Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7591328239886323433:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:31:08.170240Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:31:08.170282Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7591328239886323433:2146], Recipient [1:7591328239886323433:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:31:08.170298Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:31:08.418636Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [1:7591328360145408863:2495], owner: [1:7591328360145408860:2493], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2026-01-04T02:31:08.426526Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [1:7591328360145408863:2495], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2026-01-04T02:31:08.426819Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274595843, Sender [1:7591328360145408863:2495], Recipient [1:7591328239886323433:2146]: NKikimrSysView.TEvGetPartitionStats DomainKeyOwnerId: 72057594046644480 DomainKeyPathId: 1 From { } FromInclusive: true To { } ToInclusive: false IncludePathColumn: true 2026-01-04T02:31:08.426843Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5313: StateWork, processing event NSysView::TEvSysView::TEvGetPartitionStats 2026-01-04T02:31:08.427072Z node 1 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [1:7591328360145408863:2495], row count: 2, finished: 1 2026-01-04T02:31:08.427148Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [1:7591328360145408863:2495], owner: [1:7591328360145408860:2493], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2026-01-04T02:31:08.429882Z node 1 :SYSTEM_VIEWS TRACE: sysview_service.cpp:902: Collect query stats: service id# [1:7591328239886323127:2078], database# /Root, query hash# 3266603936201095014, cpu time# 483495 SELECT * FROM `/Root/.sys/partition_stats` WHERE FollowerId != 0 AND (RowReads != 0 OR RangeReadRows != 0) AND Path = '/Root/Followers' ... 
SELECT from partition_stats, attempt 0 2026-01-04T02:31:09.105999Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:47: Scan started, actor: [1:7591328364440376181:2505], owner: [1:7591328364440376178:2503], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2026-01-04T02:31:09.110883Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:329: Scan prepared, actor: [1:7591328364440376181:2505], schemeshard id: 72057594046644480, hive id: 72057594037968897, database: /Root, database owner: root@builtin, domain key: [OwnerId: 72057594046644480, LocalPathId: 1], database node count: 1 2026-01-04T02:31:09.111146Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274595843, Sender [1:7591328364440376181:2505], Recipient [1:7591328239886323433:2146]: NKikimrSysView.TEvGetPartitionStats DomainKeyOwnerId: 72057594046644480 DomainKeyPathId: 1 From { } FromInclusive: true To { } ToInclusive: false IncludePathColumn: true 2026-01-04T02:31:09.111166Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5313: StateWork, processing event NSysView::TEvSysView::TEvGetPartitionStats 2026-01-04T02:31:09.111293Z node 1 :SYSTEM_VIEWS DEBUG: scan_actor_base_impl.h:67: Sending scan batch, actor: [1:7591328364440376181:2505], row count: 2, finished: 1 2026-01-04T02:31:09.111361Z node 1 :SYSTEM_VIEWS INFO: scan_actor_base_impl.h:126: Scan finished, actor: [1:7591328364440376181:2505], owner: [1:7591328364440376178:2503], scan id: 0, sys view info: Type: EPartitionStats SourceObject { OwnerId: 72057594046644480 LocalId: 1 } 2026-01-04T02:31:09.122090Z node 1 :SYSTEM_VIEWS TRACE: sysview_service.cpp:902: Collect query stats: service id# [1:7591328239886323127:2078], database# /Root, query hash# 18339066598126957035, cpu time# 654684 2026-01-04T02:31:09.171103Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [1:7591328239886323433:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:31:09.171146Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:31:09.171189Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:7591328239886323433:2146], Recipient [1:7591328239886323433:2146]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:31:09.171206Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 >> TTicketParserTest::LoginGood [GOOD] >> TTicketParserTest::LoginGoodWithGroups >> Yq_1::ModifyConnections >> Yq_1::DescribeJob >> Yq_1::ListConnections >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 [FAIL] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 >> Yq_1::Basic >> TTicketParserTest::TicketFromCertificateCheckIssuerGood [GOOD] >> TTicketParserTest::TicketFromCertificateCheckIssuerBad >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] >> TTicketParserTest::AuthenticationWithUserAccount [GOOD] >> TTicketParserTest::AuthenticationUnavailable >> PrivateApi::PingTask ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 10808, MsgBus: 18407 2026-01-04T02:31:01.181335Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328329448616116:2266];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:01.186437Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:01.187685Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:01.654027Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:01.689728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:01.690211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:01.780161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:01.872252Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:01.877115Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328329448615873:2081] 1767493861108847 != 1767493861108850 2026-01-04T02:31:01.934594Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:02.154445Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:02.154468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:02.154477Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:02.154558Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:02.167102Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:03.008557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:03.019758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:31:03.108853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:03.337934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:03.527577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:03.642720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:06.012205Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328350923454227:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:06.012333Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:06.012700Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328350923454237:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:06.012762Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:06.169685Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328329448616116:2266];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:06.169759Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:06.411585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:06.494573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:06.539651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:06.612136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:06.685267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:06.767832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:06.830502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:06.909056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:07.029552Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328355218422409:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:07.029627Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:07.029695Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328355218422414:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:07.029904Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328355218422416:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:07.029960Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:07.033995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:31:07.047474Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328355218422417:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:31:07.116809Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328355218422469:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:31:10.268770Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767493870296, txId: 281474976715673] shutting down >> AuthorizeRequestToAccessService::CanAuthorizeYdbInAccessService [GOOD] >> AuthorizeRequestToAccessService::CanRefreshTokenForAccessService |86.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> Cdc::InitialScanAndLimits [GOOD] >> Cdc::InitialScanComplete >> Yq_1::CreateQuery_With_Idempotency >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-20 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 >> Yq_1::Basic_Null >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists [GOOD] |86.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |86.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows |86.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_move/test-results/unittest/{meta.json ... results_accumulator.log} |86.2%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/ydb-core-tx-datashard-ut_upload_rows >> TTicketParserTest::NebiusAuthenticationUnavailable [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryError |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut |86.3%| [LD] {RESULT} $(B)/ydb/core/kqp/provider/ut/ydb-core-kqp-provider-ut >> TTicketParserTest::CanGetErrorIfAppropriateLoginProviderIsAbsent [GOOD] >> TTicketParserTest::LoginBad >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut/unittest >> GroupWriteTest::WriteHardRateDispatcher [GOOD] Test command err: RandomSeed# 1969659083003781203 2026-01-04T02:25:07.678874Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 is bootstrapped, going to send TEvDiscover {TabletId# 5 MinGeneration# 1 ReadBody# false DiscoverBlockedGeneration# true ForceBlockedGeneration# 0 FromLeader# true Deadline# 18446744073709551} 2026-01-04T02:25:07.707367Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvDiscoverResult {Status# NODATA BlockedGeneration# 0 Id# [0:0:0:0:0:0:0] Size# 0 MinGeneration# 1} 2026-01-04T02:25:07.707441Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 1 going to send TEvBlock {TabletId# 5 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} 2026-01-04T02:25:07.710820Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 1 recieved TEvBlockResult {Status# OK} 2026-01-04T02:25:07.728480Z 1 00h01m00.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Deadline# 18446744073709551 Collect# true 
CollectGeneration# 2 CollectStep# 0 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-04T02:25:07.731771Z 1 00h01m00.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} 2026-01-04T02:31:07.410849Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: Load tablet recieved PoisonPill, going to die 2026-01-04T02:31:07.421988Z 1 00h01m10.010512s :BS_LOAD_TEST DEBUG: TabletId# 5 Generation# 2 end working, going to send TEvCollectGarbage {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 2 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 0 IsMonitored# 1} 2026-01-04T02:31:07.921507Z 1 00h01m10.010512s :BS_LOAD_TEST INFO: TabletId# 5 Generation# 2 recieved TEvCollectGarbageResult {TabletId# 5 RecordGeneration# 2 PerGenerationCounter# 12 Channel# 0 Status# OK} |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:113:2144] 2026-01-04T02:31:10.913386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:31:10.913514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:31:10.913563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:31:10.913602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:31:10.913645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:31:10.913674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:31:10.913758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:31:10.917987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:31:10.918999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:31:10.919337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:31:11.110316Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:31:11.110412Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:11.111281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:31:11.129927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:31:11.130184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:31:11.130380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:31:11.135686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:31:11.135908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:31:11.136619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:31:11.136891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:31:11.139623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:31:11.139817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:31:11.141230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:31:11.141291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:31:11.141408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:31:11.141455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:31:11.141498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:31:11.141684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:31:11.406617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.407666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.407805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.407880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.407962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.408032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.408123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.408233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.408315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.408375Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.408435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.408494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.408572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.408723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.408828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModif ... 
etID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:31:12.621921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_external_table.cpp:58: [72057594046678944] TAlterExternalTable TPropose, operationId: 104:0 HandleReply TEvOperationPlan: step# 5000041 2026-01-04T02:31:12.622047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-04T02:31:12.622252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:31:12.622405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:31:12.623277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:31:12.623390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2026-01-04T02:31:12.625293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:31:12.625331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:31:12.625503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:31:12.625635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:31:12.625731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:31:12.625779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 104, path id: 1 2026-01-04T02:31:12.625838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-04T02:31:12.625865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-04T02:31:12.626206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:31:12.626254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-04T02:31:12.626362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:31:12.626399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:31:12.626439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:31:12.626484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:31:12.626539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-04T02:31:12.626579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:31:12.626632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-04T02:31:12.626672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-04T02:31:12.626739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:31:12.626777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:31:12.626810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-04T02:31:12.626845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-04T02:31:12.626874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2026-01-04T02:31:12.627717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:31:12.627816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:31:12.627850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:31:12.627914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-04T02:31:12.627958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:31:12.628865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:31:12.628957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:31:12.628994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:31:12.629027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-04T02:31:12.629071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:31:12.629162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-04T02:31:12.634834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:31:12.635241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-04T02:31:12.635491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-04T02:31:12.635555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-04T02:31:12.635968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-04T02:31:12.636068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.636111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:796:2786] TestWaitNotification: OK eventTxId 104 2026-01-04T02:31:12.636697Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:31:12.636924Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 247us result status StatusSuccess 2026-01-04T02:31:12.637427Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 3 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 3 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/other_location" Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> TPQCDTest::TestUnavailableWithoutNetClassifier >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildAsyncIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelReplaceExternalTableIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:113:2144] 2026-01-04T02:31:10.813774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:31:10.818148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:31:10.818209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:31:10.818252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:31:10.818290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:31:10.818320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:31:10.818403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:31:10.818498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:31:10.819486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:31:10.819796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:31:11.102455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:31:11.102558Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:11.103451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:31:11.151700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:31:11.151957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:31:11.152125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:31:11.173382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:31:11.173662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:31:11.174410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:31:11.174704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:31:11.184322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:31:11.184537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:31:11.185710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:31:11.185773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:31:11.185899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:31:11.185947Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:31:11.185994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:31:11.186156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:31:11.464063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.465104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.465254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.465337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.465422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.465496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.465589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.465663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:31:11.465755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.465835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.465900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.465966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.466113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.466208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:11.466311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModif ... 
T_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.630051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [1:832:2822] 2026-01-04T02:31:12.630194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2026-01-04T02:31:12.630287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.630327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:832:2822] 2026-01-04T02:31:12.630449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2026-01-04T02:31:12.630510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2026-01-04T02:31:12.630638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2026-01-04T02:31:12.630692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.630722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:832:2822] 2026-01-04T02:31:12.630802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.630825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [1:832:2822] 2026-01-04T02:31:12.630900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-01-04T02:31:12.631015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-01-04T02:31:12.631103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.631127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [1:832:2822] 2026-01-04T02:31:12.631206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.631226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [1:832:2822] 2026-01-04T02:31:12.631268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2026-01-04T02:31:12.631386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.631415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [1:832:2822] 2026-01-04T02:31:12.631461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 128, at schemeshard: 72057594046678944 2026-01-04T02:31:12.631587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.631611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [1:832:2822] 2026-01-04T02:31:12.631662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2026-01-04T02:31:12.631764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.631787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [1:832:2822] 2026-01-04T02:31:12.631883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.631911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [1:832:2822] 2026-01-04T02:31:12.631974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.632007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:832:2822] 2026-01-04T02:31:12.632065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 130, at schemeshard: 72057594046678944 2026-01-04T02:31:12.632155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 131, at schemeshard: 72057594046678944 2026-01-04T02:31:12.632218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.632242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:832:2822] 2026-01-04T02:31:12.632405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.632430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:832:2822] 2026-01-04T02:31:12.632497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 132, at schemeshard: 72057594046678944 2026-01-04T02:31:12.632624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.632649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:832:2822] 2026-01-04T02:31:12.632749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 128: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.632780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [1:832:2822] 2026-01-04T02:31:12.632914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.632941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:832:2822] 2026-01-04T02:31:12.633071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.633093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [1:832:2822] 2026-01-04T02:31:12.633207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.633233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [1:832:2822] 2026-01-04T02:31:12.633347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 132: got EvNotifyTxCompletionResult 2026-01-04T02:31:12.633370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 132: satisfy waiter [1:832:2822] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 TestWaitNotification: OK eventTxId 132 2026-01-04T02:31:12.637202Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:31:12.637470Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 250us result status StatusSuccess 2026-01-04T02:31:12.637890Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 2 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |86.3%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/slow/ydb-core-persqueue-ut-slow |86.3%| [TA] $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 64918, MsgBus: 30347 |86.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD] >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed >> TPQCDTest::TestUnavailableWithoutBoth |86.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-70 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 >> TPQCDTest::TestUnavailableWithoutClustersList >> TPQCDTest::TestPrioritizeLocalDatacenter >> TTicketParserTest::TicketFromCertificateCheckIssuerBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationBad >> TTicketParserTest::LoginGoodWithGroups [GOOD] >> TTicketParserTest::LoginGoodWithDelayUpdateSecurityState >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] >> AuthorizeRequestToAccessService::CanRefreshTokenForAccessService [GOOD] >> TTicketParserTest::AccessServiceAuthenticationOk ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap-useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 3451, MsgBus: 21928 |86.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 >> TTicketParserTest::AuthenticationUnavailable [GOOD] >> TTicketParserTest::AuthenticationUnsupported |86.3%| [TA] {RESULT} $(B)/ydb/core/load_test/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-59 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |86.3%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/ydb-public-sdk-cpp-src-client-persqueue_public-ut >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ShouldBreakLocksOnConcurrentFinalizeBuildAsyncIndex [GOOD] Test command err: 2026-01-04T02:26:27.372348Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327154506748325:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:27.372615Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:26:27.802901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:26:27.830576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:27.830679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:28.028581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-01-04T02:26:28.029619Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327154506748207:2081] 1767493587352214 != 1767493587352217 2026-01-04T02:26:28.041766Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:28.070148Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:26:28.192244Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:26:28.192267Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:28.192277Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:28.192354Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:28.231774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:28.241216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:28.272711Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7591327158801716338:2296] 2026-01-04T02:26:28.272970Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:26:28.291410Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:26:28.291507Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:26:28.293306Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:26:28.293341Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:26:28.293370Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:26:28.293787Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:26:28.293939Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:26:28.293967Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7591327158801716353:2296] in generation 1 2026-01-04T02:26:28.295451Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:26:28.336921Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:26:28.337092Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration 
request in state WaitScheme: missing processing params 2026-01-04T02:26:28.337170Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7591327158801716355:2297] 2026-01-04T02:26:28.337186Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:26:28.337212Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:26:28.337225Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:28.337425Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:26:28.337525Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:26:28.337557Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:26:28.337589Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:26:28.337606Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:26:28.337643Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:26:28.337680Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7591327158801716336:2497], serverId# [1:7591327158801716340:2498], sessionId# [0:0:0] 2026-01-04T02:26:28.341181Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:26:28.341419Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:26:28.341492Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2026-01-04T02:26:28.345558Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:26:28.346201Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:26:28.346301Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:26:28.350834Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7591327158801716369:2514], serverId# [1:7591327158801716370:2515], sessionId# [0:0:0] 2026-01-04T02:26:28.366485Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1767493588392 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767493588392 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:26:28.366528Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:28.366724Z node 1 
:TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:26:28.366810Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:26:28.366825Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:26:28.366845Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1767493588392:281474976710657] in PlanQueue unit at 72075186224037888 2026-01-04T02:26:28.367169Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1767493588392:281474976710657 keys extracted: 0 2026-01-04T02:26:28.367317Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:26:28.367398Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:26:28.367437Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:26:28.369729Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:26:28.370222Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:26:28.371593Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1767493588391 2026-01-04T02:26:28.371615Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:28.371639Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1767493588399 2026-01-04T02:26:28.371680Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1767493588392} 2026-01-04T02:26:28.371710Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:26:28.371740Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:26:28.371764Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:26:28.371791Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:26:28.371842Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1767493588392 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7591327154506748552:2140], exec latency: 2 ms, propose latency: 4 ms 2026-01-04T02:26:28.371911Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2026-01-04T02:26:28.371942Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:28.372729Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T ... : 281474976710759] from 72075186224037891 at tablet 72075186224037891 send result to client [25:400:2399], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:31:13.422380Z node 25 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037891 Sending notify to schemeshard 72057594046644480 txId 281474976710759 state Ready TxInFly 0 2026-01-04T02:31:13.422590Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-01-04T02:31:13.425457Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710759 datashard 72075186224037891 state Ready 2026-01-04T02:31:13.425614Z node 25 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037891 Got TEvSchemaChangedResult from SS at 72075186224037891 2026-01-04T02:31:13.449318Z node 25 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 4500} 2026-01-04T02:31:13.449410Z node 25 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:31:13.449448Z node 25 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:31:13.449526Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [4500 : 281474976710759] from 72075186224037888 at tablet 72075186224037888 send result to client [25:400:2399], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:31:13.449584Z node 25 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710759 state Ready TxInFly 0 2026-01-04T02:31:13.449704Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:31:13.452693Z node 25 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710759 datashard 72075186224037888 state Ready 2026-01-04T02:31:13.452779Z node 25 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:31:13.506165Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:31:13.506250Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:31:13.506289Z node 25 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:31:13.506326Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:31:13.506357Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:31:13.554215Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:31:13.554297Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:31:13.554332Z node 25 
:PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:31:13.554370Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:31:13.554403Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:31:13.590395Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:31:13.590495Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:31:13.590539Z node 25 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:31:13.590577Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:31:13.590609Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:31:13.625948Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:31:13.626035Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:31:13.626072Z node 25 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:31:13.626112Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:31:13.626145Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:31:13.665474Z node 25 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:31:13.665550Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:31:13.665585Z node 25 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:31:13.665622Z node 25 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:31:13.665651Z node 25 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:31:13.834625Z node 25 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:8] at 72075186224037888 2026-01-04T02:31:13.834940Z node 25 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=8; 2026-01-04T02:31:13.835089Z node 25 :TX_DATASHARD INFO: datashard_write_operation.cpp:804: Write transaction 8 at 72075186224037888 has an error: Operation is aborting because locks are not valid 2026-01-04T02:31:13.835373Z node 25 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. 
txid 8 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-01-04T02:31:13.835796Z node 25 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 8 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-01-04T02:31:13.835914Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:31:13.836329Z node 25 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [25:1405:3074], Table: `/Root/Table` ([72057594046644480:38:3]), SessionActorId: [25:1348:3074]Got LOCKS BROKEN for table `/Root/Table`. ShardID=72075186224037888, Sink=[25:1405:3074].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-04T02:31:13.836585Z node 25 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [25:1398:3074], SessionActorId: [25:1348:3074], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Table`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[25:1348:3074]. 2026-01-04T02:31:13.837170Z node 25 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=25&id=OGI5MWEwNmEtNTMxZTljNzktNTAyMDRlNDQtMmIxYmJhODY=, ActorId: [25:1348:3074], ActorState: ExecuteState, LegacyTraceId: 01ke3dgc0p1y71xw6ysmsdtdj0, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [25:1488:3074] from: [25:1398:3074] trace_id# 2026-01-04T02:31:13.837440Z node 25 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [25:1488:3074] TxId: 281474976715665. Ctx: { TraceId: 01ke3dgc0p1y71xw6ysmsdtdj0, Database: , SessionId: ydb://session/3?node_id=25&id=OGI5MWEwNmEtNTMxZTljNzktNTAyMDRlNDQtMmIxYmJhODY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Table`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-04T02:31:13.838392Z node 25 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=25&id=OGI5MWEwNmEtNTMxZTljNzktNTAyMDRlNDQtMmIxYmJhODY=, ActorId: [25:1348:3074], ActorState: ExecuteState, LegacyTraceId: 01ke3dgc0p1y71xw6ysmsdtdj0, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Table`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-04T02:31:13.839726Z node 25 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:9] at 72075186224037888 2026-01-04T02:31:13.839810Z node 25 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:9] at 72075186224037888 2026-01-04T02:31:13.840084Z node 25 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2026-01-04T02:31:13.845770Z node 25 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-04T02:31:13.845976Z node 25 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-01-04T02:31:13.846972Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037889][Partition][0][StateIdle] read cookie 3 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2026-01-04T02:31:13.847570Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037889][Partition][0][StateIdle] read cookie 3 added 1 blobs, size 139 count 1 last offset 0, current partition end offset: 1 2026-01-04T02:31:13.847685Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037889][Partition][0][StateIdle] Reading cookie 3. Send blob request. 2026-01-04T02:31:13.847860Z node 25 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 139 accessed 0 times before, last time 1970-01-01T00:00:04.000000Z 2026-01-04T02:31:13.848076Z node 25 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 3. All 1 blobs are from cache. 2026-01-04T02:31:13.848284Z node 25 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-04T02:31:13.848489Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-04T02:31:13.848800Z node 25 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 121 from pos 0 cbcount 1 2026-01-04T02:31:13.849637Z node 25 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 |86.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_exchange/unittest >> TTicketParserTest::LoginBad [GOOD] >> TTicketParserTest::LoginCheckRemovedUser >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLock+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11027, MsgBus: 32673 |86.3%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TTicketParserTest::LoginRefreshGroupsWithError [GOOD] >> TTicketParserTest::NebiusAccessServiceAuthenticationOk |86.3%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots |86.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_column_build_reboots/ydb-core-tx-schemeshard-ut_column_build_reboots >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-21 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 >> TTicketParserTest::AuthorizationRetryError [GOOD] >> TTicketParserTest::AuthorizationRetryErrorImmediately >> TTicketParserTest::BulkAuthorizationRetryError [GOOD] >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately >> DataShardVolatile::VolatileTxAbortedOnDrop [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter+UseSink >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationBad [GOOD] >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions >> TTicketParserTest::LoginGoodWithDelayUpdateSecurityState [GOOD] >> TTicketParserTest::LoginRefreshGroupsGood >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutClustersList [GOOD] Test command err: 2026-01-04T02:31:17.989752Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328401409396368:2156];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:18.004090Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:18.508112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:18.510854Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:18.633498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:18.640039Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:18.645551Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:18.653375Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328401409396249:2081] 1767493877946733 != 1767493877946736 2026-01-04T02:31:18.865953Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:18.866573Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/0031ae/r3tmp/yandexap0I8h.tmp 2026-01-04T02:31:18.866581Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/0031ae/r3tmp/yandexap0I8h.tmp 2026-01-04T02:31:18.866717Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/0031ae/r3tmp/yandexap0I8h.tmp 2026-01-04T02:31:18.866788Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:19.002341Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:22.294157Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328422884233703:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:22.294319Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:22.298630Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328422884233715:2382], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:22.298686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328422884233716:2383], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:22.298837Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:22.312646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:31:22.347609Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328422884233719:2384], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } |86.3%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestRelatedServicesAreRunning >> TTicketParserTest::AuthenticationUnsupported [GOOD] >> TTicketParserTest::AuthenticationUnknown ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutBoth [GOOD] Test command err: 2026-01-04T02:31:17.758785Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328397949316076:2175];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:17.761898Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:17.808829Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:18.245341Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:18.262664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:18.262806Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:18.409751Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:18.411145Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:18.713009Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:18.713029Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:18.713035Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:18.713100Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:18.723577Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:18.762760Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:21.698680Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328415129186075:2374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:21.698848Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:21.699402Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328415129186087:2377], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:21.699456Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328415129186088:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:21.699788Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:21.706517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:31:21.725388Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328415129186091:2379], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-04T02:31:21.854577Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328415129186148:2574] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |86.3%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest >> Cdc::ShouldBreakLocksOnConcurrentAddIndex [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAddStream >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] >> TPQCDTest::TestDiscoverClusters >> TTicketParserTest::AccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_21_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 21] [GOOD] |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest |86.3%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest |86.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest |86.3%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestUnavailableWithoutNetClassifier [GOOD] Test command err: 2026-01-04T02:31:14.700309Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328385922083657:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:14.700384Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:14.752559Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:15.121906Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:15.253403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:15.253508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:15.277921Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328385922083620:2081] 1767493874698563 != 1767493874698566 2026-01-04T02:31:15.288195Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:15.295075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:15.388507Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:15.544670Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:15.544689Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:15.544699Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:15.544801Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:15.708536Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:23339 2026-01-04T02:31:15.982102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:15.995115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:31:16.156354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-01-04T02:31:16.189341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-04T02:31:19.712652Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328385922083657:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:19.712710Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:19.713585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328407396921067:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:19.713709Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:19.715149Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328407396921079:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:19.715212Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328407396921080:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:19.715379Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:19.721030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:31:19.745993Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328407396921119:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:19.746107Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:19.753721Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328407396921129:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:19.753944Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:19.756527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2026-01-04T02:31:19.766832Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328407396921083:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-04T02:31:19.936334Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328407396921144:2595] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:31:20.424669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:20.484511Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328407396921161:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:31:20.495474Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NGM1ODdkZGYtNDNkY2FhMmMtYWIwYzgyMmQtYjcwYjliMmM=, ActorId: [1:7591328407396921064:2326], ActorState: ExecuteState, LegacyTraceId: 01ke3dghw834qesjf5mj23a93w, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:31:20.503319Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:31:20.681339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:20.821008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); |86.3%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest >> BsControllerConfig::ManyPDisksRestarts [GOOD] >> BsControllerConfig::MergeBoxes >> Cdc::InitialScanComplete [GOOD] >> Cdc::InitialScanEnqueuesZeroRecords |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |86.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |86.3%| [LD] {RESULT} $(B)/ydb/core/grpc_services/ut/ydb-core-grpc_services-ut |86.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |86.3%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/replication/service/ut_topic_reader/ydb-core-tx-replication-service-ut_topic_reader >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] >> TExternalTableTest::DropTableTwice >> TTicketParserTest::BulkAuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::BulkAuthorization >> TTicketParserTest::NebiusAccessServiceAuthenticationOk [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryError ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestPrioritizeLocalDatacenter [GOOD] Test command err: 2026-01-04T02:31:18.190333Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328404631516269:2181];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:18.190691Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:18.277200Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:18.679611Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:18.759738Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:18.759977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:18.865895Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:18.878108Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328404631516126:2081] 1767493878167476 != 1767493878167479 2026-01-04T02:31:18.878964Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:18.883194Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:19.124988Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/0031aa/r3tmp/yandexbFqXo6.tmp 2026-01-04T02:31:19.125018Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/0031aa/r3tmp/yandexbFqXo6.tmp 2026-01-04T02:31:19.125187Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/0031aa/r3tmp/yandexbFqXo6.tmp 2026-01-04T02:31:19.125287Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:19.185931Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:19.273951Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:11549 2026-01-04T02:31:19.517374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:19.538675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:19.546682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:31:19.591644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-04T02:31:22.029956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328417516418951:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:22.030153Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:22.046198Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328421811386283:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:22.046291Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328421811386284:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:22.046456Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:22.053611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:31:22.302099Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328421811386287:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-04T02:31:22.646747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:22.647707Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328421811386355:2594] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:31:22.882688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:23.006680Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328421811386405:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:31:23.009030Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZDhmNDUzNy00M2M1NDI3LTcxZmQ5ZjU1LTQwYTc2NTc3, ActorId: [1:7591328417516418949:2325], ActorState: ExecuteState, LegacyTraceId: 01ke3dgm42034bby11zqc76j97, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:31:23.021659Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:31:23.095191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-01-04T02:31:23.178762Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328404631516269:2181];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:23.178815Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |86.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryError [GOOD] >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant >> PrivateApi::PingTask [GOOD] >> PrivateApi::GetTask >> TTicketParserTest::AuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::AuthorizationWithRequiredPermissions >> TTicketParserTest::NebiusAuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::NebiusAuthorizationUnavailable >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 [FAIL] >> 
SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 >> TExternalTableTest::SchemeErrors ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap-useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 16576, MsgBus: 61862 |86.4%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> TTicketParserTest::AuthenticationUnknown [GOOD] >> TTicketParserTest::Authorization >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-51 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] >> TExternalTableTest::DropTableTwice [GOOD] >> TExternalTableTest::ParallelCreateExternalTable ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestCloudClientsAreConsistentlyDistributed [GOOD] Test command err: 2026-01-04T02:31:17.778157Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328398137343206:2193];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:17.778213Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:18.575892Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:18.633854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:18.643765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:18.841379Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:18.882639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:18.960510Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:18.965960Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328398137343036:2081] 1767493877719188 != 1767493877719191 2026-01-04T02:31:19.001537Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:19.298473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/0031b8/r3tmp/yandexs1mw9i.tmp 2026-01-04T02:31:19.298495Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/0031b8/r3tmp/yandexs1mw9i.tmp 2026-01-04T02:31:19.298639Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/0031b8/r3tmp/yandexs1mw9i.tmp 2026-01-04T02:31:19.298724Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration PQClient connected to localhost:20868 2026-01-04T02:31:20.247674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:20.270654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:20.294176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:31:22.786317Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328398137343206:2193];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:22.786409Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:24.319079Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328428202115078:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:24.319221Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:24.319573Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328428202115090:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:24.319612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328428202115092:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:24.320153Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:24.324978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:31:24.366540Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328428202115095:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-04T02:31:24.809686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:24.810757Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328428202115160:2603] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:31:25.214675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:25.225687Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328428202115205:2348], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:31:25.228925Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=OWVlZGQzYjMtN2ZkNTljZDQtMmU4ZjkzNWEtOWFiNzk5, ActorId: [1:7591328428202115076:2329], ActorState: ExecuteState, LegacyTraceId: 01ke3dgpcw52g7cj53m7acrgz9, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:31:25.231043Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:31:25.384948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); >> TExternalTableTest::CreateExternalTable |86.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |86.4%| [LD] {RESULT} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/bscontroller/ut/ydb-core-mind-bscontroller-ut >> KikimrIcGateway::TestListPath ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService+isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 8291, MsgBus: 27156 |86.4%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [FAIL] >> TExternalTableTest::SchemeErrors [GOOD] >> TExternalTableTest::ParallelCreateExternalTable [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [FAIL] |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |86.4%| [LD] {RESULT} 
$(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_worker/ydb-core-tx-replication-service-ut_worker >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 >> TTicketParserTest::AccessServiceAuthenticationApiKeyOk [GOOD] >> TTicketParserTest::AuthenticationRetryError >> TTicketParserTest::BulkAuthorization [GOOD] >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets-UseAuthToken >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [FAIL] |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |86.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_serverless_reboots/ydb-core-tx-schemeshard-ut_serverless_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:113:2144] 2026-01-04T02:31:28.191569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:31:28.191667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:31:28.191700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:31:28.191735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:31:28.191771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:31:28.191819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:31:28.191893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:31:28.191978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:31:28.192879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:31:28.193193Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:31:28.336204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:31:28.336309Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:28.337188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:31:28.369795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:31:28.370351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:31:28.370551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:31:28.388940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:31:28.389201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:31:28.389948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:31:28.390263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:31:28.403363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:31:28.403588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:31:28.404766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:31:28.404828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:31:28.404927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:31:28.404975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:31:28.405012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:31:28.405194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:31:28.566416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:28.567384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:28.567510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:28.567578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:28.567643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:28.567710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:28.567789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:28.567873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:28.567938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:28.568001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:28.568194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:28.568266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:28.568341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:28.568504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:28.568582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModif ... 
hildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:31:31.191852Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:31:31.192047Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 211us result status StatusSuccess 2026-01-04T02:31:31.192362Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000040 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" 
TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:31:31.193158Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:31:31.193295Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 159us result status StatusSuccess 2026-01-04T02:31:31.193678Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "ExternalTable1" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000041 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "ExternalTable2" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000040 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:31:31.194339Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:31:31.194510Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable1" took 194us result status StatusSuccess 2026-01-04T02:31:31.194850Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable1" PathDescription { Self { Name: "ExternalTable1" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 126 CreateStep: 5000041 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable1" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Content: "" } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:31:31.195430Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/ExternalTable2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:31:31.195592Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/ExternalTable2" took 185us result status StatusSuccess 2026-01-04T02:31:31.195909Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/ExternalTable2" PathDescription { Self { Name: "ExternalTable2" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 127 CreateStep: 5000040 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 
1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable2" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key1" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false } Columns { Name: "key2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false } Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false } Content: "" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> KikimrIcGateway::TestLoadTableMetadata ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:113:2144] 2026-01-04T02:31:29.358603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:31:29.358690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:31:29.358726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:31:29.358766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:31:29.358800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:31:29.358827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:31:29.358960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:31:29.359037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:31:29.360028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:31:29.360320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:31:29.511298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:31:29.511387Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:29.513065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:31:29.538901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:31:29.539159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:31:29.539319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:31:29.546751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:31:29.547018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:31:29.547750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:31:29.548023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:31:29.552418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:31:29.552629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:31:29.553728Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:31:29.553787Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:31:29.553910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:31:29.553955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:31:29.553996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:31:29.554137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:31:29.778976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } 
Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:29.786703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:29.786911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:29.787044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:29.787118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:29.787179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:29.787250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:29.787349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:29.787441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:29.787513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:29.787572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:29.787643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:29.787725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:29.787910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:29.787988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModif ... 
026-01-04T02:31:30.901715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 126:0, path# /MyRoot/DirA/Table2 2026-01-04T02:31:30.902041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, at schemeshard: 72057594046678944 2026-01-04T02:31:30.905309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Type \'BlaBlaType\' specified for column \'RowId\' is not supported by storage" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:31:30.905636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Type 'BlaBlaType' specified for column 'RowId' is not supported by storage, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2026-01-04T02:31:30.909357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:31:30.909756Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "" Type: "Uint64" } } 2026-01-04T02:31:30.909906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 127:0, path# /MyRoot/DirA/Table2 2026-01-04T02:31:30.910075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Columns cannot have an empty name, at schemeshard: 72057594046678944 2026-01-04T02:31:30.914141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Columns cannot have an empty name" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:31:30.914437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Columns cannot have an empty name, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2026-01-04T02:31:30.918601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" 
DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:31:30.919064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" TypeId: 27 } } 2026-01-04T02:31:30.919179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/DirA/Table2 2026-01-04T02:31:30.919345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, at schemeshard: 72057594046678944 2026-01-04T02:31:30.922234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Cannot set TypeId for column \'RowId\', use Type" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:31:30.922515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot set TypeId for column 'RowId', use Type, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2026-01-04T02:31:30.926304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:31:30.926823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" } } 2026-01-04T02:31:30.926913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 129:0, path# /MyRoot/DirA/Table2 2026-01-04T02:31:30.927057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Missing Type for column 'RowId', at schemeshard: 72057594046678944 2026-01-04T02:31:30.931068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Missing Type for column \'RowId\'" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:31:30.931390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 
129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Missing Type for column 'RowId', operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 129, wait until txId: 129 TestModificationResults wait txId: 130 2026-01-04T02:31:30.936464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } } TxId: 130 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:31:30.936900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 130:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "RowId" Type: "Uint64" Id: 2 } Columns { Name: "RowId2" Type: "Uint64" Id: 2 } } 2026-01-04T02:31:30.936990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 130:0, path# /MyRoot/DirA/Table2 2026-01-04T02:31:30.937199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 130:1, propose status:StatusSchemeError, reason: Duplicate column id: 2, at schemeshard: 72057594046678944 2026-01-04T02:31:30.940026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 130, response: Status: StatusSchemeError Reason: "Duplicate column id: 2" TxId: 130 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:31:30.940547Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 130, database: /MyRoot, subject: , status: StatusSchemeError, reason: Duplicate column id: 2, operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 130, wait until txId: 130 TestModificationResults wait txId: 131 2026-01-04T02:31:30.944933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } } TxId: 131 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:31:30.945404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 131:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "Table2" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource1" Location: "/" Columns { Name: "RowId" Type: "Uint64" } Columns { Name: "Value" Type: "Utf8" } } 2026-01-04T02:31:30.945511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] 
TCreateExternalTable Propose: opId# 131:0, path# /MyRoot/DirA/Table2 2026-01-04T02:31:30.945701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 131:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2026-01-04T02:31:30.954442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 131, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 131 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:31:30.954762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 131, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ExternalDataSource1', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: CREATE EXTERNAL TABLE, path: /MyRoot/DirA/Table2 TestModificationResult got TxId: 131, wait until txId: 131 |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> TPQTestSlow::TestWriteVeryBigMessage >> TExternalTableTest::CreateExternalTable [GOOD] >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists >> DataShardVolatile::UpsertNoLocksArbiter+UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiter-UseSink >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_14_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 14] [GOOD] >> TPQTestSlow::LargeMsgCompactificationWithRebootsTest >> TTicketParserTest::LoginCheckRemovedUser [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount >> TTicketParserTest::NebiusAuthorizationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAuthorization >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> TTicketParserTest::AuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] >> TTicketParserTest::BulkAuthorizationWithRequiredPermissions [GOOD] >> TTicketParserTest::BulkAuthorizationUnavailable |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |86.4%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_rtmr/ydb-core-tx-schemeshard-ut_rtmr >> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestRelatedServicesAreRunning [GOOD] Test command err: 2026-01-04T02:31:24.727811Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328428081438944:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:24.729886Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:25.333920Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:25.342365Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:25.342475Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:25.411865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:25.521015Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328428081438905:2081] 1767493884710405 != 1767493884710408 2026-01-04T02:31:25.555183Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:25.620907Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:25.826220Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:25.878656Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/00319b/r3tmp/yandexiLuM20.tmp 2026-01-04T02:31:25.878677Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/00319b/r3tmp/yandexiLuM20.tmp 2026-01-04T02:31:25.878866Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/00319b/r3tmp/yandexiLuM20.tmp 2026-01-04T02:31:25.878984Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration PQClient connected to localhost:10479 2026-01-04T02:31:26.305083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:26.312077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:31:26.412762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-01-04T02:31:26.431556Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-04T02:31:29.730048Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328428081438944:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:29.730132Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:30.755834Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328453851243624:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:30.755997Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:30.759245Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328453851243664:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:30.759351Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328453851243663:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:30.759405Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:30.764928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:31:30.807439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328453851243703:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:30.807535Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:30.834829Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328453851243668:2333], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
<main>: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-04T02:31:31.007115Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328453851243737:2599] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:31:31.821291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:31.826866Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328458146211042:2341], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:31:31.829439Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=N2YwYjFiMTktYmE0YjllZTItZDI1NjFhMWEtNTZlOTYyYjc=, ActorId: [1:7591328453851243622:2323], ActorState: ExecuteState, LegacyTraceId: 01ke3dgwkkdda23emz8k45ra1r, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:31:31.841778Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:31:32.122695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:32.324531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> TTicketParserTest::Authorization [GOOD] >> TTicketParserTest::AuthorizationModify |86.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::CreateExternalTableShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for 
TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:113:2144] 2026-01-04T02:31:30.772030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:31:30.772124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:31:30.772164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:31:30.772216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:31:30.772256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:31:30.772317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:31:30.772391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:31:30.772482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:31:30.773376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:31:30.773676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:31:31.162517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:31:31.162612Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:31.163537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:31:31.202748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:31:31.203085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:31:31.203275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:31:31.216571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:31:31.216821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:31:31.217583Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2026-01-04T02:31:31.217918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:31:31.221119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:31:31.221318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:31:31.227779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:31:31.227887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:31:31.228020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:31:31.228075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:31:31.228115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:31:31.228327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:31:31.511949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:31.513062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:31.513230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:31.513305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:31.513372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:31.513459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:31.513535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:31.513645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:31.513719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:31.513788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:31.513966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:31.514065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:31.514158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:31.514266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:31:31.514340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModif ... rd__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-04T02:31:35.169278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:31:35.170105Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:31:35.170178Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:31:35.170208Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:31:35.170235Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-04T02:31:35.170263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:31:35.170356Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:31:35.172819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:31:35.174480Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:31:35.174583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:31:35.174811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:31:35.174873Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:31:35.175238Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:31:35.175335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:31:35.175375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:741:2730] TestWaitNotification: OK eventTxId 102 2026-01-04T02:31:35.175811Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:31:35.176037Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 255us result status StatusSuccess 2026-01-04T02:31:35.176421Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 
NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2026-01-04T02:31:35.179914Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:31:35.180312Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/new_location" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2026-01-04T02:31:35.180527Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 103:0, path# /MyRoot/ExternalTable 2026-01-04T02:31:35.180672Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-04T02:31:35.184121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 103 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 102, at schemeshard: 72057594046678944 2026-01-04T02:31:35.184402Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/ExternalTable', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-04T02:31:35.184762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:31:35.184811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:31:35.185210Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:31:35.185304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:31:35.185341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- 
TTxNotificationSubscriber for txId 103: satisfy waiter [2:749:2738] TestWaitNotification: OK eventTxId 103 2026-01-04T02:31:35.185895Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:31:35.186092Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 285us result status StatusSuccess 2026-01-04T02:31:35.186456Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2026-01-04T02:31:36.363840Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.363871Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.363892Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:36.374513Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-04T02:31:36.378424Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:36.397489Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.400438Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:31:36.403879Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.403904Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.403924Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:36.404376Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:36.414093Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:36.414364Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.418072Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:31:36.418486Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-04T02:31:36.426711Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.427855Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.427890Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:36.429631Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:36.430434Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:36.430569Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.434014Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:31:36.435864Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.436469Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:31:36.436640Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:36.436696Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-04T02:31:36.437713Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.437741Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.437762Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:36.446906Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-04T02:31:36.448236Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:36.448433Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.450050Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:31:36.450575Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.450774Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:31:36.450892Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:36.450935Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-04T02:31:36.451741Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.451787Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.451814Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:36.452138Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:36.457510Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:36.457713Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.458154Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:31:36.458614Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.461895Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:31:36.462106Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:36.462169Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-04T02:31:36.463411Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.463437Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.463459Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:36.475268Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:36.482247Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:36.482465Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.482893Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:31:36.483484Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.483772Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:31:36.486397Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2026-01-04T02:31:36.486474Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-04T02:31:36.488732Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.488888Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.488914Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:36.489289Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:36.493774Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:36.494008Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.494365Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:31:36.496075Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.496660Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:31:36.496761Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:36.496809Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-04T02:31:36.503418Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.505345Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.505377Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:36.505952Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:36.506874Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:36.507065Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:36.510067Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:31:36.511413Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:31:36.512611Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:31:36.515365Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2026-01-04T02:31:36.515572Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-04T02:31:36.515791Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:36.515830Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-04T02:31:36.515860Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2026-01-04T02:31:36.515893Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2026-01-04T02:31:36.515940Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 2, size 16 bytes 2026-01-04T02:31:36.515966Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2026-01-04T02:31:36.516232Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 1 } } 2026-01-04T02:31:36.516449Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [10, 12). Partition stream id: 1 Got commit req { cookies { assign_id: 1 partition_cookie: 2 } } |86.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |86.4%| [LD] {RESULT} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/datastreams/ut/ydb-services-datastreams-ut >> KikimrIcGateway::TestListPath [GOOD] >> KikimrIcGateway::TestDropTable >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationUnavailable [GOOD] Test command err: 2026-01-04T02:31:08.681581Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328359403838466:2135];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:08.696279Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:08.747754Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:09.368718Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:09.399906Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:09.400007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:09.553349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:09.623370Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:09.629943Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328359403838370:2081] 1767493868657146 != 1767493868657149 2026-01-04T02:31:09.699379Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:09.777900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:09.893707Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:09.893737Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:09.893745Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:09.893864Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:10.155781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:10.180726Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket 87882144CD86A0BAAF699649A31E661D416835E0C51CBB756D06914B0834DAB7 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2026-01-04T02:31:13.483445Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328383727237312:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:13.483609Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:13.617899Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:13.628036Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591328383727237210:2081] 1767493873470105 != 1767493873470108 2026-01-04T02:31:13.715491Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:13.716922Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:13.717124Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
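
The two TICKET_PARSER failures reported for nodes 2 and 3 below ("Client`s certificate and server`s certificate have different issuers" and "Client certificate failed verification") amount to an issuer comparison plus a CA-store verification of the client certificate. The standalone OpenSSL sketch below illustrates those two checks under assumed names and policy; it is not the actual ticket_parser_impl.h code.

#include <openssl/x509.h>
#include <openssl/x509_vfy.h>

#include <string>

// Returns an empty string on success, otherwise an error message in the spirit
// of the ones the test log reports.
std::string CheckClientCert(X509* clientCert, X509* serverCert, X509_STORE* caStore) {
    // 1. Reject a client certificate whose issuer differs from the server certificate's issuer.
    if (X509_NAME_cmp(X509_get_issuer_name(clientCert),
                      X509_get_issuer_name(serverCert)) != 0) {
        return "Cannot create token from certificate. "
               "Client's certificate and server's certificate have different issuers";
    }

    // 2. Verify the client certificate against the trusted CA store.
    X509_STORE_CTX* ctx = X509_STORE_CTX_new();
    if (!ctx || X509_STORE_CTX_init(ctx, caStore, clientCert, nullptr) != 1) {
        if (ctx) X509_STORE_CTX_free(ctx);
        return "Cannot create token from certificate. Internal verification error";
    }
    std::string result;
    if (X509_verify_cert(ctx) != 1) {
        result = "Cannot create token from certificate. Client certificate failed verification";
    }
    X509_STORE_CTX_free(ctx);
    return result;
}
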
2026-01-04T02:31:13.795383Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:13.866319Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:14.094365Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:14.094392Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:14.094399Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:14.094460Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:14.344306Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:14.354232Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket 6E3CE81057D0C95C8B7FDF3B9C0217A91779C1C5221CCE8921647F1485898D07 () has now permanent error message 'Cannot create token from certificate. Client`s certificate and server`s certificate have different issuers' 2026-01-04T02:31:14.354847Z node 2 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket 6E3CE81057D0C95C8B7FDF3B9C0217A91779C1C5221CCE8921647F1485898D07: Cannot create token from certificate. 
Client`s certificate and server`s certificate have different issuers 2026-01-04T02:31:19.081755Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591328408218715977:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:19.081826Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:19.153592Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:19.303295Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:19.314011Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591328408218715950:2081] 1767493879061648 != 1767493879061651 2026-01-04T02:31:19.334933Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:19.335008Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:19.336024Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:19.344991Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:19.538294Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:19.538341Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:19.538357Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:19.538431Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:19.582004Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:19.844881Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:19.850933Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:19.853273Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket A10CEAF9CE40888A15DA9259524BC266318B9CA56273DDEAC9EE504806DFFBD1 () has now permanent error message 'Cannot create token from certificate. 
Client certificate failed verification' 2026-01-04T02:31:19.853852Z node 3 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket A10CEAF9CE40888A15DA9259524BC266318B9CA56273DDEAC9EE504806DFFBD1: Cannot create token from certificate. Client certificate failed verification 2026-01-04T02:31:23.619828Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:23.619987Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:31:23.837483Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:23.838442Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591328427380860457:2081] 1767493883451455 != 1767493883451458 2026-01-04T02:31:23.861775Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:23.861896Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:23.909921Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:23.940303Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:24.246604Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:24.246630Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:24.246638Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:24.246735Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:24.467082Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:24.499186Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:24.516039Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2026-01-04T02:31:24.516112Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d0bf6ecf650] Connect to grpc://localhost:2362 2026-01-04T02:31:24.520191Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0bf6ecf650] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { 
key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { resultCode: PERMISSION_DENIED impersonation_info { } } } 0: "OK" 2026-01-04T02:31:24.542477Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d0bf6ecf650] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { resultCode: PERMISSION_DENIED impersonation_info { } } } } 2026-01-04T02:31:24.542689Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1292: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2026-01-04T02:31:24.542830Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } 2026-01-04T02:31:24.543377Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2026-01-04T02:31:24.543634Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0bf6ecf650] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { resultCode: PERMISSION_DENIED impersonation_info { } } } 0: "OK" 2026-01-04T02:31:24.546967Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d0bf6ecf650] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { resultCode: PERMISSION_DENIED impersonation_info { } } } } 2026-01-04T02:31:24.547431Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1292: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2026-01-04T02:31:24.547509Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (8E120919) () has now permanent error message 'something.write for aaaa1234 bbbb4554 - PERMISSION_DENIED' 2026-01-04T02:31:24.577066Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:28.995561Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591328448328411206:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:29.000165Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:29.030945Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:29.317974Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591328448328411171:2081] 1767493888957403 != 1767493888957406 2026-01-04T02:31:29.392797Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:29.393474Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:29.397097Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:29.397697Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:29.420289Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:29.625672Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:29.685034Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:29.685060Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:29.685072Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:29.685167Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:30.005681Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:30.375919Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:30.388653Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2026-01-04T02:31:30.388709Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d0bf6ec4050] Connect to grpc://localhost:20004 2026-01-04T02:31:30.389691Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0bf6ec4050] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" 
} } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2026-01-04T02:31:30.404642Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d0bf6ec4050] Status 14 Service Unavailable 2026-01-04T02:31:30.406101Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-04T02:31:30.406129Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2026-01-04T02:31:30.406174Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:30.406254Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2026-01-04T02:31:30.406624Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d0bf6ec4050] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } 2026-01-04T02:31:30.412957Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d0bf6ec4050] Status 1 CANCELLED 2026-01-04T02:31:30.414834Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" retryable: 1 2026-01-04T02:31:30.414859Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.write now has a retryable error "CANCELLED" retryable: 1 2026-01-04T02:31:30.414883Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' |86.4%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 >> Cdc::ShouldBreakLocksOnConcurrentAddStream [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAlterStream ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2026-01-04T02:31:34.933291Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:34.933312Z :DEBUG: 
[db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:34.933330Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:34.938214Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:34.942077Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:34.960911Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:34.976298Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2026-01-04T02:31:34.977507Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:31:34.977754Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-01-04T02:31:34.982200Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-01-04T02:31:34.982567Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-01-04T02:31:34.983690Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:34.983737Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-04T02:31:34.983780Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-04T02:31:34.983935Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2026-01-04T02:31:34.983972Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-04T02:31:34.984014Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-01-04T02:31:34.984035Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-04T02:31:34.984153Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2026-01-04T02:31:34.984270Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-01-04T02:31:34.984293Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2026-01-04T02:31:34.984314Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-04T02:31:34.984404Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2026-01-04T02:31:34.984432Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-01-04T02:31:34.984465Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-01-04T02:31:34.984495Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-04T02:31:34.984592Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 2026-01-04T02:31:34.986166Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:34.986196Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:34.986225Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:34.986588Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:34.990004Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:34.990241Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:34.990532Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". 
Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2026-01-04T02:31:34.991374Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:31:34.991588Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-01-04T02:31:34.998341Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-01-04T02:31:34.998589Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-01-04T02:31:35.001913Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:35.001961Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-04T02:31:35.002000Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-04T02:31:35.002165Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 Getting new event 2026-01-04T02:31:35.002200Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-04T02:31:35.002222Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-01-04T02:31:35.002243Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-04T02:31:35.002379Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 Getting new event 2026-01-04T02:31:35.002461Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-01-04T02:31:35.002479Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2026-01-04T02:31:35.002495Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-04T02:31:35.002552Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 Getting new event 2026-01-04T02:31:35.002577Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-01-04T02:31:35.002595Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-01-04T02:31:35.002612Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00 ... tream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-04T02:31:37.890598Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 201). Partition stream id: 1 2026-01-04T02:31:37.985544Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-01-04T02:31:37.985822Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-01-04T02:31:37.985952Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:37.986530Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:37.987147Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:37.987613Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-01-04T02:31:37.988229Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2026-01-04T02:31:38.088191Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2026-01-04T02:31:38.089211Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:38.090983Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-04T02:31:38.093471Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-04T02:31:38.094705Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2026-01-04T02:31:38.105380Z :DEBUG: Take Data. 
Partition 1. Read: {0, 4} (5-5) 2026-01-04T02:31:38.106423Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2026-01-04T02:31:38.107379Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2026-01-04T02:31:38.108354Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2026-01-04T02:31:38.119595Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2026-01-04T02:31:38.120702Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2026-01-04T02:31:38.120800Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2026-01-04T02:31:38.120990Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. 
Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-04T02:31:38.125530Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 11). Partition stream id: 1 2026-01-04T02:31:38.135230Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:38.135287Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:38.135338Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:38.135841Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:38.136302Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:38.136473Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:38.136820Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:31:38.137143Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2026-01-04T02:31:38.138214Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:38.138243Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:38.138285Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:38.140241Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:38.141082Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:38.141274Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:38.141952Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:38.142190Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:31:38.142330Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:38.142387Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-04T02:31:38.142562Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 |86.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> ReadSessionImplTest::SuccessfulInit [GOOD] >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> KikimrIcGateway::TestLoadTableMetadata [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets >> TTicketParserTest::NebiusAuthorization [GOOD] >> TTicketParserTest::NebiusAuthorizationModify >> TTicketParserTest::BulkAuthorizationWithUserAccount [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount2 >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> TPQCDTest::TestDiscoverClusters [GOOD] >> TTicketParserTest::BulkAuthorizationUnavailable [GOOD] >> TTicketParserTest::BulkAuthorizationModify >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit >> TTicketParserTest::AuthorizationWithUserAccount [GOOD] >> TTicketParserTest::AuthorizationWithUserAccount2 >> Yq_1::DeleteConnections [GOOD] >> Yq_1::Create_And_Modify_The_Same_Connection >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-23 [GOOD] >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 >> Cdc::InitialScanEnqueuesZeroRecords [GOOD] >> Cdc::InitialScanRacyProgressAndDrop >> PersQueueSdkReadSessionTest::ReadSessionWithAbort ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2026-01-04T02:31:39.561743Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.561777Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.561850Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:39.562428Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
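
The StopsRetryAfterFailedAttempt / StopsRetryAfterTimeout cases listed above end with "Closing session to cluster: SessionClosed { ... Attempts done: 1 }", i.e. the session gives up as soon as the retry policy reports no further delay. A rough sketch of that control flow, using invented type names rather than the SDK's real API:

#include <chrono>
#include <cstddef>
#include <functional>
#include <optional>

enum class EStatus { Success, InternalError, Timeout };

// Hypothetical retry policy: the tests above appear to use one that allows a single attempt.
struct TRetryPolicy {
    std::size_t MaxAttempts = 1;
    std::optional<std::chrono::milliseconds> NextDelay(std::size_t attemptsDone) const {
        if (attemptsDone >= MaxAttempts) {
            return std::nullopt;                       // stop retrying
        }
        return std::chrono::milliseconds(0);           // "Reconnecting ... in 0.000000s"
    }
};

// Drives connect attempts until one succeeds or the policy gives up; the final failure
// status is what ends up in "SessionClosed { Status: ... Attempts done: N }".
EStatus RunSession(const TRetryPolicy& policy, const std::function<EStatus()>& connectOnce) {
    std::size_t attemptsDone = 0;
    for (;;) {
        const EStatus status = connectOnce();
        ++attemptsDone;
        if (status == EStatus::Success) {
            return status;
        }
        if (!policy.NextDelay(attemptsDone)) {
            return status;                             // close the session with the last error
        }
        // In the real client a timer would fire after the returned delay before reconnecting.
    }
}
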
Initializing session 2026-01-04T02:31:39.563662Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-04T02:31:39.563739Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.564857Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.564879Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.564900Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:39.565261Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:39.565619Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-04T02:31:39.565667Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.566504Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.566533Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.566549Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:39.566904Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-04T02:31:39.566958Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.566989Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.567129Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2026-01-04T02:31:39.567880Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.567906Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.567920Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:39.568258Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-01-04T02:31:39.568304Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.568342Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.568427Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2026-01-04T02:31:39.569543Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-04T02:31:39.569581Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-04T02:31:39.569873Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:39.570258Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:39.570796Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:39.587577Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-04T02:31:39.588034Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:31:39.588505Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2026-01-04T02:31:39.593106Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2026-01-04T02:31:39.593738Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:39.593787Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-04T02:31:39.593845Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-04T02:31:39.593884Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2026-01-04T02:31:39.593915Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2026-01-04T02:31:39.593975Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2026-01-04T02:31:39.593998Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2026-01-04T02:31:39.594028Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2026-01-04T02:31:39.594058Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2026-01-04T02:31:39.594078Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2026-01-04T02:31:39.594101Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2026-01-04T02:31:39.594136Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2026-01-04T02:31:39.594156Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2026-01-04T02:31:39.594176Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2026-01-04T02:31:39.594194Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2026-01-04T02:31:39.594213Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2026-01-04T02:31:39.594281Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2026-01-04T02:31:39.594303Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2026-01-04T02:31:39.594322Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2026-01-04T02:31:39.594354Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2026-01-04T02:31:39.594376Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2026-01-04T02:31:39.594394Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2026-01-04T02:31:39.594417Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2026-01-04T02:31:39.594437Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2026-01-04T02:31:39.594456Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2026-01-04T02:31:39.594489Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2026-01-04T02:31:39.594511Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2026-01-04T02:31:39.594555Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2026-01-04T02:31:39.594573Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2026-01-04T02:31:39.594596Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2026-01-04T02:31:39.594613Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2026-01-04T02:31:39.594630Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2026-01-04T02:31:39.594707Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2026-01-04T02:31:39.594733Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2026-01-04T02:31:39.594760Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2026-01-04T02:31:39.594781Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2026-01-04T02:31:39.594801Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2026-01-04T02:31:39.594818Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2026-01-04T02:31:39.594841Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2026-01-04T02:31:39.594860Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2026-01-04T02:31:39.594879Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2026-01-04T02:31:39.594902Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2026-01-04T02:31:39.594920Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2026-01-04T02:31:39.594954Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2026-01-04T02:31:39.594972Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2026-01-04T02:31:39.594991Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2026-01-04T02:31:39.595010Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2026-01-04T02:31:39.595034Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2026-01-04T02:31:39.595053Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2026-01-04T02:31:39.595071Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2026-01-04T02:31:39.595140Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2026-01-04T02:31:39.597779Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2026-01-04T02:31:39.600353Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2026-01-04T02:31:39.600414Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2026-01-04T02:31:39.600442Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2026-01-04T02:31:39.600465Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2026-01-04T02:31:39.600499Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2026-01-04T02:31:39.600534Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2026-01-04T02:31:39.600557Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2026-01-04T02:31:39.600578Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2026-01-04T02:31:39.600626Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2026-01-04T02:31:39.600648Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2026-01-04T02:31:39.600684Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2026-01-04T02:31:39.600725Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2026-01-04T02:31:39.600747Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2026-01-04T02:31:39.600768Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2026-01-04T02:31:39.600793Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2026-01-04T02:31:39.600826Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2026-01-04T02:31:39.600880Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2026-01-04T02:31:39.600900Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2026-01-04T02:31:39.600949Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2026-01-04T02:31:39.600971Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2026-01-04T02:31:39.600989Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2026-01-04T02:31:39.601007Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2026-01-04T02:31:39.601028Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2026-01-04T02:31:39.601047Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2026-01-04T02:31:39.601078Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2026-01-04T02:31:39.601099Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2026-01-04T02:31:39.601118Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2026-01-04T02:31:39.601151Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2026-01-04T02:31:39.601173Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2026-01-04T02:31:39.601193Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2026-01-04T02:31:39.601229Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2026-01-04T02:31:39.601259Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2026-01-04T02:31:39.601355Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2026-01-04T02:31:39.601381Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2026-01-04T02:31:39.601399Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2026-01-04T02:31:39.601418Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2026-01-04T02:31:39.601435Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2026-01-04T02:31:39.601453Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2026-01-04T02:31:39.601472Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2026-01-04T02:31:39.601491Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2026-01-04T02:31:39.601526Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2026-01-04T02:31:39.601550Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2026-01-04T02:31:39.601570Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2026-01-04T02:31:39.601589Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2026-01-04T02:31:39.601607Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2026-01-04T02:31:39.601628Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2026-01-04T02:31:39.601653Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2026-01-04T02:31:39.601683Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2026-01-04T02:31:39.601706Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2026-01-04T02:31:39.601725Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2026-01-04T02:31:39.601787Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2026-01-04T02:31:39.601973Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-04T02:31:39.603960Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.603987Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.604011Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:39.610168Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-04T02:31:39.612483Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:39.612800Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.613545Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:31:39.714414Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.714679Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-04T02:31:39.714744Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:39.714799Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-04T02:31:39.714866Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-04T02:31:39.921941Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2026-01-04T02:31:40.038283Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-01-04T02:31:40.038447Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-04T02:31:40.038641Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-01-04T02:31:40.039968Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:40.039994Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:40.040018Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:40.040436Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:40.040994Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:40.041199Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:40.041746Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:31:40.142731Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:40.146015Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-04T02:31:40.146094Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:40.146136Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-04T02:31:40.146221Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2026-01-04T02:31:40.146345Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-04T02:31:40.146504Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-01-04T02:31:40.149353Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2026-01-04T02:31:40.149559Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster |86.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> Yq_1::ListConnections [GOOD] >> Yq_1::ListConnectionsOnEmptyConnectionsTable >> Yq_1::DescribeJob [GOOD] >> Yq_1::DescribeQuery >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2026-01-04T02:31:39.987759Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.987805Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.987837Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:39.988315Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-04T02:31:39.988371Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.988419Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.989535Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009586s 2026-01-04T02:31:39.991136Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:39.992071Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-04T02:31:39.992165Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.992967Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.992981Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.992995Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:39.993265Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-04T02:31:39.993300Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.993329Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.993426Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.005684s 2026-01-04T02:31:39.993843Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:39.994165Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-04T02:31:39.994249Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.994970Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.994983Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.994993Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:39.995276Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-01-04T02:31:39.995303Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.995326Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.995372Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.166179s 2026-01-04T02:31:39.995671Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:39.995926Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-04T02:31:39.995992Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.996573Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.996591Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.996609Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:39.996892Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-01-04T02:31:39.996923Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.996946Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.996996Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.284740s 2026-01-04T02:31:39.997283Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:39.997689Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-04T02:31:39.997753Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.998503Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.998521Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.998543Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:39.998813Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:39.999390Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:40.009272Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:40.010053Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2026-01-04T02:31:40.010089Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:40.010111Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:40.010172Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.296211s 2026-01-04T02:31:40.010414Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-01-04T02:31:40.011473Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:40.011499Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:40.011527Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:40.011906Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:40.012362Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:40.012542Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:40.012794Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:31:40.013466Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:31:40.013904Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:31:40.014089Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2026-01-04T02:31:40.014173Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:31:40.014278Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:40.014323Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2026-01-04T02:31:40.014367Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-04T02:31:40.014394Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-04T02:31:40.015599Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:40.015670Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:40.015708Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:40.015938Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:40.016244Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:40.016355Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:40.016760Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:31:40.119347Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:40.119620Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (1-2) 2026-01-04T02:31:40.119691Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:40.119754Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-04T02:31:40.119827Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-04T02:31:40.225539Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-04T02:31:40.226178Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-01-04T02:31:40.227443Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:40.227465Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:40.227485Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:40.231095Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:40.231920Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:40.232138Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:40.241321Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:31:40.335695Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:40.335890Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-04T02:31:40.335943Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:40.336002Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-04T02:31:40.336082Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 3). Partition stream id: 1 2026-01-04T02:31:40.336168Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-04T02:31:40.336571Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-01-04T02:31:40.336641Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2026-01-04T02:31:40.336785Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster |86.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> TTicketParserTest::NebiusAuthenticationRetryError [GOOD] >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> TTicketParserTest::AuthorizationModify [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_cluster_discovery/ut/unittest >> TPQCDTest::TestDiscoverClusters [GOOD] Test command err: 2026-01-04T02:31:26.202798Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328438194690368:2159];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:26.203586Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:26.813935Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:26.891839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:26.891947Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:27.065989Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328438194690246:2081] 1767493886131020 != 1767493886131023 2026-01-04T02:31:27.112826Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:27.119308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:27.121956Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:27.232987Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:27.438462Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/003195/r3tmp/yandexE4VkmP.tmp 2026-01-04T02:31:27.438489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/003195/r3tmp/yandexE4VkmP.tmp 2026-01-04T02:31:27.438637Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/003195/r3tmp/yandexE4VkmP.tmp 2026-01-04T02:31:27.438723Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:27.593906Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:32401 2026-01-04T02:31:27.917585Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:27.937211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:28.010245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-04T02:31:28.026494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:31:31.170070Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328438194690368:2159];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:31.170142Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:32.493185Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328463964495004:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:32.493191Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328463964495016:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:32.493326Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:32.493615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328463964495019:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:32.493661Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:32.497536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:31:32.538699Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328463964495018:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-04T02:31:33.164323Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328463964495085:2609] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:31:33.172609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:33.412553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:33.485642Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328468259462423:2348], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:31:33.488350Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZmI4ZjE5ZTMtMjg4NTI3NmUtNWFjMWY3ZjItNzQwYzE1NQ==, ActorId: [1:7591328463964495001:2329], ActorState: ExecuteState, LegacyTraceId: 01ke3dgybpb0aphdkvf3p9k363, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:31:33.495236Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:31:33.618686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); |86.4%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_cluster_discovery/ut/unittest >> PrivateApi::GetTask [GOOD] >> PrivateApi::Nodes >> Yq_1::ModifyConnections [GOOD] >> Yq_1::ModifyQuery >> DataShardVolatile::UpsertNoLocksArbiter-UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink |86.4%| [TA] $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |86.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut |86.5%| [TA] {RESULT} $(B)/ydb/services/persqueue_cluster_discovery/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |86.5%| [LD] {RESULT} $(B)/ydb/library/ycloud/impl/ut/ydb-library-ycloud-impl-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationModify [GOOD] Test command err: 2026-01-04T02:31:08.274815Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328361935837672:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:08.275006Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:08.890134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:08.890219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:09.003767Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:09.040478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:09.072195Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328361935837459:2081] 1767493868245152 != 1767493868245155 2026-01-04T02:31:09.076620Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:09.210161Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:09.423404Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:09.424780Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:09.424788Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:09.424795Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:09.424884Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:09.681988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:09.691569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:09.708745Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-04T02:31:09.708864Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d4c768f15d0] Connect to grpc://localhost:61088 2026-01-04T02:31:09.711421Z node 1 
:GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4c768f15d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2026-01-04T02:31:09.721401Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d4c768f15d0] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:31:09.721913Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2026-01-04T02:31:09.723196Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d4c768f1cd0] Connect to grpc://localhost:8054 2026-01-04T02:31:09.724028Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4c768f1cd0] Request GetUserAccountRequest { user_account_id: "user1" } 2026-01-04T02:31:09.737315Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d4c768f1cd0] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2026-01-04T02:31:09.737784Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport 2026-01-04T02:31:13.058061Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328381268824290:2165];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:13.058107Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:13.092825Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:13.370000Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:13.397994Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:13.398134Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:13.398960Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:13.465990Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591328381268824163:2081] 1767493873034916 != 1767493873034919 2026-01-04T02:31:13.566893Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:13.567640Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:13.816157Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:13.816194Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:13.816200Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:13.816277Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:13.947053Z node 
2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:14.090680Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:14.199250Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:14.207636Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-04T02:31:14.207688Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d4c769c2450] Connect to grpc://localhost:4313 2026-01-04T02:31:14.208487Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4c769c2450] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2026-01-04T02:31:14.218352Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d4c769c2450] Status 14 Service Unavailable 2026-01-04T02:31:14.218783Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:14.218809Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-04T02:31:14.218951Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4c769c2450] Request AuthenticateRequest { iam_token: "**** (8E120919)" } 2026-01-04T02:31:14.225048Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d4c769c2450] Status 14 Service Unavailable 2026-01-04T02:31:14.226006Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:20.637240Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:31:20.677921Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:20.728458Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:20.728611Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591328414446904394:2081] 1767493880562431 != 1767493880562434 2026-01-04T02:31:20.779575Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:20.779665Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:20.790922Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:20.901127Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, 
path: Root/.metadata/script_executions 2026-01-04T02:31:20.955202Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:20.955238Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:20.955247Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:20.955343Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:21.331389Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:21.338768Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:31:21.344020Z node 3 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket **** (8E120919): Token is not supported 2026-01-04T02:31:24.874805Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:31:24.910139Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cp ... zation(something.read) 2026-01-04T02:31:30.407709Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4c769279d0] Request AuthorizeRequest { iam_token: "**** (E2D1584C)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:30.410573Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d4c769279d0] Status 16 Access Denied 2026-01-04T02:31:30.413955Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (E2D1584C) permission something.read now has a permanent error "Access Denied" retryable:0 2026-01-04T02:31:30.413996Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (E2D1584C) () has now permanent error message 'Access Denied' 2026-01-04T02:31:30.414706Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (BE2EA0D0) asking for AccessServiceAuthorization(something.read) 2026-01-04T02:31:30.414921Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4c769279d0] Request AuthorizeRequest { iam_token: "**** (BE2EA0D0)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:30.422576Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d4c769279d0] Status 16 Access Denied 2026-01-04T02:31:30.423106Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (BE2EA0D0) permission something.read now has a permanent error "Access Denied" retryable:0 2026-01-04T02:31:30.423138Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (BE2EA0D0) () has now permanent error message 'Access Denied' 2026-01-04T02:31:30.426434Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-04T02:31:30.426648Z node 5 :GRPC_CLIENT DEBUG: 
grpc_service_client.h:122: [7d4c769279d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2026-01-04T02:31:30.437963Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d4c769279d0] Status 16 Access Denied 2026-01-04T02:31:30.438458Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2026-01-04T02:31:30.438489Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2026-01-04T02:31:30.441468Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-04T02:31:30.441695Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4c769279d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "XXXXXXXX" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:30.443856Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d4c769279d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:31:30.449948Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-01-04T02:31:30.450064Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-04T02:31:30.454858Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-04T02:31:30.455305Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4c769279d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "XXXXXXXX" type: "resource-manager.folder" } } 2026-01-04T02:31:30.459341Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d4c769279d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:31:30.463814Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-01-04T02:31:30.463940Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-04T02:31:30.464666Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(monitoring.view) 2026-01-04T02:31:30.473564Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4c769279d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "monitoring.view" resource_path { id: "gizmo" type: "iam.gizmo" } } 2026-01-04T02:31:30.479691Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d4c769279d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:31:30.482934Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission monitoring.view now has a valid subject "user1@as" 2026-01-04T02:31:30.483045Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-04T02:31:30.483872Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: 
Ticket **** (6968D2E8) asking for AccessServiceAuthorization(something.write) 2026-01-04T02:31:30.484079Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4c769279d0] Request AuthorizeRequest { iam_token: "**** (6968D2E8)" permission: "something.write" resource_path { id: "123" type: "ydb.database" } resource_path { id: "folder" type: "resource-manager.folder" } } 2026-01-04T02:31:30.486149Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d4c769279d0] Response AuthorizeResponse { subject { service_account { id: "service1" } } } 2026-01-04T02:31:30.486396Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (6968D2E8) permission something.write now has a valid subject "service1@as" 2026-01-04T02:31:30.486491Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (6968D2E8) () has now valid token of service1@as 2026-01-04T02:31:36.837237Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7591328483161399645:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:36.837281Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:36.884133Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:36.991244Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:36.991341Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:37.014128Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:37.024985Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7591328483161399598:2081] 1767493896830292 != 1767493896830295 2026-01-04T02:31:37.035634Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:37.060656Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:37.302523Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:37.302553Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:37.302562Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:37.302649Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:37.610637Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:37.626107Z node 6 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:31:37.630124Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-04T02:31:37.630220Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d4c76a21350] Connect to grpc://localhost:9947 2026-01-04T02:31:37.631122Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4c76a21350] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:37.645053Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d4c76a21350] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:31:37.646042Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-01-04T02:31:37.646136Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-04T02:31:37.646831Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-04T02:31:37.646881Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2026-01-04T02:31:37.647039Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4c76a21350] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:37.647627Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4c76a21350] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:37.654408Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d4c76a21350] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:31:37.654412Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d4c76a21350] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:31:37.654591Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-01-04T02:31:37.654637Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2026-01-04T02:31:37.654737Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> KikimrIcGateway::TestDropTable [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TTicketParserTest::NebiusAuthorizationModify [GOOD] >> Yq_1::Basic_Null [GOOD] >> Yq_1::Basic_TaggedLiteral >> TTicketParserTest::BulkAuthorizationModify [GOOD] >> TTicketParserTest::BulkAuthorizationWithUserAccount2 
[GOOD] >> TTicketParserTest::LoginRefreshGroupsGood [GOOD] >> TTicketParserTest::LoginEmptyTicketBad |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_21_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 21] [GOOD] |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |86.5%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test >> TTicketParserTest::AuthenticationRetryError [GOOD] >> TTicketParserTest::AuthenticationRetryErrorImmediately ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAuthorizationModify [GOOD] Test command err: 2026-01-04T02:31:07.780182Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328356664515819:2263];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:07.780240Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:08.243429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:08.247732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:08.335284Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:08.375173Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:08.382733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:08.666070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:08.679451Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:08.679477Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:08.679488Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:08.679573Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:08.790195Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:09.091178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:09.099419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:09.104081Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-04T02:31:09.104196Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cf11cf00050] Connect to grpc://localhost:14887 2026-01-04T02:31:09.107097Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf11cf00050] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2026-01-04T02:31:09.127715Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cf11cf00050] Status 14 Service Unavailable 2026-01-04T02:31:09.128076Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:09.128120Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-04T02:31:09.128284Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf11cf00050] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2026-01-04T02:31:09.130269Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cf11cf00050] Status 14 Service Unavailable 2026-01-04T02:31:09.130511Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:13.878059Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328384360329135:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:13.878302Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:14.042492Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:14.167118Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:14.189931Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591328384360328911:2081] 1767493873813306 != 1767493873813309 2026-01-04T02:31:14.197743Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:14.202001Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:14.251241Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:14.430108Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:14.469897Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:14.469918Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:14.469925Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:14.469997Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:14.862298Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:15.004120Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:15.013889Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:15.017901Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-01-04T02:31:15.018019Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cf11cfb2f50] Connect to grpc://localhost:9961 2026-01-04T02:31:15.019151Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf11cfb2f50] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2026-01-04T02:31:15.057937Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cf11cfb2f50] Status 14 Service Unavailable 2026-01-04T02:31:15.061956Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-04T02:31:15.061996Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:15.062062Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-01-04T02:31:15.062350Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf11cfb2f50] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2026-01-04T02:31:15.070500Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cf11cfb2f50] Status 14 Service Unavailable 2026-01-04T02:31:15.073851Z 
node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-04T02:31:15.073907Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:15.885948Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket **** (8E120919) 2026-01-04T02:31:15.886051Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-01-04T02:31:15.886512Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf11cfb2f50] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response 14: "Service Unavailable" 2026-01-04T02:31:15.894513Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cf11cfb2f50] Status 14 Service Unavailable 2026-01-04T02:31:15.894883Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-04T02:31:15.894913Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:16.896442Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket **** (8E120919) 2026-01-04T02:31:16.896510Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-01-04T02:31:16.897007Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf11cfb2f50] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request ch ... 
2: [7cf11cfaf750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "monitoring.view" } managed_resource_id: "folder" iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "monitoring.view" } managed_resource_id: "folder" iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } 0: "OK" 2026-01-04T02:31:35.792504Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cf11cfaf750] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } } 2026-01-04T02:31:35.796431Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-04T02:31:35.797203Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (6968D2E8) asking for AccessServiceAuthorization( something.write) 2026-01-04T02:31:35.797434Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf11cfaf750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "**** (6968D2E8)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "service1" } } NebiusAccessService::Authorize response results { key: 0 value { account { service_account { id: "service1" } } impersonation_info { } } } 0: "OK" 2026-01-04T02:31:35.799695Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cf11cfaf750] Response AuthorizeResponse { results { key: 0 value { account { service_account { id: "service1" } } impersonation_info { } } } } 2026-01-04T02:31:35.800007Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (6968D2E8) () has now valid token of service1@as 2026-01-04T02:31:35.800822Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (7A38211C) asking for AccessServiceAuthorization( something.write) 2026-01-04T02:31:35.801039Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf11cfaf750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "**** (7A38211C)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "service2" } } NebiusAccessService::Authorize response results { key: 0 value { account { service_account { id: "service2" } } impersonation_info { chain { account { service_account { id: "srv" } } } } } } 0: "OK" 2026-01-04T02:31:35.803318Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cf11cfaf750] Response AuthorizeResponse { results { key: 0 value { account { service_account { id: "service2" } } impersonation_info { chain { account { service_account { id: "srv" } } } } } } } 2026-01-04T02:31:35.803673Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (7A38211C) () has now valid token of service2@as 2026-01-04T02:31:35.804349Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8853A21F) asking for AccessServiceAuthorization( something.write) 2026-01-04T02:31:35.804536Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf11cfaf750] Request AuthorizeRequest { 
checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "**** (8853A21F)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.write" } managed_resource_id: "folder" resource_path { path { id: "123" } } iam_token: "service3" } } NebiusAccessService::Authorize response results { key: 0 value { account { service_account { id: "service3" } } impersonation_info { chain { account { service_account { id: "srv" } } account { service_account { id: "one_more_service" } } } chain { account { service_account { id: "srv" } } account { service_account { id: "srv2" } } account { user_account { id: "user1" } } account { service_account { id: "srv3" } } } } } } 0: "OK" 2026-01-04T02:31:35.810677Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cf11cfaf750] Response AuthorizeResponse { results { key: 0 value { account { service_account { id: "service3" } } impersonation_info { chain { account { service_account { id: "srv" } } account { service_account { id: "one_more_service" } } } chain { account { service_account { id: "srv" } } account { service_account { id: "srv2" } } account { user_account { id: "user1" } } account { service_account { id: "srv3" } } } } } } } 2026-01-04T02:31:35.818030Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8853A21F) () has now valid token of service3@as 2026-01-04T02:31:35.853970Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:39.986187Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591328495034607521:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:39.986256Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:40.025153Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:40.109283Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:40.112147Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591328495034607482:2081] 1767493899984131 != 1767493899984134 2026-01-04T02:31:40.132445Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:40.132544Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:40.185638Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:40.281924Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:40.398170Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:40.398198Z node 5 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:40.398212Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:40.398303Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:40.679915Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:40.694111Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:40.699629Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read) 2026-01-04T02:31:40.699687Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cf11cf01c50] Connect to grpc://localhost:1640 2026-01-04T02:31:40.700794Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf11cf01c50] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } 0: "OK" 2026-01-04T02:31:40.715276Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cf11cf01c50] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } } 2026-01-04T02:31:40.717615Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-04T02:31:40.722539Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:537: Ticket **** (8E120919) asking for AccessServiceAuthorization( something.read something.write) 2026-01-04T02:31:40.722903Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf11cf01c50] Request AuthorizeRequest { checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "**** (8E120919)" } } } NebiusAccessService::Authorize request checks { key: 0 value { permission { name: "something.read" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } checks { key: 1 value { permission { name: "something.write" } managed_resource_id: "aaaa1234" resource_path { path { id: "bbbb4554" } } iam_token: "user1" } } NebiusAccessService::Authorize response results { key: 0 value { account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { account { user_account { id: "user1" } } impersonation_info { } } } 0: "OK" 2026-01-04T02:31:40.726492Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cf11cf01c50] Response AuthorizeResponse { results { key: 0 value 
{ account { user_account { id: "user1" } } impersonation_info { } } } results { key: 1 value { account { user_account { id: "user1" } } impersonation_info { } } } } 2026-01-04T02:31:40.726892Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationWithUserAccount2 [GOOD] >> TTicketParserTest::AuthorizationUnavailable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationWithUserAccount2 [GOOD] Test command err: 2026-01-04T02:31:08.090896Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328362395651721:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:08.090944Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:08.494015Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:08.517491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:08.517616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:08.621052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:08.786480Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328362395651481:2081] 1767493868024606 != 1767493868024609 2026-01-04T02:31:08.787257Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:08.793894Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:09.085959Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:09.104772Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:09.104803Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:09.104810Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:09.104888Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:09.509784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:09.534230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:09.930565Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db /Root/Db1, token db /Root/Db1, DomainLoginOnly 0 2026-01-04T02:31:09.930604Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(2): /Root/Db1, /Root 2026-01-04T02:31:09.931082Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:874: CanInitLoginToken, database /Root/Db1, login state is not available yet, deffer token (eyJh****ZJUA (C85F785E)) 2026-01-04T02:31:12.101246Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****ZJUA (C85F785E) () has now permanent error message 'Login state is not available' 2026-01-04T02:31:12.101312Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:2352: Finish waiting for login providers for 1 databases: /Root/Db1, 2026-01-04T02:31:14.192399Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328387196522261:2267];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:14.192470Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:14.262397Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:14.402426Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:14.446345Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:14.446429Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:14.471234Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:14.725904Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:14.746419Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:14.746441Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:14.746448Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:14.746527Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:15.165958Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:15.361760Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:15.480032Z node 2 
:TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:31:15.480367Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db , DomainLoginOnly 1 2026-01-04T02:31:15.480392Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:31:15.480545Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (5DAB89DE) () has now permanent error message 'Token is not in correct format' 2026-01-04T02:31:15.480559Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:826: CanInitLoginToken, database /Root, A2 error Token is not in correct format 2026-01-04T02:31:15.480582Z node 2 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket **** (5DAB89DE): Token is not in correct format 2026-01-04T02:31:21.400657Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591328415418197401:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:21.400714Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:21.505185Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:21.798288Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:21.804934Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:21.805026Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:21.832638Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:21.852650Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:22.294593Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:22.294620Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:22.294628Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:22.294701Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:22.309908Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:22.386006Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:23.011252Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:23.020524Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:23.409973Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:31:23.425787Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:31:23.425842Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:31:23.426726Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****c1Hg (9EE3C32F) () has now valid token of user1 2026-01-04T02:31:23.426747Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root, A4 success 2026-01-04T02:31:23.429788Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:31:26.401330Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591328415418197401:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:26.401395Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:27.398425Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****c1Hg (9EE3C32F) 2026-01-04T02:31:27.398758Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****c1Hg (9EE3C32F) () has now permanent error message 'User not found' 2026-01-04T02:31:31.411375Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket ... 
ookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:34.762215Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:31:34.791748Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591328471811409148:2081] 1767493894606776 != 1767493894606779 2026-01-04T02:31:34.828750Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:34.828852Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:34.832996Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:34.839383Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:34.998146Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:35.048598Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:35.048628Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:35.048638Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:35.048716Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:35.507578Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:35.533075Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-01-04T02:31:35.533144Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cedb4e352d0] Connect to grpc://localhost:4809 2026-01-04T02:31:35.539603Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cedb4e352d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-04T02:31:35.574650Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cedb4e352d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:31:35.575025Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2026-01-04T02:31:35.576929Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cedb4e32c50] Connect to grpc://localhost:6807 2026-01-04T02:31:35.578023Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cedb4e32c50] Request GetUserAccountRequest { user_account_id: "user1" } 2026-01-04T02:31:35.594374Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: 
[7cedb4e32c50] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2026-01-04T02:31:35.594858Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport 2026-01-04T02:31:35.595648Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.write) 2026-01-04T02:31:35.595805Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cedb4e352d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-01-04T02:31:35.607383Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cedb4e352d0] Response BulkAuthorizeResponse { results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2026-01-04T02:31:35.609940Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission something.write access denied for subject "" 2026-01-04T02:31:35.614215Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (8E120919) () has now permanent error message 'Access Denied' 2026-01-04T02:31:35.626600Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2026-01-04T02:31:35.626837Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cedb4e352d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-01-04T02:31:35.628883Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cedb4e352d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:31:35.633930Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2026-01-04T02:31:35.634101Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:35.634315Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport 2026-01-04T02:31:39.988765Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591328494470263407:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:39.988855Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:40.032870Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:40.121320Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded 2026-01-04T02:31:40.141389Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:40.141478Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:40.176796Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:40.375149Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:40.382642Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:40.382670Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:40.382678Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:40.382774Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:40.778644Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:40.788003Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read somewhere.sleep something.list something.write something.eat) 2026-01-04T02:31:40.788072Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cedb4ea4150] Connect to grpc://localhost:27787 2026-01-04T02:31:40.789070Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cedb4ea4150] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "somewhere.sleep" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.list" ...(truncated) } 2026-01-04T02:31:40.806280Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cedb4ea4150] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } items { permission: "something.list" r...(truncated) } 2026-01-04T02:31:40.807940Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission something.read access denied for subject "user1@as" 2026-01-04T02:31:40.807972Z node 5 :TICKET_PARSER TRACE: 
ticket_parser_impl.h:1396: Ticket **** (8E120919) permission somewhere.sleep access denied for subject "user1@as" 2026-01-04T02:31:40.807985Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission something.list access denied for subject "user1@as" 2026-01-04T02:31:40.808001Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission something.eat access denied for subject "user1@as" 2026-01-04T02:31:40.808020Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2026-01-04T02:31:40.808234Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cedb4ea3dd0] Connect to grpc://localhost:31252 2026-01-04T02:31:40.809062Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cedb4ea3dd0] Request GetUserAccountRequest { user_account_id: "user1" } 2026-01-04T02:31:40.823837Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cedb4ea3dd0] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2026-01-04T02:31:40.824277Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport 2026-01-04T02:31:41.014184Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> Yq_1::DescribeConnection [GOOD] >> Yq_1::DeleteQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::BulkAuthorizationModify [GOOD] Test command err: 2026-01-04T02:31:08.395724Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328361921902789:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:08.395756Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:08.467558Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:08.935031Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:08.935135Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:09.022269Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:09.035221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:09.149967Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328361921902763:2081] 1767493868366965 != 1767493868366968 2026-01-04T02:31:09.160140Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:09.184975Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-04T02:31:09.297605Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:09.297635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:09.297642Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:09.297733Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:09.426830Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:09.664025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:09.674239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:31:09.680338Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-01-04T02:31:09.680406Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d219a2f07d0] Connect to grpc://localhost:13339 2026-01-04T02:31:09.683608Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d219a2f07d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-04T02:31:09.706912Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d219a2f07d0] Status 14 Service Unavailable 2026-01-04T02:31:09.709986Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-04T02:31:09.710019Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:09.710095Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-01-04T02:31:09.710306Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d219a2f07d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-04T02:31:09.722462Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d219a2f07d0] Status 14 Service Unavailable 2026-01-04T02:31:09.722668Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-04T02:31:09.722694Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now 
retryable error message 'Service Unavailable' 2026-01-04T02:31:10.431261Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-01-04T02:31:10.431405Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-01-04T02:31:10.431686Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d219a2f07d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-04T02:31:10.434441Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d219a2f07d0] Status 14 Service Unavailable 2026-01-04T02:31:10.434754Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-04T02:31:10.434789Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:12.432075Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-01-04T02:31:12.432156Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-01-04T02:31:12.432418Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d219a2f07d0] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-04T02:31:12.435621Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d219a2f07d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:31:12.435853Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2026-01-04T02:31:13.398435Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328361921902789:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:13.398503Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:23.079320Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328424313949131:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:23.163597Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:23.199920Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:23.328013Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2026-01-04T02:31:23.328102Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:23.341961Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591328420018981713:2081] 1767493883036257 != 1767493883036260 2026-01-04T02:31:23.350239Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:23.359105Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:23.444451Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:23.550069Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:23.550092Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:23.550101Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:23.550174Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:23.821483Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:23.834372Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:23.841998Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceBulkAuthorization( something.read) 2026-01-04T02:31:23.842048Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d219a3b4450] Connect to grpc://localhost:3054 2026-01-04T02:31:23.842894Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d219a3b4450] Request BulkAuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-04T02:31:23.853986Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d219a3b4450] Status 14 Service Unavailable 2026-01-04T02:31:23.858624Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a ... 
ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-04T02:31:32.623966Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2026-01-04T02:31:32.624219Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d219a43ef50] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-01-04T02:31:32.631051Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d219a43ef50] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } results { items { permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission_denied_error { message: "Access Denied" } } } } 2026-01-04T02:31:32.631294Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:1396: Ticket **** (8E120919) permission something.write access denied for subject "user1@as" 2026-01-04T02:31:32.631380Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket **** (8E120919) () has now permanent error message 'something.write for folder_id aaaa1234 - Access Denied' 2026-01-04T02:31:36.142518Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591328480131652173:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:36.145447Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:36.184475Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:36.337964Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:36.339298Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:36.339375Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:36.353926Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591328480131652058:2081] 1767493896115088 != 1767493896115091 2026-01-04T02:31:36.365551Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:36.376213Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:36.510516Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:36.510543Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-04T02:31:36.510552Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:36.510677Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:36.570176Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:36.790026Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:36.797316Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2026-01-04T02:31:36.797376Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d219a2e58d0] Connect to grpc://localhost:16780 2026-01-04T02:31:36.798506Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d219a2e58d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-01-04T02:31:36.812084Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d219a2e58d0] Status 14 Service Unavailable 2026-01-04T02:31:36.812489Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Service Unavailable" retryable: 1 2026-01-04T02:31:36.812504Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.write now has a retryable error "Service Unavailable" retryable: 1 2026-01-04T02:31:36.812526Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:36.812582Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2026-01-04T02:31:36.812827Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d219a2e58d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-01-04T02:31:36.819093Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7d219a2e58d0] Status 1 CANCELLED 2026-01-04T02:31:36.819294Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" retryable: 1 2026-01-04T02:31:36.819328Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.write now has a retryable error "CANCELLED" 
retryable: 1 2026-01-04T02:31:36.819357Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' 2026-01-04T02:31:40.360092Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:40.363189Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:31:40.451132Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:40.451223Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:40.459685Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:40.469903Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7591328499034883697:2081] 1767493900248805 != 1767493900248808 2026-01-04T02:31:40.508051Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:40.566462Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:40.673171Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:40.673197Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:40.673210Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:40.673317Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:41.168032Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:41.177175Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:41.183537Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-01-04T02:31:41.183606Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d219a30c450] Connect to grpc://localhost:4203 2026-01-04T02:31:41.184511Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d219a30c450] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-04T02:31:41.195685Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d219a30c450] Response BulkAuthorizeResponse { 
subject { user_account { id: "user1" } } } 2026-01-04T02:31:41.196363Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-04T02:31:41.197070Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read something.write) 2026-01-04T02:31:41.197312Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d219a30c450] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.write" } } result_filter: ALL_FAILED } 2026-01-04T02:31:41.202026Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d219a30c450] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:31:41.202243Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-04T02:31:41.313317Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> BsControllerConfig::MergeBoxes [GOOD] |86.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |86.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/service/ut_json_change_record/tx-replication-service-ut_json_change_record >> Yq_1::CreateConnection_With_Existing_Name [GOOD] >> Yq_1::CreateConnections_With_Idempotency >> TPQTestSlow::LargeMsgCompactificationWithRebootsTest [GOOD] |86.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |86.5%| [LD] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/ydb-core-blobstorage-nodewarden-ut |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 [FAIL] >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> TTicketParserTest::NebiusAuthenticationRetryErrorImmediately [GOOD] >> TTicketParserTest::NebiusAccessKeySignatureUnsupported |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::LargeMsgCompactificationWithRebootsTest [GOOD] Test command err: 2026-01-04T02:31:34.422815Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader 
for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-04T02:31:34.502879Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:31:34.502952Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:31:34.503017Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:31:34.503077Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-04T02:31:34.540602Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2026-01-04T02:31:34.540692Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:31:34.564323Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2026-01-04T02:31:34.564527Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:31:34.566252Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2026-01-04T02:31:34.566429Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 
2026-01-04T02:31:34.566498Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:31:34.566983Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:31:34.567331Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-04T02:31:34.570201Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:31:34.570269Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2026-01-04T02:31:34.570321Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-04T02:31:34.570386Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:31:34.571919Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:31:34.573382Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-04T02:31:34.573434Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:31:34.573469Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:31:34.573524Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:31:34.573567Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:31:34.573618Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:31:34.573678Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-01-04T02:31:34.573738Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-01-04T02:31:34.573776Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:31:34.573824Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:31:34.573867Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:31:34.574137Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:31:34.574175Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: 
[72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user __ydb_compaction_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2026-01-04T02:31:34.574226Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:31:34.574409Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:31:34.574725Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-04T02:31:34.576670Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:31:34.576712Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 2026-01-04T02:31:34.576745Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2026-01-04T02:31:34.576781Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:31:34.578761Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:31:34.579785Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. Count 0 2026-01-04T02:31:34.579822Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:31:34.579855Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:31:34.579896Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:31:34.579921Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:31:34.579957Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:31:34.580004Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2026-01-04T02:31:34.580045Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2026-01-04T02:31:34.580072Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:31:34.580098Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-04T02:31:34.580129Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-04T02:31:34.580308Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 
'rt3.dc1--asdfgs--topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:31:34.580356Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user __ydb_compaction_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2026-01-04T02:31:34.580412Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:31:34.580615Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:31:34.580795Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:31:34.580970Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:31:34.581072Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937 ... UE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 0 count 0 size 8191611 from pos 0 cbcount 16 2026-01-04T02:31:47.194956Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:31:47.194995Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:31:47.195031Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:31:47.195066Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:31:47.195102Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:31:47.195135Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:31:47.195169Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:31:47.195200Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:31:47.195233Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 493288 from pos 0 cbcount 1 2026-01-04T02:31:47.195402Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2026-01-04T02:31:47.195444Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2026-01-04T02:31:47.195487Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 3:0 isTruncatedBlob 1 2026-01-04T02:31:47.219302Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 
3:0 isTruncatedBlob 1 hasNonZeroParts 1 isMiddlePartOfMessage 0 2026-01-04T02:31:47.219459Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:350: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Send EvRead (Compact state) from offset: 3:16 2026-01-04T02:31:47.219875Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:891: [72057594037927937][Partition][0][StateIdle] read cookie 28 Topic 'rt3.dc1--asdfgs--topic' partition 0 user __ydb_compaction_consumer offset 3 partno 16 count 4294967295 size 4294967295 endOffset 4 max time lag 0ms effective offset 3 2026-01-04T02:31:47.220371Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72057594037927937][Partition][0][StateIdle] read cookie 28 added 2 blobs, size 12781161 count 1 last offset 3, current partition end offset: 4 2026-01-04T02:31:47.220424Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72057594037927937][Partition][0][StateIdle] Reading cookie 28. Send blob request. 2026-01-04T02:31:47.220549Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 16 count 0 parts_count 16 source 0 size 8191635 accessed 1 times before, last time 1970-01-01T00:00:00.000000Z 2026-01-04T02:31:47.220602Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 32 count 1 parts_count 8 source 1 size 4589526 accessed 4 times before, last time 1970-01-01T00:00:00.000000Z 2026-01-04T02:31:47.220658Z node 3 :PERSQUEUE DEBUG: read.h:126: [72057594037927937][PQCacheProxy]Reading cookie 28. All 2 blobs are from cache. 2026-01-04T02:31:47.220738Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 2 blobs 2026-01-04T02:31:47.220884Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 0 count 0 size 8191611 from pos 0 cbcount 16 2026-01-04T02:31:47.221041Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:31:47.221078Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:31:47.221115Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:31:47.221152Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:31:47.221186Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:31:47.221219Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:31:47.221254Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:31:47.221286Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:31:47.221317Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 493288 from pos 0 cbcount 1 2026-01-04T02:31:47.221465Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72057594037927937' partition 0 offset 3 partno 16 count 0 parts 16 suffix '0' 2026-01-04T02:31:47.221513Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 3 partno 32 count 1 parts 8 suffix '0' 2026-01-04T02:31:47.221610Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2026-01-04T02:31:47.221650Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2026-01-04T02:31:47.221693Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 3:16 isTruncatedBlob 1 2026-01-04T02:31:47.239997Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 3:16 isTruncatedBlob 1 hasNonZeroParts 1 isMiddlePartOfMessage 1 2026-01-04T02:31:47.272314Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:621: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 add CmdDeleteRange for key d0000000000_00000000000000000000_00032_0000000001_00015 2026-01-04T02:31:47.272493Z node 3 :PERSQUEUE DEBUG: partition.cpp:4515: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquire RW Lock 2026-01-04T02:31:47.272791Z node 3 :PERSQUEUE DEBUG: partition.cpp:4523: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquired RW Lock, send compacter KV request 2026-01-04T02:31:47.272856Z node 3 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:31:47.272996Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 0 size 271 2026-01-04T02:31:47.273093Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 32 count 1 size 187 2026-01-04T02:31:47.273182Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 3 partNo 0 count 0 size 8191590 2026-01-04T02:31:47.273221Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000002_00016_0000000000_00016(+) to d0000000000_00000000000000000002_00016_0000000000_00016(+) 2026-01-04T02:31:47.273257Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000000_00032_0000000001_00015(+) to d0000000000_00000000000000000000_00032_0000000001_00015(+) 2026-01-04T02:31:47.302586Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 2 count 0 size 271 actorID [3:139:2142] 2026-01-04T02:31:47.302664Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. Partition 0 offset 2 count 1 size 4589526 actorID [3:139:2142] is actual 1 2026-01-04T02:31:47.302743Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 2 count 1 size 187 actorID [3:139:2142] 2026-01-04T02:31:47.302777Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. 
Partition 0 offset 3 count 0 size 8191635 actorID [3:139:2142] is actual 1 2026-01-04T02:31:47.302850Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 3 count 0 size 8191590 actorID [3:139:2142] 2026-01-04T02:31:47.303002Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 2 partno 32 count 1 parts 8 suffix '0' size 4589526 2026-01-04T02:31:47.304725Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 3 partno 0 count 0 parts 16 suffix '0' size 8191635 2026-01-04T02:31:47.314873Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 2 partno 0 count 0 parts 16 suffix '0' size 271 2026-01-04T02:31:47.314978Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 2 partno 32 count 1 parts 8 suffix '0' size 187 2026-01-04T02:31:47.315022Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 3 partno 0 count 0 parts 16 suffix '0' size 8191590 2026-01-04T02:31:47.315554Z node 3 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:31:47.315603Z node 3 :PERSQUEUE DEBUG: partition.cpp:2161: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got compacter KV response, release RW lock 2026-01-04T02:31:47.315647Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:125: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Process KV response 2026-01-04T02:31:47.325374Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [3:402:2377], now have 1 active actors on pipe 2026-01-04T02:31:47.325530Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-04T02:31:47.325575Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-04T02:31:47.325690Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 Got offset = 3 for user __ydb_compaction_consumer 2026-01-04T02:31:47.326088Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [3:404:2379], now have 1 active actors on pipe Got start offset = 2 |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest |86.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/bscontroller/ut_bscontroller/unittest >> BsControllerConfig::MergeBoxes [GOOD] Test command err: Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11114:2156] recipient: [1:10915:2167] IGNORE Leader for TabletID 72057594037932033 is [0:0:0] sender: [1:11114:2156] recipient: [1:10915:2167] Leader for TabletID 72057594037932033 is [1:11116:2169] sender: [1:11117:2156] recipient: [1:10915:2167] 2026-01-04T02:30:24.563747Z node 1 :BS_CONTROLLER DEBUG: 
{BSC05@impl.h:2080} StateInit event Type# 268828672 Event# NKikimr::TEvTablet::TEvBoot 2026-01-04T02:30:24.564986Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828673 Event# NKikimr::TEvTablet::TEvRestored 2026-01-04T02:30:24.565353Z node 1 :BS_CONTROLLER DEBUG: {BSC22@console_interaction.cpp:14} Console interaction started 2026-01-04T02:30:24.567886Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268828684 Event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:30:24.568372Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 268639244 Event# NKikimr::TEvNodeWardenStorageConfig 2026-01-04T02:30:24.568798Z node 1 :BS_CONTROLLER DEBUG: {BSC05@impl.h:2080} StateInit event Type# 131082 Event# NActors::TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:24.568828Z node 1 :BS_CONTROLLER DEBUG: {BSC01@bsc.cpp:829} Handle TEvInterconnect::TEvNodesInfo 2026-01-04T02:30:24.569281Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS01@init_scheme.cpp:17} TTxInitScheme Execute 2026-01-04T02:30:24.578849Z node 1 :BS_CONTROLLER DEBUG: {BSCTXIS03@init_scheme.cpp:44} TTxInitScheme Complete 2026-01-04T02:30:24.579004Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM01@migrate.cpp:190} Execute tx 2026-01-04T02:30:24.579201Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM02@migrate.cpp:251} Complete tx IncompatibleData# false 2026-01-04T02:30:24.579316Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:24.579416Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxTrimUnusedSlots 2026-01-04T02:30:24.579483Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion Leader for TabletID 72057594037932033 is [1:11116:2169] sender: [1:11138:2156] recipient: [1:110:2157] 2026-01-04T02:30:24.597746Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateSchemaVersion 2026-01-04T02:30:24.597934Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:24.665622Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxGenerateInstanceId 2026-01-04T02:30:24.665781Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:24.665903Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateStaticPDiskInfo 2026-01-04T02:30:24.665983Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:24.666093Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxFillInNonNullConfigForPDisk 2026-01-04T02:30:24.666158Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# 
NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:24.666189Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxDropDriveStatus 2026-01-04T02:30:24.666250Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:24.678692Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateCompatibilityInfo 2026-01-04T02:30:24.678829Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM03@migrate.cpp:37} Execute tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:24.694626Z node 1 :BS_CONTROLLER DEBUG: {BSCTXM04@migrate.cpp:43} Complete tx from queue Type# NKikimr::NBsController::TBlobStorageController::TTxMigrate::TTxUpdateEnableConfigV2 2026-01-04T02:30:24.694779Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE01@load_everything.cpp:21} TTxLoadEverything Execute 2026-01-04T02:30:24.696183Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE03@load_everything.cpp:705} TTxLoadEverything Complete 2026-01-04T02:30:24.696231Z node 1 :BS_CONTROLLER DEBUG: {BSC09@impl.h:2211} LoadFinished 2026-01-04T02:30:24.696463Z node 1 :BS_CONTROLLER DEBUG: {BSC18@console_interaction.cpp:31} Console connection service started 2026-01-04T02:30:24.696509Z node 1 :BS_CONTROLLER DEBUG: {BSCTXLE04@load_everything.cpp:710} TTxLoadEverything InitQueue processed 2026-01-04T02:30:24.724994Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "/dev/disk0" } Drive { Path: "/dev/disk1" } Drive { Path: "/dev/disk2" } Drive { Path: "/dev/disk3" } Drive { Path: "/dev/disk4" } Drive { Path: "/dev/disk5" } Drive { Path: "/dev/disk6" } Drive { Path: "/dev/disk7" } Drive { Path: "/dev/disk8" Type: SSD } Drive { Path: "/dev/disk9" Type: SSD } Drive { Path: "/dev/disk10" Type: SSD } Drive { Path: "/dev/disk11" Type: SSD } Drive { Path: "/dev/disk12" Type: SSD } Drive { Path: "/dev/disk13" Type: SSD } Drive { Path: "/dev/disk14" Type: SSD } Drive { Path: "/dev/disk15" Type: SSD } } } Command { DefineBox { BoxId: 1 Name: "test box" Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12002 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12003 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12004 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12005 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12006 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12007 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12008 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12009 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12010 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12011 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12012 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12013 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12014 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12015 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12016 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12017 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12018 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12019 } HostConfigId: 1 } Host { 
Key { Fqdn: "::1" IcPort: 12020 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12021 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12022 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12023 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12024 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12025 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12026 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12027 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12028 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12029 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12030 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12031 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12032 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12033 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12034 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12035 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12036 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12037 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12038 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12039 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12040 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12041 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12042 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12043 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12044 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12045 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12046 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12047 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12048 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12049 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12050 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12051 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12052 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12053 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12054 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12055 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12056 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12057 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12058 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12059 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12060 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12061 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12062 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12063 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12064 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12065 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12066 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12067 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12068 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12069 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12070 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12071 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12072 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12073 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12074 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12075 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12076 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12077 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12078 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12079 } HostConfigId: 1 } Host { Key { Fqdn: 
"::1" IcPort: 12080 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12081 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12082 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12083 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12084 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12085 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12086 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12087 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12088 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12089 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12090 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12091 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12092 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12093 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12094 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12095 } HostConfigId: 1 } Host { Key { Fqdn: "::1" IcPort: 12096 } HostConfigId: 1 } Host { Ke ... } Create new pdisk PDiskId# 275:1002 Path# /dev/disk3 2026-01-04T02:31:38.546526Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 276:1000 Path# /dev/disk1 2026-01-04T02:31:38.546563Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 276:1001 Path# /dev/disk2 2026-01-04T02:31:38.546603Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 276:1002 Path# /dev/disk3 2026-01-04T02:31:38.546631Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 277:1000 Path# /dev/disk1 2026-01-04T02:31:38.546659Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 277:1001 Path# /dev/disk2 2026-01-04T02:31:38.546687Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 277:1002 Path# /dev/disk3 2026-01-04T02:31:38.546716Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 278:1000 Path# /dev/disk1 2026-01-04T02:31:38.546745Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 278:1001 Path# /dev/disk2 2026-01-04T02:31:38.546792Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 278:1002 Path# /dev/disk3 2026-01-04T02:31:38.546842Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 279:1000 Path# /dev/disk1 2026-01-04T02:31:38.546873Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 279:1001 Path# /dev/disk2 2026-01-04T02:31:38.546910Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 279:1002 Path# /dev/disk3 2026-01-04T02:31:38.546948Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 280:1000 Path# /dev/disk1 2026-01-04T02:31:38.546981Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 280:1001 Path# /dev/disk2 2026-01-04T02:31:38.547010Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 280:1002 Path# /dev/disk3 2026-01-04T02:31:38.547057Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 281:1000 Path# /dev/disk1 2026-01-04T02:31:38.547090Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 281:1001 
Path# /dev/disk2 2026-01-04T02:31:38.547122Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 281:1002 Path# /dev/disk3 2026-01-04T02:31:38.547149Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 282:1000 Path# /dev/disk1 2026-01-04T02:31:38.547179Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 282:1001 Path# /dev/disk2 2026-01-04T02:31:38.547211Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 282:1002 Path# /dev/disk3 2026-01-04T02:31:38.547240Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 283:1000 Path# /dev/disk1 2026-01-04T02:31:38.547267Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 283:1001 Path# /dev/disk2 2026-01-04T02:31:38.547294Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 283:1002 Path# /dev/disk3 2026-01-04T02:31:38.547321Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 284:1000 Path# /dev/disk1 2026-01-04T02:31:38.547368Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 284:1001 Path# /dev/disk2 2026-01-04T02:31:38.547401Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 284:1002 Path# /dev/disk3 2026-01-04T02:31:38.547428Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 285:1000 Path# /dev/disk1 2026-01-04T02:31:38.547455Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 285:1001 Path# /dev/disk2 2026-01-04T02:31:38.547483Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 285:1002 Path# /dev/disk3 2026-01-04T02:31:38.547510Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 286:1000 Path# /dev/disk1 2026-01-04T02:31:38.547540Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 286:1001 Path# /dev/disk2 2026-01-04T02:31:38.547590Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 286:1002 Path# /dev/disk3 2026-01-04T02:31:38.547623Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 287:1000 Path# /dev/disk1 2026-01-04T02:31:38.547650Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 287:1001 Path# /dev/disk2 2026-01-04T02:31:38.547679Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 287:1002 Path# /dev/disk3 2026-01-04T02:31:38.547714Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 288:1000 Path# /dev/disk1 2026-01-04T02:31:38.547754Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 288:1001 Path# /dev/disk2 2026-01-04T02:31:38.547782Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 288:1002 Path# /dev/disk3 2026-01-04T02:31:38.547810Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 289:1000 Path# /dev/disk1 2026-01-04T02:31:38.547838Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 289:1001 
Path# /dev/disk2 2026-01-04T02:31:38.547866Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 289:1002 Path# /dev/disk3 2026-01-04T02:31:38.547893Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 290:1000 Path# /dev/disk1 2026-01-04T02:31:38.547923Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 290:1001 Path# /dev/disk2 2026-01-04T02:31:38.547952Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 290:1002 Path# /dev/disk3 2026-01-04T02:31:38.547982Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 291:1000 Path# /dev/disk1 2026-01-04T02:31:38.548033Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 291:1001 Path# /dev/disk2 2026-01-04T02:31:38.548078Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 291:1002 Path# /dev/disk3 2026-01-04T02:31:38.548135Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 292:1000 Path# /dev/disk1 2026-01-04T02:31:38.548186Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 292:1001 Path# /dev/disk2 2026-01-04T02:31:38.548218Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 292:1002 Path# /dev/disk3 2026-01-04T02:31:38.548251Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 293:1000 Path# /dev/disk1 2026-01-04T02:31:38.548281Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 293:1001 Path# /dev/disk2 2026-01-04T02:31:38.548315Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 293:1002 Path# /dev/disk3 2026-01-04T02:31:38.548344Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 294:1000 Path# /dev/disk1 2026-01-04T02:31:38.548386Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 294:1001 Path# /dev/disk2 2026-01-04T02:31:38.548424Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 294:1002 Path# /dev/disk3 2026-01-04T02:31:38.548453Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 295:1000 Path# /dev/disk1 2026-01-04T02:31:38.548482Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 295:1001 Path# /dev/disk2 2026-01-04T02:31:38.548511Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 295:1002 Path# /dev/disk3 2026-01-04T02:31:38.548539Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 296:1000 Path# /dev/disk1 2026-01-04T02:31:38.548569Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 296:1001 Path# /dev/disk2 2026-01-04T02:31:38.548610Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 296:1002 Path# /dev/disk3 2026-01-04T02:31:38.548653Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 297:1000 Path# /dev/disk1 2026-01-04T02:31:38.548700Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 297:1001 
Path# /dev/disk2 2026-01-04T02:31:38.548734Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 297:1002 Path# /dev/disk3 2026-01-04T02:31:38.548775Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 298:1000 Path# /dev/disk1 2026-01-04T02:31:38.548814Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 298:1001 Path# /dev/disk2 2026-01-04T02:31:38.548845Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 298:1002 Path# /dev/disk3 2026-01-04T02:31:38.548876Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 299:1000 Path# /dev/disk1 2026-01-04T02:31:38.548905Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 299:1001 Path# /dev/disk2 2026-01-04T02:31:38.548933Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 299:1002 Path# /dev/disk3 2026-01-04T02:31:38.548961Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 300:1000 Path# /dev/disk1 2026-01-04T02:31:38.548989Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 300:1001 Path# /dev/disk2 2026-01-04T02:31:38.549020Z node 251 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 300:1002 Path# /dev/disk3 2026-01-04T02:31:38.854233Z node 251 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.310343s 2026-01-04T02:31:38.854448Z node 251 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.310583s 2026-01-04T02:31:38.894940Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { MergeBoxes { OriginBoxId: 2 OriginBoxGeneration: 1 TargetBoxId: 1 TargetBoxGeneration: 1 StoragePoolIdMap { OriginStoragePoolId: 1 TargetStoragePoolId: 2 } } } } 2026-01-04T02:31:38.919070Z node 251 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { ReadBox { BoxId: 1 } } Command { QueryBaseConfig { } } } |86.5%| [TM] {BAZEL_UPLOAD} ydb/core/mind/bscontroller/ut_bscontroller/unittest |86.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |86.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |86.5%| [LD] {RESULT} $(B)/ydb/core/tablet_flat/benchmark/core_tablet_flat_benchmark |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> DataStreams::TestReservedResourcesMetering >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets-UseAuthToken [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets-UseAuthToken |86.6%| [TA] $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [FAIL] |86.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... results_accumulator.log} >> DataStreams::TestNonChargeableUser >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] >> TTicketParserTest::AuthorizationUnavailable [GOOD] >> DataStreams::TestUpdateStream >> DataStreams::TestPutRecordsOfAnauthorizedUser >> TTicketParserTest::LoginEmptyTicketBad [GOOD] >> Cdc::ShouldBreakLocksOnConcurrentAlterStream [GOOD] >> Cdc::ResolvedTimestampsContinueAfterMerge >> Yq_1::Basic [GOOD] >> Yq_1::Basic_EmptyList ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthenticationRetryErrorImmediately [GOOD] Test command err: 2026-01-04T02:31:07.891365Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328358156029772:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:07.891412Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:07.981069Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:08.377457Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:08.424306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:08.424407Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:08.525282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:08.559362Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:08.599847Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:08.681909Z node 1 :TOKEN_MANAGER DEBUG: vm_metadata_token_provider_handler.cpp:62: Updating vm metadata token 2026-01-04T02:31:08.706475Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-01-04T02:31:08.706572Z node 1 :TOKEN_MANAGER TRACE: token_manager.cpp:100: Handle TEvPrivate::TEvUpdateToken 2026-01-04T02:31:08.706588Z node 1 :TOKEN_MANAGER DEBUG: token_manager.cpp:105: Update token for provider# token-for-access-service 2026-01-04T02:31:08.706602Z node 1 :TOKEN_MANAGER TRACE: token_manager.cpp:87: Handle NotifySubscribers 2026-01-04T02:31:08.706618Z node 1 :TOKEN_MANAGER DEBUG: token_manager.cpp:90: Notify subscribers# token-for-access-service 2026-01-04T02:31:08.706654Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cdd83bf07d0] Connect to grpc://localhost:64097 2026-01-04T02:31:08.726967Z node 1 :GRPC_CLIENT DEBUG: 
grpc_service_client.h:122: [7cdd83bf07d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-04T02:31:08.804475Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:2377: Handle TEvTokenManager::TEvUpdateToken: id# token-for-access-service, Status.code# Success, Status.Msg# OK, Token# ydb-****ount (05D5F592) 2026-01-04T02:31:08.886037Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cdd83bf07d0] Status 16 Unauthenticated service 2026-01-04T02:31:08.886551Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Unauthenticated service" retryable: 1 2026-01-04T02:31:08.886583Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Unauthenticated service' 2026-01-04T02:31:08.886621Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:508: Create BulkAuthorize request with token: ydb-****ount (05D5F592) 2026-01-04T02:31:08.886715Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-01-04T02:31:08.886959Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdd83bf07d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-04T02:31:08.901767Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cdd83bf07d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:31:08.904584Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-04T02:31:08.946081Z node 1 :TOKEN_MANAGER TRACE: token_manager.cpp:66: Handle refresh tokens 2026-01-04T02:31:08.946190Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:13.409497Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328381285576604:2091];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:13.417547Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:13.767198Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:13.775464Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:13.785919Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591328381285576538:2081] 1767493873350952 != 1767493873350955 2026-01-04T02:31:13.802664Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:13.802745Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-04T02:31:13.825960Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:13.963063Z node 2 :TOKEN_MANAGER DEBUG: vm_metadata_token_provider_handler.cpp:62: Updating vm metadata token 2026-01-04T02:31:13.963242Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-01-04T02:31:13.963266Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:100: Handle TEvPrivate::TEvUpdateToken 2026-01-04T02:31:13.963275Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:105: Update token for provider# token-for-access-service 2026-01-04T02:31:13.963293Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:87: Handle NotifySubscribers 2026-01-04T02:31:13.963300Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:90: Notify subscribers# token-for-access-service 2026-01-04T02:31:13.963327Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cdd83c21b50] Connect to grpc://localhost:4320 2026-01-04T02:31:13.964215Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdd83c21b50] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-04T02:31:13.979223Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:2377: Handle TEvTokenManager::TEvUpdateToken: id# token-for-access-service, Status.code# Success, Status.Msg# OK, Token# ydb-****ount (05D5F592) 2026-01-04T02:31:14.053182Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:14.070246Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cdd83c21b50] Status 16 Unauthenticated service 2026-01-04T02:31:14.070618Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1210: Ticket **** (8E120919) permission something.read now has a retryable error "Unauthenticated service" retryable: 1 2026-01-04T02:31:14.070643Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Unauthenticated service' 2026-01-04T02:31:14.070696Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:508: Create BulkAuthorize request with token: ydb-****ount (05D5F592) 2026-01-04T02:31:14.070744Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( something.read) 2026-01-04T02:31:14.070973Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdd83c21b50] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-04T02:31:14.074278Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cdd83c21b50] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:31:14.074660Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-04T02:31:14.406693Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:66: Handle refresh tokens 2026-01-04T02:31:14.411489Z node 2 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:15.409795Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:66: Handle refresh tokens 2026-01-04T02:31:16.410091Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:66: Handle refresh tokens 2026-01-04T02:31:16.410125Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:70: Refresh token for provider# token-for-access-service 2026-01-04T02:31:16.410177Z node 2 :TOKEN_MANAGER TRACE: vm_metadata_token_provider_handler.cpp:25: Handle send request to vm metaservice 2026-01-04T02:31:16.413142Z node 2 :TOKEN_MANAGER DEBUG: vm_metadata_token_provider_handler.cpp:62: Updating vm metadata token 2026-01-04T02:31:16.413286Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:508: Create BulkAuthorize request with token: ydb-****ount (05D5F592) 2026-01-04T02:31:16.413334Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (9D42FAED) asking for AccessServiceBulkAuthorization( something.read) 2026-01-04T02:31:16.413354Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:100: Handle TEvPrivate::TEvUpdateToken 2026-01-04T02:31:16.413363Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:105: Update token for provider# token-for-access-service 2026-01-04T02:31:16.413386Z node 2 :TOKEN_MANAGER TRACE: token_manager.cpp:87: Handle NotifySubscribers 2026-01-04T02:31:16.413399Z node 2 :TOKEN_MANAGER DEBUG: token_manager.cpp:90: Notify subscribers# token-for-access-service 2026-01-04T02:31:16.413592Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdd83c21b50] Request BulkAuthorizeRequest { iam_token: "**** (9D42FAED)" actions { items { resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } permission: "something.read" } } result_filter: ALL_FAILED } 2026-01-04T02:31:16.414243Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:2377: Handle TEvTokenManager::TEvUpdateToken: id# token-for-access-service, Status.code# Success, Status.Msg# OK, Token# new-****ount (82D66F55) 2026-01-04T02:31:16.416237Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cdd83c21b50] Status 16 Unauthenticated serv ... 
1-04T02:31:27.277990Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket ApiK****alid (AB5B5EA8) asking for AccessServiceAuthentication 2026-01-04T02:31:27.278065Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cdd83cc1d50] Connect to grpc://localhost:2563 2026-01-04T02:31:27.278954Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdd83cc1d50] Request AuthenticateRequest { api_key: "ApiK****alid (AB5B5EA8)" } 2026-01-04T02:31:27.290177Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cdd83cc1d50] Response AuthenticateResponse { subject { user_account { id: "ApiKey-value-valid" } } } 2026-01-04T02:31:27.298715Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket ApiK****alid (AB5B5EA8) () has now valid token of ApiKey-value-valid@as 2026-01-04T02:31:32.190383Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591328462042373050:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:32.199121Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:32.267630Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:32.425921Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591328462042373016:2081] 1767493892167081 != 1767493892167084 2026-01-04T02:31:32.441926Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:32.493110Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:32.493207Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:32.495657Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:32.498886Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:32.560012Z node 5 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-04T02:31:32.598817Z node 5 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-04T02:31:32.654425Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:32.654453Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:32.654459Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:32.654537Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:32.696107Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:32.949510Z node 5 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:32.964135Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:32.970280Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-01-04T02:31:32.970346Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cdd83d12fd0] Connect to grpc://localhost:9714 2026-01-04T02:31:32.971682Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdd83d12fd0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-01-04T02:31:33.001243Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cdd83d12fd0] Status 14 Service Unavailable 2026-01-04T02:31:33.001736Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:33.001778Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-01-04T02:31:33.001989Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdd83d12fd0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-01-04T02:31:33.010477Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cdd83d12fd0] Status 14 Service Unavailable 2026-01-04T02:31:33.010844Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:33.198908Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:34.230004Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-01-04T02:31:34.230064Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-01-04T02:31:34.230283Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdd83d12fd0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-01-04T02:31:34.241501Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cdd83d12fd0] Status 14 Service Unavailable 2026-01-04T02:31:34.243401Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:35.234076Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-01-04T02:31:35.234123Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-01-04T02:31:35.234334Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdd83d12fd0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-01-04T02:31:35.246078Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cdd83d12fd0] Response 
AuthenticateResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:31:35.246754Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2026-01-04T02:31:37.173955Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591328462042373050:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:37.174060Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:46.087730Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7591328523947636047:2223];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:46.091877Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:46.133225Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:46.267328Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:46.269656Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:46.269745Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:46.282938Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:46.352956Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:46.352986Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:46.352993Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:46.353107Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:46.408775Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:46.672458Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:46.690309Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:46.692491Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-01-04T02:31:46.692562Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cdd83c8fbd0] Connect to grpc://localhost:10911 
2026-01-04T02:31:46.693586Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdd83c8fbd0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-01-04T02:31:46.709026Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cdd83c8fbd0] Status 14 Service Unavailable 2026-01-04T02:31:46.710068Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:46.710112Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthentication 2026-01-04T02:31:46.710344Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cdd83c8fbd0] Request AuthenticateRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } } 2026-01-04T02:31:46.715491Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cdd83c8fbd0] Response AuthenticateResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:31:46.718099Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2026-01-04T02:31:47.051671Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> DataShardVolatile::UpsertBrokenLockArbiter+UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiter-UseSink >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] >> DataStreams::TestControlPlaneAndMeteringData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::AuthorizationUnavailable [GOOD] Test command err: 2026-01-04T02:31:07.955710Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328355101290788:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:07.955762Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:08.438003Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:08.455039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:08.455138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:08.584597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:08.680554Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328355101290657:2081] 1767493867844764 != 1767493867844767 2026-01-04T02:31:08.698862Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:08.760619Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:08.989860Z 
node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:08.989887Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:08.989897Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:08.989997Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:08.995196Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:09.281493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:09.289262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:09.291681Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-01-04T02:31:09.291838Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c6d3c0f0450] Connect to grpc://localhost:16055 2026-01-04T02:31:09.319291Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c6d3c0f0450] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:09.365233Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c6d3c0f0450] Status 14 Service Unavailable 2026-01-04T02:31:09.365711Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2026-01-04T02:31:09.365747Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:09.365777Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-01-04T02:31:09.366069Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c6d3c0f0450] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:09.375333Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c6d3c0f0450] Status 14 Service Unavailable 2026-01-04T02:31:09.375686Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2026-01-04T02:31:09.375713Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:10.966340Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 
2026-01-04T02:31:10.966385Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-01-04T02:31:10.966894Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c6d3c0f0450] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:10.973277Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c6d3c0f0450] Status 14 Service Unavailable 2026-01-04T02:31:10.973637Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2026-01-04T02:31:10.973668Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:12.938645Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328355101290788:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:12.938736Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:12.970294Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket AKIA****MPLE (B3EDC139) 2026-01-04T02:31:12.970343Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-01-04T02:31:12.973014Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c6d3c0f0450] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:12.975750Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c6d3c0f0450] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:31:12.976071Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a valid subject "user1@as" 2026-01-04T02:31:12.976211Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket AKIA****MPLE (B3EDC139) () has now valid token of user1@as 2026-01-04T02:31:23.128685Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:23.128846Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:31:23.134696Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:23.137973Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591328419818177080:2081] 1767493882670286 != 1767493882670289 2026-01-04T02:31:23.145119Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:31:23.145225Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:23.157375Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:23.426134Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:23.448898Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:23.448926Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:23.448941Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:23.449020Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:23.773789Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:24.231909Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:24.241441Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-01-04T02:31:24.241525Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c6d3c1a1750] Connect to grpc://localhost:10936 2026-01-04T02:31:24.242436Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c6d3c1a1750] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { signed_at { } } } permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:24.263419Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c6d3c1a1750] Status 14 Service Unavailable 2026-01-04T02:31:24.263792Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket AKIA****MPLE (B3EDC139) permission something.read now has a permanent error "Service Unavailable" retryable:1 2026-01-04T02:31:24.263827Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket AKIA****MPLE (B3EDC139) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:24.263850Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket AKIA****MPLE (B3EDC139) asking for AccessServiceAuthorization(something.read) 2026-01-04T02:31:24.264111Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c6d3c1a1750] Request AuthorizeRequest { signature { access_key_id: "AKIAIOSFODNN7EXAMPLE" v4_parameters { si ... 
26-01-04T02:31:40.974414Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:41.024149Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:41.284537Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:41.292434Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:41.295791Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-04T02:31:41.295870Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(somewhere.sleep) 2026-01-04T02:31:41.295897Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.list) 2026-01-04T02:31:41.295924Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2026-01-04T02:31:41.295950Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.eat) 2026-01-04T02:31:41.296008Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c6d3c101fd0] Connect to grpc://localhost:28802 2026-01-04T02:31:41.297117Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c6d3c101fd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:41.301103Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c6d3c101fd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "somewhere.sleep" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:41.301435Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c6d3c101fd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.list" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:41.301730Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c6d3c101fd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:41.301958Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c6d3c101fd0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.eat" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:41.316461Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c6d3c101fd0] Status 16 Access Denied 2026-01-04T02:31:41.316646Z node 5 
:TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.eat now has a permanent error "Access Denied" retryable:0 2026-01-04T02:31:41.318667Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c6d3c101fd0] Status 16 Access Denied 2026-01-04T02:31:41.318727Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c6d3c101fd0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:31:41.318863Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c6d3c101fd0] Status 16 Access Denied 2026-01-04T02:31:41.318960Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c6d3c101fd0] Status 16 Access Denied 2026-01-04T02:31:41.319173Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission somewhere.sleep now has a permanent error "Access Denied" retryable:0 2026-01-04T02:31:41.319232Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.write now has a valid subject "user1@as" 2026-01-04T02:31:41.319253Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.list now has a permanent error "Access Denied" retryable:0 2026-01-04T02:31:41.319276Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.read now has a permanent error "Access Denied" retryable:0 2026-01-04T02:31:41.319295Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:1068: Ticket **** (8E120919) asking for UserAccount(user1@as) 2026-01-04T02:31:41.319392Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c6d3c1026d0] Connect to grpc://localhost:25544 2026-01-04T02:31:41.320230Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c6d3c1026d0] Request GetUserAccountRequest { user_account_id: "user1" } 2026-01-04T02:31:41.350005Z node 5 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c6d3c1026d0] Response UserAccount { yandex_passport_user_account { login: "login1" } } 2026-01-04T02:31:41.350687Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of login1@passport 2026-01-04T02:31:41.516881Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:46.525958Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:31:46.541950Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:46.713482Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:46.713584Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:46.746046Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:46.766766Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:46.821942Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:47.010732Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:47.010767Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:47.010776Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:47.010869Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:47.357721Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:47.367354Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:47.370582Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-04T02:31:47.370660Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2026-01-04T02:31:47.370720Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c6d3c2606d0] Connect to grpc://localhost:15675 2026-01-04T02:31:47.371851Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c6d3c2606d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:47.375815Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c6d3c2606d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:47.390756Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c6d3c2606d0] Status 14 Service Unavailable 2026-01-04T02:31:47.390928Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.write now has a permanent error "Service Unavailable" retryable:1 2026-01-04T02:31:47.391205Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c6d3c2606d0] Response AuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:31:47.391290Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1465: Ticket **** (8E120919) permission something.read now has a valid subject "user1@as" 2026-01-04T02:31:47.391329Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:47.391361Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.read) 2026-01-04T02:31:47.391434Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:491: Ticket **** (8E120919) asking for AccessServiceAuthorization(something.write) 2026-01-04T02:31:47.391665Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c6d3c2606d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: 
"something.read" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:47.392321Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c6d3c2606d0] Request AuthorizeRequest { iam_token: "**** (8E120919)" permission: "something.write" resource_path { id: "bbbb4554" type: "ydb.database" } resource_path { id: "aaaa1234" type: "resource-manager.folder" } } 2026-01-04T02:31:47.396685Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c6d3c2606d0] Status 1 CANCELLED 2026-01-04T02:31:47.396920Z node 6 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c6d3c2606d0] Status 1 CANCELLED 2026-01-04T02:31:47.398195Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1488: Ticket **** (8E120919) permission something.read now has a retryable error "CANCELLED" 2026-01-04T02:31:47.398259Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1480: Ticket **** (8E120919) permission something.write now has a permanent error "CANCELLED" retryable:1 2026-01-04T02:31:47.398289Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'CANCELLED' 2026-01-04T02:31:47.462822Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_14_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 14] [GOOD] |86.6%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::LoginEmptyTicketBad [GOOD] Test command err: 2026-01-04T02:31:07.619890Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328355847020700:2177];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:07.619967Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:07.677119Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:08.243213Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:08.243302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:08.314572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:08.358084Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:08.405556Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:08.487484Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328355847020551:2081] 1767493867564209 
!= 1767493867564212 2026-01-04T02:31:08.662148Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:08.709680Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:08.726665Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:08.726683Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:08.726704Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:08.726792Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:09.249050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:09.341977Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:31:09.377465Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:31:09.377503Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:31:09.378637Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****NUqA (9F1DA21C) () has now valid token of user1 2026-01-04T02:31:09.378671Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root, A4 success 2026-01-04T02:31:12.359474Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328378804186360:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:12.359578Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:12.436423Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:12.499150Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:12.509609Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591328378804186330:2081] 1767493872353654 != 1767493872353657 2026-01-04T02:31:12.549707Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:12.549777Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:12.557961Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:12.800608Z node 2 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:12.810535Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:12.810570Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:12.810577Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:12.810663Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:13.394029Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:13.554058Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:13.683782Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:31:13.691093Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:31:13.691118Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:31:13.691820Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****Wl3A (E45C3742) () has now valid token of user1 2026-01-04T02:31:13.691832Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root, A4 success 2026-01-04T02:31:19.162534Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591328408072779490:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:19.162588Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:19.191020Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:19.381922Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:19.393763Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:19.393946Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:19.395691Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:19.401500Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591328408072779465:2081] 1767493879161473 != 1767493879161476 
2026-01-04T02:31:19.429936Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:19.519224Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:19.519250Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:19.519257Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:19.519333Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:19.679719Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:19.742837Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:19.750962Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:19.934238Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db /Root/Db1, token db /Root/Db1, DomainLoginOnly 0 2026-01-04T02:31:19.934276Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(2): /Root/Db1, /Root 2026-01-04T02:31:19.934330Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:874: CanInitLoginToken, database /Root/Db1, login state is not available yet, deffer token (eyJh****WFsg (1C17D7B7)) 2026-01-04T02:31:20.174717Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:20.934032Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root/Db1 keys 1 2026-01-04T02:31:20.934057Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:1557: Handle deferred tokens for database: /Root/Db1 2026-01-04T02:31:20.934248Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db /Root/Db1, token db /Root/Db1, DomainLoginOnly 0 2026-01-04T02:31:20.934263Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(2): /Root/Db1, /Root 2026-01-04T02:31:20.934906Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****WFsg (1C17D7B7) () has now valid token of user1 2026-01-04T02:31:20.934922Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root/Db1, A4 success 2026-01-04T02:31:21.027226Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db /Root/Db2, token db /Root/Db2, DomainLoginOnly 0 2026-01-04T02:31:21.027257Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(2): /Root/Db2, /Root 2026-01-04T02:31:21.027296Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:874: CanInitLoginToken, database /Root/Db2, login state is not available yet, deffer 
token (eyJh****F5cw (2503B0C2)) 2026-01-04T02:31:22.030240Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root/Db2 keys 1 2026-01-04T02:31:22.030262Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:1557: Handle deferred tokens for database: /Root/Db2 2026-01-04T02:31:22.030473Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db /Root/Db2, token db /Root/Db2, DomainLoginOnly 0 2026-01-04T02:31:22.030493Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(2): /Root/Db2, /Root 2026-01-04T02:31:22.031124Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****F5cw (2503B0C2) () has now valid token of user1 2026-01-04T02:31:22.031139Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root/Db2, A4 success 2026-01-04T02:31:23.392701Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591328427566966364:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:23.393231Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:23.445198Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:23.622625Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:23.628216Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:23.628298Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:23.678351Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:23.837997Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:23.858338Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:23.858368Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:23.858374Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:23.858439Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:24.123937Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:24.143542Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:24.245979Z node 4 
:TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:31:24.253993Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:31:24.254030Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:31:24.254805Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****YYmQ (F1BA8A38) () has now valid token of user1 2026-01-04T02:31:24.254820Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root, A4 success 2026-01-04T02:31:24.258088Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:31:24.398062Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:27.399375Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****YYmQ (F1BA8A38) 2026-01-04T02:31:27.399879Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****YYmQ (F1BA8A38) () has now valid token of user1 2026-01-04T02:31:28.392348Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591328427566966364:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:28.392418Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:32.414676Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****YYmQ (F1BA8A38) 2026-01-04T02:31:32.420222Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****YYmQ (F1BA8A38) () has now valid token of user1 2026-01-04T02:31:34.261317Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:31:36.426923Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****YYmQ (F1BA8A38) 2026-01-04T02:31:36.427274Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****YYmQ (F1BA8A38) () has now valid token of user1 2026-01-04T02:31:38.500444Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:31:38.500467Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:41.447450Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****YYmQ (F1BA8A38) 2026-01-04T02:31:41.447881Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****YYmQ (F1BA8A38) () has now valid token of user1 2026-01-04T02:31:45.328274Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591328518365946056:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:45.328316Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:45.377486Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:45.559972Z node 5 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:45.579157Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:45.579266Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:45.598747Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:45.783599Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:45.810642Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:45.810677Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:45.810687Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:45.810835Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:46.338107Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:46.552877Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:46.562129Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:31:46.650005Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:31:46.695926Z node 5 :TICKET_PARSER ERROR: ticket_parser_impl.h:987: Ticket **** (00000000): Ticket is empty |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::BrokenCredentialsProvider ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::NebiusAccessKeySignatureUnsupported [GOOD] Test command err: 2026-01-04T02:31:08.624319Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328361570195816:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:08.634574Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:09.209897Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:09.232022Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:09.232110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2026-01-04T02:31:09.318393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:09.391401Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:09.394540Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328361570195768:2081] 1767493868584485 != 1767493868584488 2026-01-04T02:31:09.401412Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:09.707581Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:09.734899Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:09.734921Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:09.734937Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:09.735039Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:10.258508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:10.559414Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 2026-01-04T02:31:10.577553Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:31:10.577587Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:31:10.590066Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****Kz6Q (0CAEE4C3) () has now retryable error message 'Security state is empty' 2026-01-04T02:31:10.590314Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:31:10.590340Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:31:10.590561Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****Kz6Q (0CAEE4C3) () has now retryable error message 'Security state is empty' 2026-01-04T02:31:10.590577Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:826: CanInitLoginToken, database /Root, A2 error Security state is empty 2026-01-04T02:31:10.590599Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:826: CanInitLoginToken, database /Root, A2 error Security state is empty 2026-01-04T02:31:10.590626Z node 1 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket eyJh****Kz6Q (0CAEE4C3): Security state is empty 2026-01-04T02:31:12.686717Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****Kz6Q (0CAEE4C3) 2026-01-04T02:31:12.686985Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: 
CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:31:12.687003Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:31:12.687230Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****Kz6Q (0CAEE4C3) () has now retryable error message 'Security state is empty' 2026-01-04T02:31:12.687246Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:826: CanInitLoginToken, database /Root, A2 error Security state is empty 2026-01-04T02:31:13.594585Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:31:13.629261Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328361570195816:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:13.629339Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:15.695309Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****Kz6Q (0CAEE4C3) 2026-01-04T02:31:15.695534Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:31:15.695561Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:31:15.696348Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****Kz6Q (0CAEE4C3) () has now valid token of user1 2026-01-04T02:31:15.696364Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:853: CanInitLoginToken, database /Root, A4 success 2026-01-04T02:31:19.734826Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****Kz6Q (0CAEE4C3) 2026-01-04T02:31:19.735213Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****Kz6Q (0CAEE4C3) () has now valid token of user1 2026-01-04T02:31:22.552392Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:22.552525Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:31:22.654026Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591328419467769120:2081] 1767493882342606 != 1767493882342609 2026-01-04T02:31:22.670854Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:22.689458Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:22.689956Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:22.761116Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:22.789939Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-04T02:31:23.016039Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:23.016061Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:23.016068Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:23.016146Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:23.362046Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:23.370442Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:23.374850Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:23.376576Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-04T02:31:23.376620Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c776d4176d0] Connect to grpc://localhost:6421 2026-01-04T02:31:23.379470Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c776d4176d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2026-01-04T02:31:23.405145Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c776d4176d0] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2026-01-04T02:31:23.405589Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-04T02:31:28.058017Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591328447681375700:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:28.058127Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:28.102155Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:28.184296Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:28.210796Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:28.210906Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:28.243570Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:28.354995Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:28.377858Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:28.377878Z node 3 :NET_CLASSIFIE ... ration part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:28.736494Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:31:28.747997Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-04T02:31:28.748082Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c776d5105d0] Connect to grpc://localhost:25298 2026-01-04T02:31:28.749018Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c776d5105d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2026-01-04T02:31:28.777968Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c776d5105d0] Status 14 Service Unavailable 2026-01-04T02:31:28.781081Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:28.781135Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-04T02:31:28.781316Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c776d5105d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2026-01-04T02:31:28.786924Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c776d5105d0] Status 14 Service Unavailable 2026-01-04T02:31:28.790139Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:29.093348Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:30.070068Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket **** (8E120919) 2026-01-04T02:31:30.070117Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-04T02:31:30.070296Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c776d5105d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" 2026-01-04T02:31:30.083998Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c776d5105d0] Status 14 Service Unavailable 2026-01-04T02:31:30.084575Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:31.070440Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket **** (8E120919) 2026-01-04T02:31:31.070476Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-04T02:31:31.070615Z node 3 :GRPC_CLIENT DEBUG: 
grpc_service_client.h:122: [7c776d5105d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2026-01-04T02:31:31.073396Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c776d5105d0] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2026-01-04T02:31:31.074080Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-04T02:31:33.061984Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591328447681375700:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:33.062071Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:41.746443Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591328504540775272:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:41.746529Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:41.780073Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:41.928371Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:41.929304Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:41.934057Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591328504540775240:2081] 1767493901744555 != 1767493901744558 2026-01-04T02:31:41.948201Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:41.948312Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:41.970286Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:42.177532Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:42.177567Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:42.177582Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:42.177685Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:42.221151Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:42.778004Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:42.985746Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:43.001839Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:43.004489Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-04T02:31:43.004551Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c776d45e4d0] Connect to grpc://localhost:9070 2026-01-04T02:31:43.005478Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c776d45e4d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response 14: "Service Unavailable" 2026-01-04T02:31:43.018048Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c776d45e4d0] Status 14 Service Unavailable 2026-01-04T02:31:43.018698Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket **** (8E120919) () has now retryable error message 'Service Unavailable' 2026-01-04T02:31:43.018734Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:568: Ticket **** (8E120919) asking for AccessServiceAuthentication 2026-01-04T02:31:43.018910Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c776d45e4d0] Request AuthenticateRequest { iam_token: "**** (8E120919)" } NebiusAccessService::Authenticate request iam_token: "user1" NebiusAccessService::Authenticate response account { user_account { id: "user1" } } 0: "" 2026-01-04T02:31:43.021354Z node 4 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c776d45e4d0] Response AuthenticateResponse { account { user_account { id: "user1" } } } 2026-01-04T02:31:43.021930Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-04T02:31:48.688427Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591328532322325075:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:48.688518Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:48.721688Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:48.865248Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:48.865351Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:48.866568Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:48.868264Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription 
[5:7591328532322325028:2081] 1767493908682690 != 1767493908682693 2026-01-04T02:31:48.918270Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:48.947071Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:49.106490Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:49.106516Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:49.106528Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:49.106622Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:49.436989Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:49.446827Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:31:49.455364Z node 5 :TICKET_PARSER ERROR: ticket_parser_impl.h:979: Ticket AKIA****MPLE (B3EDC139): Access key signature is not supported |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestDropStreamingQuery |86.6%| [TA] {RESULT} $(B)/ydb/core/mind/bscontroller/ut_bscontroller/test-results/unittest/{meta.json ... 
results_accumulator.log} |86.6%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/ydb-core-statistics-service-ut-ut_aggregation >> TBlobStorageWardenTest::TestHttpMonPage >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [FAIL] >> DataStreams::TestNonChargeableUser [GOOD] >> DataStreams::TestPutEmptyMessage >> Cdc::InitialScanRacyProgressAndDrop [GOOD] >> Cdc::EnqueueRequestProcessSend >> Yq_1::CreateQuery_With_Idempotency [GOOD] >> Yq_1::CreateQuery_Without_Connection >> BindQueue::Basic >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose >> TBlobStorageWardenTest::TestHttpMonPage [GOOD] >> TBlobStorageWardenTest::TestInferPDiskSlotCountPureFunction >> DataStreams::TestPutRecordsOfAnauthorizedUser [GOOD] >> DataStreams::TestPutRecordsWithRead >> test_alter_compression.py::TestAlterCompression::test[alter_compression] [GOOD] >> PrivateApi::Nodes [GOOD] >> DataStreams::TestUpdateStream [GOOD] >> DataStreams::Test_AutoPartitioning_Describe >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] [GOOD] >> TBlobStorageWardenTest::TestInferPDiskSlotCountPureFunction [GOOD] >> TBlobStorageWardenTest::TestInferPDiskSlotCountExplicitConfig >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets-UseAuthToken [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets+UseAuthToken >> TAccessServiceTest::PassRequestId >> DataStreams::TestControlPlaneAndMeteringData [GOOD] >> DataStreams::ChangeBetweenRetentionModes >> TAccessServiceTest::Authenticate >> TUserAccountServiceTest::Get ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> PrivateApi::Nodes [GOOD] Test command err: 2026-01-04T02:31:13.368307Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328381131974262:2263];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:13.373955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:13.517211Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown E0104 02:31:13.729698534 493685 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:31:13.729846735 493685 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:31:13.990257Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.117226Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:24949 2026-01-04T02:31:14.117322Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.220436Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.356911Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.362482Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.380274Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:14.414362Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.450792Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.482896Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.482986Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.531635Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.531712Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.531757Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.531803Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.531842Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.531947Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.531993Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.654699Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.656813Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.657020Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.657065Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.787100Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.788451Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.796441Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.800329Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.874612Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.874700Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.874733Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24949: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24949 } ] 2026-01-04T02:31:14.874794Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127 ... Database : . DatabaseId : /Root. }. Received channels info: 2026-01-04T02:31:55.333243Z node 7 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:413: SelfId: [7:7591328564948281478:2356], TxId: 281474976715677, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dhmnpajv7rz99afqvbb0z. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=YmMzMDcyNjUtYTEyNmJiZC05NzAxZmQ1Zi0xY2Q3ZGFmZg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. About to drain async output 0. FreeSpace: 67108864, allowedOvercommit: 4194304, toSend: 71303168, finished: 0 2026-01-04T02:31:55.333366Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:5019: TxId: 281474976715677, task: 1. Add data: 101 / 101 2026-01-04T02:31:55.333422Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4987: TxId: 281474976715677, task: 1. Send data=101, closed=1, bufferActorId=[7:7591328564948281473:2356] 2026-01-04T02:31:55.333445Z node 7 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:427: SelfId: [7:7591328564948281478:2356], TxId: 281474976715677, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dhmnpajv7rz99afqvbb0z. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=YmMzMDcyNjUtYTEyNmJiZC05NzAxZmQ1Zi0xY2Q3ZGFmZg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 101 2026-01-04T02:31:55.333469Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715677, task: 1. Tasks execution finished 2026-01-04T02:31:55.333481Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2128: SelfId: [7:7591328564948281478:2356], TxId: 281474976715677, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dhmnpajv7rz99afqvbb0z. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=YmMzMDcyNjUtYTEyNmJiZC05NzAxZmQ1Zi0xY2Q3ZGFmZg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Waiting finish of sink[0] 2026-01-04T02:31:55.333501Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7591328564948281478:2356], TxId: 281474976715677, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dhmnpajv7rz99afqvbb0z. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=YmMzMDcyNjUtYTEyNmJiZC05NzAxZmQ1Zi0xY2Q3ZGFmZg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-01-04T02:31:55.333525Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [7:7591328564948281478:2356], TxId: 281474976715677, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dhmnpajv7rz99afqvbb0z. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=YmMzMDcyNjUtYTEyNmJiZC05NzAxZmQ1Zi0xY2Q3ZGFmZg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: 2026-01-04T02:31:55.333544Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715677, task: 1. Tasks execution finished 2026-01-04T02:31:55.333552Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2128: SelfId: [7:7591328564948281478:2356], TxId: 281474976715677, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dhmnpajv7rz99afqvbb0z. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. 
SessionId : ydb://session/3?node_id=7&id=YmMzMDcyNjUtYTEyNmJiZC05NzAxZmQ1Zi0xY2Q3ZGFmZg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Waiting finish of sink[0] 2026-01-04T02:31:55.333599Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7591328564948281478:2356], TxId: 281474976715677, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dhmnpajv7rz99afqvbb0z. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=YmMzMDcyNjUtYTEyNmJiZC05NzAxZmQ1Zi0xY2Q3ZGFmZg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:31:55.333618Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715677, task: 1. Tasks execution finished 2026-01-04T02:31:55.333631Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2128: SelfId: [7:7591328564948281478:2356], TxId: 281474976715677, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dhmnpajv7rz99afqvbb0z. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=YmMzMDcyNjUtYTEyNmJiZC05NzAxZmQ1Zi0xY2Q3ZGFmZg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Waiting finish of sink[0] 2026-01-04T02:31:55.333708Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2941: SelfId: [7:7591328564948281473:2356], SessionActorId: [7:7591328547768410407:2356], Create new TableWriteActor for table `Root/yq/nodes` ([72057594046644480:48:1]). lockId=281474976715676. ActorId=[7:7591328564948281481:2356] 2026-01-04T02:31:55.333783Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:481: Table: `Root/yq/nodes` ([72057594046644480:48:1]), SessionActorId: [7:7591328547768410407:2356]Open: token=0 2026-01-04T02:31:55.334094Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:489: SelfId: [7:7591328564948281481:2356], Table: `Root/yq/nodes` ([72057594046644480:48:1]), SessionActorId: [7:7591328547768410407:2356]Write: token=0 2026-01-04T02:31:55.334203Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:496: SelfId: [7:7591328564948281481:2356], Table: `Root/yq/nodes` ([72057594046644480:48:1]), SessionActorId: [7:7591328547768410407:2356]Close: token=0 2026-01-04T02:31:55.334253Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4892: SelfId: [7:7591328564948281480:2356], TxId: 281474976715677, task: 1. TKqpForwardWriteActor recieve EvBufferWriteResult from [7:7591328564948281473:2356] 2026-01-04T02:31:55.334270Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4910: SelfId: [7:7591328564948281480:2356], TxId: 281474976715677, task: 1. Finished 2026-01-04T02:31:55.334295Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7591328564948281478:2356], TxId: 281474976715677, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dhmnpajv7rz99afqvbb0z. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=7&id=YmMzMDcyNjUtYTEyNmJiZC05NzAxZmQ1Zi0xY2Q3ZGFmZg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:31:55.334322Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715677, task: 1. Tasks execution finished 2026-01-04T02:31:55.334337Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [7:7591328564948281478:2356], TxId: 281474976715677, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dhmnpajv7rz99afqvbb0z. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. 
SessionId : ydb://session/3?node_id=7&id=YmMzMDcyNjUtYTEyNmJiZC05NzAxZmQ1Zi0xY2Q3ZGFmZg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-04T02:31:55.334426Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715677, task: 1. pass away 2026-01-04T02:31:55.334509Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715677;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-04T02:31:55.334984Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3511: SelfId: [7:7591328564948281473:2356], SessionActorId: [7:7591328547768410407:2356], Start immediate commit 2026-01-04T02:31:55.335000Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1081: SelfId: [7:7591328564948281481:2356], Table: `Root/yq/nodes` ([72057594046644480:48:1]), SessionActorId: [7:7591328547768410407:2356]SetImmediateCommit 2026-01-04T02:31:55.335017Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3409: SelfId: [7:7591328564948281473:2356], SessionActorId: [7:7591328547768410407:2356], Flush data 2026-01-04T02:31:55.335188Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1235: SelfId: [7:7591328564948281481:2356], Table: `Root/yq/nodes` ([72057594046644480:48:1]), SessionActorId: [7:7591328547768410407:2356]Send EvWrite to ShardID=72075186224037896, isPrepare=0, isImmediateCommit=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715676 DataShard: 72075186224037896 Generation: 1 Counter: 1 SchemeShard: 72057594046644480 PathId: 48, Size=212, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=3, BufferMemory=212 2026-01-04T02:31:55.366433Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:740: SelfId: [7:7591328564948281481:2356], Table: `Root/yq/nodes` ([72057594046644480:48:1]), SessionActorId: [7:7591328547768410407:2356]Recv EvWriteResult from ShardID=72075186224037896, Status=STATUS_COMPLETED, TxId=5, Locks= , Cookie=1 2026-01-04T02:31:55.366471Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1023: SelfId: [7:7591328564948281481:2356], Table: `Root/yq/nodes` ([72057594046644480:48:1]), SessionActorId: [7:7591328547768410407:2356]Got completed result TxId=5, TabletId=72075186224037896, Cookie=1, Mode=3, Locks= 2026-01-04T02:31:55.366540Z node 7 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4535: SelfId: [7:7591328564948281473:2356], SessionActorId: [7:7591328547768410407:2356], Committed TxId=0 2026-01-04T02:31:55.396887Z node 7 :FQ_PENDING_FETCHER ERROR: pending_fetcher.cpp:259: Error with GetTask:
: Error: GRpc error: (1): Cancelled on the server side
: Error: Grpc error response on endpoint [::]:26172 2026-01-04T02:31:55.746861Z node 7 :FQ_PENDING_FETCHER ERROR: pending_fetcher.cpp:259: Error with GetTask:
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::%5D:26172: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:26172 2026-01-04T02:31:56.136222Z node 7 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:400: DB Error, Status: TRANSPORT_UNAVAILABLE, Issues: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26172: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26172 } ], Query: --!syntax_v1 -- Query name: GetTask(read stale ro) PRAGMA TablePathPrefix("Root/yq"); DECLARE $tenant as String; DECLARE $from as Timestamp; DECLARE $tasks_limit as Uint64; SELECT `scope`, `query_id`, `owner`, `last_seen_at`, `retry_counter`, `retry_counter_updated_at`, `retry_rate`, `query_type`, `node` FROM `pending_small` WHERE `tenant` = $tenant AND `assigned_until` < $from ORDER BY `query_id` DESC LIMIT $tasks_limit; 2026-01-04T02:31:56.137583Z node 7 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage_impl.h:775: GetTaskRequest - GetTaskResult: {tenant: "TestTenant" owner_id: "1fca78db-7134d818-c34b58c0-66b75bd32" host: "ghrun-46vtxmsa7i" node_id: 7 } ERROR: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26172: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:26172 } ] 2026-01-04T02:31:56.137825Z node 7 :YQL_PRIVATE_PROXY ERROR: task_get.cpp:72: PrivateGetTask - Owner: 1fca78db-7134d818-c34b58c0-66b75bd32, Host: ghrun-46vtxmsa7i, Tenant: TestTenant, Failed with code: GENERIC_ERROR Details:
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:26172: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:26172
: Error: ControlPlane::GetTaskError 2026-01-04T02:31:56.738611Z node 7 :FQ_PENDING_FETCHER ERROR: pending_fetcher.cpp:259: Error with GetTask:
: Error: Client is stopped |86.6%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> TBlobStorageWardenTest::TestInferPDiskSlotCountExplicitConfig [GOOD] >> DataStreams::TestReservedResourcesMetering [GOOD] >> DataStreams::TestReservedStorageMetering >> FolderServiceTest::TFolderServiceAdapter >> DataStreams::TestPutEmptyMessage [GOOD] >> DataStreams::TestListStreamConsumers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestInferPDiskSlotCountExplicitConfig [GOOD] Test command err: 2026-01-04T02:31:57.269754Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:31:57.270209Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:31:57.272313Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:31:57.274136Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:31:57.278795Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:31:57.278897Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:31:57.279774Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/c5zo/001539/r3tmp/tmpqV7a6M/pdisk_1.dat 2026-01-04T02:31:58.674493Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:31:58.677663Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:31:58.678344Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:31:58.678419Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 
VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:31:58.679695Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:31:58.679762Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/c5zo/001539/r3tmp/tmp6LQo5e/pdisk_1.dat driveSize# 7900 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 8 SlotSizeInUnits# 1 relativeError# -0.0125 driveSize# 8000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 8 SlotSizeInUnits# 1 relativeError# 0 driveSize# 8100 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 8 SlotSizeInUnits# 1 relativeError# 0.0125 driveSize# 16000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 16 SlotSizeInUnits# 1 relativeError# 0 driveSize# 24000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 12 SlotSizeInUnits# 2 relativeError# 0 driveSize# 31000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 16 SlotSizeInUnits# 2 relativeError# -0.03125 driveSize# 50000 unitSizeInBytes# 1000 maxSlots# 16 -> ExpectedSlotCount# 13 SlotSizeInUnits# 4 relativeError# -0.03846153846 driveSize# 50000 unitSizeInBytes# 100 maxSlots# 16 -> ExpectedSlotCount# 16 SlotSizeInUnits# 32 relativeError# -0.0234375 driveSize# 18000 unitSizeInBytes# 200 maxSlots# 16 -> ExpectedSlotCount# 11 SlotSizeInUnits# 8 relativeError# 0.02272727273 driveSize# 1 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 1 relativeError# 0 driveSize# 2 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 2 relativeError# 0 driveSize# 3 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 4 relativeError# -0.25 driveSize# 4 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 4 relativeError# 0 driveSize# 5 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 4 relativeError# 0.25 driveSize# 6 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# -0.25 driveSize# 7 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# -0.125 driveSize# 8 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# 0 driveSize# 9 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# 0.125 driveSize# 10 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# 0.25 driveSize# 11 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 8 relativeError# 0.375 driveSize# 12 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# -0.25 driveSize# 13 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# -0.1875 driveSize# 14 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# -0.125 driveSize# 15 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# -0.0625 driveSize# 16 unitSizeInBytes# 1 
maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0 driveSize# 17 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.0625 driveSize# 18 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.125 driveSize# 19 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.1875 driveSize# 20 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.25 driveSize# 21 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.3125 driveSize# 22 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.375 driveSize# 23 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 16 relativeError# 0.4375 driveSize# 24 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.25 driveSize# 25 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.21875 driveSize# 26 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.1875 driveSize# 27 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.15625 driveSize# 28 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.125 driveSize# 29 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.09375 driveSize# 30 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.0625 driveSize# 31 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# -0.03125 driveSize# 32 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0 driveSize# 33 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.03125 driveSize# 34 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.0625 driveSize# 35 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.09375 driveSize# 36 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.125 driveSize# 37 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.15625 driveSize# 38 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.1875 driveSize# 39 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.21875 driveSize# 40 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.25 driveSize# 41 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.28125 driveSize# 42 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.3125 driveSize# 43 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.34375 driveSize# 44 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.375 driveSize# 45 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.40625 driveSize# 46 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.4375 driveSize# 47 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 32 relativeError# 0.46875 driveSize# 48 unitSizeInBytes# 1 
maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.25 driveSize# 49 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.234375 driveSize# 50 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.21875 driveSize# 51 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.203125 driveSize# 52 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.1875 driveSize# 53 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.171875 driveSize# 54 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.15625 driveSize# 55 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.140625 driveSize# 56 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.125 driveSize# 57 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.109375 driveSize# 58 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.09375 driveSize# 59 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.078125 driveSize# 60 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.0625 driveSize# 61 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.046875 driveSize# 62 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.03125 driveSize# 63 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 relativeError# -0.015625 driveSize# 64 unitSizeInBytes# 1 maxSlots# 1 -> ExpectedSlotCount# 1 SlotSizeInUnits# 64 re ... 
12361Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 228 PDiskId# 1001 2026-01-04T02:32:02.012404Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 231 PDiskId# 1001 2026-01-04T02:32:02.012443Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 234 PDiskId# 1001 2026-01-04T02:32:02.012483Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 237 PDiskId# 1001 2026-01-04T02:32:02.012523Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 240 PDiskId# 1001 2026-01-04T02:32:02.012576Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 243 PDiskId# 1001 2026-01-04T02:32:02.012616Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 246 PDiskId# 1001 2026-01-04T02:32:02.012654Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 249 PDiskId# 1001 2026-01-04T02:32:02.012690Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 252 PDiskId# 1001 2026-01-04T02:32:02.012744Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 255 PDiskId# 1001 2026-01-04T02:32:02.012780Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 258 PDiskId# 1001 2026-01-04T02:32:02.012813Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 261 PDiskId# 1001 2026-01-04T02:32:02.012854Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 264 PDiskId# 1001 2026-01-04T02:32:02.012898Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 267 PDiskId# 1001 2026-01-04T02:32:02.012931Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 270 PDiskId# 1001 2026-01-04T02:32:02.012978Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 273 PDiskId# 1001 2026-01-04T02:32:02.013019Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 276 PDiskId# 1001 2026-01-04T02:32:02.013072Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 279 PDiskId# 1001 2026-01-04T02:32:02.013112Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 282 PDiskId# 1001 2026-01-04T02:32:02.013154Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 285 PDiskId# 1001 2026-01-04T02:32:02.013190Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 288 PDiskId# 1001 2026-01-04T02:32:02.013233Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 291 PDiskId# 1001 2026-01-04T02:32:02.013271Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 294 PDiskId# 1001 2026-01-04T02:32:02.013337Z node 
3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 297 PDiskId# 1001 2026-01-04T02:32:02.013376Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 300 PDiskId# 1001 2026-01-04T02:32:02.013414Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 303 PDiskId# 1001 2026-01-04T02:32:02.013456Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 306 PDiskId# 1001 2026-01-04T02:32:02.013489Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 309 PDiskId# 1001 2026-01-04T02:32:02.013530Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 312 PDiskId# 1001 2026-01-04T02:32:02.013601Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 315 PDiskId# 1001 2026-01-04T02:32:02.013668Z node 3 :BS_PDISK DEBUG: {BPD66@blobstorage_pdisk_writer.h:388} TSectorWriter TerminateLog large SectorBytesFree# 2119 ChunkIdx# 0 SectorIdx# 315 SectorOffset# 1290240 PDiskId# 1001 2026-01-04T02:32:02.014897Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_completion_impl.cpp:423: TCompletionEventSender {EvLogInitResult} 2026-01-04T02:32:02.015542Z node 3 :BS_PDISK DEBUG: {BPD69@blobstorage_pdisk_impl_log.cpp:846} WriteSysLogRestorePoint FirstLogChunkToParseCommits# 1 CommonLogger# 0x00007D68F9BF6880 "LogChunks.size()"# 1 "LogChunks.front().ChunkIdx"# 1 BeginSectorIdx# 171 EndSectorIdx# 318 PDiskId# 1001 2026-01-04T02:32:02.015657Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TInitialReadMetadataResult ReqId# 10010000004 PDiskId# 1001 2026-01-04T02:32:02.015761Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:345} ReadMetadataIfNeeded: initiating read ChunkIdx# 18903 OffsetInSectors# 0 ReadOffset# 2576760176640 BytesToRead# 4096 ReqId# 2562560001348 PDiskId# 1001 2026-01-04T02:32:02.015832Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1759} PDisk have successfully started PDiskId# 1001 2026-01-04T02:32:02.016767Z node 3 :BS_PDISK INFO: {BPD01@blobstorage_pdisk_impl_log.cpp:1760} StartupOwnerInfo# { PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..18902} PDiskId# 1001 2026-01-04T02:32:02.016874Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-04T02:32:02.017084Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1001 ReqId# 2562560001348 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 12290.900803 2026-01-04T02:32:02.022132Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2026-01-04T02:32:02.022208Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1001 ReqId# 2562560001348 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-01-04T02:32:02.022256Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-04T02:32:02.022316Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:390} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 
OffsetInSectors# 0 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1001 2026-01-04T02:32:02.022384Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TInitialReadMetadataResult ReqId# 10010000005 PDiskId# 1001 2026-01-04T02:32:02.022480Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:345} ReadMetadataIfNeeded: initiating read ChunkIdx# 18903 OffsetInSectors# 16640 ReadOffset# 2576828334080 BytesToRead# 4096 ReqId# 2562560001604 PDiskId# 1001 2026-01-04T02:32:02.022785Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1001 ReqId# 2562560001604 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 12290.907531 2026-01-04T02:32:02.022875Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2026-01-04T02:32:02.022907Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1001 ReqId# 2562560001604 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-01-04T02:32:02.022941Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-04T02:32:02.022975Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:390} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 OffsetInSectors# 16640 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1001 2026-01-04T02:32:02.023026Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-04T02:32:02.025717Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TWhiteboardReport ReqId# 10010000006 PDiskId# 1001 2026-01-04T02:32:02.025859Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1001 ReqId# 2562560001848 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 12290.910891 2026-01-04T02:32:02.030059Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TWhiteboardReport OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1001 2026-01-04T02:32:02.030143Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1001 ReqId# 2562560001848 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-01-04T02:32:02.030191Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-04T02:32:02.030309Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 Got TEvPDiskStateUpdate# NKikimrWhiteboard.TPDiskStateInfo PDiskId: 1001 Path: "SectorMap:TestInferPDiskSlotCountExplicitConfig:2400" AvailableSize: 2576487546880 TotalSize: 2576980377600 State: Normal SerialNumber: "" SystemSize: 817889280 LogUsedSize: 136314880 LogTotalSize: 27262976000 ExpectedSlotCount: 13 NumActiveSlots: 0 SlotSizeInUnits: 0 PDiskUsage: 0 Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1001 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: InitialFormatRead SlotCount: 13 SlotSizeInUnits: 0 PDiskUsage: 100 } Got 
TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1001 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: InitialFormatRead SlotCount: 13 SlotSizeInUnits: 0 PDiskUsage: 100 } Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1001 AvailableSize: 2576487546880 TotalSize: 2576980377600 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: Normal SlotCount: 13 SlotSizeInUnits: 0 PDiskUsage: 0 } 2026-01-04T02:32:02.041171Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-04T02:32:02.051495Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-04T02:32:02.062046Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 2026-01-04T02:32:02.073931Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1001 |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> DataShardVolatile::UpsertBrokenLockArbiter-UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test |86.6%| [LD] {RESULT} $(B)/ydb/core/kesus/tablet/quoter_performance_test/quoter_performance_test >> TBlobStorageWardenTest::TestSendToInvalidGroupId >> TAccessServiceTest::PassRequestId [GOOD] >> KikimrIcGateway::TestDropStreamingQuery [GOOD] >> TServiceAccountServiceTest::Get [GOOD] >> FolderServiceTest::TFolderService >> TAccessServiceTest::Authenticate [GOOD] >> TDistconfGenerateConfigTest::GenerateConfig1DCBigCases >> TUserAccountServiceTest::Get [GOOD] >> TDistconfGenerateConfigTest::GenerateConfig1DCBigCases [GOOD] >> TDistconfGenerateConfigTest::BadRack [GOOD] >> TDistconfGenerateConfigTest::ExtraDCHelp [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::PassRequestId [GOOD] Test command err: 2026-01-04T02:32:01.217929Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328588133932245:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:01.222619Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:01.720594Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:01.786037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:01.786122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-04T02:32:01.876514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:01.883115Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:01.884636Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328588133932199:2081] 1767493921207719 != 1767493921207722 2026-01-04T02:32:02.019327Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:02.092894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:02.098080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:32:02.123123Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c9223b9ed50]{trololo} Connect to grpc://localhost:9718 2026-01-04T02:32:02.130294Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c9223b9ed50]{trololo} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2026-01-04T02:32:02.151657Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c9223b9ed50]{trololo} Response AuthenticateResponse { subject { user_account { id: "1234" } } } 2026-01-04T02:32:02.250767Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |86.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest >> TBlobStorageWardenTest::TestSendToInvalidGroupId [GOOD] >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings >> DataStreams::TestPutRecordsWithRead [GOOD] >> DataStreams::TestPutRecordsCornerCases >> DataStreams::Test_AutoPartitioning_Describe [GOOD] >> DataStreams::Test_Crreate_AutoPartitioning_Disabled >> BindQueue::Basic [GOOD] >> TBlobStorageWardenTest::ObtainPDiskKeySamePin [GOOD] >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] >> Cdc::ResolvedTimestampsContinueAfterMerge [GOOD] >> Cdc::ResolvedTimestampForDisplacedUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::ExtraDCHelp [GOOD] Test command err: Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 15 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { 
Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 11 } } } Expected: NToSelect: 9 |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> FolderServiceTest::TFolderServiceAdapter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TAccessServiceTest::Authenticate [GOOD] Test command err: 2026-01-04T02:32:01.979375Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328589626765299:2263];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:01.979485Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:02.027685Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:02.421885Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:02.426020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:02.426125Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:02.435352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:02.581575Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:02.589954Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328589626765051:2081] 1767493921922459 != 1767493921922462 2026-01-04T02:32:02.718421Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:02.862675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:02.915615Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cbe5758edd0] Connect to grpc://localhost:21301 2026-01-04T02:32:02.917241Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cbe5758edd0] Request AuthenticateRequest { iam_token: "**** (047D44F1)" } 2026-01-04T02:32:02.948069Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cbe5758edd0] Status 7 Permission Denied 2026-01-04T02:32:02.950248Z node 1 :GRPC_CLIENT 
DEBUG: grpc_service_client.h:122: [7cbe5758edd0] Request AuthenticateRequest { iam_token: "**** (342498C1)" } 2026-01-04T02:32:02.966040Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cbe5758edd0] Response AuthenticateResponse { subject { user_account { id: "1234" } } } 2026-01-04T02:32:02.970131Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestDropStreamingQuery [GOOD] Test command err: Trying to start YDB, gRPC: 27356, MsgBus: 20439 2026-01-04T02:31:30.537275Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328457504115206:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:30.537485Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:30.978875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:30.978972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:30.980860Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:31.066074Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:31.094612Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:31.205428Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:31.205449Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:31.205455Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:31.205528Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:31.340455Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:31.546228Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:31.940088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:31.959709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:35.369248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7591328478978952568:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:35.369385Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:35.369771Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328478978952578:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:35.369839Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:35.541935Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328457504115206:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:35.542041Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:35.751851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:35.944504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:36.005184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:36.058664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:36.128985Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328483273920183:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:36.129091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:36.129429Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328483273920188:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:36.129488Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328483273920189:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:36.129854Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:36.135954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:31:36.156215Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328483273920192:2366], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2026-01-04T02:31:36.414717Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328483273920243:2771] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21209, MsgBus: 27694 2026-01-04T02:31:37.781661Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328484178227701:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:37.781714Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:37.828566Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:37.993961Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:38.119480Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:38.139027Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:38.139127Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:38.176607Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:38.307227Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:38.377924Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:38.377946Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:38.377956Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:38.378053Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:38.817623Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:38.869181Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:38.874805Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:38.922963Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-04T02:31:41.995604Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetc ... 1328545154005754:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:51.854155Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:51.854819Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591328545154005759:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:51.854885Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591328545154005760:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:51.860622Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:51.860966Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:31:51.897492Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591328545154005763:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:31:52.000864Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591328545154005816:3787] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:31:54.506570Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-04T02:31:54.550230Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-04T02:31:54.612187Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) Trying to start YDB, gRPC: 63877, MsgBus: 20211 2026-01-04T02:31:56.396531Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591328565183625759:2168];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:56.396806Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:56.437401Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:56.665892Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:56.672096Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:56.672182Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:56.683522Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:56.700102Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591328565183625629:2081] 1767493916375474 != 1767493916375477 2026-01-04T02:31:56.730462Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:56.866320Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:56.870567Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:56.870589Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:56.870601Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:56.870686Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:57.401773Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:57.406138Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:57.445018Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:00.459701Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591328582363495810:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:00.459801Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591328582363495809:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:00.459872Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591328582363495800:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:00.459956Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:00.465145Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:32:00.469585Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591328582363495822:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:00.469732Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:00.481210Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591328582363495824:2571] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:00.481566Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591328582363495820:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:32:00.573942Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591328582363495881:2604] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:01.395779Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:32:01.398133Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591328565183625759:2168];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:01.398462Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:32:02.093273Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:02.237027Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) >> DataStreams::ChangeBetweenRetentionModes [GOOD] >> DataStreams::TestCreateExistingStream |86.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TUserAccountServiceTest::Get [GOOD] Test command err: 2026-01-04T02:32:01.863033Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328588764329471:2226];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:01.863105Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:02.134382Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:02.161179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:02.161311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:02.205392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:02.253422Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-04T02:32:02.417248Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:02.526403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest |86.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest >> KikimrIcGateway::TestLoadTokenSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation+UseSchemaSecrets >> TBlobStorageWardenTest::TestUnmonitoredEventsThenNoMonitorings [GOOD] >> TBlobStorageWardenTest::TestSendUsefulMonitoring |86.6%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest |86.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceAdapter [GOOD] Test command err: 2026-01-04T02:32:03.174810Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328598107380034:2205];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:03.185666Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:03.570821Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:03.701465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:03.701617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:03.724136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:03.777620Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:03.782735Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328598107379847:2081] 1767493923063761 != 1767493923063764 2026-01-04T02:32:03.811908Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:04.018702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:04.124480Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c7b89780a50] 
Connect to grpc://localhost:18010 2026-01-04T02:32:04.133110Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c7b89780a50] Request ListFoldersRequest { id: "i_am_exists" } 2026-01-04T02:32:04.167242Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c7b89780a50] Response ListFoldersResponse { result { cloud_id: "cloud_from_old_service" } } 2026-01-04T02:32:04.168703Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c7b89780dd0] Connect to grpc://localhost:8022 2026-01-04T02:32:04.169381Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c7b89780dd0] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2026-01-04T02:32:04.186949Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:04.189664Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c7b89780dd0] Response ResolveFoldersResponse { resolved_folders { cloud_id: "cloud_from_new_service" } } 2026-01-04T02:32:04.191521Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c7b89780dd0] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2026-01-04T02:32:04.193313Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c7b89780dd0] Status 5 Not Found 2026-01-04T02:32:04.193797Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c7b89780a50] Request ListFoldersRequest { id: "i_am_not_exists" } 2026-01-04T02:32:04.195461Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7c7b89780a50] Status 5 Not Found ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeyDifferentPin [GOOD] Test command err: Disable nodeId# 90 Disable nodeId# 100 Disable nodeId# 13 Disable nodeId# 43 Disable nodeId# 23 Enable nodeId# 100 Delete nodeId# 24 Disable nodeId# 27 Add nodeId# 101 Disable nodeId# 56 Enable nodeId# 23 Delete nodeId# 82 Enable nodeId# 43 Delete nodeId# 53 Pick Pick Delete nodeId# 38 Add nodeId# 102 Add nodeId# 103 Add nodeId# 104 Pick Enable nodeId# 56 Delete nodeId# 60 Delete nodeId# 70 Add nodeId# 105 Enable nodeId# 27 Delete nodeId# 81 Pick Add nodeId# 106 Pick Pick Enable nodeId# 90 Add nodeId# 107 Delete nodeId# 25 Pick Pick Pick Pick Enable nodeId# 13 Add nodeId# 108 Delete nodeId# 103 Disable nodeId# 15 Disable nodeId# 30 Delete nodeId# 101 Disable nodeId# 64 Delete nodeId# 27 Enable nodeId# 15 Delete nodeId# 94 Disable nodeId# 97 Add nodeId# 109 Disable nodeId# 73 Delete nodeId# 40 Disable nodeId# 44 Enable nodeId# 64 Add nodeId# 110 Add nodeId# 111 Delete nodeId# 93 Add nodeId# 112 Delete nodeId# 85 Add nodeId# 113 Enable nodeId# 73 Disable nodeId# 11 Add nodeId# 114 Delete nodeId# 114 Enable nodeId# 11 Disable nodeId# 89 Enable nodeId# 44 Enable nodeId# 89 Add nodeId# 115 Pick Enable nodeId# 97 Disable nodeId# 16 Delete nodeId# 42 Disable nodeId# 86 Pick Add nodeId# 116 Disable nodeId# 17 Disable nodeId# 49 Disable nodeId# 7 Add nodeId# 117 Pick Delete nodeId# 20 Disable nodeId# 23 Add nodeId# 118 Enable nodeId# 17 Delete nodeId# 69 Disable nodeId# 79 Add nodeId# 119 Disable nodeId# 43 Enable nodeId# 79 Disable nodeId# 4 Pick Pick Enable nodeId# 4 Disable nodeId# 102 Pick Delete nodeId# 41 Add nodeId# 120 Pick Delete nodeId# 45 Enable nodeId# 43 Enable nodeId# 7 Disable nodeId# 11 Disable nodeId# 32 Pick Pick Delete nodeId# 17 Add nodeId# 121 Disable nodeId# 96 Pick Enable nodeId# 11 Disable nodeId# 50 Enable nodeId# 23 Pick Add nodeId# 122 Pick Pick Delete nodeId# 32 Enable nodeId# 102 Delete 
nodeId# 10 Add nodeId# 123 Add nodeId# 124 Enable nodeId# 30 Delete nodeId# 29 Pick Enable nodeId# 50 Disable nodeId# 109 Add nodeId# 125 Enable nodeId# 109 Pick Disable nodeId# 113 Disable nodeId# 109 Pick Delete nodeId# 55 Pick Disable nodeId# 91 Add nodeId# 126 Enable nodeId# 113 Delete nodeId# 107 Add nodeId# 127 Enable nodeId# 109 Delete nodeId# 6 Enable nodeId# 86 Add nodeId# 128 Disable nodeId# 23 Pick Disable nodeId# 56 Pick Enable nodeId# 96 Delete nodeId# 86 Pick Enable nodeId# 49 Disable nodeId# 83 Pick Add nodeId# 129 Disable nodeId# 89 Enable nodeId# 16 Enable nodeId# 89 Pick Pick Pick Disable nodeId# 39 Disable nodeId# 54 Disable nodeId# 49 Disable nodeId# 58 Enable nodeId# 54 Enable nodeId# 58 Disable nodeId# 9 Enable nodeId# 83 Delete nodeId# 110 Add nodeId# 130 Disable nodeId# 100 Delete nodeId# 83 Pick Enable nodeId# 23 Pick Delete nodeId# 97 Pick Add nodeId# 131 Enable nodeId# 9 Add nodeId# 132 Delete nodeId# 120 Enable nodeId# 56 Enable nodeId# 91 Enable nodeId# 100 Enable nodeId# 39 Add nodeId# 133 Pick Enable nodeId# 49 Delete nodeId# 113 Delete nodeId# 13 Add nodeId# 134 Delete nodeId# 106 Pick Pick Delete nodeId# 111 Add nodeId# 135 Add nodeId# 136 Add nodeId# 137 Delete nodeId# 98 Disable nodeId# 134 Enable nodeId# 134 Pick Delete nodeId# 43 Pick Delete nodeId# 137 Pick Disable nodeId# 5 Pick Enable nodeId# 5 Pick Add nodeId# 138 Add nodeId# 139 Delete nodeId# 118 Disable nodeId# 128 Disable nodeId# 62 Pick Disable nodeId# 133 Pick Add nodeId# 140 Disable nodeId# 87 Enable nodeId# 62 Enable nodeId# 133 Disable nodeId# 134 Delete nodeId# 90 Enable nodeId# 87 Add nodeId# 141 Disable nodeId# 117 Delete nodeId# 115 Add nodeId# 142 Enable nodeId# 134 Add nodeId# 143 Disable nodeId# 77 Enable nodeId# 77 Disable nodeId# 131 Pick Pick Pick Delete nodeId# 135 Disable nodeId# 78 Enable nodeId# 131 Disable nodeId# 59 Pick Disable nodeId# 108 Enable nodeId# 117 Enable nodeId# 59 Enable nodeId# 108 Pick Disable nodeId# 51 Add nodeId# 144 Delete nodeId# 67 Enable nodeId# 78 Pick Disable nodeId# 140 Disable nodeId# 23 Add nodeId# 145 Delete nodeId# 36 Disable nodeId# 19 Pick Add nodeId# 146 Delete nodeId# 65 Disable nodeId# 112 Enable nodeId# 128 Add nodeId# 147 Disable nodeId# 50 Pick Enable nodeId# 51 Pick Pick Delete nodeId# 92 Delete nodeId# 140 Enable nodeId# 50 Enable nodeId# 112 Add nodeId# 148 Enable nodeId# 23 Enable nodeId# 19 Pick Add nodeId# 149 Delete nodeId# 11 Disable nodeId# 62 Disable nodeId# 75 Disable nodeId# 109 Pick Enable nodeId# 109 Delete nodeId# 124 Delete nodeId# 91 Enable nodeId# 75 Pick Add nodeId# 150 Disable nodeId# 1 Disable nodeId# 150 Disable nodeId# 51 Delete nodeId# 51 Add nodeId# 151 Pick Delete nodeId# 19 Enable nodeId# 62 Enable nodeId# 150 Delete nodeId# 68 Pick Disable nodeId# 136 Disable nodeId# 87 Delete nodeId# 127 Disable nodeId# 150 Pick Enable nodeId# 87 Disable nodeId# 56 Delete nodeId# 104 Disable nodeId# 87 Disable nodeId# 14 Add nodeId# 152 Disable nodeId# 28 Enable nodeId# 56 Pick Delete nodeId# 102 Delete nodeId# 132 Add nodeId# 153 Disable nodeId# 37 Enable nodeId# 150 Pick Delete nodeId# 121 Add nodeId# 154 Enable nodeId# 136 Add nodeId# 155 Enable nodeId# 37 Disable nodeId# 78 Enable nodeId# 87 Pick Disable nodeId# 75 Disable nodeId# 34 Disable nodeId# 147 Delete nodeId# 142 Delete nodeId# 129 Enable nodeId# 14 Add nodeId# 156 Enable nodeId# 147 Enable nodeId# 75 Pick Enable nodeId# 28 Enable nodeId# 78 Delete nodeId# 1 Enable nodeId# 34 Pick Disable nodeId# 64 Add nodeId# 157 Pick Pick Enable nodeId# 64 Pick Delete nodeId# 
146 Pick Disable nodeId# 28 Delete nodeId# 154 Disable nodeId# 80 Enable nodeId# 28 Disable nodeId# 87 Enable nodeId# 80 Delete nodeId# 66 Delete nodeId# 15 Add nodeId# 158 Add nodeId# 159 Pick Pick Enable nodeId# 87 Disable nodeId# 157 Delete nodeId# 88 Delete nodeId# 156 Add nodeId# 160 Add nodeId# 161 Disable nodeId# 87 Disable nodeId# 54 Pick Pick Disable nodeId# 26 Disable nodeId# 100 Disable nodeId# 151 Delete nodeId# 46 Enable nodeId# 157 Enable nodeId# 54 Disable nodeId# 109 Disable nodeId# 96 Pick Add nodeId# 162 Disable nodeId# 134 Add nodeId# 163 Disable nodeId# 33 Delete nodeId# 116 Enable nodeId# 109 Disable nodeId# 3 Disable nodeId# 122 Disable nodeId# 108 Delete nodeId# 133 Add nodeId# 164 Disable nodeId# 63 Delete nodeId# 31 Enable nodeId# 108 Delete nodeId# 63 Add nodeId# 165 Disable nodeId# 64 Enable nodeId# 134 Disable nodeId# 8 Add nodeId# 166 Delete nodeId# 147 Disable nodeId# 148 Delete nodeId# 160 Disable nodeId# 47 Add nodeId# 167 Pick Pick Enable nodeId# 33 Enable nodeId# 87 Enable nodeId# 3 Delete nodeId# 5 Delete nodeId# 34 Add nodeId# 168 Add nodeId# 169 Pick Add nodeId# 170 Disable nodeId# 37 Pick Enable nodeId# 122 Pick Delete nodeId# 109 Enable nodeId# 26 Pick Pick Disable nodeId# 99 Add nodeId# 171 Delete nodeId# 145 Disable nodeId# 112 Pick Add nodeId# 172 Enable nodeId# 112 Disable nodeId# 57 Enable nodeId# 64 Disable nodeId# 139 Disable nodeId# 73 Pick Enable nodeId# 99 Delete nodeId# 99 Add nodeId# 173 Disable nodeId# 112 Pick Enable nodeId# 148 Add nodeId# 174 Disable nodeId# 171 Disable nodeId# 7 Disable nodeId# 50 Enable nodeId# 151 Disable nodeId# 54 Delete nodeId# 79 Enable nodeId# 47 Delete nodeId# 47 Pick Enable nodeId# 7 Add nodeId# 175 Enable nodeId# 8 Disable nodeId# 130 Enable nodeId# 96 Pick Enable nodeId# 37 Disable nodeId# 96 Pick Disable nodeId# 58 Enable nodeId# 58 Add nodeId# 176 Enable nodeId# 112 Add nodeId# 177 Delete nodeId# 134 Delete nodeId# 4 Disable nodeId# 16 Disable nodeId# 12 Disable nodeId# 87 Enable nodeId# 130 Add nodeId# 178 Delete nodeId# 126 Add nodeId# 179 Add nodeId# 180 Add nodeId# 181 Delete nodeId# 162 Delete nodeId# 75 Disable nodeId# 136 Delete nodeId# 158 Pick Disable nodeId# 64 Delete nodeId# 173 Disable nodeId# 177 Enable nodeId# 50 Disable nodeId# 172 Disable nodeId# 89 Pick Enable nodeId# 89 Delete nodeId# 35 Enable nodeId# 54 Disable nodeId# 165 Disable nodeId# 89 Pick Add nodeId# 182 Delete nodeId# 56 Pick Delete nodeId# 18 Delete nodeId# 108 Delete nodeId# 87 Pick Add nodeId# 183 Add nodeId# 184 Enable nodeId# 172 Enable nodeId# 100 Add nodeId# 185 Enable nodeId# 139 Pick Pick Pick Pick Enable nodeId# 96 Pick Delete nodeId# 159 Enable nodeId# 16 Add nodeId# 186 Delete nodeId# 112 Disable nodeId# 117 Disable nodeId# 157 Disable nodeId# 123 Disable nodeId# 168 Enable nodeId# 136 Disable nodeId# 96 Delete nodeId# 95 Delete nodeId# 76 Enable nodeId# 165 Pick Add nodeId# 187 Pick Disable nodeId# 77 Enable nodeId# 77 Disable nodeId# 39 Delete nodeId# 100 Pick Disable nodeId# 74 Enable nodeId# 64 Pick Pick Delete nodeId# 164 Disable nodeId# 49 Disable nodeId# 16 Pick Enable nodeId# 177 Pick Enable nodeId# 89 Disable nodeId# 80 Add nodeId# 188 Add nodeId# 189 Enable nodeId# 96 Enable nodeId# 123 Enable nodeId# 49 Add nodeId# 190 Delete nodeId# 78 Pick Add nodeId# 191 Disable nodeId# 186 Add nodeId# 192 Disable nodeId# 131 Disable nodeId# 37 Pick Enable nodeId# 73 Enable nodeId# 186 Delete nodeId# 59 Delete nodeId# 139 Enable nodeId# 16 Add nodeId# 193 Enable nodeId# 12 Pick Enable nodeId# 37 Enable nodeId# 157 
Enable nodeId# 74 Enable nodeId# 131 Enable nodeId# 168 Pick Pick Add nodeId# 194 Pick Disable nodeId# 8 Add nodeId# 195 Enable nodeId# 8 Enable nodeId# 57 Pick Disable nodeId# 180 Delete nodeId# 149 Pick Add nodeId# 196 Delete nodeId# 184 Pick Enable nodeId# 171 Enable nodeId# 80 Delete nodeId# 186 Pick Add nodeId# 197 Disable nodeId# 72 Pick Add nodeId# 198 Pick Delete nodeId# 74 Delete nodeId# 21 Delete nodeId# 189 Add nodeId# 199 Add nodeId# 200 Enable nodeId# 117 Pick Delete nodeId# 2 Pick Disable nodeId# 23 Add nodeId# 201 Enable nodeId# 39 Pick Delete nodeId# 39 Delete nodeId# 178 Enable nodeId# 23 Pick Delete nodeId# 179 Add nodeId# 202 Delete nodeId# 181 Add nodeId# 203 Add nodeId# 204 Enable nodeId# 72 Disable nodeId# 196 Enable nodeId# 196 Pick Pick Disable nodeId# 23 Delete nodeId# 33 Pick Add nodeId# 205 Enable nodeId# 23 Enable nodeId# 180 Add nodeId# 206 Disable nodeId# 196 Add nodeId# 207 Disable nodeId# 50 Disable nodeId# 188 Add nodeId# 208 Pick Enable nodeId# 50 Add nodeId# 209 Enable nodeId# 196 Add nodeId# 210 Delete nodeId# 200 Add nodeId# 211 Delete nodeId# 49 Add nodeId# 212 Delete nodeId# 7 Enable nodeId# 188 Pick Delete nodeId# 207 Delete nodeId# 177 Disable nodeId# 105 Disable nodeId# 161 Delete nodeId# 71 Pick Pick Delete nodeId# 123 Add nodeId# 213 Disable nodeId# 197 Pick Pick Disable nodeId# 64 Enable nodeId# 197 Disable nodeId# 44 Delete nodeId# 150 Delete nodeId# 73 Disable nodeId# 187 Enable nodeId# 44 Disable nodeId# 89 Pick Enable nodeId# 105 Pick Disable nodeId# 3 Delete nodeId# 202 Delete nodeId# 185 Delete nodeId# 172 Disable nodeId# 136 Disable nodeId# 194 Delete nodeId# 136 Add nodeId# 214 Enable nodeId# 161 Add nodeId# 215 Add nodeId# 216 Pick Disable nodeId# 170 Disable nodeId# 54 Pick Enable nodeId# 170 Add nodeId# 217 Enable nodeId# 187 Disable nodeId# 105 Enable nodeId# 3 Add nodeId# 218 Enable nodeId# 54 Enable nodeId# 105 Disable nodeId# 188 Delete nodeId# 144 Add nodeId# 219 Enable nodeId# 64 Delete nodeId# 12 Add nodeId# 220 Delete nodeId# 37 Delete nodeId# 23 Disable nodeId# 44 Enable nodeId# 194 Disable nodeId# 180 Delete nodeId# 183 Delete nodeId# 199 Disable nodeId# 61 Delete nodeId# 44 Pick Pick Enable nodeId# 180 Add nodeId# 221 Delete nodeId# 170 Enable nodeId# 61 Enable nodeId# 89 Pick Delete nodeId# 201 Delete nodeId# 128 Pick Pick Pick Disable nodeId# 54 Pick Add nodeId# 222 Enable nodeId# 54 Delete nodeId# 3 Disable nodeId# 187 Enable nodeId# 188 Add nodeId# 223 Add nodeId# 224 Disable nodeId# 141 Enable nodeId# 187 Add nodeId# 225 Delete nodeId# 163 Add nodeId# 226 Enable nodeId# 141 Add nodeId# 227 Disable nodeId# 221 Pick Add nodeId# 228 Add nodeId# 229 Add nodeId# 230 Pick Add nodeId# 231 Add nodeId# 232 Add nodeId# 233 Disable nodeId# 187 Add nodeId# 234 Pick Delete nodeId# 122 Add nodeId# 235 Enable nodeId# 221 Add nodeId# 236 Delete nodeId# 105 Disable nodeId# 195 Delete nodeId# 190 Add nodeId# 237 Pick Add nodeId# 238 Delete nodeId# 117 Enable nodeId# 195 Pick Delete nodeId# 228 Disable nodeId# 96 Disable nodeId# 227 Pick Pick Pick Ena ... 
076 Add nodeId# 20158 Pick Delete nodeId# 20071 Add nodeId# 20159 Disable nodeId# 20078 Enable nodeId# 20078 Pick Delete nodeId# 20117 Enable nodeId# 20109 Add nodeId# 20160 Pick Disable nodeId# 20043 Pick Delete nodeId# 20160 Enable nodeId# 20043 Delete nodeId# 20149 Disable nodeId# 20151 Disable nodeId# 20145 Disable nodeId# 20147 Pick Pick Disable nodeId# 20066 Pick Enable nodeId# 20066 Disable nodeId# 20108 Enable nodeId# 20151 Pick Disable nodeId# 20129 Add nodeId# 20161 Delete nodeId# 20065 Disable nodeId# 19966 Enable nodeId# 20129 Enable nodeId# 20147 Pick Enable nodeId# 20145 Add nodeId# 20162 Delete nodeId# 20078 Pick Enable nodeId# 20108 Enable nodeId# 19966 Delete nodeId# 19860 Pick Pick Delete nodeId# 20145 Delete nodeId# 20151 Add nodeId# 20163 Disable nodeId# 20125 Enable nodeId# 20125 Add nodeId# 20164 Pick Pick Delete nodeId# 20164 Pick Delete nodeId# 19918 Pick Delete nodeId# 20135 Delete nodeId# 20158 Delete nodeId# 20128 Delete nodeId# 20123 Pick Delete nodeId# 20115 Add nodeId# 20165 Disable nodeId# 20143 Delete nodeId# 20072 Pick Add nodeId# 20166 Add nodeId# 20167 Disable nodeId# 20124 Delete nodeId# 20127 Add nodeId# 20168 Disable nodeId# 20136 Add nodeId# 20169 Pick Delete nodeId# 20153 Delete nodeId# 20139 Disable nodeId# 20120 Add nodeId# 20170 Pick Pick Delete nodeId# 20106 Disable nodeId# 20066 Add nodeId# 20171 Delete nodeId# 20154 Delete nodeId# 20161 Delete nodeId# 20162 Disable nodeId# 20129 Pick Add nodeId# 20172 Delete nodeId# 20125 Add nodeId# 20173 Delete nodeId# 20124 Delete nodeId# 19975 Add nodeId# 20174 Delete nodeId# 20159 Enable nodeId# 20066 Disable nodeId# 20052 Enable nodeId# 20129 Add nodeId# 20175 Pick Disable nodeId# 20157 Add nodeId# 20176 Disable nodeId# 20172 Disable nodeId# 20109 Enable nodeId# 20109 Disable nodeId# 20171 Disable nodeId# 20175 Pick Disable nodeId# 20111 Disable nodeId# 20169 Pick Disable nodeId# 20026 Disable nodeId# 20173 Pick Delete nodeId# 20168 Delete nodeId# 20026 Add nodeId# 20177 Add nodeId# 20178 Enable nodeId# 20157 Disable nodeId# 19966 Disable nodeId# 20132 Pick Disable nodeId# 20033 Pick Disable nodeId# 20156 Enable nodeId# 20156 Enable nodeId# 20132 Pick Enable nodeId# 20143 Enable nodeId# 20172 Enable nodeId# 20171 Disable nodeId# 20100 Pick Add nodeId# 20179 Add nodeId# 20180 Delete nodeId# 20175 Add nodeId# 20181 Pick Delete nodeId# 20043 Disable nodeId# 20141 Pick Enable nodeId# 20033 Disable nodeId# 20163 Pick Add nodeId# 20182 Pick Enable nodeId# 20111 Disable nodeId# 20157 Add nodeId# 20183 Add nodeId# 20184 Add nodeId# 20185 Enable nodeId# 20120 Delete nodeId# 20150 Add nodeId# 20186 Enable nodeId# 20163 Delete nodeId# 20033 Disable nodeId# 19905 Delete nodeId# 20174 Add nodeId# 20187 Enable nodeId# 20157 Delete nodeId# 20107 Disable nodeId# 20143 Pick Enable nodeId# 20141 Enable nodeId# 20143 Enable nodeId# 19966 Enable nodeId# 20100 Disable nodeId# 20171 Add nodeId# 20188 Pick Disable nodeId# 20184 Delete nodeId# 20188 Enable nodeId# 20169 Enable nodeId# 20136 Delete nodeId# 20186 Delete nodeId# 20157 Add nodeId# 20189 Enable nodeId# 20173 Pick Pick Delete nodeId# 19966 Add nodeId# 20190 Delete nodeId# 20143 Disable nodeId# 20190 Add nodeId# 20191 Disable nodeId# 20109 Add nodeId# 20192 Disable nodeId# 20134 Pick Delete nodeId# 20134 Enable nodeId# 20184 Enable nodeId# 20190 Pick Delete nodeId# 20170 Delete nodeId# 20108 Add nodeId# 20193 Delete nodeId# 20183 Add nodeId# 20194 Enable nodeId# 20052 Disable nodeId# 20132 Delete nodeId# 20176 Add nodeId# 20195 Disable nodeId# 20182 Disable nodeId# 
20129 Disable nodeId# 20103 Add nodeId# 20196 Add nodeId# 20197 Enable nodeId# 20182 Pick Disable nodeId# 20152 Pick Add nodeId# 20198 Add nodeId# 20199 Add nodeId# 20200 Enable nodeId# 19905 Add nodeId# 20201 Enable nodeId# 20132 Delete nodeId# 20182 Add nodeId# 20202 Add nodeId# 20203 Disable nodeId# 20180 Pick Delete nodeId# 20163 Enable nodeId# 20180 Pick Enable nodeId# 20152 Pick Pick Pick Pick Disable nodeId# 20166 Enable nodeId# 20109 Delete nodeId# 20171 Enable nodeId# 20103 Delete nodeId# 20199 Add nodeId# 20204 Add nodeId# 20205 Disable nodeId# 20173 Add nodeId# 20206 Delete nodeId# 20184 Pick Delete nodeId# 20179 Pick Pick Add nodeId# 20207 Add nodeId# 20208 Add nodeId# 20209 Disable nodeId# 20031 Delete nodeId# 20129 Delete nodeId# 20137 Delete nodeId# 20141 Enable nodeId# 20173 Delete nodeId# 20120 Delete nodeId# 20111 Disable nodeId# 20205 Add nodeId# 20210 Enable nodeId# 20166 Disable nodeId# 20203 Disable nodeId# 20208 Delete nodeId# 20132 Delete nodeId# 20191 Delete nodeId# 20187 Disable nodeId# 20166 Delete nodeId# 20156 Enable nodeId# 20208 Enable nodeId# 20166 Add nodeId# 20211 Enable nodeId# 20205 Pick Enable nodeId# 20031 Pick Pick Disable nodeId# 20201 Add nodeId# 20212 Enable nodeId# 20203 Disable nodeId# 20178 Enable nodeId# 20201 Enable nodeId# 20178 Add nodeId# 20213 Disable nodeId# 19905 Delete nodeId# 20206 Disable nodeId# 20142 Disable nodeId# 20136 Delete nodeId# 20167 Enable nodeId# 20136 Delete nodeId# 20147 Enable nodeId# 20142 Disable nodeId# 20052 Add nodeId# 20214 Disable nodeId# 20066 Pick Disable nodeId# 20031 Delete nodeId# 20209 Enable nodeId# 20052 Pick Add nodeId# 20215 Disable nodeId# 20210 Add nodeId# 20216 Disable nodeId# 20211 Disable nodeId# 20152 Enable nodeId# 20152 Enable nodeId# 19905 Delete nodeId# 20189 Add nodeId# 20217 Disable nodeId# 20152 Pick Add nodeId# 20218 Disable nodeId# 20142 Delete nodeId# 20213 Add nodeId# 20219 Disable nodeId# 20208 Enable nodeId# 20066 Pick Add nodeId# 20220 Add nodeId# 20221 Add nodeId# 20222 Add nodeId# 20223 Delete nodeId# 20166 Pick Delete nodeId# 20152 Add nodeId# 20224 Pick Delete nodeId# 20136 Pick Add nodeId# 20225 Delete nodeId# 19905 Disable nodeId# 20173 Pick Pick Delete nodeId# 20207 Add nodeId# 20226 Add nodeId# 20227 Enable nodeId# 20208 Disable nodeId# 20185 Enable nodeId# 20142 Enable nodeId# 20173 Disable nodeId# 20169 Delete nodeId# 20218 Disable nodeId# 20180 Enable nodeId# 20031 Pick Disable nodeId# 20215 Disable nodeId# 20177 Add nodeId# 20228 Enable nodeId# 20210 Enable nodeId# 20177 Pick Enable nodeId# 20185 Enable nodeId# 20169 Enable nodeId# 20211 Delete nodeId# 20212 Pick Add nodeId# 20229 Pick Enable nodeId# 20180 Disable nodeId# 20219 Add nodeId# 20230 Pick Enable nodeId# 20219 Enable nodeId# 20215 Pick Pick Disable nodeId# 20066 Pick Enable nodeId# 20066 Disable nodeId# 20211 Pick Pick Enable nodeId# 20211 Add nodeId# 20231 Delete nodeId# 20222 Delete nodeId# 20195 Delete nodeId# 20066 Delete nodeId# 20165 Pick Pick Pick Pick Pick Disable nodeId# 20180 Enable nodeId# 20180 Pick Add nodeId# 20232 Delete nodeId# 20211 Add nodeId# 20233 Disable nodeId# 20177 Pick Enable nodeId# 20177 Delete nodeId# 20228 Delete nodeId# 20202 Disable nodeId# 20178 Delete nodeId# 20142 Add nodeId# 20234 Add nodeId# 20235 Enable nodeId# 20178 Pick Add nodeId# 20236 Disable nodeId# 20230 Disable nodeId# 20173 Pick Add nodeId# 20237 Disable nodeId# 20031 Enable nodeId# 20230 Pick Disable nodeId# 20109 Pick Add nodeId# 20238 Delete nodeId# 20196 Disable nodeId# 20133 Add nodeId# 20239 Disable nodeId# 
20231 Disable nodeId# 20229 Add nodeId# 20240 Disable nodeId# 20236 Pick Pick Enable nodeId# 20231 Disable nodeId# 20192 Pick Enable nodeId# 20173 Delete nodeId# 20185 Add nodeId# 20241 Pick Enable nodeId# 20109 Enable nodeId# 20192 Disable nodeId# 20103 Disable nodeId# 20224 Disable nodeId# 20231 Delete nodeId# 20217 Delete nodeId# 20192 Enable nodeId# 20231 Disable nodeId# 20219 Disable nodeId# 20208 Delete nodeId# 20240 Enable nodeId# 20224 Disable nodeId# 20181 Pick Add nodeId# 20242 Disable nodeId# 20224 Add nodeId# 20243 Add nodeId# 20244 Pick Disable nodeId# 20225 Pick Delete nodeId# 20210 Delete nodeId# 20214 Enable nodeId# 20225 Delete nodeId# 20031 Enable nodeId# 20219 Enable nodeId# 20208 Disable nodeId# 20243 Disable nodeId# 20197 Delete nodeId# 20221 Pick Pick Pick Add nodeId# 20245 Delete nodeId# 20236 Add nodeId# 20246 Pick Add nodeId# 20247 Pick Disable nodeId# 20237 Add nodeId# 20248 Delete nodeId# 20169 Enable nodeId# 20224 Add nodeId# 20249 Disable nodeId# 20242 Add nodeId# 20250 Add nodeId# 20251 Add nodeId# 20252 Pick Enable nodeId# 20133 Pick Disable nodeId# 20203 Delete nodeId# 20238 Delete nodeId# 20177 Add nodeId# 20253 Enable nodeId# 20242 Pick Pick Disable nodeId# 20223 Disable nodeId# 20172 Disable nodeId# 20052 Delete nodeId# 20178 Pick Enable nodeId# 20229 Enable nodeId# 20172 Add nodeId# 20254 Enable nodeId# 20203 Delete nodeId# 20230 Disable nodeId# 20249 Disable nodeId# 20216 Disable nodeId# 20246 Pick Pick Enable nodeId# 20197 Delete nodeId# 20203 Pick Add nodeId# 20255 Enable nodeId# 20237 Enable nodeId# 20216 Disable nodeId# 20201 Delete nodeId# 20180 Pick Add nodeId# 20256 Add nodeId# 20257 Disable nodeId# 20237 Add nodeId# 20258 Pick Pick Delete nodeId# 20173 Disable nodeId# 20255 Disable nodeId# 20234 Enable nodeId# 20246 Pick Enable nodeId# 20243 Pick Delete nodeId# 20247 Pick Disable nodeId# 20208 Disable nodeId# 20229 Add nodeId# 20259 Delete nodeId# 20215 Disable nodeId# 20235 Delete nodeId# 20251 Enable nodeId# 20103 Add nodeId# 20260 Pick Delete nodeId# 20248 Add nodeId# 20261 Pick Add nodeId# 20262 Delete nodeId# 20250 Add nodeId# 20263 Pick Disable nodeId# 20109 Add nodeId# 20264 Pick Add nodeId# 20265 Pick Disable nodeId# 20172 Disable nodeId# 20100 Enable nodeId# 20223 Add nodeId# 20266 Pick Enable nodeId# 20201 Delete nodeId# 20254 Add nodeId# 20267 Add nodeId# 20268 Disable nodeId# 20220 Delete nodeId# 20235 Pick Pick Enable nodeId# 20229 Enable nodeId# 20249 Delete nodeId# 20242 Delete nodeId# 20266 Enable nodeId# 20237 Add nodeId# 20269 Add nodeId# 20270 Add nodeId# 20271 Enable nodeId# 20109 Enable nodeId# 20255 Add nodeId# 20272 Delete nodeId# 20265 Pick Delete nodeId# 20241 Delete nodeId# 20234 Enable nodeId# 20172 Pick Delete nodeId# 20197 Enable nodeId# 20052 Pick Delete nodeId# 20258 Add nodeId# 20273 Enable nodeId# 20100 Enable nodeId# 20220 Enable nodeId# 20181 Pick Enable nodeId# 20208 Add nodeId# 20274 Disable nodeId# 20232 Add nodeId# 20275 Enable nodeId# 20232 Pick Pick Add nodeId# 20276 Pick Add nodeId# 20277 Delete nodeId# 20237 Pick Delete nodeId# 20201 Delete nodeId# 20200 Pick Disable nodeId# 20219 Delete nodeId# 20255 Add nodeId# 20278 Add nodeId# 20279 Enable nodeId# 20219 Add nodeId# 20280 Delete nodeId# 20245 Disable nodeId# 20256 Delete nodeId# 20216 Pick Pick Pick Add nodeId# 20281 Enable nodeId# 20256 Pick Disable nodeId# 20233 Delete nodeId# 20273 Disable nodeId# 20280 Enable nodeId# 20280 Add nodeId# 20282 Enable nodeId# 20233 Add nodeId# 20283 Pick Pick Add nodeId# 20284 Add nodeId# 20285 Disable nodeId# 20253 
Delete nodeId# 20276 Delete nodeId# 20282 Pick Delete nodeId# 20249 Disable nodeId# 20194 Enable nodeId# 20194 Pick Pick Disable nodeId# 20225 Add nodeId# 20286 Disable nodeId# 20223 Disable nodeId# 20227 Delete nodeId# 20198 Disable nodeId# 20267 Enable nodeId# 20253 Enable nodeId# 20225 Pick Delete nodeId# 20052 Delete nodeId# 20261 Disable nodeId# 20274 Disable nodeId# 20219 Disable nodeId# 20181 Enable nodeId# 20219 Add nodeId# 20287 Delete nodeId# 20253 Add nodeId# 20288 Pick Enable nodeId# 20267 Add nodeId# 20289 Pick Disable nodeId# 20289 Pick Add nodeId# 20290 Disable nodeId# 20219 Delete nodeId# 20100 Enable nodeId# 20181 Disable nodeId# 20267 Add nodeId# 20291 Enable nodeId# 20219 Add nodeId# 20292 Pick Add nodeId# 20293 Disable nodeId# 20291 Delete nodeId# 20271 Disable nodeId# 20190 Enable nodeId# 20223 Delete nodeId# 20190 Pick Pick Add nodeId# 20294 Add nodeId# 20295 Enable nodeId# 20289 Delete nodeId# 20223 Add nodeId# 20296 Add nodeId# 20297 Add nodeId# 20298 Disable nodeId# 20278 Delete nodeId# 20259 Enable nodeId# 20267 Delete nodeId# 20252 Enable nodeId# 20227 Enable nodeId# 20274 Enable nodeId# 20278 Add nodeId# 20299 Disable nodeId# 20229 Add nodeId# 20300 Disable nodeId# 20281 Add nodeId# 20301 Add nodeId# 20302 Pick Enable nodeId# 20291 Disable nodeId# 20231 Add nodeId# 20303 Disable nodeId# 20204 |86.6%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> DataStreams::TestListStreamConsumers [GOOD] >> DataStreams::TestListShards1Shard >> TDistconfGenerateConfigTest::GenerateConfigSimpleCases [GOOD] >> TDistconfGenerateConfigTest::IgnoreNodes [GOOD] >> TDistconfGenerateConfigTest::GenerateConfigReplicasSpecificVolume [GOOD] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [FAIL] >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::GenerateConfigReplicasSpecificVolume [GOOD] Test command err: Actual: { RingGroups { NToSelect: 1 Ring { Node: 1 } } } Expected: NToSelect: 1 Actual: { RingGroups { NToSelect: 1 Ring { Node: 1 } Ring { Node: 2 } } } Expected: NToSelect: 1 Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } } } Expected: NToSelect: 3 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 6 } Ring { Node: 11 } Ring { Node: 16 } Ring { Node: 21 } Ring { Node: 26 } Ring { Node: 31 } Ring { Node: 36 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { 
Node: 8 } Ring { Node: 9 } Ring { Node: 11 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 8 } Ring { Node: 9 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 >> TBlobStorageWardenTest::TestDeleteStoragePool |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] >> FolderServiceTest::TFolderService [GOOD] >> TDistconfGenerateConfigTest::UsedNodes >> TDistconfGenerateConfigTest::UsedNodes [GOOD] >> TDistconfGenerateConfigTest::UseOldNodesInDisconnectedDC [GOOD] >> TDistconfGenerateConfigTest::GenerateConfig3DCCases >> TDistconfGenerateConfigTest::GenerateConfig3DCCases [GOOD] >> TDistconfGenerateConfigTest::GenerateConfig3DCBigCases >> TDistconfGenerateConfigTest::GenerateConfig3DCBigCases [GOOD] >> TDistconfGenerateConfigTest::GenerateConfigReplicasOverrides [GOOD] >> TBlobStorageWardenTest::TestFilterBadSerials ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestSendUsefulMonitoring [GOOD] Test command err: 2026-01-04T02:32:05.271281Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:05.272491Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:05.272595Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:05.272650Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:05.274741Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:05.275550Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/c5zo/001516/r3tmp/tmpJUwLgZ/pdisk_1.dat 2026-01-04T02:32:05.904269Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [e2e5f1b9c917f854] bootstrap ActorId# [1:488:2466] Group# 33554432 BlobCount# 1 BlobIDs# 
[[72057594037932033:2:8:0:0:1353:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-04T02:32:05.904463Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:05.904512Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:05.904542Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:05.904577Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:05.904607Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:05.904636Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:05.904674Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [e2e5f1b9c917f854] restore Id# [72057594037932033:2:8:0:0:1353:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2026-01-04T02:32:05.904756Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1353:1] Marker# BPG33 2026-01-04T02:32:05.904809Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [e2e5f1b9c917f854] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1353:1] Marker# BPG32 2026-01-04T02:32:05.904854Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1353:2] Marker# BPG33 2026-01-04T02:32:05.904882Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [e2e5f1b9c917f854] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1353:2] Marker# BPG32 2026-01-04T02:32:05.904917Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1353:3] Marker# BPG33 2026-01-04T02:32:05.904945Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [e2e5f1b9c917f854] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1353:3] Marker# BPG32 2026-01-04T02:32:05.905132Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:47:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1353:3] FDS# 1353 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:32:05.905202Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:40:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1353:2] FDS# 1353 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:32:05.905254Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:61:2105] 
NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1353:1] FDS# 1353 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:32:05.907844Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1353:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90653 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2026-01-04T02:32:05.908105Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1353:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90653 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2026-01-04T02:32:05.908217Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1353:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 90653 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2026-01-04T02:32:05.908304Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [e2e5f1b9c917f854] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1353:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2026-01-04T02:32:05.908367Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [e2e5f1b9c917f854] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1353:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-04T02:32:05.908542Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.25 sample PartId# [72057594037932033:2:8:0:0:1353:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.251 sample PartId# [72057594037932033:2:8:0:0:1353:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.251 sample PartId# [72057594037932033:2:8:0:0:1353:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 3.895 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 4.105 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 4.226 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2026-01-04T02:32:05.931183Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 4294967295 IsLimitedKeyless# 0 fullIfPossible# 1 Marker# DSP58 2026-01-04T02:32:05.933024Z node 1 :BS_PROXY CRIT: dsproxy_impl.h:329: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvBlock {TabletId# 1234 Generation# 1 Deadline# 18446744073709551 IsMonitored# 1} Response# TEvBlockResult {Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). 
It happens when the request was sent for an invalid groupID"} Marker# DSP31 Sending TEvPut 2026-01-04T02:32:05.933279Z node 1 :BS_PROXY DEBUG: dsproxy_impl.h:329: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvPut {Id# [1234:1:0:0:0:5:0] Size# 5 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:1:0:0:0:5:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID" ApproximateFreeSpaceShare# 0} Marker# DSP31 2026-01-04T02:32:05.933410Z node 1 :BS_PROXY DEBUG: dsproxy_impl.h:329: The request was sent for an invalid groupID Group# 4294967295 HandleError ev# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Response# TEvCollectGarbageResult {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Status# ERROR ErrorReason# "Created as unconfigured in error state (DSPE11). It happens when the request was sent for an invalid groupID"} Marker# DSP31 2026-01-04T02:32:06.574843Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:06.578637Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:06.579356Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:06.579454Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:06.580919Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:06.581002Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/c5zo/001516/r3tmp/tmplviuVn/pdisk_1.dat 2026-01-04T02:32:07.122252Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [e2e5f1b9c917f854] bootstrap ActorId# [2:488:2466] Gro ... 
stablishingSessions Marker# DSP03 2026-01-04T02:32:09.026660Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:09.026849Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:09.027060Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:09.027136Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:09.027200Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:09.027254Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:09.027307Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:09.027334Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:230: Group# 2181038082 -> StateWork Marker# DSP11 2026-01-04T02:32:09.027371Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:82: Group# 2181038082 SetStateWork Marker# DSP15 2026-01-04T02:32:09.027414Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:340: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 0 Marker# DSP57 initialize full monitoring 2026-01-04T02:32:09.028299Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [d70ef3c23a1a2346] bootstrap ActorId# [3:614:2517] 
Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:5:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2026-01-04T02:32:09.029456Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [d70ef3c23a1a2346] Id# [1234:2:0:0:0:5:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:09.029512Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [d70ef3c23a1a2346] restore Id# [1234:2:0:0:0:5:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-04T02:32:09.029560Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [d70ef3c23a1a2346] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG33 2026-01-04T02:32:09.029601Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [d70ef3c23a1a2346] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2026-01-04T02:32:09.029721Z node 3 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [3:607:2510] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:32:09.033341Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [d70ef3c23a1a2346] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 0 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog CostSettings# { SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524257} Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 1 }}}} from# [82000002:2:0:0:0] Marker# BPP01 2026-01-04T02:32:09.033445Z node 3 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [d70ef3c23a1a2346] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2026-01-04T02:32:09.033493Z node 3 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [d70ef3c23a1a2346] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-04T02:32:09.034761Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:32:09.034819Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:58: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2026-01-04T02:32:09.034971Z node 4 :BS_PROXY DEBUG: dsproxy_impl.h:219: Group# 2181038082 HandleEnqueue# TEvCollectGarbage {TabletId# 1234 RecordGeneration# 4294967295 PerGenerationCounter# 4294967295 Channel# 0 Deadline# 18446744073709551 Collect# true CollectGeneration# 4294967295 CollectStep# 4294967295 Hard# true IsMultiCollectAllowed# 1 IsMonitored# 1} Marker# DSP17 2026-01-04T02:32:09.035377Z node 4 :BS_NODE ERROR: {NW19@node_warden_group.cpp:221} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/c5zo/001516/r3tmp/tmpt5VX4o//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 2026-01-04T02:32:09.036378Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 2 IsLimitedKeyless# true Marker# DSP02 2026-01-04T02:32:09.036432Z node 4 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring 
Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:32:09.038615Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:616:2105] Create Queue# [4:618:2106] targetNodeId# 3 Marker# DSP01 2026-01-04T02:32:09.038720Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:616:2105] Create Queue# [4:619:2107] targetNodeId# 3 Marker# DSP01 2026-01-04T02:32:09.038792Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:616:2105] Create Queue# [4:620:2108] targetNodeId# 3 Marker# DSP01 2026-01-04T02:32:09.038861Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:616:2105] Create Queue# [4:621:2109] targetNodeId# 3 Marker# DSP01 2026-01-04T02:32:09.038927Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:616:2105] Create Queue# [4:622:2110] targetNodeId# 3 Marker# DSP01 2026-01-04T02:32:09.038996Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:616:2105] Create Queue# [4:623:2111] targetNodeId# 3 Marker# DSP01 2026-01-04T02:32:09.039062Z node 4 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [4:616:2105] Create Queue# [4:624:2112] targetNodeId# 3 Marker# DSP01 2026-01-04T02:32:09.039085Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:32:09.040381Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:09.040603Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:09.040675Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:09.040845Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:09.040900Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 5 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:09.040946Z 
node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:09.040977Z node 4 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:09.040994Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:230: Group# 2181038082 -> StateWork Marker# DSP11 2026-01-04T02:32:09.041022Z node 4 :BS_PROXY INFO: dsproxy_state.cpp:82: Group# 2181038082 SetStateWork Marker# DSP15 2026-01-04T02:32:09.041163Z node 4 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [4:618:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring |86.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |86.6%| [LD] {RESULT} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |86.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/statistics/service/ut/ydb-core-statistics-service-ut |86.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestFilterBadSerials [GOOD] >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError >> BasicUsage::BrokenCredentialsProvider [GOOD] >> BasicUsage::CreateTopicWithCustomName >> JsonChangeRecord::DataChangeVersion [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::UseOldNodesInDisconnectedDC [GOOD] Test command err: Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 5 Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 10 } Ring { Node: 11 } Ring { Node: 12 } Ring { Node: 13 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 5 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 17 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 2 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } Ring { Node: 11 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 12 } Ring { Node: 15 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: 
NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 14 } Ring { Node: 17 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> JsonChangeRecord::DataChange [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderService [GOOD] Test command err: 2026-01-04T02:32:05.788987Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328606902848145:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:05.789658Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:06.047276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:06.047442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:06.061067Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:06.100389Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:06.110472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:06.214431Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:06.372665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:06.380107Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cbcb4e9ed50] Connect to grpc://localhost:4031 2026-01-04T02:32:06.414270Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cbcb4e9ed50] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2026-01-04T02:32:06.422414Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cbcb4e9ed50] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4031: Failed to connect to remote host: Connection refused 2026-01-04T02:32:06.423907Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cbcb4e9ed50] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2026-01-04T02:32:06.424503Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cbcb4e9ed50] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4031: Failed to connect to remote host: Connection refused 2026-01-04T02:32:06.793667Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:07.430565Z node 1 :GRPC_CLIENT DEBUG: 
grpc_service_client.h:122: [7cbcb4e9ed50] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2026-01-04T02:32:07.431314Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cbcb4e9ed50] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:4031: Failed to connect to remote host: Connection refused 2026-01-04T02:32:08.438015Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cbcb4e9ed50] Request ResolveFoldersRequest { folder_ids: "i_am_not_exists" } 2026-01-04T02:32:08.444654Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cbcb4e9ed50] Status 5 Not Found 2026-01-04T02:32:08.446176Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cbcb4e9ed50] Request ResolveFoldersRequest { folder_ids: "i_am_exists" } 2026-01-04T02:32:08.449024Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cbcb4e9ed50] Response ResolveFoldersResponse { resolved_folders { cloud_id: "response_cloud_id" } } |86.7%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TDistconfGenerateConfigTest::GenerateConfigReplicasOverrides [GOOD] Test command err: Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } } } Expected: NToSelect: 3 Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 3 } Ring { Node: 5 } } } Expected: NToSelect: 3 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 19 } Ring { Node: 20 } Ring { Node: 21 } Ring { Node: 37 } Ring { Node: 38 } Ring { Node: 39 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 1 Ring { Node: 1 } } } Expected: NToSelect: 1 Actual: { RingGroups { NToSelect: 5 Ring { Node: 1 } Ring { Node: 2 } Ring { Node: 3 } Ring { Node: 4 } Ring { Node: 5 } Ring { Node: 6 } Ring { Node: 7 } Ring { Node: 8 } Ring { Node: 9 } Ring { Node: 10 } } } Expected: NToSelect: 5 Actual: { RingGroups { NToSelect: 9 Ring { Node: 1 } Ring { Node: 4 } Ring { Node: 7 } Ring { Node: 10 } Ring { Node: 13 } Ring { Node: 16 } Ring { Node: 19 } Ring { Node: 22 } Ring { Node: 25 } } } Expected: NToSelect: 9 Actual: { RingGroups { NToSelect: 3 Ring { Node: 1 } Ring { Node: 10 } Ring { Node: 19 } } } Expected: NToSelect: 3 |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestDeleteStoragePool [GOOD] >> TBlobStorageWardenTest::TestBlockEncriptedGroup >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] |86.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::DataChangeVersion [GOOD] |86.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |86.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> 
JsonChangeRecord::DataChange [GOOD] |86.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |86.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |86.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest >> TServiceAccountServiceTest::IssueToken >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> TServiceAccountServiceTest::IssueToken [GOOD] >> DataStreams::TestReservedStorageMetering [GOOD] >> DataStreams::TestReservedConsumersMetering >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] >> TBlobStorageWardenTest::TestLimitedKeylessGroupThenNoMonitoring [GOOD] >> TBlobStorageWardenTest::TestReceivedPDiskRestartNotAllowed [GOOD] >> TBlobStorageWardenTest::TestInferPDiskSlotCountWithRealNodeWarden >> TBlobStorageWardenTest::TestGivenPDiskFormatedWithGuid1AndCreatedWithGuid2WhenYardInitThenError [GOOD] >> TBlobStorageWardenTest::TestEvVGenerationChangeRace >> DataStreams::TestCreateExistingStream [GOOD] >> DataStreams::ListStreamsValidation >> TBlobStorageWardenTest::TestBlockEncriptedGroup [GOOD] >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] >> Yq_1::DescribeQuery [GOOD] |86.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |86.7%| [LD] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/load_test/ut_ycsb/ydb-core-load_test-ut_ycsb |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest >> Cdc::EnqueueRequestProcessSend [GOOD] >> Cdc::InitialScanAndResolvedTimestamps >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::Get [GOOD] Test command err: 2026-01-04T02:32:04.755419Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328600236588129:2171];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:04.758640Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:05.084329Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:05.099236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:05.099346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:05.128069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:05.227522Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: 
Notification cookie mismatch for subscription [1:7591328600236587982:2081] 1767493924744314 != 1767493924744317 2026-01-04T02:32:05.234190Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:05.380481Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:05.442527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:05.449245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:32:08.665718Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328617288627347:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:08.665821Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:08.692788Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:08.800301Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:08.809196Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:08.809268Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:08.841507Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:08.861924Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:09.066287Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:09.074200Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 |86.7%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart+UseSink [GOOD] >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink >> AggregateStatistics::ShouldBePings >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::ObtainTenantKeySamePin [GOOD] Test command err: 2026-01-04T02:32:09.562294Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:09.562590Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:09.563362Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:09.564829Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:09.568830Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:09.568915Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:09.569643Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/c5zo/0014f3/r3tmp/tmp4WLXMC/pdisk_1.dat 2026-01-04T02:32:10.125965Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [e2e5f1b9c917f854] bootstrap ActorId# [1:490:2468] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1353:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-04T02:32:10.126164Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:10.126212Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:10.126243Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:10.126274Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:10.126304Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:10.126335Z node 1 :BS_PROXY_PUT 
DEBUG: dsproxy_strategy_restore.h:42: [e2e5f1b9c917f854] Id# [72057594037932033:2:8:0:0:1353:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:10.126383Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [e2e5f1b9c917f854] restore Id# [72057594037932033:2:8:0:0:1353:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2026-01-04T02:32:10.126455Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1353:1] Marker# BPG33 2026-01-04T02:32:10.126521Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [e2e5f1b9c917f854] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1353:1] Marker# BPG32 2026-01-04T02:32:10.126573Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1353:2] Marker# BPG33 2026-01-04T02:32:10.126603Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [e2e5f1b9c917f854] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1353:2] Marker# BPG32 2026-01-04T02:32:10.126639Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [e2e5f1b9c917f854] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1353:3] Marker# BPG33 2026-01-04T02:32:10.126667Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [e2e5f1b9c917f854] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1353:3] Marker# BPG32 2026-01-04T02:32:10.126847Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:47:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1353:3] FDS# 1353 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:32:10.126922Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:40:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1353:2] FDS# 1353 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:32:10.127006Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:61:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1353:1] FDS# 1353 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:32:10.129788Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1353:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 90653 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2026-01-04T02:32:10.130121Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1353:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90653 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2026-01-04T02:32:10.130208Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [e2e5f1b9c917f854] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1353:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 
8 } Cost# 90653 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2026-01-04T02:32:10.130277Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [e2e5f1b9c917f854] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1353:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2026-01-04T02:32:10.130337Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [e2e5f1b9c917f854] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1353:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-04T02:32:10.130510Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.31 sample PartId# [72057594037932033:2:8:0:0:1353:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.311 sample PartId# [72057594037932033:2:8:0:0:1353:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.311 sample PartId# [72057594037932033:2:8:0:0:1353:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 4.156 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 4.412 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 4.495 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2026-01-04T02:32:10.222160Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [8d27cf9df52bfb78] bootstrap ActorId# [1:536:2506] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:224:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-04T02:32:10.222339Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:10.222391Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:10.222424Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:10.222458Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:10.222488Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:10.222539Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8d27cf9df52bfb78] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:10.222582Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [8d27cf9df52bfb78] restore Id# [72057594037932033:2:9:0:0:224:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2026-01-04T02:32:10.222659Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown 
to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG33 2026-01-04T02:32:10.222709Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [8d27cf9df52bfb78] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG32 2026-01-04T02:32:10.222756Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG33 2026-01-04T02:32:10.222785Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [8d27cf9df52bfb78] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG32 2026-01-04T02:32:10.222818Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [8d27cf9df52bfb78] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG33 2026-01-04T02:32:10.222846Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [8d27cf9df52bfb78] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG32 2026-01-04T02:32:10.223014Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:40:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:3] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:32:10.223093Z node 1 :BS_PROXY DEBUG: group_sessions.h:193 ... 0 to# 0 blob Id# [1234:2:0:0:0:5:1] Marker# BPG32 2026-01-04T02:32:12.230635Z node 2 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [2:607:2510] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:5:1] FDS# 5 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:32:12.233226Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [cd65997ea3b51537] received {EvVPutResult Status# OK ID# [1234:2:0:0:0:5:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 2 } Cost# 80039 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 3 }}}} from# [82000002:2:0:0:0] Marker# BPP01 2026-01-04T02:32:12.233330Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [cd65997ea3b51537] Result# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 2181038082 Marker# BPP12 2026-01-04T02:32:12.233383Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [cd65997ea3b51537] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:5:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-04T02:32:12.233899Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 2181038082 TEvConfigureProxy received GroupGeneration# IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:32:12.233946Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:58: Group# 2181038082 SetStateUnconfigured Marker# DSP07 2026-01-04T02:32:12.234077Z node 3 :BS_PROXY DEBUG: dsproxy_impl.h:219: Group# 2181038082 HandleEnqueue# TEvBlock {TabletId# 1234 Generation# 3 Deadline# 18446744073709551 IsMonitored# 1} Marker# DSP17 2026-01-04T02:32:12.234355Z node 3 :BS_NODE ERROR: {NW19@node_warden_group.cpp:221} error while parsing group GroupId# 2181038082 Err# LifeCyclePhase# KEY_NOT_LOADED Key.Id# "" Key.Version# 0 MainKey.Id# "/home/runner/.ya/build/build_root/c5zo/0014f3/r3tmp/tmpJPk2SE//key.txt" MainKey.Version# 1 GroupKeyNonce# 2181038082 
2026-01-04T02:32:12.235123Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 2181038082 TEvConfigureProxy received GroupGeneration# 2 IsLimitedKeyless# true Marker# DSP02 2026-01-04T02:32:12.235173Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 2181038082 IsLimitedKeyless# 1 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:32:12.236994Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:618:2105] Create Queue# [3:620:2106] targetNodeId# 2 Marker# DSP01 2026-01-04T02:32:12.237099Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:618:2105] Create Queue# [3:621:2107] targetNodeId# 2 Marker# DSP01 2026-01-04T02:32:12.237167Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:618:2105] Create Queue# [3:622:2108] targetNodeId# 2 Marker# DSP01 2026-01-04T02:32:12.237235Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:618:2105] Create Queue# [3:623:2109] targetNodeId# 2 Marker# DSP01 2026-01-04T02:32:12.237319Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:618:2105] Create Queue# [3:624:2110] targetNodeId# 2 Marker# DSP01 2026-01-04T02:32:12.237386Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:618:2105] Create Queue# [3:625:2111] targetNodeId# 2 Marker# DSP01 2026-01-04T02:32:12.237452Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 2181038082 Actor# [3:618:2105] Create Queue# [3:626:2112] targetNodeId# 2 Marker# DSP01 2026-01-04T02:32:12.237469Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 2181038082 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:32:12.241226Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 1 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:12.241401Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 2 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:12.241471Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 3 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:12.241651Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 4 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:12.241728Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 5 IsConnected# true 
ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:12.241776Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 6 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:12.241834Z node 3 :BS_PROXY DEBUG: dsproxy_state.cpp:259: Group# 2181038082 Handle TEvProxyQueueState# {VDiskId# [82000002:2:0:0:0] QueueId# 7 IsConnected# true ExtraBlockChecksSupport# true Checksumming# false CostModel# {SeekTimeUs# 8000 ReadSpeedBps# 127000000 WriteSpeedBps# 127000000 ReadBlockSize# 524288 WriteBlockSize# 524288 MinHugeBlobInBytes# 524249 GType# none}} Duration# 0.000000s Marker# DSP04 2026-01-04T02:32:12.241859Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:230: Group# 2181038082 -> StateWork Marker# DSP11 2026-01-04T02:32:12.241887Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:82: Group# 2181038082 SetStateWork Marker# DSP15 2026-01-04T02:32:12.242031Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:150: [efc53170c63234c6] bootstrap ActorId# [3:627:2113] Group# 2181038082 TabletId# 1234 Generation# 3 Deadline# 586524-01-19T08:01:49.551615Z RestartCounter# 0 Marker# DSPB05 2026-01-04T02:32:12.242084Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:111: [efc53170c63234c6] Sending TEvVBlock Tablet# 1234 Generation# 3 vdiskId# [82000002:2:0:0:0] node# 2 Marker# DSPB03 2026-01-04T02:32:12.242242Z node 3 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [3:620:2106] NKikimr::TEvBlobStorage::TEvVBlock# NKikimrBlobStorage.TEvVBlock TabletId: 1234 Generation: 3 VDiskID { GroupID: 2181038082 GroupGeneration: 2 Ring: 0 Domain: 0 VDisk: 0 } IssuerGuid: 11186022357415408581 MsgQoS { ExtQueueId: PutTabletLog } cookie# 0 2026-01-04T02:32:12.246596Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:43: [efc53170c63234c6] Handle TEvVBlockResult status# OK From# [82000002:2:0:0:0] NodeId# 2 Marker# DSPB01 2026-01-04T02:32:12.246686Z node 3 :BS_PROXY_BLOCK DEBUG: dsproxy_block.cpp:100: [efc53170c63234c6] Result# TEvBlockResult {Status# OK} Marker# DSPB04 Sending TEvPut 2026-01-04T02:32:12.247122Z node 3 :BS_PROXY INFO: dsproxy_impl.h:329: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:3:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:3:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2026-01-04T02:32:12.247398Z node 3 :BS_PROXY DEBUG: dsproxy_impl.h:329: Group# 2181038082 HandleError ev# TEvPut {Id# [1234:4:0:0:0:10:0] Size# 10 Deadline# 18446744073709551 HandleClass# TabletLog Tactic# Default} Response# TEvPutResult {Id# [1234:4:0:0:0:10:0] Status# ERROR StatusFlags# { } ErrorReason# "Created as LIMITED without keys. It happens when tenant keys are missing on the node." 
ApproximateFreeSpaceShare# 0} Marker# DSP31 Sending TEvPut 2026-01-04T02:32:12.247776Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [c85e1a21dcb31b54] bootstrap ActorId# [2:628:2520] Group# 2181038082 BlobCount# 1 BlobIDs# [[1234:2:0:0:0:11:0]] HandleClass# TabletLog Tactic# Default RestartCounter# 0 Marker# BPP13 2026-01-04T02:32:12.247893Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [c85e1a21dcb31b54] Id# [1234:2:0:0:0:11:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:12.247935Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [c85e1a21dcb31b54] restore Id# [1234:2:0:0:0:11:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-04T02:32:12.247983Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [c85e1a21dcb31b54] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG33 2026-01-04T02:32:12.248017Z node 2 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [c85e1a21dcb31b54] Sending missing VPut part# 0 to# 0 blob Id# [1234:2:0:0:0:11:1] Marker# BPG32 2026-01-04T02:32:12.248116Z node 2 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [2:607:2510] NKikimr::TEvBlobStorage::TEvVPut# {ID# [1234:2:0:0:0:11:1] FDS# 11 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:32:12.248318Z node 2 :BS_VDISK_PUT ERROR: blobstorage_skeleton.cpp:581: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) TEvVPut: failed to pass the Hull check; id# [1234:2:0:0:0:11:1] status# {Status# BLOCKED} Marker# BSVS03 2026-01-04T02:32:12.248534Z node 2 :BS_PROXY_PUT INFO: dsproxy_put.cpp:265: [c85e1a21dcb31b54] received {EvVPutResult Status# BLOCKED ErrorReason# "blocked" ID# [1234:2:0:0:0:11:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 3 } Cost# 80086 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 4 }}}} from# [82000002:2:0:0:0] Marker# BPP01 2026-01-04T02:32:12.248600Z node 2 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [c85e1a21dcb31b54] Result# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:2:0:0:0]" ApproximateFreeSpaceShare# 0} GroupId# 2181038082 Marker# BPP12 2026-01-04T02:32:12.248647Z node 2 :BS_PROXY_PUT NOTICE: dsproxy_put.cpp:499: [c85e1a21dcb31b54] SendReply putResult# TEvPutResult {Id# [1234:2:0:0:0:11:0] Status# BLOCKED StatusFlags# { } ErrorReason# "Got VPutResult status# BLOCKED from VDiskId# [82000002:2:0:0:0]" ApproximateFreeSpaceShare# 0} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-04T02:32:12.248943Z node 3 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [3:620:2106] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[1234:4294967295:4294967295:0] collect=[4294967295:4294967295] cookie# 0 |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::Test_Crreate_AutoPartitioning_Disabled [GOOD] Test command err: 2026-01-04T02:31:52.578610Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328549400321794:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:52.578773Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:53.305278Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:53.385223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:53.385329Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:53.492443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:53.633970Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:53.635860Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:53.653070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:54.053732Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:54.053755Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:54.053765Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:54.053871Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:54.566559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:54.697865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:31:54.921181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:31:55.233011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:31:55.608675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:32:00.532745Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591328583336090497:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:00.532796Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:00.617697Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:00.764511Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:00.764602Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:00.772741Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:00.812608Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:00.874090Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:01.057046Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:01.057076Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:01.057083Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:01.057168Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:01.359664Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:01.490828Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:32:01.577957Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:01.784165Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "8" shard_id: "shard-000002" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000001" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } ALTER_SCHEME: { Name: "test-topic" Split { Partition: 1 SplitBoundary: "a" } } 2026-01-04T02:32:03.107712Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 107:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:32:04.390099Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:32:04.520862Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:32:04.684193Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:32:05.026682Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:32:06.925565Z node 7 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007180s 2026-01-04T02:32:06.943121Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7591328609634347785:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:06.943468Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:06.972665Z node 8 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.012453s 2026-01-04T02:32:06.984044Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:07.093535Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:07.112077Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:07.112191Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:07.153335Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:07.161416Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:07.205218Z node 9 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.008169s 2026-01-04T02:32:07.202127Z node 8 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.009599s 2026-01-04T02:32:07.214014Z node 9 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008645s 2026-01-04T02:32:07.316255Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:07.316280Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:07.316289Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:07.316377Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:07.558867Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:07.626122Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:32:07.842496Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:32:07.960940Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> IndexBuildTest::CancellationNotEnoughRetries [GOOD] >> IndexBuildTest::CancellationNotEnoughRetriesUniq >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] |86.7%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ListConnectionsOnEmptyConnectionsTable [GOOD] Test command err: 2026-01-04T02:31:12.835027Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328379343961312:2268];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:12.835100Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0104 02:31:13.200126625 493218 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:31:13.220145823 493218 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:31:13.486823Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:13.670599Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:13.765347Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:23705 2026-01-04T02:31:13.765463Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:13.893881Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:13.894058Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:13.918147Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:13.950303Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:13.962318Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:31:13.989103Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:13.989264Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:13.993361Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:14.045797Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:14.048465Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:14.055284Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:14.069498Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:14.071135Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:14.099881Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:14.102063Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:14.130097Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:14.130188Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:14.134297Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:14.162358Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:14.250582Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:14.278005Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:14.301552Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:14.302632Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:14.304375Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:14.307731Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:14.314113Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:14.314431Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:23705: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:23705 } ] 2026-01-04T02:31:14.374595Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table " ... sion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7591328619212837052 RawX2: 4503616807242618 } } DstEndpoint { ActorId { RawX1: 7591328619212837046 RawX2: 4503616807242019 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-01-04T02:32:08.944906Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1117: SelfId: [4:7591328619212837052:2938], TxId: 281474976710738, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj1c9153b4h7ttkxmhr3k. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2UxMjI5ZGEtODVjN2YxZWMtYjQwNmUzOGYtMjBlOWY3MGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Update input channelId: 1, peer: [4:7591328619212837051:2937] 2026-01-04T02:32:08.944956Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328619212837052:2938], TxId: 281474976710738, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj1c9153b4h7ttkxmhr3k. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2UxMjI5ZGEtODVjN2YxZWMtYjQwNmUzOGYtMjBlOWY3MGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-01-04T02:32:08.945014Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [4:7591328619212837052:2938], TxId: 281474976710738, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj1c9153b4h7ttkxmhr3k. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2UxMjI5ZGEtODVjN2YxZWMtYjQwNmUzOGYtMjBlOWY3MGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7591328619212837051 RawX2: 4503616807242617 } } DstEndpoint { ActorId { RawX1: 7591328619212837052 RawX2: 4503616807242618 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7591328619212837052 RawX2: 4503616807242618 } } DstEndpoint { ActorId { RawX1: 7591328619212837046 RawX2: 4503616807242019 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-01-04T02:32:08.945028Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328619212837052:2938], TxId: 281474976710738, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj1c9153b4h7ttkxmhr3k. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2UxMjI5ZGEtODVjN2YxZWMtYjQwNmUzOGYtMjBlOWY3MGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:08.945233Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976710738, task: 1, CA Id [4:7591328619212837051:2937]. Recv TEvReadResult from ShardID=72075186224037896, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2026-01-04T02:32:08.945260Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976710738, task: 1, CA Id [4:7591328619212837051:2937]. Taken 0 locks 2026-01-04T02:32:08.945272Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976710738, task: 1, CA Id [4:7591328619212837051:2937]. 
new data for read #0 seqno = 1 finished = 1 2026-01-04T02:32:08.945292Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328619212837051:2937], TxId: 281474976710738, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj1c9153b4h7ttkxmhr3k. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2UxMjI5ZGEtODVjN2YxZWMtYjQwNmUzOGYtMjBlOWY3MGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-04T02:32:08.945306Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328619212837051:2937], TxId: 281474976710738, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj1c9153b4h7ttkxmhr3k. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2UxMjI5ZGEtODVjN2YxZWMtYjQwNmUzOGYtMjBlOWY3MGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:08.945318Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976710738, task: 1, CA Id [4:7591328619212837051:2937]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-04T02:32:08.945333Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976710738, task: 1, CA Id [4:7591328619212837051:2937]. enter pack cells method shardId: 72075186224037896 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-04T02:32:08.945345Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976710738, task: 1, CA Id [4:7591328619212837051:2937]. exit pack cells method shardId: 72075186224037896 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-04T02:32:08.945354Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976710738, task: 1, CA Id [4:7591328619212837051:2937]. returned 0 rows; processed 0 rows 2026-01-04T02:32:08.945377Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976710738, task: 1, CA Id [4:7591328619212837051:2937]. dropping batch for read #0 2026-01-04T02:32:08.945385Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976710738, task: 1, CA Id [4:7591328619212837051:2937]. effective maxinflight 1 sorted 1 2026-01-04T02:32:08.945395Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976710738, task: 1, CA Id [4:7591328619212837051:2937]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-04T02:32:08.945407Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976710738, task: 1, CA Id [4:7591328619212837051:2937]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-04T02:32:08.945471Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7591328619212837051:2937], TxId: 281474976710738, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj1c9153b4h7ttkxmhr3k. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2UxMjI5ZGEtODVjN2YxZWMtYjQwNmUzOGYtMjBlOWY3MGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-04T02:32:08.945488Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328619212837052:2938], TxId: 281474976710738, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj1c9153b4h7ttkxmhr3k. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . 
PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2UxMjI5ZGEtODVjN2YxZWMtYjQwNmUzOGYtMjBlOWY3MGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646923 2026-01-04T02:32:08.945501Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976710738, task: 2. Finish input channelId: 1, from: [4:7591328619212837051:2937] 2026-01-04T02:32:08.945520Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328619212837052:2938], TxId: 281474976710738, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj1c9153b4h7ttkxmhr3k. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2UxMjI5ZGEtODVjN2YxZWMtYjQwNmUzOGYtMjBlOWY3MGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:08.945549Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7591328619212837052:2938], TxId: 281474976710738, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj1c9153b4h7ttkxmhr3k. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2UxMjI5ZGEtODVjN2YxZWMtYjQwNmUzOGYtMjBlOWY3MGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-04T02:32:08.945556Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328619212837051:2937], TxId: 281474976710738, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj1c9153b4h7ttkxmhr3k. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2UxMjI5ZGEtODVjN2YxZWMtYjQwNmUzOGYtMjBlOWY3MGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646927 2026-01-04T02:32:08.945574Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328619212837051:2937], TxId: 281474976710738, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj1c9153b4h7ttkxmhr3k. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2UxMjI5ZGEtODVjN2YxZWMtYjQwNmUzOGYtMjBlOWY3MGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:08.945597Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710738, task: 1. Tasks execution finished 2026-01-04T02:32:08.945614Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7591328619212837051:2937], TxId: 281474976710738, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj1c9153b4h7ttkxmhr3k. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2UxMjI5ZGEtODVjN2YxZWMtYjQwNmUzOGYtMjBlOWY3MGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-04T02:32:08.945724Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710738, task: 1. pass away 2026-01-04T02:32:08.945829Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710738;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-04T02:32:08.946023Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328619212837052:2938], TxId: 281474976710738, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj1c9153b4h7ttkxmhr3k. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2UxMjI5ZGEtODVjN2YxZWMtYjQwNmUzOGYtMjBlOWY3MGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:08.946076Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710738, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2026-01-04T02:32:08.946086Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710738, task: 2. Tasks execution finished 2026-01-04T02:32:08.946099Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7591328619212837052:2938], TxId: 281474976710738, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj1c9153b4h7ttkxmhr3k. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2UxMjI5ZGEtODVjN2YxZWMtYjQwNmUzOGYtMjBlOWY3MGI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-04T02:32:08.946163Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710738, task: 2. pass away 2026-01-04T02:32:08.946232Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710738;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; >> AggregateStatistics::ShouldBePings [GOOD] >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> ReadSessionImplTest::CommonHandler |86.7%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> TBlobStorageWardenTest::TestInferPDiskSlotCountWithRealNodeWarden [GOOD] >> DataStreams::TestListShards1Shard [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingOfLocalTablets [GOOD] Test command err: 2026-01-04T02:32:13.273992Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-04T02:32:13.274272Z node 1 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2 2026-01-04T02:32:13.274309Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 2 is not local. 2026-01-04T02:32:13.274388Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2026-01-04T02:32:13.275093Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = ERROR 2026-01-04T02:32:13.275152Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 1 is not local. 2026-01-04T02:32:13.275256Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2026-01-04T02:32:13.275281Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:13.275315Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = ERROR 2026-01-04T02:32:13.275336Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 4 is not local. 2026-01-04T02:32:13.275425Z node 1 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5 2026-01-04T02:32:13.275462Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 5 is not local. 2026-01-04T02:32:13.275511Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 6 2026-01-04T02:32:13.275569Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = ERROR 2026-01-04T02:32:13.275591Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 7 is not local. 2026-01-04T02:32:13.275638Z node 1 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 1, client id = [1:15:2062], server id = [1:15:2062], tablet id = 8 2026-01-04T02:32:13.275656Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 8 is not local. 
2026-01-04T02:32:13.275675Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-04T02:32:13.275781Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2026-01-04T02:32:13.275804Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest |86.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |86.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots |86.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence_reboots/ydb-core-tx-schemeshard-ut_sequence_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCcorrectProcessingTabletTimeout [GOOD] Test command err: 2026-01-04T02:32:13.373990Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-04T02:32:13.375206Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [1:8:2055], tablet id = 1, status = OK 2026-01-04T02:32:13.375514Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:8:2055], path = { OwnerId: 3 LocalId: 3 } 2026-01-04T02:32:13.375602Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 2026-01-04T02:32:13.375706Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [1:9:2056], tablet id = 2, status = OK 2026-01-04T02:32:13.375752Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:9:2056], path = { OwnerId: 3 LocalId: 3 } 2026-01-04T02:32:13.375840Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [1:10:2057], tablet id = 3, status = OK 2026-01-04T02:32:13.375881Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:10:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-04T02:32:13.375960Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:8:2055], server id = [0:0:0], tablet id = 1, status = ERROR 2026-01-04T02:32:13.375987Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:13.376032Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [1:11:2058], tablet id = 4, status = OK 2026-01-04T02:32:13.376068Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:11:2058], path = { OwnerId: 3 LocalId: 3 } 2026-01-04T02:32:13.376102Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2026-01-04T02:32:13.376151Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [1:12:2059], tablet id = 5, status = OK 2026-01-04T02:32:13.376191Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:12:2059], path = { OwnerId: 3 LocalId: 3 } 2026-01-04T02:32:13.376281Z node 1 :STATISTICS DEBUG: 
service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [1:13:2060], tablet id = 6, status = OK 2026-01-04T02:32:13.376337Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:13:2060], path = { OwnerId: 3 LocalId: 3 } 2026-01-04T02:32:13.376385Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:10:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2026-01-04T02:32:13.376414Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:13.376440Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 5 2026-01-04T02:32:13.376482Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [1:14:2061], tablet id = 7, status = OK 2026-01-04T02:32:13.376510Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:14:2061], path = { OwnerId: 3 LocalId: 3 } 2026-01-04T02:32:13.376549Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:12:2059], server id = [0:0:0], tablet id = 5, status = ERROR 2026-01-04T02:32:13.376565Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:13.376586Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 7 2026-01-04T02:32:13.376623Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:14:2061], server id = [0:0:0], tablet id = 7, status = ERROR 2026-01-04T02:32:13.376639Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:13.387436Z node 1 :STATISTICS DEBUG: service_impl.cpp:1034: Tablet 1 has already been processed 2026-01-04T02:32:13.387513Z node 1 :STATISTICS ERROR: service_impl.cpp:1038: No result was received from the tablet 2 2026-01-04T02:32:13.387541Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 2 is not local. 2026-01-04T02:32:13.387635Z node 1 :STATISTICS DEBUG: service_impl.cpp:1034: Tablet 3 has already been processed 2026-01-04T02:32:13.387668Z node 1 :STATISTICS ERROR: service_impl.cpp:1038: No result was received from the tablet 4 2026-01-04T02:32:13.387686Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 4 is not local. 2026-01-04T02:32:13.387728Z node 1 :STATISTICS DEBUG: service_impl.cpp:1034: Tablet 5 has already been processed 2026-01-04T02:32:13.387747Z node 1 :STATISTICS ERROR: service_impl.cpp:1038: No result was received from the tablet 6 2026-01-04T02:32:13.387762Z node 1 :STATISTICS DEBUG: service_impl.cpp:1069: Tablet 6 is not local. 
2026-01-04T02:32:13.387826Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-04T02:32:13.387891Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-04T02:32:13.387921Z node 1 :STATISTICS DEBUG: service_impl.cpp:1027: Skip TEvStatisticsRequestTimeout 2026-01-04T02:32:13.387994Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:9:2056], server id = [0:0:0], tablet id = 2, status = ERROR 2026-01-04T02:32:13.388017Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:13.388059Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:11:2058], server id = [0:0:0], tablet id = 4, status = ERROR 2026-01-04T02:32:13.388078Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:13.388112Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:13:2060], server id = [0:0:0], tablet id = 6, status = ERROR 2026-01-04T02:32:13.388139Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ChildNodesShouldBeInvalidateByTimeout [GOOD] Test command err: 2026-01-04T02:32:13.626265Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-04T02:32:13.627805Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2026-01-04T02:32:13.628250Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2026-01-04T02:32:13.628588Z node 3 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-04T02:32:13.628808Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-04T02:32:13.628964Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 2026-01-04T02:32:13.629168Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2026-01-04T02:32:13.629194Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:13.629233Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [3:42:2057], tablet id = 3, status = OK 2026-01-04T02:32:13.629305Z node 3 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [3:42:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-04T02:32:13.629478Z node 4 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2026-01-04T02:32:13.629571Z node 3 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2026-01-04T02:32:13.629618Z node 3 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-04T02:32:13.629706Z node 4 :STATISTICS DEBUG: 
service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2026-01-04T02:32:13.629763Z node 4 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-04T02:32:13.629868Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2026-01-04T02:32:13.629889Z node 3 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:13.629926Z node 4 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 4 2026-01-04T02:32:13.629954Z node 4 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-04T02:32:13.630099Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2026-01-04T02:32:13.630158Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2026-01-04T02:32:13.630178Z node 4 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:13.630243Z node 2 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2026-01-04T02:32:13.642719Z node 3 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-04T02:32:13.642826Z node 3 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-04T02:32:13.642994Z node 4 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-04T02:32:13.643018Z node 4 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-04T02:32:13.653983Z node 1 :STATISTICS INFO: service_impl.cpp:418: Node 2 is unavailable 2026-01-04T02:32:13.654062Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-04T02:32:13.654161Z node 2 :STATISTICS DEBUG: service_impl.cpp:403: Skip TEvKeepAliveTimeout 2026-01-04T02:32:13.654255Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-04T02:32:13.654287Z node 1 :STATISTICS DEBUG: service_impl.cpp:395: Skip TEvKeepAliveTimeout 2026-01-04T02:32:13.654321Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-04T02:32:13.654346Z node 1 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-04T02:32:13.654509Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-04T02:32:13.654560Z node 1 :STATISTICS DEBUG: service_impl.cpp:430: Skip TEvAggregateKeepAlive >> Yq_1::Basic_EmptyList [GOOD] >> Yq_1::Basic_EmptyDict |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBePings [GOOD] Test command err: 2026-01-04T02:32:13.578326Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-04T02:32:13.578782Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-04T02:32:13.693970Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 
2026-01-04T02:32:13.694083Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 2 2026-01-04T02:32:13.694120Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-04T02:32:13.694787Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:16:2056], server id = [0:0:0], tablet id = 1, status = ERROR 2026-01-04T02:32:13.694848Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:13.694939Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:19:2055], server id = [0:0:0], tablet id = 2, status = ERROR 2026-01-04T02:32:13.694967Z node 2 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:13.695043Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2026-01-04T02:32:13.695084Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 >> DataStreams::TestPutRecordsCornerCases [GOOD] >> DataStreams::TestPutRecords >> Yq_1::Basic_TaggedLiteral [GOOD] |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestInferPDiskSlotCountWithRealNodeWarden [GOOD] Test command err: 2026-01-04T02:32:10.636989Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:10.640072Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:10.641499Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/c5zo/0012c3/r3tmp/tmpPADVP2/pdisk_1.dat 2026-01-04T02:32:10.801044Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:10.801162Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:10.801233Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:3:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:0:0] targetVDisk# [2000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:11.342729Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [abc2fc901918ac71] bootstrap ActorId# [1:552:2466] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:1346:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-04T02:32:11.342900Z node 1 :BS_PROXY_PUT DEBUG: 
dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1346:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:11.342943Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1346:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:11.342970Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1346:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:11.343014Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1346:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:11.343041Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1346:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:11.343068Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:1346:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:11.343102Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [abc2fc901918ac71] restore Id# [72057594037932033:2:8:0:0:1346:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2026-01-04T02:32:11.343175Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:1346:1] Marker# BPG33 2026-01-04T02:32:11.343217Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [abc2fc901918ac71] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:1346:1] Marker# BPG32 2026-01-04T02:32:11.343257Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:1346:2] Marker# BPG33 2026-01-04T02:32:11.343281Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [abc2fc901918ac71] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:1346:2] Marker# BPG32 2026-01-04T02:32:11.343316Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:1346:3] Marker# BPG33 2026-01-04T02:32:11.343342Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [abc2fc901918ac71] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:1346:3] Marker# BPG32 2026-01-04T02:32:11.343507Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:65:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1346:3] FDS# 1346 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:32:11.343572Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1346:2] FDS# 1346 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:32:11.343616Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:1346:1] FDS# 1346 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 
2026-01-04T02:32:11.356445Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1346:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 4 } Cost# 90598 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 5 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2026-01-04T02:32:11.356722Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1346:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90598 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2026-01-04T02:32:11.356811Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:1346:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 90598 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2026-01-04T02:32:11.356893Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [abc2fc901918ac71] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:1346:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2026-01-04T02:32:11.356950Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [abc2fc901918ac71] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:1346:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-04T02:32:11.357123Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 1.054 sample PartId# [72057594037932033:2:8:0:0:1346:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.055 sample PartId# [72057594037932033:2:8:0:0:1346:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 1.055 sample PartId# [72057594037932033:2:8:0:0:1346:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 13.941 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 14.167 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 14.251 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } 2026-01-04T02:32:11.439556Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [a55b41de52eb2a08] bootstrap ActorId# [1:598:2504] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:9:0:0:224:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-04T02:32:11.439728Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [a55b41de52eb2a08] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:11.439767Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [a55b41de52eb2a08] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:11.439795Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [a55b41de52eb2a08] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 2 
part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:11.439859Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [a55b41de52eb2a08] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:11.439888Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [a55b41de52eb2a08] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:11.439916Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [a55b41de52eb2a08] Id# [72057594037932033:2:9:0:0:224:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:11.439957Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [a55b41de52eb2a08] restore Id# [72057594037932033:2:9:0:0:224:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2026-01-04T02:32:11.440353Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [a55b41de52eb2a08] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG33 2026-01-04T02:32:11.440413Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [a55b41de52eb2a08] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:9:0:0:224:1] Marker# BPG32 2026-01-04T02:32:11.440456Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [a55b41de52eb2a08] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG33 2026-01-04T02:32:11.440485Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [a55b41de52eb2a08] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:9:0:0:224:2] Marker# BPG32 2026-01-04T02:32:11.440515Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [a55b41de52eb2a08] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG33 2026-01-04T02:32:11.440541Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [a55b41de52eb2a08] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:9:0:0:224:3] Marker# BPG32 2026-01-04T02:32:11.440706Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:58:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:3] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:32:11.440831Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:79:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:9:0:0:224:2] FDS# 224 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:32:11.440883Z node 1 :BS_PROXY DEBUG: group_sessions ... 
blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 240 PDiskId# 1002 2026-01-04T02:32:13.731380Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 243 PDiskId# 1002 2026-01-04T02:32:13.731418Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 246 PDiskId# 1002 2026-01-04T02:32:13.731457Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 249 PDiskId# 1002 2026-01-04T02:32:13.731500Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 252 PDiskId# 1002 2026-01-04T02:32:13.731544Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 255 PDiskId# 1002 2026-01-04T02:32:13.731580Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 258 PDiskId# 1002 2026-01-04T02:32:13.731617Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 261 PDiskId# 1002 2026-01-04T02:32:13.731654Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 264 PDiskId# 1002 2026-01-04T02:32:13.731694Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 267 PDiskId# 1002 2026-01-04T02:32:13.731730Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 270 PDiskId# 1002 2026-01-04T02:32:13.731777Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 273 PDiskId# 1002 2026-01-04T02:32:13.731840Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 276 PDiskId# 1002 2026-01-04T02:32:13.731883Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 279 PDiskId# 1002 2026-01-04T02:32:13.731926Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 282 PDiskId# 1002 2026-01-04T02:32:13.731966Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 285 PDiskId# 1002 2026-01-04T02:32:13.732001Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 288 PDiskId# 1002 2026-01-04T02:32:13.732044Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 291 PDiskId# 1002 2026-01-04T02:32:13.732081Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 294 PDiskId# 1002 2026-01-04T02:32:13.732130Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 297 PDiskId# 1002 2026-01-04T02:32:13.732194Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 300 PDiskId# 1002 2026-01-04T02:32:13.732239Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 303 PDiskId# 1002 2026-01-04T02:32:13.732282Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 306 PDiskId# 1002 2026-01-04T02:32:13.732318Z node 3 :BS_PDISK DEBUG: 
{BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 309 PDiskId# 1002 2026-01-04T02:32:13.732380Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 312 PDiskId# 1002 2026-01-04T02:32:13.732435Z node 3 :BS_PDISK DEBUG: {BPD62@blobstorage_pdisk_writer.h:458} LogPageHeader writing ChunkIdx# 0 SectorIdx# 315 PDiskId# 1002 2026-01-04T02:32:13.732498Z node 3 :BS_PDISK DEBUG: {BPD66@blobstorage_pdisk_writer.h:388} TSectorWriter TerminateLog large SectorBytesFree# 2119 ChunkIdx# 0 SectorIdx# 315 SectorOffset# 1290240 PDiskId# 1002 2026-01-04T02:32:13.734764Z node 3 :BS_PDISK DEBUG: {BPD69@blobstorage_pdisk_impl_log.cpp:846} WriteSysLogRestorePoint FirstLogChunkToParseCommits# 1 CommonLogger# 0x00007D24537FF280 "LogChunks.size()"# 1 "LogChunks.front().ChunkIdx"# 1 BeginSectorIdx# 171 EndSectorIdx# 318 PDiskId# 1002 2026-01-04T02:32:13.734901Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TInitialReadMetadataResult ReqId# 10020000004 PDiskId# 1002 2026-01-04T02:32:13.734994Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:345} ReadMetadataIfNeeded: initiating read ChunkIdx# 18903 OffsetInSectors# 0 ReadOffset# 2576760176640 BytesToRead# 4096 ReqId# 2565120001348 PDiskId# 1002 2026-01-04T02:32:13.735051Z node 3 :BS_PDISK NOTICE: {BPD01@blobstorage_pdisk_impl_log.cpp:1759} PDisk have successfully started PDiskId# 1002 2026-01-04T02:32:13.735105Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_completion_impl.cpp:423: TCompletionEventSender {EvLogInitResult} 2026-01-04T02:32:13.735532Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TWhiteboardReport ReqId# 10020000005 PDiskId# 1002 2026-01-04T02:32:13.735604Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1002 ReqId# 2565120001592 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 12302.486679 2026-01-04T02:32:13.735791Z node 3 :BS_PDISK INFO: {BPD01@blobstorage_pdisk_impl_log.cpp:1760} StartupOwnerInfo# { PDisk system/log ChunkIds: {0, 1} Free ChunkIds: {2..18902} PDiskId# 1002 2026-01-04T02:32:13.735918Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TWhiteboardReport OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1002 2026-01-04T02:32:13.735995Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1002 ReqId# 2565120001592 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-01-04T02:32:13.736048Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-04T02:32:13.736142Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-04T02:32:13.737556Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1002 ReqId# 2565120001348 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 12302.486064 2026-01-04T02:32:13.738074Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1002 2026-01-04T02:32:13.738123Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1002 ReqId# 2565120001348 PushRequestToScheduler Push to FastOperationsQueue.size# 1 
2026-01-04T02:32:13.738168Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-04T02:32:13.738225Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:390} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 OffsetInSectors# 0 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1002 2026-01-04T02:32:13.738285Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_req_creator.h:219} CreateReqFromArgs Req# NKikimr::NPDisk::TInitialReadMetadataResult ReqId# 10020000006 PDiskId# 1002 2026-01-04T02:32:13.738347Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:345} ReadMetadataIfNeeded: initiating read ChunkIdx# 18903 OffsetInSectors# 16640 ReadOffset# 2576828334080 BytesToRead# 4096 ReqId# 2565120001860 PDiskId# 1002 2026-01-04T02:32:13.738428Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-04T02:32:13.739596Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:4771: PDiskId# 1002 ReqId# 2565120001860 InputRequest InputQueue.Push priortiyClass# 13 creationTime# 12302.489421 2026-01-04T02:32:13.739736Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl.cpp:3115} PreprocessRequest RequestType# NKikimr::NPDisk::TInitialReadMetadataResult OwnerId# 0 OwnerRound# 0 errStatus# ERROR PDiskId# 1002 2026-01-04T02:32:13.739779Z node 3 :BS_PDISK DEBUG: blobstorage_pdisk_impl.cpp:3478: PDiskId# 1002 ReqId# 2565120001860 PushRequestToScheduler Push to FastOperationsQueue.size# 1 2026-01-04T02:32:13.739823Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-04T02:32:13.739880Z node 3 :BS_PDISK DEBUG: {BPD01@blobstorage_pdisk_impl_metadata.cpp:390} ProcessInitialReadMetadataResult (formatted) ChunkIdx# 18903 OffsetInSectors# 16640 ErrorReason# header checksum does not pass validation Payload.size# 0 PDiskId# 1002 2026-01-04T02:32:13.739938Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 Got TEvPDiskStateUpdate# NKikimrWhiteboard.TPDiskStateInfo PDiskId: 1002 Path: "SectorMap:TestInferPDiskSlotCount:2400" AvailableSize: 2576487546880 TotalSize: 2576980377600 State: Normal SerialNumber: "" SystemSize: 817889280 LogUsedSize: 136314880 LogTotalSize: 27262976000 ExpectedSlotCount: 12 NumActiveSlots: 0 SlotSizeInUnits: 2 PDiskUsage: 0 Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1002 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: InitialFormatRead SlotCount: 12 SlotSizeInUnits: 2 PDiskUsage: 100 } Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1002 AvailableSize: 0 TotalSize: 0 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: InitialFormatRead SlotCount: 12 SlotSizeInUnits: 2 PDiskUsage: 100 } Got TEvControllerUpdateDiskStatus# NKikimrBlobStorage.TEvControllerUpdateDiskStatus PDisksMetrics { PDiskId: 1002 AvailableSize: 2576487546880 TotalSize: 2576980377600 MaxReadThroughput: 127000000 MaxWriteThroughput: 127000000 NonRealTimeMs: 0 SlowDeviceMs: 0 MaxIOPS: 125 State: Normal SlotCount: 12 
SlotSizeInUnits: 2 PDiskUsage: 0 } 2026-01-04T02:32:13.751484Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-04T02:32:13.764100Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-04T02:32:13.774787Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-04T02:32:13.785041Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-04T02:32:13.795321Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-04T02:32:13.805575Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-04T02:32:13.816154Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 2026-01-04T02:32:13.826406Z node 3 :BS_PDISK DEBUG: {BPD82@blobstorage_pdisk_impl.cpp:3890} got requests from forsetti totalLogReqs# 0 totalChunkReqs# 0 PDiskId# 1002 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Create_And_Modify_The_Same_Connection [GOOD] Test command err: 2026-01-04T02:31:11.494440Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328376011426959:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:11.502628Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0104 02:31:12.100460366 492428 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:31:12.100655007 492428 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:31:12.333544Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.333740Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.348304Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.362349Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.490035Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.490326Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.498050Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.501932Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:12.546469Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.546566Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.568187Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:12.595087Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:31:12.633920Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.634274Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:10424 2026-01-04T02:31:12.634365Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.634426Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.706736Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.752303Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.752374Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.752427Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.752529Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.752598Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.752643Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.792649Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.806193Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:12.815894Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.851881Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.874188Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.904033Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:12.922151Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:13.016510Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:10424: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:10424 } ] 2026-01-04T02:31:13.105916Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/conne ... 0 2026-01-04T02:32:08.881132Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:5019: SelfId: [4:7591328619569633732:2997], TxId: 281474976715755, task: 2. Add data: 233 / 233 2026-01-04T02:32:08.881183Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4987: SelfId: [4:7591328619569633732:2997], TxId: 281474976715755, task: 2. Send data=233, closed=1, bufferActorId=[4:7591328619569633721:2333] 2026-01-04T02:32:08.881205Z node 4 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:427: SelfId: [4:7591328619569633727:2997], TxId: 281474976715755, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj197cg670fbcs0z90vx7. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODVmYTViNy03YjczNzdhNC1hYjE4MjA0YS0yYmYxNjk5Ng==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 233 2026-01-04T02:32:08.881236Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715755, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2026-01-04T02:32:08.881248Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715755, task: 2. Tasks execution finished 2026-01-04T02:32:08.881261Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2128: SelfId: [4:7591328619569633727:2997], TxId: 281474976715755, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj197cg670fbcs0z90vx7. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODVmYTViNy03YjczNzdhNC1hYjE4MjA0YS0yYmYxNjk5Ng==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Waiting finish of sink[0] 2026-01-04T02:32:08.881285Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328619569633726:2996], TxId: 281474976715755, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj197cg670fbcs0z90vx7. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODVmYTViNy03YjczNzdhNC1hYjE4MjA0YS0yYmYxNjk5Ng==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646927 2026-01-04T02:32:08.881315Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328619569633726:2996], TxId: 281474976715755, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj197cg670fbcs0z90vx7. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODVmYTViNy03YjczNzdhNC1hYjE4MjA0YS0yYmYxNjk5Ng==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:08.881340Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715755, task: 1. Tasks execution finished 2026-01-04T02:32:08.881354Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7591328619569633726:2996], TxId: 281474976715755, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj197cg670fbcs0z90vx7. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODVmYTViNy03YjczNzdhNC1hYjE4MjA0YS0yYmYxNjk5Ng==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. 
All channels and sinks finished 2026-01-04T02:32:08.881482Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715755, task: 1. pass away 2026-01-04T02:32:08.881582Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715755;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-04T02:32:08.881847Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2941: SelfId: [4:7591328619569633721:2333], SessionActorId: [4:7591328529375315838:2333], Create new TableWriteActor for table `Root/yq/connections` ([72057594046644480:55:1]). lockId=281474976715748. ActorId=[4:7591328619569633733:2333] 2026-01-04T02:32:08.881917Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:481: Table: `Root/yq/connections` ([72057594046644480:55:1]), SessionActorId: [4:7591328529375315838:2333]Open: token=0 2026-01-04T02:32:08.882069Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4892: SelfId: [4:7591328619569633732:2997], TxId: 281474976715755, task: 2. TKqpForwardWriteActor recieve EvBufferWriteResult from [4:7591328619569633721:2333] 2026-01-04T02:32:08.882103Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4910: SelfId: [4:7591328619569633732:2997], TxId: 281474976715755, task: 2. Finished 2026-01-04T02:32:08.882131Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328619569633727:2997], TxId: 281474976715755, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj197cg670fbcs0z90vx7. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODVmYTViNy03YjczNzdhNC1hYjE4MjA0YS0yYmYxNjk5Ng==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:08.882192Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715755, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2026-01-04T02:32:08.882197Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:489: SelfId: [4:7591328619569633733:2333], Table: `Root/yq/connections` ([72057594046644480:55:1]), SessionActorId: [4:7591328529375315838:2333]Write: token=0 2026-01-04T02:32:08.882203Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715755, task: 2. Tasks execution finished 2026-01-04T02:32:08.882216Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7591328619569633727:2997], TxId: 281474976715755, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj197cg670fbcs0z90vx7. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=ODVmYTViNy03YjczNzdhNC1hYjE4MjA0YS0yYmYxNjk5Ng==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-04T02:32:08.882293Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715755, task: 2. pass away 2026-01-04T02:32:08.882297Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:496: SelfId: [4:7591328619569633733:2333], Table: `Root/yq/connections` ([72057594046644480:55:1]), SessionActorId: [4:7591328529375315838:2333]Close: token=0 2026-01-04T02:32:08.882367Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715755;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-04T02:32:08.882476Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3486: SelfId: [4:7591328619569633721:2333], SessionActorId: [4:7591328529375315838:2333], Start prepare for distributed commit 2026-01-04T02:32:08.882486Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1067: SelfId: [4:7591328619569633733:2333], Table: `Root/yq/connections` ([72057594046644480:55:1]), SessionActorId: [4:7591328529375315838:2333]SetPrepare; txId=281474976715755 2026-01-04T02:32:08.882496Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3409: SelfId: [4:7591328619569633721:2333], SessionActorId: [4:7591328529375315838:2333], Flush data 2026-01-04T02:32:08.882632Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1235: SelfId: [4:7591328619569633733:2333], Table: `Root/yq/connections` ([72057594046644480:55:1]), SessionActorId: [4:7591328529375315838:2333]Send EvWrite to ShardID=72075186224037900, isPrepare=1, isImmediateCommit=0, TxId=281474976715755, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715748 DataShard: 72075186224037900 Generation: 1 Counter: 2 SchemeShard: 72057594046644480 PathId: 55, Size=320, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=1, BufferMemory=320 2026-01-04T02:32:08.882734Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3620: SelfId: [4:7591328619569633721:2333], SessionActorId: [4:7591328529375315838:2333], Send EvWrite (external) to ShardID=72075186224037891, isPrepare=1, isRollback=0, TxId=281474976715755, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976715748 DataShard: 72075186224037891 Generation: 1 Counter: 2 SchemeShard: 72057594046644480 PathId: 47, Size=0, Cookie=0, OperationsCount=0, IsFinal=1, Attempts=0 2026-01-04T02:32:08.883583Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:740: SelfId: [4:7591328619569633733:2333], Table: `Root/yq/connections` ([72057594046644480:55:1]), SessionActorId: [4:7591328529375315838:2333]Recv EvWriteResult from ShardID=72075186224037900, Status=STATUS_PREPARED, TxId=281474976715755, Locks= , Cookie=1 2026-01-04T02:32:08.883674Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3409: SelfId: [4:7591328619569633721:2333], SessionActorId: [4:7591328529375315838:2333], Flush data 2026-01-04T02:32:08.883712Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4150: SelfId: [4:7591328619569633721:2333], SessionActorId: [4:7591328529375315838:2333], Recv EvWriteResult (external) from ShardID=72075186224037891, Status=STATUS_PREPARED, TxId=281474976715755, Locks= , Cookie=0 2026-01-04T02:32:08.883729Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4468: SelfId: [4:7591328619569633721:2333], SessionActorId: [4:7591328529375315838:2333], Got prepared result TxId=281474976715755, TabletId=72075186224037891, Cookie=0 2026-01-04T02:32:08.883763Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3532: SelfId: [4:7591328619569633721:2333], SessionActorId: [4:7591328529375315838:2333], Start distributed commit with TxId=281474976715755 2026-01-04T02:32:08.883777Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1075: SelfId: [4:7591328619569633733:2333], Table: `Root/yq/connections` ([72057594046644480:55:1]), SessionActorId: [4:7591328529375315838:2333]SetDistributedCommit; txId=281474976715755 2026-01-04T02:32:08.883811Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3719: SelfId: [4:7591328619569633721:2333], SessionActorId: [4:7591328529375315838:2333], Execute planned transaction, coordinator: 72057594046316545, volitale: 1, shards: 2 2026-01-04T02:32:08.886884Z node 4 :KQP_COMPUTE DEBUG: 
kqp_write_actor.cpp:3796: SelfId: [4:7591328619569633721:2333], SessionActorId: [4:7591328529375315838:2333], Got transaction status, status: 16 2026-01-04T02:32:08.895915Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3796: SelfId: [4:7591328619569633721:2333], SessionActorId: [4:7591328529375315838:2333], Got transaction status, status: 17 2026-01-04T02:32:08.900731Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4182: SelfId: [4:7591328619569633721:2333], SessionActorId: [4:7591328529375315838:2333], Recv EvWriteResult (external) from ShardID=72075186224037891, Status=STATUS_COMPLETED, TxId=281474976715755, Locks= , Cookie=0 2026-01-04T02:32:08.900770Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4497: SelfId: [4:7591328619569633721:2333], SessionActorId: [4:7591328529375315838:2333], Got completed result TxId=281474976715755, TabletId=72075186224037891, Cookie=0, Locks= 2026-01-04T02:32:08.901864Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:740: SelfId: [4:7591328619569633733:2333], Table: `Root/yq/connections` ([72057594046644480:55:1]), SessionActorId: [4:7591328529375315838:2333]Recv EvWriteResult from ShardID=72075186224037900, Status=STATUS_COMPLETED, TxId=281474976715755, Locks= , Cookie=0 2026-01-04T02:32:08.901902Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1023: SelfId: [4:7591328619569633733:2333], Table: `Root/yq/connections` ([72057594046644480:55:1]), SessionActorId: [4:7591328529375315838:2333]Got completed result TxId=281474976715755, TabletId=72075186224037900, Cookie=0, Mode=2, Locks= 2026-01-04T02:32:08.901938Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4535: SelfId: [4:7591328619569633721:2333], SessionActorId: [4:7591328529375315838:2333], Committed TxId=281474976715755 |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::ShouldBeCorrectlyAggregateStatisticsFromAllNodes [GOOD] Test command err: 2026-01-04T02:32:14.064613Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-04T02:32:14.065517Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2026-01-04T02:32:14.065909Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2026-01-04T02:32:14.066110Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [1:39:2059], tablet id = 2, status = OK 2026-01-04T02:32:14.066165Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:39:2059], path = { OwnerId: 3 LocalId: 3 } 2026-01-04T02:32:14.066627Z node 3 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-04T02:32:14.066788Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-04T02:32:14.066918Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [1:40:2060], tablet id = 3, status = OK 2026-01-04T02:32:14.066990Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:40:2060], path = { 
OwnerId: 3 LocalId: 3 } 2026-01-04T02:32:14.067301Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:46:2057], server id = [2:46:2057], tablet id = 4, status = OK 2026-01-04T02:32:14.067354Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:46:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-04T02:32:14.067413Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 2026-01-04T02:32:14.067642Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [3:44:2057], tablet id = 5, status = OK 2026-01-04T02:32:14.067708Z node 3 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [3:44:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-04T02:32:14.067763Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 4 2026-01-04T02:32:14.067870Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 2 2026-01-04T02:32:14.067998Z node 4 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2026-01-04T02:32:14.068101Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:46:2057], server id = [0:0:0], tablet id = 4, status = ERROR 2026-01-04T02:32:14.068125Z node 2 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:14.068178Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2026-01-04T02:32:14.068249Z node 3 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 5 2026-01-04T02:32:14.068287Z node 3 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-04T02:32:14.068444Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [4:49:2057], tablet id = 6, status = OK 2026-01-04T02:32:14.068513Z node 4 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [4:49:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-04T02:32:14.068678Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2026-01-04T02:32:14.068709Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:14.068745Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:44:2057], server id = [0:0:0], tablet id = 5, status = ERROR 2026-01-04T02:32:14.068767Z node 3 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:14.068791Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:39:2059], server id = [0:0:0], tablet id = 2, status = ERROR 2026-01-04T02:32:14.068813Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:14.068845Z node 4 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 6 2026-01-04T02:32:14.068897Z node 4 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-04T02:32:14.069113Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:40:2060], server id = [0:0:0], tablet id = 3, status = ERROR 2026-01-04T02:32:14.069143Z node 1 :STATISTICS DEBUG: 
service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:14.069197Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:49:2057], server id = [0:0:0], tablet id = 6, status = ERROR 2026-01-04T02:32:14.069231Z node 4 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:14.069268Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2026-01-04T02:32:14.069433Z node 2 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2026-01-04T02:32:14.069501Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-04T02:32:14.069686Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 2 2026-01-04T02:32:14.069745Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets+UseAuthToken [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets+UseAuthToken >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DescribeQuery [GOOD] Test command err: 2026-01-04T02:31:12.955580Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328377111658901:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:12.955663Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:13.022564Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown E0104 02:31:13.426100792 493326 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:31:13.426229182 493326 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:31:13.777769Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:13.790191Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:13.793116Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:13.873235Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:13.906633Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:13.968569Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:13.969926Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:13.993217Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.014104Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:14.034684Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:24307 2026-01-04T02:31:14.035093Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.063505Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.077279Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.112580Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.114099Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.114161Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.206383Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.206462Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.206536Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.206584Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.206607Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.206674Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.211664Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.211730Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.242864Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.295926Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.296077Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.321247Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.325606Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:24307: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:24307 } ] 2026-01-04T02:31:14.326939Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: ... 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:646: TxId: 281474976715738, task: 1, CA Id [4:7591328625456225740:2973]. Processing resolved ShardId# 72075186224037896, partition range: [(String : yandexcloud://some_folder_id, String : utqudof50f9viko6bfun) ; ()), i: 0, state ranges: 0, points: 1 2026-01-04T02:32:10.040231Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:684: TxId: 281474976715738, task: 1, CA Id [4:7591328625456225740:2973]. Add point to new shardId: 72075186224037896 2026-01-04T02:32:10.040344Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:733: TxId: 281474976715738, task: 1, CA Id [4:7591328625456225740:2973]. Pending shards States: TShardState{ TabletId: 72075186224037896, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://some_folder_id, String : utqudof50f9viko6bfun)], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://some_folder_id, String : utqudof50f9viko6bfun)], RetryAttempt: 0, ResolveAttempt: 1 }; 2026-01-04T02:32:10.040361Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715738, task: 1, CA Id [4:7591328625456225740:2973]. effective maxinflight 1024 sorted 0 2026-01-04T02:32:10.040375Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976715738, task: 1, CA Id [4:7591328625456225740:2973]. BEFORE: 1.0 2026-01-04T02:32:10.040425Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976715738, task: 1, CA Id [4:7591328625456225740:2973]. Send EvRead to shardId: 72075186224037896, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2026-01-04T02:32:10.040463Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715738, task: 1, CA Id [4:7591328625456225740:2973]. AFTER: 0.1 2026-01-04T02:32:10.040476Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715738, task: 1, CA Id [4:7591328625456225740:2973]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2026-01-04T02:32:10.041539Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715738, task: 1, CA Id [4:7591328625456225740:2973]. Recv TEvReadResult from ShardID=72075186224037896, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2026-01-04T02:32:10.041561Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715738, task: 1, CA Id [4:7591328625456225740:2973]. Taken 0 locks 2026-01-04T02:32:10.041574Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715738, task: 1, CA Id [4:7591328625456225740:2973]. new data for read #0 seqno = 1 finished = 1 2026-01-04T02:32:10.041600Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328625456225740:2973], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj2gje5q4wxyje4rk9tfr. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjEyYTk0ZDQtYzY2ZDg2ZjEtNTBjZjJmMmUtYjYwMzliNg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
CA StateFunc 276037645 2026-01-04T02:32:10.041622Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328625456225740:2973], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj2gje5q4wxyje4rk9tfr. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjEyYTk0ZDQtYzY2ZDg2ZjEtNTBjZjJmMmUtYjYwMzliNg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:10.041643Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715738, task: 1, CA Id [4:7591328625456225740:2973]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-04T02:32:10.041662Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715738, task: 1, CA Id [4:7591328625456225740:2973]. enter pack cells method shardId: 72075186224037896 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-04T02:32:10.041693Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715738, task: 1, CA Id [4:7591328625456225740:2973]. exit pack cells method shardId: 72075186224037896 processedRows: 0 packed rows: 1 freeSpace: 8386348 2026-01-04T02:32:10.041712Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715738, task: 1, CA Id [4:7591328625456225740:2973]. returned 1 rows; processed 1 rows 2026-01-04T02:32:10.041752Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715738, task: 1, CA Id [4:7591328625456225740:2973]. dropping batch for read #0 2026-01-04T02:32:10.041763Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715738, task: 1, CA Id [4:7591328625456225740:2973]. effective maxinflight 1024 sorted 0 2026-01-04T02:32:10.041775Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715738, task: 1, CA Id [4:7591328625456225740:2973]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-04T02:32:10.041792Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715738, task: 1, CA Id [4:7591328625456225740:2973]. returned async data processed rows 1 left freeSpace 8386348 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-04T02:32:10.042004Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7591328625456225740:2973], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj2gje5q4wxyje4rk9tfr. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjEyYTk0ZDQtYzY2ZDg2ZjEtNTBjZjJmMmUtYjYwMzliNg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-04T02:32:10.042022Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328625456225740:2973], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj2gje5q4wxyje4rk9tfr. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjEyYTk0ZDQtYzY2ZDg2ZjEtNTBjZjJmMmUtYjYwMzliNg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:10.042064Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976715738, task: 1. 
Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2026-01-04T02:32:10.042096Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328625456225741:2974], TxId: 281474976715738, task: 2. Ctx: { TraceId : 01ke3dj2gje5q4wxyje4rk9tfr. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjEyYTk0ZDQtYzY2ZDg2ZjEtNTBjZjJmMmUtYjYwMzliNg==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646923 2026-01-04T02:32:10.042128Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976715738, task: 2. Finish input channelId: 1, from: [4:7591328625456225740:2973] 2026-01-04T02:32:10.042169Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328625456225741:2974], TxId: 281474976715738, task: 2. Ctx: { TraceId : 01ke3dj2gje5q4wxyje4rk9tfr. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjEyYTk0ZDQtYzY2ZDg2ZjEtNTBjZjJmMmUtYjYwMzliNg==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-01-04T02:32:10.042323Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7591328625456225741:2974], TxId: 281474976715738, task: 2. Ctx: { TraceId : 01ke3dj2gje5q4wxyje4rk9tfr. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjEyYTk0ZDQtYzY2ZDg2ZjEtNTBjZjJmMmUtYjYwMzliNg==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-04T02:32:10.042335Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328625456225740:2973], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj2gje5q4wxyje4rk9tfr. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjEyYTk0ZDQtYzY2ZDg2ZjEtNTBjZjJmMmUtYjYwMzliNg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646927 2026-01-04T02:32:10.042362Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328625456225740:2973], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj2gje5q4wxyje4rk9tfr. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjEyYTk0ZDQtYzY2ZDg2ZjEtNTBjZjJmMmUtYjYwMzliNg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:10.042384Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715738, task: 1. Tasks execution finished 2026-01-04T02:32:10.042399Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7591328625456225740:2973], TxId: 281474976715738, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj2gje5q4wxyje4rk9tfr. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjEyYTk0ZDQtYzY2ZDg2ZjEtNTBjZjJmMmUtYjYwMzliNg==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-04T02:32:10.042528Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715738, task: 1. 
pass away 2026-01-04T02:32:10.042626Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715738;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-04T02:32:10.043070Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328625456225741:2974], TxId: 281474976715738, task: 2. Ctx: { TraceId : 01ke3dj2gje5q4wxyje4rk9tfr. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjEyYTk0ZDQtYzY2ZDg2ZjEtNTBjZjJmMmUtYjYwMzliNg==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-01-04T02:32:10.043108Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715738, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2026-01-04T02:32:10.043117Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715738, task: 2. Tasks execution finished 2026-01-04T02:32:10.043127Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7591328625456225741:2974], TxId: 281474976715738, task: 2. Ctx: { TraceId : 01ke3dj2gje5q4wxyje4rk9tfr. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MjEyYTk0ZDQtYzY2ZDg2ZjEtNTBjZjJmMmUtYjYwMzliNg==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2026-01-04T02:32:10.043176Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715738, task: 2. pass away 2026-01-04T02:32:10.043222Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715738;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> TBlobStorageWardenTest::TestEvVGenerationChangeRace [GOOD] |86.7%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestListShards1Shard [GOOD] Test command err: 2026-01-04T02:31:51.174912Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328544108576464:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:51.174980Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:51.669883Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:51.702945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:51.703072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:51.723191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:51.908111Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:51.935657Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:51.986014Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.007877s 2026-01-04T02:31:52.250545Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:52.426934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:52.426958Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:52.426972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:52.427118Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:52.838344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:53.125974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:31:53.447224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:31:53.901394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000000" } records { sequence_number: "20" shard_id: "shard-000000" } records { sequence_number: "21" shard_id: "shard-000000" } records { sequence_number: "22" shard_id: "shard-000000" } records { sequence_number: "23" shard_id: "shard-000000" } records { sequence_number: "24" shard_id: "shard-000000" } records { sequence_number: "25" shard_id: "shard-000000" } records { sequence_number: "26" shard_id: "shard-000000" } records { sequence_number: "27" shard_id: "shard-000000" } records { sequence_number: "28" shard_id: "shard-000000" } records { sequence_number: "29" shard_id: "shard-000000" } 2026-01-04T02:31:54.021559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:31:54.206001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-04T02:31:54.291136Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-04T02:31:54.291172Z 
node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-04T02:31:54.291186Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-04T02:31:54.311199Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2026-01-04T02:31:54.311252Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2026-01-04T02:31:54.311278Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1767493913749-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":1,"unit":"second","start":1767493913,"finish":1767493914},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493914}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037890-1767493914119-2","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1767493914,"finish":1767493914},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037890","source_wt":1767493914}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1767493914118-3","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1767493914,"finish":1767493914},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493914}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1767493913749-1","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":1,"unit":"second","start":1767493913,"finish":1767493914},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493914}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037890-1767493914119-2","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1767493914,"finish":1767493914},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037890","source_wt":1767493914}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestNonChargeableUser","id":"reserved_resources-root-72075186224037888-1767493914118-3","schema":"yds.resources.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":0,"reserved_storage_bytes":90596966400},"usage":{"quantity":0,"unit":"second","start":1767493914,"finish":1767493914},"labels":{"datastreams_stream_name":"stream_TestNonChargeableUser","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493914}' 2026-01-04T02:31:58.038994Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591328576192088743:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:58.040084Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:58.110275Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:58.265501Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:58.282670Z node 4 :HI ... 
t called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:32:04.722234Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:04.774919Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR E0000 00:00:1767493925.017620 508971 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767493925.017826 508971 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767493925.039046 508971 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767493925.039187 508971 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767493925.047818 508971 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767493925.047937 508971 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767493925.056035 508971 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767493925.056213 508971 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-01-04T02:32:05.078733Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:32:05.145903Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) E0000 00:00:1767493925.195756 508971 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767493925.195884 508971 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-01-04T02:32:05.206018Z 
node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) E0000 00:00:1767493925.266404 508971 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767493925.266534 508971 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-01-04T02:32:05.278749Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) E0000 00:00:1767493925.355807 508971 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767493925.355934 508971 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767493925.373101 508971 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767493925.373253 508971 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-01-04T02:32:05.427438Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-04T02:32:05.451594Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037893 not found 2026-01-04T02:32:05.451629Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2026-01-04T02:32:05.451642Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2026-01-04T02:32:05.451655Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037892 not found 2026-01-04T02:32:05.451666Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2026-01-04T02:32:05.451686Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2026-01-04T02:32:05.468929Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found 
2026-01-04T02:32:05.468993Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found E0000 00:00:1767493925.491629 508971 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767493925.491752 508971 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-01-04T02:32:08.802997Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:08.803119Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:09.002486Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:09.028155Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:09.028270Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:09.036573Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:09.114589Z node 12 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.008555s 2026-01-04T02:32:09.133317Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:09.166977Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:09.167007Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:09.167017Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:09.167120Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:09.471663Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:09.585485Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:32:09.786135Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:09.802457Z node 10 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) E0000 00:00:1767493930.022459 510388 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767493930.036380 510388 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767493930.052269 510388 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767493930.072888 510388 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn E0000 00:00:1767493930.083691 510388 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn |86.7%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2026-01-04T02:31:39.399745Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.399783Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.399875Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:39.400371Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:39.401120Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:39.417720Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.418455Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2026-01-04T02:31:39.420950Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.420977Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.421020Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:39.421281Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:39.421940Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:39.422058Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.422288Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2026-01-04T02:31:39.422765Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.425462Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:31:39.425584Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:39.425658Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-04T02:31:39.427292Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.427338Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.427396Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:39.427666Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:39.428284Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:39.428442Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.428744Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:31:39.429183Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.429321Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:31:39.429436Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:39.429486Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-04T02:31:39.430548Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.430621Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.430668Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:39.430973Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:39.431836Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:39.432027Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.432255Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:31:39.433050Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.433240Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:31:39.433361Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:39.433408Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 3 bytes 2026-01-04T02:31:39.438125Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.438155Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.438243Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:39.438607Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:39.439328Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:39.439477Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.439663Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:31:39.440470Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.440655Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:31:39.440767Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:39.440834Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-04T02:31:39.441940Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.441968Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.441991Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:39.442323Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:31:39.442788Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:31:39.442970Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.443143Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:31:39.443953Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:39.444119Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:31:39.444245Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:31:39.444288Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-04T02:31:39.444355Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [1, 2). 
Partition stream id: 1 2026-01-04T02:31:39.479555Z :ReadSession INFO: Random seed for debugging is 1767493899479516 2026-01-04T02:31:39.840488Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328492869347986:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:39.840579Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:39.882416Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.005317s 2026-01-04T02:31:39.965865Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328492865610052:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:39.969959Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:39.970443Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:31:40.055418Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:31:40.429959Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:40.452775Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:40.555283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:40.555375Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:40.564158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:40.564247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:40.646701Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:40.710625Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:31:40.734609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:40.789067Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:40.844472Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:40.886269Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:40.908591Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:41.047794Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:41.081881Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639257 Duration# 0.011967s 2026-01-04T02:31:41.268507Z node 1 :NET_CLASSIFIE ... eIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:09.531794Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:32:09.634003Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:09.634031Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:09.634057Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:09.634074Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:09.634086Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:32:09.735833Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:09.735866Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:09.735879Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:09.735897Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:09.735908Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:32:09.836037Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:09.836070Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:09.836081Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:09.836099Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:09.836111Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:32:09.889932Z node 1 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_1_1_2884662918562963351_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset 3 2026-01-04T02:32:09.937496Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:09.937527Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:09.937540Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:09.937557Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:09.937568Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:32:09.979771Z :INFO: [/Root] [/Root] [28c2055a-ef0d0bbe-52c73f0c-aa62d9e9] Closing read session. Close timeout: 0.000000s 2026-01-04T02:32:09.979849Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2026-01-04T02:32:09.979907Z :INFO: [/Root] [/Root] [28c2055a-ef0d0bbe-52c73f0c-aa62d9e9] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16680 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:32:09.980025Z :NOTICE: [/Root] [/Root] [28c2055a-ef0d0bbe-52c73f0c-aa62d9e9] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-04T02:32:09.980073Z :DEBUG: [/Root] [/Root] [28c2055a-ef0d0bbe-52c73f0c-aa62d9e9] [dc1] Abort session to cluster 2026-01-04T02:32:09.981130Z :NOTICE: [/Root] [/Root] [28c2055a-ef0d0bbe-52c73f0c-aa62d9e9] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:32:09.982478Z node 1 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_1_1_2884662918562963351_v1 grpc read done: success# 0, data# { } 2026-01-04T02:32:09.982632Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_1_1_2884662918562963351_v1 grpc read failed 2026-01-04T02:32:09.982668Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_1_1_2884662918562963351_v1 grpc closed 2026-01-04T02:32:09.982701Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_1_1_2884662918562963351_v1 is DEAD 2026-01-04T02:32:09.983819Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [1:7591328552998892555:2484] disconnected. 2026-01-04T02:32:09.985676Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_2884662918562963351_v1 2026-01-04T02:32:09.985726Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [1:7591328552998892562:2488] destroyed 2026-01-04T02:32:09.985862Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_1_1_2884662918562963351_v1 2026-01-04T02:32:09.983842Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [1:7591328552998892555:2484] disconnected; active server actors: 1 2026-01-04T02:32:09.983856Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [1:7591328552998892555:2484] client user disconnected session shared/user_1_1_2884662918562963351_v1 2026-01-04T02:32:10.037851Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:10.037882Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:10.037894Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:10.037910Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:10.037921Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:32:10.144552Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:10.144583Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:10.144594Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:10.144610Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:10.144619Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:32:10.246049Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:10.246079Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:10.246091Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:10.246107Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:10.246117Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:32:11.829229Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:32:11.829268Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:32:11.829321Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:32:11.829678Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:32:11.830191Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:32:11.830418Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:32:11.830649Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:32:11.831388Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:32:11.831867Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:32:11.832060Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2026-01-04T02:32:11.832156Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:32:11.832231Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:32:11.832272Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2026-01-04T02:32:11.832418Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-04T02:32:11.832470Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-04T02:32:13.835280Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:32:13.835315Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:32:13.835346Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:32:13.838710Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:32:13.844581Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:32:13.844813Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:32:13.845671Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:32:13.845883Z :DEBUG: Decompression task done. 
Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:32:13.846003Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:32:13.846118Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes |86.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |86.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |86.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest |86.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |86.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest |86.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/nodewarden/ut/unittest >> TBlobStorageWardenTest::TestEvVGenerationChangeRace [GOOD] Test command err: 2026-01-04T02:32:10.782594Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:10.782946Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:10.785587Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:10.788796Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:10.792412Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:10.792518Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:10.793382Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/c5zo/00134c/r3tmp/tmpIh6xAz/pdisk_1.dat Formatting PDisk with guid1 6093499460192418219 Creating PDisk with guid2 12834998092467689468 Creating pdisk 2026-01-04T02:32:11.437005Z node 1 :BS_PDISK ERROR: {BSP01@blobstorage_pdisk_actor.cpp:570} PDiskId# 1001 Can't start due to a guid error expected# 12834998092467689468 on-disk# 6093499460192418219 PDiskId# 1001 2026-01-04T02:32:11.474839Z node 1 :BS_PROXY_PUT INFO: 
dsproxy_put.cpp:665: [abc2fc901918ac71] bootstrap ActorId# [1:491:2468] Group# 33554432 BlobCount# 1 BlobIDs# [[72057594037932033:2:8:0:0:347:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-04T02:32:11.474976Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:347:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:11.475009Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:347:0] restore disk# 1 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:11.475031Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:347:0] restore disk# 2 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:11.475052Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:347:0] restore disk# 3 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:11.475077Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:347:0] restore disk# 3 part# 1 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:11.475102Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [abc2fc901918ac71] Id# [72057594037932033:2:8:0:0:347:0] restore disk# 3 part# 2 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:32:11.475138Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [abc2fc901918ac71] restore Id# [72057594037932033:2:8:0:0:347:0] optimisticReplicas# 3 optimisticState# EBS_FULL Marker# BPG55 2026-01-04T02:32:11.475212Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037932033:2:8:0:0:347:1] Marker# BPG33 2026-01-04T02:32:11.475279Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [abc2fc901918ac71] Sending missing VPut part# 0 to# 0 blob Id# [72057594037932033:2:8:0:0:347:1] Marker# BPG32 2026-01-04T02:32:11.475321Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 1 blob Id# [72057594037932033:2:8:0:0:347:2] Marker# BPG33 2026-01-04T02:32:11.475347Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [abc2fc901918ac71] Sending missing VPut part# 1 to# 1 blob Id# [72057594037932033:2:8:0:0:347:2] Marker# BPG32 2026-01-04T02:32:11.475377Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [abc2fc901918ac71] partPlacement record partSituation# ESituation::Unknown to# 2 blob Id# [72057594037932033:2:8:0:0:347:3] Marker# BPG33 2026-01-04T02:32:11.475402Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [abc2fc901918ac71] Sending missing VPut part# 2 to# 2 blob Id# [72057594037932033:2:8:0:0:347:3] Marker# BPG32 2026-01-04T02:32:11.475551Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:47:2091] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:347:3] FDS# 347 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:32:11.475619Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:40:2084] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:347:2] FDS# 347 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 
2026-01-04T02:32:11.475651Z node 1 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [1:61:2105] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037932033:2:8:0:0:347:1] FDS# 347 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:32:11.477372Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:347:2] {MsgQoS MsgId# { SequenceId: 1 MsgId: 9 } Cost# 82732 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 10 }}}} from# [2000000:1:0:0:0] Marker# BPP01 2026-01-04T02:32:11.477582Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:347:3] {MsgQoS MsgId# { SequenceId: 1 MsgId: 10 } Cost# 82732 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 11 }}}} from# [2000000:1:0:1:0] Marker# BPP01 2026-01-04T02:32:11.477665Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [abc2fc901918ac71] received {EvVPutResult Status# OK ID# [72057594037932033:2:8:0:0:347:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 8 } Cost# 82732 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 9 }}}} from# [2000000:1:0:3:0] Marker# BPP01 2026-01-04T02:32:11.477739Z node 1 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [abc2fc901918ac71] Result# TEvPutResult {Id# [72057594037932033:2:8:0:0:347:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} GroupId# 33554432 Marker# BPP12 2026-01-04T02:32:11.477818Z node 1 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [abc2fc901918ac71] SendReply putResult# TEvPutResult {Id# [72057594037932033:2:8:0:0:347:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.999479} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-04T02:32:11.477994Z node 1 :BS_PROXY_PUT DEBUG: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 33554432 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.961 sample PartId# [72057594037932033:2:8:0:0:347:3] QueryCount# 1 VDiskId# [2000000:1:0:1:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.961 sample PartId# [72057594037932033:2:8:0:0:347:2] QueryCount# 1 VDiskId# [2000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.961 sample PartId# [72057594037932033:2:8:0:0:347:1] QueryCount# 1 VDiskId# [2000000:1:0:3:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 2.728 VDiskId# [2000000:1:0:0:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 2.903 VDiskId# [2000000:1:0:1:0] NodeId# 1 Status# OK } TEvVPutResult{ TimestampMs# 2.983 VDiskId# [2000000:1:0:3:0] NodeId# 1 Status# OK } ] } Verify that PDisk returns ERROR YardInitResult: {EvYardInitResult Status# CORRUPTED ErrorReason# "PDisk is in StateError, reason# PDiskId# 1001 Can't start due to a guid error expected# 12834998092467689468 on-disk# 6093499460192418219" StatusFlags# None PDiskParams# {{TPDiskParams ownerId# 0 ownerRound# 0 SlotSizeInUnits# 0 ChunkSize# 0 AppendBlockSize# 0 RecommendedReadSize# 0 SeekTimeUs# 0 ReadSpeedBps# 0 WriteSpeedBps# 0 ReadBlockSize# 0 WriteBlockSize# 0 BulkWriteBlockSize# 0 PrefetchSizeBytes# 0 GlueRequestDistanceBytes# 0 IsTinyDisk# 0}} OwnedChunks# {}} 2026-01-04T02:32:12.895815Z 
node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:1:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:12.900007Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:2:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:2:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:12.900763Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:12.900841Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:2:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:12.902397Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:0:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:32:12.902473Z node 2 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[2000000:_:0:1:0]: (33554432) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [2000000:1:0:3:0] targetVDisk# [2000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 tablet_helpers.cpp: SetPath # /home/runner/.ya/build/build_root/c5zo/00134c/r3tmp/tmp3us53b/pdisk_1.dat Starting test 2026-01-04T02:32:13.694086Z node 2 :BS_SKELETON INFO: blobstorage_skeleton.cpp:2537: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) SKELETON START Marker# BSVS37 2026-01-04T02:32:13.694388Z node 2 :BS_LOCALRECOVERY NOTICE: localrecovery_public.cpp:707: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) LocalRecovery START 2026-01-04T02:32:13.699788Z node 2 :BS_LOCALRECOVERY NOTICE: localrecovery_public.cpp:190: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) MAX LSNS: LogoBlobs# [ExplicitlySet# true Derived# false Lsn# NotSet] Blocks# [ExplicitlySet# true Derived# false Lsn# NotSet] Barriers# [ExplicitlySet# true Derived# false Lsn# NotSet] SyncLog# 0 2026-01-04T02:32:13.700589Z node 2 :BS_LOCALRECOVERY NOTICE: localrecovery_logreplay.cpp:83: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) TRecoveryLogReplayer: START 2026-01-04T02:32:13.702463Z node 2 :BS_LOCALRECOVERY NOTICE: localrecovery_public.cpp:143: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) LocalRecovery FINISHED: {RecoveryDuration# 0.002000s RecoveredLogStartLsn# 0 SuccessfulRecovery# true EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 
HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {{RecsN# 0 Lsns# [0 0]}}} ... blocking NKikimr::TEvBlobStorage::TEvLocalRecoveryDone from to VDISK_SKELETON cookie 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } PDiskId: 1000 VDiskSlotId: 1002 Guid: 14579788458430204466 Kind: 0 StoragePoolName: "testEvVGenerationChangeRace" InstanceGuid: 7473551664191494344 GroupSizeInUnits: 0 2026-01-04T02:32:13.768187Z node 2 :BS_SKELETON INFO: blobstorage_skeletonfront.cpp:1727: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) VDisk Generation Change success; new VDiskId# [82000002:2:0:0:0] Marker# BSVSF02 TEvControllerConfigResponse# NKikimrBlobStorage.TEvControllerConfigResponse Response { Status { Success: true } Success: true ConfigTxSeqNo: 5 } Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: Initial DiskSpace: Green Replicated: false UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 7473551664191494344 ReplicationProgress: nan ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 7473551664191494344 GroupSizeInUnits: 0 ... 
unblocking NKikimr::TEvBlobStorage::TEvLocalRecoveryDone from to VDISK_SKELETON 2026-01-04T02:32:13.768983Z node 2 :BS_SKELETON INFO: blobstorage_skeleton.cpp:1966: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) SKELETON LOCAL RECOVERY SUCCEEDED Marker# BSVS29 2026-01-04T02:32:13.784619Z node 2 :BS_SKELETON INFO: blobstorage_skeleton.cpp:2134: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) SKELETON SYNC GUID RECOVERY SUCCEEDED Marker# BSVS31 2026-01-04T02:32:13.784748Z node 2 :BS_SKELETON INFO: blobstorage_skeleton.cpp:1845: PDiskId# 1000 VDISK[82000002:_:0:0:0]: (2181038082) SKELETON IS UP AND RUNNING Marker# BSVS28 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 2 Ring: 0 Domain: 0 VDisk: 0 } IncarnationGuid: 12376732421794081905 InstanceGuid: 7473551664191494344 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 3 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 10295654809068420070 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 2 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 10154266886461023041 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 1 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 8300656438507902624 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 12895965222540636254 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 10295654809068420070 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 10154266886461023041 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 8300656438507902624 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } InstanceGuid: 12895965222540636254 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 
33554432 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 10295654809068420070 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 1 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 10154266886461023041 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 2 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 8300656438507902624 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 33554432 GroupGeneration: 1 Ring: 0 Domain: 3 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 12895965222540636254 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 14626869116438086390 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskState: OK DiskSpace: Green Replicated: true UnsyncedVDisks: 0 FrontQueues: Green UnreplicatedPhantoms: false UnreplicatedNonPhantoms: false HasUnreadableBlobs: false InstanceGuid: 8819570674421442298 ReplicationProgress: 1 ReplicationSecondsRemaining: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } ReadThroughput: 0 WriteThroughput: 0 InstanceGuid: 14626869116438086390 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } SatisfactionRank { FreshRank { Flag: Green } LevelRank { Flag: Green } } ReadThroughput: 0 WriteThroughput: 0 InstanceGuid: 8819570674421442298 GroupSizeInUnits: 0 Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 14626869116438086390 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038081 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 8819570674421442298 AvailableSize: 17112760320 GroupSizeInUnits: 0 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN Got TEvVDiskStateUpdate# NKikimrWhiteboard.TVDiskStateInfo VDiskId { GroupID: 2181038082 GroupGeneration: 2 Ring: 0 Domain: 0 VDisk: 0 } AllocatedSize: 0 InstanceGuid: 7473551664191494344 AvailableSize: 34225520640 
GroupSizeInUnits: 2 VDiskSlotUsage: 0 NormalizedOccupancy: 0.00052056220718375845 VDiskRawUsage: 0 CapacityAlert: GREEN >> Yq_1::CreateConnections_With_Idempotency [GOOD] |86.7%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/nodewarden/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/ut_aggregation/unittest >> AggregateStatistics::RootNodeShouldBeInvalidateByTimeout [GOOD] Test command err: 2026-01-04T02:32:15.393277Z node 1 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-04T02:32:15.394763Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [1:38:2058], tablet id = 1, status = OK 2026-01-04T02:32:15.395148Z node 1 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [1:38:2058], path = { OwnerId: 3 LocalId: 3 } 2026-01-04T02:32:15.395486Z node 3 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-04T02:32:15.395624Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 1, Round: 1, current Round: 0 2026-01-04T02:32:15.395764Z node 1 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 1 2026-01-04T02:32:15.395949Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:38:2058], server id = [0:0:0], tablet id = 1, status = ERROR 2026-01-04T02:32:15.395972Z node 1 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:15.396008Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [3:42:2057], tablet id = 3, status = OK 2026-01-04T02:32:15.396071Z node 3 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [3:42:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-04T02:32:15.396234Z node 4 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 1, current Round: 0 2026-01-04T02:32:15.396310Z node 3 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 3 2026-01-04T02:32:15.396351Z node 3 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-04T02:32:15.396438Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [4:47:2057], tablet id = 4, status = OK 2026-01-04T02:32:15.396483Z node 4 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [4:47:2057], path = { OwnerId: 3 LocalId: 3 } 2026-01-04T02:32:15.396550Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:42:2057], server id = [0:0:0], tablet id = 3, status = ERROR 2026-01-04T02:32:15.396566Z node 3 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:15.396596Z node 4 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 4 2026-01-04T02:32:15.396616Z node 4 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-04T02:32:15.396708Z node 1 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 3 2026-01-04T02:32:15.396803Z node 4 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 4, client id = [4:47:2057], server id = [0:0:0], tablet id = 4, status 
= ERROR 2026-01-04T02:32:15.396821Z node 4 :STATISTICS DEBUG: service_impl.cpp:1116: Skip EvClientConnected 2026-01-04T02:32:15.396907Z node 2 :STATISTICS DEBUG: service_impl.cpp:450: Received TEvAggregateStatisticsResponse SenderNodeId: 4 2026-01-04T02:32:15.410071Z node 3 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-04T02:32:15.410171Z node 3 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-04T02:32:15.410297Z node 4 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-04T02:32:15.410319Z node 4 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-04T02:32:15.421045Z node 1 :STATISTICS INFO: service_impl.cpp:418: Node 2 is unavailable 2026-01-04T02:32:15.421140Z node 1 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 1 2026-01-04T02:32:15.421244Z node 2 :STATISTICS DEBUG: service_impl.cpp:403: Skip TEvKeepAliveTimeout 2026-01-04T02:32:15.421340Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-04T02:32:15.421375Z node 1 :STATISTICS DEBUG: service_impl.cpp:395: Skip TEvKeepAliveTimeout 2026-01-04T02:32:15.421408Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-04T02:32:15.421434Z node 1 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-04T02:32:15.421554Z node 1 :STATISTICS DEBUG: service_impl.cpp:254: Event round 1 is different from the current 0 2026-01-04T02:32:15.421614Z node 1 :STATISTICS DEBUG: service_impl.cpp:430: Skip TEvAggregateKeepAlive >> KikimrIcGateway::TestSecretsExistingValidation+UseSchemaSecrets [GOOD] |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/ut_aggregation/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> Yq_1::ModifyQuery [GOOD] |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |86.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |86.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_olap_reboots/ydb-core-tx-schemeshard-ut_olap_reboots >> Yq_1::DeleteQuery [GOOD] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_TaggedLiteral [GOOD] Test command err: 2026-01-04T02:31:14.090731Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328385261615870:2180];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:14.090784Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:14.160429Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown E0104 02:31:14.766650378 494163 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:31:14.766771180 494163 channel.cc:120] 
channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:31:14.967972Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.114214Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:15.153095Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.205577Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.237454Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:15.237567Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:31:15.303309Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:30116 2026-01-04T02:31:15.307328Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.409907Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.410339Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:15.412274Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.412731Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.426899Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.447796Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.460202Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.460281Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.491797Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.493996Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.502926Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.533920Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.545936Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.557260Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.575852Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.612989Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.645948Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.648955Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.651294Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.700562Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.702874Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.730031Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:30116 } ] 2026-01-04T02:31:15.798015Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:30116: Fa ... 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:646: TxId: 281474976715741, task: 1, CA Id [4:7591328635753312001:2954]. Processing resolved ShardId# 72075186224037898, partition range: [(String : yandexcloud://some_folder_id, String : utqudof50bavrel4pi1u) ; ()), i: 0, state ranges: 0, points: 1 2026-01-04T02:32:12.391719Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:684: TxId: 281474976715741, task: 1, CA Id [4:7591328635753312001:2954]. Add point to new shardId: 72075186224037898 2026-01-04T02:32:12.391810Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:733: TxId: 281474976715741, task: 1, CA Id [4:7591328635753312001:2954]. Pending shards States: TShardState{ TabletId: 72075186224037898, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://some_folder_id, String : utqudof50bavrel4pi1u)], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://some_folder_id, String : utqudof50bavrel4pi1u)], RetryAttempt: 0, ResolveAttempt: 1 }; 2026-01-04T02:32:12.391824Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715741, task: 1, CA Id [4:7591328635753312001:2954]. effective maxinflight 1024 sorted 0 2026-01-04T02:32:12.391836Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976715741, task: 1, CA Id [4:7591328635753312001:2954]. BEFORE: 1.0 2026-01-04T02:32:12.391877Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976715741, task: 1, CA Id [4:7591328635753312001:2954]. Send EvRead to shardId: 72075186224037898, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2026-01-04T02:32:12.391914Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715741, task: 1, CA Id [4:7591328635753312001:2954]. AFTER: 0.1 2026-01-04T02:32:12.391926Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715741, task: 1, CA Id [4:7591328635753312001:2954]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2026-01-04T02:32:12.393010Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715741, task: 1, CA Id [4:7591328635753312001:2954]. Recv TEvReadResult from ShardID=72075186224037898, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2026-01-04T02:32:12.393035Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715741, task: 1, CA Id [4:7591328635753312001:2954]. Taken 0 locks 2026-01-04T02:32:12.393045Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715741, task: 1, CA Id [4:7591328635753312001:2954]. new data for read #0 seqno = 1 finished = 1 2026-01-04T02:32:12.393072Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328635753312001:2954], TxId: 281474976715741, task: 1. Ctx: { TraceId : 01ke3dj5b1cyvzys6x0tq8gagw. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDYwZTcyNmItM2RmMjRhMTItNzVhMWZkODktOTJkYjkwNGQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-04T02:32:12.393091Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328635753312001:2954], TxId: 281474976715741, task: 1. Ctx: { TraceId : 01ke3dj5b1cyvzys6x0tq8gagw. CheckpointId : . RunScriptActorId : [0:0:0]. 
CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDYwZTcyNmItM2RmMjRhMTItNzVhMWZkODktOTJkYjkwNGQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:12.393107Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715741, task: 1, CA Id [4:7591328635753312001:2954]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-04T02:32:12.393126Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715741, task: 1, CA Id [4:7591328635753312001:2954]. enter pack cells method shardId: 72075186224037898 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-04T02:32:12.393158Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715741, task: 1, CA Id [4:7591328635753312001:2954]. exit pack cells method shardId: 72075186224037898 processedRows: 0 packed rows: 1 freeSpace: 8388572 2026-01-04T02:32:12.393177Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715741, task: 1, CA Id [4:7591328635753312001:2954]. returned 1 rows; processed 1 rows 2026-01-04T02:32:12.393213Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715741, task: 1, CA Id [4:7591328635753312001:2954]. dropping batch for read #0 2026-01-04T02:32:12.393224Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715741, task: 1, CA Id [4:7591328635753312001:2954]. effective maxinflight 1024 sorted 0 2026-01-04T02:32:12.393236Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715741, task: 1, CA Id [4:7591328635753312001:2954]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-04T02:32:12.393254Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715741, task: 1, CA Id [4:7591328635753312001:2954]. returned async data processed rows 1 left freeSpace 8388572 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-04T02:32:12.393470Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7591328635753312001:2954], TxId: 281474976715741, task: 1. Ctx: { TraceId : 01ke3dj5b1cyvzys6x0tq8gagw. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDYwZTcyNmItM2RmMjRhMTItNzVhMWZkODktOTJkYjkwNGQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-04T02:32:12.393490Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328635753312001:2954], TxId: 281474976715741, task: 1. Ctx: { TraceId : 01ke3dj5b1cyvzys6x0tq8gagw. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDYwZTcyNmItM2RmMjRhMTItNzVhMWZkODktOTJkYjkwNGQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:12.393529Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976715741, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2026-01-04T02:32:12.393548Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328635753312002:2955], TxId: 281474976715741, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj5b1cyvzys6x0tq8gagw. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. 
SessionId : ydb://session/3?node_id=4&id=NDYwZTcyNmItM2RmMjRhMTItNzVhMWZkODktOTJkYjkwNGQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646923 2026-01-04T02:32:12.393589Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976715741, task: 2. Finish input channelId: 1, from: [4:7591328635753312001:2954] 2026-01-04T02:32:12.393639Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328635753312002:2955], TxId: 281474976715741, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj5b1cyvzys6x0tq8gagw. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDYwZTcyNmItM2RmMjRhMTItNzVhMWZkODktOTJkYjkwNGQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:12.393786Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7591328635753312002:2955], TxId: 281474976715741, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj5b1cyvzys6x0tq8gagw. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDYwZTcyNmItM2RmMjRhMTItNzVhMWZkODktOTJkYjkwNGQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-04T02:32:12.393824Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328635753312001:2954], TxId: 281474976715741, task: 1. Ctx: { TraceId : 01ke3dj5b1cyvzys6x0tq8gagw. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDYwZTcyNmItM2RmMjRhMTItNzVhMWZkODktOTJkYjkwNGQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646927 2026-01-04T02:32:12.393850Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328635753312001:2954], TxId: 281474976715741, task: 1. Ctx: { TraceId : 01ke3dj5b1cyvzys6x0tq8gagw. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDYwZTcyNmItM2RmMjRhMTItNzVhMWZkODktOTJkYjkwNGQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:12.393880Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715741, task: 1. Tasks execution finished 2026-01-04T02:32:12.393894Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7591328635753312001:2954], TxId: 281474976715741, task: 1. Ctx: { TraceId : 01ke3dj5b1cyvzys6x0tq8gagw. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDYwZTcyNmItM2RmMjRhMTItNzVhMWZkODktOTJkYjkwNGQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-04T02:32:12.393969Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328635753312002:2955], TxId: 281474976715741, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj5b1cyvzys6x0tq8gagw. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDYwZTcyNmItM2RmMjRhMTItNzVhMWZkODktOTJkYjkwNGQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:12.394013Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715741, task: 1. 
pass away 2026-01-04T02:32:12.394014Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715741, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2026-01-04T02:32:12.394023Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715741, task: 2. Tasks execution finished 2026-01-04T02:32:12.394041Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7591328635753312002:2955], TxId: 281474976715741, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj5b1cyvzys6x0tq8gagw. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDYwZTcyNmItM2RmMjRhMTItNzVhMWZkODktOTJkYjkwNGQ=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-04T02:32:12.394108Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715741, task: 2. pass away 2026-01-04T02:32:12.394163Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715741;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-04T02:32:12.394170Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715741;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> TServiceAccountServiceTest::IssueToken [GOOD] Test command err: 2026-01-04T02:32:06.996548Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328611461933611:2133];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:06.998554Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:07.277263Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:07.292912Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:07.293053Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:07.394771Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:07.431270Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:07.437970Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328611461933518:2081] 1767493926992919 != 1767493926992922 2026-01-04T02:32:07.576332Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:07.644463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:07.654664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:32:11.522806Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:11.527768Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:11.530929Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:11.530993Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:11.531228Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:11.536576Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:11.718144Z node 2 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:11.785597Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:11.793982Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 |86.8%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest |86.8%| [TA] $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> BasicStatistics::DedicatedTimeIntervals |86.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... results_accumulator.log} >> BasicStatistics::NotFullStatisticsDatashard >> BasicStatistics::TwoServerlessDbs >> UpsertLoad::ShouldWriteDataBulkUpsert >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom >> UpsertLoad::ShouldWriteKqpUpsert >> ReadLoad::ShouldReadIterate ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateConnections_With_Idempotency [GOOD] Test command err: 2026-01-04T02:31:11.898641Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328375904347689:2089];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:11.898709Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:12.817706Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:12.837452Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:12.853145Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:12.859705Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] E0104 02:31:12.879881194 492591 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:31:12.879995884 492591 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:31:12.977931Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:13.013646Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.047154Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:13.047242Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:31:13.090389Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.092056Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:64684 2026-01-04T02:31:13.092160Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.124700Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.183284Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.195895Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.244238Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.276494Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:13.344152Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.356718Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.370057Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.401718Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.427660Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.438819Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.486774Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.490962Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.492712Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.506079Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.531206Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.543476Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.555811Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.587870Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.622165Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:64684: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:64684 } ] 2026-01-04T02:31:13.886609Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [ ... kSettings" value: "\032\036\n\016Root/yq/quotas\020\200\202\224\204\200\200\200\200\001\030.(\001\"\023\n\014subject_type\020\001 \201 \"\021\n\nsubject_id\020\002 \201 \"\022\n\013metric_name\020\003 \201 *\026\n\020limit_updated_at\020\005 2*\022\n\014metric_limit\020\004 \004*\022\n\013metric_name\020\003 \201 *\022\n\014metric_usage\020\006 \004*\021\n\nsubject_id\020\002 \201 *\023\n\014subject_type\020\001 \201 *\026\n\020usage_updated_at\020\007 20\334\200\200\200\200\200@8\004@\000H\001R\022\t\246\3374\200-\321Yi\021$\t\000\000\004\000\020\000X\000`\000h\004h\003h\002h\005h\001h\000h\006r\022P\206\265\344\266\2703X\377\377\377\377\377\377\377\377\377\001x\000\262\001\000" } } } 2026-01-04T02:32:13.733897Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328640264363948:2340], TxId: 281474976710752, task: 1. Ctx: { TraceId : 01ke3dj6n0cvp4dfpzt4g42w03. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2VhYjlhY2QtZTI5MjAyYi1mNDk4YmQ2Zi05NWY4NzliNw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-01-04T02:32:13.733923Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [4:7591328640264363948:2340], TxId: 281474976710752, task: 1. Ctx: { TraceId : 01ke3dj6n0cvp4dfpzt4g42w03. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2VhYjlhY2QtZTI5MjAyYi1mNDk4YmQ2Zi05NWY4NzliNw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: 2026-01-04T02:32:13.733995Z node 4 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:413: SelfId: [4:7591328640264363948:2340], TxId: 281474976710752, task: 1. Ctx: { TraceId : 01ke3dj6n0cvp4dfpzt4g42w03. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2VhYjlhY2QtZTI5MjAyYi1mNDk4YmQ2Zi05NWY4NzliNw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. About to drain async output 0. FreeSpace: 67108864, allowedOvercommit: 4194304, toSend: 71303168, finished: 0 2026-01-04T02:32:13.735301Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:5019: TxId: 281474976710752, task: 1. Add data: 78 / 78 2026-01-04T02:32:13.735367Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4987: TxId: 281474976710752, task: 1. Send data=78, closed=1, bufferActorId=[4:7591328640264363942:2340] 2026-01-04T02:32:13.735392Z node 4 :KQP_COMPUTE DEBUG: dq_sync_compute_actor_base.h:427: SelfId: [4:7591328640264363948:2340], TxId: 281474976710752, task: 1. Ctx: { TraceId : 01ke3dj6n0cvp4dfpzt4g42w03. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2VhYjlhY2QtZTI5MjAyYi1mNDk4YmQ2Zi05NWY4NzliNw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Drain async output 0. Free space decreased: -9223372036787666944, sent data from buffer: 78 2026-01-04T02:32:13.735410Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710752, task: 1. Tasks execution finished 2026-01-04T02:32:13.735423Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2128: SelfId: [4:7591328640264363948:2340], TxId: 281474976710752, task: 1. Ctx: { TraceId : 01ke3dj6n0cvp4dfpzt4g42w03. CheckpointId : . 
RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2VhYjlhY2QtZTI5MjAyYi1mNDk4YmQ2Zi05NWY4NzliNw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Waiting finish of sink[0] 2026-01-04T02:32:13.735446Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328640264363948:2340], TxId: 281474976710752, task: 1. Ctx: { TraceId : 01ke3dj6n0cvp4dfpzt4g42w03. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2VhYjlhY2QtZTI5MjAyYi1mNDk4YmQ2Zi05NWY4NzliNw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-01-04T02:32:13.735475Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [4:7591328640264363948:2340], TxId: 281474976710752, task: 1. Ctx: { TraceId : 01ke3dj6n0cvp4dfpzt4g42w03. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2VhYjlhY2QtZTI5MjAyYi1mNDk4YmQ2Zi05NWY4NzliNw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: 2026-01-04T02:32:13.735494Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710752, task: 1. Tasks execution finished 2026-01-04T02:32:13.735504Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2128: SelfId: [4:7591328640264363948:2340], TxId: 281474976710752, task: 1. Ctx: { TraceId : 01ke3dj6n0cvp4dfpzt4g42w03. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2VhYjlhY2QtZTI5MjAyYi1mNDk4YmQ2Zi05NWY4NzliNw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Waiting finish of sink[0] 2026-01-04T02:32:13.735544Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328640264363948:2340], TxId: 281474976710752, task: 1. Ctx: { TraceId : 01ke3dj6n0cvp4dfpzt4g42w03. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2VhYjlhY2QtZTI5MjAyYi1mNDk4YmQ2Zi05NWY4NzliNw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:13.735561Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710752, task: 1. Tasks execution finished 2026-01-04T02:32:13.735571Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:2128: SelfId: [4:7591328640264363948:2340], TxId: 281474976710752, task: 1. Ctx: { TraceId : 01ke3dj6n0cvp4dfpzt4g42w03. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2VhYjlhY2QtZTI5MjAyYi1mNDk4YmQ2Zi05NWY4NzliNw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Waiting finish of sink[0] 2026-01-04T02:32:13.735634Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:2941: SelfId: [4:7591328640264363942:2340], SessionActorId: [4:7591328554365013393:2340], Create new TableWriteActor for table `Root/yq/quotas` ([72057594046644480:46:1]). lockId=281474976710748. 
ActorId=[4:7591328640264363951:2340] 2026-01-04T02:32:13.735703Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:481: Table: `Root/yq/quotas` ([72057594046644480:46:1]), SessionActorId: [4:7591328554365013393:2340]Open: token=0 2026-01-04T02:32:13.735870Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:489: SelfId: [4:7591328640264363951:2340], Table: `Root/yq/quotas` ([72057594046644480:46:1]), SessionActorId: [4:7591328554365013393:2340]Write: token=0 2026-01-04T02:32:13.735988Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:496: SelfId: [4:7591328640264363951:2340], Table: `Root/yq/quotas` ([72057594046644480:46:1]), SessionActorId: [4:7591328554365013393:2340]Close: token=0 2026-01-04T02:32:13.736024Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4892: SelfId: [4:7591328640264363950:2340], TxId: 281474976710752, task: 1. TKqpForwardWriteActor recieve EvBufferWriteResult from [4:7591328640264363942:2340] 2026-01-04T02:32:13.736036Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4910: SelfId: [4:7591328640264363950:2340], TxId: 281474976710752, task: 1. Finished 2026-01-04T02:32:13.736052Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328640264363948:2340], TxId: 281474976710752, task: 1. Ctx: { TraceId : 01ke3dj6n0cvp4dfpzt4g42w03. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2VhYjlhY2QtZTI5MjAyYi1mNDk4YmQ2Zi05NWY4NzliNw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:13.736071Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710752, task: 1. Tasks execution finished 2026-01-04T02:32:13.736083Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7591328640264363948:2340], TxId: 281474976710752, task: 1. Ctx: { TraceId : 01ke3dj6n0cvp4dfpzt4g42w03. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=Y2VhYjlhY2QtZTI5MjAyYi1mNDk4YmQ2Zi05NWY4NzliNw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-04T02:32:13.736159Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710752, task: 1. pass away 2026-01-04T02:32:13.736243Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710752;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-04T02:32:13.736623Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3511: SelfId: [4:7591328640264363942:2340], SessionActorId: [4:7591328554365013393:2340], Start immediate commit 2026-01-04T02:32:13.736637Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1081: SelfId: [4:7591328640264363951:2340], Table: `Root/yq/quotas` ([72057594046644480:46:1]), SessionActorId: [4:7591328554365013393:2340]SetImmediateCommit 2026-01-04T02:32:13.736653Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:3409: SelfId: [4:7591328640264363942:2340], SessionActorId: [4:7591328554365013393:2340], Flush data 2026-01-04T02:32:13.736796Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1235: SelfId: [4:7591328640264363951:2340], Table: `Root/yq/quotas` ([72057594046644480:46:1]), SessionActorId: [4:7591328554365013393:2340]Send EvWrite to ShardID=72075186224037888, isPrepare=0, isImmediateCommit=1, TxId=0, LockTxId=0, LockNodeId=0, Locks= LockId: 281474976710748 DataShard: 72075186224037888 Generation: 1 Counter: 5 SchemeShard: 72057594046644480 PathId: 46, Size=136, Cookie=1, OperationsCount=1, IsFinal=1, Attempts=0, Mode=3, BufferMemory=136 2026-01-04T02:32:13.738037Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:740: SelfId: [4:7591328640264363945:2511], Table: `Root/yq/quotas` ([72057594046644480:46:1]), SessionActorId: [4:7591328575839851611:2511]Recv EvWriteResult from ShardID=72075186224037888, Status=STATUS_COMPLETED, TxId=13, Locks= , Cookie=1 2026-01-04T02:32:13.738066Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1023: SelfId: [4:7591328640264363945:2511], Table: `Root/yq/quotas` ([72057594046644480:46:1]), SessionActorId: [4:7591328575839851611:2511]Got completed result TxId=13, TabletId=72075186224037888, Cookie=1, Mode=3, Locks= 2026-01-04T02:32:13.738121Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4535: SelfId: [4:7591328640264363933:2511], SessionActorId: [4:7591328575839851611:2511], Committed TxId=0 2026-01-04T02:32:13.740638Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:740: SelfId: [4:7591328640264363951:2340], Table: `Root/yq/quotas` ([72057594046644480:46:1]), SessionActorId: [4:7591328554365013393:2340]Recv EvWriteResult from ShardID=72075186224037888, Status=STATUS_COMPLETED, TxId=14, Locks= , Cookie=1 2026-01-04T02:32:13.740670Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:1023: SelfId: [4:7591328640264363951:2340], Table: `Root/yq/quotas` ([72057594046644480:46:1]), SessionActorId: [4:7591328554365013393:2340]Got completed result TxId=14, TabletId=72075186224037888, Cookie=1, Mode=3, Locks= 2026-01-04T02:32:13.740714Z node 4 :KQP_COMPUTE DEBUG: kqp_write_actor.cpp:4535: SelfId: [4:7591328640264363942:2340], SessionActorId: [4:7591328554365013393:2340], Committed TxId=0 2026-01-04T02:32:14.080999Z node 4 :FQ_PENDING_FETCHER ERROR: pending_fetcher.cpp:259: Error with GetTask:
: Error: Client is stopped ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestSecretsExistingValidation+UseSchemaSecrets [GOOD] Test command err: Trying to start YDB, gRPC: 24424, MsgBus: 3473 2026-01-04T02:31:33.282184Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328467471510250:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:33.282246Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:33.746017Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:33.769561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:33.769712Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:33.782781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:33.873848Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328467471510026:2081] 1767493893222784 != 1767493893222787 2026-01-04T02:31:33.889201Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:33.948293Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:34.152555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:34.152614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:34.152624Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:34.152732Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:34.270197Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:34.899196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:34.925253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:31:34.962211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:31:37.800120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] 
ActorId: [1:7591328484651380136:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:37.800281Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:37.801017Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328484651380146:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:37.801088Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:38.253580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:38.289873Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328467471510250:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:38.290216Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:38.436374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:38.474453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:38.514538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:38.620257Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328488946347751:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:38.620347Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:38.620690Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328488946347756:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:38.620735Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:38.620781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328488946347757:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:38.626205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:31:38.643420Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328488946347760:2366], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2026-01-04T02:31:38.781173Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328488946347811:2767] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 8649, MsgBus: 26287 2026-01-04T02:31:39.908867Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328492199601494:2200];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:39.909185Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:39.916592Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:40.003119Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:40.004667Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591328492199601332:2081] 1767493899898564 != 1767493899898567 2026-01-04T02:31:40.048975Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:40.049059Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:40.052527Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:40.053536Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:40.170774Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:40.170808Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:40.170817Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:40.170920Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:40.650765Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:40.680385Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/sc ... 
2:32:03.995109Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-01-04T02:32:05.100515Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:32:05.100545Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:06.489670Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715717:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) Trying to start YDB, gRPC: 25471, MsgBus: 2792 2026-01-04T02:32:07.714931Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:07.722044Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:07.815769Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:07.815859Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:07.831819Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:07.846349Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:07.987621Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:08.024770Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:08.024797Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:08.024807Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:08.024893Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:08.694112Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:08.896456Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:08.906967Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:32:08.924678Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:09.015718Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:09.185047Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:09.253990Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:12.152025Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591328637799556982:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:12.152152Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:12.154486Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591328637799556992:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:12.154616Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:12.231977Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:12.313278Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:12.353599Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:12.389482Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:12.423672Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:12.489106Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:12.525668Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:12.603296Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:12.690902Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591328637799557864:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:12.691011Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:12.691086Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591328637799557869:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:12.691278Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591328637799557871:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:12.691340Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:12.695179Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:32:12.709653Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591328637799557873:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:32:12.801201Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591328637799557927:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom |86.8%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> DataStreams::ListStreamsValidation [GOOD] >> ReadLoad::ShouldReadKqp ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::ModifyQuery [GOOD] Test command err: 2026-01-04T02:31:12.724499Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328378997815795:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:12.724546Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0104 02:31:13.389093104 493124 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:31:13.389216118 493124 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:31:13.501941Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:13.645199Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:13.741940Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:13.886275Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:13.886354Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:13.894434Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:8953 2026-01-04T02:31:13.894507Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:13.933691Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:13.933756Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:13.933859Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:13.949764Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:13.949868Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:31:14.018149Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:14.052997Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:14.065542Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:14.070070Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:14.070073Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:14.071047Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:14.088468Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:14.089789Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:14.095785Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:14.133175Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:14.149664Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:14.150624Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:14.150696Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:14.152106Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:14.172681Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:14.232690Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:14.237882Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:14.262765Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:14.262833Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:8953: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:8953 } ] 2026-01-04T02:31:14.267254Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error ... NULL) ; (String : TestTenant)]], Points: [], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [#0: [(String : TestTenant, String : NULL, String : NULL) ; (String : TestTenant)]], Points: [], RetryAttempt: 0, ResolveAttempt: 1 }; 2026-01-04T02:32:13.605932Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976710761, task: 1, CA Id [4:7591328641127455046:3041]. effective maxinflight 1024 sorted 0 2026-01-04T02:32:13.605946Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976710761, task: 1, CA Id [4:7591328641127455046:3041]. BEFORE: 1.0 2026-01-04T02:32:13.606062Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976710761, task: 1, CA Id [4:7591328641127455046:3041]. Send EvRead to shardId: 72075186224037893, tablePath: Root/yq/pending_small, ranges: [(String : TestTenant) ; (String : TestTenant)] , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2026-01-04T02:32:13.606108Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976710761, task: 1, CA Id [4:7591328641127455046:3041]. AFTER: 0.1 2026-01-04T02:32:13.606122Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976710761, task: 1, CA Id [4:7591328641127455046:3041]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2026-01-04T02:32:13.607214Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976710761, task: 1, CA Id [4:7591328641127455046:3041]. Recv TEvReadResult from ShardID=72075186224037893, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2026-01-04T02:32:13.607240Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976710761, task: 1, CA Id [4:7591328641127455046:3041]. Taken 0 locks 2026-01-04T02:32:13.607254Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976710761, task: 1, CA Id [4:7591328641127455046:3041]. new data for read #0 seqno = 1 finished = 1 2026-01-04T02:32:13.607285Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328641127455046:3041], TxId: 281474976710761, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj6gs23gnxd1p3cn5304s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWU5OTQ5YTYtYTkzMWUzYTctYWJhZTUyNjgtMjFlNjIxM2U=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-04T02:32:13.607309Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328641127455046:3041], TxId: 281474976710761, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj6gs23gnxd1p3cn5304s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWU5OTQ5YTYtYTkzMWUzYTctYWJhZTUyNjgtMjFlNjIxM2U=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:13.607332Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976710761, task: 1, CA Id [4:7591328641127455046:3041]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-04T02:32:13.607354Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976710761, task: 1, CA Id [4:7591328641127455046:3041]. 
enter pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-04T02:32:13.607393Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976710761, task: 1, CA Id [4:7591328641127455046:3041]. exit pack cells method shardId: 72075186224037893 processedRows: 0 packed rows: 1 freeSpace: 8388517 2026-01-04T02:32:13.607416Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976710761, task: 1, CA Id [4:7591328641127455046:3041]. returned 1 rows; processed 1 rows 2026-01-04T02:32:13.607461Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976710761, task: 1, CA Id [4:7591328641127455046:3041]. dropping batch for read #0 2026-01-04T02:32:13.607475Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976710761, task: 1, CA Id [4:7591328641127455046:3041]. effective maxinflight 1024 sorted 0 2026-01-04T02:32:13.607489Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976710761, task: 1, CA Id [4:7591328641127455046:3041]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-04T02:32:13.607506Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976710761, task: 1, CA Id [4:7591328641127455046:3041]. returned async data processed rows 1 left freeSpace 8388517 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-04T02:32:13.607769Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7591328641127455046:3041], TxId: 281474976710761, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj6gs23gnxd1p3cn5304s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWU5OTQ5YTYtYTkzMWUzYTctYWJhZTUyNjgtMjFlNjIxM2U=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-04T02:32:13.607793Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328641127455046:3041], TxId: 281474976710761, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj6gs23gnxd1p3cn5304s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWU5OTQ5YTYtYTkzMWUzYTctYWJhZTUyNjgtMjFlNjIxM2U=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:13.607839Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976710761, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2026-01-04T02:32:13.608070Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [4:7591328641127455042:2530] TxId: 281474976710761. Ctx: { TraceId: 01ke3dj6gs23gnxd1p3cn5304s, Database: , SessionId: ydb://session/3?node_id=4&id=YWU5OTQ5YTYtYTkzMWUzYTctYWJhZTUyNjgtMjFlNjIxM2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Client lost } trace_id# 2026-01-04T02:32:13.608192Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328641127455047:3042], TxId: 281474976710761, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj6gs23gnxd1p3cn5304s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWU5OTQ5YTYtYTkzMWUzYTctYWJhZTUyNjgtMjFlNjIxM2U=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646923 2026-01-04T02:32:13.608224Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976710761, task: 2. Finish input channelId: 1, from: [4:7591328641127455046:3041] 2026-01-04T02:32:13.608268Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328641127455047:3042], TxId: 281474976710761, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj6gs23gnxd1p3cn5304s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWU5OTQ5YTYtYTkzMWUzYTctYWJhZTUyNjgtMjFlNjIxM2U=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646735 2026-01-04T02:32:13.608884Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [4:7591328641127455047:3042], TxId: 281474976710761, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj6gs23gnxd1p3cn5304s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWU5OTQ5YTYtYTkzMWUzYTctYWJhZTUyNjgtMjFlNjIxM2U=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [4:7591328641127455042:2530], status: ABORTED, reason: {
: Error: Terminate execution } 2026-01-04T02:32:13.608990Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710761, task: 2. pass away 2026-01-04T02:32:13.609093Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710761;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-04T02:32:13.609389Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328641127455046:3041], TxId: 281474976710761, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj6gs23gnxd1p3cn5304s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWU5OTQ5YTYtYTkzMWUzYTctYWJhZTUyNjgtMjFlNjIxM2U=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646735 2026-01-04T02:32:13.609429Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [4:7591328641127455046:3041], TxId: 281474976710761, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj6gs23gnxd1p3cn5304s. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=YWU5OTQ5YTYtYTkzMWUzYTctYWJhZTUyNjgtMjFlNjIxM2U=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [4:7591328641127455042:2530], status: ABORTED, reason: {
: Error: Terminate execution } 2026-01-04T02:32:13.609540Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710761, task: 1. pass away 2026-01-04T02:32:13.609633Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710761;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-04T02:32:13.611904Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage.cpp:400: DB Error, Status: CLIENT_CANCELLED, Issues: [ {
: Error: GRpc error: (1): Cancelled on the server side } {
: Error: Grpc error response on endpoint localhost:62554 } ], Query: --!syntax_v1 -- Query name: GetTask(read stale ro) PRAGMA TablePathPrefix("Root/yq"); DECLARE $tenant as String; DECLARE $from as Timestamp; DECLARE $tasks_limit as Uint64; SELECT `scope`, `query_id`, `owner`, `last_seen_at`, `retry_counter`, `retry_counter_updated_at`, `retry_rate`, `query_type`, `node` FROM `pending_small` WHERE `tenant` = $tenant AND `assigned_until` < $from ORDER BY `query_id` DESC LIMIT $tasks_limit; 2026-01-04T02:32:13.612134Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=YWU5OTQ5YTYtYTkzMWUzYTctYWJhZTUyNjgtMjFlNjIxM2U=, ActorId: [4:7591328555228106007:2530], ActorState: ExecuteState, LegacyTraceId: 01ke3dj6gs23gnxd1p3cn5304s, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Client lost" severity: 1 } trace_id# 2026-01-04T02:32:13.615139Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage_impl.h:775: GetTaskRequest - GetTaskResult: {tenant: "TestTenant" owner_id: "e66fcfa6-e1299ebd-c108d21-63f8f1ef20" host: "ghrun-46vtxmsa7i" node_id: 4 } ERROR: [ {
: Error: GRpc error: (1): Cancelled on the server side } {
: Error: Grpc error response on endpoint localhost:62554 } ] 2026-01-04T02:32:13.615517Z node 4 :YQL_PRIVATE_PROXY ERROR: task_get.cpp:72: PrivateGetTask - Owner: e66fcfa6-e1299ebd-c108d21-63f8f1ef20, Host: ghrun-46vtxmsa7i, Tenant: TestTenant, Failed with code: GENERIC_ERROR Details:
: Error: GRpc error: (1): Cancelled on the server side
: Error: Grpc error response on endpoint localhost:62554
: Error: ControlPlane::GetTaskError 2026-01-04T02:32:13.618187Z node 4 :FQ_PENDING_FETCHER ERROR: pending_fetcher.cpp:259: Error with GetTask:
: Error: GRpc error: (1): Cancelled on the server side
: Error: Grpc error response on endpoint [::]:62554 >> UpsertLoad::ShouldCreateTable >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql |86.8%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> UpsertLoad::ShouldWriteKqpUpsert2 >> UpsertLoad::ShouldWriteDataBulkUpsertBatch |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::DeleteQuery [GOOD] Test command err: 2026-01-04T02:31:11.536924Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.006127s 2026-01-04T02:31:11.548463Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.011428s 2026-01-04T02:31:11.662182Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328375162239190:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:11.673883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:12.426859Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] E0104 02:31:12.572840208 492431 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:31:12.572998964 492431 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:31:12.633568Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:12.677938Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:12.678053Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:12.678112Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:31:12.756637Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:31257 2026-01-04T02:31:12.822114Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:12.836360Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:12.840313Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:12.944426Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:12.978015Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:12.978633Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:12.979455Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:13.018224Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:13.034773Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:13.039084Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:13.047044Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:13.089124Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::1%5D:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:13.114440Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:13.123671Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:13.161169Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:13.228130Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:13.248698Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:13.290364Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:13.374492Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:13.424456Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:13.453529Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:13.477707Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:13.506027Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:13.506103Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:31257 } ] 2026-01-04T02:31:13.507407Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:31257: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoi ... ointId : . TraceId : 01ke3dj6tnf7f7pr0pg3f6bc11. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDQ5ZGNkZjYtODcyN2Q2ZGItZTY3NDQ2ZmItM2RjNDAyMjI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Update input channelId: 1, peer: [4:7591328644630527981:2959] 2026-01-04T02:32:14.330507Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976710751, task: 1, CA Id [4:7591328644630527981:2959]. Recv TEvReadResult from ShardID=72075186224037888, ReadId=0, Status=SUCCESS, Finished=1, RowCount=0, TxLocks= , BrokenTxLocks= 2026-01-04T02:32:14.330524Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976710751, task: 1, CA Id [4:7591328644630527981:2959]. Taken 0 locks 2026-01-04T02:32:14.330538Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976710751, task: 1, CA Id [4:7591328644630527981:2959]. new data for read #0 seqno = 1 finished = 1 2026-01-04T02:32:14.330542Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328644630527982:2960], TxId: 281474976710751, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj6tnf7f7pr0pg3f6bc11. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDQ5ZGNkZjYtODcyN2Q2ZGItZTY3NDQ2ZmItM2RjNDAyMjI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646926 2026-01-04T02:32:14.330562Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328644630527981:2959], TxId: 281474976710751, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj6tnf7f7pr0pg3f6bc11. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDQ5ZGNkZjYtODcyN2Q2ZGItZTY3NDQ2ZmItM2RjNDAyMjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-04T02:32:14.330582Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328644630527981:2959], TxId: 281474976710751, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj6tnf7f7pr0pg3f6bc11. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDQ5ZGNkZjYtODcyN2Q2ZGItZTY3NDQ2ZmItM2RjNDAyMjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:14.330600Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976710751, task: 1, CA Id [4:7591328644630527981:2959]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-04T02:32:14.330622Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976710751, task: 1, CA Id [4:7591328644630527981:2959]. enter pack cells method shardId: 72075186224037888 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-04T02:32:14.330631Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [4:7591328644630527982:2960], TxId: 281474976710751, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj6tnf7f7pr0pg3f6bc11. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDQ5ZGNkZjYtODcyN2Q2ZGItZTY3NDQ2ZmItM2RjNDAyMjI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. 
Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7591328644630527981 RawX2: 4503616807242639 } } DstEndpoint { ActorId { RawX1: 7591328644630527982 RawX2: 4503616807242640 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7591328644630527982 RawX2: 4503616807242640 } } DstEndpoint { ActorId { RawX1: 7591328644630527977 RawX2: 4503616807242189 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-01-04T02:32:14.330637Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976710751, task: 1, CA Id [4:7591328644630527981:2959]. exit pack cells method shardId: 72075186224037888 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-04T02:32:14.330650Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328644630527982:2960], TxId: 281474976710751, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj6tnf7f7pr0pg3f6bc11. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDQ5ZGNkZjYtODcyN2Q2ZGItZTY3NDQ2ZmItM2RjNDAyMjI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-01-04T02:32:14.330652Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976710751, task: 1, CA Id [4:7591328644630527981:2959]. returned 0 rows; processed 0 rows 2026-01-04T02:32:14.330695Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976710751, task: 1, CA Id [4:7591328644630527981:2959]. dropping batch for read #0 2026-01-04T02:32:14.330707Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976710751, task: 1, CA Id [4:7591328644630527981:2959]. effective maxinflight 1024 sorted 0 2026-01-04T02:32:14.330723Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976710751, task: 1, CA Id [4:7591328644630527981:2959]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-04T02:32:14.330738Z node 4 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976710751, task: 1, CA Id [4:7591328644630527981:2959]. returned async data processed rows 0 left freeSpace 8388608 received rows 0 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-04T02:32:14.330820Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7591328644630527981:2959], TxId: 281474976710751, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj6tnf7f7pr0pg3f6bc11. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDQ5ZGNkZjYtODcyN2Q2ZGItZTY3NDQ2ZmItM2RjNDAyMjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-04T02:32:14.330828Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328644630527982:2960], TxId: 281474976710751, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj6tnf7f7pr0pg3f6bc11. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDQ5ZGNkZjYtODcyN2Q2ZGItZTY3NDQ2ZmItM2RjNDAyMjI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646923 2026-01-04T02:32:14.330855Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976710751, task: 2. 
Finish input channelId: 1, from: [4:7591328644630527981:2959] 2026-01-04T02:32:14.330909Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328644630527981:2959], TxId: 281474976710751, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj6tnf7f7pr0pg3f6bc11. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDQ5ZGNkZjYtODcyN2Q2ZGItZTY3NDQ2ZmItM2RjNDAyMjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646927 2026-01-04T02:32:14.330914Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328644630527982:2960], TxId: 281474976710751, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj6tnf7f7pr0pg3f6bc11. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDQ5ZGNkZjYtODcyN2Q2ZGItZTY3NDQ2ZmItM2RjNDAyMjI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-01-04T02:32:14.330941Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328644630527981:2959], TxId: 281474976710751, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj6tnf7f7pr0pg3f6bc11. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDQ5ZGNkZjYtODcyN2Q2ZGItZTY3NDQ2ZmItM2RjNDAyMjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:14.330966Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710751, task: 1. Tasks execution finished 2026-01-04T02:32:14.330982Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7591328644630527981:2959], TxId: 281474976710751, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3dj6tnf7f7pr0pg3f6bc11. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDQ5ZGNkZjYtODcyN2Q2ZGItZTY3NDQ2ZmItM2RjNDAyMjI=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-04T02:32:14.331011Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [4:7591328644630527982:2960], TxId: 281474976710751, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj6tnf7f7pr0pg3f6bc11. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDQ5ZGNkZjYtODcyN2Q2ZGItZTY3NDQ2ZmItM2RjNDAyMjI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-04T02:32:14.331073Z node 4 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [4:7591328644630527982:2960], TxId: 281474976710751, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj6tnf7f7pr0pg3f6bc11. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDQ5ZGNkZjYtODcyN2Q2ZGItZTY3NDQ2ZmItM2RjNDAyMjI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-01-04T02:32:14.331097Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710751, task: 1. pass away 2026-01-04T02:32:14.331108Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710751, task: 2. 
Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2026-01-04T02:32:14.331125Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710751, task: 2. Tasks execution finished 2026-01-04T02:32:14.331136Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [4:7591328644630527982:2960], TxId: 281474976710751, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3dj6tnf7f7pr0pg3f6bc11. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=4&id=NDQ5ZGNkZjYtODcyN2Q2ZGItZTY3NDQ2ZmItM2RjNDAyMjI=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2026-01-04T02:32:14.331196Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710751;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-04T02:32:14.331197Z node 4 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710751, task: 2. pass away 2026-01-04T02:32:14.331282Z node 4 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710751;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-04T02:32:14.336505Z node 4 :YQ_CONTROL_PLANE_STORAGE WARN: ydb_control_plane_storage_impl.h:775: DescribeQueryRequest - DescribeQueryResult: {query_id: "utqudof50cuo06b6i6ok" } ERROR: {
: Error: (NYql::TCodeLineException) ydb/core/fq/libs/control_plane_storage/ydb_control_plane_storage_queries.cpp:669: Query does not exist or permission denied. Please check the id of the query or your access rights, code: 1000 } |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |86.8%| [TA] $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |86.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> HttpRequest::Status >> BasicStatistics::TwoDatabases >> ColumnStatistics::CountMinSketchServerlessStatistics |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::ListStreamsValidation [GOOD] Test command err: 2026-01-04T02:31:53.801060Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328553594921870:2201];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:53.801128Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:54.291611Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:54.333179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:54.333307Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:54.363090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:54.507478Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:54.575695Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.005442s 2026-01-04T02:31:54.666923Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.013599s 2026-01-04T02:31:54.684194Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:54.810927Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:54.817966Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:54.817996Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:54.818004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:54.818111Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:55.080917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:55.191319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:31:55.582258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:31:55.605941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-04T02:31:58.830318Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328553594921870:2201];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:58.830614Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:58.855916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:59.159143Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037890:1][1:7591328579364727518:2354] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:42:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-01-04T02:31:59.460619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:31:59.829707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-04T02:31:59.949785Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2026-01-04T02:31:59.949867Z node 1 :HIVE 
WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2026-01-04T02:31:59.949914Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2026-01-04T02:31:59.970558Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2026-01-04T02:31:59.970704Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2026-01-04T02:31:59.970754Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2026-01-04T02:31:59.970769Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2026-01-04T02:31:59.970787Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2026-01-04T02:31:59.970807Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2026-01-04T02:31:59.970824Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2026-01-04T02:31:59.970849Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2026-01-04T02:31:59.970870Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2026-01-04T02:31:59.970889Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2026-01-04T02:31:59.970909Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2026-01-04T02:31:59.970926Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2026-01-04T02:31:59.970952Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2026-01-04T02:31:59.988277Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,19) wasn't found 2026-01-04T02:31:59.988355Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,7) wasn't found 2026-01-04T02:31:59.988388Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,13) wasn't found 2026-01-04T02:31:59.988413Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,10) wasn't found 2026-01-04T02:31:59.988439Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,16) wasn't found 2026-01-04T02:31:59.988464Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,21) wasn't found 2026-01-04T02:31:59.988487Z 
node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,18) wasn't found 2026-01-04T02:31:59.988511Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,15) wasn't found 2026-01-04T02:31:59.988535Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,12) wasn't found 2026-01-04T02:31:59.988577Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2026-01-04T02:31:59.988622Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,20) wasn't found 2026-01-04T02:31:59.988645Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,17) wasn't found 2026-01-04T02:31:59.988673Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,14) wasn't found 2026-01-04T02:31:59.988697Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,11) wasn't found 2026-01-04T02:31:59.988725Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,8) wasn't found 2026-01-04T02:32:02.034642Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591328592219245504:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:02.035800Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:02.121164Z node 6 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.014163s 2026-01-04T02:32:02.156512Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:02.326452Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:02.336685Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:02.336781Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:02.363158Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:02.461874Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:02.469226Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:02.469247Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:02.469255Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:02.469343Z node 4 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:02.704107Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:02.835218Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:32:03.110135Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:03.151151Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:32:03.495241Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:32:03.596332Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:32:03.710430Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:32:07.626213Z node 7 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007556s 2026-01-04T02:32:07.634632Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7591328612763708525:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:07.634684Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:07.701965Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:07.824137Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:07.858547Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) 
VolatileState: Unknown -> Disconnected 2026-01-04T02:32:07.858632Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:07.895890Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:07.900554Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:08.104943Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:08.104975Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:08.104986Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:08.105079Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:08.245677Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:08.575481Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:08.654676Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:08.743874Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:32:09.031549Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:32:09.316449Z node 7 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [7:7591328621353645311:3567] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/stream_TestCreateExistingStream\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:12.903288Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591328637942102884:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:12.903351Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:12.961124Z node 10 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:13.059077Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:13.084345Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:13.084448Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:13.116834Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:13.224051Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:13.260919Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:13.260947Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:13.260956Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:13.261070Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:13.620965Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:13.750818Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:32:13.910967Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:14.020908Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] >> DataStreams::TestPutRecords [GOOD] >> HttpRequest::ProbeBaseStatsServerless |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest 
|86.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] >> ColumnStatistics::SimpleColumnStatistics >> ColumnStatistics::EqWidthHistogram ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestPutRecords [GOOD] Test command err: 2026-01-04T02:31:52.795381Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328548337808753:2266];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:52.795428Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:52.814093Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.010373s 2026-01-04T02:31:53.295965Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:53.485077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:53.485169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:53.588423Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:53.628852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:53.662311Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:53.702053Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.014687s 2026-01-04T02:31:53.782281Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:53.832394Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.007514s 2026-01-04T02:31:53.985496Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:54.024202Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:54.024222Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:54.024229Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:54.024335Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:54.334007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:54.463217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:31:54.697298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:31:54.727248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-01-04T02:32:00.367448Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591328582449678662:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:00.367766Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:00.396947Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:00.493878Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:00.581856Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:00.606017Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:00.606104Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:00.658444Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:00.713530Z node 6 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.008325s 2026-01-04T02:32:00.709952Z node 5 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.007698s 2026-01-04T02:32:00.847792Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:00.862699Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:00.862724Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:00.862732Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:00.862816Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-01-04T02:32:01.198685Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:01.222215Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:32:01.366926Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:01.500230Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:32:01.827531Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:32:02.178987Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:32:02.275133Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) encryption_type: NONE sequence_number: "0" shard_id: "shard-000000" encryption_type: NONE records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: 
"shard-000004" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000003" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } 2026-01-04T02:32:02.375379Z :INFO: [/Root/] [/Root/] [96777e77-bd741f0a-e17430ed-710e9ba7] Starting read session 2026-01-04T02:32:02.375463Z :DEBUG: [/Root/] [/Root/] [96777e77-bd741f0a-e17430ed-710e9ba7] Starting session to cluster null (localhost:16729) 2026-01-04T02:32:02.390836Z :DEBUG: [/Root/] [/Root/] [96777e77-bd741f0a-e17430ed-710e9ba7] [null] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:32:02.390907Z :DEBUG: [/Root/] [/Root/] [96777e77-bd741f0a-e17430ed-710e9ba7] [null] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:32:02.393947Z :DEBUG: [/Root/] [/Root/] [96777e77-bd741f0a-e17430ed-710e9ba7] [null] Reconnecting session to cluster null in 0.000000s 2026-01-04T02:32:02.424226Z :DEBUG: [/Root/] [/Root/] [96777e77-bd741f0a-e17430ed-710e9ba7] [null] Successfully connected. Initializing session 2026-01-04T02:32:02.423761Z node 4 :PQ_READ_PROXY DEBUG: grpc_pq_read.h:109: new grpc connection 2026-01-04T02:32:02.423794Z node 4 :PQ_READ_PROXY DEBUG: grpc_pq_read.h:131: new session created c ... lientReadOffset 0 2026-01-04T02:32:12.871752Z :DEBUG: [/Root/] [/Root/] [30e1f382-4168cefe-ca203114-bd7b6867] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:32:12.872009Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (0-2) 2026-01-04T02:32:12.872109Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 3 (3-3) 2026-01-04T02:32:12.873978Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {0, 0} (0-0) 2026-01-04T02:32:12.874065Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {0, 1} (1-1) 2026-01-04T02:32:12.875225Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {1, 0} (2-2) 2026-01-04T02:32:12.875271Z :DEBUG: [/Root/] Take Data. Partition 3. Read: {2, 0} (3-3) 2026-01-04T02:32:12.875341Z :DEBUG: [/Root/] [/Root/] [30e1f382-4168cefe-ca203114-bd7b6867] [null] The application data is transferred to the client. Number of messages 4, size 1049088 bytes 2026-01-04T02:32:12.994562Z :DEBUG: [/Root/] [/Root/] [30e1f382-4168cefe-ca203114-bd7b6867] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:32:12.994954Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 0 (0-1) 2026-01-04T02:32:12.995043Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 4 (0-1) 2026-01-04T02:32:12.995082Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (0-1) 2026-01-04T02:32:12.995141Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2026-01-04T02:32:12.995178Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (3-3) 2026-01-04T02:32:12.995206Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (4-4) 2026-01-04T02:32:12.995237Z :DEBUG: [/Root/] Decompression task done. 
Partition/PartitionSessionId: 1 (5-5) 2026-01-04T02:32:12.995268Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (6-6) 2026-01-04T02:32:12.995298Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (7-7) 2026-01-04T02:32:12.995327Z :DEBUG: [/Root/] Decompression task done. Partition/PartitionSessionId: 1 (8-8) 2026-01-04T02:32:12.995429Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {0, 0} (0-0) 2026-01-04T02:32:12.995475Z :DEBUG: [/Root/] Take Data. Partition 0. Read: {1, 0} (1-1) 2026-01-04T02:32:12.995516Z :DEBUG: [/Root/] [/Root/] [30e1f382-4168cefe-ca203114-bd7b6867] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2026-01-04T02:32:12.995693Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {0, 0} (0-0) 2026-01-04T02:32:12.995725Z :DEBUG: [/Root/] Take Data. Partition 4. Read: {1, 0} (1-1) 2026-01-04T02:32:12.995748Z :DEBUG: [/Root/] [/Root/] [30e1f382-4168cefe-ca203114-bd7b6867] [null] The application data is transferred to the client. Number of messages 2, size 0 bytes 2026-01-04T02:32:12.995863Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {0, 0} (0-0) 2026-01-04T02:32:12.996849Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {1, 0} (1-1) 2026-01-04T02:32:12.999096Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {2, 0} (2-2) 2026-01-04T02:32:13.000001Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {3, 0} (3-3) 2026-01-04T02:32:13.004590Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {4, 0} (4-4) 2026-01-04T02:32:13.005549Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {5, 0} (5-5) 2026-01-04T02:32:13.006648Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {6, 0} (6-6) 2026-01-04T02:32:13.007848Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {7, 0} (7-7) 2026-01-04T02:32:13.017143Z :DEBUG: [/Root/] Take Data. Partition 1. Read: {8, 0} (8-8) 2026-01-04T02:32:13.017232Z :DEBUG: [/Root/] [/Root/] [30e1f382-4168cefe-ca203114-bd7b6867] [null] The application data is transferred to the client. Number of messages 9, size 8388611 bytes 2026-01-04T02:32:13.021727Z :INFO: [/Root/] [/Root/] [30e1f382-4168cefe-ca203114-bd7b6867] Closing read session. Close timeout: 0.000000s 2026-01-04T02:32:13.021845Z :INFO: [/Root/] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:stream_TestPutRecordsCornerCases:2:5:0:0 null:stream_TestPutRecordsCornerCases:4:4:1:0 null:stream_TestPutRecordsCornerCases:1:3:8:0 null:stream_TestPutRecordsCornerCases:0:2:1:0 null:stream_TestPutRecordsCornerCases:3:1:3:0 2026-01-04T02:32:13.021903Z :INFO: [/Root/] [/Root/] [30e1f382-4168cefe-ca203114-bd7b6867] Counters: { Errors: 0 CurrentSessionLifetimeMs: 235 BytesRead: 9437699 MessagesRead: 17 BytesReadCompressed: 9437699 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:32:13.022017Z :NOTICE: [/Root/] [/Root/] [30e1f382-4168cefe-ca203114-bd7b6867] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-04T02:32:13.022075Z :DEBUG: [/Root/] [/Root/] [30e1f382-4168cefe-ca203114-bd7b6867] [null] Abort session to cluster 2026-01-04T02:32:13.022685Z :NOTICE: [/Root/] [/Root/] [30e1f382-4168cefe-ca203114-bd7b6867] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:32:13.025909Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer user1 session user1_7_1_4505571058882890613_v1 grpc read failed 2026-01-04T02:32:13.025962Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer user1 session user1_7_1_4505571058882890613_v1 grpc closed 2026-01-04T02:32:13.026011Z node 7 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer user1 session user1_7_1_4505571058882890613_v1 is DEAD 2026-01-04T02:32:14.705072Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591328643998480398:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:14.705150Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:14.760260Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:14.897352Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:14.922842Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:14.922951Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:14.964302Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:14.991681Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:15.097822Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:15.097872Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:15.097882Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:15.097989Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:15.423442Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:15.605077Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:32:15.713844Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:15.878560Z node 10 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:32:16.328934Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101)
: Error: Access for stream /Root/stream_TestPutRecords is denied for subject user2@builtin, code: 500018 2026-01-04T02:32:16.545560Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) PutRecordsResponse = encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000002" } records { sequence_number: "3" shard_id: "shard-000002" } records { sequence_number: "4" shard_id: "shard-000002" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000002" } records { sequence_number: "6" shard_id: "shard-000002" } records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000003" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000002" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } PutRecord response = encryption_type: NONE sequence_number: "7" shard_id: "shard-000004" |86.8%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> DataStreams::TestReservedConsumersMetering [GOOD] |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |86.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |86.8%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/ut_aggregation/test-results/unittest/{meta.json ... 
results_accumulator.log} >> UpsertLoad::ShouldWriteDataBulkUpsert [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsert2 |86.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] >> ReadLoad::ShouldReadIterate [GOOD] >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets+UseAuthToken [GOOD] >> ReadLoad::ShouldReadIterateMoreThanRows >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql [GOOD] |86.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export |86.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/ydb-core-tx-schemeshard-ut_export >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> DataShardVolatile::UpsertNoLocksArbiterRestart-UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink >> UpsertLoad::ShouldWriteDataBulkUpsertBatch [GOOD] >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] >> UpsertLoad::ShouldCreateTable [GOOD] >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> Cdc::InitialScanAndResolvedTimestamps [GOOD] >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom >> UpsertLoad::ShouldDropCreateTable |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::ResolvedTimestampForDisplacedUpsert [GOOD] Test command err: 2026-01-04T02:26:27.004965Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:27.005050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:27.025323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:27.088027Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:26:27.112232Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327150357200083:2081] 1767493586127817 != 1767493586127820 2026-01-04T02:26:27.112385Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:26:27.114948Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:27.257480Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:27.266306Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed 
to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:26:27.394856Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:26:27.394876Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:27.394886Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:27.394969Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:27.479667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:27.486745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046644480 2026-01-04T02:26:27.492847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:27.544551Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7591327154652168222:2296] 2026-01-04T02:26:27.544870Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:26:27.562874Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:26:27.562931Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:26:27.565381Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:26:27.565433Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:26:27.565468Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:26:27.565884Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:26:27.565922Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:26:27.565948Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7591327154652168238:2296] in generation 1 2026-01-04T02:26:27.577992Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:26:27.682780Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:26:27.682936Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:26:27.683010Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7591327154652168240:2297] 2026-01-04T02:26:27.683020Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate 
change sender: at tablet: 72075186224037888 2026-01-04T02:26:27.683029Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:26:27.683043Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:27.683248Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:26:27.683314Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:26:27.683329Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:26:27.683352Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:26:27.683364Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:26:27.683381Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:26:27.686637Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7591327154652168214:2503], serverId# [1:7591327154652168224:2507], sessionId# [0:0:0] 2026-01-04T02:26:27.686752Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:26:27.686996Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:26:27.687092Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2026-01-04T02:26:27.690921Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:26:27.691333Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:26:27.691391Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:26:27.701024Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7591327154652168254:2524], serverId# [1:7591327154652168256:2526], sessionId# [0:0:0] 2026-01-04T02:26:27.701075Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:26:27.707546Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1767493587741 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767493587741 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:26:27.707593Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:27.707817Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 
2026-01-04T02:26:27.707834Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:26:27.707854Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1767493587741:281474976710657] in PlanQueue unit at 72075186224037888 2026-01-04T02:26:27.708191Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1767493587741:281474976710657 keys extracted: 0 2026-01-04T02:26:27.708336Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:26:27.708472Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:26:27.708508Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:26:27.710891Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:26:27.711345Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:26:27.714067Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1767493587740 2026-01-04T02:26:27.714092Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:27.714134Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1767493587741} 2026-01-04T02:26:27.714167Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:26:27.714196Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:26:27.714211Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:26:27.714248Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:26:27.714315Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1767493587741 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7591327150357200423:2137], exec latency: 2 ms, propose latency: 5 ms 2026-01-04T02:26:27.714376Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2026-01-04T02:26:27.714409Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:27.714481Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1767493587748 2026-01-04T02:26:27.725480Z node 1 :CHANGE_EXCHANGE D ... 
topic 'Table/Stream/streamImpl' partition 0 2026-01-04T02:32:18.785157Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 6 partNo : 0 messageNo: 11 size 26 offset: -1 2026-01-04T02:32:18.785308Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:636: [72075186224037889][Partition][0][StateIdle] Received TPartition::TEvWrite 2026-01-04T02:32:18.785428Z node 30 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:18.785537Z node 30 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:32:18.785650Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:32:18.785741Z node 30 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:18.785856Z node 30 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:32:18.786003Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1263: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v9000/0 2026-01-04T02:32:18.786135Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:32:18.786231Z node 30 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037889][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:32:18.786337Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:32:18.786463Z node 30 :PERSQUEUE INFO: partition_write.cpp:1746: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v9000/0 2026-01-04T02:32:18.786730Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2026-01-04T02:32:18.787859Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 5 PartNo 0 PackedSize 107 count 1 nextOffset 6 batches 1 2026-01-04T02:32:18.789383Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72075186224037889][Partition][0][StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 5,1 HeadOffset 5 endOffset 5 curOffset 6 d0000000000_00000000000000000005_00000_0000000001_00000? size 93 WTime 8979 2026-01-04T02:32:18.790104Z node 30 :PERSQUEUE DEBUG: read.h:275: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:32:18.790527Z node 30 :PERSQUEUE DEBUG: read.h:313: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 5 partNo 0 count 1 size 93 2026-01-04T02:32:18.792352Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. 
Partition 0 offset 5 count 1 size 93 actorID [30:1011:2835] 2026-01-04T02:32:18.792661Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 5 partno 0 count 1 parts 0 suffix '63' size 93 2026-01-04T02:32:18.792822Z node 30 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037889][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:32:18.803513Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:492: [72075186224037889][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-04T02:32:18.803758Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037889][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 44 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:32:18.803944Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:60: [72075186224037889][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2026-01-04T02:32:18.804171Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:365: [72075186224037889][Partition][0][StateIdle] Answering for message sourceid: '\00072075186224037888', Topic: 'Table/Stream/streamImpl', Partition: 0, SeqNo: 6, partNo: 0, Offset: 5 is stored on disk 2026-01-04T02:32:18.805084Z node 30 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:18.805231Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:18.805330Z node 30 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:18.805434Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:18.805527Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:32:18.805665Z node 30 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037889][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:32:18.805893Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 11 requestId: cookie: 6 2026-01-04T02:32:18.806314Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:160: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][30:1111:2878] Handle NKikimrClient.TResponse { SessionId: TxId: Success { Response: Status: 1 ErrorCode: OK PartitionResponse { CmdWriteResult { AlreadyWritten: false SourceId: "\00072075186224037888" SeqNo: 6 Offset: 5 WriteTimestampMS: 8979 PartitionQuotedTimeMs: 0 TotalTimeInPartitionQueueMs: 0 WriteTimeMs: 0 TopicQuotedTimeMs: 0 WrittenInTx: false } Cookie: 6 } } } 2026-01-04T02:32:18.806544Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:643: [CdcChangeSenderMain][72075186224037888:1][30:1046:2878] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 0 } 2026-01-04T02:32:18.806814Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:310: TTxRemoveChangeRecords Execute: records# 1, at tablet# 72075186224037888 2026-01-04T02:32:18.806898Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:1096: RemoveChangeRecord: order: 6, at tablet: 72075186224037888 2026-01-04T02:32:18.809244Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:335: TTxRemoveChangeRecords Complete: removed# 1, left# 0, at tablet# 
72075186224037888 ... checking the update is logged before the new resolved timestamp >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2026-01-04T02:32:18.921722Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-04T02:32:18.921929Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-01-04T02:32:18.922312Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037889][Partition][0][StateIdle] read cookie 10 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 6 max time lag 0ms effective offset 0 2026-01-04T02:32:18.924663Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037889][Partition][0][StateIdle] read cookie 10 added 6 blobs, size 763 count 6 last offset 5, current partition end offset: 6 2026-01-04T02:32:18.924789Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037889][Partition][0][StateIdle] Reading cookie 10. Send blob request. 2026-01-04T02:32:18.925000Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 93 accessed 5 times before, last time 1970-01-01T00:00:06.000000Z 2026-01-04T02:32:18.925094Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 1 partno 0 count 1 parts_count 0 source 1 size 174 accessed 2 times before, last time 1970-01-01T00:00:06.000000Z 2026-01-04T02:32:18.925141Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 2 partno 0 count 1 parts_count 0 source 1 size 93 accessed 1 times before, last time 1970-01-01T00:00:06.000000Z 2026-01-04T02:32:18.925202Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 0 count 1 parts_count 0 source 1 size 155 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2026-01-04T02:32:18.925334Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 4 partno 0 count 1 parts_count 0 source 1 size 155 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2026-01-04T02:32:18.925402Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 5 partno 0 count 1 parts_count 0 source 1 size 93 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2026-01-04T02:32:18.925546Z node 30 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 10. All 6 blobs are from cache. 2026-01-04T02:32:18.925765Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-04T02:32:18.925890Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 1 partno 0 count 1 parts 0 suffix '63' 2026-01-04T02:32:18.925941Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 2 partno 0 count 1 parts 0 suffix '63' 2026-01-04T02:32:18.926094Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 3 partno 0 count 1 parts 0 suffix '63' 2026-01-04T02:32:18.926144Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037889' partition 0 offset 4 partno 0 count 1 parts 0 suffix '63' 2026-01-04T02:32:18.926188Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 5 partno 0 count 1 parts 0 suffix '63' 2026-01-04T02:32:18.926384Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 6 blobs 2026-01-04T02:32:18.926926Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2026-01-04T02:32:18.927088Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 1 totakecount 1 count 1 size 154 from pos 0 cbcount 1 2026-01-04T02:32:18.927171Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 2 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2026-01-04T02:32:18.927245Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 135 from pos 0 cbcount 1 2026-01-04T02:32:18.927320Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 1 count 1 size 135 from pos 0 cbcount 1 2026-01-04T02:32:18.927392Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2026-01-04T02:32:18.927663Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 |86.9%| [TA] {RESULT} $(B)/ydb/core/blobstorage/nodewarden/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestReservedConsumersMetering [GOOD] Test command err: 2026-01-04T02:31:50.827967Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328541588019503:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:50.828217Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:50.836187Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.020161s 2026-01-04T02:31:51.586147Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:51.760269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:51.760409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:52.007310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:52.019148Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:52.202461Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:52.241143Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:52.566239Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:52.566260Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:52.566266Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:52.569932Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:53.149888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:53.301051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:31:53.703168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:31:53.726015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-04T02:31:54.063043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000001" } records { sequence_number: "0" shard_id: "shard-000009" } records { sequence_number: "0" shard_id: "shard-000004" } records { sequence_number: "0" shard_id: "shard-000005" } records { sequence_number: "0" shard_id: "shard-000008" } records { sequence_number: "1" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000004" } records { sequence_number: "1" shard_id: "shard-000005" } records { sequence_number: "1" shard_id: "shard-000001" } records { sequence_number: "1" shard_id: "shard-000009" } records { sequence_number: "0" shard_id: "shard-000006" } records { sequence_number: "2" shard_id: "shard-000001" } records { sequence_number: "0" shard_id: "shard-000007" } records { sequence_number: "1" shard_id: "shard-000007" } records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000007" } records { sequence_number: "3" shard_id: "shard-000004" } records { sequence_number: "2" shard_id: "shard-000005" } records { sequence_number: "0" shard_id: "shard-000003" } records { sequence_number: "2" shard_id: "shard-000009" } records { sequence_number: "1" shard_id: "shard-000008" } records { 
sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000006" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000009" } records { sequence_number: "3" shard_id: "shard-000001" } records { sequence_number: "4" shard_id: "shard-000009" } records { sequence_number: "4" shard_id: "shard-000004" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "5" shard_id: "shard-000001" } records { sequence_number: "5" shard_id: "shard-000009" } records { sequence_number: "5" shard_id: "shard-000004" } records { sequence_number: "3" shard_id: "shard-000005" } records { sequence_number: "2" shard_id: "shard-000008" } records { sequence_number: "6" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000004" } records { sequence_number: "4" shard_id: "shard-000005" } records { sequence_number: "6" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000009" } records { sequence_number: "2" shard_id: "shard-000006" } records { sequence_number: "7" shard_id: "shard-000001" } records { sequence_number: "3" shard_id: "shard-000007" } records { sequence_number: "4" shard_id: "shard-000007" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000007" } records { sequence_number: "8" shard_id: "shard-000004" } records { sequence_number: "5" shard_id: "shard-000005" } records { sequence_number: "1" shard_id: "shard-000003" } records { sequence_number: "7" shard_id: "shard-000009" } records { sequence_number: "3" shard_id: "shard-000008" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000006" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000009" } records { sequence_number: "8" shard_id: "shard-000001" } records { sequence_number: "9" shard_id: "shard-000009" } records { sequence_number: "9" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000001" } 2026-01-04T02:31:55.827718Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328541588019503:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:55.827788Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; encryption_type: NONE records { sequence_number: "10" shard_id: "shard-000001" } records { sequence_number: "10" shard_id: "shard-000009" } records { sequence_number: "10" shard_id: "shard-000004" } records { sequence_number: "6" shard_id: "shard-000005" } records { sequence_number: "4" shard_id: "shard-000008" } records { sequence_number: "11" shard_id: "shard-000004" } records { sequence_number: "12" shard_id: "shard-000004" } records { sequence_number: "7" shard_id: "shard-000005" } records { sequence_number: "11" shard_id: "shard-000001" } records { sequence_number: "11" shard_id: "shard-000009" } records { sequence_number: "4" shard_id: "shard-000006" } records { sequence_number: "12" shard_id: "shard-000001" } records { sequence_number: "6" shard_id: "shard-000007" } records { sequence_number: "7" shard_id: "shard-000007" } records { 
sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000007" } records { sequence_number: "13" shard_id: "shard-000004" } records { sequence_number: "8" shard_id: "shard-000005" } records { sequence_number: "2" shard_id: "shard-000003" } records { sequence_number: "12" shard_id: "shard-000009" } records { sequence_number: "5" shard_id: "shard-000008" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000006" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000009" } records { sequence_number: "13" shard_id: "shard-000001" } records { sequence_number: "14" shard_id: "shard-000009" } records { sequence_number: "14" shard_id: "shard-000004" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000001" } encryption_type: NONE records { sequence_number: "15" shard_id: "shard-000001" } records { sequence_number: "15" shard_id: "shard-000009" } records { sequence_number: "15" shard_id: "shard-000004" } records { sequence_number: "9" shard_id: "shard-000005" } records { sequence_number: "6" shard_id: "shard-000008" } records { sequence_number: "16" shard_id: "shard-000004" } records { sequence_number: "17" shard_id: "shard-000004" } records { sequence_number: "10" shard_id: "shard-000005" } records { sequence_number: "16" shard_id: "shard-000001" } records { sequence_number: "16" shard_id: "shard-000009" } records { sequence_number: "6" shard_id: "shard-000006" } records { sequence_number: "17" shard_id: "shard-000001" } records { sequence_number: "9" shard_id: "shard-000007" } records { sequence_number: "10" shard_id: "shard-000007" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000007" } records { sequence_number: "18" shard_id: "shard-000004" } records { sequence_number: "11" shard_id: "shard-000005" } records { sequence_number: "3" shard_id: "shard-000003" } records { sequence_number: "17" shard_id: "shard-000009" } records { sequence_number: "7" shard_id: "shard-000008" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000006" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000009" } records { sequence_number: "18" shard_id: "shard-000001" } records { sequence_number: "19" shard_id: "shard-000009" } records { sequence_number: "19" shard_id: "shard-000004" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequ ... 
Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767493933882-105","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1767493933,"finish":1767493933},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493933}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767493933941-106","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":1},"usage":{"quantity":0,"unit":"second","start":1767493933,"finish":1767493933},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493933}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767493933941-107","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1767493933,"finish":1767493933},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493933}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767493933981-108","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":1},"usage":{"quantity":1,"unit":"second","start":1767493933,"finish":1767493934},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493934}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767493933981-109","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1767493933,"finish":1767493934},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493934}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767493934027-110","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":0,"unit":"second","start":1767493934,"finish":1767493934},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493934}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767493934027-111","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":0,"unit":"mbyte*second","start":1767493934,"finish":1767493934},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493934}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1767493934085-112","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1767493934,"finish":1767493935},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493935}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767493934085-113","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1767493934,"finish":1767493935},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493935}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767493934085-114","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1767493934,"finish":1767493935},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493935}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1767493935138-115","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1767493935,"finish":1767493936},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493936}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767493935138-116","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1767493935,"finish":1767493936},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493936}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767493935138-117","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1767493935,"finish":1767493936},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493936}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1767493936168-118","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1767493936,"finish":1767493937},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493937}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767493936168-119","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1767493936,"finish":1767493937},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493937}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767493936168-120","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1767493936,"finish":1767493937},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493937}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1767493937216-121","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1767493937,"finish":1767493938},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493938}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767493937216-122","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1767493937,"finish":1767493938},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493938}' Got line from metering file data: 
'{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767493937216-123","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1767493937,"finish":1767493938},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493938}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"put_units-root-72075186224037888-1767493938244-124","schema":"yds.events.puts.v1","tags":{},"usage":{"quantity":1,"unit":"put_events","start":1767493938,"finish":1767493939},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493939}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767493938244-125","schema":"yds.throughput.reserved.v1","tags":{"reserved_throughput_bps":1048576,"reserved_consumers_count":2},"usage":{"quantity":1,"unit":"second","start":1767493938,"finish":1767493939},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493939}' Got line from metering file data: '{"cloud_id":"somecloud","folder_id":"somefolder","resource_id":"/Root/stream_TestReservedConsumersMetering","id":"yds.reserved_resources-root-72075186224037888-1767493938244-126","schema":"yds.storage.reserved.v1","tags":{},"usage":{"quantity":56320,"unit":"mbyte*second","start":1767493938,"finish":1767493939},"labels":{"datastreams_stream_name":"stream_TestReservedConsumersMetering","ydb_database":"root","Category":"Topic"},"version":"v1","source_id":"72075186224037888","source_wt":1767493939}' >> BasicUsage::CreateTopicWithCustomName [GOOD] >> BasicUsage::CreateTopicWithAvailabilityPeriod |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkqlKeyFrom [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:21.427651Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:21.559968Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:21.560348Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:21.560414Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:21.818103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:21.818233Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:21.895115Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493938293413 != 1767493938293416 2026-01-04T02:32:21.912371Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:21.960015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:22.047845Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:22.363478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:22.377545Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:22.481610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:22.755278Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2026-01-04T02:32:22.755449Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 1, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-01-04T02:32:22.858735Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor finished in 0.102889s, errors=0 2026-01-04T02:32:22.858833Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |86.9%| [TM] 
{BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_exchange/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest >> HttpRequest::Analyze >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsertKeyFrom [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:20.909787Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:21.045305Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:21.045772Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:21.045878Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:21.300403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:21.300503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:21.386522Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493937789006 != 1767493937789009 2026-01-04T02:32:21.397376Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:21.443244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:21.547526Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:21.816051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:21.829211Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:21.934767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:22.272414Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 KeyFrom: 12345 } 2026-01-04T02:32:22.272545Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:298: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 KeyFrom: 12345 2026-01-04T02:32:22.276446Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:361: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} started# 5 actors each with inflight# 4 2026-01-04T02:32:22.276525Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2026-01-04T02:32:22.276578Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# 
{Tag: 0, parent: [1:953:2811], subTag: 2} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2026-01-04T02:32:22.276602Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2026-01-04T02:32:22.276651Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2026-01-04T02:32:22.276690Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 KeyFrom: 12345 2026-01-04T02:32:22.281611Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} session: ydb://session/3?node_id=1&id=OGE1M2FmMzctNWQ0YzUwYmYtYTA3ZTRiN2EtYjA4MWExZTA= 2026-01-04T02:32:22.281670Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} session: ydb://session/3?node_id=1&id=ZWViMzQ3NDItODc3NjM4YWItMjdlNDU2MDQtNGY0OGJlMA== 2026-01-04T02:32:22.283513Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} session: ydb://session/3?node_id=1&id=MWVlODI5MDMtYjg4ZDU1ZC1hZWJiM2Q4YS05OTgzNTZlMw== 2026-01-04T02:32:22.285055Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} session: ydb://session/3?node_id=1&id=MTE5ODY5YTYtNWFiNTZmMWYtY2ViNjFjMTYtYmM3YjA4M2E= 2026-01-04T02:32:22.286291Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} session: ydb://session/3?node_id=1&id=YjA3OTcxNDItMjMwODNkN2EtZTU2YmIyN2UtMThkMWQ1ZTY= 2026-01-04T02:32:22.290177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:966:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.290314Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:993:2845], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.290359Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:994:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.290393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:995:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.290432Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:996:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.290471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:997:2849], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.290542Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.292014Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1010:2862], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.292283Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.297383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:32:22.342985Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1011:2863] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:22.343789Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1012:2864] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:22.344291Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1016:2868] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:22.344878Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1018:2870] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:22.459143Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1004:2856], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:22.459250Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1005:2857], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:22.459304Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1006:2858], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:22.459338Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1007:2859], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:22.459373Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1008:2860], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:22.493772Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1098:2927] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:22.938240Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1131:2946] txid# 281474976715665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:23.011389Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} finished in 1767493943.011350s, errors=0 2026-01-04T02:32:23.011454Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} finished in 1767493943.011446s, errors=0 2026-01-04T02:32:23.011588Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1767493943011 OperationsOK: 4 OperationsError: 0 } 2026-01-04T02:32:23.011651Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1767493943011 OperationsOK: 4 OperationsError: 0 } 2026-01-04T02:32:23.024476Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1221:2986] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:23.086031Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} finished in 1767493943.085978s, errors=0 2026-01-04T02:32:23.086389Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1767493943085 OperationsOK: 4 OperationsError: 0 } 2026-01-04T02:32:23.100068Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1272:3008] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:23.164113Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} finished in 1767493943.164055s, errors=0 2026-01-04T02:32:23.164501Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1767493943164 OperationsOK: 4 OperationsError: 0 } 2026-01-04T02:32:23.178632Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1323:3030] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:23.243063Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: 
[1:953:2811], subTag: 5} finished in 1767493943.243027s, errors=0 2026-01-04T02:32:23.243308Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1767493943243 OperationsOK: 4 OperationsError: 0 } 2026-01-04T02:32:23.243344Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:395: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} finished in 0.967038s, oks# 20, errors# 0 2026-01-04T02:32:23.243454Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadServiceAccountSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets+UseAuthToken [GOOD] Test command err: Trying to start YDB, gRPC: 17678, MsgBus: 19296 2026-01-04T02:31:32.442581Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328465401814372:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:32.448226Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:33.153933Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:33.311131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:33.323126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:33.417966Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328465401814333:2081] 1767493892381766 != 1767493892381769 2026-01-04T02:31:33.419420Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:33.445051Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:33.445700Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:33.471127Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:33.766809Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:31:33.766831Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:31:33.766837Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:31:33.766929Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:34.632586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:34.662489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:31:34.780870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:35.024502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:35.238284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:35.385793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:37.422066Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328465401814372:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:37.422154Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:38.319944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328491171620002:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:38.320075Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:38.322903Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328491171620012:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:38.322995Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:38.725347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:38.757631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:38.799034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:38.836014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:38.881746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:38.943834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:38.995002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:39.060538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:39.149464Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328495466588185:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:39.149596Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:39.149920Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328495466588190:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:39.149969Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328495466588191:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:39.150091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:39.154076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:31:39.173035Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328495466588194:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:31:39.247667Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328495466588245:3784] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:31:41.017209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... qpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:15.363141Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:15.378989Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:15.379104Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:15.402445Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:15.485688Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:15.526135Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:15.526163Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:15.526181Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:15.526285Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:16.179330Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:16.217511Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:16.217947Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:16.365323Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:16.811904Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:16.925467Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:19.514819Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591328664164675074:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:19.514926Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:19.515307Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591328664164675084:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:19.515350Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:19.603149Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:19.645500Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:19.689337Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:19.731216Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:19.769954Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:19.856172Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:19.942773Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:20.037867Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:20.145437Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591328668459643253:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:20.145541Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:20.145607Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591328668459643258:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:20.146006Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591328668459643260:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:20.146067Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:20.150363Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:32:20.170084Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591328668459643261:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:32:20.209922Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591328646984804066:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:20.210026Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:32:20.248012Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591328668459643316:3777] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:22.068985Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-04T02:32:22.147146Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2026-01-04T02:32:22.162066Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:21.081534Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:21.206031Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:21.206509Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:21.206575Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:21.488456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:21.488613Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:21.571496Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493937945341 != 1767493937945344 2026-01-04T02:32:21.585332Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:21.633511Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:21.732019Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:22.005677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:22.019946Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:22.129931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:22.432078Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2026-01-04T02:32:22.432213Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:298: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2026-01-04T02:32:22.436513Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:361: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} started# 5 actors each with inflight# 4 2026-01-04T02:32:22.436593Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-04T02:32:22.436646Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} 
Bootstrap called: RowCount: 4 Inflight: 1 2026-01-04T02:32:22.436686Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-04T02:32:22.436745Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-04T02:32:22.436775Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-04T02:32:22.442149Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} session: ydb://session/3?node_id=1&id=NTQ4MzFkZC04MTY3ZTI0NC0xMDAyY2NmNC1jYTE5YjlmYg== 2026-01-04T02:32:22.442226Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} session: ydb://session/3?node_id=1&id=OWY4OGQxMmMtNWNmY2JlOWMtZTdiZmYxZS00NzI3NjE2Zg== 2026-01-04T02:32:22.444121Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} session: ydb://session/3?node_id=1&id=NjM0OGNiZDAtYjYwMDBmZTktNjdmYmZmNWYtNTMzNjFlMTI= 2026-01-04T02:32:22.445780Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} session: ydb://session/3?node_id=1&id=YjFhMzUxNjctNGFjNjY3MjQtZThkMGEwMGItOTNkZTk3OGU= 2026-01-04T02:32:22.447402Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} session: ydb://session/3?node_id=1&id=OGU3MjI4MzUtNTBhNTdkZjQtNDljMWRjY2EtNmFmOGJmMmQ= 2026-01-04T02:32:22.452078Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:966:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.452243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:993:2845], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.452307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:994:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.452387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:995:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.452440Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:996:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.452522Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:997:2849], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.452600Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.454581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1010:2862], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.454837Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.460753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:32:22.531091Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1011:2863] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:22.532148Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1012:2864] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:22.532831Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1016:2868] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:22.533571Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1018:2870] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:22.652831Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1004:2856], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:22.652964Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1005:2857], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:22.653044Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1006:2858], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:22.653117Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1007:2859], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:22.653177Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1008:2860], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:22.687975Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1098:2927] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:23.116224Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} finished in 1767493943.116175s, errors=0 2026-01-04T02:32:23.116550Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1767493943116 OperationsOK: 4 OperationsError: 0 } 2026-01-04T02:32:23.130242Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1171:2965] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:23.198145Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} finished in 1767493943.198100s, errors=0 2026-01-04T02:32:23.198436Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1767493943198 OperationsOK: 4 OperationsError: 0 } 2026-01-04T02:32:23.212487Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1222:2987] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:23.278817Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1272:3008] txid# 281474976715678, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:23.296081Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} finished in 1767493943.296023s, errors=0 2026-01-04T02:32:23.296294Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1767493943296 OperationsOK: 4 OperationsError: 0 } 2026-01-04T02:32:23.345416Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} finished in 1767493943.345379s, errors=0 2026-01-04T02:32:23.345566Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1767493943345 OperationsOK: 4 OperationsError: 0 } 2026-01-04T02:32:23.359410Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1323:3030] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:23.435757Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: 
[1:953:2811], subTag: 2} finished in 1767493943.435701s, errors=0 2026-01-04T02:32:23.436093Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1767493943435 OperationsOK: 4 OperationsError: 0 } 2026-01-04T02:32:23.436148Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:395: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} finished in 0.999858s, oks# 20, errors# 0 2026-01-04T02:32:23.436334Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteKqpUpsert2 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:21.856215Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:21.991059Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:21.991451Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:21.991510Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:22.288725Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:22.288853Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:22.356336Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493938758047 != 1767493938758050 2026-01-04T02:32:22.368355Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:22.410703Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:22.494353Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:22.805717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:22.820661Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:22.926342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:23.212064Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertKqpStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "JustTable" } UpsertKqpStart { RowCount: 20 Inflight: 5 } 2026-01-04T02:32:23.212203Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:298: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} Bootstrap called: RowCount: 20 Inflight: 5 2026-01-04T02:32:23.215596Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:361: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} started# 5 actors each with inflight# 4 2026-01-04T02:32:23.215679Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-04T02:32:23.215742Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} 
Bootstrap called: RowCount: 4 Inflight: 1 2026-01-04T02:32:23.215792Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-04T02:32:23.215839Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-04T02:32:23.215868Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:116: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} Bootstrap called: RowCount: 4 Inflight: 1 2026-01-04T02:32:23.221342Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} session: ydb://session/3?node_id=1&id=NzJkMGMxMjUtZDdiMGZlNTMtNDY0Mzg3ZC01ZWJlYjU5YQ== 2026-01-04T02:32:23.221428Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 2} session: ydb://session/3?node_id=1&id=YzA0OWZiOTUtMzUwZGQxNDAtNDQ5MTViY2MtOGVkZDQxMjI= 2026-01-04T02:32:23.223400Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} session: ydb://session/3?node_id=1&id=ZDllNjZkYzAtYjcxMzMxMjUtMWEzOGFjMDctODZlNGNjNA== 2026-01-04T02:32:23.225100Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} session: ydb://session/3?node_id=1&id=ZmU1MmUzNzAtZmVlOGM5MWQtODMyNTFhMWYtNTg3MjA4MmE= 2026-01-04T02:32:23.226737Z node 1 :DS_LOAD_TEST DEBUG: kqp_upsert.cpp:207: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} session: ydb://session/3?node_id=1&id=MjdmODQxYTYtZWU2ZTVmYTctYTc3ZDhkMDEtZjQwMzg1MGM= 2026-01-04T02:32:23.231354Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:966:2824], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:23.231486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:993:2845], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:23.231565Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:994:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:23.231638Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:995:2847], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:23.231708Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:996:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:23.231756Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:997:2849], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:23.231827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:23.233914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1010:2862], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:23.234210Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:23.240283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:32:23.288689Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1011:2863] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:23.289349Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1012:2864] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:23.289855Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1016:2868] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:23.290358Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1018:2870] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:23.406735Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1004:2856], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:23.406846Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1005:2857], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:23.406918Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1006:2858], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:23.406964Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1007:2859], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:23.407025Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1008:2860], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:23.442272Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1098:2927] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:24.027636Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 1} finished in 1767493944.027558s, errors=0 2026-01-04T02:32:24.028010Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 1 { Tag: 1 DurationMs: 1767493944027 OperationsOK: 4 OperationsError: 0 } 2026-01-04T02:32:24.046012Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1171:2965] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:24.129549Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 5} finished in 1767493944.129428s, errors=0 2026-01-04T02:32:24.129934Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 5 { Tag: 5 DurationMs: 1767493944129 OperationsOK: 4 OperationsError: 0 } 2026-01-04T02:32:24.144651Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1222:2987] txid# 281474976715673, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:24.164974Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1239:2996] txid# 281474976715675, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:24.258952Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 3} finished in 1767493944.258910s, errors=0 2026-01-04T02:32:24.259391Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 3 { Tag: 3 DurationMs: 1767493944258 OperationsOK: 4 OperationsError: 0 } 2026-01-04T02:32:24.259487Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: [1:953:2811], subTag: 4} finished in 1767493944.259427s, errors=0 2026-01-04T02:32:24.259602Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 4 { Tag: 4 DurationMs: 1767493944259 OperationsOK: 4 OperationsError: 0 } 2026-01-04T02:32:24.277494Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1320:3027] txid# 281474976715683, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:24.350112Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:190: TKqpUpsertActor# {Tag: 0, parent: 
[1:953:2811], subTag: 2} finished in 1767493944.350064s, errors=0 2026-01-04T02:32:24.350461Z node 1 :DS_LOAD_TEST INFO: kqp_upsert.cpp:376: kqp# {Tag: 0, parent: [1:952:2810], subTag: 2} finished: 2 { Tag: 2 DurationMs: 1767493944350 OperationsOK: 4 OperationsError: 0 } 2026-01-04T02:32:24.350542Z node 1 :DS_LOAD_TEST NOTICE: kqp_upsert.cpp:395: TKqpUpsertActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 2} finished in 1.135155s, oks# 20, errors# 0 2026-01-04T02:32:24.350690Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::EqWidthHistogramSmallParamTypes |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest >> BasicStatistics::PersistenceWithStorageFailuresAndReboots |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> BasicStatistics::StatisticsOnShardsRestart |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::InitialScanAndResolvedTimestamps [GOOD] Test command err: 2026-01-04T02:26:22.352690Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327133855952162:2247];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:22.399746Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:26:22.418068Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:23.136392Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:26:23.232295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:23.232391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:23.252901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:23.361443Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:23.458157Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:26:23.458245Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327133855951953:2081] 1767493582304573 != 1767493582304576 2026-01-04T02:26:23.461291Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:23.747583Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:26:23.747618Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:23.747625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:23.747717Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:23.845748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:23.858788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:26:23.924169Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7591327138150920093:2297] 2026-01-04T02:26:23.924405Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:26:23.990279Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:26:23.990515Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:26:23.992365Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:26:23.992426Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:26:23.992451Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:26:23.992892Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:26:23.992940Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:26:23.992970Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7591327138150920108:2297] in generation 1 2026-01-04T02:26:23.998231Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:26:24.088314Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:26:24.088490Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:26:24.088554Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7591327142445887406:2298] 2026-01-04T02:26:24.088568Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:26:24.088582Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:26:24.088592Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:24.088752Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:26:24.088858Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:26:24.088880Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:26:24.088901Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:26:24.088915Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:26:24.088943Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:26:24.097463Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7591327138150920091:2504], serverId# [1:7591327138150920094:2505], sessionId# [0:0:0] 2026-01-04T02:26:24.097598Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:26:24.097887Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:26:24.097973Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2026-01-04T02:26:24.105975Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:26:24.106115Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:26:24.106160Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:26:24.114153Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7591327142445887420:2521], serverId# [1:7591327142445887422:2523], sessionId# [0:0:0] 2026-01-04T02:26:24.123205Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1767493584157 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767493584157 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:26:24.123247Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:24.123399Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, 
LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:26:24.123472Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:26:24.123491Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:26:24.123512Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1767493584157:281474976710657] in PlanQueue unit at 72075186224037888 2026-01-04T02:26:24.123817Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1767493584157:281474976710657 keys extracted: 0 2026-01-04T02:26:24.123955Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:26:24.124066Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:26:24.124102Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:26:24.136293Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:26:24.136797Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:26:24.148928Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1767493584156 2026-01-04T02:26:24.148956Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:26:24.149015Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1767493584157} 2026-01-04T02:26:24.149067Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:26:24.149106Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:26:24.149125Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:26:24.149149Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:26:24.149202Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1767493584157 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7591327133855952308:2147], exec latency: 12 ms, propose latency: 25 ms 2026-01-04T02:26:24.149232Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2026-01-04T02:26:24.149282Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 720751862 ... 
artition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:23.211574Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:23.211612Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:32:23.233099Z node 30 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:23.233177Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:23.233207Z node 30 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:23.233250Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:23.233283Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:32:23.234114Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:3810: Notified by mediator time cast with PlanStep# 9000 at tablet 72075186224037888 2026-01-04T02:32:23.234245Z node 30 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:32:23.234408Z node 30 :TX_DATASHARD INFO: cdc_stream_heartbeat.cpp:42: [CdcStreamHeartbeat] Emit change records: edge# v9000/18446744073709551615, at tablet# 72075186224037888 2026-01-04T02:32:23.234641Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 5 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 0 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-04T02:32:23.246066Z node 30 :TX_DATASHARD INFO: cdc_stream_heartbeat.cpp:78: [CdcStreamHeartbeat] Enqueue 1 change record(s): at tablet# 72075186224037888 2026-01-04T02:32:23.246170Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 0 } 2026-01-04T02:32:23.246235Z node 30 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:32:23.246294Z node 30 :TX_DATASHARD DEBUG: datashard.cpp:3842: Waiting for PlanStep# 12000 from mediator time cast 2026-01-04T02:32:23.246428Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:71: [ChangeSender][72075186224037888:1][30:903:2775] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] BodySize: 0 }] } 2026-01-04T02:32:23.249132Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:628: [CdcChangeSenderMain][72075186224037888:1][30:1162:2950] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] BodySize: 0 }] } 2026-01-04T02:32:23.249431Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:215: TTxRequestChangeRecords Execute: at tablet# 72075186224037888 
2026-01-04T02:32:23.249641Z node 30 :TX_DATASHARD DEBUG: datashard_change_sending.cpp:235: Send 1 change records: to# [30:1162:2950], at tablet# 72075186224037888 2026-01-04T02:32:23.249690Z node 30 :TX_DATASHARD INFO: datashard_change_sending.cpp:260: TTxRequestChangeRecords Complete: sent# 1, forgotten# 0, left# 0, at tablet# 72075186224037888 2026-01-04T02:32:23.249831Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:633: [CdcChangeSenderMain][72075186224037888:1][30:1162:2950] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2026-01-04T02:32:23.249950Z node 30 :CHANGE_EXCHANGE DEBUG: change_sender_cdc_stream.cpp:111: [CdcChangeSenderPartition][72075186224037888:1][0][72075186224037889][30:1244:2950] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 5 Group: 0 Step: 9000 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 43] Kind: CdcHeartbeat Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2026-01-04T02:32:23.250216Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-04T02:32:23.250264Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-01-04T02:32:23.250351Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 4 requestId: cookie: 3 2026-01-04T02:32:23.250500Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-04T02:32:23.250529Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-01-04T02:32:23.250578Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72075186224037889] got client message topic: Table/Stream/streamImpl partition: 0 SourceId: '\00072075186224037888' SeqNo: 5 partNo : 0 messageNo: 5 size 26 offset: -1 2026-01-04T02:32:23.250629Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:636: [72075186224037889][Partition][0][StateIdle] Received TPartition::TEvWrite 2026-01-04T02:32:23.250676Z node 30 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:23.250709Z node 30 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:32:23.250748Z node 30 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:32:23.250771Z node 30 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:23.250800Z node 30 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037889][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:32:23.250915Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1263: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 process heartbeat sourceId '\00072075186224037888' version v9000/0 2026-01-04T02:32:23.250964Z node 30 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:32:23.250996Z node 30 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037889][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:32:23.251029Z node 30 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:32:23.251080Z node 30 :PERSQUEUE INFO: partition_write.cpp:1746: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 emit heartbeat v9000/0 2026-01-04T02:32:23.251153Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob processing sourceId '\00072075186224037889' seqNo 0 partNo 0 2026-01-04T02:32:23.251902Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72075186224037889][Partition][0][StateIdle] Topic 'Table/Stream/streamImpl' partition 0 part blob complete sourceId '\00072075186224037889' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 4 PartNo 0 PackedSize 107 count 1 nextOffset 5 batches 1 2026-01-04T02:32:23.252316Z node 30 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72075186224037889][Partition][0][StateIdle] Add new write blob: topic 'Table/Stream/streamImpl' partition 0 compactOffset 4,1 HeadOffset 4 endOffset 4 curOffset 5 d0000000000_00000000000000000004_00000_0000000001_00000? size 93 WTime 8950 2026-01-04T02:32:23.252562Z node 30 :PERSQUEUE DEBUG: read.h:275: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:32:23.252667Z node 30 :PERSQUEUE DEBUG: read.h:313: [72075186224037889][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 4 partNo 0 count 1 size 93 2026-01-04T02:32:23.253727Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 4 count 1 size 93 actorID [30:1117:2903] 2026-01-04T02:32:23.253834Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037889' partition 0 offset 4 partno 0 count 1 parts 0 suffix '63' size 93 2026-01-04T02:32:23.253879Z node 30 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037889][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse >>>>> GetRecords path=/Root/Table/Stream partitionId=0 2026-01-04T02:32:23.254741Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-04T02:32:23.254788Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream/streamImpl' partition 0 2026-01-04T02:32:23.254962Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037889][Partition][0][StateIdle] read cookie 5 Topic 'Table/Stream/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 4 max time lag 0ms effective offset 0 2026-01-04T02:32:23.255928Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037889][Partition][0][StateIdle] read cookie 5 added 2 blobs, size 452 count 4 last offset 3, current partition end offset: 4 2026-01-04T02:32:23.256045Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037889][Partition][0][StateIdle] Reading cookie 5. Send blob request. 2026-01-04T02:32:23.256221Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. 
Partition 0 offset 0 partno 0 count 3 parts_count 0 source 1 size 359 accessed 1 times before, last time 1970-01-01T00:00:07.000000Z 2026-01-04T02:32:23.256292Z node 30 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 3 partno 0 count 1 parts_count 0 source 1 size 93 accessed 0 times before, last time 1970-01-01T00:00:08.000000Z 2026-01-04T02:32:23.256429Z node 30 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 5. All 2 blobs are from cache. 2026-01-04T02:32:23.256613Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 0 partno 0 count 3 parts 0 suffix '63' 2026-01-04T02:32:23.256690Z node 30 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037889' partition 0 offset 3 partno 0 count 1 parts 0 suffix '63' 2026-01-04T02:32:23.256774Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 2 blobs 2026-01-04T02:32:23.257091Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 3 count 3 size 339 from pos 0 cbcount 3 2026-01-04T02:32:23.257224Z node 30 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 3 totakecount 1 count 1 size 75 from pos 0 cbcount 1 2026-01-04T02:32:23.257954Z node 30 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >> BasicStatistics::Serverless |86.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_exchange/unittest |86.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_17_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 17] [GOOD] |86.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |86.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |86.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_login_large/ydb-core-tx-schemeshard-ut_login_large |87.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] |87.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries |87.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/ydb-core-tx-schemeshard-ut_topic_set_boundaries ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsert2 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:20.558954Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:20.689954Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:20.690366Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:20.690421Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:20.990789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:20.990915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:21.077133Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493937710703 != 1767493937710706 2026-01-04T02:32:21.095486Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:21.142025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:21.234410Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:21.529100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:21.542260Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:21.651114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:21.926082Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2026-01-04T02:32:21.926195Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-01-04T02:32:21.982613Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor finished in 0.056132s, errors=0 2026-01-04T02:32:21.982715Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 ... 
waiting for SysViewsRoster update finished 2026-01-04T02:32:25.409009Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:25.414021Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:25.417739Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:25.417959Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:25.418084Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:25.658967Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:25.659128Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:25.702521Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767493942438515 != 1767493942438519 2026-01-04T02:32:25.709245Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:25.760838Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:25.845721Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:26.189523Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:26.203081Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:26.321590Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:26.570199Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2026-01-04T02:32:26.570330Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" 2026-01-04T02:32:26.635397Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor finished in 0.064564s, errors=0 2026-01-04T02:32:26.635516Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:953:2811] with tag# 2 >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] |87.0%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest >> 
UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] >> Yq_1::CreateQuery_Without_Connection [GOOD] |87.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |87.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots |87.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool_reboots/ydb-core-tx-schemeshard-ut_resource_pool_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadIterateMoreThanRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:21.003291Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:21.117127Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:21.117595Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:21.117665Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:21.382196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:21.382310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:21.448813Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493937986552 != 1767493937986555 2026-01-04T02:32:21.473714Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:21.522081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:21.605470Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:21.908077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:21.921407Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:22.025581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:22.318075Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# usertable in dir# /Root with rows# 1000 2026-01-04T02:32:22.319846Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:952:2810], subTag: 1} TUpsertActor Bootstrap called: RowCount: 1000 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" 2026-01-04T02:32:22.363119Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:952:2810], subTag: 1} TUpsertActor finished in 0.042898s, errors=0 2026-01-04T02:32:22.363849Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 1000 Inflights: 1 
Chunks: 0 Chunks: 1 Chunks: 10 } 2026-01-04T02:32:22.363983Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:334: ReadIteratorLoadScenario# [1:961:2819] with id# {Tag: 0, parent: [1:952:2810], subTag: 3} Bootstrap called: RowCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2026-01-04T02:32:22.365136Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:397: ReadIteratorLoadScenario# {Tag: 0, parent: [1:952:2810], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 38 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2026-01-04T02:32:22.365299Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:964:2822] 2026-01-04T02:32:22.365446Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} Bootstrap called, sample# 0 2026-01-04T02:32:22.365505Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} Connect to# 72075186224037888 called 2026-01-04T02:32:22.365785Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} Handle TEvClientConnected called, Status# OK 2026-01-04T02:32:22.370958Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} finished in 0.005099s, read# 1000 2026-01-04T02:32:22.371262Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [1:964:2822] with chunkSize# 0 finished: 0 { DurationMs: 5 OperationsOK: 1000 OperationsError: 0 } 2026-01-04T02:32:22.371372Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:967:2825] 2026-01-04T02:32:22.371433Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 2} Bootstrap called, sample# 0 2026-01-04T02:32:22.371458Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 2} Connect to# 72075186224037888 called 2026-01-04T02:32:22.371635Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 2} Handle TEvClientConnected called, Status# OK 2026-01-04T02:32:22.562236Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 2} finished in 0.190556s, read# 1000 2026-01-04T02:32:22.562386Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [1:967:2825] with chunkSize# 1 finished: 0 { DurationMs: 190 OperationsOK: 1000 OperationsError: 0 } 2026-01-04T02:32:22.562490Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:970:2828] 2026-01-04T02:32:22.562538Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 3} Bootstrap called, sample# 0 2026-01-04T02:32:22.562575Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 3} Connect to# 72075186224037888 called 2026-01-04T02:32:22.562817Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 3} Handle TEvClientConnected called, Status# OK 2026-01-04T02:32:22.628379Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 3} finished in 0.065515s, read# 1000 2026-01-04T02:32:22.628496Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [1:970:2828] with chunkSize# 10 finished: 
0 { DurationMs: 65 OperationsOK: 1000 OperationsError: 0 } 2026-01-04T02:32:22.628588Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [1:973:2831] 2026-01-04T02:32:22.628631Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 4} Bootstrap called, sample# 1000 2026-01-04T02:32:22.628665Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 4} Connect to# 72075186224037888 called 2026-01-04T02:32:22.628867Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 4} Handle TEvClientConnected called, Status# OK 2026-01-04T02:32:22.630941Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:137: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 4} finished in 0.001519s, sampled# 1000, iter finished# 1, oks# 1000 2026-01-04T02:32:22.631062Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:507: ReadIteratorLoadScenario# {Tag: 0, parent: [1:952:2810], subTag: 3} received keyCount# 1000 2026-01-04T02:32:22.631224Z node 1 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:552: ReadIteratorLoadScenario# {Tag: 0, parent: [1:952:2810], subTag: 3} started read actor with id# [1:976:2834] 2026-01-04T02:32:22.631305Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:79: TReadIteratorPoints# {Tag: 0, parent: [1:961:2819], subTag: 5} Bootstrap called, will read keys# 1000 2026-01-04T02:32:23.001961Z node 1 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:560: ReadIteratorLoadScenario# {Tag: 0, parent: [1:952:2810], subTag: 3} received point times# 1000, Inflight left# 0 2026-01-04T02:32:23.002116Z node 1 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:483: headread with inflight# 1 finished: 0 { DurationMs: 370 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 37\n" } 2026-01-04T02:32:23.002260Z node 1 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:617: ReadIteratorLoadScenario# {Tag: 0, parent: [1:952:2810], subTag: 3} finished in 0.638141s with report: { DurationMs: 5 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 190 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 65 OperationsOK: 1000 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 370 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 37\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2026-01-04T02:32:23.002488Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:961:2819] with tag# 3 ... waiting for SysViewsRoster update finished 2026-01-04T02:32:26.343472Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:26.349329Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:26.353209Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:26.353461Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:26.353603Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:26.601073Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:26.601198Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:26.625449Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767493943338897 != 1767493943338901 2026-01-04T02:32:26.630340Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:26.674776Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:26.753717Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:27.018969Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:27.032231Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:27.149222Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:27.410768Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# usertable in dir# /Root with rows# 10 2026-01-04T02:32:27.411038Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:952:2810], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" 2026-01-04T02:32:27.423920Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:952:2810], subTag: 1} TUpsertActor finished in 0.012622s, errors=0 2026-01-04T02:32:27.424542Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kReadIteratorStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } ReadIteratorStart { RowCount: 10 ReadCount: 1000 
Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 } 2026-01-04T02:32:27.424661Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:334: ReadIteratorLoadScenario# [2:961:2819] with id# {Tag: 0, parent: [2:952:2810], subTag: 3} Bootstrap called: RowCount: 10 ReadCount: 1000 Inflights: 1 Chunks: 0 Chunks: 1 Chunks: 10 2026-01-04T02:32:27.425888Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:397: ReadIteratorLoadScenario# {Tag: 0, parent: [2:952:2810], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 38 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2026-01-04T02:32:27.426008Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:964:2822] 2026-01-04T02:32:27.426136Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 1} Bootstrap called, sample# 0 2026-01-04T02:32:27.426201Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 1} Connect to# 72075186224037888 called 2026-01-04T02:32:27.426536Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 1} Handle TEvClientConnected called, Status# OK 2026-01-04T02:32:27.427314Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 1} finished in 0.000719s, read# 10 2026-01-04T02:32:27.427469Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [2:964:2822] with chunkSize# 0 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2026-01-04T02:32:27.427583Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:967:2825] 2026-01-04T02:32:27.427633Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 2} Bootstrap called, sample# 0 2026-01-04T02:32:27.427662Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 2} Connect to# 72075186224037888 called 2026-01-04T02:32:27.427872Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 2} Handle TEvClientConnected called, Status# OK 2026-01-04T02:32:27.433870Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 2} finished in 0.005949s, read# 10 2026-01-04T02:32:27.434026Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [2:967:2825] with chunkSize# 1 finished: 0 { DurationMs: 5 OperationsOK: 10 OperationsError: 0 } 2026-01-04T02:32:27.434129Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:970:2828] 2026-01-04T02:32:27.434200Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 3} Bootstrap called, sample# 0 2026-01-04T02:32:27.434234Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 3} Connect to# 72075186224037888 called 2026-01-04T02:32:27.434474Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 3} Handle TEvClientConnected called, Status# OK 2026-01-04T02:32:27.435158Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:147: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 3} finished in 0.000648s, read# 10 2026-01-04T02:32:27.435251Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:457: fullscan actor# [2:970:2828] with 
chunkSize# 10 finished: 0 { DurationMs: 0 OperationsOK: 10 OperationsError: 0 } 2026-01-04T02:32:27.435352Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:438: started fullscan actor# [2:973:2831] 2026-01-04T02:32:27.435401Z node 2 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 4} Bootstrap called, sample# 10 2026-01-04T02:32:27.435426Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 4} Connect to# 72075186224037888 called 2026-01-04T02:32:27.435601Z node 2 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 4} Handle TEvClientConnected called, Status# OK 2026-01-04T02:32:27.436068Z node 2 :DS_LOAD_TEST NOTICE: common.cpp:137: ReadIteratorScan# {Tag: 0, parent: [2:961:2819], subTag: 4} finished in 0.000377s, sampled# 10, iter finished# 1, oks# 10 2026-01-04T02:32:27.436168Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:507: ReadIteratorLoadScenario# {Tag: 0, parent: [2:952:2810], subTag: 3} received keyCount# 10 2026-01-04T02:32:27.436323Z node 2 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:552: ReadIteratorLoadScenario# {Tag: 0, parent: [2:952:2810], subTag: 3} started read actor with id# [2:976:2834] 2026-01-04T02:32:27.436373Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:79: TReadIteratorPoints# {Tag: 0, parent: [2:961:2819], subTag: 5} Bootstrap called, will read keys# 10 2026-01-04T02:32:27.793978Z node 2 :DS_LOAD_TEST DEBUG: test_load_read_iterator.cpp:560: ReadIteratorLoadScenario# {Tag: 0, parent: [2:952:2810], subTag: 3} received point times# 1000, Inflight left# 0 2026-01-04T02:32:27.794193Z node 2 :DS_LOAD_TEST INFO: test_load_read_iterator.cpp:483: headread with inflight# 1 finished: 0 { DurationMs: 357 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 24\n" } 2026-01-04T02:32:27.794375Z node 2 :DS_LOAD_TEST NOTICE: test_load_read_iterator.cpp:617: ReadIteratorLoadScenario# {Tag: 0, parent: [2:952:2810], subTag: 3} finished in 0.369541s with report: { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 1, type# FullScan with chunk# inf" } { DurationMs: 5 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 2, type# FullScan with chunk# 1" } { DurationMs: 0 OperationsOK: 10 OperationsError: 0 PrefixInfo: "Test run# 3, type# FullScan with chunk# 10" } { DurationMs: 357 OperationsOK: 1000 OperationsError: 0 Info: "single row head read hist (ms):\n50%: 1\n95%: 1\n99%: 1\n99.9%: 24\n" PrefixInfo: "Test run# 4, type# ReadHeadPoints with inflight# 1" } 2026-01-04T02:32:27.794490Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:961:2819] with tag# 3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestWriteVeryBigMessage [GOOD] Test command err: 2026-01-04T02:31:33.679008Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-04T02:31:33.786031Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 
2026-01-04T02:31:33.786126Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:31:33.786213Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:31:33.786317Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-04T02:31:33.816720Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:31:33.844310Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:31:33.845394Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-04T02:31:33.848606Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-04T02:31:33.850870Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-04T02:31:33.853048Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2026-01-04T02:31:33.864520Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:31:33.865053Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|d8402ce3-958c633e-b15f2c39-326faa17_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:31:33.889981Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:31:33.890591Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1f2f5196-3828f8b4-583fa33c-7cd4ab7c_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:31:33.936528Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:31:33.937066Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie 
default|d62979cb-24b3fb3d-2ece01cc-88cd54e6_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:244:2057] recipient: [1:103:2137] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:247:2057] recipient: [1:246:2242] Leader for TabletID 72057594037927937 is [1:248:2243] sender: [1:249:2057] recipient: [1:246:2242] 2026-01-04T02:31:34.040088Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:31:34.040162Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:31:34.041329Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:31:34.041386Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:31:34.042070Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:297:2243] 2026-01-04T02:31:34.047155Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:298:2243] 2026-01-04T02:31:34.055128Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:31:34.055544Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:31:34.055951Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:297:2243] 2026-01-04T02:31:34.056185Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:298:2243] 2026-01-04T02:31:34.065049Z node 1 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 2 suffix '63' size 1048786 2026-01-04T02:31:34.067908Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [1:248:2243] sender: [1:326:2057] recipient: [1:14:2061] Got start offset = 0 2026-01-04T02:31:34.550279Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:113:2057] recipient: [2:106:2138] 2026-01-04T02:31:34.661525Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:31:34.661637Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:31:34.661696Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:31:34.661763Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927938 is [2:158:2176] sender: [2:159:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:182:2057] recipient: [2:14:2061] 2026-01-04T02:31:34.682430Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:31:34.683491Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 2 actor [2:180:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } 2026-01-04T02:31:34.684309Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:188:2142] 2026-01-04T02:31:34.687267Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:188:2142] 2026-01-04T02:31:34.689207Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:189:2142] 2026-01-04T02:31:34.691461Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:189:2142] 2026-01-04T02:31:34.699397Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' 
partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:31:34.699832Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|85fa2942-fae1221b-c56dbfa8-cdd307fe_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:31:34.721718Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:31:34.722323Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|221b8d71-22de4cf4-1d9db91c-fb0262e8_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:31:34.777748Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:31:34.778632Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|9ee0d195-fc9e3489-86ad3c0-5193c71b_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default !Reboot 72057594037927937 (actor [2:112:2142]) on event NKikimr::TEvPersQueue::TEvOffsets ! Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:242:2057] recipient: [2:104:2137] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:245:2057] recipient: [2:244:2239] Leader for TabletID 72057594037927937 is [2:246:2240] sender: [2:247:2057] recipient: [2:244:2239] 2026-01-04T02:31:34.911852Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:31:34.911943Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:31:34.912863Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:31:34.912935Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01 ... 
7594037927937] Config applied version 55 actor [55:180:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 55 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 55 } 2026-01-04T02:32:27.453859Z node 55 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [55:188:2142] 2026-01-04T02:32:27.456124Z node 55 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [55:188:2142] 2026-01-04T02:32:27.457785Z node 55 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [55:189:2142] 2026-01-04T02:32:27.459518Z node 55 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [55:189:2142] 2026-01-04T02:32:27.468517Z node 55 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:32:27.468940Z node 55 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|4007b779-9eaa2e25-7c119eb9-daa7e97d_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:32:27.502298Z node 55 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:32:27.502891Z node 55 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|3497a534-a6e9f29d-9660dab1-3936869e_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:32:27.549632Z node 55 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:32:27.550266Z node 55 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|3b56f831-8030bb37-241a24b7-631df702_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Leader for TabletID 72057594037927937 is [55:112:2142] sender: [55:243:2057] recipient: [55:104:2137] Leader for TabletID 72057594037927937 is [55:112:2142] sender: [55:246:2057] recipient: [55:245:2240] Leader for TabletID 72057594037927937 is [55:247:2241] sender: [55:248:2057] recipient: [55:245:2240] 2026-01-04T02:32:27.647131Z node 55 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:32:27.647188Z node 55 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:32:27.647878Z node 55 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:32:27.647917Z node 55 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:32:27.648835Z node 55 :PERSQUEUE INFO: partition_init.cpp:1155: 
[72057594037927937][Partition][0][StateInit] bootstrapping 0 [55:296:2241] 2026-01-04T02:32:27.651288Z node 55 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [55:297:2241] 2026-01-04T02:32:27.660684Z node 55 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:32:27.661202Z node 55 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:32:27.661930Z node 55 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [55:296:2241] 2026-01-04T02:32:27.662222Z node 55 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [55:297:2241] 2026-01-04T02:32:27.675114Z node 55 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 2 suffix '63' size 1048786 2026-01-04T02:32:27.677652Z node 55 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 55 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [55:247:2241] sender: [55:325:2057] recipient: [55:14:2061] Got start offset = 0 2026-01-04T02:32:28.040854Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:107:2057] recipient: [56:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:107:2057] recipient: [56:105:2138] Leader for TabletID 72057594037927937 is [56:111:2142] sender: [56:112:2057] recipient: [56:105:2138] 2026-01-04T02:32:28.112177Z node 56 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:32:28.112239Z node 56 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:32:28.112289Z node 56 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:32:28.112345Z node 56 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [56:153:2057] recipient: [56:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [56:153:2057] recipient: [56:151:2172] Leader for TabletID 72057594037927938 is [56:157:2176] sender: [56:158:2057] recipient: [56:151:2172] Leader for TabletID 72057594037927937 is [56:111:2142] sender: [56:183:2057] recipient: [56:14:2061] 2026-01-04T02:32:28.135159Z node 56 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:32:28.136031Z node 56 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 56 actor [56:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 
TopicName: "rt3.dc1--asdfgs--topic" Version: 56 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 56 } 2026-01-04T02:32:28.136764Z node 56 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [56:189:2142] 2026-01-04T02:32:28.139943Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [56:189:2142] 2026-01-04T02:32:28.142024Z node 56 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [56:190:2142] 2026-01-04T02:32:28.144317Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [56:190:2142] 2026-01-04T02:32:28.151724Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:32:28.152105Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|89b8a801-73e4373-186d3df-89cf0f4_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:32:28.173084Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:32:28.173608Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ace9d592-4062d015-6b586bf3-5e9e1c13_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:32:28.259422Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:32:28.259972Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ff3a8d81-220ca6b8-2d660141-8c07fa19_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Leader for TabletID 72057594037927937 is [56:111:2142] sender: [56:242:2057] recipient: [56:103:2137] Leader for TabletID 72057594037927937 is [56:111:2142] sender: [56:245:2057] recipient: [56:244:2240] Leader for TabletID 72057594037927937 is [56:246:2241] sender: [56:247:2057] recipient: [56:244:2240] 2026-01-04T02:32:28.358023Z node 56 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:32:28.358088Z node 56 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:32:28.358838Z node 56 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:32:28.358888Z node 56 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:32:28.359910Z node 56 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [56:295:2241] 2026-01-04T02:32:28.362677Z node 56 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [56:296:2241] 2026-01-04T02:32:28.372295Z node 56 :PERSQUEUE INFO: partition_init.cpp:1016: 
[rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:32:28.372863Z node 56 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:32:28.373466Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [56:295:2241] 2026-01-04T02:32:28.373742Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [56:296:2241] 2026-01-04T02:32:28.381689Z node 56 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 2 suffix '63' size 1048786 2026-01-04T02:32:28.384725Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [56:246:2241] sender: [56:324:2057] recipient: [56:14:2061] Got start offset = 0 >> UpsertLoad::ShouldDropCreateTable [GOOD] |87.0%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest |87.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertKeyFrom [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:21.876568Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:22.013633Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:22.014075Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:22.014133Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:22.317380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:22.317482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:22.411939Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493938822386 != 1767493938822389 2026-01-04T02:32:22.431225Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:22.479959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:22.582178Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:22.912718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:22.926450Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:23.030616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:23.310354Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertBulkStart { RowCount: 100 Inflight: 3 BatchSize: 7 } 2026-01-04T02:32:23.310537Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 BatchSize: 7 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-01-04T02:32:23.383134Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor finished in 0.072308s, errors=0 2026-01-04T02:32:23.383247Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 ... 
waiting for SysViewsRoster update finished 2026-01-04T02:32:26.803621Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:26.809559Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:26.813680Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:26.813945Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:26.814117Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:27.030600Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:27.030715Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:27.058656Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767493943891422 != 1767493943891426 2026-01-04T02:32:27.064067Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:27.110894Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:27.199780Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:27.547263Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:27.562833Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:27.668703Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:27.916464Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertBulkStart { RowCount: 10 Inflight: 3 KeyFrom: 12345 } 2026-01-04T02:32:27.916611Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 KeyFrom: 12345 with type# 0, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-01-04T02:32:27.975270Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor finished in 0.058246s, errors=0 2026-01-04T02:32:27.975385Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:953:2811] with tag# 2 |87.0%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldWriteDataBulkUpsertLocalMkql2 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:21.642131Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:21.739507Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:21.739870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:21.739915Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:21.996861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:21.996989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:22.077982Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493938682943 != 1767493938682946 2026-01-04T02:32:22.089978Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:22.144679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:22.255317Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:22.531464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:22.545268Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:22.650842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:22.964096Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "usertable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2026-01-04T02:32:22.964232Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 38 TableName: "usertable" 2026-01-04T02:32:23.052198Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:952:2810], subTag: 2} TUpsertActor finished in 0.087623s, errors=0 2026-01-04T02:32:23.052324Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:953:2811] with tag# 2 ... 
waiting for SysViewsRoster update finished 2026-01-04T02:32:26.568604Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:26.575671Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:26.580647Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:26.580930Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:26.581116Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:26.839012Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:26.839166Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:26.863291Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767493943495529 != 1767493943495533 2026-01-04T02:32:26.868386Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:26.912344Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:26.983418Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:27.328802Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:27.342039Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:27.448501Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:27.734609Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertLocalMkqlStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" } UpsertLocalMkqlStart { RowCount: 10 Inflight: 3 } 2026-01-04T02:32:27.734756Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 1, target# TabletId: 72075186224037888 TableId: 38 TableName: "JustTable" 2026-01-04T02:32:27.853266Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor finished in 0.118118s, errors=0 2026-01-04T02:32:27.853372Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:953:2811] with tag# 2 |87.0%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest >> 
PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions |87.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |87.0%| [LD] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut |87.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/security/ldap_auth_provider/ut/ydb-core-security-ldap_auth_provider-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::CreateQuery_Without_Connection [GOOD] Test command err: 2026-01-04T02:31:14.120477Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328387601654414:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:14.120530Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0104 02:31:15.109674307 494206 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:31:15.109817254 494206 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:31:15.142765Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:15.162278Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:15.309761Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:15.477084Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:21551 2026-01-04T02:31:15.477221Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:15.558734Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:15.561994Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:15.562111Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:31:15.593683Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:15.598261Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:15.663813Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:15.663907Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:15.664010Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:15.668448Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:15.668526Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:15.672574Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:15.705944Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:15.723998Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:15.724065Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:15.724116Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:15.727117Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:15.810307Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:15.854770Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:15.934000Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:16.011703Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:16.074134Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:16.075220Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:16.075897Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:16.222306Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:16.225054Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:16.225449Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:21551: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:21551 } ] 2026-01-04T02:31:16.271776Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/y ... 6715731, task: 1, CA Id [1:7591328693859856997:2886]. Add point to new shardId: 72075186224037892 2026-01-04T02:32:25.367540Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:733: TxId: 281474976715731, task: 1, CA Id [1:7591328693859856997:2886]. Pending shards States: TShardState{ TabletId: 72075186224037892, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://folder_id_WTF, String : utqudof4vv1ljb3msdc5)], RetryAttempt: 0, ResolveAttempt: 0 }; In Flight shards States: TShardState{ TabletId: 0, Last Key , Ranges: [], Points: [# 0: (String : yandexcloud://folder_id_WTF, String : utqudof4vv1ljb3msdc5)], RetryAttempt: 0, ResolveAttempt: 1 }; 2026-01-04T02:32:25.367553Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715731, task: 1, CA Id [1:7591328693859856997:2886]. effective maxinflight 1024 sorted 0 2026-01-04T02:32:25.367564Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:472: TxId: 281474976715731, task: 1, CA Id [1:7591328693859856997:2886]. BEFORE: 1.0 2026-01-04T02:32:25.367604Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:908: TxId: 281474976715731, task: 1, CA Id [1:7591328693859856997:2886]. Send EvRead to shardId: 72075186224037892, tablePath: Root/yq/queries, ranges: , limit: (empty maybe), readId = 0, reverse = 0, snapshot = (txid=0,step=0), lockTxId = 0, lockNodeId = 0 2026-01-04T02:32:25.367637Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:486: TxId: 281474976715731, task: 1, CA Id [1:7591328693859856997:2886]. AFTER: 0.1 2026-01-04T02:32:25.367650Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715731, task: 1, CA Id [1:7591328693859856997:2886]. Scheduled table scans, in flight: 1 shards. pending shards to read: 0, 2026-01-04T02:32:25.368481Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715731, task: 1, CA Id [1:7591328693859856997:2886]. Recv TEvReadResult from ShardID=72075186224037892, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2026-01-04T02:32:25.368494Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715731, task: 1, CA Id [1:7591328693859856997:2886]. Taken 0 locks 2026-01-04T02:32:25.368506Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715731, task: 1, CA Id [1:7591328693859856997:2886]. new data for read #0 seqno = 1 finished = 1 2026-01-04T02:32:25.368531Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7591328693859856997:2886], TxId: 281474976715731, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3djhxeb7b44zhzp7btga9x. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM0MTcyNC02MTIzYzc3Ni1jOWM4NmNhNC1lZTYzMDdjZQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-04T02:32:25.368549Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7591328693859856997:2886], TxId: 281474976715731, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3djhxeb7b44zhzp7btga9x. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM0MTcyNC02MTIzYzc3Ni1jOWM4NmNhNC1lZTYzMDdjZQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. 
CA StateFunc 271646922 2026-01-04T02:32:25.368565Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715731, task: 1, CA Id [1:7591328693859856997:2886]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-04T02:32:25.368585Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715731, task: 1, CA Id [1:7591328693859856997:2886]. enter pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-04T02:32:25.368613Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715731, task: 1, CA Id [1:7591328693859856997:2886]. exit pack cells method shardId: 72075186224037892 processedRows: 0 packed rows: 1 freeSpace: 8388572 2026-01-04T02:32:25.368630Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715731, task: 1, CA Id [1:7591328693859856997:2886]. returned 1 rows; processed 1 rows 2026-01-04T02:32:25.368665Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715731, task: 1, CA Id [1:7591328693859856997:2886]. dropping batch for read #0 2026-01-04T02:32:25.368677Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715731, task: 1, CA Id [1:7591328693859856997:2886]. effective maxinflight 1024 sorted 0 2026-01-04T02:32:25.368689Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715731, task: 1, CA Id [1:7591328693859856997:2886]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-04T02:32:25.368706Z node 1 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715731, task: 1, CA Id [1:7591328693859856997:2886]. returned async data processed rows 1 left freeSpace 8388572 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-04T02:32:25.368846Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [1:7591328693859856997:2886], TxId: 281474976715731, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3djhxeb7b44zhzp7btga9x. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM0MTcyNC02MTIzYzc3Ni1jOWM4NmNhNC1lZTYzMDdjZQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-04T02:32:25.368863Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7591328693859856997:2886], TxId: 281474976715731, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3djhxeb7b44zhzp7btga9x. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM0MTcyNC02MTIzYzc3Ni1jOWM4NmNhNC1lZTYzMDdjZQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:25.368903Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976715731, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2026-01-04T02:32:25.368922Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7591328693859856998:2887], TxId: 281474976715731, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3djhxeb7b44zhzp7btga9x. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM0MTcyNC02MTIzYzc3Ni1jOWM4NmNhNC1lZTYzMDdjZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. 
CA StateFunc 271646923 2026-01-04T02:32:25.368949Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976715731, task: 2. Finish input channelId: 1, from: [1:7591328693859856997:2886] 2026-01-04T02:32:25.368986Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7591328693859856998:2887], TxId: 281474976715731, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3djhxeb7b44zhzp7btga9x. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM0MTcyNC02MTIzYzc3Ni1jOWM4NmNhNC1lZTYzMDdjZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-01-04T02:32:25.369128Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [1:7591328693859856998:2887], TxId: 281474976715731, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3djhxeb7b44zhzp7btga9x. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM0MTcyNC02MTIzYzc3Ni1jOWM4NmNhNC1lZTYzMDdjZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-04T02:32:25.369144Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7591328693859856997:2886], TxId: 281474976715731, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3djhxeb7b44zhzp7btga9x. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM0MTcyNC02MTIzYzc3Ni1jOWM4NmNhNC1lZTYzMDdjZQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646927 2026-01-04T02:32:25.369166Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7591328693859856997:2886], TxId: 281474976715731, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3djhxeb7b44zhzp7btga9x. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM0MTcyNC02MTIzYzc3Ni1jOWM4NmNhNC1lZTYzMDdjZQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:25.369185Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715731, task: 1. Tasks execution finished 2026-01-04T02:32:25.369199Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [1:7591328693859856997:2886], TxId: 281474976715731, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3djhxeb7b44zhzp7btga9x. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM0MTcyNC02MTIzYzc3Ni1jOWM4NmNhNC1lZTYzMDdjZQ==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-04T02:32:25.369306Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715731, task: 1. pass away 2026-01-04T02:32:25.369395Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715731;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-04T02:32:25.369771Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7591328693859856998:2887], TxId: 281474976715731, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3djhxeb7b44zhzp7btga9x. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM0MTcyNC02MTIzYzc3Ni1jOWM4NmNhNC1lZTYzMDdjZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646922 2026-01-04T02:32:25.369853Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715731, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2026-01-04T02:32:25.369866Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715731, task: 2. Tasks execution finished 2026-01-04T02:32:25.369878Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [1:7591328693859856998:2887], TxId: 281474976715731, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3djhxeb7b44zhzp7btga9x. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=MmM0MTcyNC02MTIzYzc3Ni1jOWM4NmNhNC1lZTYzMDdjZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Compute state finished. All channels and sinks finished 2026-01-04T02:32:25.369933Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715731, task: 2. pass away 2026-01-04T02:32:25.369983Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715731;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-04T02:32:25.521862Z node 1 :FQ_PENDING_FETCHER ERROR: pending_fetcher.cpp:259: Error with GetTask:
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv6:%5B::%5D:8190: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint [::]:8190 [good] Yq_1::CreateQuery_Without_Connection |87.0%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> ReadLoad::ShouldReadKqp [GOOD] >> ReadLoad::ShouldReadKqpMoreThanRows |87.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |87.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning |87.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/ydb-core-tx-schemeshard-ut_background_cleaning ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> UpsertLoad::ShouldDropCreateTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:21.639556Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:21.775645Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:21.776110Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:21.776191Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:22.070233Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:22.070354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:22.140168Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493938637954 != 1767493938637957 2026-01-04T02:32:22.161865Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:22.210243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:22.314716Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:22.610797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:22.624626Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:22.739665Z node 1 :DS_LOAD_TEST NOTICE: test_load_actor.cpp:194: TLoad# 0 creates table# BrandNewTable in dir# /Root 2026-01-04T02:32:22.940437Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:864:2747], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.940614Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.940963Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:882:2752], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.941091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.956458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:23.263857Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# BrandNewTable in dir# /Root with rows# 10 2026-01-04T02:32:23.265712Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:861:2744], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "BrandNewTable" 2026-01-04T02:32:23.278050Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:861:2744], subTag: 1} TUpsertActor finished in 0.011994s, errors=0 2026-01-04T02:32:23.278374Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "BrandNewTable" CreateTable: true MinParts: 11 MaxParts: 13 MaxPartSizeMb: 1234 } TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "BrandNewTable" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2026-01-04T02:32:23.278593Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:861:2744], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "BrandNewTable" 2026-01-04T02:32:23.335357Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:861:2744], subTag: 3} TUpsertActor finished in 0.056464s, errors=0 2026-01-04T02:32:23.335467Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [1:971:2822] with tag# 3 ... waiting for SysViewsRoster update finished 2026-01-04T02:32:27.051273Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:27.056647Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:27.060456Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:27.060768Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:27.060944Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:27.361763Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:27.362001Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:27.387825Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767493943898843 != 1767493943898847 2026-01-04T02:32:27.393934Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:27.446241Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:27.529513Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:27.838437Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:27.851810Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:27.959102Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:28.188906Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 1, proto# NotifyWhenFinished: true TargetShard { TabletId: 72075186224037888 TableId: 38 } UpsertBulkStart { RowCount: 100 Inflight: 3 } 2026-01-04T02:32:28.189077Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 3 with type# 0, target# TabletId: 72075186224037888 TableId: 38 2026-01-04T02:32:28.608737Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:952:2810], subTag: 2} TUpsertActor finished in 0.419294s, errors=0 2026-01-04T02:32:28.608824Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:953:2811] with tag# 2 2026-01-04T02:32:28.614508Z node 2 :DS_LOAD_TEST NOTICE: test_load_actor.cpp:174: TLoad# 0 drops table# table in dir# /Root 2026-01-04T02:32:28.629655Z node 2 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:994:2852], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:28.629754Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:28.630121Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1005:2856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:28.630175Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:28.786705Z node 2 :DS_LOAD_TEST NOTICE: test_load_actor.cpp:194: TLoad# 0 creates table# table in dir# /Root 2026-01-04T02:32:28.802144Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1045:2895], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:28.802278Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:28.802637Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1049:2898], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:28.802678Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:28.811318Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:28.858376Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-04T02:32:29.020541Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# table in dir# /Root with rows# 10 2026-01-04T02:32:29.020878Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:991:2849], subTag: 1} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037889 TableId: 39 WorkingDir: "/Root" TableName: "table" 2026-01-04T02:32:29.032786Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:991:2849], subTag: 1} TUpsertActor finished in 0.011612s, errors=0 2026-01-04T02:32:29.033056Z node 2 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kUpsertBulkStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "table" DropTable: true } TargetShard { TabletId: 72075186224037889 TableId: 39 WorkingDir: "/Root" TableName: "table" } UpsertBulkStart { RowCount: 10 Inflight: 3 } 2026-01-04T02:32:29.033157Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [2:991:2849], subTag: 3} TUpsertActor Bootstrap called: RowCount: 10 Inflight: 3 with type# 0, target# TabletId: 72075186224037889 TableId: 39 WorkingDir: "/Root" TableName: "table" 2026-01-04T02:32:29.090987Z node 2 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [2:991:2849], subTag: 3} TUpsertActor finished in 0.057574s, errors=0 2026-01-04T02:32:29.091080Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:1138:2969] with tag# 3 |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed >> TExportToS3Tests::ShouldSucceedOnSingleShardTable >> TExportToS3Tests::IndexMaterializationDisabled >> TExportToS3Tests::ShouldSucceedOnManyTables >> TSchemeShardExportToFsTests::ShouldSucceedCreateExportToFs >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed >> TExportToS3Tests::DropSourceTableBeforeTransferring >> TExportToS3Tests::UidAsIdempotencyKey >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings >> TExportToS3Tests::RebootDuringCompletion |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoTables >> BasicStatistics::SimpleGlobalIndex >> TExportToS3Tests::DropCopiesBeforeTransferring1 >> ColumnStatistics::CountMinSketchStatistics >> TExportToS3Tests::IndexMaterializationDisabled [GOOD] >> TExportToS3Tests::CancelUponCreatingExportDirShouldSucceed [GOOD] |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed >> 
TSchemeShardExportToFsTests::ShouldSucceedCreateExportToFs [GOOD] >> TSchemeShardExportToFsTests::ShouldFailOnNonExistentPath >> TExportToS3Tests::ShouldSucceedOnSingleShardTable [GOOD] >> TExportToS3Tests::IndexMaterialization >> TExportToS3Tests::UidAsIdempotencyKey [GOOD] >> TExportToS3Tests::DropSourceTableBeforeTransferring [GOOD] >> TExportToS3Tests::ShouldOmitNonStrictStorageSettings [GOOD] >> TPQTest::TestPartitionedBlobFails [GOOD] >> TPQTest::TestPQSmallRead >> TExportToS3Tests::ShouldSucceedOnMultiShardTable >> TExportToS3Tests::RebootDuringCompletion [GOOD] >> TExportToS3Tests::UserSID >> TExportToS3Tests::ExportStartTime >> TExportToS3Tests::ShouldExcludeBackupTableFromStats >> TExportToS3Tests::ShouldSucceedOnManyTables [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart+UseSink [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink >> TExportToS3Tests::RebootDuringAbortion >> HttpRequest::ProbeServerless |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |87.1%| [LD] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/ut_blobstorage-ut_read_only_vdisk >> TExportToS3Tests::ShouldPreserveIncrBackupFlag >> TSchemeShardExportToFsTests::ShouldFailOnNonExistentPath [GOOD] >> TSchemeShardExportToFsTests::ShouldFailOnDeletedPath >> TExportToS3Tests::DropCopiesBeforeTransferring1 [GOOD] |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |87.1%| [LD] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/fq/libs/actors/ut/ydb-core-fq-libs-actors-ut >> TExportToS3Tests::DropCopiesBeforeTransferring2 >> TExportToS3Tests::UserSID [GOOD] >> TExportToS3Tests::IndexMaterialization [GOOD] >> TExportToS3Tests::ExportStartTime [GOOD] >> TExportToS3Tests::TransferExportNoConnString >> TExportToS3Tests::ShouldSucceedOnMultiShardTable [GOOD] >> TExportToS3Tests::ExportPartitioningSettings >> TExportToS3Tests::IndexMaterializationGlobal >> TExportToS3Tests::RebootDuringAbortion [GOOD] >> BasicStatistics::Simple >> TExportToS3Tests::TablePermissions >> TExportToS3Tests::ShouldPreserveIncrBackupFlag [GOOD] >> TExportToS3Tests::ReplicationExportWithStaticCredentials >> TSchemeShardExportToFsTests::ShouldFailOnDeletedPath [GOOD] >> TSchemeShardExportToFsTests::ShouldExportDataAndSchemaToFs >> TExportToS3Tests::ShouldRestartOnScanErrors >> TExportToS3Tests::DropCopiesBeforeTransferring2 [GOOD] >> TExportToS3Tests::CorruptedDyNumber >> TExportToS3Tests::TransferExportNoConnString [GOOD] >> TExportToS3Tests::ExportPartitioningSettings [GOOD] >> TExportToS3Tests::ReplicationExportWithStaticCredentials [GOOD] >> HttpRequest::Status [GOOD] >> TExportToS3Tests::IndexMaterializationGlobal [GOOD] >> TExportToS3Tests::TransferExportWithConnString >> TExportToS3Tests::TablePermissions [GOOD] >> TExportToS3Tests::ExportIndexTablePartitioningSettings >> TExportToS3Tests::CancelUponTransferringSingleShardTableShouldSucceed [GOOD] >> TExportToS3Tests::IndexMaterializationGlobalAsync >> TExportToS3Tests::ReplicationExportWithOAuthCredentials >> Yq_1::Basic_EmptyDict [GOOD] >> TExportToS3Tests::TopicExport >> TExportToS3Tests::CancelUponTransferringSingleTableShouldSucceed [GOOD] >> TExportToS3Tests::CheckItemProgress >> 
TSchemeShardExportToFsTests::ShouldExportDataAndSchemaToFs [GOOD] >> TSchemeShardExportToFsTests::ShouldExportMultipleTablesWithData >> TExportToS3Tests::CorruptedDyNumber [GOOD] >> TExportToS3Tests::CompletedExportEndTime |87.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |87.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant |87.1%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/ydb-core-tx-tx_proxy-ut_ext_tenant >> TExportToS3Tests::ShouldRestartOnScanErrors [GOOD] >> TExportToS3Tests::TransferExportWithConnString [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Status [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:22.441189Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:22.552443Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:22.560431Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:22.560826Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:22.560972Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:23.036246Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:23.142452Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:23.142700Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:23.197754Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:23.299949Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:24.105666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:24.106916Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:24.106966Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:24.107001Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:24.107387Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:24.177536Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:24.856083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:32:28.447203Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:28.454732Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:32:28.459516Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:28.489634Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:32:28.489744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:28.518105Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:28.520103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:28.700364Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:28.701201Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:28.701962Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:28.702983Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:28.703253Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:28.703531Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:28.703731Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:28.703943Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:28.704050Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:28.876428Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:32:28.888247Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:28.888356Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:28.901564Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:29.070089Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:29.132081Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:32:29.132192Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:32:29.164582Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:32:29.164941Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:32:29.165299Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:32:29.165374Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:32:29.165432Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:32:29.165510Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:32:29.165583Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:32:29.165648Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:32:29.166488Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:32:29.319060Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2223:2620] 2026-01-04T02:32:29.319507Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:32:29.321381Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Describe result: PathErrorUnknown 2026-01-04T02:32:29.321423Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Creating table 2026-01-04T02:32:29.321489Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:29.324388Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2627], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:29.328390Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2234:2625] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:32:29.332657Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:32:29.403414Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:32:29.422090Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:29.422210Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2302:2660], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:29.438294Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2328:2683] 2026-01-04T02:32:29.439498Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2328:2683], schemeshard id = 72075186224037897 2026-01-04T02:32:29.827766Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:29.830212Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2594:2886], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:29.833950Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:29.889636Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:32:29.889795Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on create table tx: 28 ... ;this=137049039210016;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3008;max=18446744073709551615;plan=0;src=[2:1933:2446];cookie=131:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.293162Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;self_id=[2:2937:3086];ev=NActors::IEventHandle;tablet_id=72075186224037902;tx_id=281474976715659;this=137049086545472;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3009;max=18446744073709551615;plan=0;src=[2:1933:2446];cookie=151:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.293745Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;self_id=[2:2946:3091];ev=NActors::IEventHandle;tablet_id=72075186224037904;tx_id=281474976715659;this=137049086544128;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3009;max=18446744073709551615;plan=0;src=[2:1933:2446];cookie=171:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.294246Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[2:2947:3092];ev=NActors::IEventHandle;tablet_id=72075186224037903;tx_id=281474976715659;this=137049086542336;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3009;max=18446744073709551615;plan=0;src=[2:1933:2446];cookie=161:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.294705Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;self_id=[2:2956:3096];ev=NActors::IEventHandle;tablet_id=72075186224037901;tx_id=281474976715659;this=137049086541216;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3009;max=18446744073709551615;plan=0;src=[2:1933:2446];cookie=141:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.319624Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037905;self_id=[2:2962:3100];ev=NActors::IEventHandle;tablet_id=72075186224037905;tx_id=281474976715659;this=137049086536288;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3010;max=18446744073709551615;plan=0;src=[2:1933:2446];cookie=181:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.320278Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;self_id=[2:2963:3101];ev=NActors::IEventHandle;tablet_id=72075186224037906;tx_id=281474976715659;this=137049086535168;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3010;max=18446744073709551615;plan=0;src=[2:1933:2446];cookie=191:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.320758Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;self_id=[2:2964:3102];ev=NActors::IEventHandle;tablet_id=72075186224037908;tx_id=281474976715659;this=137049086534048;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3010;max=18446744073709551615;plan=0;src=[2:1933:2446];cookie=211:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.355633Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;self_id=[2:2970:3107];ev=NActors::IEventHandle;tablet_id=72075186224037907;tx_id=281474976715659;this=137049086530464;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3011;max=18446744073709551615;plan=0;src=[2:1933:2446];cookie=201:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.503791Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.503947Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.504003Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.536507Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.536603Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.536642Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.547732Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.547824Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.547865Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.556028Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.556124Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.556177Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.566524Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.566593Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.566623Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.572668Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.572739Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.572763Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.580469Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.580532Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.580560Z node 2 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.585937Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.586052Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.586095Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.594190Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.594278Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.594314Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.602993Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.603090Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:32.603127Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; waiting actualization: 0/0.000019s 2026-01-04T02:32:35.017581Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3818:3751] 2026-01-04T02:32:35.035577Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:505: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. 
Status STATUS_NO_OPERATION Answer: 'No analyze operation' |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::ReplicationExportWithOAuthCredentials [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs >> TExportToS3Tests::TransferExportWithConsumer >> TExportToS3Tests::ExportIndexTablePartitioningSettings [GOOD] >> TExportToS3Tests::TopicExport [GOOD] >> TExportToS3Tests::ReplicationExportMultipleItems >> TExportToS3Tests::IndexMaterializationGlobalAsync [GOOD] >> TExportToS3Tests::CancelUponCopyingTablesShouldSucceed [GOOD] >> TExportToS3Tests::TopicWithPermissionsExport >> TExportToS3Tests::EnableChecksumsPersistance >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTree >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed ------- [TM] {asan, default-linux-x86_64, release} ydb/services/fq/ut_integration/unittest >> Yq_1::Basic_EmptyDict [GOOD] Test command err: 2026-01-04T02:31:13.569777Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328383724385304:2240];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:13.569855Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; E0104 02:31:14.318277264 493652 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:31:14.318399046 493652 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:31:14.306986Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:14.522436Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:14.594068Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:14.644805Z node 1 :YQL_NODES_MANAGER ERROR: nodes_manager.cpp:395: ydb/core/fq/libs/actors/nodes_manager.cpp:353: TRANSPORT_UNAVAILABLE
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:25388 2026-01-04T02:31:14.711127Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:14.711222Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:14.734654Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:14.737530Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:14.742792Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:14.742874Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:14.772191Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:14.772263Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/idempotency_keys". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:14.817791Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:14.819521Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:14.831217Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/compute_databases". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:14.853030Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/mappings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:14.854187Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:14.854293Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/quotas". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:14.854946Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenant_acks". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:14.870132Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:31:14.870190Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:15.000872Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:15.026541Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:15.031052Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/jobs". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:15.046977Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/tenants". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:15.105642Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/connections". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:15.120104Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/nodes". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:15.138716Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/queries". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:15.154721Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/result_sets". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:15.175394Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/bindings". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:15.202511Z node 1 :YQ_CONTROL_PLANE_STORAGE WARN: schema.cpp:297: Create table "Root/yq/pending_small". Create session error: [ {
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:25388: Failed to connect to remote host: Connection refused } {
: Error: Grpc error response on endpoint localhost:25388 } ] 2026-01-04T02:31:15.202650Z node 1 :YQ_CONTROL_PLANE_STORAGE ERROR: schema.cpp:160: Create directory "Root/yq" erro ... UG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7591328736534266754:2934], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3djvqv7cqpdr8mmr6hhhbn. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NzNkOGExOC1lOGRlNDEzZC05ZTRkNDZlMy02MjdjNmExYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646926 2026-01-04T02:32:35.328999Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:1110: SelfId: [7:7591328736534266754:2934], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3djvqv7cqpdr8mmr6hhhbn. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NzNkOGExOC1lOGRlNDEzZC05ZTRkNDZlMy02MjdjNmExYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Received channels info: Update { Id: 1 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 1 DstTaskId: 2 SrcEndpoint { ActorId { RawX1: 7591328736534266753 RawX2: 4503629692144501 } } DstEndpoint { ActorId { RawX1: 7591328736534266754 RawX2: 4503629692144502 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED DstStageId: 1 } Update { Id: 2 TransportVersion: DATA_TRANSPORT_OOB_PICKLE_1_0 SrcTaskId: 2 SrcEndpoint { ActorId { RawX1: 7591328736534266754 RawX2: 4503629692144502 } } DstEndpoint { ActorId { RawX1: 7591328736534266749 RawX2: 4503629692143904 } } InMemory: true WatermarksMode: WATERMARKS_MODE_DISABLED } 2026-01-04T02:32:35.329046Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7591328736534266754:2934], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3djvqv7cqpdr8mmr6hhhbn. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NzNkOGExOC1lOGRlNDEzZC05ZTRkNDZlMy02MjdjNmExYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:35.329383Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:987: TxId: 281474976715735, task: 1, CA Id [7:7591328736534266753:2933]. Recv TEvReadResult from ShardID=72075186224037899, ReadId=0, Status=SUCCESS, Finished=1, RowCount=1, TxLocks= , BrokenTxLocks= 2026-01-04T02:32:35.329404Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1079: TxId: 281474976715735, task: 1, CA Id [7:7591328736534266753:2933]. Taken 0 locks 2026-01-04T02:32:35.329423Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1094: TxId: 281474976715735, task: 1, CA Id [7:7591328736534266753:2933]. new data for read #0 seqno = 1 finished = 1 2026-01-04T02:32:35.329452Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7591328736534266753:2933], TxId: 281474976715735, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3djvqv7cqpdr8mmr6hhhbn. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NzNkOGExOC1lOGRlNDEzZC05ZTRkNDZlMy02MjdjNmExYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 276037645 2026-01-04T02:32:35.329482Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7591328736534266753:2933], TxId: 281474976715735, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3djvqv7cqpdr8mmr6hhhbn. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. 
SessionId : ydb://session/3?node_id=7&id=NzNkOGExOC1lOGRlNDEzZC05ZTRkNDZlMy02MjdjNmExYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:35.329509Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1362: TxId: 281474976715735, task: 1, CA Id [7:7591328736534266753:2933]. enter getasyncinputdata results size 1, freeSpace 8388608 2026-01-04T02:32:35.329538Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1258: TxId: 281474976715735, task: 1, CA Id [7:7591328736534266753:2933]. enter pack cells method shardId: 72075186224037899 processedRows: 0 packed rows: 0 freeSpace: 8388608 2026-01-04T02:32:35.329575Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1339: TxId: 281474976715735, task: 1, CA Id [7:7591328736534266753:2933]. exit pack cells method shardId: 72075186224037899 processedRows: 0 packed rows: 1 freeSpace: 8388572 2026-01-04T02:32:35.329599Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1401: TxId: 281474976715735, task: 1, CA Id [7:7591328736534266753:2933]. returned 1 rows; processed 1 rows 2026-01-04T02:32:35.329650Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1441: TxId: 281474976715735, task: 1, CA Id [7:7591328736534266753:2933]. dropping batch for read #0 2026-01-04T02:32:35.329666Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:468: TxId: 281474976715735, task: 1, CA Id [7:7591328736534266753:2933]. effective maxinflight 1024 sorted 0 2026-01-04T02:32:35.329681Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:490: TxId: 281474976715735, task: 1, CA Id [7:7591328736534266753:2933]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, 2026-01-04T02:32:35.329707Z node 7 :KQP_COMPUTE DEBUG: kqp_read_actor.cpp:1466: TxId: 281474976715735, task: 1, CA Id [7:7591328736534266753:2933]. returned async data processed rows 1 left freeSpace 8388572 received rows 1 running reads 0 pending shards 0 finished = 1 has limit 0 limit reached 0 2026-01-04T02:32:35.329993Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [7:7591328736534266753:2933], TxId: 281474976715735, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3djvqv7cqpdr8mmr6hhhbn. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NzNkOGExOC1lOGRlNDEzZC05ZTRkNDZlMy02MjdjNmExYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-04T02:32:35.330021Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7591328736534266753:2933], TxId: 281474976715735, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3djvqv7cqpdr8mmr6hhhbn. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NzNkOGExOC1lOGRlNDEzZC05ZTRkNDZlMy02MjdjNmExYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:35.330074Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976715735, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2026-01-04T02:32:35.330096Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7591328736534266754:2934], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3djvqv7cqpdr8mmr6hhhbn. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NzNkOGExOC1lOGRlNDEzZC05ZTRkNDZlMy02MjdjNmExYw==. 
CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646923 2026-01-04T02:32:35.330132Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976715735, task: 2. Finish input channelId: 1, from: [7:7591328736534266753:2933] 2026-01-04T02:32:35.330180Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7591328736534266754:2934], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3djvqv7cqpdr8mmr6hhhbn. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NzNkOGExOC1lOGRlNDEzZC05ZTRkNDZlMy02MjdjNmExYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:35.330344Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [7:7591328736534266754:2934], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3djvqv7cqpdr8mmr6hhhbn. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NzNkOGExOC1lOGRlNDEzZC05ZTRkNDZlMy02MjdjNmExYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-04T02:32:35.330362Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7591328736534266753:2933], TxId: 281474976715735, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3djvqv7cqpdr8mmr6hhhbn. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NzNkOGExOC1lOGRlNDEzZC05ZTRkNDZlMy02MjdjNmExYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646927 2026-01-04T02:32:35.330399Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7591328736534266753:2933], TxId: 281474976715735, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3djvqv7cqpdr8mmr6hhhbn. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NzNkOGExOC1lOGRlNDEzZC05ZTRkNDZlMy02MjdjNmExYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:35.330430Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715735, task: 1. Tasks execution finished 2026-01-04T02:32:35.330449Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [7:7591328736534266753:2933], TxId: 281474976715735, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3djvqv7cqpdr8mmr6hhhbn. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NzNkOGExOC1lOGRlNDEzZC05ZTRkNDZlMy02MjdjNmExYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-04T02:32:35.330589Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715735, task: 1. pass away 2026-01-04T02:32:35.330702Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715735;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-04T02:32:35.330941Z node 7 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [7:7591328736534266754:2934], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3djvqv7cqpdr8mmr6hhhbn. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NzNkOGExOC1lOGRlNDEzZC05ZTRkNDZlMy02MjdjNmExYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:32:35.331022Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976715735, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2026-01-04T02:32:35.331040Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976715735, task: 2. Tasks execution finished 2026-01-04T02:32:35.331056Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [7:7591328736534266754:2934], TxId: 281474976715735, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3djvqv7cqpdr8mmr6hhhbn. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=7&id=NzNkOGExOC1lOGRlNDEzZC05ZTRkNDZlMy02MjdjNmExYw==. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-04T02:32:35.331171Z node 7 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715735, task: 2. pass away 2026-01-04T02:32:35.331269Z node 7 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715735;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-04T02:32:35.487464Z node 7 :FQ_PENDING_FETCHER ERROR: pending_fetcher.cpp:259: Error with GetTask:
: Error: Client is stopped |87.1%| [TM] {BAZEL_UPLOAD} ydb/services/fq/ut_integration/unittest >> TExportToS3Tests::CheckItemProgress [GOOD] >> TExportToS3Tests::ReplicationExportMultipleItems [GOOD] >> BasicUsage::CreateTopicWithAvailabilityPeriod [GOOD] >> BasicUsage::CreateTopicWithCustomMetricsLevel >> TExportToS3Tests::CancelledExportEndTime >> TExportToS3Tests::TopicWithPermissionsExport [GOOD] >> TExportToS3Tests::ReplicationExportGlobalConsistency >> TExportToS3Tests::TransferExportWithConsumer [GOOD] >> TExportToS3Tests::ShouldSucceedOnConcurrentTxs [GOOD] >> TSchemeShardExportToFsTests::ShouldExportMultipleTablesWithData [GOOD] >> TSchemeShardExportToFsTests::ShouldExportWithCompressionToFs >> TExportToS3Tests::TopicsExport >> TExportToS3Tests::ShouldSucceedOnConcurrentExport >> TSchemeShardExportToFsTests::ShouldAcceptCompressionForFs |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::EnableChecksumsPersistance [GOOD] |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/scenario/py3test >> test_alter_compression.py::TestAlterCompression::test_multi[alter_compression] [GOOD] >> HttpRequest::Analyze [GOOD] >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTree [GOOD] >> TExportToS3Tests::EncryptedExport >> TExportToS3Tests::ReplicationExportGlobalConsistency [GOOD] |87.1%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/scenario/py3test >> TExportToS3Tests::ReplicatedTableExport >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTreePrefix |87.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |87.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::CancelledExportEndTime [GOOD] >> TSchemeShardExportToFsTests::ShouldAcceptCompressionForFs [GOOD] >> TSchemeShardExportToFsTests::FsExportWithMultipleTables >> TExportToS3Tests::TopicsExport [GOOD] >> TExportToS3Tests::Checksums >> TExportToS3Tests::TopicsWithPermissionsExport >> TSchemeShardExportToFsTests::ShouldExportWithCompressionToFs [GOOD] |87.1%| [TA] $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/column_family/compression/py3test >> alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_17_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 17] [GOOD] 2026-01-04 02:32:40,422 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-04 02:32:40,562 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 387363 625M 620M 616M ydb-tests-olap-column_family-compression --basetemp /home/runner/.ya/build/build_root/c5zo/0035db/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor Test command err: Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...-doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/c5zo/0035db/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/chunk2/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/c5zo/0035db', '--source-root', '/home/runner/.ya/build/build_root/c5zo/0035db/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/c5zo/0035db/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/10669990147/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '10', '--modulo-index', '2', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...-doctest-modules', '--ya-trace', '/home/runner/.ya/build/build_root/c5zo/0035db/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/chunk2/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/c5zo/0035db', '--source-root', '/home/runner/.ya/build/build_root/c5zo/0035db/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/c5zo/0035db/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/chunk2/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/olap/column_family/compression', '--test-tool-bin', '/home/runner/.ya/tools/v4/10669990147/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '10', '--modulo-index', '2', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/olap/column_family/compression', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', 
'--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address']' stopped by 600 seconds timeout",), {}) 2026-01-04 02:32:41,270 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2026-01-04 02:32:41,286 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores |87.1%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/column_family/compression/py3test |87.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] >> TExportToS3Tests::ReplicatedTableExport [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Analyze [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:27.896989Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:28.008765Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:28.015705Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:28.016036Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:28.016169Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:28.433685Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:28.528462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:28.528620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:28.577228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:28.677136Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:29.358215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:29.358907Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:29.358961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:29.359002Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:29.359623Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:29.426512Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:30.006904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:32:33.357321Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:33.366606Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:32:33.370786Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:33.406007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:32:33.406144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:33.438391Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:33.440839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:33.620690Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:33.620826Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:33.625145Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:33.626045Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:33.626637Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:33.627870Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:33.628190Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:33.628370Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:33.631499Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:33.632132Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:33.632311Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:33.659304Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:33.890696Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:33.921154Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:32:33.921296Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:32:33.954726Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:32:33.956279Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:32:33.956547Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:32:33.956615Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:32:33.956671Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:32:33.956736Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:32:33.956794Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:32:33.956852Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:32:33.957468Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:32:33.957841Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:32:33.960799Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:32:33.973899Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:32:33.973971Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:32:33.974061Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:33.990004Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:34.003094Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:32:34.007902Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:32:34.011968Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:34.012076Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:34.042556Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:32:34.042952Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:32:34.174775Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:32:34.214840Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:32:34.509415Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:34.511249Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:34.514734Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:34.518724Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:32:34.518875Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... =0;src=[2:1906:2454];cookie=151:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.662523Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;self_id=[2:2901:3080];ev=NActors::IEventHandle;tablet_id=72075186224037901;tx_id=281474976715659;this=136720095903456;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3008;max=18446744073709551615;plan=0;src=[2:1906:2454];cookie=141:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.662879Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;self_id=[2:2908:3084];ev=NActors::IEventHandle;tablet_id=72075186224037904;tx_id=281474976715659;this=136720095904576;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3008;max=18446744073709551615;plan=0;src=[2:1906:2454];cookie=171:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.663262Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[2:2884:3068];ev=NActors::IEventHandle;tablet_id=72075186224037903;tx_id=281474976715659;this=136720095905696;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3008;max=18446744073709551615;plan=0;src=[2:1906:2454];cookie=161:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.685644Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;self_id=[2:2913:3089];ev=NActors::IEventHandle;tablet_id=72075186224037907;tx_id=281474976715659;this=136720095910624;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3009;max=18446744073709551615;plan=0;src=[2:1906:2454];cookie=201:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.686251Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037908;self_id=[2:2916:3092];ev=NActors::IEventHandle;tablet_id=72075186224037908;tx_id=281474976715659;this=136720095911744;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3009;max=18446744073709551615;plan=0;src=[2:1906:2454];cookie=211:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.687203Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;self_id=[2:2909:3085];ev=NActors::IEventHandle;tablet_id=72075186224037905;tx_id=281474976715659;this=136720095912864;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3009;max=18446744073709551615;plan=0;src=[2:1906:2454];cookie=181:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.728591Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;self_id=[2:2922:3095];ev=NActors::IEventHandle;tablet_id=72075186224037906;tx_id=281474976715659;this=136720095915104;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=3010;max=18446744073709551615;plan=0;src=[2:1906:2454];cookie=191:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.877913Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.878082Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.878140Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.898562Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.898628Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.898654Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037900;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.904002Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.904069Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.904094Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.911134Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.911206Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.911231Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.917484Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.917580Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.917617Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037902;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.926462Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.926521Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.926548Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.931871Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.931928Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.931951Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037904;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.936712Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.936788Z node 2 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.936815Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.942032Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.942094Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.942119Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.947580Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.947683Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:32:36.947722Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; waiting actualization: 0/0.000015s 2026-01-04T02:32:38.934996Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3701:3670] 2026-01-04T02:32:38.953027Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3676:3498] , Record { OperationId: "\000\000\000\000\017\364/\351\323\371\227^\234P\230a" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Database: "" } 2026-01-04T02:32:38.953089Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: `/^Pa', DatabaseName: `', Types: 2026-01-04T02:32:38.953126Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId: `/^Pa', PathId: [OwnerId: 72075186224037897, LocalPathId: 34], ColumnTags: Answer: 'Analyze sent. 
OperationId: 00000003zm5zmx7ycqbte51631' |87.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut >> TExportToS3Tests::ShouldSucceedOnConcurrentExport [GOOD] |87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentImport >> TSchemeShardTopicSplitMergeTest::SplitAndGrowFromTopicWithOnePartition |87.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |87.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut |87.2%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/src-client-topic-ut-with_direct_read_ut |87.2%| [LD] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/ydb-public-sdk-cpp-src-client-federated_topic-ut >> TSchemeShardTopicSplitMergeTest::SetBoundsBadRange |87.2%| [TA] {RESULT} $(B)/ydb/services/fq/ut_integration/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardExportToFsTests::FsExportWithMultipleTables [GOOD] >> TExportToS3Tests::Checksums [GOOD] >> TExportToS3Tests::EncryptedExport [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithOnePartition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/load_test/ut_ycsb/unittest >> ReadLoad::ShouldReadKqpMoreThanRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:21.548128Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:21.647597Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:21.647967Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:21.648020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:21.883924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:21.884059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:21.981223Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493938475389 != 1767493938475392 2026-01-04T02:32:21.989707Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:22.030905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:22.129769Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:22.417460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:22.430836Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:22.534889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:22.838153Z node 1 :DS_LOAD_TEST INFO: test_load_actor.cpp:347: TLoad# 0 warmups table# usertable in dir# /Root with rows# 100 2026-01-04T02:32:22.840138Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:157: Id# {Tag: 0, parent: [1:952:2810], subTag: 1} TUpsertActor Bootstrap called: RowCount: 100 Inflight: 100 BatchSize: 100 with type# 0, target# TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" 2026-01-04T02:32:22.856033Z node 1 :DS_LOAD_TEST NOTICE: bulk_mkql_upsert.cpp:255: Id# {Tag: 0, parent: [1:952:2810], subTag: 1} TUpsertActor finished in 0.015546s, errors=0 2026-01-04T02:32:22.856394Z node 1 :DS_LOAD_TEST DEBUG: test_load_actor.cpp:426: TLoad# 0 created load actor of type# kReadKqpStart with tag# 2, proto# NotifyWhenFinished: true TableSetup { WorkingDir: "/Root" TableName: "usertable" } TargetShard { TabletId: 72075186224037888 TableId: 38 WorkingDir: "/Root" TableName: "usertable" } ReadKqpStart { RowCount: 100 Inflights: 10 } 
2026-01-04T02:32:22.856514Z node 1 :DS_LOAD_TEST NOTICE: kqp_select.cpp:322: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 3} Bootstrap called: RowCount: 100 Inflights: 10 2026-01-04T02:32:22.857718Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:367: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 3} will work with tablet# 72075186224037888 with ownerId# 72057594046644480 with tableId# 38 resolved for path# /Root/usertable with columnsCount# 11, keyColumnCount# 1 2026-01-04T02:32:22.857886Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:401: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 3} started fullscan actor# [1:964:2822] 2026-01-04T02:32:22.857999Z node 1 :DS_LOAD_TEST INFO: common.cpp:52: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} Bootstrap called, sample# 100 2026-01-04T02:32:22.858054Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:61: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} Connect to# 72075186224037888 called 2026-01-04T02:32:22.858362Z node 1 :DS_LOAD_TEST DEBUG: common.cpp:75: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} Handle TEvClientConnected called, Status# OK 2026-01-04T02:32:22.859236Z node 1 :DS_LOAD_TEST NOTICE: common.cpp:137: ReadIteratorScan# {Tag: 0, parent: [1:961:2819], subTag: 1} finished in 0.000762s, sampled# 100, iter finished# 1, oks# 100 2026-01-04T02:32:22.859406Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:417: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 3} received keyCount# 100 2026-01-04T02:32:22.859702Z node 1 :DS_LOAD_TEST NOTICE: kqp_select.cpp:446: TKqpSelectActorMultiSession# {Tag: 0, parent: [1:952:2810], subTag: 3} started# 10 actors each with inflight# 1 2026-01-04T02:32:22.859805Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 2} Bootstrap called 2026-01-04T02:32:22.859849Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 2} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-04T02:32:22.859905Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 3} Bootstrap called 2026-01-04T02:32:22.859934Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 3} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-04T02:32:22.859968Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 4} Bootstrap called 2026-01-04T02:32:22.860002Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 4} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-04T02:32:22.860030Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 5} Bootstrap called 2026-01-04T02:32:22.860077Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 5} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-04T02:32:22.860115Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 6} Bootstrap called 2026-01-04T02:32:22.860137Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 6} sends event for session creation to proxy: [1:8678280833929343339:121] 
2026-01-04T02:32:22.860178Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 7} Bootstrap called 2026-01-04T02:32:22.860201Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 7} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-04T02:32:22.860241Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 8} Bootstrap called 2026-01-04T02:32:22.860278Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 8} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-04T02:32:22.860317Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 9} Bootstrap called 2026-01-04T02:32:22.860339Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 9} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-04T02:32:22.860383Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 10} Bootstrap called 2026-01-04T02:32:22.860411Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 10} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-04T02:32:22.860448Z node 1 :DS_LOAD_TEST INFO: kqp_select.cpp:130: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 11} Bootstrap called 2026-01-04T02:32:22.860473Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:142: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 11} sends event for session creation to proxy: [1:8678280833929343339:121] 2026-01-04T02:32:22.862258Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 2} session: ydb://session/3?node_id=1&id=NjRiNGJjOWYtYjA5OTAyY2QtMjQwMWEyYzctZjNjNWVlYWQ= 2026-01-04T02:32:22.864247Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 3} session: ydb://session/3?node_id=1&id=NGJlYWEyMTAtYTk4ZjJlOWUtMzc5ZDYxZWMtYmQxZDE2OWY= 2026-01-04T02:32:22.866840Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 4} session: ydb://session/3?node_id=1&id=NWJlOTI5Mi1hNmQ0YmYwMS0zMzY2MGY0Yy1jYjBhOWJlZQ== 2026-01-04T02:32:22.867003Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 5} session: ydb://session/3?node_id=1&id=Njg1ZjdkOGYtM2VjMWM2YzMtNTUwZWQ5M2QtZDYzMjYwOGQ= 2026-01-04T02:32:22.868145Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 6} session: ydb://session/3?node_id=1&id=NTRkZjkxOWEtNmM3Nzg3YzYtMzM1YTc1MTYtNTQ4OGE4ZTg= 2026-01-04T02:32:22.869297Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 7} session: ydb://session/3?node_id=1&id=NDdmNmM5ODMtOGQwZDQyYjktMzVkMDg0ODMtNDBiZDJmODQ= 2026-01-04T02:32:22.870465Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 8} session: ydb://session/3?node_id=1&id=NDA5N2U4Mi0zYTRiNTBmOC1lNzFjYjc1MC1iZWMxZGEzZA== 2026-01-04T02:32:22.871560Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 9} session: 
ydb://session/3?node_id=1&id=NTM2YWUzY2EtMzBiNDYxNTAtOTVlNzNlMmUtZTMwOTJiNjk= 2026-01-04T02:32:22.872727Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 10} session: ydb://session/3?node_id=1&id=MzFjODgxY2ItZjFjOWVkZmYtZDgxMDE4N2QtZGM2OTBkNzE= 2026-01-04T02:32:22.873792Z node 1 :DS_LOAD_TEST DEBUG: kqp_select.cpp:214: TKqpSelectActor# {Tag: 0, parent: [1:961:2819], subTag: 11} session: ydb://session/3?node_id=1&id=MWUwOTdkMDAtOGQxNDA2MjMtOTE5NGM2YzEtMWZkMzBkOTE= 2026-01-04T02:32:22.878737Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:988:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:22.878835Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActo ... manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:34.959911Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1068:2920] txid# 281474976715664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:34.960498Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1077:2923] txid# 281474976715666, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:34.961223Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1081:2924] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:35.070335Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1048:2900], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:35.070418Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1049:2901], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:35.070460Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1050:2902], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:35.070496Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1051:2903], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:35.070530Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1052:2904], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:35.070578Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1053:2905], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:35.070638Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1054:2906], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:35.070683Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1055:2907], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:35.070719Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1056:2908], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:35.070757Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1059:2911], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:32:35.106061Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1186:3010] txid# 281474976715668, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:35.639783Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 2} finished in 0.734358s, errors=0 2026-01-04T02:32:35.640022Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 2 { Tag: 2 DurationMs: 734 OperationsOK: 100 OperationsError: 0 } 2026-01-04T02:32:35.654287Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2215:3432] txid# 281474976715769, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:36.110833Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 7} finished in 1.196482s, errors=0 2026-01-04T02:32:36.111151Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 7 { Tag: 7 DurationMs: 1196 OperationsOK: 100 OperationsError: 0 } 2026-01-04T02:32:36.125678Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:3222:3838] txid# 281474976715870, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:36.616939Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 5} finished in 1.705602s, errors=0 2026-01-04T02:32:36.617162Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 5 { Tag: 5 DurationMs: 1705 OperationsOK: 100 OperationsError: 0 } 2026-01-04T02:32:36.631702Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4229:4244] txid# 281474976715971, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:37.139333Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 6} finished in 2.225081s, errors=0 2026-01-04T02:32:37.139666Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 6 { Tag: 6 DurationMs: 2225 OperationsOK: 100 OperationsError: 0 } 2026-01-04T02:32:37.187692Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:5236:4650] txid# 281474976716072, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:37.802178Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: 
TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 8} finished in 2.884934s, errors=0 2026-01-04T02:32:37.802594Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 8 { Tag: 8 DurationMs: 2884 OperationsOK: 100 OperationsError: 0 } 2026-01-04T02:32:37.818197Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:6243:5056] txid# 281474976716173, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:38.461277Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 9} finished in 3.543838s, errors=0 2026-01-04T02:32:38.461669Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 9 { Tag: 9 DurationMs: 3543 OperationsOK: 100 OperationsError: 0 } 2026-01-04T02:32:38.478952Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7250:5462] txid# 281474976716274, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:39.185504Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 10} finished in 4.266368s, errors=0 2026-01-04T02:32:39.185766Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 10 { Tag: 10 DurationMs: 4266 OperationsOK: 100 OperationsError: 0 } 2026-01-04T02:32:39.203545Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:8257:5868] txid# 281474976716375, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:39.968375Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 11} finished in 5.047483s, errors=0 2026-01-04T02:32:39.968676Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 11 { Tag: 11 DurationMs: 5047 OperationsOK: 100 OperationsError: 0 } 2026-01-04T02:32:39.985659Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:9264:6274] txid# 281474976716476, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:40.936945Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 4} finished in 6.025695s, errors=0 2026-01-04T02:32:40.937330Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 4 { Tag: 4 DurationMs: 6025 OperationsOK: 100 OperationsError: 0 } 2026-01-04T02:32:40.957249Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:10271:6680] txid# 281474976716577, issues: { message: "Check failed: path: 
\'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:41.759277Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:197: TKqpSelectActor# {Tag: 0, parent: [2:961:2819], subTag: 3} finished in 6.851856s, errors=0 2026-01-04T02:32:41.759819Z node 2 :DS_LOAD_TEST DEBUG: kqp_select.cpp:462: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished: 3 { Tag: 3 DurationMs: 6851 OperationsOK: 100 OperationsError: 0 } 2026-01-04T02:32:41.759902Z node 2 :DS_LOAD_TEST NOTICE: kqp_select.cpp:481: TKqpSelectActorMultiSession# {Tag: 0, parent: [2:952:2810], subTag: 3} finished in 6.857522s, oks# 1000, errors# 0 2026-01-04T02:32:41.760285Z node 2 :DS_LOAD_TEST INFO: test_load_actor.cpp:448: TLoad# 0 received finished from actor# [2:961:2819] with tag# 3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TSchemeShardExportToFsTests::ShouldExportWithCompressionToFs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:32:31.997641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:32:31.997747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:31.997791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:32:31.997890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:32:31.997938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:32:31.997987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:32:31.998060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:31.998183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:32:31.999112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:32:31.999431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:32:32.093583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:32:32.093653Z node 1 
:IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:32.108524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:32:32.108727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:32:32.108903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:32:32.117294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:32:32.117529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:32:32.118369Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:32.118647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:32:32.121689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:32.121901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:32.123070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:32.123134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:32.123272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:32.123324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:32.123386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:32.123582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:32:32.275621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.276685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.276805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.276877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.276968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.277041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.277119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.277211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.277310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.277393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.277485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.277588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.277683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.277752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.277840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
26-01-04T02:32:41.748520Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-01-04T02:32:41.748553Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:32:41.748613Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2026-01-04T02:32:41.749294Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2026-01-04T02:32:41.749336Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2026-01-04T02:32:41.749372Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2026-01-04T02:32:41.749439Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2026-01-04T02:32:41.749533Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000043 2026-01-04T02:32:41.749783Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:41.749891Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 25769805936 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:32:41.749939Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000043, at schemeshard: 72057594046678944 2026-01-04T02:32:41.750053Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2026-01-04T02:32:41.750119Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2026-01-04T02:32:41.750159Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-04T02:32:41.750204Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2026-01-04T02:32:41.750248Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-04T02:32:41.750294Z node 6 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:32:41.750350Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:32:41.750396Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2026-01-04T02:32:41.750439Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-04T02:32:41.750481Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710761:0 2026-01-04T02:32:41.750522Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710761:0 2026-01-04T02:32:41.750591Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:32:41.750743Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2026-01-04T02:32:41.750788Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-04T02:32:41.750830Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-01-04T02:32:41.753905Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-04T02:32:41.754028Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-04T02:32:41.754445Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:41.754499Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:41.754641Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:32:41.754754Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:41.754789Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:211:2212], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2026-01-04T02:32:41.754828Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:211:2212], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 
39 FAKE_COORDINATOR: Erasing txId 281474976710761 2026-01-04T02:32:41.755542Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-04T02:32:41.755621Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-04T02:32:41.755661Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-04T02:32:41.755710Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-04T02:32:41.755765Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:32:41.756091Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-04T02:32:41.756176Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-04T02:32:41.756206Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-04T02:32:41.756237Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-04T02:32:41.756268Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:32:41.756344Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2026-01-04T02:32:41.756398Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [6:128:2153] 2026-01-04T02:32:41.756743Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:32:41.756778Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-04T02:32:41.756836Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:32:41.758522Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-04T02:32:41.759563Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-04T02:32:41.759649Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-01-04T02:32:41.759695Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-04T02:32:41.760170Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:32:41.761237Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-01-04T02:32:41.761298Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [6:915:2873] TestWaitNotification: OK eventTxId 112 >> TExportToS3Tests::ChecksumsWithCompression >> TExportToS3Tests::TopicsWithPermissionsExport [GOOD] >> DataShardVolatile::UpsertBrokenLockArbiterRestart-UseSink [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink >> TExportToS3Tests::ExportTableWithUniqueIndex |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/load_test/ut_ycsb/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::IndexMaterializationGlobalVectorKmeansTreePrefix [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition |87.2%| [TA] $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ReplicatedTableExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:32:32.020106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:32:32.020214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:32.020255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:32:32.020295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:32:32.020335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:32:32.020363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:32:32.020453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:32.020527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:32:32.021399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:32:32.021686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:32:32.112606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:32:32.112663Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:32.127258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:32:32.127773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:32:32.127988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:32:32.140373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:32:32.140683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:32:32.141486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2026-01-04T02:32:32.141747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:32:32.146307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:32.146525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:32.147851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:32.147937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:32.147997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:32.148056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:32.148124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:32.148335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:32:32.321118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.322135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.322266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.322350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.322434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.322500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.322585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.322669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.322766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.322891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.322981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.323122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.323207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.323309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.323396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 6678944, LocalPathId: 1], version: 8 2026-01-04T02:32:42.328216Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:32:42.330204Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:32:42.330315Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:32:42.330357Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:32:42.330396Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:32:42.330437Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:32:42.330566Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-01-04T02:32:42.333878Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 11186 } } 
CommitVersion { Step: 5000038 TxId: 101 } 2026-01-04T02:32:42.333947Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-04T02:32:42.334158Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 11186 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-04T02:32:42.334369Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 11186 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 FAKE_COORDINATOR: Erasing txId 101 2026-01-04T02:32:42.336822Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 714 RawX2: 30064773772 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:32:42.336892Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-04T02:32:42.337089Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 714 RawX2: 30064773772 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:32:42.337177Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:32:42.337324Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 714 RawX2: 30064773772 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:32:42.337428Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:42.337510Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:32:42.337570Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:32:42.337644Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-04T02:32:42.338845Z 
node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:32:42.338984Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:32:42.340995Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:32:42.341179Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:32:42.341574Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:32:42.341644Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:32:42.341829Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:32:42.341890Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:32:42.341950Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:32:42.341995Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:32:42.342052Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-04T02:32:42.342142Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:742:2720] message: TxId: 101 2026-01-04T02:32:42.342217Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:32:42.342273Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:32:42.342329Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:32:42.342514Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:32:42.344687Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:32:42.344758Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [7:743:2721] TestWaitNotification: OK eventTxId 101 2026-01-04T02:32:42.345429Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:32:42.345747Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 347us result status StatusSuccess 
2026-01-04T02:32:42.346467Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::IndexMaterializationTwoTables >> TExportToS3Tests::CancelUponTransferringMultiShardTableShouldSucceed [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed >> TSchemeShardTopicSplitMergeTest::SetBoundsBadRange [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitAndGrowFromTopicWithOnePartition [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [FAIL] Test command err: Starting YDB, grpc: 5276, msgbus: 12767 2026-01-04T02:27:09.465783Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327336157613859:2264];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:09.494722Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:27:09.528472Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.015690s 2026-01-04T02:27:10.152956Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:27:10.175658Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:27:10.175764Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:27:10.199002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:27:10.319730Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:10.379179Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:27:10.463352Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:27:10.594368Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639257 Duration# 0.013061s 2026-01-04T02:27:10.666056Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.025097s 2026-01-04T02:27:10.813939Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:27:10.813967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:27:10.813972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:27:10.814128Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:27:11.386066Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# 
[1:7591327336157613881:2118] Handle TEvNavigate describe path dc-1 2026-01-04T02:27:11.386118Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591327344747549291:2730] HANDLE EvNavigateScheme dc-1 2026-01-04T02:27:11.386556Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591327344747549291:2730] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:27:11.478542Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591327344747549291:2730] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-04T02:27:11.511893Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591327344747549291:2730] Handle TEvDescribeSchemeResult Forward to# [1:7591327344747549290:2729] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:27:11.518175Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327336157613881:2118] Handle TEvProposeTransaction 2026-01-04T02:27:11.518203Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591327336157613881:2118] TxId# 281474976710657 ProcessProposeTransaction 2026-01-04T02:27:11.518272Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591327336157613881:2118] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7591327344747549296:2735] 2026-01-04T02:27:11.683478Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591327344747549296:2735] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** 
(B6C6F477)0\000" PeerName: "" 2026-01-04T02:27:11.683568Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591327344747549296:2735] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:27:11.683588Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327344747549296:2735] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:27:11.683654Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327344747549296:2735] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:27:11.684005Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327344747549296:2735] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:27:11.684109Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327344747549296:2735] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-04T02:27:11.684207Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327344747549296:2735] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-04T02:27:11.684365Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7591327344747549296:2735] txid# 281474976710657 HANDLE EvClientConnected 2026-01-04T02:27:11.685075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:27:11.694524Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327344747549296:2735] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-04T02:27:11.694590Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7591327344747549296:2735] txid# 281474976710657 SEND to# [1:7591327344747549295:2734] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-04T02:27:11.717189Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327336157613881:2118] Handle TEvProposeTransaction 2026-01-04T02:27:11.717221Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591327336157613881:2118] TxId# 281474976710658 ProcessProposeTransaction 2026-01-04T02:27:11.717253Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591327336157613881:2118] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7591327344747549334:2769] 2026-01-04T02:27:11.719974Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591327344747549334:2769] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:27:11.720021Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# 
[1:7591327344747549334:2769] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:27:11.720054Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327344747549334:2769] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:27:11.720109Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327344747549334:2769] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:27:11.720396Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327344747549334:2769] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:27:11.720494Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327344747549334:2769] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:27:11.720551Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327344747549334:2769] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-04T02:27:11.720684Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7591327344747549334:2769] txid# 281474976710658 HANDLE EvClientConnected 2026-01-04T02:27:11.721216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:27:11.734703Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327344747549334:2769] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-01-04T02:27:11.734750Z node 1 :TX_PROXY DEBUG: ... 
19452:2773] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:31:49.301633Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328537836619452:2773] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-04T02:31:49.301754Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328537836619452:2773] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-04T02:31:49.301954Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328537836619452:2773] txid# 281474976710657 HANDLE EvClientConnected 2026-01-04T02:31:49.302699Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:49.315236Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328537836619452:2773] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-04T02:31:49.315305Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328537836619452:2773] txid# 281474976710657 SEND to# [59:7591328537836619451:2772] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-04T02:31:49.376316Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328533541651174:2116] Handle TEvProposeTransaction 2026-01-04T02:31:49.376351Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328533541651174:2116] TxId# 281474976710658 ProcessProposeTransaction 2026-01-04T02:31:49.376389Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328533541651174:2116] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [59:7591328537836619497:2808] 2026-01-04T02:31:49.379307Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328537836619497:2808] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:31:49.379367Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328537836619497:2808] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:31:49.379382Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7591328537836619497:2808] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:31:49.379439Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328537836619497:2808] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:31:49.379808Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328537836619497:2808] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# 
P5 ErrorCount# 0 2026-01-04T02:31:49.379902Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328537836619497:2808] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:31:49.379971Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328537836619497:2808] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-04T02:31:49.380095Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328537836619497:2808] txid# 281474976710658 HANDLE EvClientConnected 2026-01-04T02:31:49.380669Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:31:49.390956Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328537836619497:2808] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-01-04T02:31:49.391016Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328537836619497:2808] txid# 281474976710658 SEND to# [59:7591328537836619496:2807] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2026-01-04T02:31:49.405680Z node 59 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:49.426017Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328533541651174:2116] Handle TEvProposeTransaction 2026-01-04T02:31:49.426051Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328533541651174:2116] TxId# 281474976710659 ProcessProposeTransaction 2026-01-04T02:31:49.426101Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328533541651174:2116] Cookie# 0 userReqId# "" txid# 281474976710659 SEND to# [59:7591328537836619567:2870] 2026-01-04T02:31:49.428288Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328537836619567:2870] txid# 281474976710659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\033\010\001\022\027\032\025cluster_admin@builtin\n#\010\000\022\037\010\001\020\200\200\002\032\025cluster_admin@builtin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:49478" 2026-01-04T02:31:49.428336Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328537836619567:2870] txid# 281474976710659 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:31:49.428349Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7591328537836619567:2870] txid# 281474976710659 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:31:49.428386Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328537836619567:2870] txid# 281474976710659 TEvNavigateKeySet requested from SchemeCache 
2026-01-04T02:31:49.428685Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328537836619567:2870] txid# 281474976710659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:31:49.428774Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328537836619567:2870] HANDLE EvNavigateKeySetResult, txid# 281474976710659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:31:49.428823Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328537836619567:2870] txid# 281474976710659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710659 TabletId# 72057594046644480} 2026-01-04T02:31:49.428927Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328537836619567:2870] txid# 281474976710659 HANDLE EvClientConnected 2026-01-04T02:31:49.429452Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:31:49.433657Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328537836619567:2870] txid# 281474976710659 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710659} 2026-01-04T02:31:49.433696Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328537836619567:2870] txid# 281474976710659 SEND to# [59:7591328537836619566:2307] Source {TEvProposeTransactionStatus txid# 281474976710659 Status# 48} 2026-01-04T02:31:53.353288Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7591328533541651104:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:53.353389Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:55.032990Z node 59 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01ke3dheyk3gw2be8k5y05nca8", Request deadline has expired for 0.584149s seconds assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:17541 TBackTrace::Capture()+28 (0x1B12E21C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B61D6EC) NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+1840 (0x1AD066D0) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+3148 (0x1AD1E42C) std::__y1::__function::__func const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>, std::__y1::allocator const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>>, void (NUnitTest::TTestContext&)>::operator()(NUnitTest::TTestContext&)+230 (0x1AD618A6) std::__y1::__function::__func, void ()>::operator()()+280 (0x1AD4F318) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B65637A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B6243C8) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1229 (0x1AD4E41D) NUnitTest::TTestFactory::Execute()+2176 (0x1B625B80) NUnitTest::RunMain(int, char**)+5805 (0x1B6501DD) ??+0 (0x7FA447D20D90) __libc_start_main+128 (0x7FA447D20E40) _start+41 (0x18703029) |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 18107, msgbus: 18824 2026-01-04T02:26:46.098777Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327237306300752:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:46.098845Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:26:46.544494Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:26:46.650720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:46.650823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:46.760267Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:46.763562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:46.804559Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:26:47.114000Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:26:47.114022Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:47.114029Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:47.114098Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:47.122894Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:47.479174Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591327237306300913:2137] Handle TEvNavigate describe path dc-1 2026-01-04T02:26:47.479233Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591327241601269008:2736] HANDLE EvNavigateScheme dc-1 2026-01-04T02:26:47.479617Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591327241601269008:2736] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:47.526556Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591327241601269008:2736] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-04T02:26:47.532908Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591327241601269008:2736] Handle TEvDescribeSchemeResult Forward to# [1:7591327241601269007:2735] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:26:47.539986Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327237306300913:2137] Handle TEvProposeTransaction 2026-01-04T02:26:47.540024Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591327237306300913:2137] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:26:47.540083Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591327237306300913:2137] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7591327241601269013:2741] 2026-01-04T02:26:47.644858Z node 1 :TX_PROXY DEBUG: 
schemereq.cpp:1822: Actor# [1:7591327241601269013:2741] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:26:47.644960Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591327241601269013:2741] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-04T02:26:47.644977Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327241601269013:2741] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:26:47.645035Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327241601269013:2741] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:26:47.645315Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327241601269013:2741] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:47.645421Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327241601269013:2741] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-04T02:26:47.645493Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327241601269013:2741] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:26:47.645650Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7591327241601269013:2741] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:26:47.646292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:47.648707Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327241601269013:2741] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:26:47.648766Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7591327241601269013:2741] txid# 281474976715657 SEND to# [1:7591327241601269012:2740] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:26:47.661981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:26:47.664851Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327237306300913:2137] Handle TEvProposeTransaction 2026-01-04T02:26:47.664882Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591327237306300913:2137] TxId# 281474976715658 ProcessProposeTransaction 2026-01-04T02:26:47.664919Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591327237306300913:2137] Cookie# 0 userReqId# "" txid# 281474976715658 
SEND to# [1:7591327241601269049:2774] 2026-01-04T02:26:47.667377Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591327241601269049:2774] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:26:47.667436Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591327241601269049:2774] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-04T02:26:47.667451Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327241601269049:2774] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:26:47.667566Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327241601269049:2774] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:26:47.667840Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327241601269049:2774] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:47.667933Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327241601269049:2774] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:26:47.667991Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327241601269049:2774] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-04T02:26:47.668132Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7591327241601269049:2774] txid# 281474976715658 HANDLE EvClientConnected 2026-01-04T02:26:47.668752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:26:47.671179Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327241601269049:2774] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-04T02:26:47.671219Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7591327241601269049:2774] txid# 281474976715658 SEND to# [1:7591327241601269048:2773] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-04T02:26: ... 
directRequired# true 2026-01-04T02:31:35.398464Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328475863893330:2947] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2026-01-04T02:31:35.398648Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328475863893330:2947] txid# 281474976715661 HANDLE EvClientConnected 2026-01-04T02:31:35.407315Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328475863893330:2947] txid# 281474976715661 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715661} 2026-01-04T02:31:35.407410Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328475863893330:2947] txid# 281474976715661 SEND to# [59:7591328475863893329:2328] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2026-01-04T02:31:35.492719Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328450094088330:2114] Handle TEvProposeTransaction 2026-01-04T02:31:35.492752Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328450094088330:2114] TxId# 281474976715662 ProcessProposeTransaction 2026-01-04T02:31:35.492803Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328450094088330:2114] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7591328475863893358:2967] 2026-01-04T02:31:35.496007Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328475863893358:2967] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\022\010\001\022\016\032\014ordinaryuser\n\032\010\000\022\026\010\001\020\200\200\002\032\014ordinaryuser \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:58814" 2026-01-04T02:31:35.496093Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328475863893358:2967] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-04T02:31:35.496114Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7591328475863893358:2967] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:31:35.496187Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328475863893358:2967] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:31:35.496655Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328475863893358:2967] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:31:35.496802Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328475863893358:2967] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:31:35.496886Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328475863893358:2967] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2026-01-04T02:31:35.497094Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# 
[59:7591328475863893358:2967] txid# 281474976715662 HANDLE EvClientConnected 2026-01-04T02:31:35.497927Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:31:35.507727Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328475863893358:2967] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2026-01-04T02:31:35.507797Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328475863893358:2967] txid# 281474976715662 SEND to# [59:7591328475863893357:2344] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2026-01-04T02:31:35.572278Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328450094088330:2114] Handle TEvProposeTransaction 2026-01-04T02:31:35.572318Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328450094088330:2114] TxId# 281474976715663 ProcessProposeTransaction 2026-01-04T02:31:35.572376Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328450094088330:2114] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7591328475863893441:3036] 2026-01-04T02:31:35.574930Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328475863893441:3036] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:58840" 2026-01-04T02:31:35.575006Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328475863893441:3036] txid# 281474976715663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-04T02:31:35.575026Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7591328475863893441:3036] txid# 281474976715663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:31:35.575082Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328475863893441:3036] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:31:35.575436Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328475863893441:3036] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:31:35.575512Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328475863893441:3036] HANDLE EvNavigateKeySetResult, txid# 281474976715663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:31:35.611573Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328475863893441:3036] txid# 281474976715663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715663 TabletId# 72057594046644480} 2026-01-04T02:31:35.611778Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328475863893441:3036] txid# 
281474976715663 HANDLE EvClientConnected 2026-01-04T02:31:35.615682Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328475863893441:3036] txid# 281474976715663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715663} 2026-01-04T02:31:35.615726Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328475863893441:3036] txid# 281474976715663 SEND to# [59:7591328475863893440:2346] Source {TEvProposeTransactionStatus txid# 281474976715663 Status# 48} 2026-01-04T02:31:35.700339Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328450094088330:2114] Handle TEvProposeTransaction 2026-01-04T02:31:35.700379Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328450094088330:2114] TxId# 281474976715664 ProcessProposeTransaction 2026-01-04T02:31:35.700438Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328450094088330:2114] Cookie# 0 userReqId# "" txid# 281474976715664 SEND to# [59:7591328475863893473:3050] 2026-01-04T02:31:35.705281Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328475863893473:3050] txid# 281474976715664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\014ordinaryuser\022\030\022\026\n\024all-users@well-known\032\334\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2NzUzNzA5NSwiaWF0IjoxNzY3NDkzODk1LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.G0SmpO8DtRSSc9sZMP9Rf7MLh0IgfOFQ1gx8IePECHtQOhXKSYia_kkjk_hi6JMZfVBEpEFxSrKT-_aKPWPGBCO3Sy13ObiAcPGFipXusnxK9UQ2HsLUs_PDQOWnsJfdgVwwyPA8czO0Q46eXBwpsdZIMp6p17lJEMZwM3-sfWOFjoAk9khXLCSVKGqlM4fCXZWjfgXqyszEOzm3J1Ld-cqsEqIwkJCuuwZk-QriFGtszXpZVCP4KYE21nmwL0igrCkbkW3AR7Zhbumbj4-lwWSdRpXjHGCmmtL1xpVXryYRX7z3-HKGYebF7_mHrbMGIg7REWkr_5A1xKK7u6JVVg\"\005Login*\210\001eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2NzUzNzA5NSwiaWF0IjoxNzY3NDkzODk1LCJzdWIiOiJvcmRpbmFyeXVzZXIifQ.**0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:58862" 2026-01-04T02:31:35.705384Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328475863893473:3050] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-04T02:31:35.705406Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7591328475863893473:3050] txid# 281474976715664 Bootstrap, UserSID: ordinaryuser IsClusterAdministrator: 0 2026-01-04T02:31:35.705591Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7591328475863893473:3050] txid# 281474976715664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-01-04T02:31:35.705644Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7591328475863893473:3050] txid# 281474976715664 HandleResolveDatabase, UserSID: ordinaryuser CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-01-04T02:31:35.705689Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328475863893473:3050] txid# 281474976715664 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:31:35.705992Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328475863893473:3050] txid# 281474976715664 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:31:35.706020Z node 59 :TX_PROXY ERROR: schemereq.cpp:1235: Actor# [59:7591328475863893473:3050] txid# 281474976715664, Access 
denied for ordinaryuser, attempt to manage user 2026-01-04T02:31:35.706108Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7591328475863893473:3050] txid# 281474976715664, issues: { message: "Access denied for ordinaryuser" issue_code: 200000 severity: 1 } 2026-01-04T02:31:35.706137Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328475863893473:3050] txid# 281474976715664 SEND to# [59:7591328475863893472:2357] Source {TEvProposeTransactionStatus Status# 5} 2026-01-04T02:31:35.706709Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=59&id=N2JjZDViNGUtZTQzMjM3LTEyZTRhOTNkLTlhMDJiMWY2, ActorId: [59:7591328475863893463:2357], ActorState: ExecuteState, LegacyTraceId: 01ke3dh1g01qydfn3yqgdh68z2, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-01-04T02:31:35.707122Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7591328450094088330:2114] Handle TEvExecuteKqpTransaction 2026-01-04T02:31:35.707148Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7591328450094088330:2114] TxId# 281474976715665 ProcessProposeKqpTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TSchemeShardExportToFsTests::FsExportWithMultipleTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:32:31.943341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:32:31.943430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:31.943468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:32:31.943501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:32:31.943537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:32:31.943565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:32:31.943679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:31.943784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:32:31.944602Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:32:31.944857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:32:32.028206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:32:32.028261Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:32.039105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:32:32.039289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:32:32.039506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:32:32.048548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:32:32.048789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:32:32.049324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:32.049532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:32:32.052805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:32.053024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:32.054242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:32.054302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:32.054426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:32.054474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:32.054511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:32.054731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:32:32.175400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.179082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.179250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.179337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.179411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.179497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.179572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.179666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.179762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour 
} } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.179849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.179946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.180033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.180118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.180211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.180302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
rd: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 42], version: 1 2026-01-04T02:32:43.037361Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 4 2026-01-04T02:32:43.037424Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/2, is published: true 2026-01-04T02:32:43.038123Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:1 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:4 msg type: 268697601 2026-01-04T02:32:43.038277Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 1, tablet: 72057594037968897 2026-01-04T02:32:43.038327Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:4, partId: 1 2026-01-04T02:32:43.038688Z node 7 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 4 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 42 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 4 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 42 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2026-01-04T02:32:43.038923Z node 7 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 4, type DataShard, boot OK, tablet id 72075186233409548 2026-01-04T02:32:43.039032Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:3 msg type: 268697601 2026-01-04T02:32:43.039258Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1813: TOperation RegisterRelationByTabletId collision in routes has found, TxId# 281474976710758, partId# 0, prevPartId# 1, tablet# 72057594037968897, guessDefaultRootHive# yes, prevTx# WorkingDir: "/MyRoot/export-103" OperationType: ESchemeOpCreateTable CreateTable { Name: "1" CopyFromTable: "/MyRoot/Table2" OmitFollowers: true IsBackup: true AllowUnderSameOperation: false OmitIndexes: true } FailOnExist: true, newTx# WorkingDir: "/MyRoot/export-103" OperationType: ESchemeOpCreateTable CreateTable { Name: "0" CopyFromTable: "/MyRoot/Table1" OmitFollowers: true IsBackup: true AllowUnderSameOperation: false OmitIndexes: true } FailOnExist: true 2026-01-04T02:32:43.039308Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:3, partId: 0 2026-01-04T02:32:43.039554Z node 7 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 
72057594046678944 OwnerIdx: 3 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 41 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 3 TabletType: DataShard FollowerGroups { } ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 41 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2026-01-04T02:32:43.039691Z node 7 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 3, type DataShard, boot OK, tablet id 72075186233409549 2026-01-04T02:32:43.040252Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409548 Origin: 72057594037968897 2026-01-04T02:32:43.040314Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:4, partId: 1 2026-01-04T02:32:43.040450Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710758:1, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409548 Origin: 72057594037968897 2026-01-04T02:32:43.040527Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976710758:1 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2026-01-04T02:32:43.040622Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 281474976710758:1 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 4 TabletID: 72075186233409548 Origin: 72057594037968897 2026-01-04T02:32:43.040711Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710758:1 2 -> 3 2026-01-04T02:32:43.041238Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 3 TabletID: 72075186233409549 Origin: 72057594037968897 2026-01-04T02:32:43.041278Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 281474976710758, shardIdx: 72057594046678944:3, partId: 0 2026-01-04T02:32:43.041358Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 3 TabletID: 72075186233409549 Origin: 72057594037968897 2026-01-04T02:32:43.041389Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976710758:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2026-01-04T02:32:43.041439Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 281474976710758:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 3 TabletID: 72075186233409549 Origin: 72057594037968897 
2026-01-04T02:32:43.041496Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710758:0 2 -> 3 2026-01-04T02:32:43.043997Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-04T02:32:43.044177Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-04T02:32:43.044364Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-04T02:32:43.046408Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-04T02:32:43.046709Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710758:1, at schemeshard: 72057594046678944 2026-01-04T02:32:43.046861Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:1, at schemeshard: 72057594046678944 2026-01-04T02:32:43.046932Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:70: TCopyTable TConfigureParts operationId# 281474976710758:1 ProgressState at tablet# 72057594046678944 2026-01-04T02:32:43.047007Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:102: TCopyTable TConfigureParts operationId# 281474976710758:1 Propose modify scheme on dstDatashard# 72075186233409548 idx# 72057594046678944:4 srcDatashard# 72075186233409547 idx# 72057594046678944:2 operationId# 281474976710758:1 seqNo# 2:3 at tablet# 72057594046678944 2026-01-04T02:32:43.049410Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2026-01-04T02:32:43.049680Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710758:0, at schemeshard: 72057594046678944 2026-01-04T02:32:43.049722Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:70: TCopyTable TConfigureParts operationId# 281474976710758:0 ProgressState at tablet# 72057594046678944 2026-01-04T02:32:43.049774Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:102: TCopyTable TConfigureParts operationId# 281474976710758:0 Propose modify scheme on dstDatashard# 72075186233409549 idx# 72057594046678944:3 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 281474976710758:0 seqNo# 2:4 at tablet# 72057594046678944 2026-01-04T02:32:43.054959Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:1 from tablet: 72057594046678944 to tablet: 72075186233409548 cookie: 72057594046678944:4 msg type: 269549568 2026-01-04T02:32:43.055115Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:1 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2026-01-04T02:32:43.055198Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, 
TxId: 281474976710758, partId: 1, tablet: 72075186233409548 2026-01-04T02:32:43.055223Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 1, tablet: 72075186233409547 2026-01-04T02:32:43.057022Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:3 msg type: 269549568 2026-01-04T02:32:43.057111Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2026-01-04T02:32:43.057178Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409549 2026-01-04T02:32:43.057213Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 0, tablet: 72075186233409546 |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ChecksumsWithCompression [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [FAIL] Test command err: Starting YDB, grpc: 15473, msgbus: 23756 2026-01-04T02:26:50.475244Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327251730104686:2262];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:50.475300Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:26:50.530466Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:50.950073Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:26:51.169154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:51.169256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:51.200943Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:51.273433Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:51.309904Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:26:51.389931Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-04T02:26:51.570358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:26:51.570388Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:51.570402Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:51.570488Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:51.696658Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:26:52.073349Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591327251730104720:2118] Handle TEvNavigate describe path dc-1 2026-01-04T02:26:52.073424Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591327260320040148:2743] HANDLE EvNavigateScheme dc-1 2026-01-04T02:26:52.073875Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591327260320040148:2743] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:52.171686Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591327260320040148:2743] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-04T02:26:52.181742Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591327260320040148:2743] Handle TEvDescribeSchemeResult Forward to# [1:7591327260320040147:2742] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:26:52.206020Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327251730104720:2118] 
Handle TEvProposeTransaction 2026-01-04T02:26:52.206056Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591327251730104720:2118] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:26:52.206131Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591327251730104720:2118] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7591327260320040154:2749] 2026-01-04T02:26:52.351369Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591327260320040154:2749] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:26:52.351469Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591327260320040154:2749] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:26:52.351498Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327260320040154:2749] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:26:52.351567Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327260320040154:2749] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:26:52.351948Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327260320040154:2749] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:52.352107Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327260320040154:2749] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-04T02:26:52.352272Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327260320040154:2749] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:26:52.352487Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7591327260320040154:2749] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:26:52.353275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:52.360383Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327260320040154:2749] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:26:52.360451Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7591327260320040154:2749] txid# 281474976715657 SEND to# [1:7591327260320040153:2748] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:26:52.391149Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327251730104720:2118] Handle TEvProposeTransaction 2026-01-04T02:26:52.391177Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: 
actor# [1:7591327251730104720:2118] TxId# 281474976715658 ProcessProposeTransaction 2026-01-04T02:26:52.391225Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591327251730104720:2118] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7591327260320040190:2782] 2026-01-04T02:26:52.394059Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591327260320040190:2782] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:26:52.394131Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591327260320040190:2782] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:26:52.394148Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327260320040190:2782] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:26:52.394217Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327260320040190:2782] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:26:52.394525Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327260320040190:2782] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:52.394651Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327260320040190:2782] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:26:52.394723Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327260320040190:2782] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-04T02:26:52.394858Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7591327260320040190:2782] txid# 281474976715658 HANDLE EvClientConnected 2026-01-04T02:26:52.395392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:26:52.400510Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327260320040190:2782] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult S ... 
r# [59:7591328414482081172:2738] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:31:20.574542Z node 59 :TX_PROXY DEBUG: describe.cpp:433: Actor# [59:7591328414482081172:2738] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-04T02:31:20.580957Z node 59 :TX_PROXY DEBUG: describe.cpp:446: Actor# [59:7591328414482081172:2738] Handle TEvDescribeSchemeResult Forward to# [59:7591328414482081171:2737] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:31:20.584509Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328410187112859:2112] Handle TEvProposeTransaction 2026-01-04T02:31:20.584541Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328410187112859:2112] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:31:20.584596Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328410187112859:2112] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [59:7591328414482081177:2743] 2026-01-04T02:31:20.589345Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328414482081177:2743] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:31:20.589428Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328414482081177:2743] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 
2026-01-04T02:31:20.589450Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7591328414482081177:2743] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:31:20.589508Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328414482081177:2743] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:31:20.589910Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328414482081177:2743] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:31:20.590001Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328414482081177:2743] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-04T02:31:20.590096Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328414482081177:2743] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:31:20.590252Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328414482081177:2743] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:31:20.590861Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:20.595318Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328414482081177:2743] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:31:20.595374Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328414482081177:2743] txid# 281474976715657 SEND to# [59:7591328414482081176:2742] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:31:20.618985Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328410187112859:2112] Handle TEvProposeTransaction 2026-01-04T02:31:20.619016Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328410187112859:2112] TxId# 281474976715658 ProcessProposeTransaction 2026-01-04T02:31:20.619056Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328410187112859:2112] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [59:7591328414482081216:2779] 2026-01-04T02:31:20.622115Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328414482081216:2779] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:31:20.622188Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328414482081216:2779] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-04T02:31:20.622211Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# 
[59:7591328414482081216:2779] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:31:20.622274Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328414482081216:2779] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:31:20.622608Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328414482081216:2779] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:31:20.622723Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328414482081216:2779] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:31:20.622799Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328414482081216:2779] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-04T02:31:20.622944Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328414482081216:2779] txid# 281474976715658 HANDLE EvClientConnected 2026-01-04T02:31:20.623631Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:31:20.643986Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328414482081216:2779] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-04T02:31:20.644044Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328414482081216:2779] txid# 281474976715658 SEND to# [59:7591328414482081215:2778] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-04T02:31:20.762051Z node 59 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:29.015149Z node 59 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01ke3dgjvj5ptghkyzypcvfzep", Request deadline has expired for 3.346315s seconds assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:19304 TBackTrace::Capture()+28 (0x1B12E21C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B61D6EC) NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+1840 (0x1AD066D0) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+1751 (0x1AD1DEB7) std::__y1::__function::__func const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>, std::__y1::allocator const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>>, void (NUnitTest::TTestContext&)>::operator()(NUnitTest::TTestContext&)+230 (0x1AD618A6) std::__y1::__function::__func, void ()>::operator()()+280 (0x1AD4F318) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B65637A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B6243C8) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1229 (0x1AD4E41D) NUnitTest::TTestFactory::Execute()+2176 (0x1B625B80) NUnitTest::RunMain(int, char**)+5805 (0x1B6501DD) ??+0 (0x7FCAC9D72D90) __libc_start_main+128 (0x7FCAC9D72E40) _start+41 (0x18703029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-60 [GOOD] Test command err: Starting YDB, grpc: 29685, msgbus: 3148 2026-01-04T02:26:48.939346Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327243220876553:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:48.939415Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:26:49.783571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:26:49.868920Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:49.869044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:49.993906Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:50.018096Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:50.059947Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:26:50.087484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:50.373783Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.018266s 2026-01-04T02:26:50.451252Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2026-01-04T02:26:50.451274Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:50.451281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:50.451375Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:50.855258Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591327247515844106:2143] Handle TEvNavigate describe path dc-1 2026-01-04T02:26:50.855321Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591327251810812182:2727] HANDLE EvNavigateScheme dc-1 2026-01-04T02:26:50.855665Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591327251810812182:2727] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:50.994703Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591327251810812182:2727] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-04T02:26:51.001744Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591327251810812182:2727] Handle TEvDescribeSchemeResult Forward to# [1:7591327251810812181:2726] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:26:51.010619Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327247515844106:2143] Handle TEvProposeTransaction 2026-01-04T02:26:51.010645Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591327247515844106:2143] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:26:51.010707Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591327247515844106:2143] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# 
[1:7591327256105779487:2736] 2026-01-04T02:26:51.273133Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591327256105779487:2736] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:26:51.273222Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591327256105779487:2736] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:26:51.273252Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327256105779487:2736] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:26:51.273319Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327256105779487:2736] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:26:51.273634Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327256105779487:2736] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:51.273757Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327256105779487:2736] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-04T02:26:51.274285Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327256105779487:2736] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:26:51.274547Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7591327256105779487:2736] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:26:51.275170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:51.282968Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327256105779487:2736] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:26:51.283028Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7591327256105779487:2736] txid# 281474976715657 SEND to# [1:7591327256105779486:2735] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:26:51.337961Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327247515844106:2143] Handle TEvProposeTransaction 2026-01-04T02:26:51.337982Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591327247515844106:2143] TxId# 281474976715658 ProcessProposeTransaction 2026-01-04T02:26:51.338009Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591327247515844106:2143] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7591327256105779532:2774] 2026-01-04T02:26:51.340569Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: 
Actor# [1:7591327256105779532:2774] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:26:51.340631Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591327256105779532:2774] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:26:51.340648Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327256105779532:2774] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:26:51.340707Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327256105779532:2774] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:26:51.341090Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327256105779532:2774] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:51.341227Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327256105779532:2774] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:26:51.341311Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327256105779532:2774] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-04T02:26:51.341493Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7591327256105779532:2774] txid# 281474976715658 HANDLE EvClientConnected 2026-01-04T02:26:51.346484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:26:51.350974Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327256105779532:2774] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-04T02:26:51.351021Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7591327256105779532:2774] txid# 281474976715658 SEND to# [1:7591327256105779531:2773] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-04T02:26:51.479818Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: acto ... 
72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:31:27.644818Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328443161484101:2974] txid# 281474976710661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710661 TabletId# 72057594046644480} 2026-01-04T02:31:27.644978Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328443161484101:2974] txid# 281474976710661 HANDLE EvClientConnected 2026-01-04T02:31:27.651194Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328443161484101:2974] txid# 281474976710661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2026-01-04T02:31:27.651351Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7591328443161484101:2974] txid# 281474976710661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:31:27.651387Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328443161484101:2974] txid# 281474976710661 SEND to# [59:7591328443161484026:2339] Source {TEvProposeTransactionStatus txid# 281474976710661 Status# 48} 2026-01-04T02:31:27.701952Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328417391678918:2101] Handle TEvProposeTransaction 2026-01-04T02:31:27.701997Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328417391678918:2101] TxId# 281474976710662 ProcessProposeTransaction 2026-01-04T02:31:27.702058Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328417391678918:2101] Cookie# 0 userReqId# "" txid# 281474976710662 SEND to# [59:7591328443161484125:2986] 2026-01-04T02:31:27.705442Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328443161484125:2986] txid# 281474976710662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:34386" 2026-01-04T02:31:27.705523Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328443161484125:2986] txid# 281474976710662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-04T02:31:27.705545Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7591328443161484125:2986] txid# 281474976710662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:31:27.705601Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328443161484125:2986] txid# 281474976710662 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:31:27.706343Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328443161484125:2986] txid# 281474976710662 HANDLE EvNavigateKeySetResult 
TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:31:27.706445Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328443161484125:2986] HANDLE EvNavigateKeySetResult, txid# 281474976710662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:31:27.769253Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328443161484125:2986] txid# 281474976710662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710662 TabletId# 72057594046644480} 2026-01-04T02:31:27.769618Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328443161484125:2986] txid# 281474976710662 HANDLE EvClientConnected 2026-01-04T02:31:27.775877Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328443161484125:2986] txid# 281474976710662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710662} 2026-01-04T02:31:27.775936Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328443161484125:2986] txid# 281474976710662 SEND to# [59:7591328443161484124:2331] Source {TEvProposeTransactionStatus txid# 281474976710662 Status# 48} 2026-01-04T02:31:27.788898Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328417391678918:2101] Handle TEvProposeTransaction 2026-01-04T02:31:27.788928Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328417391678918:2101] TxId# 281474976710663 ProcessProposeTransaction 2026-01-04T02:31:27.788973Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328417391678918:2101] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [59:7591328443161484139:2996] 2026-01-04T02:31:27.792048Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328443161484139:2996] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "" NewOwner: "db_admin@builtin" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:39096" 2026-01-04T02:31:27.792116Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328443161484139:2996] txid# 281474976710663 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-04T02:31:27.792137Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7591328443161484139:2996] txid# 281474976710663 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:31:27.792210Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328443161484139:2996] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:31:27.792571Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328443161484139:2996] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:31:27.792661Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328443161484139:2996] HANDLE EvNavigateKeySetResult, txid# 281474976710663 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 
2026-01-04T02:31:27.792745Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328443161484139:2996] txid# 281474976710663 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710663 TabletId# 72057594046644480} 2026-01-04T02:31:27.792904Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328443161484139:2996] txid# 281474976710663 HANDLE EvClientConnected 2026-01-04T02:31:27.793334Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:31:27.799494Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328443161484139:2996] txid# 281474976710663 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710663} 2026-01-04T02:31:27.799558Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328443161484139:2996] txid# 281474976710663 SEND to# [59:7591328443161484138:2344] Source {TEvProposeTransactionStatus txid# 281474976710663 Status# 48} 2026-01-04T02:31:27.934531Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328417391678918:2101] Handle TEvProposeTransaction 2026-01-04T02:31:27.934566Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328417391678918:2101] TxId# 281474976710664 ProcessProposeTransaction 2026-01-04T02:31:27.934614Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328417391678918:2101] Cookie# 0 userReqId# "" txid# 281474976710664 SEND to# [59:7591328443161484170:3010] 2026-01-04T02:31:27.938734Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328443161484170:3010] txid# 281474976710664 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\020db_admin@builtin\022\030\022\026\n\024all-users@well-known\032\020db_admin@builtin\"\007Builtin*\027db_a****ltin (DEFA2CD5)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:34410" 2026-01-04T02:31:27.938821Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328443161484170:3010] txid# 281474976710664 Bootstrap, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-04T02:31:27.938847Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7591328443161484170:3010] txid# 281474976710664 Bootstrap, UserSID: db_admin@builtin IsClusterAdministrator: 0 2026-01-04T02:31:27.939033Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7591328443161484170:3010] txid# 281474976710664 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-01-04T02:31:27.939089Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7591328443161484170:3010] txid# 281474976710664 HandleResolveDatabase, UserSID: db_admin@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 1 DatabaseOwner: db_admin@builtin 2026-01-04T02:31:27.939142Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328443161484170:3010] txid# 281474976710664 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:31:27.939467Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328443161484170:3010] txid# 281474976710664 HANDLE 
EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:31:27.939563Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328443161484170:3010] HANDLE EvNavigateKeySetResult, txid# 281474976710664 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:31:27.939658Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328443161484170:3010] txid# 281474976710664 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710664 TabletId# 72057594046644480} 2026-01-04T02:31:27.939813Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328443161484170:3010] txid# 281474976710664 HANDLE EvClientConnected 2026-01-04T02:31:27.943383Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328443161484170:3010] txid# 281474976710664 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710664} 2026-01-04T02:31:27.943441Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328443161484170:3010] txid# 281474976710664 SEND to# [59:7591328443161484169:2349] Source {TEvProposeTransactionStatus txid# 281474976710664 Status# 48} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [FAIL] Test command err: Starting YDB, grpc: 2805, msgbus: 25688 2026-01-04T02:27:05.868566Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327318146931964:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:05.868699Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:27:06.313071Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:27:06.484395Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:27:06.668896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:27:06.669030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:27:06.798451Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:06.809915Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327318146931736:2082] 1767493625822959 != 1767493625822962 2026-01-04T02:27:06.859109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:27:06.860674Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:27:07.209421Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:27:07.278370Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:27:07.278391Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:27:07.278397Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:27:07.278492Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:27:08.004391Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591327318146931996:2113] Handle TEvNavigate describe path dc-1 2026-01-04T02:27:08.004442Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591327331031834728:2735] HANDLE EvNavigateScheme dc-1 2026-01-04T02:27:08.004865Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591327331031834728:2735] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:27:08.141506Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591327331031834728:2735] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-04T02:27:08.151607Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591327331031834728:2735] Handle TEvDescribeSchemeResult Forward to# [1:7591327331031834727:2734] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:27:08.174937Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327318146931996:2113] Handle TEvProposeTransaction 
2026-01-04T02:27:08.174965Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591327318146931996:2113] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:27:08.175063Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591327318146931996:2113] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7591327331031834733:2740] 2026-01-04T02:27:08.371776Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591327331031834733:2740] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:27:08.371863Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591327331031834733:2740] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2026-01-04T02:27:08.371880Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327331031834733:2740] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:27:08.371943Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327331031834733:2740] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:27:08.372252Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327331031834733:2740] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:27:08.372353Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327331031834733:2740] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-04T02:27:08.372432Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327331031834733:2740] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:27:08.372677Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7591327331031834733:2740] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:27:08.373389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:27:08.377274Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327331031834733:2740] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:27:08.377329Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7591327331031834733:2740] txid# 281474976715657 SEND to# [1:7591327331031834732:2739] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:27:08.423043Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327318146931996:2113] Handle TEvProposeTransaction 2026-01-04T02:27:08.423066Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# 
[1:7591327318146931996:2113] TxId# 281474976715658 ProcessProposeTransaction 2026-01-04T02:27:08.423129Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591327318146931996:2113] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7591327331031834775:2773] 2026-01-04T02:27:08.425596Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591327331031834775:2773] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:27:08.425657Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591327331031834775:2773] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2026-01-04T02:27:08.425673Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327331031834775:2773] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:27:08.425760Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327331031834775:2773] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:27:08.426086Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327331031834775:2773] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:27:08.426203Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327331031834775:2773] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:27:08.426258Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327331031834775:2773] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-04T02:27:08.426415Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7591327331031834775:2773] txid# 281474976715658 HANDLE EvClientConnected 2026-01-04T02:27:08.427041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:27:08.437766Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327331031834775:2773] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 2814 ... 
r# [59:7591328503859868728:2732] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:31:41.983152Z node 59 :TX_PROXY DEBUG: describe.cpp:433: Actor# [59:7591328503859868728:2732] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-04T02:31:41.984077Z node 59 :TX_PROXY DEBUG: describe.cpp:446: Actor# [59:7591328503859868728:2732] Handle TEvDescribeSchemeResult Forward to# [59:7591328503859868727:2731] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:31:41.985566Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328503859867718:2109] Handle TEvProposeTransaction 2026-01-04T02:31:41.985594Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328503859867718:2109] TxId# 281474976710657 ProcessProposeTransaction 2026-01-04T02:31:41.985650Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328503859867718:2109] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [59:7591328503859868733:2737] 2026-01-04T02:31:41.988477Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328503859868733:2737] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:31:41.988550Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328503859868733:2737] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 
2026-01-04T02:31:41.988574Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7591328503859868733:2737] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:31:41.988631Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328503859868733:2737] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:31:41.989007Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328503859868733:2737] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:31:41.989085Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328503859868733:2737] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-04T02:31:41.989149Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328503859868733:2737] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-04T02:31:41.989269Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328503859868733:2737] txid# 281474976710657 HANDLE EvClientConnected 2026-01-04T02:31:41.990773Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:41.994937Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328503859868733:2737] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-04T02:31:41.995196Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328503859868733:2737] txid# 281474976710657 SEND to# [59:7591328503859868732:2736] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-04T02:31:42.018343Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328503859867718:2109] Handle TEvProposeTransaction 2026-01-04T02:31:42.018372Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328503859867718:2109] TxId# 281474976710658 ProcessProposeTransaction 2026-01-04T02:31:42.018405Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328503859867718:2109] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [59:7591328508154836064:2769] 2026-01-04T02:31:42.021694Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328508154836064:2769] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:31:42.021779Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328508154836064:2769] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-04T02:31:42.021896Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# 
[59:7591328508154836064:2769] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:31:42.021963Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328508154836064:2769] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:31:42.022353Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328508154836064:2769] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:31:42.022494Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328508154836064:2769] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:31:42.022579Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328508154836064:2769] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-04T02:31:42.022758Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328508154836064:2769] txid# 281474976710658 HANDLE EvClientConnected 2026-01-04T02:31:42.023504Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:31:42.032123Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328508154836064:2769] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-01-04T02:31:42.032193Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328508154836064:2769] txid# 281474976710658 SEND to# [59:7591328508154836063:2768] Source {TEvProposeTransactionStatus txid# 281474976710658 Status# 48} 2026-01-04T02:31:42.099271Z node 59 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:48.338475Z node 59 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01ke3dh7qs3j49qcjy0d8wgycm", Request deadline has expired for 1.286640s seconds assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:29500 TBackTrace::Capture()+28 (0x1B12E21C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B61D6EC) NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+1840 (0x1AD066D0) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+1751 (0x1AD1DEB7) std::__y1::__function::__func const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>, std::__y1::allocator const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>>, void (NUnitTest::TTestContext&)>::operator()(NUnitTest::TTestContext&)+230 (0x1AD618A6) std::__y1::__function::__func, void ()>::operator()()+280 (0x1AD4F318) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B65637A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B6243C8) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1229 (0x1AD4E41D) NUnitTest::TTestFactory::Execute()+2176 (0x1B625B80) NUnitTest::RunMain(int, char**)+5805 (0x1B6501DD) ??+0 (0x7FD869E53D90) __libc_start_main+128 (0x7FD869E53E40) _start+41 (0x18703029) |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [FAIL] Test command err: Starting YDB, grpc: 28573, msgbus: 16370 2026-01-04T02:26:49.353822Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327249601931120:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:49.353884Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:26:49.913927Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:26:50.092299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:50.092397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:50.137410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:50.279935Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:26:50.281010Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:50.565956Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:50.706391Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-01-04T02:26:50.706415Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:50.706424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:50.706507Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:50.780949Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:26:51.182763Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591327249601931268:2118] Handle TEvNavigate describe path dc-1 2026-01-04T02:26:51.182830Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591327258191866762:2747] HANDLE EvNavigateScheme dc-1 2026-01-04T02:26:51.183301Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591327258191866762:2747] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:51.289026Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591327258191866762:2747] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-04T02:26:51.294998Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591327258191866762:2747] Handle TEvDescribeSchemeResult Forward to# [1:7591327258191866761:2746] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:26:51.302497Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327249601931268:2118] Handle TEvProposeTransaction 2026-01-04T02:26:51.302527Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591327249601931268:2118] TxId# 281474976710657 
ProcessProposeTransaction 2026-01-04T02:26:51.302591Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591327249601931268:2118] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7591327258191866768:2753] 2026-01-04T02:26:51.462244Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591327258191866768:2753] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:26:51.462335Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591327258191866768:2753] txid# 281474976710657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:26:51.462356Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327258191866768:2753] txid# 281474976710657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:26:51.462429Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327258191866768:2753] txid# 281474976710657 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:26:51.462745Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327258191866768:2753] txid# 281474976710657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:51.462839Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327258191866768:2753] HANDLE EvNavigateKeySetResult, txid# 281474976710657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-04T02:26:51.462907Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327258191866768:2753] txid# 281474976710657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710657 TabletId# 72057594046644480} 2026-01-04T02:26:51.463034Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7591327258191866768:2753] txid# 281474976710657 HANDLE EvClientConnected 2026-01-04T02:26:51.463611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:51.466418Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327258191866768:2753] txid# 281474976710657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976710657} 2026-01-04T02:26:51.466468Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7591327258191866768:2753] txid# 281474976710657 SEND to# [1:7591327258191866767:2752] Source {TEvProposeTransactionStatus txid# 281474976710657 Status# 53} 2026-01-04T02:26:51.483383Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327249601931268:2118] Handle TEvProposeTransaction 2026-01-04T02:26:51.483406Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591327249601931268:2118] TxId# 281474976710658 ProcessProposeTransaction 2026-01-04T02:26:51.483432Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: 
actor# [1:7591327249601931268:2118] Cookie# 0 userReqId# "" txid# 281474976710658 SEND to# [1:7591327258191866806:2787] 2026-01-04T02:26:51.485888Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591327258191866806:2787] txid# 281474976710658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:26:51.485928Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591327258191866806:2787] txid# 281474976710658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:26:51.485943Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327258191866806:2787] txid# 281474976710658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:26:51.485990Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327258191866806:2787] txid# 281474976710658 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:26:51.486246Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327258191866806:2787] txid# 281474976710658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:51.486360Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327258191866806:2787] HANDLE EvNavigateKeySetResult, txid# 281474976710658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:26:51.486418Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327258191866806:2787] txid# 281474976710658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976710658 TabletId# 72057594046644480} 2026-01-04T02:26:51.486545Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7591327258191866806:2787] txid# 281474976710658 HANDLE EvClientConnected 2026-01-04T02:26:51.487096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:26:51.492160Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327258191866806:2787] txid# 281474976710658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976710658} 2026-01-04T02:26:51.492212Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7591327258191866806:2787] txid# 281474976710658 SEND to# [1:7591327258191866805:2786] Source {TEvProposeTran ... 
6715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-04T02:31:15.551297Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328392018150661:2731] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:31:15.551458Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328392018150661:2731] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:31:15.552184Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:15.555621Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328392018150661:2731] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:31:15.555677Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328392018150661:2731] txid# 281474976715657 SEND to# [59:7591328392018150660:2730] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:31:15.569944Z node 59 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:31:15.594428Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328387723182380:2116] Handle TEvProposeTransaction 2026-01-04T02:31:15.594475Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328387723182380:2116] TxId# 281474976715658 ProcessProposeTransaction 2026-01-04T02:31:15.594515Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328387723182380:2116] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [59:7591328392018150718:2767] 2026-01-04T02:31:15.602015Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328392018150718:2767] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:31:15.602120Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328392018150718:2767] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-04T02:31:15.602144Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7591328392018150718:2767] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:31:15.602225Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328392018150718:2767] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:31:15.602621Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328392018150718:2767] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# 
P5 ErrorCount# 0 2026-01-04T02:31:15.602753Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328392018150718:2767] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:31:15.602838Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328392018150718:2767] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-04T02:31:15.603019Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328392018150718:2767] txid# 281474976715658 HANDLE EvClientConnected 2026-01-04T02:31:15.603757Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:31:15.623734Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328392018150718:2767] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-04T02:31:15.623958Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328392018150718:2767] txid# 281474976715658 SEND to# [59:7591328392018150717:2766] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-04T02:31:15.686982Z node 59 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:15.696021Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328387723182380:2116] Handle TEvProposeTransaction 2026-01-04T02:31:15.696060Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328387723182380:2116] TxId# 281474976715659 ProcessProposeTransaction 2026-01-04T02:31:15.696115Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328387723182380:2116] Cookie# 0 userReqId# "" txid# 281474976715659 SEND to# [59:7591328392018150782:2823] 2026-01-04T02:31:15.699519Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328392018150782:2823] txid# 281474976715659 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\001\022\026\032\024ordinaryuser@builtin\n\"\010\000\022\036\010\001\020\200\200\002\032\024ordinaryuser@builtin \000" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:60316" 2026-01-04T02:31:15.699597Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328392018150782:2823] txid# 281474976715659 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-04T02:31:15.699620Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7591328392018150782:2823] txid# 281474976715659 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:31:15.699680Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328392018150782:2823] txid# 281474976715659 TEvNavigateKeySet requested from SchemeCache 
2026-01-04T02:31:15.700056Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328392018150782:2823] txid# 281474976715659 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:31:15.700213Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328392018150782:2823] HANDLE EvNavigateKeySetResult, txid# 281474976715659 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:31:15.702371Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328392018150782:2823] txid# 281474976715659 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715659 TabletId# 72057594046644480} 2026-01-04T02:31:15.702567Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328392018150782:2823] txid# 281474976715659 HANDLE EvClientConnected 2026-01-04T02:31:15.703318Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:31:15.714148Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328392018150782:2823] txid# 281474976715659 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715659} 2026-01-04T02:31:15.714210Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328392018150782:2823] txid# 281474976715659 SEND to# [59:7591328392018150781:2306] Source {TEvProposeTransactionStatus txid# 281474976715659 Status# 48} 2026-01-04T02:31:19.646119Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7591328387723182451:2160];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:19.646210Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:21.648976Z node 59 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01ke3dge0p6n8rc8s4sbhyzq3n", Request deadline has expired for 0.915133s seconds assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:24811 TBackTrace::Capture()+28 (0x1B12E21C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B61D6EC) NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+1840 (0x1AD066D0) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+3148 (0x1AD1E42C) std::__y1::__function::__func const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>, std::__y1::allocator const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>>, void (NUnitTest::TTestContext&)>::operator()(NUnitTest::TTestContext&)+230 (0x1AD618A6) std::__y1::__function::__func, void ()>::operator()()+280 (0x1AD4F318) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B65637A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B6243C8) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1229 (0x1AD4E41D) NUnitTest::TTestFactory::Execute()+2176 (0x1B625B80) NUnitTest::RunMain(int, char**)+5805 (0x1B6501DD) ??+0 (0x7FBA70CC7D90) __libc_start_main+128 (0x7FBA70CC7E40) _start+41 (0x18703029) |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [FAIL] Test command err: Starting YDB, grpc: 25807, msgbus: 1733 2026-01-04T02:26:47.425942Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327239290062584:2267];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:47.426051Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:26:48.036450Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:26:48.114104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:48.114241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:48.265907Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:26:48.268415Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:48.276516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:48.415694Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:48.417139Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.010947s 2026-01-04T02:26:48.687076Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:26:48.687098Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:48.687104Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:48.698089Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:48.864537Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:26:49.143217Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591327239290062592:2119] Handle TEvNavigate describe path dc-1 2026-01-04T02:26:49.143273Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591327247879998011:2736] HANDLE EvNavigateScheme dc-1 2026-01-04T02:26:49.143646Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591327247879998011:2736] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:49.196319Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591327247879998011:2736] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-04T02:26:49.208480Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591327247879998011:2736] Handle TEvDescribeSchemeResult Forward to# [1:7591327247879998010:2735] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:26:49.213863Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327239290062592:2119] Handle TEvProposeTransaction 
2026-01-04T02:26:49.213894Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591327239290062592:2119] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:26:49.213980Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591327239290062592:2119] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7591327247879998016:2741] 2026-01-04T02:26:49.396604Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591327247879998016:2741] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:26:49.402842Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591327247879998016:2741] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:26:49.402874Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327247879998016:2741] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:26:49.402949Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327247879998016:2741] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:26:49.403258Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327247879998016:2741] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:49.403383Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327247879998016:2741] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-04T02:26:49.403465Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327247879998016:2741] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:26:49.403591Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7591327247879998016:2741] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:26:49.404246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:49.407981Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327247879998016:2741] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:26:49.408034Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7591327247879998016:2741] txid# 281474976715657 SEND to# [1:7591327247879998015:2740] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:26:49.445248Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327239290062592:2119] Handle TEvProposeTransaction 2026-01-04T02:26:49.445273Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# 
[1:7591327239290062592:2119] TxId# 281474976715658 ProcessProposeTransaction 2026-01-04T02:26:49.445307Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591327239290062592:2119] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7591327247879998052:2774] 2026-01-04T02:26:49.447862Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591327247879998052:2774] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:26:49.447932Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591327247879998052:2774] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:26:49.447949Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327247879998052:2774] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:26:49.448013Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327247879998052:2774] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:26:49.448893Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327247879998052:2774] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:49.449005Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327247879998052:2774] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:26:49.449066Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327247879998052:2774] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-04T02:26:49.449195Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7591327247879998052:2774] txid# 281474976715658 HANDLE EvClientConnected 2026-01-04T02:26:49.449849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:26:49.455048Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327247879998052:2774] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-04T02:26:49.455089Z node 1 :TX_PROXY DEBUG: schemer ... 
hemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-04T02:31:23.032774Z node 59 :TX_PROXY DEBUG: describe.cpp:446: Actor# [59:7591328424467129058:2754] Handle TEvDescribeSchemeResult Forward to# [59:7591328424467129057:2753] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:31:23.038334Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328415877193401:2092] Handle TEvProposeTransaction 2026-01-04T02:31:23.038368Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328415877193401:2092] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:31:23.038421Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328415877193401:2092] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [59:7591328424467129063:2759] 2026-01-04T02:31:23.041437Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328424467129063:2759] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:31:23.041522Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328424467129063:2759] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-04T02:31:23.041544Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7591328424467129063:2759] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:31:23.041602Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# 
[59:7591328424467129063:2759] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:31:23.042023Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328424467129063:2759] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:31:23.042112Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328424467129063:2759] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-04T02:31:23.042211Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328424467129063:2759] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:31:23.042385Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328424467129063:2759] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:31:23.043057Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:23.055713Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328424467129063:2759] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:31:23.055774Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328424467129063:2759] txid# 281474976715657 SEND to# [59:7591328424467129062:2758] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:31:23.117298Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328415877193401:2092] Handle TEvProposeTransaction 2026-01-04T02:31:23.117332Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328415877193401:2092] TxId# 281474976715658 ProcessProposeTransaction 2026-01-04T02:31:23.117375Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328415877193401:2092] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [59:7591328424467129098:2791] 2026-01-04T02:31:23.120286Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328424467129098:2791] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:31:23.120363Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328424467129098:2791] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-04T02:31:23.120386Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7591328424467129098:2791] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:31:23.120452Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328424467129098:2791] txid# 281474976715658 TEvNavigateKeySet requested from 
SchemeCache 2026-01-04T02:31:23.120819Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328424467129098:2791] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:31:23.120939Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328424467129098:2791] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:31:23.121019Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328424467129098:2791] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-04T02:31:23.121172Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328424467129098:2791] txid# 281474976715658 HANDLE EvClientConnected 2026-01-04T02:31:23.129947Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:31:23.136374Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328424467129098:2791] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-04T02:31:23.136438Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328424467129098:2791] txid# 281474976715658 SEND to# [59:7591328424467129097:2790] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-04T02:31:26.768090Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7591328415877193378:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:26.768198Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:29.340983Z node 59 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01ke3dgnas4fgmbep3sgjhwq2d", Request deadline has expired for 1.177144s seconds assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:22416 TBackTrace::Capture()+28 (0x1B12E21C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B61D6EC) NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+1840 (0x1AD066D0) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+1751 (0x1AD1DEB7) std::__y1::__function::__func const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>, std::__y1::allocator const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>>, void (NUnitTest::TTestContext&)>::operator()(NUnitTest::TTestContext&)+230 (0x1AD618A6) std::__y1::__function::__func, void ()>::operator()()+280 (0x1AD4F318) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B65637A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B6243C8) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1229 (0x1AD4E41D) NUnitTest::TTestFactory::Execute()+2176 (0x1B625B80) NUnitTest::RunMain(int, char**)+5805 (0x1B6501DD) ??+0 (0x7F6A187BBD90) __libc_start_main+128 (0x7F6A187BBE40) _start+41 (0x18703029) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-72 [GOOD] Test command err: Starting YDB, grpc: 32299, msgbus: 21619 2026-01-04T02:26:46.464550Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327236699295711:2265];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:46.464597Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:26:46.527841Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:26:46.996182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:26:47.038876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:47.038989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:47.114004Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:47.114848Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591327236699295457:2082] 1767493606374488 != 1767493606374491 2026-01-04T02:26:47.115251Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:47.297863Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
dc-1/.metadata/script_executions 2026-01-04T02:26:47.387004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:26:47.387028Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:47.387035Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:47.390646Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:47.461596Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:47.638510Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591327236699295727:2118] Handle TEvNavigate describe path dc-1 2026-01-04T02:26:47.638563Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591327240994263856:2739] HANDLE EvNavigateScheme dc-1 2026-01-04T02:26:47.638928Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591327240994263856:2739] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:47.683536Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591327240994263856:2739] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-04T02:26:47.689249Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591327240994263856:2739] Handle TEvDescribeSchemeResult Forward to# [1:7591327240994263855:2738] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:26:47.690774Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327236699295727:2118] Handle TEvProposeTransaction 2026-01-04T02:26:47.690802Z 
node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591327236699295727:2118] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:26:47.690863Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591327236699295727:2118] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7591327240994263861:2744] 2026-01-04T02:26:47.825563Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591327240994263861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:26:47.825661Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591327240994263861:2744] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:26:47.825679Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327240994263861:2744] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:26:47.825738Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327240994263861:2744] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:26:47.826387Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327240994263861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:47.826535Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327240994263861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-04T02:26:47.826663Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327240994263861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:26:47.826823Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7591327240994263861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:26:47.827422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:47.830888Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327240994263861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:26:47.830986Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7591327240994263861:2744] txid# 281474976715657 SEND to# [1:7591327240994263860:2743] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:26:47.847646Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327236699295727:2118] Handle TEvProposeTransaction 2026-01-04T02:26:47.847675Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591327236699295727:2118] TxId# 281474976715658 
ProcessProposeTransaction 2026-01-04T02:26:47.847705Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591327236699295727:2118] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7591327240994263898:2775] 2026-01-04T02:26:47.850229Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591327240994263898:2775] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:26:47.850280Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591327240994263898:2775] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:26:47.850296Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327240994263898:2775] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:26:47.850355Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327240994263898:2775] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:26:47.851165Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327240994263898:2775] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:47.851268Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327240994263898:2775] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:26:47.851328Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327240994263898:2775] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-04T02:26:47.851452Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7591327240994263898:2775] txid# 281474976715658 HANDLE EvClientConnected 2026-01-04T02:26:47.852056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:26:47.860959Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327240994263898:2775] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01 ... 
TEvProposeTransaction 2026-01-04T02:30:59.374435Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328296502230902:2113] TxId# 281474976715661 ProcessProposeTransaction 2026-01-04T02:30:59.374495Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328296502230902:2113] Cookie# 0 userReqId# "" txid# 281474976715661 SEND to# [59:7591328322272036094:3014] 2026-01-04T02:30:59.378589Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328322272036094:3014] txid# 281474976715661 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool ModifyACL { Name: "default" DiffACL: "\n\032\010\000\022\026\010\001\020\377\317\003\032\014root@builtin \003\n!\010\000\022\035\010\001\020\201\004\032\024all-users@well-known \003\n\031\010\000\022\025\010\001\020\201\004\032\014root@builtin \003" NewOwner: "metadata@system" } Internal: true CreateResourcePool { Name: "default" Properties { Properties { key: "concurrent_query_limit" value: "-1" } Properties { key: "database_load_cpu_threshold" value: "-1" } Properties { key: "query_cancel_after_seconds" value: "0" } Properties { key: "query_cpu_limit_percent_per_node" value: "-1" } Properties { key: "query_memory_limit_percent_per_node" value: "-1" } Properties { key: "queue_size" value: "-1" } Properties { key: "resource_weight" value: "-1" } Properties { key: "total_cpu_limit_percent_per_node" value: "-1" } } } } } UserToken: "\n\017metadata@system\022\000" DatabaseName: "/dc-1" 2026-01-04T02:30:59.378659Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328322272036094:3014] txid# 281474976715661 Bootstrap, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-04T02:30:59.378683Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7591328322272036094:3014] txid# 281474976715661 Bootstrap, UserSID: metadata@system IsClusterAdministrator: 0 2026-01-04T02:30:59.378876Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7591328322272036094:3014] txid# 281474976715661 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-01-04T02:30:59.378908Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7591328322272036094:3014] txid# 281474976715661 HandleResolveDatabase, UserSID: metadata@system CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-01-04T02:30:59.385968Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [59:7591328322272036094:3014] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T02:30:59.386132Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328322272036094:3014] txid# 281474976715661 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:30:59.386420Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328322272036094:3014] txid# 281474976715661 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:30:59.386543Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328322272036094:3014] HANDLE EvNavigateKeySetResult, txid# 281474976715661 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:30:59.386711Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# 
[59:7591328322272036094:3014] txid# 281474976715661 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715661 TabletId# 72057594046644480} 2026-01-04T02:30:59.386996Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328322272036094:3014] txid# 281474976715661 HANDLE EvClientConnected 2026-01-04T02:30:59.395840Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328322272036094:3014] txid# 281474976715661 Status StatusAlreadyExists HANDLE {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715661 Reason# Check failed: path: '/dc-1/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)} 2026-01-04T02:30:59.396022Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7591328322272036094:3014] txid# 281474976715661, issues: { message: "Check failed: path: \'/dc-1/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:30:59.396065Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328322272036094:3014] txid# 281474976715661 SEND to# [59:7591328322272036016:2336] Source {TEvProposeTransactionStatus txid# 281474976715661 Status# 48} 2026-01-04T02:30:59.429776Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328296502230902:2113] Handle TEvProposeTransaction 2026-01-04T02:30:59.429876Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328296502230902:2113] TxId# 281474976715662 ProcessProposeTransaction 2026-01-04T02:30:59.429945Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328296502230902:2113] Cookie# 0 userReqId# "" txid# 281474976715662 SEND to# [59:7591328322272036118:3026] 2026-01-04T02:30:59.433521Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328322272036118:3026] txid# 281474976715662 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { CreateUser { User: "targetuser" Password: "passwd" CanLogin: true HashedPassword: "" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv6:[::1]:52650" 2026-01-04T02:30:59.433609Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328322272036118:3026] txid# 281474976715662 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-04T02:30:59.433635Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7591328322272036118:3026] txid# 281474976715662 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:30:59.433699Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328322272036118:3026] txid# 281474976715662 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:30:59.434810Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328322272036118:3026] txid# 281474976715662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:30:59.434922Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328322272036118:3026] HANDLE EvNavigateKeySetResult, txid# 281474976715662 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 
PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:30:59.511152Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328322272036118:3026] txid# 281474976715662 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715662 TabletId# 72057594046644480} 2026-01-04T02:30:59.511375Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328322272036118:3026] txid# 281474976715662 HANDLE EvClientConnected 2026-01-04T02:30:59.523429Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328322272036118:3026] txid# 281474976715662 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715662} 2026-01-04T02:30:59.523497Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328322272036118:3026] txid# 281474976715662 SEND to# [59:7591328322272036117:2329] Source {TEvProposeTransactionStatus txid# 281474976715662 Status# 48} 2026-01-04T02:30:59.625932Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328296502230902:2113] Handle TEvProposeTransaction 2026-01-04T02:30:59.625976Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328296502230902:2113] TxId# 281474976715663 ProcessProposeTransaction 2026-01-04T02:30:59.626028Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328296502230902:2113] Cookie# 0 userReqId# "" txid# 281474976715663 SEND to# [59:7591328322272036151:3041] 2026-01-04T02:30:59.629674Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328322272036151:3041] txid# 281474976715663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { RemoveUser { User: "targetuser" MissingOk: false } } } } UserToken: "\n\024ordinaryuser@builtin\022\030\022\026\n\024all-users@well-known\032\024ordinaryuser@builtin\"\007Builtin*\027ordi****ltin (32520BBF)0\000" DatabaseName: "/dc-1" RequestType: "" PeerName: "ipv4:127.0.0.1:38368" 2026-01-04T02:30:59.629765Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328322272036151:3041] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-04T02:30:59.629794Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7591328322272036151:3041] txid# 281474976715663 Bootstrap, UserSID: ordinaryuser@builtin IsClusterAdministrator: 0 2026-01-04T02:30:59.630405Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1582: Actor# [59:7591328322272036151:3041] txid# 281474976715663 HandleResolveDatabase, ResultSet size: 1 ResultSet error count: 0 2026-01-04T02:30:59.630465Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1617: Actor# [59:7591328322272036151:3041] txid# 281474976715663 HandleResolveDatabase, UserSID: ordinaryuser@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 IsClusterAdministrator: 0 IsDatabaseAdministrator: 0 DatabaseOwner: root@builtin 2026-01-04T02:30:59.630527Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328322272036151:3041] txid# 281474976715663 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:30:59.630886Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328322272036151:3041] txid# 281474976715663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:30:59.630919Z node 59 :TX_PROXY ERROR: schemereq.cpp:1235: Actor# [59:7591328322272036151:3041] txid# 281474976715663, Access denied for ordinaryuser@builtin, 
attempt to manage user 2026-01-04T02:30:59.631036Z node 59 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [59:7591328322272036151:3041] txid# 281474976715663, issues: { message: "Access denied for ordinaryuser@builtin" issue_code: 200000 severity: 1 } 2026-01-04T02:30:59.631070Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328322272036151:3041] txid# 281474976715663 SEND to# [59:7591328322272036150:2346] Source {TEvProposeTransactionStatus Status# 5} 2026-01-04T02:30:59.631948Z node 59 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=59&id=NDk3YzgwNjgtNzc5MjI2OTItZTNiODI5ZjQtMjNlZjc3NmY=, ActorId: [59:7591328322272036136:2346], ActorState: ExecuteState, LegacyTraceId: 01ke3dfy8b05wqrvggeg58z8vg, Create QueryResponse for error on request, msg: status# UNAUTHORIZED issues# { message: "Executing ESchemeOpAlterLogin" issue_code: 2018 severity: 1 issues { message: "Access denied for scheme request" issue_code: 2018 severity: 1 issues { message: "Access denied." severity: 1 } } } trace_id# 2026-01-04T02:30:59.632652Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [59:7591328296502230902:2113] Handle TEvExecuteKqpTransaction 2026-01-04T02:30:59.632676Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [59:7591328296502230902:2113] TxId# 281474976715664 ProcessProposeKqpTransaction |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> TExportToS3Tests::ShouldSucceedOnConcurrentImport [GOOD] |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest |87.2%| [TA] $(B)/ydb/tests/olap/column_family/compression/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SetBoundsBadRange [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:32:43.484463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:32:43.484575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:43.484621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:32:43.484672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:32:43.484721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:32:43.484758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:32:43.484826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:43.484898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:32:43.486179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:32:43.486525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:32:43.577489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:32:43.577536Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:43.593567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:32:43.593734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:32:43.593904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:32:43.605016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:32:43.605267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:32:43.606056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2026-01-04T02:32:43.606331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:32:43.609663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:43.609877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:43.611062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:43.611127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:43.611242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:43.611310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:43.611419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:43.611613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:32:43.763900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.764942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.765085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.765168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.765246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.765316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.765396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.765459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.765574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.765678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.765748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.765841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.765937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.766019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.766114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } } } TxId: 110 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:32:44.904597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 110:0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.904821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 110:1, propose status:StatusInvalidParameter, reason: Last patrition 2 doesn't have the highest bound "AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9", at schemeshard: 72057594046678944 2026-01-04T02:32:44.907779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 110, response: Status: StatusInvalidParameter Reason: "Last patrition 2 doesn\'t have the highest bound \"AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9\"" TxId: 110 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:32:44.908080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 110, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Last patrition 2 doesn't have the highest bound "AA AA AA AA AA AA AA AA AA AA AA AA AA AA AA A9", operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 110, wait until txId: 110 TestWaitNotification wait txId: 110 2026-01-04T02:32:44.908492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 110: send EvNotifyTxCompletion 2026-01-04T02:32:44.908569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 110 2026-01-04T02:32:44.909034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 110, at schemeshard: 72057594046678944 2026-01-04T02:32:44.909143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 110: got EvNotifyTxCompletionResult 2026-01-04T02:32:44.909200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 110: satisfy waiter [1:1016:2936] TestWaitNotification: OK eventTxId 110 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } TestModificationResults wait txId: 112 2026-01-04T02:32:44.913278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } } } TxId: 112 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:32:44.913523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 112:0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.913757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 112:1, propose status:StatusInvalidParameter, reason: Only 1 root partitions has new bounds, required: 3, at schemeshard: 72057594046678944 2026-01-04T02:32:44.919063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 112, response: Status: StatusInvalidParameter Reason: "Only 1 root partitions has new bounds, required: 3" TxId: 112 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:32:44.919347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 112, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Only 1 root partitions has new bounds, required: 3, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2026-01-04T02:32:44.919719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2026-01-04T02:32:44.919793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2026-01-04T02:32:44.920330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2026-01-04T02:32:44.920469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-01-04T02:32:44.920512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:1023:2943] TestWaitNotification: OK eventTxId 112 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { 
Partition: 1 CreatePartition: false } RootPartitionBoundaries { Partition: 2 CreatePartition: false } TestModificationResults wait txId: 114 2026-01-04T02:32:44.924648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { Partition: 1 CreatePartition: false } RootPartitionBoundaries { Partition: 2 CreatePartition: false } } } TxId: 114 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:32:44.924886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 114:0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.925038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 114:1, propose status:StatusInvalidParameter, reason: KeyRange must be specified for root partition bounds, at schemeshard: 72057594046678944 2026-01-04T02:32:44.927649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 114, response: Status: StatusInvalidParameter Reason: "KeyRange must be specified for root partition bounds" TxId: 114 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:32:44.927935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 114, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: KeyRange must be specified for root partition bounds, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 114, wait until txId: 114 TestWaitNotification wait txId: 114 2026-01-04T02:32:44.928304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 114: send EvNotifyTxCompletion 2026-01-04T02:32:44.928350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 114 2026-01-04T02:32:44.928845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 114, at schemeshard: 72057594046678944 2026-01-04T02:32:44.928956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-01-04T02:32:44.929005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [1:1030:2950] TestWaitNotification: OK eventTxId 114 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } TestModificationResults wait txId: 116 2026-01-04T02:32:44.934249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } } } TxId: 116 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:32:44.934571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 116:0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.934766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 116:1, propose status:StatusInvalidParameter, reason: Partitions 0 and 0 have overlapped bounds at point "-inf", at schemeshard: 72057594046678944 2026-01-04T02:32:44.937334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 116, response: Status: StatusInvalidParameter Reason: "Partitions 0 and 0 have overlapped bounds at point \"-inf\"" TxId: 116 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:32:44.937644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 116, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Partitions 0 and 0 have overlapped bounds at point "-inf", operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 116, wait until txId: 116 TestWaitNotification wait txId: 116 2026-01-04T02:32:44.938112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 116: send EvNotifyTxCompletion 2026-01-04T02:32:44.938191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 116 2026-01-04T02:32:44.938701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 116, at schemeshard: 72057594046678944 2026-01-04T02:32:44.938842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2026-01-04T02:32:44.938890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:1037:2957] TestWaitNotification: OK eventTxId 116 >> TExportToS3Tests::Changefeeds |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [FAIL] Test command err: Starting YDB, grpc: 28939, msgbus: 6487 2026-01-04T02:27:10.115369Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327337069319544:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:27:10.125672Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:27:11.008117Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:27:11.113533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:27:11.113647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:27:11.182390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:27:11.317184Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:27:11.431611Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:27:11.436007Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:27:11.806338Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:27:11.806366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:27:11.806378Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:27:11.806487Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:27:12.610028Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591327337069319649:2118] Handle TEvNavigate describe path dc-1 2026-01-04T02:27:12.610100Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591327345659255060:2725] HANDLE EvNavigateScheme dc-1 2026-01-04T02:27:12.610506Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591327345659255060:2725] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:27:12.762127Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591327345659255060:2725] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-04T02:27:12.775832Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591327345659255060:2725] Handle TEvDescribeSchemeResult Forward to# [1:7591327345659255059:2724] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 
PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:27:12.790546Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327337069319649:2118] Handle TEvProposeTransaction 2026-01-04T02:27:12.790572Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591327337069319649:2118] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:27:12.790636Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591327337069319649:2118] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7591327345659255066:2731] 2026-01-04T02:27:13.007071Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591327345659255066:2731] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:27:13.007167Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591327345659255066:2731] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:27:13.007187Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327345659255066:2731] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:27:13.007254Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327345659255066:2731] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:27:13.007629Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327345659255066:2731] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:27:13.007754Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327345659255066:2731] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-04T02:27:13.007834Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327345659255066:2731] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:27:13.007971Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# 
[1:7591327345659255066:2731] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:27:13.008755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:27:13.014807Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327345659255066:2731] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:27:13.014873Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7591327345659255066:2731] txid# 281474976715657 SEND to# [1:7591327345659255065:2730] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:27:13.033501Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327337069319649:2118] Handle TEvProposeTransaction 2026-01-04T02:27:13.033526Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591327337069319649:2118] TxId# 281474976715658 ProcessProposeTransaction 2026-01-04T02:27:13.033578Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591327337069319649:2118] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7591327349954222401:2764] 2026-01-04T02:27:13.036238Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591327349954222401:2764] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:27:13.036311Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591327349954222401:2764] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:27:13.036328Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327349954222401:2764] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:27:13.036380Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327349954222401:2764] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:27:13.036682Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327349954222401:2764] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:27:13.036774Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327349954222401:2764] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:27:13.036838Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327349954222401:2764] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-04T02:27:13.036975Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7591327349954222401:2764] txid# 281474976715658 HANDLE EvClientConnected 2026-01-04T02:27:13.037611Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:27:13.049917Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327349954222401:2764] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-04T02:27:13.049972Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:7591327349954222401:2764] txid# 281474976715658 SEND to# [1:7591327349954222400:2763] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-04T02:27:13.231505Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327337069319649:2118] Handle TEvProposeTransaction 2026-01-04T02:27:13.231531Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237 ... chemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-04T02:31:23.420850Z node 59 :TX_PROXY DEBUG: describe.cpp:446: Actor# [59:7591328427280257543:2727] Handle TEvDescribeSchemeResult Forward to# [59:7591328427280257542:2726] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:31:23.428041Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328422985289307:2104] Handle TEvProposeTransaction 2026-01-04T02:31:23.428075Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328422985289307:2104] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:31:23.428130Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328422985289307:2104] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [59:7591328427280257548:2732] 
2026-01-04T02:31:23.431691Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [59:7591328427280257548:2732] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:31:23.431775Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328427280257548:2732] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-04T02:31:23.431796Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7591328427280257548:2732] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:31:23.431851Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328427280257548:2732] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:31:23.432234Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328427280257548:2732] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:31:23.432321Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328427280257548:2732] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-04T02:31:23.432411Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328427280257548:2732] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:31:23.432551Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328427280257548:2732] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:31:23.433198Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:23.445209Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328427280257548:2732] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:31:23.445279Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328427280257548:2732] txid# 281474976715657 SEND to# [59:7591328427280257547:2731] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:31:23.502753Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [59:7591328422985289307:2104] Handle TEvProposeTransaction 2026-01-04T02:31:23.502788Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [59:7591328422985289307:2104] TxId# 281474976715658 ProcessProposeTransaction 2026-01-04T02:31:23.502832Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [59:7591328422985289307:2104] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [59:7591328427280257584:2765] 2026-01-04T02:31:23.506838Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1822: 
Actor# [59:7591328427280257584:2765] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:31:23.506928Z node 59 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [59:7591328427280257584:2765] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 1 2026-01-04T02:31:23.506955Z node 59 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [59:7591328427280257584:2765] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:31:23.507017Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [59:7591328427280257584:2765] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:31:23.507426Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [59:7591328427280257584:2765] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:31:23.507557Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [59:7591328427280257584:2765] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:31:23.507643Z node 59 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [59:7591328427280257584:2765] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-04T02:31:23.507807Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [59:7591328427280257584:2765] txid# 281474976715658 HANDLE EvClientConnected 2026-01-04T02:31:23.508632Z node 59 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:31:23.521450Z node 59 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [59:7591328427280257584:2765] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult Status# StatusSuccess txid# 281474976715658} 2026-01-04T02:31:23.521536Z node 59 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [59:7591328427280257584:2765] txid# 281474976715658 SEND to# [59:7591328427280257583:2764] Source {TEvProposeTransactionStatus txid# 281474976715658 Status# 48} 2026-01-04T02:31:27.349988Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[59:7591328422985289420:2274];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:27.350071Z node 59 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:29.357367Z node 59 :KQP_PROXY ERROR: kqp_proxy_service.cpp:1359: TraceId: "01ke3dgnp47dq51ewfdw00wda1", Request deadline has expired for 0.820526s seconds assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, 
void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:9738 TBackTrace::Capture()+28 (0x1B12E21C) NUnitTest::NPrivate::RaiseError(char const*, TBasicString> const&, bool)+604 (0x1B61D6EC) NKikimr::NTxProxyUT::CreateLocalUser(NKikimr::NTxProxyUT::TTestEnv const&, TBasicString> const&, TBasicString> const&, TBasicString> const&)+1840 (0x1AD066D0) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase)+1751 (0x1AD1DEB7) std::__y1::__function::__func const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>, std::__y1::allocator const&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase const&>>, void (NUnitTest::TTestContext&)>::operator()(NUnitTest::TTestContext&)+230 (0x1AD618A6) std::__y1::__function::__func, void ()>::operator()()+280 (0x1AD4F318) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x1B65637A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x1B6243C8) NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TCurrentTest::Execute()+1229 (0x1AD4E41D) NUnitTest::TTestFactory::Execute()+2176 (0x1B625B80) NUnitTest::RunMain(int, char**)+5805 (0x1B6501DD) ??+0 (0x7FEF7A01CD90) __libc_start_main+128 (0x7FEF7A01CE40) _start+41 (0x18703029) |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> TExportToS3Tests::ExportTableWithUniqueIndex [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitAndGrowFromTopicWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:32:43.213284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:32:43.213399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:43.213449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:32:43.213494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:32:43.213535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:32:43.213575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:32:43.213640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:43.213703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:32:43.214819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:32:43.215252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:32:43.297022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:32:43.297100Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:43.310918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:32:43.311156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:32:43.311363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:32:43.331940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:32:43.332242Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:32:43.333528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:43.333791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:32:43.337632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:43.337913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:43.339159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:43.339222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:43.339356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:43.339416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:43.339521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:43.339723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:32:43.488657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.489781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.489954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.490054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.490148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.490253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.490337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.490430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.490528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.490648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.490747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.490820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.490888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.490964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:43.491076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
on for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-04T02:32:44.924218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-04T02:32:44.924268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-04T02:32:44.924293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-04T02:32:44.924318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-04T02:32:44.924345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-04T02:32:44.924520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.924725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.924859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-01-04T02:32:44.924916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:32:44.924972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:32:44.925026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-04T02:32:44.925055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:32:44.925189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.925296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.925528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2026-01-04T02:32:44.925794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:32:44.926279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.926480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.926954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.927055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read 
records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.927422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.927669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.927742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.927858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.928084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.928222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.928757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.929076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.929179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.929255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.929420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.929557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.929613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.947445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:44.956741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:44.956847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:44.957455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:44.957570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:44.957638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:44.961291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1076:2978] sender: [1:1136:2058] recipient: [1:15:2062] 2026-01-04T02:32:45.014175Z node 1 :SCHEMESHARD_DESCRIBE 
DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:32:45.014584Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 451us result status StatusSuccess 2026-01-04T02:32:45.015532Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Inactive ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active ParentPartitionIds: 0 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active 
ParentPartitionIds: 0 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::TopicsWithPermissionsExport [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:32:31.832965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:32:31.833036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:31.833070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:32:31.833098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:32:31.833124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:32:31.833145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:32:31.833188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:31.833249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2026-01-04T02:32:31.833933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:32:31.834243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:32:31.903772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:32:31.903825Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:31.916347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:32:31.916542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:32:31.916718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:32:31.926045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:32:31.926340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:32:31.927195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:31.927502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:32:31.932589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:31.932847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:31.934139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:31.934208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:31.934350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:31.934405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:31.934457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:31.934695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:32:32.086544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.087613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.087740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.087814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.087919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.088015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.088118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.088239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.088311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.088386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.088481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.088562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.088634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.088717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.088791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
6-01-04T02:32:43.940212Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 3 2026-01-04T02:32:43.940244Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-04T02:32:43.940318Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2026-01-04T02:32:43.943837Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710758, at schemeshard: 72057594046678944 2026-01-04T02:32:43.943881Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 0/1, is published: true 2026-01-04T02:32:43.943924Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2026-01-04T02:32:43.944631Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2026-01-04T02:32:43.944773Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:32:43.945375Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000044 FAKE_COORDINATOR: advance: minStep5000044 State->FrontStep: 5000043 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000044 2026-01-04T02:32:43.945683Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000044, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:43.945794Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 30064773232 } } Step: 5000044 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:32:43.945859Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000044, at schemeshard: 72057594046678944 2026-01-04T02:32:43.945984Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2026-01-04T02:32:43.946053Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 2026-01-04T02:32:43.946088Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-01-04T02:32:43.946131Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 
2026-01-04T02:32:43.946161Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-01-04T02:32:43.946224Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 7 2026-01-04T02:32:43.946311Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-04T02:32:43.946348Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2026-01-04T02:32:43.946393Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-01-04T02:32:43.946428Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710758:0 2026-01-04T02:32:43.946456Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710758:0 2026-01-04T02:32:43.946529Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-04T02:32:43.946580Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2026-01-04T02:32:43.946612Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 20 2026-01-04T02:32:43.946647Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 43], 18446744073709551615 2026-01-04T02:32:43.947787Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-04T02:32:43.949165Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:43.949209Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:43.949408Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-01-04T02:32:43.949556Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:43.949611Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:211:2212], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2026-01-04T02:32:43.949656Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:211:2212], at schemeshard: 72057594046678944, txId: 
281474976710758, path id: 43 FAKE_COORDINATOR: Erasing txId 281474976710758 2026-01-04T02:32:43.950964Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 20 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-04T02:32:43.951086Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 20 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-04T02:32:43.951138Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2026-01-04T02:32:43.951176Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 20 2026-01-04T02:32:43.951217Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 8 2026-01-04T02:32:43.951626Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-04T02:32:43.951683Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-04T02:32:43.951718Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2026-01-04T02:32:43.951753Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 18446744073709551615 2026-01-04T02:32:43.951788Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-04T02:32:43.951850Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2026-01-04T02:32:43.951878Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [7:127:2152] 2026-01-04T02:32:43.952411Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:32:43.952459Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-04T02:32:43.952535Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 7 2026-01-04T02:32:43.956121Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-04T02:32:43.957318Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-04T02:32:43.957432Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2026-01-04T02:32:43.957501Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710758 2026-01-04T02:32:43.958010Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:32:43.960411Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-04T02:32:43.960462Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:1495:3318] TestWaitNotification: OK eventTxId 106 >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithManyPartitions |87.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/column_family/compression/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly-StrictAclCheck [GOOD] Test command err: Starting YDB, grpc: 1437, msgbus: 4591 2026-01-04T02:26:56.122620Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591327277864461953:2268];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:26:56.122679Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:26:56.850897Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:26:57.122100Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:26:57.130476Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:26:57.130583Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:26:57.171154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:26:57.171295Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:26:57.251767Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:26:57.403934Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:57.562066Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:26:57.706842Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:26:57.706875Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:26:57.706884Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:26:57.706985Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:26:58.226775Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591327277864461965:2118] Handle TEvNavigate describe path dc-1 2026-01-04T02:26:58.226852Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591327286454397413:2765] HANDLE EvNavigateScheme dc-1 2026-01-04T02:26:58.227267Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591327286454397413:2765] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:58.278237Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591327286454397413:2765] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ReturnBoundaries: true ShowPrivateTable: true ReturnRangeKey: true } 2026-01-04T02:26:58.284244Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591327286454397413:2765] Handle TEvDescribeSchemeResult Forward to# [1:7591327286454397412:2764] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 
10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:26:58.305904Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327277864461965:2118] Handle TEvProposeTransaction 2026-01-04T02:26:58.305935Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591327277864461965:2118] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:26:58.306005Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591327277864461965:2118] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7591327286454397419:2771] 2026-01-04T02:26:58.638153Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591327286454397419:2771] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "tenant-db" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:26:58.638271Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591327286454397419:2771] txid# 281474976715657 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2026-01-04T02:26:58.638291Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327286454397419:2771] txid# 281474976715657 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:26:58.638358Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327286454397419:2771] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:26:58.638706Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327286454397419:2771] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:58.638806Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327286454397419:2771] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# false 2026-01-04T02:26:58.638889Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327286454397419:2771] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:26:58.639041Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7591327286454397419:2771] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:26:58.639704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:26:58.658825Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327286454397419:2771] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:26:58.658909Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# 
[1:7591327286454397419:2771] txid# 281474976715657 SEND to# [1:7591327286454397418:2770] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:26:58.698095Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591327277864461965:2118] Handle TEvProposeTransaction 2026-01-04T02:26:58.698121Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591327277864461965:2118] TxId# 281474976715658 ProcessProposeTransaction 2026-01-04T02:26:58.698162Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591327277864461965:2118] Cookie# 0 userReqId# "" txid# 281474976715658 SEND to# [1:7591327286454397461:2804] 2026-01-04T02:26:58.700761Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591327286454397461:2804] txid# 281474976715658 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpModifyACL ModifyACL { Name: "dc-1" DiffACL: "\n\032\010\000\022\026\010\001\020\377\377\003\032\014root@builtin \003" } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "" 2026-01-04T02:26:58.700859Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591327286454397461:2804] txid# 281474976715658 Bootstrap, UserSID: root@builtin CheckAdministrator: 1 CheckDatabaseAdministrator: 0 2026-01-04T02:26:58.700880Z node 1 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [1:7591327286454397461:2804] txid# 281474976715658 Bootstrap, UserSID: root@builtin IsClusterAdministrator: 1 2026-01-04T02:26:58.700932Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:7591327286454397461:2804] txid# 281474976715658 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:26:58.701212Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:7591327286454397461:2804] txid# 281474976715658 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:26:58.701323Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:7591327286454397461:2804] HANDLE EvNavigateKeySetResult, txid# 281474976715658 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:26:58.701385Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:7591327286454397461:2804] txid# 281474976715658 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715658 TabletId# 72057594046644480} 2026-01-04T02:26:58.703918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:26:58.711416Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:7591327286454397461:2804] txid# 281474976715658 HANDLE EvClientConnected 2026-01-04T02:26:58.711472Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:7591327286454397461:2804] txid# 281474976715658 Status StatusSuccess HANDLE {TEvModifySchemeTransactionResult ... 
AckPublish, at schemeshard: 72075186224037891, txId: 281474976715665, pathId: [OwnerId: 72075186224037891, LocalPathId: 23], version: 4 2026-01-04T02:32:18.046026Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186224037891, LocalPathId: 23] was 1 2026-01-04T02:32:18.047443Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 27 Version: 4 PathOwnerId: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.047515Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186224037891, msg: Owner: 72075186224037891 Generation: 1 LocalPathId: 27 Version: 4 PathOwnerId: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.047529Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72075186224037891, txId: 281474976715665 2026-01-04T02:32:18.047545Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186224037891, txId: 281474976715665, pathId: [OwnerId: 72075186224037891, LocalPathId: 27], version: 4 2026-01-04T02:32:18.047565Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186224037891, LocalPathId: 27] was 1 2026-01-04T02:32:18.047624Z node 60 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72075186224037891, txId: 281474976715665, subscribers: 0 2026-01-04T02:32:18.047782Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.047890Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.047928Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.047961Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.047996Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.048032Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.048065Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.048098Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.060108Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.060231Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.060259Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.060290Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.060320Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.060714Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.060783Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.060808Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.060839Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.060874Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.060903Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.060931Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.061162Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.061197Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.061222Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.061244Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.070343Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.070473Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.070506Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.070547Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.070590Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.070619Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.070658Z node 60 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186224037891, cookie: 281474976715665 2026-01-04T02:32:18.461187Z node 59 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [59:7591328625006939546:2118] Handle TEvNavigate describe path /dc-1/tenant-db 2026-01-04T02:32:18.461274Z node 59 :TX_PROXY DEBUG: describe.cpp:270: Actor# [59:7591328663661647065:3467] HANDLE EvNavigateScheme /dc-1/tenant-db 2026-01-04T02:32:18.461874Z node 59 :TX_PROXY DEBUG: describe.cpp:354: Actor# [59:7591328663661647065:3467] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:32:18.462047Z node 59 :TX_PROXY DEBUG: describe.cpp:433: Actor# [59:7591328663661647065:3467] SEND to# 72075186224037891 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/tenant-db" Options { ReturnBoundaries: false ShowPrivateTable: true ReturnRangeKey: false } 2026-01-04T02:32:18.464871Z node 59 :TX_PROXY DEBUG: describe.cpp:446: Actor# [59:7591328663661647065:3467] Handle TEvDescribeSchemeResult Forward to# [59:7591328663661647064:3466] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 0 Record# Status: StatusSuccess Path: "/dc-1/tenant-db" PathDescription { Self { Name: "dc-1/tenant-db" PathId: 1 SchemeshardId: 72075186224037891 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "\n\025\010\001\020\200\004\032\014clusteradmin \003" EffectiveACL: "\n\030\010\001\020\377\377\003\032\014root@builtin \003(\001\n\025\010\001\020\200\004\032\014clusteradmin \003" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 1 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 3 SecurityStateVersion: 1 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186224037891 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976725657 CreateStep: 1767493931350 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 38 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186224037890 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037889 SchemeShard: 72075186224037891 Hive: 72075186224037888 } DomainKey { SchemeShard: 72057594046644480 PathId: 38 } StoragePools { Name: "name_tenant-db_kind_tenant-db" Kind: "tenant-db" } StoragePools { Name: "name_tenant-db_kind_test" Kind: "test" } PathsInside: 
30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Sids { Name: "tenantuser" Type: USER } Audience: "/dc-1/tenant-db" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186224037891 2026-01-04T02:32:18.596276Z node 60 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:32:18.599803Z node 59 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 60 2026-01-04T02:32:18.600494Z node 59 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(60, (0,0,0,0)) VolatileState: Connected -> Disconnected |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest |87.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_schemereq/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry |87.2%| [TA] {RESULT} $(B)/ydb/core/load_test/ut_ycsb/test-results/unittest/{meta.json ... results_accumulator.log} |87.2%| [TA] {RESULT} $(B)/ydb/tests/olap/column_family/compression/test-results/py3test/{meta.json ... 
results_accumulator.log} >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] >> TExportToS3Tests::IndexMaterializationTwoTables [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:32:44.841444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:32:44.841550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:44.841622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:32:44.841670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:32:44.841715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:32:44.841784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:32:44.841906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:44.842011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:32:44.843111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:32:44.843459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:32:44.930737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:32:44.930783Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:44.947097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:32:44.950054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:32:44.950250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:32:44.961204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2026-01-04T02:32:44.961452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:32:44.962201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:44.962464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:32:44.966012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:44.966203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:44.967408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:44.967475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:44.967585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:44.967632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:44.967677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:44.967899Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:32:45.124202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.125262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.125385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.125457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.125568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.125657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.125739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.125823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.125900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.126012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.126081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:32:45.126144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.126230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.126326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.126429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
T_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 0 2026-01-04T02:32:46.789447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 0 2026-01-04T02:32:46.789470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 7] was 0 2026-01-04T02:32:46.789492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 0 2026-01-04T02:32:46.789521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 0 2026-01-04T02:32:46.789564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0 2026-01-04T02:32:46.789588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 0 2026-01-04T02:32:46.789626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 0 2026-01-04T02:32:46.789682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 0 2026-01-04T02:32:46.789705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 0 2026-01-04T02:32:46.789746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2026-01-04T02:32:46.789771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2026-01-04T02:32:46.789821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2026-01-04T02:32:46.789845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2026-01-04T02:32:46.789869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2026-01-04T02:32:46.789893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2026-01-04T02:32:46.789919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2026-01-04T02:32:46.789950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-01-04T02:32:46.789981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-01-04T02:32:46.790003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason 
for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-01-04T02:32:46.790026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-01-04T02:32:46.790047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-01-04T02:32:46.790069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2026-01-04T02:32:46.790092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-01-04T02:32:46.790112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-01-04T02:32:46.790141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-01-04T02:32:46.790181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-01-04T02:32:46.790225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-04T02:32:46.790270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-04T02:32:46.790305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-04T02:32:46.790326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-04T02:32:46.790346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-04T02:32:46.790367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-04T02:32:46.790538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.790672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.790794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-01-04T02:32:46.790855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:32:46.790904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:32:46.790939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-04T02:32:46.790965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:32:46.791075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.791167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.791413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2026-01-04T02:32:46.791606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:32:46.792077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.792228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.792680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.792762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.793099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.793268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.793325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.793429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.793705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.793821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.794354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.794670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.794769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.794832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.794970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.795054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.795101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:46.801188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:46.806584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:46.806663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:46.806755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:46.806803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:46.806874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:46.807811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-false |87.3%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... results_accumulator.log} |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors |87.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithManyPartitions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ExportTableWithUniqueIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:32:32.004416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:32:32.004489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:32.004518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:32:32.004546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:32:32.004579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:32:32.004600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:32:32.004670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:32.004784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:32:32.005635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:32:32.005902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:32:32.090093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:32:32.090160Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:32.104110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:32:32.104320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:32:32.104475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:32:32.113677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:32:32.114020Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 
2026-01-04T02:32:32.114801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:32.115080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:32:32.118011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:32.118194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:32.119438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:32.119530Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:32.119678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:32.119725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:32.119770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:32.119989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:32:32.272963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.273968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.274099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.274192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.274259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.274352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.274424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.274513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.274607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.274687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.274791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.274881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.274954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.275041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.275103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-04T02:32:46.185250Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710761:0 2026-01-04T02:32:46.185297Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710761:0 2026-01-04T02:32:46.185380Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-01-04T02:32:46.185429Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2026-01-04T02:32:46.185489Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2026-01-04T02:32:46.185536Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 41], 18446744073709551615 2026-01-04T02:32:46.187381Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 FAKE_COORDINATOR: Erasing txId 281474976710761 2026-01-04T02:32:46.195338Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:46.195398Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:46.195635Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-04T02:32:46.195806Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:46.195856Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:211:2212], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2026-01-04T02:32:46.195904Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:211:2212], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 41 2026-01-04T02:32:46.196870Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-04T02:32:46.196992Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-04T02:32:46.197034Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-04T02:32:46.197112Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2026-01-04T02:32:46.197170Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:32:46.197695Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-04T02:32:46.197770Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-04T02:32:46.198500Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-04T02:32:46.198563Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 18446744073709551615 2026-01-04T02:32:46.198600Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-04T02:32:46.198691Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2026-01-04T02:32:46.198746Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [7:127:2152] 2026-01-04T02:32:46.203330Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-04T02:32:46.203575Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-04T02:32:46.203672Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-01-04T02:32:46.203741Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-04T02:32:46.206417Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:32:46.206507Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:944:2892] TestWaitNotification: OK eventTxId 102 2026-01-04T02:32:46.207256Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/ByValue" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:32:46.207624Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/ByValue" took 407us result status StatusSuccess 2026-01-04T02:32:46.208626Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table/ByValue" PathDescription { Self { Name: "ByValue" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { 
ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ByValue" LocalPathId: 39 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-true |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:32:45.452109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:32:45.452269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:45.452322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:32:45.452367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:32:45.452411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:32:45.452486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:32:45.452581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:45.452664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:32:45.453758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:32:45.454244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:32:45.574979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:32:45.575109Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:45.592210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:32:45.597738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:32:45.598007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:32:45.605026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:32:45.605348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:32:45.606217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2026-01-04T02:32:45.606544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:32:45.610133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:45.610353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:45.611968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:45.612046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:45.612191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:45.612252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:45.612305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:45.612477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:32:45.770571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.771820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.771988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.772080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.772224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.772315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.772422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.772527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.772597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.772668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.772722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.772767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.772824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.772893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:45.772956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... at schemeshard: 72057594046678944 2026-01-04T02:32:47.160147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:47.160240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:47.160302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:47.160470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:47.160538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:47.160594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:47.165664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:47.170701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:47.170778Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:47.171855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:47.171932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:47.171986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-01-04T02:32:47.177333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1133:3039] sender: [1:1193:2058] recipient: [1:15:2062] 2026-01-04T02:32:47.272995Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:32:47.273351Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 397us result status StatusSuccess 2026-01-04T02:32:47.274110Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 3 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } AlterVersion: 3 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 3 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 
3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } TestModificationResults wait txId: 109 2026-01-04T02:32:47.280169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:32:47.280446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 109:0, at schemeshard: 72057594046678944 2026-01-04T02:32:47.280612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusInvalidParameter, reason: KeyRange must be specified for root partition bounds, at schemeshard: 72057594046678944 2026-01-04T02:32:47.286920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusInvalidParameter Reason: "KeyRange must be specified for root partition bounds" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:32:47.287229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: KeyRange must be specified for root partition bounds, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2026-01-04T02:32:47.287648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2026-01-04T02:32:47.287719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2026-01-04T02:32:47.288222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2026-01-04T02:32:47.288330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2026-01-04T02:32:47.288379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [1:1198:3091] TestWaitNotification: OK 
eventTxId 109 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } TestModificationResults wait txId: 111 2026-01-04T02:32:47.293596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 1 KeyRange { ToBound: "\177" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\177" } CreatePartition: false } } } TxId: 111 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:32:47.293888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 111:0, at schemeshard: 72057594046678944 2026-01-04T02:32:47.294110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 111:1, propose status:StatusInvalidParameter, reason: Unable to change bounds of non-root partition: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:47.298893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 111, response: Status: StatusInvalidParameter Reason: "Unable to change bounds of non-root partition: 1" TxId: 111 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:32:47.299226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 111, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Unable to change bounds of non-root partition: 1, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 111, wait until txId: 111 TestWaitNotification wait txId: 111 2026-01-04T02:32:47.299660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 111: send EvNotifyTxCompletion 2026-01-04T02:32:47.299708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 111 2026-01-04T02:32:47.300270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 111, at schemeshard: 72057594046678944 2026-01-04T02:32:47.300393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 111: got EvNotifyTxCompletionResult 2026-01-04T02:32:47.300436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 111: satisfy waiter [1:1205:3098] TestWaitNotification: OK eventTxId 111 |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> BasicUsage::WaitEventBlocksBeforeDiscovery >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false >> BasicUsage::WriteSessionWriteInHandlers >> TExportToS3Tests::Changefeeds [GOOD] >> BasicUsage::SelectDatabaseByHash [GOOD] >> BasicUsage::SelectDatabase [GOOD] >> BasicUsage::BasicWriteSession |87.3%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.3%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/ydb-core-tx-datashard-ut_kqp_errors >> BasicUsage::WriteSessionNoAvailableDatabase >> BasicUsage::RetryDiscoveryWithCancel ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithManyPartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:32:44.386779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:32:44.386904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:44.386963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:32:44.387009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:32:44.387069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:32:44.387106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:32:44.387179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:44.387268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:32:44.388363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:32:44.388687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:32:44.494027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:32:44.494100Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:44.508802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:32:44.513088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:32:44.513336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:32:44.527439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:32:44.527736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: 
Clear TempDirsState with owners number: 0 2026-01-04T02:32:44.528669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:44.528966Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:32:44.539565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:44.539909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:44.541641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:44.541721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:44.541885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:44.541939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:44.541992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:44.542224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:32:44.702043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:44.703199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:44.703379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:44.703476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: 
EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:44.703591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:44.703665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:44.703734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:44.703832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:44.703926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:44.704026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:44.704100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:44.704189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:44.704331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:44.704418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:44.704506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-04T02:32:48.128656Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-04T02:32:48.128684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-04T02:32:48.128869Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.128983Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.129120Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-01-04T02:32:48.129168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:32:48.129206Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:32:48.129232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-04T02:32:48.129258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:32:48.129361Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for 
TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.129495Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.129742Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 14, at schemeshard: 72057594046678944 2026-01-04T02:32:48.129986Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:32:48.130419Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.130569Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.131024Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.131111Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.131464Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.131646Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.131711Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.131813Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.132095Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.132220Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.132809Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.133100Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.133190Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.133266Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.133424Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.133486Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.133550Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-04T02:32:48.143505Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 
2026-01-04T02:32:48.149487Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:48.149596Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:48.150041Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:48.150120Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:48.150187Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:48.150299Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:1100:2994] sender: [2:1159:2058] recipient: [2:15:2062] 2026-01-04T02:32:48.215818Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:32:48.216173Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 402us result status StatusSuccess 2026-01-04T02:32:48.216916Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" } Status: Active } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" ToBound: 
"\325UUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 5 TabletId: 72075186233409548 KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } Status: Active } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 6 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 6 NextPartitionId: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" ToBound: "\325UUUUUUUUUUUUUUT" } } Partitions { PartitionId: 5 GroupId: 6 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest |87.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SelectDatabase [GOOD] |87.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |87.3%| [LD] 
{BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut |87.3%| [LD] {RESULT} $(B)/ydb/services/ydb/backup_ut/ydb-services-ydb-backup_ut >> TPQTest::TestPQSmallRead [GOOD] >> TPQTest::TestReadAndDeleteConsumer >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad >> BasicUsage::GetAllStartPartitionSessions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::IndexMaterializationTwoTables [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:32:31.835764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:32:31.835880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:31.835933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:32:31.835972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:32:31.836022Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:32:31.836071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:32:31.836131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:31.836228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:32:31.837150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:32:31.837497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:32:31.932346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:32:31.932410Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:31.947447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:32:31.947658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:32:31.947847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:32:31.956564Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:32:31.956808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:32:31.957517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:31.957793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:32:31.961009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:31.961200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:31.962425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:31.962495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:31.962621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:31.962697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:31.962753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:31.962959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:32:32.124423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.125524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.125669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.125758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.125872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.125942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.126023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.126156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.126238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.126329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.126430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: 
ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.126530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.126616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.126701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.126771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
e publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-04T02:32:47.874125Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710765 2026-01-04T02:32:47.874224Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 15 PathOwnerId: 72057594046678944, cookie: 281474976710765 2026-01-04T02:32:47.874273Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710765 2026-01-04T02:32:47.874315Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 44], version: 15 2026-01-04T02:32:47.874356Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 4 2026-01-04T02:32:47.874477Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 0/1, is published: true 2026-01-04T02:32:47.876416Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710765, at schemeshard: 72057594046678944 2026-01-04T02:32:47.876460Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 0/1, is published: true 2026-01-04T02:32:47.876503Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710765, at schemeshard: 72057594046678944 2026-01-04T02:32:47.878222Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710765:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710765 msg type: 269090816 2026-01-04T02:32:47.878372Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710765, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:32:47.878935Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 FAKE_COORDINATOR: Add transaction: 281474976710765 at step: 5000048 FAKE_COORDINATOR: advance: minStep5000048 State->FrontStep: 5000047 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710765 at step: 5000048 2026-01-04T02:32:47.879255Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000048, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:47.879382Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710765 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 30064773232 } } Step: 5000048 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:32:47.879455Z node 7 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710765:0, step: 5000048, at schemeshard: 72057594046678944 2026-01-04T02:32:47.879625Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710765:0, at schemeshard: 72057594046678944 2026-01-04T02:32:47.879758Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710765:0 progress is 1/1 2026-01-04T02:32:47.879806Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2026-01-04T02:32:47.879859Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710765:0 progress is 1/1 2026-01-04T02:32:47.879918Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2026-01-04T02:32:47.879991Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:32:47.880068Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 3 2026-01-04T02:32:47.880115Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710765, ready parts: 1/1, is published: false 2026-01-04T02:32:47.880194Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710765 ready parts: 1/1 2026-01-04T02:32:47.880249Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710765:0 2026-01-04T02:32:47.880295Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710765:0 2026-01-04T02:32:47.880375Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 4 2026-01-04T02:32:47.880422Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710765, publications: 2, subscribers: 1 2026-01-04T02:32:47.880497Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710765, [OwnerId: 72057594046678944, LocalPathId: 1], 18 2026-01-04T02:32:47.880561Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710765, [OwnerId: 72057594046678944, LocalPathId: 44], 18446744073709551615 2026-01-04T02:32:47.882155Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 2026-01-04T02:32:47.884044Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:47.884093Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710765, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:47.884334Z node 7 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710765, path id: [OwnerId: 72057594046678944, LocalPathId: 44] 2026-01-04T02:32:47.884489Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:47.884541Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:211:2212], at schemeshard: 72057594046678944, txId: 281474976710765, path id: 1 2026-01-04T02:32:47.884585Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:211:2212], at schemeshard: 72057594046678944, txId: 281474976710765, path id: 44 FAKE_COORDINATOR: Erasing txId 281474976710765 2026-01-04T02:32:47.885551Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 281474976710765 2026-01-04T02:32:47.885653Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 281474976710765 2026-01-04T02:32:47.885698Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710765 2026-01-04T02:32:47.885769Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 18 2026-01-04T02:32:47.885866Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-04T02:32:47.886474Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710765 2026-01-04T02:32:47.886559Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710765 2026-01-04T02:32:47.886604Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710765 2026-01-04T02:32:47.886655Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710765, pathId: [OwnerId: 72057594046678944, LocalPathId: 44], version: 18446744073709551615 2026-01-04T02:32:47.886695Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 3 2026-01-04T02:32:47.886791Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, 
notify & remove, at schemeshard: 72057594046678944, txId: 281474976710765, subscribers: 1 2026-01-04T02:32:47.886873Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [7:127:2152] 2026-01-04T02:32:47.890654Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 2026-01-04T02:32:47.891161Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710765 2026-01-04T02:32:47.891263Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710765 2026-01-04T02:32:47.891333Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710765 2026-01-04T02:32:47.893409Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:32:47.893484Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:1220:3112] TestWaitNotification: OK eventTxId 103 |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> BasicUsage::FallbackToSingleDb >> TExportToS3Tests::CancelUponTransferringManyTablesShouldSucceed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::Changefeeds [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:32:31.788517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:32:31.788589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:31.788624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:32:31.788651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:32:31.788699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:32:31.788743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:32:31.788988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:31.789059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:32:31.789770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:32:31.790124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:32:31.868078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:32:31.868176Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:31.880096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:32:31.880276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:32:31.880473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:32:31.888853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:32:31.889067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:32:31.889760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:31.890066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:32:31.893419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:31.893604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:31.894540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:31.894595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:31.894692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:31.894741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:31.894792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:31.894937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:32:32.040881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.041995Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.042151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.042236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.042328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.042425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.042495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.042580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.042671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.042752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.042844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.042931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.043036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.043115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.043178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-04T02:32:49.146491Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2026-01-04T02:32:49.146556Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:32:49.147651Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 45 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-04T02:32:49.147784Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 45 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-04T02:32:49.147824Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-04T02:32:49.153338Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 45], version: 18446744073709551615 2026-01-04T02:32:49.153397Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 2 2026-01-04T02:32:49.153546Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2026-01-04T02:32:49.153648Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [6:128:2153] 2026-01-04T02:32:49.165102Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-04T02:32:49.165496Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-04T02:32:49.165653Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-01-04T02:32:49.165731Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-04T02:32:49.165794Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-04T02:32:49.165852Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:1333: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2026-01-04T02:32:49.165890Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:1364: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295 2026-01-04T02:32:49.168234Z node 6 :EXPORT DEBUG: 
schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-04T02:32:49.168346Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:32:49.168426Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:1542:3334] TestWaitNotification: OK eventTxId 105 2026-01-04T02:32:49.169867Z node 6 :EXPORT DEBUG: schemeshard_export__forget.cpp:79: TExport::TTxForget, dropping export tables, info: { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 45] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2026-01-04T02:32:49.180069Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-04T02:32:49.180171Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:817: TExport::TTxProgress: Resume: id# 105 2026-01-04T02:32:49.180253Z node 6 :EXPORT INFO: schemeshard_export__create.cpp:598: TExport::TTxProgress: Allocate txId: info# { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 45] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2026-01-04T02:32:49.180337Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-04T02:32:49.180459Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 105, at schemeshard: 72057594046678944 2026-01-04T02:32:49.180525Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-04T02:32:49.180582Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:938: TExport::TTxProgress: OnAllocateResult: txId# 281474976710762, id# 105 2026-01-04T02:32:49.180659Z node 6 :EXPORT INFO: schemeshard_export__create.cpp:590: TExport::TTxProgress: Drop propose: info# { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 45] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710762 2026-01-04T02:32:49.180798Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-04T02:32:49.184016Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "export-105" } Internal: true } TxId: 281474976710762 TabletId: 72057594046678944 PeerName: "" SanitizedToken: "" , at schemeshard: 72057594046678944 2026-01-04T02:32:49.184275Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/export-105, pathId: 0, opId: 281474976710762:0, at schemeshard: 72057594046678944 2026-01-04T02:32:49.184492Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710762:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-105', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 
281474976710761, at schemeshard: 72057594046678944 2026-01-04T02:32:49.187381Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710762, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-105\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 45 PathDropTxId: 281474976710761, at schemeshard: 72057594046678944 2026-01-04T02:32:49.187721Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710762, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-105', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 281474976710761, operation: DROP DIRECTORY, path: /MyRoot/export-105 2026-01-04T02:32:49.187928Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7145: Handle: TEvModifySchemeTransactionResult: txId# 281474976710762, status# StatusPathDoesNotExist 2026-01-04T02:32:49.188028Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7147: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-105\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 45 PathDropTxId: 281474976710761 2026-01-04T02:32:49.188116Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-04T02:32:49.188193Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:999: TExport::TTxProgress: OnModifyResult: txId# 281474976710762, status# StatusPathDoesNotExist 2026-01-04T02:32:49.188290Z node 6 :EXPORT TRACE: schemeshard_export__create.cpp:1000: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-105\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000046, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 45 PathDropTxId: 281474976710761 2026-01-04T02:32:49.188443Z node 6 :EXPORT INFO: schemeshard_export__create.cpp:1181: TExport::TTxProgress: Wait for completion: info# { Id: 105 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 45] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710761 2026-01-04T02:32:49.190414Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-04T02:32:49.190589Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710761, at schemeshard: 72057594046678944 2026-01-04T02:32:49.190721Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-01-04T02:32:49.190790Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-04T02:32:49.190866Z node 6 :EXPORT DEBUG: 
schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-04T02:32:49.190912Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:1333: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2026-01-04T02:32:49.190981Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:1364: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 105, itemIdx# 4294967295 2026-01-04T02:32:49.192913Z node 6 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 105 2026-01-04T02:32:49.193239Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-04T02:32:49.193319Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-04T02:32:49.194002Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-04T02:32:49.194181Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:32:49.194262Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:1766:3554] TestWaitNotification: OK eventTxId 105 |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |87.3%| [LD] {RESULT} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/wrappers/ut/ydb-core-wrappers-ut |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest >> BasicUsage::WriteSessionCloseWaitsForWrites >> TExportToS3Tests::AuditCompletedExport >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-false [GOOD] >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-true >> TxUsage::WriteToTopic_Demo_12_Table |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |87.3%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_backup/ydb-core-tx-schemeshard-ut_backup >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Table |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |87.3%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/batch_operations/ydb-core-kqp-ut-batch_operations >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-true [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true >> BasicUsage::CreateTopicWithStreamingConsumer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-true [GOOD] Test command err: 2026-01-04T02:32:48.941376Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328790566236235:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:48.941461Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:49.392943Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:49.393105Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:49.432558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:49.526085Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:32:49.531706Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328790566236197:2081] 1767493968927817 != 1767493968927820 2026-01-04T02:32:49.532037Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:49.752714Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:32:49.783450Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591328794861203757:2105] Handle TEvNavigate describe path dc-1 2026-01-04T02:32:49.783493Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591328794861204261:2465] HANDLE EvNavigateScheme dc-1 2026-01-04T02:32:49.783604Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591328794861203783:2119], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:49.783699Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591328794861203943:2204][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591328794861203783:2119], cookie# 1 2026-01-04T02:32:49.785314Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328794861203977:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328794861203974:2204], cookie# 1 2026-01-04T02:32:49.785348Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328794861203978:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328794861203975:2204], cookie# 1 2026-01-04T02:32:49.785361Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328794861203979:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328794861203976:2204], cookie# 1 2026-01-04T02:32:49.785410Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328790566236165:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328794861203977:2204], cookie# 1 2026-01-04T02:32:49.785445Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328790566236168:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328794861203978:2204], cookie# 1 
2026-01-04T02:32:49.785461Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328790566236171:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328794861203979:2204], cookie# 1 2026-01-04T02:32:49.785511Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328794861203977:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7591328790566236165:2049], cookie# 1 2026-01-04T02:32:49.785528Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328794861203978:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7591328790566236168:2052], cookie# 1 2026-01-04T02:32:49.785546Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328794861203979:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7591328790566236171:2055], cookie# 1 2026-01-04T02:32:49.785584Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328794861203943:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7591328794861203974:2204], cookie# 1 2026-01-04T02:32:49.785612Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591328794861203943:2204][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:32:49.785629Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328794861203943:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7591328794861203975:2204], cookie# 1 2026-01-04T02:32:49.785649Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591328794861203943:2204][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:32:49.785671Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328794861203943:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7591328794861203976:2204], cookie# 1 2026-01-04T02:32:49.785707Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591328794861203943:2204][/dc-1] Sync cookie mismatch: sender# [1:7591328794861203976:2204], cookie# 1, current cookie# 0 2026-01-04T02:32:49.785764Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591328794861203783:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:32:49.785853Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591328794861203783:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591328794861203943:2204] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:32:49.785935Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591328794861203783:2119], cacheItem# { 
Subscriber: { Subscriber: [1:7591328794861203943:2204] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:32:49.788483Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591328794861204262:2466], recipient# [1:7591328794861204261:2465], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:32:49.788538Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591328794861204261:2465] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:32:49.844584Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591328794861204261:2465] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:32:49.850350Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591328794861204261:2465] Handle TEvDescribeSchemeResult Forward to# [1:7591328794861204260:2464] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" 
MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:32:49.858663Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591328794861203757:2105] Handle TEvProposeTransaction 2026-01-04T02:32:49.858696Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591328794861203757:2105] TxId# 281474976710657 ProcessProposeTransaction 2026-01-04T02:32:49.858759Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591328794861203757:2105] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7591328794861204266:2470] 2026-01-04T02:32:49.938718Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591328794861203783:2119], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073 ... mr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2026-01-04T02:32:50.064914Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591328794861203783:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591328799156171608:2503] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1767493970088 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] DomainId: [OwnerId: 72057594046644480, LocalPathId: 38] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:32:50.064981Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591328794861203783:2119], cacheItem# { Subscriber: { Subscriber: [1:7591328799156171608:2503] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1767493970088 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] DomainId: [OwnerId: 72057594046644480, LocalPathId: 38] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:32:50.065101Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591328799156171615:2504], recipient# [1:7591328799156171607:2502], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:38:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 38] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 38] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:32:50.065132Z node 1 :TX_PROXY INFO: describe.cpp:354: Actor# [1:7591328799156171607:2502] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 2026-01-04T02:32:50.105037Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591328794861203757:2105] Handle TEvNavigate describe path 
/dc-1 2026-01-04T02:32:50.105074Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591328799156171617:2506] HANDLE EvNavigateScheme /dc-1 2026-01-04T02:32:50.105182Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591328794861203783:2119], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:50.105283Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591328794861203943:2204][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591328794861203783:2119], cookie# 4 2026-01-04T02:32:50.105363Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328794861203977:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328794861203974:2204], cookie# 4 2026-01-04T02:32:50.105380Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328794861203978:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328794861203975:2204], cookie# 4 2026-01-04T02:32:50.105394Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328794861203979:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328794861203976:2204], cookie# 4 2026-01-04T02:32:50.105431Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328790566236165:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328794861203977:2204], cookie# 4 2026-01-04T02:32:50.105459Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328790566236168:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328794861203978:2204], cookie# 4 2026-01-04T02:32:50.105477Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328790566236171:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328794861203979:2204], cookie# 4 2026-01-04T02:32:50.105512Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328794861203977:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7591328790566236165:2049], cookie# 4 2026-01-04T02:32:50.105528Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328794861203978:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7591328790566236168:2052], cookie# 4 2026-01-04T02:32:50.105576Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328794861203979:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7591328790566236171:2055], cookie# 4 2026-01-04T02:32:50.105613Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328794861203943:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7591328794861203974:2204], cookie# 4 2026-01-04T02:32:50.105633Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591328794861203943:2204][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 
1, failures# 0 2026-01-04T02:32:50.105658Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328794861203943:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7591328794861203975:2204], cookie# 4 2026-01-04T02:32:50.105678Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591328794861203943:2204][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:32:50.105707Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328794861203943:2204][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [1:7591328794861203976:2204], cookie# 4 2026-01-04T02:32:50.105718Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591328794861203943:2204][/dc-1] Sync cookie mismatch: sender# [1:7591328794861203976:2204], cookie# 4, current cookie# 0 2026-01-04T02:32:50.105749Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591328794861203783:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:32:50.105831Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591328794861203783:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591328794861203943:2204] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767493970053 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:32:50.105910Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591328794861203783:2119], cacheItem# { Subscriber: { Subscriber: [1:7591328794861203943:2204] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767493970053 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2026-01-04T02:32:50.106060Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591328799156171618:2507], recipient# [1:7591328799156171617:2506], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:32:50.106106Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591328799156171617:2506] HANDLE EvNavigateKeySetResult 
TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:32:50.106185Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591328799156171617:2506] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:32:50.106904Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591328799156171617:2506] Handle TEvDescribeSchemeResult Forward to# [1:7591328799156171616:2505] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 128 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1767493970053 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> TxUsage::Sinks_Oltp_WriteToTopic_1_Table >> DataShardVolatile::UpsertDependenciesShardsRestart+UseSink [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true >> BasicUsage::CreateTopicWithCustomMetricsLevel [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-true [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartNo_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> BasicUsage::CreateTopicWithCustomMetricsLevel [GOOD] Test command err: 
2026-01-04T02:31:35.312042Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1767493895311998 2026-01-04T02:31:36.052658Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328482593551901:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:36.052737Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:36.229740Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:31:36.254280Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:36.260568Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328482314890068:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:36.260722Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:36.299540Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:31:36.845916Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:36.876618Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:36.917051Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:36.917166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:36.933634Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:36.933741Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:36.974534Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:37.012052Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:31:37.035523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:37.142143Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:37.231367Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:37.240774Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:37.248115Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:37.282256Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:37.362026Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.013323s 2026-01-04T02:31:37.386326Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.005943s 2026-01-04T02:31:37.616764Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/0026db/r3tmp/yandexIxkhvS.tmp 2026-01-04T02:31:37.616807Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/0026db/r3tmp/yandexIxkhvS.tmp 2026-01-04T02:31:37.616977Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/0026db/r3tmp/yandexIxkhvS.tmp 2026-01-04T02:31:37.617060Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:37.698377Z INFO: TTestServer started on Port 28176 GrpcPort 14113 PQClient connected to localhost:14113 2026-01-04T02:31:38.009906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:41.055130Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328482593551901:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:41.055189Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:41.208627Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591328482314890068:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:41.209214Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:41.214050Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328503789726791:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:41.214142Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328503789726772:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:41.214297Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:41.217561Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328503789726803:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:41.217745Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:41.226347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:31:41.252026Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591328503789726802:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-04T02:31:41.689558Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591328503789726833:2139] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:31:41.741236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:41.748303Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591328503789726842:2313], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:31:41.751340Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=MTE1M2MwZmMtNWNkMGQwYjctZTQ4ODcyZWEtOGZlYjUwZTE=, ActorId: [2:7591328503789726770:2301], ActorState: ExecuteState, LegacyTraceId: 01ke3dh6wb00bdbqxmq8r557kq, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:31:41.760320Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:31:41.763093Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328504068389803:2338], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:31:41.765410Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MWEzNGVhZWQtMjk4NDVkMGQtNWYyNGVhOTktODg3ZWUyY2Q=, ActorId: [1:7591328504068389777:2 ... : (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:32:53.390806Z node 9 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037895][rt3.dc1--test-topic-1767493973] updating configuration. Deleted partitions []. Added partitions [] 2026-01-04T02:32:53.393654Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-04T02:32:53.399415Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-04T02:32:53.403065Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:53.403101Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:53.403116Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:53.403138Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:53.403154Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:32:53.409695Z node 10 :PERSQUEUE DEBUG: partition.cpp:1289: [72075186224037894][Partition][0][StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 1767493973448, TxId 281474976710676 2026-01-04T02:32:53.409731Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:53.409752Z node 10 :PERSQUEUE DEBUG: partition.cpp:2416: [72075186224037894][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ProposeConfig]) 2026-01-04T02:32:53.409782Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:32:53.409795Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:53.409829Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:32:53.409842Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:32:53.410082Z node 10 :PERSQUEUE DEBUG: partition.cpp:1433: [72075186224037894][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1767493973448, TxId 281474976710676 2026-01-04T02:32:53.410104Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:53.410122Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:32:53.410137Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: 
[72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:53.410164Z node 10 :PERSQUEUE DEBUG: partition.cpp:2480: [72075186224037894][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-04T02:32:53.410311Z node 10 :PERSQUEUE DEBUG: partition.cpp:3950: [72075186224037894][Partition][0][StateIdle] Schedule reply tx done 281474976710676 2026-01-04T02:32:53.410332Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:32:53.410347Z node 10 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037894][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:32:53.410368Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:32:53.410565Z node 10 :PERSQUEUE DEBUG: read.h:275: [72075186224037894][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:32:53.414243Z node 10 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037894][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:32:53.414523Z node 10 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037894][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:32:53.414616Z node 10 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037894][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:32:53.414642Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:53.414663Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:53.414677Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:53.414697Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:53.414710Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:32:53.414737Z node 10 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037894][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:32:53.415070Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72075186224037894] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--test-topic-1767493973" Version: 3 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "unknown" Ident: "unknown" Topic: "test-topic-1767493973" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--test-topic-1767493973" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 72075186224037894 } MetricsLevel: 4 MonitoringProjectId: "" 2026-01-04T02:32:53.415110Z node 10 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in 
BillingMeteringConfig 2026-01-04T02:32:53.415217Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-04T02:32:53.430521Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-04T02:32:53.444917Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:216: new Describe topic request 2026-01-04T02:32:53.462867Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:53.462900Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:53.462917Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:53.462940Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:53.462957Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:32:53.504865Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:53.504908Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:53.504926Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:53.504952Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:53.504970Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:32:53.566159Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:53.566199Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:53.566214Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:53.566240Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:53.566256Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:32:53.605135Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:53.605178Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:53.605194Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:53.605218Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:53.605237Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 
2026-01-04T02:32:53.669490Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:32:53.669531Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:53.669550Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:32:53.669574Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:32:53.669591Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:32:53.811637Z node 9 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1259} ActorId: [9:7591328811265770737:2468] TxId: 281474976710677. Ctx: { TraceId: 01ke3dkd9m23nkke5gdev88yt1, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MzI4MGIyMjMtZDU2NjUwMDAtZTRhNThhMTMtY2ViMGRkYmE=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 10 trace_id# 2026-01-04T02:32:53.811842Z node 9 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [9:7591328811265770742:2468], TxId: 281474976710677, task: 3. Ctx: { TraceId : 01ke3dkd9m23nkke5gdev88yt1. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=9&id=MzI4MGIyMjMtZDU2NjUwMDAtZTRhNThhMTMtY2ViMGRkYmE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [9:7591328811265770737:2468], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |87.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDrop-EnableRealSystemViewPaths-true [GOOD] Test command err: 2026-01-04T02:32:48.610561Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328788616630654:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:48.610659Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:49.147632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:49.147830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:49.206129Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:32:49.222438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:49.240459Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328788616630535:2081] 1767493968595070 != 1767493968595073 2026-01-04T02:32:49.249638Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:49.437418Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591328788616630801:2106] Handle TEvNavigate describe path dc-1 2026-01-04T02:32:49.437493Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591328792911598385:2264] HANDLE EvNavigateScheme dc-1 2026-01-04T02:32:49.437622Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591328788616630807:2108], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:49.437708Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591328788616630979:2198][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591328788616630807:2108], cookie# 1 2026-01-04T02:32:49.442811Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328788616631013:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328788616631010:2198], cookie# 1 2026-01-04T02:32:49.442868Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328788616631014:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328788616631011:2198], cookie# 1 2026-01-04T02:32:49.442885Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328788616631015:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328788616631012:2198], cookie# 1 2026-01-04T02:32:49.442931Z node 1 :SCHEME_BOARD_REPLICA DEBUG: 
replica.cpp:1137: [1:7591328788616630509:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328788616631015:2198], cookie# 1 2026-01-04T02:32:49.442993Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328788616631015:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328788616630509:2055], cookie# 1 2026-01-04T02:32:49.443038Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328788616630979:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328788616631012:2198], cookie# 1 2026-01-04T02:32:49.443070Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591328788616630979:2198][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:32:49.443091Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328788616630503:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328788616631013:2198], cookie# 1 2026-01-04T02:32:49.443109Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328788616630506:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328788616631014:2198], cookie# 1 2026-01-04T02:32:49.443127Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328788616631013:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328788616630503:2049], cookie# 1 2026-01-04T02:32:49.443142Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328788616631014:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328788616630506:2052], cookie# 1 2026-01-04T02:32:49.443167Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328788616630979:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328788616631010:2198], cookie# 1 2026-01-04T02:32:49.443190Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591328788616630979:2198][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:32:49.443220Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328788616630979:2198][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328788616631011:2198], cookie# 1 2026-01-04T02:32:49.443236Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591328788616630979:2198][/dc-1] Sync cookie mismatch: sender# [1:7591328788616631011:2198], cookie# 1, current cookie# 0 2026-01-04T02:32:49.443281Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591328788616630807:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:32:49.455531Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591328788616630807:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591328788616630979:2198] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess 
Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:32:49.456316Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591328788616630807:2108], cacheItem# { Subscriber: { Subscriber: [1:7591328788616630979:2198] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:32:49.459771Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591328792911598386:2265], recipient# [1:7591328792911598385:2264], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:32:49.459868Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591328792911598385:2264] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:32:49.466274Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591328788616630807:2108], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/script_execution_leases TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/result_sets TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:49.466368Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7591328788616630807:2108], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480 2026-01-04T02:32:49.466432Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7591328788616630807:2108], path# /dc-1/.metadata/script_execution_leases, domainOwnerId# 72057594046644480 2026-01-04T02:32:49.466457Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7591328788616630807:2108], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480 
2026-01-04T02:32:49.466752Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7591328792911598387:2266][/dc-1/.metadata/script_executions] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:32:49.467302Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591328788616630509:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7591328792911598395:2266] 2026-01-04T02:32:49.467334Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591328788616630509:2055] Upsert description: path# /dc-1/.metadata/script_executions 2026-01-04T02:32:49.467410Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591328788616630509:2055] Subscribe: subscriber# [1:7591328792911598395:2266], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:32:49.467470Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7591328792911598395:2266][/dc-1/.metadata/script_executions] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/script_executions Version: 0 }: sender# [1:7591328788616630509:2055] 2026-01-04T02:32:49.467526Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591328792911598387:2266][/dc-1/.metadata/script_executions] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/script_executions Version: 0 }: sender# [1:7591328792911598392:2266] 2026-01-04T02 ... ean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-04T02:32:53.174757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2026-01-04T02:32:53.174789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-04T02:32:53.174880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-04T02:32:53.174894Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2026-01-04T02:32:53.174933Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-04T02:32:53.175562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2026-01-04T02:32:53.175623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2026-01-04T02:32:53.175641Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715659 2026-01-04T02:32:53.175659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 
281474976715659 2026-01-04T02:32:53.175706Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-04T02:32:53.175730Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-04T02:32:53.179189Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [2:7591328808363402777:2106] Handle TEvNavigate describe path /dc-1 2026-01-04T02:32:53.179217Z node 2 :TX_PROXY DEBUG: describe.cpp:270: Actor# [2:7591328812658370697:2522] HANDLE EvNavigateScheme /dc-1 2026-01-04T02:32:53.179306Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7591328808363402788:2108], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:53.179394Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][2:7591328808363402912:2145][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7591328808363402788:2108], cookie# 4 2026-01-04T02:32:53.179436Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7591328808363402929:2145][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7591328808363402926:2145], cookie# 4 2026-01-04T02:32:53.179448Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7591328808363402930:2145][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7591328808363402927:2145], cookie# 4 2026-01-04T02:32:53.179465Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7591328808363402931:2145][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7591328808363402928:2145], cookie# 4 2026-01-04T02:32:53.179503Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7591328808363402530:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7591328808363402929:2145], cookie# 4 2026-01-04T02:32:53.179532Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7591328808363402533:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7591328808363402930:2145], cookie# 4 2026-01-04T02:32:53.179564Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7591328808363402536:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7591328808363402931:2145], cookie# 4 2026-01-04T02:32:53.179611Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7591328808363402929:2145][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7591328808363402530:2049], cookie# 4 2026-01-04T02:32:53.179627Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7591328808363402930:2145][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7591328808363402533:2052], cookie# 4 2026-01-04T02:32:53.179639Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: 
[replica][2:7591328808363402931:2145][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7591328808363402536:2055], cookie# 4 2026-01-04T02:32:53.179669Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7591328808363402912:2145][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7591328808363402926:2145], cookie# 4 2026-01-04T02:32:53.179686Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:7591328808363402912:2145][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:32:53.179701Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7591328808363402912:2145][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7591328808363402927:2145], cookie# 4 2026-01-04T02:32:53.179724Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][2:7591328808363402912:2145][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:32:53.179753Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7591328808363402912:2145][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 10 Partial: 0 Cluster State: { } }: sender# [2:7591328808363402928:2145], cookie# 4 2026-01-04T02:32:53.179766Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][2:7591328808363402912:2145][/dc-1] Sync cookie mismatch: sender# [2:7591328808363402928:2145], cookie# 4, current cookie# 0 2026-01-04T02:32:53.179800Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:7591328808363402788:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:32:53.179872Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7591328808363402788:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7591328808363402912:2145] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767493973098 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:32:53.179935Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7591328808363402788:2108], cacheItem# { Subscriber: { Subscriber: [2:7591328808363402912:2145] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767493973098 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2026-01-04T02:32:53.180080Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7591328812658370698:2523], recipient# [2:7591328812658370697:2522], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 
Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:32:53.180108Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7591328812658370697:2522] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:32:53.180181Z node 2 :TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7591328812658370697:2522] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:32:53.180674Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7591328812658370697:2522] Handle TEvDescribeSchemeResult Forward to# [2:7591328812658370696:2521] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 67 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1767493973098 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 |87.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad |87.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |87.3%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant |87.3%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/tx_proxy/ut_base_tenant/ydb-core-tx-tx_proxy-ut_base_tenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithoutNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2026-01-04T02:32:49.059832Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328796039360510:2161];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:49.060195Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:49.153799Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:49.520794Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:32:49.544945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:49.545064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:49.559050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:49.631993Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:49.633171Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328796039360386:2081] 1767493969025621 != 1767493969025624 2026-01-04T02:32:49.696781Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:32:49.962488Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591328796039360617:2095] Handle TEvNavigate describe path dc-1 2026-01-04T02:32:49.962545Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591328796039361159:2446] HANDLE EvNavigateScheme dc-1 2026-01-04T02:32:49.962632Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591328796039360670:2123], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:49.962752Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591328796039360940:2297][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591328796039360670:2123], cookie# 1 2026-01-04T02:32:49.964292Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328796039360984:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328796039360981:2297], cookie# 1 2026-01-04T02:32:49.964327Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: 
[replica][1:7591328796039360985:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328796039360982:2297], cookie# 1 2026-01-04T02:32:49.964340Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328796039360986:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328796039360983:2297], cookie# 1 2026-01-04T02:32:49.964386Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328796039360354:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328796039360984:2297], cookie# 1 2026-01-04T02:32:49.964414Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328796039360357:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328796039360985:2297], cookie# 1 2026-01-04T02:32:49.964433Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328796039360360:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328796039360986:2297], cookie# 1 2026-01-04T02:32:49.964497Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328796039360984:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328796039360354:2049], cookie# 1 2026-01-04T02:32:49.964582Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328796039360985:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328796039360357:2052], cookie# 1 2026-01-04T02:32:49.964606Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328796039360986:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328796039360360:2055], cookie# 1 2026-01-04T02:32:49.964646Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328796039360940:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328796039360981:2297], cookie# 1 2026-01-04T02:32:49.964669Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591328796039360940:2297][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:32:49.964687Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328796039360940:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328796039360982:2297], cookie# 1 2026-01-04T02:32:49.964707Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591328796039360940:2297][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:32:49.964738Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328796039360940:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328796039360983:2297], cookie# 1 2026-01-04T02:32:49.964757Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591328796039360940:2297][/dc-1] Sync cookie mismatch: sender# [1:7591328796039360983:2297], cookie# 1, current cookie# 0 2026-01-04T02:32:49.964838Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591328796039360670:2123], notify# 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:32:49.978425Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591328796039360670:2123], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591328796039360940:2297] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:32:49.978616Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591328796039360670:2123], cacheItem# { Subscriber: { Subscriber: [1:7591328796039360940:2297] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:32:49.981560Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591328796039361160:2447], recipient# [1:7591328796039361159:2446], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:32:49.981664Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591328796039361159:2446] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:32:50.028746Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591328796039361159:2446] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:32:50.036849Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591328796039361159:2446] Handle TEvDescribeSchemeResult Forward to# [1:7591328796039361158:2445] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 
Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:32:50.045106Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591328796039360617:2095] Handle TEvProposeTransaction 2026-01-04T02:32:50.045136Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591328796039360617:2095] TxId# 281474976710657 ProcessProposeTransaction 2026-01-04T02:32:50.045238Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591328796039360617:2095] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7591328800334328461:2451] 2026-01-04T02:32:50.045931Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache ... 2026-01-04T02:32:54.885648Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7591328811432430373:2116], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7591328815727398398:2591] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1767493974344 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:32:54.885741Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7591328811432430373:2116], cacheItem# { Subscriber: { Subscriber: [2:7591328815727398398:2591] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1767493974344 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2026-01-04T02:32:54.885946Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7591328815727398550:2683], recipient# [2:7591328815727398549:2682], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] 
ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:32:54.885984Z node 2 :TX_PROXY INFO: describe.cpp:354: Actor# [2:7591328815727398549:2682] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 2026-01-04T02:32:54.889382Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [2:7591328811432430181:2090] Handle TEvNavigate describe path /dc-1 2026-01-04T02:32:54.889421Z node 2 :TX_PROXY DEBUG: describe.cpp:270: Actor# [2:7591328815727398552:2685] HANDLE EvNavigateScheme /dc-1 2026-01-04T02:32:54.889509Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7591328811432430373:2116], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:54.889774Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][2:7591328811432430440:2127][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7591328811432430373:2116], cookie# 4 2026-01-04T02:32:54.889864Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7591328811432430492:2127][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7591328811432430489:2127], cookie# 4 2026-01-04T02:32:54.889886Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7591328811432430493:2127][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7591328811432430490:2127], cookie# 4 2026-01-04T02:32:54.889902Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7591328811432430494:2127][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7591328811432430491:2127], cookie# 4 2026-01-04T02:32:54.889930Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7591328811432430115:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7591328811432430492:2127], cookie# 4 2026-01-04T02:32:54.889959Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7591328811432430118:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7591328811432430493:2127], cookie# 4 2026-01-04T02:32:54.889977Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7591328811432430121:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7591328811432430494:2127], cookie# 4 2026-01-04T02:32:54.890024Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7591328811432430492:2127][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7591328811432430115:2049], cookie# 4 2026-01-04T02:32:54.890050Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7591328811432430493:2127][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7591328811432430118:2052], cookie# 4 2026-01-04T02:32:54.890070Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7591328811432430494:2127][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: 
sender# [2:7591328811432430121:2055], cookie# 4 2026-01-04T02:32:54.890111Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7591328811432430440:2127][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7591328811432430489:2127], cookie# 4 2026-01-04T02:32:54.890136Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:7591328811432430440:2127][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:32:54.890154Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7591328811432430440:2127][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7591328811432430490:2127], cookie# 4 2026-01-04T02:32:54.890181Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][2:7591328811432430440:2127][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:32:54.890207Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7591328811432430440:2127][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7591328811432430491:2127], cookie# 4 2026-01-04T02:32:54.890220Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][2:7591328811432430440:2127][/dc-1] Sync cookie mismatch: sender# [2:7591328811432430491:2127], cookie# 4, current cookie# 0 2026-01-04T02:32:54.890287Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:7591328811432430373:2116], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:32:54.890376Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7591328811432430373:2116], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7591328811432430440:2127] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767493974309 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:32:54.890459Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7591328811432430373:2116], cacheItem# { Subscriber: { Subscriber: [2:7591328811432430440:2127] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767493974309 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2026-01-04T02:32:54.890663Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7591328815727398553:2686], recipient# [2:7591328815727398552:2685], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: 
true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:32:54.890701Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7591328815727398552:2685] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:32:54.890771Z node 2 :TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7591328815727398552:2685] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:32:54.891507Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7591328815727398552:2685] Handle TEvDescribeSchemeResult Forward to# [2:7591328815727398551:2684] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1767493974309 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true >> TxUsage::WriteToTopic_Demo_21_RestartNo_Table >> TxUsage::WriteToTopic_Demo_11_Table |87.3%| [TA] $(B)/ydb/tests/olap/scenario/test-results/py3test/{meta.json ... 
results_accumulator.log} >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD] >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> BasicUsage::CreateTopicWithCustomName >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood |87.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/scenario/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndDefineWithNodes-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2026-01-04T02:32:49.419790Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328792833218428:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:49.419842Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:49.946318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:32:49.973545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:49.973643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:49.982210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:50.071143Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:50.197203Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:32:50.370034Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591328792833218649:2118] Handle TEvNavigate describe path dc-1 2026-01-04T02:32:50.370091Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591328797128186441:2445] HANDLE EvNavigateScheme dc-1 2026-01-04T02:32:50.370188Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591328792833218656:2121], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:50.370298Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591328792833218914:2289][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591328792833218656:2121], cookie# 1 2026-01-04T02:32:50.372013Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328792833218945:2289][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328792833218942:2289], cookie# 1 2026-01-04T02:32:50.372049Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328792833218946:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328792833218943:2289], cookie# 1 2026-01-04T02:32:50.372063Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328792833218947:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328792833218944:2289], cookie# 1 2026-01-04T02:32:50.372096Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328792833218336:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328792833218945:2289], cookie# 1 2026-01-04T02:32:50.372177Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328792833218339:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328792833218946:2289], cookie# 1 2026-01-04T02:32:50.372200Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328792833218342:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328792833218947:2289], cookie# 1 2026-01-04T02:32:50.372272Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328792833218945:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328792833218336:2050], cookie# 1 2026-01-04T02:32:50.372296Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328792833218946:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328792833218339:2053], cookie# 1 2026-01-04T02:32:50.372312Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328792833218947:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328792833218342:2056], cookie# 1 2026-01-04T02:32:50.372364Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328792833218914:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328792833218942:2289], cookie# 1 2026-01-04T02:32:50.372402Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591328792833218914:2289][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:32:50.372424Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328792833218914:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328792833218943:2289], cookie# 1 2026-01-04T02:32:50.372456Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591328792833218914:2289][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:32:50.372506Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328792833218914:2289][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328792833218944:2289], cookie# 1 2026-01-04T02:32:50.372519Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591328792833218914:2289][/dc-1] Sync cookie mismatch: sender# 
[1:7591328792833218944:2289], cookie# 1, current cookie# 0 2026-01-04T02:32:50.372594Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591328792833218656:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:32:50.395902Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591328792833218656:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591328792833218914:2289] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:32:50.396071Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591328792833218656:2121], cacheItem# { Subscriber: { Subscriber: [1:7591328792833218914:2289] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:32:50.412036Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591328797128186442:2446], recipient# [1:7591328797128186441:2445], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:32:50.417110Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591328797128186441:2445] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:32:50.426857Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591328792833218656:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:50.426928Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7591328792833218656:2121], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2026-01-04T02:32:50.427137Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7591328797128186444:2447][/dc-1/.metadata/initialization/migrations] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: 
[[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:32:50.427544Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591328792833218336:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7591328797128186448:2447] 2026-01-04T02:32:50.427578Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591328792833218336:2050] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-04T02:32:50.427690Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591328792833218336:2050] Subscribe: subscriber# [1:7591328797128186448:2447], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:32:50.427768Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591328792833218339:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7591328797128186449:2447] 2026-01-04T02:32:50.427777Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591328792833218339:2053] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-04T02:32:50.427806Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591328792833218339:2053] Subscribe: subscriber# [1:7591328797128186449:2447], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:32:50.427829Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591328792833218342:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7591328797128186450:2447] 2026-01-04T02:32:50.427835Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591328792833218342:2056] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-04T02:32:50.427853Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591328792833218342:2056] Subscribe: subscriber# [1:7591328797128186450:2447], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72 ... 
Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7591328832775325689:2790] 2026-01-04T02:32:58.999633Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591328832775325683:2790][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7591328832775325686:2790] 2026-01-04T02:32:58.999656Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7591328819890422830:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-04T02:32:58.999661Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591328832775325683:2790][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7591328819890422830:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:32:58.999685Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7591328819890422511:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/delayed_requests DomainOwnerId: 72057594046644480 }: sender# [3:7591328832775325696:2791] 2026-01-04T02:32:58.999695Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7591328819890422511:2050] Upsert description: path# /dc-1/.metadata/workload_manager/delayed_requests 2026-01-04T02:32:58.999713Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7591328819890422830:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7591328832775325683:2790] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:32:58.999724Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7591328819890422511:2050] Subscribe: subscriber# [3:7591328832775325696:2791], path# /dc-1/.metadata/workload_manager/delayed_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:32:58.999751Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7591328819890422511:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/running_requests DomainOwnerId: 72057594046644480 }: sender# [3:7591328832775325702:2792] 2026-01-04T02:32:58.999758Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7591328819890422511:2050] Upsert description: path# /dc-1/.metadata/workload_manager/running_requests 2026-01-04T02:32:58.999760Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591328819890422830:2121], cacheItem# { Subscriber: { Subscriber: [3:7591328832775325683:2790] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: 
ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:32:58.999780Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7591328819890422511:2050] Subscribe: subscriber# [3:7591328832775325702:2792], path# /dc-1/.metadata/workload_manager/running_requests, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:32:58.999794Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7591328832775325696:2791][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7591328819890422511:2050] 2026-01-04T02:32:58.999801Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7591328819890422511:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7591328832775325688:2790] 2026-01-04T02:32:58.999814Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7591328819890422511:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7591328832775325696:2791] 2026-01-04T02:32:58.999824Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591328832775325691:2791][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7591328832775325693:2791] 2026-01-04T02:32:58.999833Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7591328832775325702:2792][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7591328819890422511:2050] 2026-01-04T02:32:58.999855Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591328832775325691:2791][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7591328819890422830:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:32:58.999857Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591328832775325692:2792][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7591328832775325699:2792] 2026-01-04T02:32:58.999876Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7591328819890422511:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7591328832775325702:2792] 2026-01-04T02:32:58.999877Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591328832775325692:2792][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7591328819890422830:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:32:58.999935Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591328832775325706:2794], recipient# [3:7591328832775325678:2309], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:58.999949Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591328832775325705:2793], recipient# [3:7591328832775325681:2312], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:59.935862Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591328819890422830:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:59.935996Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591328819890422830:2121], cacheItem# { Subscriber: { Subscriber: [3:7591328824185391020:2776] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:32:59.936073Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591328837070293016:2795], recipient# [3:7591328837070293015:2315], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:00.004422Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591328819890422830:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:00.004584Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591328819890422830:2121], cacheItem# { Subscriber: { Subscriber: [3:7591328832775325683:2790] DomainOwnerId: 72057594046644480 Type: 2 
SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:00.004695Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591328841365260320:2799], recipient# [3:7591328841365260319:2316], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndLs-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2026-01-04T02:32:49.130991Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328793428707828:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:49.131073Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:49.516716Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:32:49.544788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:49.544910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:49.554294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:49.635537Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:49.810408Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:32:49.908813Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591328793428708010:2127] Handle TEvNavigate describe path dc-1 2026-01-04T02:32:49.908935Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591328793428708536:2446] HANDLE EvNavigateScheme dc-1 2026-01-04T02:32:49.909044Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591328793428708036:2129], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:49.909127Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591328793428708319:2297][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591328793428708036:2129], cookie# 1 2026-01-04T02:32:49.910882Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328793428708367:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328793428708364:2297], cookie# 1 2026-01-04T02:32:49.910921Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328793428708368:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328793428708365:2297], cookie# 1 2026-01-04T02:32:49.910947Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328793428708369:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328793428708366:2297], cookie# 1 2026-01-04T02:32:49.911010Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328789133740428:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328793428708367:2297], cookie# 1 2026-01-04T02:32:49.911044Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328789133740431:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328793428708368:2297], cookie# 1 2026-01-04T02:32:49.911092Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328789133740434:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328793428708369:2297], cookie# 1 2026-01-04T02:32:49.911159Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328793428708367:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328789133740428:2050], cookie# 1 2026-01-04T02:32:49.911180Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328793428708368:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328789133740431:2053], cookie# 1 2026-01-04T02:32:49.911195Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328793428708369:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328789133740434:2056], cookie# 1 2026-01-04T02:32:49.911247Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328793428708319:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328793428708364:2297], cookie# 1 2026-01-04T02:32:49.911272Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591328793428708319:2297][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:32:49.911312Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328793428708319:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328793428708365:2297], cookie# 1 2026-01-04T02:32:49.911337Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591328793428708319:2297][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:32:49.911383Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: 
[main][1:7591328793428708319:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328793428708366:2297], cookie# 1 2026-01-04T02:32:49.911399Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591328793428708319:2297][/dc-1] Sync cookie mismatch: sender# [1:7591328793428708366:2297], cookie# 1, current cookie# 0 2026-01-04T02:32:49.911466Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591328793428708036:2129], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:32:49.925861Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591328793428708036:2129], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591328793428708319:2297] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:32:49.925988Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591328793428708036:2129], cacheItem# { Subscriber: { Subscriber: [1:7591328793428708319:2297] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:32:49.928688Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591328793428708537:2447], recipient# [1:7591328793428708536:2446], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:32:49.928777Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591328793428708536:2446] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:32:49.974593Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591328793428708536:2446] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:32:49.979461Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591328793428708536:2446] Handle TEvDescribeSchemeResult Forward to# [1:7591328793428708535:2445] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 
SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:32:49.985202Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591328793428708010:2127] Handle TEvProposeTransaction 2026-01-04T02:32:49.985230Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591328793428708010:2127] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:32:49.985292Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591328793428708010:2127] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7591328793428708545:2454] 2026-01-04T02:32:50.130830Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591328793428708545:2454] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-01-04T02:32:50.130879Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:28 ... 
lid> AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:32:58.776048Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591328833807228838:2949][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7591328833807228853:2949] 2026-01-04T02:32:58.776053Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7591328820922325485:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7591328833807228849:2948] 2026-01-04T02:32:58.776068Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7591328820922325485:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7591328833807228855:2949] 2026-01-04T02:32:58.776070Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591328833807228838:2949][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7591328820922325799:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:32:58.776082Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7591328820922325488:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7591328833807228850:2948] 2026-01-04T02:32:58.776095Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7591328820922325488:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7591328833807228856:2949] 2026-01-04T02:32:58.776157Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7591328820922325799:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2026-01-04T02:32:58.776228Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7591328820922325799:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7591328833807228836:2947] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:32:58.776241Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7591328820922325482:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7591328833807228848:2948] 2026-01-04T02:32:58.776309Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591328820922325799:2120], cacheItem# { Subscriber: { Subscriber: [3:7591328833807228836:2947] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:32:58.776336Z 
node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7591328820922325799:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-01-04T02:32:58.776373Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7591328820922325799:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7591328833807228837:2948] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:32:58.776420Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591328820922325799:2120], cacheItem# { Subscriber: { Subscriber: [3:7591328833807228837:2948] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:32:58.776471Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7591328820922325799:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-04T02:32:58.776506Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7591328820922325799:2120], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7591328833807228838:2949] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:32:58.776549Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591328820922325799:2120], cacheItem# { Subscriber: { Subscriber: [3:7591328833807228838:2949] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:32:58.776559Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591328833807228857:2950], recipient# [3:7591328833807228829:2311], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable 
RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:58.776606Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591328833807228858:2951], recipient# [3:7591328833807228835:2317], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:59.672225Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591328820922325799:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:59.672391Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591328820922325799:2120], cacheItem# { Subscriber: { Subscriber: [3:7591328825217294174:2935] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:32:59.672515Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591328838102196171:2955], recipient# [3:7591328838102196170:2318], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:59.777493Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591328820922325799:2120], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:59.777678Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591328820922325799:2120], cacheItem# { Subscriber: { Subscriber: [3:7591328833807228838:2949] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 
TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:32:59.777765Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591328838102196176:2956], recipient# [3:7591328838102196175:2319], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndAlterPools-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2026-01-04T02:32:49.251510Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328795603393756:2177];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:49.251556Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:49.695021Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:32:49.765149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:49.784525Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:49.795242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:49.829790Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:50.001843Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:32:50.180035Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591328795603393866:2118] Handle TEvNavigate describe path dc-1 2026-01-04T02:32:50.180110Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591328799898361662:2446] HANDLE EvNavigateScheme dc-1 2026-01-04T02:32:50.180298Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591328795603393873:2121], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: 
Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:50.180391Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591328795603394141:2297][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591328795603393873:2121], cookie# 1 2026-01-04T02:32:50.182146Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328795603394190:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328795603394187:2297], cookie# 1 2026-01-04T02:32:50.182182Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328795603394191:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328795603394188:2297], cookie# 1 2026-01-04T02:32:50.182199Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328795603394192:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328795603394189:2297], cookie# 1 2026-01-04T02:32:50.182251Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328795603393553:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328795603394190:2297], cookie# 1 2026-01-04T02:32:50.182306Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328795603393556:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328795603394191:2297], cookie# 1 2026-01-04T02:32:50.182328Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328795603393559:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328795603394192:2297], cookie# 1 2026-01-04T02:32:50.182391Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328795603394190:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328795603393553:2050], cookie# 1 2026-01-04T02:32:50.182415Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328795603394191:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328795603393556:2053], cookie# 1 2026-01-04T02:32:50.182452Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328795603394192:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328795603393559:2056], cookie# 1 2026-01-04T02:32:50.182500Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328795603394141:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328795603394187:2297], cookie# 1 2026-01-04T02:32:50.182532Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591328795603394141:2297][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:32:50.182560Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328795603394141:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328795603394188:2297], cookie# 1 2026-01-04T02:32:50.182600Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591328795603394141:2297][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:32:50.182638Z node 
1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328795603394141:2297][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328795603394189:2297], cookie# 1 2026-01-04T02:32:50.182652Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591328795603394141:2297][/dc-1] Sync cookie mismatch: sender# [1:7591328795603394189:2297], cookie# 1, current cookie# 0 2026-01-04T02:32:50.182770Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591328795603393873:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:32:50.219625Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591328795603393873:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591328795603394141:2297] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:32:50.219757Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591328795603393873:2121], cacheItem# { Subscriber: { Subscriber: [1:7591328795603394141:2297] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:32:50.223047Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591328799898361663:2447], recipient# [1:7591328799898361662:2446], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:32:50.223216Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591328799898361662:2446] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:32:50.260177Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:50.296304Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591328799898361662:2446] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:32:50.296543Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# 
[1:7591328795603393873:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:50.296610Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7591328795603393873:2121], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2026-01-04T02:32:50.297091Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7591328799898361665:2448][/dc-1/.metadata/initialization/migrations] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:32:50.302096Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591328795603393559:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7591328799898361672:2448] 2026-01-04T02:32:50.302147Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591328795603393559:2056] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-04T02:32:50.302222Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591328795603393559:2056] Subscribe: subscriber# [1:7591328799898361672:2448], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:32:50.305894Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591328795603393553:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7591328799898361670:2448] 2026-01-04T02:32:50.305938Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591328795603393553:2050] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-04T02:32:50.305967Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7591328799898361672:2448][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7591328795603393559:2056] 2026-01-04T02:32:50.306011Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591328795603393553:2050] Subscribe: subscriber# [1:7591328799898361670:2448], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# Ack ... 
ode 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591328826359549306:2121], cacheItem# { Subscriber: { Subscriber: [3:7591328839244452406:3003] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:32:59.776396Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][3:7591328839244452419:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[3:24339059:0], [3:1099535966835:0], [3:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:32:59.776728Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7591328826359548987:2050] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7591328839244452426:3004] 2026-01-04T02:32:59.776737Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7591328826359548987:2050] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2026-01-04T02:32:59.776768Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7591328826359548987:2050] Subscribe: subscriber# [3:7591328839244452426:3004], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:32:59.776796Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7591328826359548990:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7591328839244452427:3004] 2026-01-04T02:32:59.776804Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7591328826359548990:2053] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2026-01-04T02:32:59.776821Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7591328826359548990:2053] Subscribe: subscriber# [3:7591328839244452427:3004], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:32:59.776843Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7591328826359548993:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [3:7591328839244452428:3004] 2026-01-04T02:32:59.776850Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7591328826359548993:2056] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2026-01-04T02:32:59.776868Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7591328826359548993:2056] Subscribe: subscriber# [3:7591328839244452428:3004], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:32:59.776963Z node 3 
:TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591328839244452420:3005], recipient# [3:7591328839244452401:2315], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:59.777051Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7591328839244452426:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7591328826359548987:2050] 2026-01-04T02:32:59.777102Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7591328839244452427:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7591328826359548990:2053] 2026-01-04T02:32:59.777145Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7591328839244452428:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7591328826359548993:2056] 2026-01-04T02:32:59.777184Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591328839244452419:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7591328839244452423:3004] 2026-01-04T02:32:59.777238Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591328839244452419:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7591328839244452424:3004] 2026-01-04T02:32:59.777271Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:7591328839244452419:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [3:7591328826359549306:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:32:59.777294Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591328839244452419:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [3:7591328839244452425:3004] 2026-01-04T02:32:59.777314Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591328839244452419:3004][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [3:7591328826359549306:2121], state# { Deleted: 1 Strong: 1 
Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:32:59.777381Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7591328826359548990:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7591328839244452427:3004] 2026-01-04T02:32:59.777398Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7591328826359548993:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7591328839244452428:3004] 2026-01-04T02:32:59.777431Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7591328826359549306:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-04T02:32:59.777475Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7591328826359549306:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7591328839244452419:3004] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:32:59.777528Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591328826359549306:2121], cacheItem# { Subscriber: { Subscriber: [3:7591328839244452419:3004] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:32:59.777553Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7591328826359548987:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7591328839244452426:3004] 2026-01-04T02:32:59.777596Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591328839244452429:3006], recipient# [3:7591328839244452403:2317], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:00.345953Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591328826359549306:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:00.346155Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: 
self# [3:7591328826359549306:2121], cacheItem# { Subscriber: { Subscriber: [3:7591328830654517567:2841] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:00.346244Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591328843539419738:3010], recipient# [3:7591328843539419737:2319], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TxUsage::WriteToTopic_Demo_12_Table [GOOD] >> BasicUsage::WriteSessionWriteInHandlers [GOOD] >> BasicUsage::BasicWriteSession [GOOD] >> BasicUsage::CloseWriteSessionImmediately >> BasicUsage::WriteSessionNoAvailableDatabase [GOOD] >> BasicUsage::WriteSessionSwitchDatabases >> TxUsage::WriteToTopic_Demo_12_Query >> TExportToS3Tests::CompletedExportEndTime [GOOD] >> KqpErrors::ResolveTableError >> TPQTest::TestReadAndDeleteConsumer [GOOD] >> KqpErrors::ProposeResultLost_RwTx+UseSink >> TExportToS3Tests::DisableAutoDropping >> BasicUsage::CreateTopicWithStreamingConsumer [GOOD] >> BasicUsage::ReadWithoutConsumerWithRestarts [GOOD] >> BasicUsage::ReadWithRestarts >> BasicUsage::FallbackToSingleDb [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest >> BasicUsage::GetAllStartPartitionSessions [GOOD] >> BasicUsage::PreferredDatabaseNoFallback >> KqpErrors::ProposeError ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionWriteInHandlers [GOOD] Test command err: 2026-01-04T02:32:49.239727Z :WriteSessionWriteInHandlers INFO: Random seed for debugging is 1767493969239690 2026-01-04T02:32:49.634954Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328793956996030:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:49.635510Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:49.722972Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:49.824113Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:50.003690Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:50.004202Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-01-04T02:32:50.004332Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:50.240088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:50.240188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:50.256380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:50.264182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:50.385630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:50.436755Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:50.457592Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:50.465239Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:50.479670Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:50.496571Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:50.665309Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:50.766091Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:50.768746Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/002d9f/r3tmp/yandex6I5oct.tmp 2026-01-04T02:32:50.768794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/002d9f/r3tmp/yandex6I5oct.tmp 2026-01-04T02:32:50.768959Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/002d9f/r3tmp/yandex6I5oct.tmp 2026-01-04T02:32:50.769061Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:50.898520Z INFO: TTestServer started on Port 26659 GrpcPort 61617 PQClient connected to localhost:61617 2026-01-04T02:32:51.244820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:51.266435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:32:54.293939Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328815431833760:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.294048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328815431833749:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.294200Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.294561Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328814373884473:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.294574Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328814373884462:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.294663Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.295098Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328815431833780:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.295176Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.296263Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328814373884493:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.296363Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.299599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:32:54.321964Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328815431833814:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.322073Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.331451Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328815431833817:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.331565Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.366339Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591328814373884495:2136] txid# 281474976710657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:54.387836Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328815431833779:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-04T02:32:54.389298Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591328814373884492:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-04T02:32:54.447306Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328815431833873:2994] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:54.480467Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591328814373884522:2142] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:54.610437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:54.634154Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328793956996030:2150];send_to=[0:7307199536658146131:7762515]; ... eceived event: NKikimr::NPQ::TEvPartitionWriter::TEvWriteRequest 2026-01-04T02:33:02.775667Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-01-04T02:33:02.775710Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-01-04T02:33:02.775759Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 2 requestId: cookie: 2 2026-01-04T02:33:02.775898Z node 1 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2026-01-04T02:33:02.776270Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-01-04T02:33:02.776297Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-01-04T02:33:02.776353Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72075186224037892] got client message topic: rt3.dc1--test-topic partition: 0 SourceId: '\0src_id' SeqNo: 2 partNo : 0 messageNo: 3 size 107 offset: -1 2026-01-04T02:33:02.776389Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:636: [72075186224037892][Partition][0][StateIdle] Received TPartition::TEvWrite 2026-01-04T02:33:02.776431Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:02.776443Z node 2 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:33:02.776464Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:33:02.776474Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:02.776489Z node 2 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037892][Partition][0][StateIdle] 
TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:33:02.776563Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 2 partNo 0 2026-01-04T02:33:02.823522Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 1 PartNo 0 PackedSize 181 count 1 nextOffset 2 batches 1 2026-01-04T02:33:02.823582Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:33:02.823592Z node 2 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:33:02.823606Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:02.823926Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72075186224037892][Partition][0][StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 1,1 HeadOffset 1 endOffset 1 curOffset 2 d0000000000_00000000000000000001_00000_0000000001_00000? size 169 WTime 1767493982823 2026-01-04T02:33:02.824146Z node 2 :PERSQUEUE DEBUG: partition.cpp:2305: [72075186224037892][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:33:02.824165Z node 2 :PERSQUEUE DEBUG: read.h:275: [72075186224037892][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:33:02.824248Z node 2 :PERSQUEUE DEBUG: read.h:313: [72075186224037892][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 1 partNo 0 count 1 size 169 2026-01-04T02:33:02.827907Z node 1 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2026-01-04T02:33:02.827248Z node 2 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 1 count 1 size 169 actorID [2:7591328844438656034:2386] 2026-01-04T02:33:02.827302Z node 2 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037892][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:33:02.827354Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037892][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 114 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:33:02.827381Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:60: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyWrite. 
Partition: 0 2026-01-04T02:33:02.827407Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:365: [72075186224037892][Partition][0][StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 2, partNo: 0, Offset: 1 is stored on disk 2026-01-04T02:33:02.827554Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:02.827573Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:02.827583Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:02.827596Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:02.827604Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:02.827630Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037892][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:33:02.827663Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 3 requestId: cookie: 2 2026-01-04T02:33:02.827756Z node 2 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 1 partno 0 count 1 parts 0 suffix '63' size 169 2026-01-04T02:33:02.830190Z :DEBUG: [/Root] TraceId [] SessionId [src_id|4100fccd-76b05c72-c852886e-1d33584f_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2026-01-04T02:33:02.830359Z :DEBUG: [/Root] TraceId [] SessionId [src_id|4100fccd-76b05c72-c852886e-1d33584f_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 2 written { offset: 1 } } write_statistics { persisting_time { nanos: 3000000 } min_queue_wait_time { nanos: 47000000 } max_queue_wait_time { nanos: 47000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-01-04T02:33:02.830401Z :DEBUG: [/Root] TraceId [] SessionId [src_id|4100fccd-76b05c72-c852886e-1d33584f_0] MessageGroupId [src_id] OnAck: seqNo=2, txId=? 2026-01-04T02:33:02.830425Z :DEBUG: [/Root] TraceId [] SessionId [src_id|4100fccd-76b05c72-c852886e-1d33584f_0] MessageGroupId [src_id] Write session: acknoledged message 2 === Inside AcksHandler === Inside SessionClosedHandler 2026-01-04T02:33:02.833109Z :DEBUG: [/Root] TraceId [] SessionId [src_id|4100fccd-76b05c72-c852886e-1d33584f_0] MessageGroupId [src_id] Write 1 messages with Id from 3 to 3 === SessionClosedHandler has 'written' a message 2026-01-04T02:33:02.833218Z :INFO: [/Root] TraceId [] SessionId [src_id|4100fccd-76b05c72-c852886e-1d33584f_0] MessageGroupId [src_id] Write session: close. 
Timeout 0.000000s 2026-01-04T02:33:02.833247Z :INFO: [/Root] TraceId [] SessionId [src_id|4100fccd-76b05c72-c852886e-1d33584f_0] MessageGroupId [src_id] Write session will now close 2026-01-04T02:33:02.833284Z :DEBUG: [/Root] TraceId [] SessionId [src_id|4100fccd-76b05c72-c852886e-1d33584f_0] MessageGroupId [src_id] Write session: aborting 2026-01-04T02:33:02.833684Z :WARNING: [/Root] TraceId [] SessionId [src_id|4100fccd-76b05c72-c852886e-1d33584f_0] MessageGroupId [src_id] Write session: could not confirm all writes in time or session aborted, perform hard shutdown 2026-01-04T02:33:02.833715Z :DEBUG: [/Root] TraceId [] SessionId [src_id|4100fccd-76b05c72-c852886e-1d33584f_0] MessageGroupId [src_id] Write session: destroy 2026-01-04T02:33:02.844641Z node 1 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: src_id|4100fccd-76b05c72-c852886e-1d33584f_0 grpc read done: success: 0 data: 2026-01-04T02:33:02.844672Z node 1 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: src_id|4100fccd-76b05c72-c852886e-1d33584f_0 grpc read failed 2026-01-04T02:33:02.844754Z node 1 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:824: session v1 closed cookie: 2 sessionId: src_id|4100fccd-76b05c72-c852886e-1d33584f_0 2026-01-04T02:33:02.844790Z node 1 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: src_id|4100fccd-76b05c72-c852886e-1d33584f_0 is DEAD 2026-01-04T02:33:02.847690Z node 1 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:33:02.848254Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [1:7591328849791573342:2475] destroyed 2026-01-04T02:33:02.848291Z node 2 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-04T02:33:02.848310Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:02.848319Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:02.848326Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:02.848335Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:02.848342Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:02.924648Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:02.924678Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:02.924689Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:02.924707Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:02.924718Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:03.025164Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:03.025197Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:03.025207Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:03.025223Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:03.025233Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |87.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Table [GOOD] >> BasicUsage::WriteSessionCloseWaitsForWrites [GOOD] >> BasicUsage::WriteSessionCloseIgnoresWrites >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadAndDeleteConsumer [GOOD] Test command err: 2026-01-04T02:29:41.502469Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-04T02:29:41.622330Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:29:41.627904Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:29:41.628020Z node 1 :PERSQUEUE NOTICE: 
pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:29:41.628111Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-04T02:29:41.699231Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:29:41.804319Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "important_user" Generation: 1 Important: true } 2026-01-04T02:29:41.805438Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-04T02:29:41.808325Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-04T02:29:41.854456Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:29:41.855058Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1b069648-a3c496ec-5cb9e992-2f8336dd_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user1" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:181:2194] 2026-01-04T02:29:42.013217Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:42.062080Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:42.086179Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:42.098064Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:42.146087Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:42.193174Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, 
node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:42.234734Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:42.402118Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:42.466087Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:42.731835Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:42.746062Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:43.074085Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:43.350259Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:43.374440Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:43.721241Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:44.035731Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:44.382305Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:44.698414Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:44.726456Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:45.022173Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "" Offset: 0 Count: 2147483647 Bytes: 2147483647 } Cookie: 123 } via pipe: [1:181:2194] 2026-01-04T02:29:45.486372Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:45.806197Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:46.108509Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:46.406140Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 
disallowed 0 2026-01-04T02:29:46.417894Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:46.806161Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:47.105343Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:47.390088Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:47.692001Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:47.939359Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:47.990170Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:48.402196Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:48.690249Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:49.018588Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:49.322142Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:49.440526Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:49.646332Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:49.919236Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:50.220234Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:50.524454Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:50.838156Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:50.866318Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:29:52.094391Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates f ... 
't have tx info 2026-01-04T02:32:56.208427Z node 36 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:32:56.208516Z node 36 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:32:56.208612Z node 36 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [36:154:2057] recipient: [36:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [36:154:2057] recipient: [36:152:2172] Leader for TabletID 72057594037927938 is [36:158:2176] sender: [36:159:2057] recipient: [36:152:2172] Leader for TabletID 72057594037927937 is [36:112:2142] sender: [36:184:2057] recipient: [36:14:2061] 2026-01-04T02:32:56.237208Z node 36 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:32:56.238050Z node 36 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1002 actor [36:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1002 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1002 } Consumers { Name: "user1" Generation: 1002 Important: true } Consumers { Name: "user2" Generation: 1002 Important: true } 2026-01-04T02:32:56.238783Z node 36 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [36:190:2142] 2026-01-04T02:32:56.242145Z node 36 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [36:190:2142] 2026-01-04T02:32:57.485972Z node 36 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:32:57.486725Z node 36 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a0d5bbc5-290c6826-bcc43d1d-73fc2b7d_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [36:112:2142] sender: [36:262:2057] recipient: [36:104:2137] Leader for TabletID 72057594037927937 is [36:112:2142] sender: [36:265:2057] recipient: [36:264:2258] Leader for TabletID 72057594037927937 is [36:266:2259] sender: [36:267:2057] recipient: [36:264:2258] 2026-01-04T02:33:00.292143Z node 36 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:33:00.292244Z node 36 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:33:00.293034Z node 36 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:33:00.293114Z node 36 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:33:00.293900Z node 36 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] 
bootstrapping 0 [36:315:2259] 2026-01-04T02:33:00.336958Z node 36 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:33:00.338329Z node 36 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [36:315:2259] 2026-01-04T02:33:00.382020Z node 36 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 suffix '0' size 8296398 2026-01-04T02:33:00.413398Z node 36 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 36 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:33:00.413575Z node 36 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 36 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [36:266:2259] sender: [36:339:2057] recipient: [36:14:2061] 2026-01-04T02:33:00.418532Z node 36 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:33:00.422732Z node 36 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: cannot finish read request. Consumer user1 is gone from partition 2026-01-04T02:33:00.423064Z node 36 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1003 actor [36:336:2310] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1003 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1002 Important: true } 2026-01-04T02:33:01.074791Z node 37 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 37 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:108:2057] recipient: [37:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:108:2057] recipient: [37:106:2138] Leader for TabletID 72057594037927937 is [37:112:2142] sender: [37:113:2057] recipient: [37:106:2138] 2026-01-04T02:33:01.144694Z node 37 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:33:01.144761Z node 37 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:33:01.144810Z node 37 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:33:01.144862Z node 37 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [37:154:2057] recipient: [37:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [37:154:2057] recipient: [37:152:2172] Leader for TabletID 72057594037927938 is [37:158:2176] sender: [37:159:2057] recipient: [37:152:2172] Leader for TabletID 72057594037927937 is [37:112:2142] sender: [37:182:2057] recipient: [37:14:2061] 2026-01-04T02:33:01.168907Z node 37 :PERSQUEUE NOTICE: 
pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:33:01.169632Z node 37 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1004 actor [37:180:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1004 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1004 } Consumers { Name: "user1" Generation: 1004 Important: true } Consumers { Name: "user2" Generation: 1004 Important: true } 2026-01-04T02:33:01.170360Z node 37 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [37:188:2142] 2026-01-04T02:33:01.173451Z node 37 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [37:188:2142] 2026-01-04T02:33:02.459780Z node 37 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:33:02.460370Z node 37 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|d50f917f-3a778b46-b82b6b02-5b99e568_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [37:112:2142] sender: [37:260:2057] recipient: [37:104:2137] Leader for TabletID 72057594037927937 is [37:112:2142] sender: [37:263:2057] recipient: [37:262:2256] Leader for TabletID 72057594037927937 is [37:264:2257] sender: [37:265:2057] recipient: [37:262:2256] 2026-01-04T02:33:04.994280Z node 37 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:33:04.994349Z node 37 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:33:04.994959Z node 37 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:33:04.995009Z node 37 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:33:04.995636Z node 37 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [37:313:2257] 2026-01-04T02:33:05.026818Z node 37 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:33:05.027834Z node 37 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [37:313:2257] 2026-01-04T02:33:05.055389Z node 37 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 81 parts 0 suffix '0' size 8296398 2026-01-04T02:33:05.095570Z node 37 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 37 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:33:05.095709Z node 37 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 37 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [37:264:2257] sender: [37:337:2057] recipient: [37:14:2061] 2026-01-04T02:33:05.101451Z node 37 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:33:05.106440Z node 37 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: cannot finish read request. Consumer user1 is gone from partition 2026-01-04T02:33:05.106754Z node 37 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1005 actor [37:334:2308] txId 42 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 Version: 1005 LocalDC: true Topic: "topic" Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } Consumers { Name: "user2" Generation: 1004 Important: true } |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TExportToS3Tests::DisableAutoDropping [GOOD] >> TExportToS3Tests::ShouldExcludeBackupTableFromStats [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoGood [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartNo_Table [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_1_Table [GOOD] >> TxUsage::WriteToTopic_Demo_41_Table >> BasicStatistics::Serverless [GOOD] >> DataShardVolatile::UpsertDependenciesShardsRestart-UseSink [GOOD] >> BasicStatistics::TwoServerlessDbs [GOOD] >> BasicUsage::CreateTopicWithCustomName [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartNo_Table [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartNo_Table >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD] |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut >> KqpErrors::ResolveTableError [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> TxUsage::WriteToTopic_Demo_12_Query [GOOD] >> KqpErrors::ProposeResultLost_RwTx+UseSink [GOOD] >> BasicStatistics::DedicatedTimeIntervals [GOOD] >> BasicUsage::CloseWriteSessionImmediately [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanWithRetry [GOOD] >> KqpErrors::ProposeError [GOOD] >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] >> KqpErrors::ProposeResultLost_RwTx-UseSink >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCreateClean [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDrop [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_1_Query [GOOD] >> BasicStatistics::TwoTables [GOOD] >> BasicStatistics::SimpleGlobalIndex [GOOD] >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] >> BasicUsage::ReadWithRestarts [GOOD] >> Describe::LocationWithKillTablets >> BasicStatistics::TwoDatabases [GOOD] >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] >> BasicStatistics::Simple [GOOD] >> 
TExportToS3Tests::AuditCompletedExport [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartNo_Query >> TxUsage::Sinks_Oltp_WriteToTopic_1_Query >> TExportToS3Tests::ShouldCheckQuotasExportsLimited >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood >> BasicStatistics::PersistenceWithStorageFailuresAndReboots [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_MoveDeadLetterPolicy >> TxUsage::WriteToTopic_Demo_21_RestartNo_Query >> TxUsage::WriteToTopic_Demo_13_Table >> PersQueueSdkReadSessionTest::StopResumeReadingData >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables >> KqpErrors::ProposeErrorEvWrite >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Table >> TExportToS3Tests::AuditCancelledExport >> TExportToS3Tests::AuditCancelledExport [GOOD] >> TExportToS3Tests::DecimalOutOfRange >> TExportToS3Tests::ShouldCheckQuotasExportsLimited [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartNo_Query [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_MoveDeadLetterPolicy [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_1_Query [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes+UseSink >> TxUsage::WriteToTopic_Demo_41_Table [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestCreateCleanManyTables [GOOD] >> KqpErrors::ProposeErrorEvWrite [GOOD] >> TxUsage::WriteToTopic_Demo_13_Table [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartNo_Table [GOOD] >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp >> KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Table [GOOD] >> Describe::LocationWithKillTablets [GOOD] >> TExportToS3Tests::AutoDropping >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Table >> BasicUsage::CreateTopicWithSharedConsumer_DeleteDeadLetterPolicy >> TxUsage::Sinks_Oltp_WriteToTopic_2_Table >> TExportToS3Tests::ShouldCheckQuotasChildrenLimited |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut |87.4%| [LD] {RESULT} $(B)/ydb/services/ydb/ut/ydb-services-ydb-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::DedicatedTimeIntervals [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:20.515936Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:20.653894Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:20.664562Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:347:2226], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:20.664916Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:20.665035Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:21.157541Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:21.250331Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:21.250489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:21.299848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:21.380513Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:22.039170Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:22.039225Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:22.039269Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:22.041637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:22.041991Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:22.113190Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:22.705841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:32:26.132854Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:26.139943Z node 3 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 3 2026-01-04T02:32:26.145550Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:26.177393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:32:26.177521Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:26.221003Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-04T02:32:26.234961Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:26.422080Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:26.422204Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:26.449879Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:26.522538Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.523508Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.524537Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.525318Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.525745Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.526054Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.526388Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.526581Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.526800Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.745506Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:26.794127Z node 3 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:32:26.794241Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:32:26.818060Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:32:26.819391Z node 3 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:32:26.819625Z node 3 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:32:26.819707Z node 3 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:32:26.819771Z node 3 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:32:26.819829Z node 3 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:32:26.819890Z node 3 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:32:26.819947Z node 3 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:32:26.822118Z node 3 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:32:26.843404Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:26.843541Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [3:2215:2588], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:26.871026Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2257:2627] 2026-01-04T02:32:26.871322Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:2257:2627], schemeshard id = 72075186224037897 2026-01-04T02:32:26.975946Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2352:2673] 2026-01-04T02:32:26.978219Z node 3 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database1 2026-01-04T02:32:26.986726Z node 3 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [3:2357:2677] Owner: [3:2356:2676]. Describe result: PathErrorUnknown 2026-01-04T02:32:26.986805Z node 3 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [3:2357:2677] Owner: [3:2356:2676]. Creating table 2026-01-04T02:32:26.986909Z node 3 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [3:2357:2677] Owner: [3:2356:2676]. Created ESchemeOpCreateTable transaction for path: /Root/Database1/.metadata/_statistics 2026-01-04T02:32:26.998306Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [3:2399:2695], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:27.003083Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:27.011584Z node 3 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [3:2357:2677] Owner: [3:2356:2676]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:32:27.011756Z node 3 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [3:2357:2677] Owner: [3:2356:2676]. Subscribe on create table tx: 281474976720657 2026-01-04T02:32:27.028216Z node 3 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [3:2357:2677] Owner: [3:2356:2676]. 
Subscribe on tx: 281474976720657 registered 2026-01-04T02:32:27.048663Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database1/.metadata/script_executions 2026-01-04T02:32:27.255896Z node 3 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:32:27.709050Z node 3 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [3:2357:2677] Owner: [3:2356:2676]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-04T02:32:27.839568Z node 3 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [3:2357:2677] Owner: [3:2356:2676]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:32:27.839667Z node 3 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [3:2357:2677] Owner: [3:2356:2676]. Column diff is empty, finishing 2026-01-04T02:32:28.566552Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWake ... e 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [3:5918:3391], StatRequests.size() = 1 2026-01-04T02:33:03.705468Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:33:03.705579Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:33:03.705629Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:33:03.705678Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-04T02:33:03.833662Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [3:5965:3404]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:03.834213Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-04T02:33:03.834293Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [3:5965:3404], StatRequests.size() = 1 2026-01-04T02:33:04.579783Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:05.019354Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [3:6013:3416]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:05.019680Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-04T02:33:05.019729Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [3:6013:3416], StatRequests.size() = 1 2026-01-04T02:33:06.131869Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-04T02:33:06.132087Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 10 2026-01-04T02:33:06.132624Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:33:06.132832Z node 3 
:STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 10 2026-01-04T02:33:06.156489Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:33:06.156572Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224037897 2026-01-04T02:33:06.156885Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-04T02:33:06.171732Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:06.227998Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [3:6055:3428]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:06.228314Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-04T02:33:06.228357Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [3:6055:3428], StatRequests.size() = 1 2026-01-04T02:33:07.293284Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [3:6101:3440]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:07.293651Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-04T02:33:07.293699Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [3:6101:3440], StatRequests.size() = 1 2026-01-04T02:33:08.016735Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224038898 2026-01-04T02:33:08.016785Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224038898 2026-01-04T02:33:08.017258Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038898, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-04T02:33:08.031701Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2026-01-04T02:33:08.384800Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:08.762240Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [3:6142:3450]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:08.762527Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-04T02:33:08.762566Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [3:6142:3450], StatRequests.size() = 1 2026-01-04T02:33:09.950045Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-04T02:33:09.950254Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 11 2026-01-04T02:33:09.951012Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:33:09.951273Z node 
3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 11 2026-01-04T02:33:10.064920Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:33:10.064998Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224037897 2026-01-04T02:33:10.065333Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-04T02:33:10.079514Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:10.124183Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [3:6189:3464]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:10.124570Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-04T02:33:10.124621Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [3:6189:3464], StatRequests.size() = 1 2026-01-04T02:33:10.125142Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6191:3379]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:10.183780Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:33:10.183906Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:6201:3383] 2026-01-04T02:33:10.183972Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:6201:3383] 2026-01-04T02:33:10.185287Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224038895] EvServerConnected, pipe server id = [2:6208:3384] 2026-01-04T02:33:10.185600Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6201:3383], server id = [2:6208:3384], tablet id = 72075186224038895, status = OK 2026-01-04T02:33:10.185662Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224038895] EvConnectNode, pipe server id = [2:6208:3384], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:33:10.185716Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:33:10.186093Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:33:10.186185Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:6191:3379], StatRequests.size() = 1 2026-01-04T02:33:10.186324Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224038895] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:33:10.243531Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224038895] EvFastPropagateCheck 2026-01-04T02:33:10.243624Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224038895] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:33:10.298357Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:6201:3383], schemeshard count = 1 2026-01-04T02:33:11.912672Z node 2 :STATISTICS 
DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224038898 2026-01-04T02:33:11.912761Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224038898 2026-01-04T02:33:11.912848Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224038895] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:33:11.913154Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 1 2026-01-04T02:33:11.913875Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038898, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-04T02:33:11.914187Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224038895] EvPropagateStatisticsResponse, cookie: 1 2026-01-04T02:33:11.928642Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2026-01-04T02:33:12.149195Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:13.627955Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-04T02:33:13.628193Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 12 2026-01-04T02:33:13.628953Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:33:13.629306Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 2026-01-04T02:33:13.673657Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:33:13.673731Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 3.000000s, at schemeshard: 72075186224037897 2026-01-04T02:33:13.674080Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-04T02:33:13.688488Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:13.951767Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224038895] EvPropagateTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::ShouldRetryAtFinalStage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:32:31.985041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:32:31.985135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-04T02:32:31.985175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:32:31.985221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:32:31.985282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:32:31.985316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:32:31.985383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:31.985467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:32:31.986419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:32:31.986720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:32:32.084588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:32:32.084654Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:32.099202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:32:32.099414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:32:32.099619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:32:32.109773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:32:32.110103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:32:32.110982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:32.111301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:32:32.114880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:32.115200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:32.116533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:32.116607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:32.116737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2026-01-04T02:32:32.116813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:32.116867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:32.117098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:32:32.282220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.283363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.283518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.283637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.283723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.283824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.283900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.284034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.284125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.284242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.284334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.284473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.284589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.284708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.284778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... emove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:33:22.926764Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-04T02:33:22.926850Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-04T02:33:22.926896Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-04T02:33:22.926935Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-01-04T02:33:22.926982Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:33:22.927065Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2026-01-04T02:33:22.928177Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710761, at schemeshard: 72057594046678944 2026-01-04T02:33:22.928221Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 0/1, is published: true 2026-01-04T02:33:22.928286Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710761, at schemeshard: 72057594046678944 2026-01-04T02:33:22.929851Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710761:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710761 msg type: 269090816 2026-01-04T02:33:22.929994Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710761, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:33:22.930227Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 FAKE_COORDINATOR: Add transaction: 281474976710761 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710761 at step: 5000043 
2026-01-04T02:33:22.930933Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:33:22.931038Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710761 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 30064773232 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:33:22.931092Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710761:0, step: 5000043, at schemeshard: 72057594046678944 2026-01-04T02:33:22.931233Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710761:0, at schemeshard: 72057594046678944 2026-01-04T02:33:22.931315Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2026-01-04T02:33:22.931365Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-04T02:33:22.931424Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710761:0 progress is 1/1 2026-01-04T02:33:22.931472Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-04T02:33:22.931541Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:33:22.931624Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:33:22.931688Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710761, ready parts: 1/1, is published: false 2026-01-04T02:33:22.931776Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710761 ready parts: 1/1 2026-01-04T02:33:22.931829Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710761:0 2026-01-04T02:33:22.931876Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710761:0 2026-01-04T02:33:22.931974Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:33:22.932027Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710761, publications: 2, subscribers: 1 2026-01-04T02:33:22.932082Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-04T02:33:22.932165Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710761, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-01-04T02:33:22.932972Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-04T02:33:22.934468Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:33:22.934512Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:33:22.934700Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710761, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:33:22.934836Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:33:22.934890Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:211:2212], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 1 2026-01-04T02:33:22.934932Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:211:2212], at schemeshard: 72057594046678944, txId: 281474976710761, path id: 39 FAKE_COORDINATOR: Erasing txId 281474976710761 2026-01-04T02:33:22.935820Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-04T02:33:22.935919Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-04T02:33:22.935961Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-04T02:33:22.936016Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-04T02:33:22.936081Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:33:22.936654Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-04T02:33:22.936750Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710761 2026-01-04T02:33:22.936797Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710761 2026-01-04T02:33:22.936869Z node 7 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710761, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-04T02:33:22.936933Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:33:22.937019Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710761, subscribers: 1 2026-01-04T02:33:22.937078Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [7:127:2152] 2026-01-04T02:33:22.939271Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-04T02:33:22.940222Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710761 2026-01-04T02:33:22.940310Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-01-04T02:33:22.940363Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-04T02:33:22.942088Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:33:22.942158Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:997:2949] TestWaitNotification: OK eventTxId 102 >> TxUsage::WriteToTopic_Demo_42_Table >> TxUsage::WriteToTopic_Demo_21_RestartNo_Query [GOOD] >> TxUsage::WriteToTopic_Demo_13_Query >> TxUsage::WriteToTopic_Demo_19_RestartNo_Query >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Query >> ReadSessionImplTest::BrokenCompressedData [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::FallbackToSingleDbAfterBadRequest [GOOD] Test command err: 2026-01-04T02:32:51.555063Z :FallbackToSingleDb INFO: Random seed for debugging is 1767493971555026 2026-01-04T02:32:51.827556Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328803852184546:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:51.827814Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:51.903547Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328802085489674:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:51.907662Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:51.914601Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:51.938703Z node 2 
:PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:52.204620Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:52.211571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:52.250671Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:52.250791Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:52.253129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:52.253211Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:52.295832Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:52.303985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:52.304357Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:52.350406Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:52.386821Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:52.437689Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:52.659180Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/002abd/r3tmp/yandexXG3edx.tmp 2026-01-04T02:32:52.659217Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/002abd/r3tmp/yandexXG3edx.tmp 2026-01-04T02:32:52.659361Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/002abd/r3tmp/yandexXG3edx.tmp 2026-01-04T02:32:52.659503Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:52.711627Z INFO: TTestServer started on Port 18584 GrpcPort 9564 2026-01-04T02:32:52.853185Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:52.919781Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:9564 2026-01-04T02:32:53.018753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:53.126090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-01-04T02:32:56.117266Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328823560326484:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.117267Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328823560326472:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.117383Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.120250Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328823560326487:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.120394Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.118974Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328825327022309:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.119051Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.119118Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328825327022338:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.123869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:32:56.143726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328825327022373:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.143831Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.144196Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328825327022376:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.144261Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.180235Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591328823560326489:2133] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:56.193557Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328825327022340:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-04T02:32:56.194994Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591328823560326486:2307], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-04T02:32:56.256696Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328825327022425:2958] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:56.280152Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591328823560326516:2139] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:56.406533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:56.423126Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328825327022436:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:32:56.423738Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NDlkMjllNGItND ... rocess user action and tx pending commits 2026-01-04T02:33:16.071839Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:16.071849Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:16.172164Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:16.172201Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:16.172215Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:16.172232Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:16.172246Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:16.272519Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:16.272552Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:16.272564Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:16.272581Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:16.272595Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:16.372849Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:16.372873Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:16.372882Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:16.372895Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:16.372904Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:16.473164Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:16.473186Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:16.473196Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:16.473207Z node 4 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:16.473218Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:16.573553Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:16.573580Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:16.573589Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:16.573603Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:16.573611Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:16.673925Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:16.673959Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:16.673971Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:16.673989Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:16.674002Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:16.774270Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:16.774304Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:16.774318Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:16.774339Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:16.774353Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist >>> Ready to answer: ok 2026-01-04T02:33:16.803319Z :INFO: [/Root] OnFederationDiscovery fall back to single mode, database=/Root 2026-01-04T02:33:16.803431Z :INFO: [/Root] [] [ee5a9405-53e482f-8d73ce6-3c02b77d] Open read subsessions to databases: { name: , endpoint: localhost:29511, path: /Root } 2026-01-04T02:33:16.803648Z :INFO: [/Root] [/Root] [bfce433-3675af52-1f2af986-ac43f8b6] Starting read session 2026-01-04T02:33:16.803709Z :DEBUG: [/Root] [/Root] [bfce433-3675af52-1f2af986-ac43f8b6] Starting single session 2026-01-04T02:33:16.804159Z :DEBUG: [/Root] [/Root] [bfce433-3675af52-1f2af986-ac43f8b6] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2026-01-04T02:33:16.804199Z :DEBUG: [/Root] [/Root] [bfce433-3675af52-1f2af986-ac43f8b6] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2026-01-04T02:33:16.804333Z :DEBUG: [/Root] [/Root] [bfce433-3675af52-1f2af986-ac43f8b6] [] Reconnecting session to cluster in 0.000000s 
2026-01-04T02:33:16.804641Z :ERROR: [/Root] [/Root] [bfce433-3675af52-1f2af986-ac43f8b6] [] Got error. Status: CLIENT_CALL_UNIMPLEMENTED. Description:
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:29511
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:29511. 2026-01-04T02:33:16.804696Z :DEBUG: [/Root] [/Root] [bfce433-3675af52-1f2af986-ac43f8b6] [] In Reconnect, ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2026-01-04T02:33:16.804726Z :DEBUG: [/Root] [/Root] [bfce433-3675af52-1f2af986-ac43f8b6] [] New values: ReadSizeBudget = 524288, ReadSizeServerDelta = 0 2026-01-04T02:33:16.804855Z :INFO: [/Root] [/Root] [bfce433-3675af52-1f2af986-ac43f8b6] [] Closing session to cluster: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:29511" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:29511
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:29511. " } 2026-01-04T02:33:16.805158Z :NOTICE: [/Root] [/Root] [bfce433-3675af52-1f2af986-ac43f8b6] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:33:16.805209Z :DEBUG: [/Root] [/Root] [bfce433-3675af52-1f2af986-ac43f8b6] [] Abort session to cluster Got new read session event: SessionClosed { Status: CLIENT_CALL_UNIMPLEMENTED Issues: "
: Error: Failed to establish connection to server "localhost:29511" ( cluster ). Attempts done: 1
: Error: GRpc error: (12):
: Error: Grpc error response on endpoint localhost:29511
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:29511. " } 2026-01-04T02:33:16.805337Z :INFO: [/Root] [/Root] [bfce433-3675af52-1f2af986-ac43f8b6] Closing read session. Close timeout: 0.010000s 2026-01-04T02:33:16.805396Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-04T02:33:16.805438Z :INFO: [/Root] [/Root] [bfce433-3675af52-1f2af986-ac43f8b6] Counters: { Errors: 1 CurrentSessionLifetimeMs: 1 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:33:16.805474Z :INFO: [/Root] [/Root] [bfce433-3675af52-1f2af986-ac43f8b6] Closing read session. Close timeout: 0.000000s 2026-01-04T02:33:16.805502Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-04T02:33:16.805532Z :INFO: [/Root] [/Root] [bfce433-3675af52-1f2af986-ac43f8b6] Counters: { Errors: 1 CurrentSessionLifetimeMs: 1 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:33:16.805583Z :INFO: [/Root] [/Root] [bfce433-3675af52-1f2af986-ac43f8b6] Closing read session. Close timeout: 0.000000s 2026-01-04T02:33:16.805614Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-04T02:33:16.805650Z :INFO: [/Root] [/Root] [bfce433-3675af52-1f2af986-ac43f8b6] Counters: { Errors: 1 CurrentSessionLifetimeMs: 2 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:33:16.805708Z :NOTICE: [/Root] [/Root] [bfce433-3675af52-1f2af986-ac43f8b6] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:33:16.874627Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:16.874659Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:16.874668Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:16.874683Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:16.874694Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:16.974978Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:16.975007Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:16.975019Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:16.975036Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:16.975049Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::SimpleGlobalIndex [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:35.311689Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:35.401739Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:35.408858Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:35.409143Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:35.409279Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:35.838250Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:35.934594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:35.934758Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:35.982557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:36.074847Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:36.717770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:36.718808Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:36.718862Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:36.718900Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:36.719315Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:36.785855Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:37.298123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:32:40.277102Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:40.286189Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:32:40.290725Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:40.352919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:32:40.353057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:40.400390Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:40.410553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:40.684624Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:40.685255Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:40.686454Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:40.687129Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:40.687351Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:40.687588Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:40.687845Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:40.688046Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:40.688131Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:40.864004Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:32:40.876428Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:40.876571Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:40.891399Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:41.046230Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:41.096905Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:32:41.096977Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:32:41.121283Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:32:41.121563Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:32:41.121737Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:32:41.121786Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:32:41.121837Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:32:41.121890Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:32:41.121939Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:32:41.121977Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:32:41.122598Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:32:41.263664Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2223:2620] 2026-01-04T02:32:41.264151Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:32:41.265968Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Describe result: PathErrorUnknown 2026-01-04T02:32:41.266014Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Creating table 2026-01-04T02:32:41.266086Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:41.268933Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2627], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:41.271658Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2234:2625] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:32:41.274614Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:32:41.335358Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:32:41.351365Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:41.351461Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2302:2660], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:41.363009Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2328:2683] 2026-01-04T02:32:41.364030Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2328:2683], schemeshard id = 72075186224037897 2026-01-04T02:32:41.712965Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:41.715307Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2594:2886], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:41.719269Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:41.724332Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:32:41.724476Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on create table tx: 28 ... 6224037894] EvPropagateStatisticsResponse, cookie: 4 2026-01-04T02:33:02.912893Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 20 ], ReplyToActorId[ [2:3887:3412]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:02.913298Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2026-01-04T02:33:02.913352Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:3887:3412], StatRequests.size() = 1 2026-01-04T02:33:03.806654Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:3924:3428]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:03.807018Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-04T02:33:03.807063Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:3924:3428], StatRequests.size() = 1 2026-01-04T02:33:04.627571Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:3953:3440]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:04.627995Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-04T02:33:04.628045Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:3953:3440], StatRequests.size() = 1 2026-01-04T02:33:05.179519Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:05.597385Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:3986:3454]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:05.597692Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-04T02:33:05.597730Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:3986:3454], StatRequests.size() = 1 2026-01-04T02:33:06.588362Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:4013:3464]], StatType[ 0 ], StatRequestsCount[ 1 ] 
2026-01-04T02:33:06.588637Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-04T02:33:06.588679Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:4013:3464], StatRequests.size() = 1 2026-01-04T02:33:07.216145Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:33:07.216812Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2026-01-04T02:33:07.217157Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-04T02:33:07.217236Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-04T02:33:07.623925Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:4040:3474]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:07.624285Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-04T02:33:07.624338Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:4040:3474], StatRequests.size() = 1 2026-01-04T02:33:08.615279Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:4069:3486]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:08.615559Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-04T02:33:08.615601Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:4069:3486], StatRequests.size() = 1 2026-01-04T02:33:09.630908Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:4100:3496]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:09.631249Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-04T02:33:09.631294Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4100:3496], StatRequests.size() = 1 2026-01-04T02:33:10.472828Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:33:10.472921Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:33:10.472963Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:33:10.473020Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-04T02:33:10.581137Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:11.013578Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4136:3511]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:11.013877Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-04T02:33:11.013921Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = 
[2:4136:3511], StatRequests.size() = 1 2026-01-04T02:33:12.105747Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4169:3523]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:12.106089Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-04T02:33:12.106137Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:4169:3523], StatRequests.size() = 1 2026-01-04T02:33:12.909694Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:33:12.910161Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2026-01-04T02:33:12.910422Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-04T02:33:12.910543Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-04T02:33:13.091333Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 3, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:33:13.091405Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:33:13.091595Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 73, entries count: 3, are all stats full: 1 2026-01-04T02:33:13.104940Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:13.515830Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4200:3537]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:13.516203Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-04T02:33:13.516250Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4200:3537], StatRequests.size() = 1 2026-01-04T02:33:14.628011Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4229:3549]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:14.628279Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-04T02:33:14.628317Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4229:3549], StatRequests.size() = 1 2026-01-04T02:33:15.605774Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4256:3559]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:15.606103Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-04T02:33:15.606147Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4256:3559], StatRequests.size() = 1 2026-01-04T02:33:16.280862Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:16.675204Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4289:3571]], StatType[ 0 ], 
StatRequestsCount[ 1 ] 2026-01-04T02:33:16.675493Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-04T02:33:16.675526Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4289:3571], StatRequests.size() = 1 2026-01-04T02:33:17.682755Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], ReplyToActorId[ [2:4318:3583]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:17.683031Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2026-01-04T02:33:17.683069Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 34, ReplyToActorId = [2:4318:3583], StatRequests.size() = 1 2026-01-04T02:33:18.321159Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:33:18.321308Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 7 2026-01-04T02:33:18.321752Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:33:18.321918Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-04T02:33:18.406971Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 3, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:33:18.407026Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:33:18.407221Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 73, entries count: 3, are all stats full: 1 2026-01-04T02:33:18.420196Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:18.751193Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 35 ], ReplyToActorId[ [2:4351:3597]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:18.751514Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 35 ] 2026-01-04T02:33:18.751560Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 35, ReplyToActorId = [2:4351:3597], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:38.223758Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:38.348814Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:38.357638Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:38.358078Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:38.358236Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:38.745592Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:38.840129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:38.840352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:38.887876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:38.984436Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:39.674479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:39.675143Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:39.675190Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:39.675231Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:39.675815Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:39.746368Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:40.289348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:32:43.689425Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:43.699738Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:32:43.703749Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:43.741983Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:32:43.742084Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:43.776367Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:43.778731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:44.014681Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:44.014808Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:44.016817Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:44.017647Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:44.026419Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:44.027703Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:44.027936Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:44.028080Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:44.028266Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:44.028386Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:44.028449Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:44.060550Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:44.358791Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:44.387733Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:32:44.387815Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:32:44.424365Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:32:44.426981Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:32:44.427263Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:32:44.427334Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:32:44.427401Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:32:44.427466Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:32:44.427546Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:32:44.427611Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:32:44.428351Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:32:44.428725Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:32:44.431774Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:32:44.463867Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:32:44.463955Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:32:44.464086Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:44.488578Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:44.501507Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:32:44.512907Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:32:44.518359Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:44.518492Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:44.561506Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:32:44.562036Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:32:44.696262Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:32:44.738719Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:32:45.126336Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:45.129004Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:45.133328Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:45.137317Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:32:45.137432Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:33:06.671297Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 20 ], ReplyToActorId[ [2:3644:3313]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:06.671564Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2026-01-04T02:33:06.671603Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:3644:3313], StatRequests.size() = 1 2026-01-04T02:33:07.760977Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:3679:3327]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:07.761302Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-04T02:33:07.761344Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:3679:3327], StatRequests.size() = 1 2026-01-04T02:33:08.774099Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:3708:3339]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:08.774407Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-04T02:33:08.774447Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:3708:3339], StatRequests.size() = 1 2026-01-04T02:33:09.298010Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:09.860199Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:3739:3353]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:09.860435Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-04T02:33:09.860466Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:3739:3353], StatRequests.size() = 1 2026-01-04T02:33:10.825156Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:3766:3363]], StatType[ 0 ], StatRequestsCount[ 1 ] 
2026-01-04T02:33:10.825384Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-04T02:33:10.825415Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:3766:3363], StatRequests.size() = 1 2026-01-04T02:33:11.294886Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:33:11.295269Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2026-01-04T02:33:11.295590Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-04T02:33:11.295688Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-04T02:33:11.811387Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:3793:3373]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:11.811726Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-04T02:33:11.811779Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:3793:3373], StatRequests.size() = 1 2026-01-04T02:33:12.840383Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:3824:3385]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:12.840740Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-04T02:33:12.840795Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:3824:3385], StatRequests.size() = 1 2026-01-04T02:33:13.862141Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:3855:3395]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:13.862463Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-04T02:33:13.862509Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:3855:3395], StatRequests.size() = 1 2026-01-04T02:33:14.418120Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:14.653058Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:33:14.653139Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:33:14.653178Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:33:14.653216Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-04T02:33:15.191484Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:3891:3410]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:15.191827Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-04T02:33:15.191881Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = 
[2:3891:3410], StatRequests.size() = 1 2026-01-04T02:33:16.246995Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:3922:3422]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:16.247355Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-04T02:33:16.247407Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:3922:3422], StatRequests.size() = 1 2026-01-04T02:33:16.897249Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:33:16.897411Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 6 2026-01-04T02:33:16.897494Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-04T02:33:16.897730Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:33:16.973798Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:33:16.973917Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:33:16.974222Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-04T02:33:16.987686Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:17.544498Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:3951:3434]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:17.544740Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-04T02:33:17.544777Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:3951:3434], StatRequests.size() = 1 2026-01-04T02:33:18.619377Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:3980:3446]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:18.619694Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-04T02:33:18.619739Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:3980:3446], StatRequests.size() = 1 2026-01-04T02:33:19.614483Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4009:3456]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:19.614794Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-04T02:33:19.614839Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4009:3456], StatRequests.size() = 1 2026-01-04T02:33:20.134393Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:20.712959Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4042:3468]], StatType[ 0 ], 
StatRequestsCount[ 1 ] 2026-01-04T02:33:20.713286Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-04T02:33:20.713324Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4042:3468], StatRequests.size() = 1 2026-01-04T02:33:21.723381Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], ReplyToActorId[ [2:4071:3480]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:21.723623Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2026-01-04T02:33:21.723655Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 34, ReplyToActorId = [2:4071:3480], StatRequests.size() = 1 2026-01-04T02:33:22.222532Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:33:22.222881Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 7 2026-01-04T02:33:22.223192Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-04T02:33:22.223258Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-04T02:33:22.310788Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:33:22.310870Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:33:22.311118Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-04T02:33:22.323957Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:22.786418Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 35 ], ReplyToActorId[ [2:4102:3494]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:22.786653Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 35 ] 2026-01-04T02:33:22.786690Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 35, ReplyToActorId = [2:4102:3494], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoDatabases [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:22.706640Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:22.850887Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:22.858911Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:530:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:22.859239Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:22.859423Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:23.265949Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:23.377541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:23.377703Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:23.414773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:23.525096Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:24.447426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:24.448540Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:24.448613Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:24.448647Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:24.449138Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:24.527144Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:25.166882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:32:28.591953Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:28.596472Z node 3 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 3 2026-01-04T02:32:28.600479Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:28.634479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:32:28.634580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:28.662916Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-04T02:32:28.664938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:28.835669Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:28.835788Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:28.837191Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:28.837886Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:28.838467Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:28.839155Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:28.839412Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:28.839549Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:28.839660Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:28.839825Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:28.840079Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:28.856084Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:29.088113Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:29.126077Z node 3 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:32:29.126190Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:32:29.161451Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:32:29.163065Z node 3 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:32:29.163306Z node 3 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:32:29.163371Z node 3 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:32:29.163425Z node 3 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:32:29.163496Z node 3 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:32:29.163563Z node 3 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:32:29.163639Z node 3 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:32:29.164349Z node 3 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:32:29.181916Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:29.182006Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [3:2208:2590], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:29.207921Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2281:2643] 2026-01-04T02:32:29.208347Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:2281:2643], schemeshard id = 72075186224037897 2026-01-04T02:32:29.266958Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2333:2667] 2026-01-04T02:32:29.270626Z node 3 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database1 2026-01-04T02:32:29.282397Z node 3 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [3:2339:2671] Owner: [3:2338:2670]. Describe result: PathErrorUnknown 2026-01-04T02:32:29.282487Z node 3 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [3:2339:2671] Owner: [3:2338:2670]. Creating table 2026-01-04T02:32:29.282571Z node 3 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [3:2339:2671] Owner: [3:2338:2670]. Created ESchemeOpCreateTable transaction for path: /Root/Database1/.metadata/_statistics 2026-01-04T02:32:29.289138Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [3:2390:2694], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:29.293382Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:29.302716Z node 3 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [3:2339:2671] Owner: [3:2338:2670]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:32:29.302861Z node 3 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [3:2339:2671] Owner: [3:2338:2670]. Subscribe on create table tx: 281474976720657 2026-01-04T02:32:29.316183Z node 3 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [3:2339:2671] Owner: [3:2338:2670]. 
Subscribe on tx: 281474976720657 registered 2026-01-04T02:32:29.372312Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database1/.metadata/script_executions 2026-01-04T02:32:29.541428Z node 3 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:32:29.931281Z node 3 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [3:2339:2671] Owner: [3:2338:2670]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-04T02:32:30.072606Z node 3 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [3:2339:2671] Owner: [3:2338:2670]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:32:30.072718Z node 3 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [3:2339:2671] Owner: [3:2338:2670]. Column diff is empty, finishing 2026-01-04T02:32:30.861088Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWake ... d[ 22 ] 2026-01-04T02:33:03.592422Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [3:5796:3359], StatRequests.size() = 1 2026-01-04T02:33:04.730123Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-04T02:33:04.730592Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2026-01-04T02:33:04.730950Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2026-01-04T02:33:04.731028Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-04T02:33:04.879438Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [3:5839:3369]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:04.879753Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-04T02:33:04.879802Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [3:5839:3369], StatRequests.size() = 1 2026-01-04T02:33:06.138874Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [3:5884:3381]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:06.139345Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-04T02:33:06.139397Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [3:5884:3381], StatRequests.size() = 1 2026-01-04T02:33:07.356050Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [3:5927:3391]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:07.356338Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-04T02:33:07.356371Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [3:5927:3391], StatRequests.size() = 1 2026-01-04T02:33:08.564384Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 
2026-01-04T02:33:08.681247Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:33:08.681352Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:33:08.681398Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:33:08.681462Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-04T02:33:08.841608Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [3:5974:3404]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:08.842018Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-04T02:33:08.842068Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [3:5974:3404], StatRequests.size() = 1 2026-01-04T02:33:10.141916Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [3:6022:3416]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:10.142254Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-04T02:33:10.142303Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [3:6022:3416], StatRequests.size() = 1 2026-01-04T02:33:11.390286Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-04T02:33:11.390513Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 6 2026-01-04T02:33:11.390620Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-04T02:33:11.390922Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:33:11.424707Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:33:11.424794Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:33:11.425063Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-04T02:33:11.439453Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:11.642608Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [3:6064:3428]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:11.642923Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-04T02:33:11.642969Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [3:6064:3428], StatRequests.size() = 1 2026-01-04T02:33:12.869715Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [3:6112:3440]], StatType[ 0 
], StatRequestsCount[ 1 ] 2026-01-04T02:33:12.870161Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-04T02:33:12.870215Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [3:6112:3440], StatRequests.size() = 1 2026-01-04T02:33:13.543742Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224038898 2026-01-04T02:33:13.543817Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224038898 2026-01-04T02:33:13.544276Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038898, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-04T02:33:13.558111Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2026-01-04T02:33:14.417334Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [3:6155:3450]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:14.417651Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-04T02:33:14.417709Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [3:6155:3450], StatRequests.size() = 1 2026-01-04T02:33:15.655526Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:15.896039Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [3:6198:3460]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:15.896385Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-04T02:33:15.896434Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [3:6198:3460], StatRequests.size() = 1 2026-01-04T02:33:17.188120Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [3:6243:3474]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:17.188387Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-04T02:33:17.188431Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [3:6243:3474], StatRequests.size() = 1 2026-01-04T02:33:18.403640Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-04T02:33:18.403920Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 7 2026-01-04T02:33:18.404066Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-04T02:33:18.404469Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:33:18.437678Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:33:18.437738Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:33:18.438007Z node 3 
:STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-04T02:33:18.451879Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:18.564861Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [3:6289:3486]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:18.565125Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-04T02:33:18.565161Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [3:6289:3486], StatRequests.size() = 1 2026-01-04T02:33:18.565580Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6291:3399]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:18.569452Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:33:18.569533Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:6301:3403] 2026-01-04T02:33:18.569579Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:6301:3403] 2026-01-04T02:33:18.570742Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6301:3403], server id = [2:6307:3404], tablet id = 72075186224038895, status = OK 2026-01-04T02:33:18.570814Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224038895] EvServerConnected, pipe server id = [2:6307:3404] 2026-01-04T02:33:18.571004Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224038895] EvConnectNode, pipe server id = [2:6307:3404], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:33:18.571049Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:33:18.571230Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:33:18.571293Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:6291:3399], StatRequests.size() = 1 2026-01-04T02:33:18.571359Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224038895] EvPropagateStatisticsResponse, cookie: 18446744073709551615 >> Describe::DescribePartitionPermissions >> TExportToS3Tests::AutoDropping [GOOD] >> TExportToS3Tests::DecimalOutOfRange [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Table [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_DeleteDeadLetterPolicy [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes+UseSink [GOOD] >> TxUsage::WriteToTopic_Demo_42_Table [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Table >> TxUsage::WriteToTopic_Demo_19_RestartNo_Query [GOOD] >> TExportToS3Tests::CorruptedDecimalValue >> BasicUsage::CreateTopicWithSharedConsumer_DisabledDeadLetterPolicy >> TExportToS3Tests::ShouldCheckQuotasChildrenLimited [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::CloseWriteSessionImmediately [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes-UseSink Test command err: 2026-01-04T02:32:49.325902Z :BasicWriteSession INFO: Random seed for 
debugging is 1767493969325859 2026-01-04T02:32:49.692015Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328796678339159:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:49.692056Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:49.776103Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:49.782363Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:49.815362Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328796609602047:2164];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:49.819088Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:49.865530Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:50.347190Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:50.354913Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:50.472410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:50.472494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:50.477457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:50.477555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:50.548865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:50.609076Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:50.633915Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:50.653871Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:50.654306Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:50.668123Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-04T02:32:50.745366Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:50.849983Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:51.077974Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/002da1/r3tmp/yandexXFnXFc.tmp 2026-01-04T02:32:51.078001Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/002da1/r3tmp/yandexXFnXFc.tmp 2026-01-04T02:32:51.078190Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/002da1/r3tmp/yandexXFnXFc.tmp 2026-01-04T02:32:51.078310Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:51.158367Z INFO: TTestServer started on Port 12902 GrpcPort 26852 PQClient connected to localhost:26852 2026-01-04T02:32:51.445378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:54.547573Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328818084438756:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.547748Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.548374Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328818084438770:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.548428Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.549468Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328818153176949:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.549511Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328818153176925:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.549583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.549921Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328818153176963:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.549987Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.552335Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328818084438768:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.552942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:32:54.613237Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591328818084438773:2133] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:54.633544Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328818153176962:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-04T02:32:54.634009Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591328818084438772:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-04T02:32:54.694969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328796678339159:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:54.695031Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:32:54.711093Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328818153177056:2976] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:54.729280Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591328818084438800:2139] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:54.817894Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591328796609602047:2164];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:54.817957Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:32:54.964055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:54.966039Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328818153177069:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' beca ... G: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:14.084881Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:14.084894Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:14.084912Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:14.084924Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:14.147223Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__old_chooser_actor.h:67: TPartitionChooser [3:7591328903012546450:2446] (SourceId=src, PreferedPartition=(NULL)) RequestPQRB 2026-01-04T02:33:14.147429Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591328903012546489:2446] connected; active server actors: 1 2026-01-04T02:33:14.147472Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__old_chooser_actor.h:80: TPartitionChooser [3:7591328903012546450:2446] (SourceId=src, PreferedPartition=(NULL)) Received partition 0 from PQRB for SourceId=src 2026-01-04T02:33:14.147484Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:174: TPartitionChooser [3:7591328903012546450:2446] (SourceId=src, PreferedPartition=(NULL)) Update the table 2026-01-04T02:33:14.147651Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591328903012546489:2446] disconnected. 
2026-01-04T02:33:14.147666Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591328903012546489:2446] disconnected; active server actors: 1 2026-01-04T02:33:14.147677Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1697: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591328903012546489:2446] disconnected no session 2026-01-04T02:33:14.185177Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:14.185208Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:14.185234Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:14.185253Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:14.185266Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:14.222383Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:183: TPartitionChooser [3:7591328903012546450:2446] (SourceId=src, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2026-01-04T02:33:14.222421Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7591328903012546450:2446] (SourceId=src, PreferedPartition=(NULL)) ReplyResult: Partition=0, SeqNo=(NULL) 2026-01-04T02:33:14.222437Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [3:7591328903012546450:2446] (SourceId=src, PreferedPartition=(NULL)) Start idle 2026-01-04T02:33:14.222462Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:694: ProceedPartition. 
session cookie: 1 sessionId: partition: 0 expectedGeneration: (NULL) 2026-01-04T02:33:14.223223Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037892] server connected, pipe [3:7591328903012546513:2446], now have 1 active actors on pipe 2026-01-04T02:33:14.223442Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 4, Generation: 1 2026-01-04T02:33:14.223632Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-01-04T02:33:14.223663Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-01-04T02:33:14.223733Z node 4 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src|96a1075e-7805d98a-85e7dbf2-648aa388_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2026-01-04T02:33:14.223771Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:14.223784Z node 4 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:33:14.223801Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:33:14.223812Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:14.223832Z node 4 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:33:14.223859Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:33:14.223869Z node 4 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:33:14.223882Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:14.223917Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:37: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2026-01-04T02:33:14.223957Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-04T02:33:14.224425Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-01-04T02:33:14.224449Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-01-04T02:33:14.224502Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-04T02:33:14.224755Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|96a1075e-7805d98a-85e7dbf2-648aa388_0 2026-01-04T02:33:14.225429Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1767493994225 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:33:14.225548Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. 
Init response: session_id: "src|96a1075e-7805d98a-85e7dbf2-648aa388_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2026-01-04T02:33:14.225755Z :INFO: [] MessageGroupId [src] SessionId [src|96a1075e-7805d98a-85e7dbf2-648aa388_0] Write session: close. Timeout = 0 ms 2026-01-04T02:33:14.225822Z :INFO: [] MessageGroupId [src] SessionId [src|96a1075e-7805d98a-85e7dbf2-648aa388_0] Write session will now close 2026-01-04T02:33:14.225862Z :DEBUG: [] MessageGroupId [src] SessionId [src|96a1075e-7805d98a-85e7dbf2-648aa388_0] Write session: aborting 2026-01-04T02:33:14.226615Z :INFO: [] MessageGroupId [src] SessionId [src|96a1075e-7805d98a-85e7dbf2-648aa388_0] Write session: gracefully shut down, all writes complete 2026-01-04T02:33:14.226651Z :DEBUG: [] MessageGroupId [src] SessionId [src|96a1075e-7805d98a-85e7dbf2-648aa388_0] Write session is aborting and will not restart 2026-01-04T02:33:14.226722Z :DEBUG: [] MessageGroupId [src] SessionId [src|96a1075e-7805d98a-85e7dbf2-648aa388_0] Write session: destroy 2026-01-04T02:33:14.227062Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|96a1075e-7805d98a-85e7dbf2-648aa388_0 grpc read done: success: 0 data: 2026-01-04T02:33:14.227084Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|96a1075e-7805d98a-85e7dbf2-648aa388_0 grpc read failed 2026-01-04T02:33:14.227120Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 1 sessionId: src|96a1075e-7805d98a-85e7dbf2-648aa388_0 grpc closed 2026-01-04T02:33:14.227137Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|96a1075e-7805d98a-85e7dbf2-648aa388_0 is DEAD 2026-01-04T02:33:14.227921Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:33:14.228287Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [3:7591328903012546513:2446] destroyed 2026-01-04T02:33:14.228333Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-04T02:33:14.228356Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:14.228368Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:14.228378Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:14.228401Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:14.228412Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist Session was created 2026-01-04T02:33:14.285488Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:14.285523Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:14.285537Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:14.285554Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:14.285567Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:14.385869Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:14.385905Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:14.385928Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:14.385942Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:14.385956Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist >> TxUsage::WriteToTopic_Demo_42_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoTables [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:35.093895Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:35.172801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:35.178283Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:35.178602Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:35.178698Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:35.505723Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:35.611275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:35.611403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:35.657987Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:35.748511Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:36.421712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:36.422383Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:36.422440Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:36.422477Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:36.423122Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:36.500641Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:37.020558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:32:39.972383Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:39.978562Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:32:39.981291Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:40.015755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:32:40.015880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:40.045201Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:40.047491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:40.223032Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:40.223163Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:40.224991Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:40.225509Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:40.225990Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:40.227049Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:40.227261Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:40.227368Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:40.228834Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:40.228973Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:40.229061Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:40.255643Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:40.508124Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:40.549010Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:32:40.549095Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:32:40.607752Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:32:40.608465Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:32:40.608735Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:32:40.608812Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:32:40.608879Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:32:40.608944Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:32:40.609007Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:32:40.609070Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:32:40.610325Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:32:40.613604Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1909:2455] 2026-01-04T02:32:40.618960Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:32:40.628048Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2115:2584] Owner: [2:2114:2583]. Describe result: PathErrorUnknown 2026-01-04T02:32:40.628199Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2115:2584] Owner: [2:2114:2583]. Creating table 2026-01-04T02:32:40.628293Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2115:2584] Owner: [2:2114:2583]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:40.634046Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:40.634167Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:40.670631Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:32:40.670961Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:32:40.687094Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2234:2660], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:40.715795Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:40.738514Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2115:2584] Owner: [2:2114:2583]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:32:40.738659Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2115:2584] Owner: [2:2114:2583]. Subscribe on create table tx: 281474976720657 2026-01-04T02:32:40.750346Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2115:2584] Owner: [2:2114:2583]. 
Subscribe on tx: 281474976720657 registered 2026-01-04T02:32:40.816036Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:32:40.945172Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:32:41.299338Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2115:2584] Owner: [2:2114:2583]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-04T02:32:41.428110Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2115:2584] Owner: [2:2114:2583]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:32:41.428208Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2115:2584] Owner: [2:2114:2583]. Column diff is empty, finishing 2026-01-04T02:32:42.180353Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... lyToActorId = [2:3805:3392], StatRequests.size() = 1 2026-01-04T02:33:03.297661Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:3842:3410]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:03.297980Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-04T02:33:03.298027Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:3842:3410], StatRequests.size() = 1 2026-01-04T02:33:04.418686Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:3869:3420]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:04.419041Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-04T02:33:04.419088Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:3869:3420], StatRequests.size() = 1 2026-01-04T02:33:04.975088Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:05.553308Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:3900:3434]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:05.553579Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-04T02:33:05.553619Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:3900:3434], StatRequests.size() = 1 2026-01-04T02:33:06.540938Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:3927:3444]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:06.541245Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-04T02:33:06.541283Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:3927:3444], StatRequests.size() = 1 2026-01-04T02:33:07.005192Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 
2026-01-04T02:33:07.005722Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2026-01-04T02:33:07.006125Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-04T02:33:07.006248Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-04T02:33:07.545049Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:3954:3454]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:07.545378Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-04T02:33:07.545422Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:3954:3454], StatRequests.size() = 1 2026-01-04T02:33:08.584869Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:3985:3466]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:08.585125Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-04T02:33:08.585174Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:3985:3466], StatRequests.size() = 1 2026-01-04T02:33:09.591863Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:4016:3478]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:09.592161Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-04T02:33:09.592201Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4016:3478], StatRequests.size() = 1 2026-01-04T02:33:10.166030Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:10.299561Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:33:10.299648Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:33:10.299695Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:33:10.299740Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-04T02:33:10.875150Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4050:3491]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:10.875511Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-04T02:33:10.875561Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:4050:3491], StatRequests.size() = 1 2026-01-04T02:33:11.943981Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4081:3503]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:11.944318Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-04T02:33:11.944349Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), 
request id = 29, ReplyToActorId = [2:4081:3503], StatRequests.size() = 1 2026-01-04T02:33:12.553559Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:33:12.553896Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2026-01-04T02:33:12.554189Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-04T02:33:12.554242Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-04T02:33:12.586022Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 3, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:33:12.586099Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:33:12.586307Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 73, entries count: 3, are all stats full: 1 2026-01-04T02:33:12.599422Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:13.209367Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4111:3515]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:13.209621Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-04T02:33:13.209648Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4111:3515], StatRequests.size() = 1 2026-01-04T02:33:14.361186Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4140:3527]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:14.361620Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-04T02:33:14.361672Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4140:3527], StatRequests.size() = 1 2026-01-04T02:33:15.386265Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4169:3537]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:15.386623Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-04T02:33:15.386672Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4169:3537], StatRequests.size() = 1 2026-01-04T02:33:15.921280Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:16.466315Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4202:3551]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:16.466570Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-04T02:33:16.466608Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4202:3551], StatRequests.size() = 1 2026-01-04T02:33:17.513767Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], 
ReplyToActorId[ [2:4229:3561]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:17.514024Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2026-01-04T02:33:17.514058Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 34, ReplyToActorId = [2:4229:3561], StatRequests.size() = 1 2026-01-04T02:33:17.989887Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:33:17.990062Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 7 2026-01-04T02:33:17.990267Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-04T02:33:17.990457Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:33:18.056728Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 3, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:33:18.056803Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:33:18.057041Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 73, entries count: 3, are all stats full: 1 2026-01-04T02:33:18.070617Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:18.582470Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 35 ], ReplyToActorId[ [2:4260:3575]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:18.582813Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 35 ] 2026-01-04T02:33:18.582865Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 35, ReplyToActorId = [2:4260:3575], StatRequests.size() = 1 2026-01-04T02:33:18.583400Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 36 ], ReplyToActorId[ [2:4262:3577]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:18.587138Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 36 ] 2026-01-04T02:33:18.587201Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 36, ReplyToActorId = [2:4262:3577], StatRequests.size() = 1 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionCloseIgnoresWrites [GOOD] Test command err: 2026-01-04T02:32:51.852466Z :WriteSessionCloseWaitsForWrites INFO: Random seed for debugging is 1767493971852428 2026-01-04T02:32:52.187151Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328807327049588:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:52.192160Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:52.234014Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:52.239222Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:52.255583Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328809715863006:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:52.255671Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:52.274751Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:52.501911Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:52.560954Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:52.600475Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:52.600549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:52.606886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:52.606936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:52.613350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:52.625647Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:52.652940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:52.754827Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:52.777855Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:52.857147Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:52.881563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/002ab1/r3tmp/yandexoj0pyb.tmp 2026-01-04T02:32:52.881591Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/002ab1/r3tmp/yandexoj0pyb.tmp 2026-01-04T02:32:52.881800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/002ab1/r3tmp/yandexoj0pyb.tmp 2026-01-04T02:32:52.881938Z node 
1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:52.959580Z INFO: TTestServer started on Port 2055 GrpcPort 3653 PQClient connected to localhost:3653 2026-01-04T02:32:53.209885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:53.212694Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:53.264974Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:56.143645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328824506920016:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.143732Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328824506920008:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.143988Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.149621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:32:56.151323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328824506920024:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.151427Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.172848Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328824506920023:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-04T02:32:56.307034Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328824506920117:2991] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:56.572843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:56.573294Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328824506920127:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:32:56.574332Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=Yjc4NTI0MjctMjNmZTIzNWQtZmJlZTczZTktZDMwYjVmYTQ=, ActorId: [1:7591328824506920006:2329], ActorState: ExecuteState, LegacyTraceId: 01ke3dkg2d06ax9hp6dz31akb8, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:32:56.576956Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:32:56.582830Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591328826895732458:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:32:56.584411Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=MzQyOTNkYTUtODNjODFkOGYtMmM5MWRiZTAtN2FiYjMyOTE=, ActorId: [2:7591328826895732433:2301], ActorState: ExecuteState, LegacyTraceId: 01ke3dkg4z30xtjp0ydd2v2qf0, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:32:56.584733Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you d ... dingCommits: 0, PendingWrites: 0 2026-01-04T02:33:17.452352Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:17.552630Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:17.552668Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:17.552676Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:17.552689Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:17.552697Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:17.652969Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:17.652990Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:17.652998Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:17.653010Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:17.653018Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:17.753501Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:17.753526Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2026-01-04T02:33:17.753538Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:17.753554Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:17.753565Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:17.853703Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:17.853735Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:17.853748Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:17.853766Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:17.853779Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:17.954062Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:17.954094Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:17.954130Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:17.954149Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:17.954162Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:18.054410Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:18.054443Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:18.054455Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:18.054474Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:18.054487Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:18.154761Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:18.154795Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:18.154808Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:18.154827Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:18.154840Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 
2026-01-04T02:33:18.255073Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:18.255100Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:18.255112Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:18.255129Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:18.255142Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:18.355456Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:18.355485Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:18.355497Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:18.355514Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:18.355528Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:18.455810Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:18.455840Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:18.455853Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:18.455870Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:18.455881Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:18.556155Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:18.556178Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:18.556186Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:18.556199Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:18.556209Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:18.656518Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:18.656549Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:18.656561Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx 
pending commits 2026-01-04T02:33:18.656579Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:18.656593Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:18.756829Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:18.756851Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:18.756860Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:18.756871Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:18.756880Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:18.857214Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:18.857240Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:18.857250Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:18.857265Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:18.857275Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:19.099742Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1259} ActorId: [3:7591328919594861263:2480] TxId: 281474976715676. Ctx: { TraceId: 01ke3dm656d3k8efba7925m95r, Database: /Root, SessionId: ydb://session/3?node_id=3&id=YjhjMjYwNmYtZTllMzY5Ny02ZGE4NjRmNy02MWQ4NWQ3YQ==, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 trace_id# 2026-01-04T02:33:19.100289Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7591328919594861267:2480], TxId: 281474976715676, task: 3. Ctx: { CheckpointId : . TraceId : 01ke3dm656d3k8efba7925m95r. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=YjhjMjYwNmYtZTllMzY5Ny02ZGE4NjRmNy02MWQ4NWQ3YQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7591328919594861263:2480], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> TExportToS3Tests::CorruptedDecimalValue [GOOD] >> DataShardVolatile::NotCachingAbortingDeletes-UseSink [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Table >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck >> TExportToS3Tests::SchemaMapping >> TExportToS3Tests::SchemaMapping [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeResultLost_RwTx-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:33:10.408042Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:10.408715Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:10.411808Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:685:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:33:10.486361Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:33:10.490925Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:33:10.491710Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2225], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:33:10.491927Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:33:10.492071Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:33:10.493252Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:33:10.493286Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:33:10.802158Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:10.891596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:10.891734Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:10.892119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:10.892191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:10.948978Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:33:10.949578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:10.950039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:11.042474Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:11.090089Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:33:11.641646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:11.703913Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:11.704038Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:12.031708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:12.834365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1911:3230], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:12.834546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1921:3235], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:12.834679Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:12.835771Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1925:3238], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:12.835946Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:12.839530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:13.381148Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1926:3239], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:33:13.557522Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2036:3317] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:13.899102Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-01-04T02:33:13.899213Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01ke3dm0bva0j3hbtamk35bdm3, Database: , SessionId: ydb://session/3?node_id=1&id=MjIwYmZlNjMtM2NiZDRhZDMtM2JlNzIyZTQtNWIyNzhlNjk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-01-04T02:33:13.899492Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:836} ActorId: [1:2062:3228] TxId: 281474976715660. Ctx: { TraceId: 01ke3dm0bva0j3hbtamk35bdm3, Database: , SessionId: ydb://session/3?node_id=1&id=MjIwYmZlNjMtM2NiZDRhZDMtM2JlNzIyZTQtNWIyNzhlNjk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 1 trace_id# 2026-01-04T02:33:13.899563Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:851} ActorId: [1:2062:3228] TxId: 281474976715660. Ctx: { TraceId: 01ke3dm0bva0j3hbtamk35bdm3, Database: , SessionId: ydb://session/3?node_id=1&id=MjIwYmZlNjMtM2NiZDRhZDMtM2JlNzIyZTQtNWIyNzhlNjk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-01-04T02:33:13.899770Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715660. Resolved key sets: 1 2026-01-04T02:33:13.899897Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976715660. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-01-04T02:33:13.900230Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715660. Ctx: { TraceId: 01ke3dm0bva0j3hbtamk35bdm3, Database: , SessionId: ydb://session/3?node_id=1&id=MjIwYmZlNjMtM2NiZDRhZDMtM2JlNzIyZTQtNWIyNzhlNjk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-01-04T02:33:13.900586Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715660. Ctx: { TraceId: 01ke3dm0bva0j3hbtamk35bdm3, Database: , SessionId: ydb://session/3?node_id=1&id=MjIwYmZlNjMtM2NiZDRhZDMtM2JlNzIyZTQtNWIyNzhlNjk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:2065:3228] 2026-01-04T02:33:13.900629Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715660. 
Ctx: { TraceId: 01ke3dm0bva0j3hbtamk35bdm3, Database: , SessionId: ydb://session/3?node_id=1&id=MjIwYmZlNjMtM2NiZDRhZDMtM2JlNzIyZTQtNWIyNzhlNjk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:2065:3228], channels: 0 2026-01-04T02:33:13.900704Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [1:2062:3228] TxId: 281474976715660. Ctx: { TraceId: 01ke3dm0bva0j3hbtamk35bdm3, Database: , SessionId: ydb://session/3?node_id=1&id=MjIwYmZlNjMtM2NiZDRhZDMtM2JlNzIyZTQtNWIyNzhlNjk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-04T02:33:13.900764Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2750} ActorId: [1:2062:3228] TxId: 281474976715660. Ctx: { TraceId: 01ke3dm0bva0j3hbtamk35bdm3, Database: , SessionId: ydb://session/3?node_id=1&id=MjIwYmZlNjMtM2NiZDRhZDMtM2JlNzIyZTQtNWIyNzhlNjk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-01-04T02:33:13.900812Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715660. Ctx: { TraceId: 01ke3dm0bva0j3hbtamk35bdm3, Database: , SessionId: ydb://session/3?node_id=1&id=MjIwYmZlNjMtM2NiZDRhZDMt ... G: {KQPEX@kqp_executer_impl.h:884} ActorId: [3:2102:3353] TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CT 1, CA [3:2110:3353], trace_id# 2026-01-04T02:33:23.048977Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2102:3353] TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2110:3353], trace_id# 2026-01-04T02:33:23.049233Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1045} ActorId: [3:2102:3353] TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Executing task TaskId# 1 ComputeActor# [4:2112:2470] trace_id# 2026-01-04T02:33:23.049277Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [4:2112:2470] 2026-01-04T02:33:23.049325Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:908: TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Task: 1, output channelId: 1, dst task: 2, at actor [3:2110:3353] 2026-01-04T02:33:23.049370Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710663. 
Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [4:2112:2470], channels: 1 2026-01-04T02:33:23.049430Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [3:2110:3353], channels: 1 2026-01-04T02:33:23.049737Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2102:3353] TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:2112:2470] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-04T02:33:23.049772Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [3:2102:3353] TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [4:2112:2470], CA [3:2110:3353], trace_id# 2026-01-04T02:33:23.049798Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2102:3353] TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:2112:2470], CA [3:2110:3353], trace_id# 2026-01-04T02:33:23.050149Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2102:3353] TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:2112:2470] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {CpuTimeUs: 440 Tasks { TaskId: 1 CpuTimeUs: 279 ComputeCpuTimeUs: 7 BuildCpuTimeUs: 272 HostName: "ghrun-46vtxmsa7i" NodeId: 4 CreateTimeMs: 1767494003047 CurrentWaitInputTimeUs: 20 UpdateTimeMs: 1767494003048 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:33:23.050235Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [3:2102:3353] TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [4:2112:2470], CA [3:2110:3353], trace_id# 2026-01-04T02:33:23.050272Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2102:3353] TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [4:2112:2470], CA [3:2110:3353], trace_id# 2026-01-04T02:33:23.055479Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:504} ActorId: [3:2102:3353] TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got result ChannelId# 2 InputIndex# 0 Sender# [3:2111:3353] Finished# false trace_id# 2026-01-04T02:33:23.055548Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:511} ActorId: [3:2102:3353] TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send ack ChannelId# 2 SeqNo# 1 Recipient# [3:2111:3353] trace_id# 2026-01-04T02:33:23.060623Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:504} ActorId: [3:2102:3353] TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got result ChannelId# 2 InputIndex# 0 Sender# [3:2111:3353] Finished# true trace_id# 2026-01-04T02:33:23.060670Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:511} ActorId: [3:2102:3353] TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send ack ChannelId# 2 SeqNo# 2 Recipient# [3:2111:3353] trace_id# 2026-01-04T02:33:23.061335Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2102:3353] TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [3:2110:3353] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1018 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 599 FinishTimeMs: 1767494003061 InputRows: 3 InputBytes: 12 OutputRows: 3 OutputBytes: 12 ResultRows: 3 ResultBytes: 12 ComputeCpuTimeUs: 116 BuildCpuTimeUs: 483 HostName: "ghrun-46vtxmsa7i" NodeId: 3 CreateTimeMs: 1767494003046 UpdateTimeMs: 1767494003061 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:33:23.061428Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [3:2110:3353] 2026-01-04T02:33:23.061480Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [3:2102:3353] TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [4:2112:2470], trace_id# 2026-01-04T02:33:23.061509Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2102:3353] TxId: 281474976710663. 
Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [4:2112:2470], trace_id# 2026-01-04T02:33:23.061761Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2102:3353] TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:2112:2470] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1034 DurationUs: 6000 Tasks { TaskId: 1 CpuTimeUs: 356 FinishTimeMs: 1767494003060 OutputRows: 3 OutputBytes: 12 Tables { TablePath: "/Root/table-1" ReadRows: 3 ReadBytes: 24 AffectedPartitions: 4 } IngressRows: 3 ComputeCpuTimeUs: 84 BuildCpuTimeUs: 272 WaitInputTimeUs: 5467 HostName: "ghrun-46vtxmsa7i" NodeId: 4 StartTimeMs: 1767494003054 CreateTimeMs: 1767494003047 UpdateTimeMs: 1767494003060 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:33:23.061846Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [4:2112:2470] 2026-01-04T02:33:23.061992Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [3:2102:3353] TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-04T02:33:23.062043Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1505} ActorId: [3:2102:3353] TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-01-04T02:33:23.062079Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [3:2102:3353] TxId: 281474976710663. Ctx: { TraceId: 01ke3dma81a35jetvdnqswwg9e, Database: , SessionId: ydb://session/3?node_id=3&id=NmFiNmQ5OTQtNjllYjE5MmEtYzg2YzRlMjEtNzIxMTFmMDY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.002052s ReadRows: 3 ReadBytes: 24 RequestUnits# 3 ForceFlag# true trace_id# { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } } |87.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::AutoDropping [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:32:32.031854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:32:32.031956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:32.031992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:32:32.032036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:32:32.032072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:32:32.032130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:32:32.032218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:32.032280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:32:32.033104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:32:32.033435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:32:32.126815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:32:32.126874Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:32.143277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:32:32.143453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:32:32.143636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:32:32.152128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:32:32.152404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:32:32.153066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:32.153303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:32:32.156295Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:32.156484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:32.157634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:32.157718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:32.157938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:32.157985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:32.158076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:32.158288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:32:32.309951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.310608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.310694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.310745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.310810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.310880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.310935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.310991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.311035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.311088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.311165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 
281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.311212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.311264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.311308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.311360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
de 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-04T02:33:23.557614Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-04T02:33:23.557667Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:1333: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2026-01-04T02:33:23.557698Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:1364: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2026-01-04T02:33:23.559157Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-04T02:33:23.559247Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:33:23.559291Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:878:2837] TestWaitNotification: OK eventTxId 102 2026-01-04T02:33:23.560300Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:33:23.560498Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 240us result status StatusSuccess 2026-01-04T02:33:23.561054Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 14 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 14 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 12 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 
MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 desc: 2 2026-01-04T02:33:23.561637Z node 7 :EXPORT DEBUG: schemeshard_export__forget.cpp:79: TExport::TTxForget, dropping export tables, info: { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Done WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2026-01-04T02:33:23.563614Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-04T02:33:23.563667Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:817: TExport::TTxProgress: Resume: id# 102 2026-01-04T02:33:23.563736Z node 7 :EXPORT INFO: schemeshard_export__create.cpp:598: TExport::TTxProgress: Allocate txId: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 } 2026-01-04T02:33:23.563815Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-04T02:33:23.563912Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 102, at schemeshard: 72057594046678944 2026-01-04T02:33:23.563969Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-04T02:33:23.564027Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:938: TExport::TTxProgress: OnAllocateResult: txId# 281474976710762, id# 102 2026-01-04T02:33:23.564114Z node 7 :EXPORT INFO: schemeshard_export__create.cpp:590: TExport::TTxProgress: Drop propose: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, txId# 281474976710762 2026-01-04T02:33:23.564215Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-04T02:33:23.566986Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "export-102" } Internal: true } TxId: 281474976710762 TabletId: 72057594046678944 PeerName: "" SanitizedToken: "" , at schemeshard: 72057594046678944 2026-01-04T02:33:23.567133Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/export-102, pathId: 0, opId: 281474976710762:0, at schemeshard: 72057594046678944 2026-01-04T02:33:23.567295Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710762:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761, at schemeshard: 
72057594046678944 2026-01-04T02:33:23.569232Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710762, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 39 PathDropTxId: 281474976710761, at schemeshard: 72057594046678944 2026-01-04T02:33:23.569486Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710762, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/export-102', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761, operation: DROP DIRECTORY, path: /MyRoot/export-102 2026-01-04T02:33:23.569633Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7145: Handle: TEvModifySchemeTransactionResult: txId# 281474976710762, status# StatusPathDoesNotExist 2026-01-04T02:33:23.569734Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7147: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 39 PathDropTxId: 281474976710761 2026-01-04T02:33:23.569842Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-04T02:33:23.569892Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:999: TExport::TTxProgress: OnModifyResult: txId# 281474976710762, status# StatusPathDoesNotExist 2026-01-04T02:33:23.569978Z node 7 :EXPORT TRACE: schemeshard_export__create.cpp:1000: Message: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/export-102\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000043, drop txId: 281474976710761" TxId: 281474976710762 SchemeshardId: 72057594046678944 PathId: 39 PathDropTxId: 281474976710761 2026-01-04T02:33:23.570100Z node 7 :EXPORT INFO: schemeshard_export__create.cpp:1181: TExport::TTxProgress: Wait for completion: info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Dropping WaitTxId: 281474976710761 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, itemIdx# 4294967295, txId# 281474976710761 2026-01-04T02:33:23.571697Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete 2026-01-04T02:33:23.571820Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710761, at schemeshard: 72057594046678944 2026-01-04T02:33:23.571903Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710761 2026-01-04T02:33:23.571956Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710761 2026-01-04T02:33:23.572008Z node 7 :EXPORT DEBUG: 
schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-04T02:33:23.572050Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:1333: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761 2026-01-04T02:33:23.572094Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:1364: TExport::TTxProgress: OnNotifyResult: txId# 281474976710761, id# 102, itemIdx# 4294967295 2026-01-04T02:33:23.573324Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 102 2026-01-04T02:33:23.573486Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:33:23.573531Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:33:23.573887Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:33:23.573990Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:33:23.574037Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:1097:3051] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideThenStopTenantAndForceDeleteSubDomain-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2026-01-04T02:32:49.098034Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.015982s 2026-01-04T02:32:49.211295Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328796869929039:2172];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:49.211380Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:49.544995Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:32:49.569064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:49.569192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:49.581174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:49.672960Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:49.835870Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:32:50.042095Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591328796869929176:2137] Handle TEvNavigate describe path dc-1 2026-01-04T02:32:50.042147Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591328801164896942:2450] HANDLE EvNavigateScheme dc-1 
2026-01-04T02:32:50.042263Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591328796869929183:2140], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:32:50.042381Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591328796869929417:2294][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591328796869929183:2140], cookie# 1 2026-01-04T02:32:50.043907Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328796869929471:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328796869929468:2294], cookie# 1 2026-01-04T02:32:50.043958Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328796869929472:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328796869929469:2294], cookie# 1 2026-01-04T02:32:50.043977Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591328796869929473:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328796869929470:2294], cookie# 1 2026-01-04T02:32:50.044012Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328796869928831:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328796869929471:2294], cookie# 1 2026-01-04T02:32:50.044059Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328796869928834:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328796869929472:2294], cookie# 1 2026-01-04T02:32:50.044087Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328796869928837:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591328796869929473:2294], cookie# 1 2026-01-04T02:32:50.044152Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328796869929471:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328796869928831:2050], cookie# 1 2026-01-04T02:32:50.044170Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328796869929472:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328796869928834:2053], cookie# 1 2026-01-04T02:32:50.044185Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591328796869929473:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328796869928837:2056], cookie# 1 2026-01-04T02:32:50.044240Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328796869929417:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328796869929468:2294], cookie# 1 2026-01-04T02:32:50.044268Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591328796869929417:2294][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:32:50.044313Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: 
[main][1:7591328796869929417:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328796869929469:2294], cookie# 1 2026-01-04T02:32:50.044340Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591328796869929417:2294][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:32:50.044367Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591328796869929417:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328796869929470:2294], cookie# 1 2026-01-04T02:32:50.044381Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591328796869929417:2294][/dc-1] Sync cookie mismatch: sender# [1:7591328796869929470:2294], cookie# 1, current cookie# 0 2026-01-04T02:32:50.044431Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591328796869929183:2140], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:32:50.055244Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591328796869929183:2140], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591328796869929417:2294] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:32:50.055408Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591328796869929183:2140], cacheItem# { Subscriber: { Subscriber: [1:7591328796869929417:2294] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:32:50.061305Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591328801164896943:2451], recipient# [1:7591328801164896942:2450], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:32:50.061561Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591328801164896942:2450] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:32:50.101754Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# 
[1:7591328801164896942:2450] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:32:50.105931Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591328801164896942:2450] Handle TEvDescribeSchemeResult Forward to# [1:7591328801164896941:2449] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:32:50.115057Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591328796869929176:2137] Handle TEvProposeTransaction 2026-01-04T02:32:50.115089Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591328796869929176:2137] TxId# 281474976710657 ProcessProposeTransaction 2026-01-04T02:32:50.115190Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591328796869929176:2137] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7591328801164896948:2455] 2026-01-04T02:32:50.211148Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591328796869929183:2140], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true Sh ... 
esNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:07.743035Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591328870050358911:3605], recipient# [3:7591328870050358910:2332], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:07.886048Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591328839985585831:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:07.886209Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591328839985585831:2121], cacheItem# { Subscriber: { Subscriber: [3:7591328844280554233:2945] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:07.886303Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591328870050358913:3606], recipient# [3:7591328870050358912:2333], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:07.924963Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591328839985585831:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:07.925083Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591328839985585831:2121], cacheItem# { Subscriber: { Subscriber: [3:7591328844280554233:2945] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 
Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:07.925190Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591328870050358915:3607], recipient# [3:7591328870050358914:2334], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:08.743585Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591328839985585831:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:08.743690Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591328839985585831:2121], cacheItem# { Subscriber: { Subscriber: [3:7591328857165456278:3023] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:08.743790Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591328874345326232:3611], recipient# [3:7591328874345326231:2335], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:08.886879Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591328839985585831:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:08.886978Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591328839985585831:2121], cacheItem# { Subscriber: { Subscriber: [3:7591328844280554233:2945] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:08.887040Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591328874345326234:3612], recipient# [3:7591328874345326233:2336], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:08.925874Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591328839985585831:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:08.925994Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591328839985585831:2121], cacheItem# { Subscriber: { Subscriber: [3:7591328844280554233:2945] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:08.926094Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591328874345326236:3613], recipient# [3:7591328874345326235:2337], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:09.744520Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591328839985585831:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:09.744674Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591328839985585831:2121], cacheItem# { Subscriber: { Subscriber: [3:7591328857165456278:3023] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:09.744764Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591328878640293553:3617], recipient# [3:7591328878640293552:2338], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2026-01-04T02:32:46.673859Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328782308110307:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:46.674540Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:46.962420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:46.962524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:46.992167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:47.041249Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:47.065954Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328782308110194:2081] 1767493966667939 != 1767493966667942 2026-01-04T02:32:47.067984Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:47.150075Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:47.150100Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:47.150109Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:47.150229Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:47.318331Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:32:47.324431Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, 
DomainLoginOnly 1 2026-01-04T02:32:47.324468Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:32:47.331794Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:47.331869Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:22876, port: 22876 2026-01-04T02:32:47.332672Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:32:47.386627Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-01-04T02:32:47.438676Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****gvIw (F65A580E) () has now valid token of ldapuser@ldap 2026-01-04T02:32:50.874945Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328800814652652:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:50.877570Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:50.944848Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:51.093193Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:51.093268Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:51.112542Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:51.154754Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:51.226245Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:51.258853Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:51.258891Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:51.258900Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:51.258969Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:51.418052Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:32:51.421446Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:32:51.421476Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 
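The LDAP_AUTH_PROVIDER entries in this dump all follow the same three-step flow the provider logs: init against the test server URI, an initial bind with the service-account DN (the node 2 case below binds as cn=invalidRobouser and fails with "Invalid credentials"), then a subtree search for the user. The sketch below reproduces that flow with the plain OpenLDAP client API purely for illustration; the URI, bind DN, password and base DN are placeholder values lifted from the log, and this is not the ydb ldap_auth_provider.cpp implementation.

#include <ldap.h>
#include <cstdio>
#include <cstring>

int main() {
    LDAP* ld = nullptr;
    // init: scheme: ldap, uris: ldap://localhost:4217 (port taken from the node 2 test)
    if (ldap_initialize(&ld, "ldap://localhost:4217") != LDAP_SUCCESS) {
        return 1;
    }

    // bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net
    // (password is a placeholder; the failing test binds as cn=invalidRobouser instead)
    berval cred;
    cred.bv_val = const_cast<char*>("robouserPassword");
    cred.bv_len = std::strlen(cred.bv_val);
    int rc = ldap_sasl_bind_s(ld, "cn=robouser,dc=search,dc=yandex,dc=net",
                              LDAP_SASL_SIMPLE, &cred, nullptr, nullptr, nullptr);
    if (rc != LDAP_SUCCESS) {
        std::fprintf(stderr, "bind failed: %s\n", ldap_err2string(rc)); // e.g. "Invalid credentials"
        ldap_unbind_ext_s(ld, nullptr, nullptr);
        return 1;
    }

    // search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree,
    //         filter: uid=ldapuser, attributes: memberOf
    char memberOf[] = "memberOf";
    char* attrs[] = {memberOf, nullptr};
    LDAPMessage* res = nullptr;
    rc = ldap_search_ext_s(ld, "dc=search,dc=yandex,dc=net", LDAP_SCOPE_SUBTREE,
                           "(uid=ldapuser)", attrs, 0,
                           nullptr, nullptr, nullptr, LDAP_NO_LIMIT, &res);
    if (rc == LDAP_SUCCESS) {
        std::printf("entries found for ldapuser: %d\n", ldap_count_entries(ld, res));
    }
    ldap_msgfree(res);
    ldap_unbind_ext_s(ld, nullptr, nullptr);
    return 0;
}

The nested-group lookups later in the dump (node 5) reuse the same search call, first with the matching-rule filter (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net) and then with the (|(entryDn=...)...) disjunctions used for the tree traversal.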
2026-01-04T02:32:51.422157Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:4217, port: 4217 2026-01-04T02:32:51.422325Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2026-01-04T02:32:51.435133Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:4217. Invalid credentials 2026-01-04T02:32:51.435396Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****KOBA (092388A7) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:4217. Invalid credentials)' 2026-01-04T02:32:54.748347Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591328814612127714:2213];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:54.748591Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:54.765331Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:54.858884Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:54.860356Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591328814612127537:2081] 1767493974727770 != 1767493974727773 2026-01-04T02:32:54.894947Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:54.895033Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:54.900136Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:54.968537Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:54.969025Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:54.969033Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:54.969040Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:54.969106Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:55.102012Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:32:55.103120Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:32:55.103145Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:32:55.103756Z 
node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:14247, port: 14247 2026-01-04T02:32:55.103821Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:32:55.111559Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:14247. Invalid credentials 2026-01-04T02:32:55.111718Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****kaZQ (BB10AAB1) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:14247. Invalid credentials)' 2026-01-04T02:32:58.516143Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591328835170633246:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:58.516212Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:58.613312Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:58.701298Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:58.719750Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:58.719833Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:58.751521Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:58.810216Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:58.810238Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:58.810245Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:58.810330Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:58.896018Z node 4 :KQP_PROXY WARN: kqp_finali ... 
PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591328851744062965:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:02.345235Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:02.421972Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:02.527646Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:02.527743Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:02.542160Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:02.583752Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:02.587838Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591328851744062920:2081] 1767493982293459 != 1767493982293462 2026-01-04T02:33:02.653327Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:02.690578Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:02.690600Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:02.690606Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:02.690679Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:03.134042Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:33:03.138104Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:33:03.138152Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:33:03.138956Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:13014, port: 13014 2026-01-04T02:33:03.139076Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:33:03.153037Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:33:03.194214Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:33:03.194816Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:33:03.194883Z node 5 :LDAP_AUTH_PROVIDER 
DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:33:03.242264Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:33:03.286191Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:33:03.287072Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****igDw (4B87449D) () has now valid token of ldapuser@ldap 2026-01-04T02:33:03.347824Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:07.328152Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591328851744062965:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:07.328303Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:33:08.346976Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****igDw (4B87449D) 2026-01-04T02:33:08.347086Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:13014, port: 13014 2026-01-04T02:33:08.347202Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:33:08.358807Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:33:08.402145Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:33:08.402698Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:33:08.402734Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:33:08.446076Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:33:08.490137Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:33:08.491021Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****igDw (4B87449D) () has now valid token of ldapuser@ldap 2026-01-04T02:33:11.348372Z node 5 
:TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****igDw (4B87449D) 2026-01-04T02:33:13.890215Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7591328900066882289:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:13.890420Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:13.907797Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:13.966448Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:13.967366Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7591328900066882262:2081] 1767493993889295 != 1767493993889298 2026-01-04T02:33:14.013558Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:14.013668Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:14.026345Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:14.044900Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:14.044945Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:14.044957Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:14.045076Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:14.170809Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:14.277426Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:33:14.280998Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:33:14.281025Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:33:14.281625Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:30734, port: 30734 2026-01-04T02:33:14.281683Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:33:14.307044Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:33:14.354349Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****UbBQ (89E3CB0C) () has now valid token of ldapuser@ldap 2026-01-04T02:33:14.895443Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:18.890555Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7591328900066882289:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:18.890641Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:33:18.894718Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****UbBQ (89E3CB0C) 2026-01-04T02:33:18.894794Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:30734, port: 30734 2026-01-04T02:33:18.894862Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:33:18.905341Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:33:18.946342Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****UbBQ (89E3CB0C) () has now valid token of ldapuser@ldap 2026-01-04T02:33:21.895970Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****UbBQ (89E3CB0C) |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TExportToS3Tests::SchemaMappingEncryption >> TExportToS3Tests::SchemaMappingEncryption [GOOD] >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ResolveTableError [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:33:10.135916Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:10.136556Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:10.220924Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:33:10.221728Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:33:10.550114Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:10.633776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:10.633928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:10.634545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:10.634616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:10.678466Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:33:10.679079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:10.679450Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:10.746676Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:10.760622Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:33:11.349162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:11.411962Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:11.412143Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:11.727048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:12.727807Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-01-04T02:33:12.727914Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-01-04T02:33:12.728133Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:836} ActorId: [1:1847:3208] TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 1 trace_id# 2026-01-04T02:33:12.728181Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:851} ActorId: [1:1847:3208] TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Got request, become WaitResolveState trace_id# 2026-01-04T02:33:12.728356Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710658. Resolved key sets: 1 2026-01-04T02:33:12.728473Z node 1 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710658. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 2 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 4 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-01-04T02:33:12.728794Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. 
Total tasks: 1, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-01-04T02:33:12.729095Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1850:3208] 2026-01-04T02:33:12.729158Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1850:3208], channels: 0 2026-01-04T02:33:12.729221Z node 1 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [1:1847:3208] TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-04T02:33:12.729249Z node 1 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2750} ActorId: [1:1847:3208] TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-01-04T02:33:12.729280Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [1:1850:3208] 2026-01-04T02:33:12.729315Z node 1 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [1:1850:3208], channels: 0 2026-01-04T02:33:12.729353Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [1:1847:3208] TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [1:1850:3208], trace_id# 2026-01-04T02:33:12.729392Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [1:1847:3208] TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1850:3208], trace_id# 2026-01-04T02:33:12.729430Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2350} ActorId: [1:1847:3208] TxId: 281474976710658. 
Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-04T02:33:12.737031Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [1:1847:3208] TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1850:3208] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-04T02:33:12.737140Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [1:1847:3208] TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [1:1850:3208], trace_id# 2026-01-04T02:33:12.737189Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [1:1847:3208] TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [1:1850:3208], trace_id# 2026-01-04T02:33:12.738235Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [1:1847:3208] TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:1850:3208] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1345 Tasks { TaskId: 1 CpuTimeUs: 763 FinishTimeMs: 1767493992737 EgressBytes: 30 EgressRows: 3 ComputeCpuTimeUs: 45 BuildCpuTimeUs: 718 HostName: "ghrun-46vtxmsa7i" NodeId: 1 CreateTimeMs: 1767493992729 UpdateTimeMs: 1767493992737 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:33:12.738343Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [1:1850:3208] 2026-01-04T02:33:12.738411Z node 1 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:220} ActorId: [1:1847:3208] TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [1:1846:3208] trace_id# 2026-01-04T02:33:12.738479Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [1:1847:3208] TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.001345s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-04T02:33:12.778229Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [1:1847:3208] TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-04T02:33:12.778287Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1505} ActorId: [1:1847:3208] TxId: 281474976710658. Ctx: { TraceId: 01ke3dm000d01wxhhm50pas1gf, Database: , SessionId: ydb://session/3?node_id=1&id=ZmIyNTc3MWUtZGI0NGM5OTEtYjQ3ZTYyOWMtMWY4MmJkYzM=, PoolId: , DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-01-04T02:33:12.815811Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:1866:3226], status: UNAVAILABLE, issues:
: Error: Table metadata loading, code: 1050
:1:1: Error: Failed to load metadata for table: db.[/Root/table-1]
: Error: LookupError, code: 2005 2026-01-04T02:33:12.817984Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MTAyYzlhOTgtOTlmYjAzMjEtYzZlN2I3MjQtZTE4YzdkNzI=, ActorId: [1:1864:3224], ActorState: ExecuteState, LegacyTraceId: 01ke3dm0adfk809jh08vr5ck3m, ReplyQueryCompileError, remove tx status# UNAVAILABLE issues# { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 1 column: 1 } message: "Failed to load metadata for table: db.[/Root/table-1]" end_position { row: 1 column: 1 } severity: 1 issues { message: "LookupError" issue_code: 2005 severity: 1 } } } tx_id# trace_id# |87.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> TxUsage::WriteToTopic_Demo_13_Query [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Query |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_errors/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::SchemaMappingEncryptionIncorrectKey [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:32:32.020163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:32:32.020244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:32.020282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:32:32.020340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:32:32.020378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:32:32.020416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:32:32.020480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:32.020549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:32:32.021401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:32:32.021703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:32:32.101769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:32:32.101838Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:32.113869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:32:32.114028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:32:32.114181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:32:32.121555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:32:32.121849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:32:32.122528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:32.122774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:32:32.125518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:32.125719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:32.126871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:32.126930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:32.127047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:32.127111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:32.127175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:32.127364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:32:32.257458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.258285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 
281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.258413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.258493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.258575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.258680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.258775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.258880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.258979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.259077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.259203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.259297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.259400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.259470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:32.259548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
otify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710758, at schemeshard: 72057594046678944 2026-01-04T02:33:16.515148Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710758:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710758 msg type: 269090816 2026-01-04T02:33:16.515317Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710758, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:33:16.515639Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 FAKE_COORDINATOR: Add transaction: 281474976710758 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710758 at step: 5000041 2026-01-04T02:33:16.516398Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:33:16.516561Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710758 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 30064773232 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:33:16.516627Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_rmdir.cpp:128: TRmDir HandleReply TEvOperationPlan, opId: 281474976710758:0, step: 5000041, at schemeshard: 72057594046678944 2026-01-04T02:33:16.516774Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_rmdir.cpp:179: RmDir is done, opId: 281474976710758:0, at schemeshard: 72057594046678944 2026-01-04T02:33:16.516877Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 2026-01-04T02:33:16.516925Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-01-04T02:33:16.516990Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710758:0 progress is 1/1 2026-01-04T02:33:16.517033Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-01-04T02:33:16.517108Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:33:16.517196Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:33:16.517245Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710758, ready parts: 1/1, is published: false 2026-01-04T02:33:16.517307Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710758 ready parts: 1/1 2026-01-04T02:33:16.517358Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710758:0 
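The FLAT_TX_SCHEMESHARD entries around this point record the usual publication countdown for a finished suboperation: the transaction declares how many path versions it published ("publications: 2, subscribers: 1"), each TEvUpdateAck decrements the in-flight counter, and only when it reaches zero does the schemeshard send TEvNotifyTxCompletionResult to the waiting actor. The snippet below is a deliberately simplified, hypothetical model of that bookkeeping, not the schemeshard code; the type and method names are invented for illustration.

#include <cstdint>
#include <functional>
#include <map>
#include <vector>

struct TPublicationTracker {
    // txId -> number of path updates still awaiting a TEvUpdateAck
    std::map<uint64_t, uint32_t> InFlight;
    // txId -> callbacks to run once the last ack arrives
    std::map<uint64_t, std::vector<std::function<void()>>> Subscribers;

    void StartPublication(uint64_t txId, uint32_t publications) {
        InFlight[txId] = publications;              // "publications: 2"
    }

    void Subscribe(uint64_t txId, std::function<void()> onDone) {
        Subscribers[txId].push_back(std::move(onDone)); // "subscribers: 1"
    }

    // Called for every ack; returns true when the publication completes.
    bool AckPublish(uint64_t txId) {
        auto it = InFlight.find(txId);
        if (it == InFlight.end() || it->second == 0) {
            return false;
        }
        if (--it->second > 0) {                     // "Publication in-flight, count: 1"
            return false;
        }
        InFlight.erase(it);                         // "Publication complete, notify & remove"
        for (auto& cb : Subscribers[txId]) {
            cb();                                   // send TEvNotifyTxCompletionResult
        }
        Subscribers.erase(txId);
        return true;
    }
};

With publications = 2, the second AckPublish call is the one that fires the subscriber callback, matching the "count: 2" → "count: 1" → "Publication complete" sequence recorded in this test's log.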
2026-01-04T02:33:16.517405Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710758:0 2026-01-04T02:33:16.517498Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-04T02:33:16.517558Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710758, publications: 2, subscribers: 1 2026-01-04T02:33:16.517609Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-04T02:33:16.517652Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710758, [OwnerId: 72057594046678944, LocalPathId: 40], 18446744073709551615 2026-01-04T02:33:16.518661Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-04T02:33:16.520356Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:33:16.520402Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:33:16.520576Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710758, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-04T02:33:16.520742Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:33:16.520780Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:211:2212], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 1 2026-01-04T02:33:16.520822Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:211:2212], at schemeshard: 72057594046678944, txId: 281474976710758, path id: 40 FAKE_COORDINATOR: Erasing txId 281474976710758 2026-01-04T02:33:16.521667Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-04T02:33:16.521759Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-04T02:33:16.521832Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976710758 2026-01-04T02:33:16.521891Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], 
version: 14 2026-01-04T02:33:16.521957Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-04T02:33:16.522434Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-04T02:33:16.522521Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976710758 2026-01-04T02:33:16.522554Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976710758 2026-01-04T02:33:16.522586Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710758, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-01-04T02:33:16.522619Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:33:16.522703Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976710758, subscribers: 1 2026-01-04T02:33:16.522760Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [7:127:2152] 2026-01-04T02:33:16.523277Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:33:16.523344Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-04T02:33:16.523435Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:33:16.525586Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-04T02:33:16.526880Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710758 2026-01-04T02:33:16.527005Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710758 2026-01-04T02:33:16.527106Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710758 2026-01-04T02:33:16.527178Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:372: TExport::TTxProgress: DoExecute 2026-01-04T02:33:16.527237Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:1333: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758 2026-01-04T02:33:16.527305Z node 7 :EXPORT 
DEBUG: schemeshard_export__create.cpp:1364: TExport::TTxProgress: OnNotifyResult: txId# 281474976710758, id# 103, itemIdx# 4294967295 2026-01-04T02:33:16.527702Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:33:16.529133Z node 7 :EXPORT DEBUG: schemeshard_export__create.cpp:392: TExport::TTxProgress: DoComplete TestWaitNotification wait txId: 103 2026-01-04T02:33:16.529400Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:33:16.529455Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:33:16.530042Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:33:16.530158Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:33:16.530213Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [7:956:2914] TestWaitNotification: OK eventTxId 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> ReadSessionImplTest::BrokenCompressedData [GOOD] Test command err: 2026-01-04T02:31:40.913921Z :ReadSession INFO: Random seed for debugging is 1767493900913894 2026-01-04T02:31:41.216119Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328504433603759:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:41.216194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:41.378330Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:31:41.434432Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:31:41.872017Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:41.887066Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:41.887185Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:31:42.118572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:42.118696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:42.129184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:42.129265Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:42.296833Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:42.364015Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:42.369431Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:42.399378Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:42.418057Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:42.402621Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:42.409933Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:31:42.410672Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:31:42.423756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:42.942988Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:42.970882Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/0026a8/r3tmp/yandexWR5Y2f.tmp 2026-01-04T02:31:42.970911Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/0026a8/r3tmp/yandexWR5Y2f.tmp 2026-01-04T02:31:42.971057Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/0026a8/r3tmp/yandexWR5Y2f.tmp 2026-01-04T02:31:42.971152Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:43.037923Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:43.194570Z INFO: TTestServer started on Port 5859 GrpcPort 18163 PQClient connected to localhost:18163 2026-01-04T02:31:43.872298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:44.087530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-01-04T02:31:46.218244Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328504433603759:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:46.218323Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:47.405353Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328526899761146:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:47.405439Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328526899761136:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:47.405497Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:47.406493Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328526899761152:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:47.406573Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:47.414037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:31:47.449490Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591328526899761151:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-04T02:31:47.725994Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591328526899761181:2144] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:31:47.786184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:47.814213Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328530203408930:2340], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:31:47.817266Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591328526899761188:2313], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:31:47.816768Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZmYyMTBmMjMtZTAyMmE3MmMtODViZTIyMjYtOWQyZGVhZg==, ActorId: [1:7591328530203408871:2331], ActorState: ExecuteState, LegacyTraceId: 01ke3dhd08d54ne6t0ytgkmm0p, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:31:47.817990Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=MjEyNmNjMjktYmI2YmMyNjEtNWE0NGIwZC0yYWEwZGMzYQ==, ActorId: [2:7591328526899761119:2302], ActorState: ExecuteState, LegacyTraceId: 01ke3dhcya8m0a04bywns98eea, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:31:47.822010Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At ... No: 0 requestId: cookie: 2 2026-01-04T02:33:29.613244Z node 13 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_13_1_15138055452448791511_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 2 Data: "... 99 bytes ..." 
SourceId: "\000test-message-group-id" SeqNo: 3 WriteTimestampMS: 1767494008500 CreateTimestampMS: 1767494008498 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 1 SizeLag: 20 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 2 } 2026-01-04T02:33:29.613351Z node 13 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_13_1_15138055452448791511_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 3 from offset 3 2026-01-04T02:33:29.613387Z node 13 :PQ_READ_PROXY DEBUG: partition_actor.cpp:902: session cookie 1 consumer shared/user session shared/user_13_1_15138055452448791511_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid d66b81b5-db4f2f39-347eae8e-be7d6339 has messages 1 2026-01-04T02:33:29.613490Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1951: session cookie 1 consumer shared/user session shared/user_13_1_15138055452448791511_v1 read done: guid# d66b81b5-db4f2f39-347eae8e-be7d6339, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 220 2026-01-04T02:33:29.613531Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2113: session cookie 1 consumer shared/user session shared/user_13_1_15138055452448791511_v1 response to read: guid# d66b81b5-db4f2f39-347eae8e-be7d6339 2026-01-04T02:33:29.613757Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_13_1_15138055452448791511_v1 Process answer. Aval parts: 0 2026-01-04T02:33:29.614241Z :DEBUG: [/Root] [/Root] [e204213-ae7917f9-36928730-5bfc704b] [dc1] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:33:29.614447Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (2-2) 2026-01-04T02:33:29.614460Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_13_1_15138055452448791511_v1 grpc read done: success# 1, data# { read { } } 2026-01-04T02:33:29.614556Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_13_1_15138055452448791511_v1 got read request: guid# 43c775cc-a0ea955b-4058c200-c28e8e29 2026-01-04T02:33:29.614627Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (2-2) 2026-01-04T02:33:29.614687Z :DEBUG: [/Root] [/Root] [e204213-ae7917f9-36928730-5bfc704b] [dc1] The application data is transferred to the client. Number of messages 1, size 8 bytes DataReceived { PartitionStreamId: 1 PartitionId: 0 Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "dc1". Topic: "test-topic" Partition: 0 PartitionKey: "" Information: { Offset: 2 SeqNo: 3 MessageGroupId: "test-message-group-id" CreateTime: 2026-01-04T02:33:28.498000Z WriteTime: 2026-01-04T02:33:28.500000Z Ip: "ipv6:[::1]:33098" UncompressedSize: 8 Meta: { "logtype": "unknown", "ident": "unknown", "server": "ipv6:[::1]:33098" } } } } 2026-01-04T02:33:29.614854Z :INFO: [/Root] [/Root] [e204213-ae7917f9-36928730-5bfc704b] Closing read session. 
Close timeout: 3.000000s 2026-01-04T02:33:29.614891Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2026-01-04T02:33:29.614924Z :INFO: [/Root] [/Root] [e204213-ae7917f9-36928730-5bfc704b] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1307 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:33:29.615654Z :INFO: [/Root] [/Root] [e204213-ae7917f9-36928730-5bfc704b] Closing read session. Close timeout: 0.000000s 2026-01-04T02:33:29.615689Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2026-01-04T02:33:29.615722Z :INFO: [/Root] [/Root] [e204213-ae7917f9-36928730-5bfc704b] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1308 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:33:29.615676Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_13_1_15138055452448791511_v1 grpc read done: success# 0, data# { } 2026-01-04T02:33:29.615694Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_13_1_15138055452448791511_v1 grpc read failed 2026-01-04T02:33:29.615715Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:1678: session cookie 1 consumer shared/user session shared/user_13_1_15138055452448791511_v1 closed 2026-01-04T02:33:29.615812Z :NOTICE: [/Root] [/Root] [e204213-ae7917f9-36928730-5bfc704b] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:33:29.616085Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_13_1_15138055452448791511_v1 is DEAD 2026-01-04T02:33:29.616410Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_13_1_15138055452448791511_v1 2026-01-04T02:33:29.616455Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [13:7591328964486797401:2479] destroyed 2026-01-04T02:33:29.616473Z node 13 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [13:7591328964486797398:2476] disconnected. 2026-01-04T02:33:29.616497Z node 13 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [13:7591328964486797398:2476] disconnected; active server actors: 1 2026-01-04T02:33:29.616521Z node 14 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_13_1_15138055452448791511_v1 2026-01-04T02:33:29.616513Z node 13 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [13:7591328964486797398:2476] client user disconnected session shared/user_13_1_15138055452448791511_v1 2026-01-04T02:33:29.634460Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:29.634491Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:29.634507Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:29.634524Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:29.634538Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:29.734786Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:29.734824Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:29.734850Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:29.734866Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:29.734882Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:29.835126Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:29.835161Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:29.835177Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:29.835195Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, 
PendingWrites: 0 2026-01-04T02:33:29.835211Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:30.932178Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:33:30.932229Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:33:30.932286Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:33:30.932660Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:33:30.933145Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:33:30.933358Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:33:30.933562Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:33:30.934880Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:33:30.945224Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2026-01-04T02:33:30.945396Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2026-01-04T02:33:30.945600Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:33:30.945652Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-04T02:33:30.945673Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-04T02:33:30.945717Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 57 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_errors/unittest >> TxUsage::Sinks_Oltp_WriteToTopic_2_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::PersistenceWithStorageFailuresAndReboots [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:32:29.505388Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:29.613275Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:29.622659Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:530:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:29.622978Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:29.623140Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:30.025942Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:30.157949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:30.158166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:30.216210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:30.307735Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:31.048297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:31.049603Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:31.049658Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:31.049694Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:31.050183Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:31.121349Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:31.678364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:32:37.398732Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:37.400778Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:37.402074Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:32:37.408220Z node 3 :STATISTICS INFO: 
service_impl.cpp:234: Subscribed for config changes on node 3 2026-01-04T02:32:37.411597Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:37.412729Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:37.457262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:37.457395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:37.458095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:37.458170Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:37.498138Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:37.498399Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-04T02:32:37.501501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:37.502035Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:37.730877Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:37.730988Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:37.732264Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:37.732347Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:37.733016Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:37.733560Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:37.734383Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:37.734576Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:37.734694Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:37.734791Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:37.734913Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:37.734978Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:37.735065Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:37.749216Z node 3 :HIVE WARN: 
hive_impl.cpp:814: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:37.749962Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:37.750348Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:37.920735Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:37.935272Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:32:37.935352Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:32:37.971367Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:32:37.972379Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:32:37.972566Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:32:37.972608Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:32:37.972653Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:32:37.972692Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:32:37.972746Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:32:37.972827Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:32:37.973933Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:32:37.977490Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2354:2411] 2026-01-04T02:32:37.993757Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:32:38.016507Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2498:2448] Owner: [2:2497:2447]. Describe result: PathErrorUnknown 2026-01-04T02:32:38.016577Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2498:2448] Owner: [2:2497:2447]. Creating table 2026-01-04T02:32:38.016669Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2498:2448] Owner: [2:2497:2447]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:38.018150Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:38.018253Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2541:2457], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:38.046741Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2604:2494] 2026-01-04T02:32:38.048550Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2604:2494], schemeshard id = 72075186224037897 2026-01-04T02:32:38.090305Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2652:2501], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:38.113214Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:38.153316Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2498:2448] Owner: [2:2497:2447]. TEvProposeTransacti ... = 16, ReplyToActorId = [3:5860:3265], StatRequests.size() = 1 2026-01-04T02:33:24.736953Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:33:24.737047Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:24.793686Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 3, schemeshard count = 1 2026-01-04T02:33:24.793777Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 3 2026-01-04T02:33:24.794102Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2026-01-04T02:33:24.794227Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:33:24.794278Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 3 2026-01-04T02:33:24.805305Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 17 ], ReplyToActorId[ [3:5893:3277]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:24.805602Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 17 ] 2026-01-04T02:33:24.805639Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 17, ReplyToActorId = [3:5893:3277], StatRequests.size() = 1 row count: 5 (expected: 7) 2026-01-04T02:33:25.327543Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 18 ], ReplyToActorId[ [3:5926:3287]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:25.327725Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18 ] 2026-01-04T02:33:25.327750Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 18, 
ReplyToActorId = [3:5926:3287], StatRequests.size() = 1 2026-01-04T02:33:25.776969Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 19 ], ReplyToActorId[ [3:5957:3297]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:25.777187Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 19 ] 2026-01-04T02:33:25.777216Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 19, ReplyToActorId = [3:5957:3297], StatRequests.size() = 1 2026-01-04T02:33:26.347757Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:26.359033Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 20 ], ReplyToActorId[ [3:5996:3308]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:26.359273Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2026-01-04T02:33:26.359313Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [3:5996:3308], StatRequests.size() = 1 2026-01-04T02:33:26.905853Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [3:6032:3318]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:26.906041Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-04T02:33:26.906067Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [3:6032:3318], StatRequests.size() = 1 2026-01-04T02:33:27.467110Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 3, schemeshard count = 1 2026-01-04T02:33:27.467538Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 4 2026-01-04T02:33:27.467948Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-04T02:33:27.468006Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2026-01-04T02:33:27.468062Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 4 2026-01-04T02:33:27.478956Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [3:6070:3332]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:27.479133Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-04T02:33:27.479159Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [3:6070:3332], StatRequests.size() = 1 row count: 5 (expected: 7) 2026-01-04T02:33:28.060256Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [3:6105:3344]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:28.060489Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-04T02:33:28.060519Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [3:6105:3344], StatRequests.size() = 1 2026-01-04T02:33:28.520006Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [3:6136:3354]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:28.520313Z 
node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-04T02:33:28.520356Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [3:6136:3354], StatRequests.size() = 1 2026-01-04T02:33:29.187406Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:29.198984Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [3:6169:3364]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:29.199340Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-04T02:33:29.199394Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [3:6169:3364], StatRequests.size() = 1 2026-01-04T02:33:29.697785Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [3:6200:3374]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:29.698021Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-04T02:33:29.698065Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [3:6200:3374], StatRequests.size() = 1 2026-01-04T02:33:30.342296Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 3, schemeshard count = 1 2026-01-04T02:33:30.342595Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 5 2026-01-04T02:33:30.343145Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-04T02:33:30.343223Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:33:30.343294Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-04T02:33:30.354547Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [3:6237:3386]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:30.354819Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-04T02:33:30.354864Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [3:6237:3386], StatRequests.size() = 1 row count: 5 (expected: 7) 2026-01-04T02:33:30.940459Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [3:6276:3400]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:30.940679Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-04T02:33:30.940709Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [3:6276:3400], StatRequests.size() = 1 2026-01-04T02:33:31.476084Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [3:6309:3412]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:31.476438Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-04T02:33:31.476483Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [3:6309:3412], StatRequests.size() = 1 
2026-01-04T02:33:32.031477Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:32.042994Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [3:6346:3423]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:32.043411Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-04T02:33:32.043476Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [3:6346:3423], StatRequests.size() = 1 2026-01-04T02:33:32.562287Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [3:6380:3433]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:32.562493Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-04T02:33:32.562523Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [3:6380:3433], StatRequests.size() = 1 2026-01-04T02:33:33.030253Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:33:33.030334Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:33:33.030541Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-04T02:33:33.042946Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:33.107271Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 3, schemeshard count = 1 2026-01-04T02:33:33.107426Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 6 2026-01-04T02:33:33.107581Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-04T02:33:33.107745Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:33:33.107801Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2026-01-04T02:33:33.129906Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [3:6414:3443]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:33.130112Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-04T02:33:33.130141Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [3:6414:3443], StatRequests.size() = 1 row count: 7 (expected: 7) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_errors/unittest >> KqpErrors::ProposeErrorEvWrite [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:33:11.207281Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:11.207962Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:11.211359Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:685:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:33:11.314249Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:33:11.319033Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:33:11.319690Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2225], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:33:11.319891Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:33:11.320034Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:33:11.321163Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:33:11.321196Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:33:11.674397Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:11.762854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:11.762988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:11.763315Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:11.763364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:11.820548Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:33:11.820995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:11.821308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:11.909438Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:11.970481Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:33:12.562548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:12.628334Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:12.628471Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:12.961305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:13.807977Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1915:3234], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:13.808097Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1925:3239], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:13.808555Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:13.809385Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1931:3244], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:13.809766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:13.814553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:14.383433Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1929:3242], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:33:14.584122Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2040:3321] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:14.954927Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-01-04T02:33:14.955040Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:99} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution operation_timeout# 0.000000s cancel_after# trace_id# 2026-01-04T02:33:14.955080Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:133} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Begin literal execution transactions_count# 1 trace_id# 2026-01-04T02:33:14.955117Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:144} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Stage AST stage_id# [0,0] ast# ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) trace_id# 2026-01-04T02:33:14.955199Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 1 from task: 1 with index: 0 2026-01-04T02:33:14.957764Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:293} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. Execution is complete results_size# 1 trace_id# 2026-01-04T02:33:14.965891Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-01-04T02:33:14.965999Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:99} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01ke3dm1ad757jtc08kwempwfx, Database: , SessionId: ydb://session/3?node_id=1&id=OTE5ZmY2M2MtNTUyNWI1LWVhYjc3YjEzLTIyYWRjNjc3, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Begin literal execution operation_timeout# 299.426880s cancel_after# trace_id# 2026-01-04T02:33:14.966050Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:133} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01ke3dm1ad757jtc08kwempwfx, Database: , SessionId: ydb://session/3?node_id=1&id=OTE5ZmY2M2MtNTUyNWI1LWVhYjc3YjEzLTIyYWRjNjc3, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Begin literal execution transactions_count# 1 trace_id# 2026-01-04T02:33:14.966103Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:144} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01ke3dm1ad757jtc08kwempwfx, Database: , SessionId: ydb://session/3?node_id=1&id=OTE5ZmY2M2MtNTUyNWI1LWVhYjc3YjEzLTIyYWRjNjc3, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Stage AST stage_id# [0,0] ast# ( (return (lambda '() (block '( (let $1 (Just (Uint32 '1))) (let $2 (Just (Uint32 '2))) (let $3 (Just (Uint32 '3))) (return (ToStream (Just (AsList (AsStruct '('"key" $1) '('"value" $1)) (AsStruct '('"key" $2) '('"value" $2)) (AsStruct '('"key" $3) '('"value" $3)))))) )))) ) trace_id# 2026-01-04T02:33:14.966146Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 1 from task: 1 with index: 0 2026-01-04T02:33:14.966794Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:293} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01ke3dm1ad757jtc08kwempwfx, Database: , SessionId: ydb://session/3?node_id=1&id=OTE5ZmY2M2MtNTUyNWI1LWVhYjc3YjEzLTIyYWRjNjc3, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Execution is complete results_size# 1 trace_id# 2026-01-04T02:33:14.967020Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 0, OutputsCount: 1 2026-01-04T02:33:14.967101Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01ke3dm1ad757jtc08kwempwfx, Database: , SessionId: ydb://session/3?node_id=1&id=OTE5ZmY2M2MtNTUyNWI1LWVhYjc3YjEzLTIyYWRjNjc3, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-01-04T02:33:14.967429Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:836} ActorId: [1:2066:3232] TxId: 281474976715660. Ctx: { TraceId: 01ke3dm1ad757jtc08kwempwfx, Database: , SessionId: ydb://session/3?node_i ... d# 2026-01-04T02:33:26.389071Z node 3 :KQP_EXECUTER TRACE: {KQPDATA@kqp_data_executer.cpp:2750} ActorId: [3:2343:3472] TxId: 281474976710683. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Updating channels after the creation of compute actors trace_id# 2026-01-04T02:33:26.389106Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710683. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [3:2346:3472] 2026-01-04T02:33:26.389142Z node 3 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710683. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [3:2346:3472], channels: 0 2026-01-04T02:33:26.389187Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [3:2343:3472] TxId: 281474976710683. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [3:2346:3472], trace_id# 2026-01-04T02:33:26.389234Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2343:3472] TxId: 281474976710683. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2346:3472], trace_id# 2026-01-04T02:33:26.389289Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2350} ActorId: [3:2343:3472] TxId: 281474976710683. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-04T02:33:26.390005Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2343:3472] TxId: 281474976710683. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [3:2346:3472] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-04T02:33:26.390064Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [3:2343:3472] TxId: 281474976710683. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [3:2346:3472], trace_id# 2026-01-04T02:33:26.390119Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [3:2343:3472] TxId: 281474976710683. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [3:2346:3472], trace_id# 2026-01-04T02:33:26.390930Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:2343:3472] TxId: 281474976710683. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [3:2346:3472] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 510 Tasks { TaskId: 1 CpuTimeUs: 88 FinishTimeMs: 1767494006390 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 23 BuildCpuTimeUs: 65 HostName: "ghrun-46vtxmsa7i" NodeId: 3 CreateTimeMs: 1767494006389 UpdateTimeMs: 1767494006390 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:33:26.391014Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710683. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [3:2346:3472] 2026-01-04T02:33:26.391069Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:220} ActorId: [3:2343:3472] TxId: 281474976710683. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [3:2342:3472] trace_id# 2026-01-04T02:33:26.391134Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [3:2343:3472] TxId: 281474976710683. 
Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000510s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-04T02:33:26.405079Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:860: SelfId: [3:2349:3472], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [3:2333:3472]Got DISK_GROUP_OUT_OF_SPACE for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:2349:3472]. Ignored this error. 2026-01-04T02:33:26.405192Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:2342:3472], SessionActorId: [3:2333:3472], statusCode=UNAVAILABLE. Issue=
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2036 . sessionActorId=[3:2333:3472]. 2026-01-04T02:33:26.405469Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, ActorId: [3:2333:3472], ActorState: ExecuteState, LegacyTraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, got TEvKqpBuffer::TEvError in ExecuteState, status: UNAVAILABLE send to: [3:2343:3472] from: [3:2342:3472] trace_id# 2026-01-04T02:33:26.405601Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1071} ActorId: [3:2343:3472] TxId: 281474976710683. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got EvAbortExecution Status# UNAVAILABLE Issues# {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2036 } trace_id# 2026-01-04T02:33:26.405678Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:2343:3472] TxId: 281474976710683. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# UNAVAILABLE Issues# {
: Error: Tablet 72075186224037888 is out of space. Table `/Root/table-1`., code: 2036 } trace_id# 2026-01-04T02:33:26.405748Z node 3 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1210} ActorId: [3:2343:3472] TxId: 281474976710683. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Task does not have the CA id yet or is already complete TaskId# 1 trace_id# 2026-01-04T02:33:26.405924Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1349} ActorId: [3:2343:3472] TxId: 281474976710683. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ReplyErrorAndDie Response# Status: UNAVAILABLE Issues { message: "Tablet 72075186224037888 is out of space. Table `/Root/table-1`." issue_code: 2036 severity: 1 } Result { Stats { CpuTimeUs: 510 Tables { TablePath: "/Root/table-1" WriteRows: 1 WriteBytes: 8 AffectedPartitions: 1 } } } TargetActor# [3:2333:3472] trace_id# 2026-01-04T02:33:26.405969Z node 3 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2823} ActorId: [3:2343:3472] TxId: 281474976710683. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Shutdown immediately - nothing to wait trace_id# 2026-01-04T02:33:26.406083Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [3:2343:3472] TxId: 281474976710683. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-04T02:33:26.406122Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1505} ActorId: [3:2343:3472] TxId: 281474976710683. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# 2026-01-04T02:33:26.406337Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, ActorId: [3:2333:3472], ActorState: ExecuteState, LegacyTraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Create QueryResponse for error on request, msg: status# UNAVAILABLE issues# { message: "Tablet 72075186224037888 is out of space. Table `/Root/table-1`." issue_code: 2036 severity: 1 } trace_id# 2026-01-04T02:33:26.406527Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:187} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Bootstrap done, become ReadyState trace_id# 2026-01-04T02:33:26.406954Z node 3 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:236} ActorId: [3:2352:3472] TxId: 281474976710684. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Send Rollback to BufferActor buffer_actor_id# [3:2342:3472] trace_id# 2026-01-04T02:33:26.407205Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [3:2352:3472] TxId: 281474976710684. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-04T02:33:26.407247Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1505} ActorId: [3:2352:3472] TxId: 281474976710684. Ctx: { TraceId: 01ke3dmdj79qxg3t0kjdpqx3zt, Database: , SessionId: ydb://session/3?node_id=3&id=MzQwMzE2ZTgtYjEyMGZlYjQtMzhjZTBmNzctYTE0NGNiN2Y=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::Serverless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:31.070246Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:31.192074Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:31.201140Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:31.201538Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:31.201696Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:31.627455Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:31.731988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:31.732188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:31.781572Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:31.868330Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:32.561945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:32.562607Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:32.562683Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:32.562723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:32.563340Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:32.637549Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:33.210697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:32:36.405405Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:36.414090Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:32:36.418733Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:36.450649Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:32:36.450734Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:36.480747Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:36.483752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:36.673662Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:36.673757Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:36.675534Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:36.676224Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:36.676872Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:36.678055Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:36.678369Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:36.678560Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:36.678871Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:36.679049Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:36.679155Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:36.709093Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:36.877274Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:36.931991Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:32:36.932087Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:32:36.959826Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:32:36.961188Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:32:36.961430Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:32:36.961490Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:32:36.961550Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:32:36.961619Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:32:36.961668Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:32:36.961715Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:32:36.962291Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:32:36.962564Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:32:36.964898Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-04T02:32:36.973086Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:32:36.973155Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:32:36.973240Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:32:36.983354Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:36.994060Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:32:37.003927Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:32:37.007486Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:37.007614Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:37.034914Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:32:37.035140Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:32:37.142105Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:32:37.230205Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-04T02:32:37.446480Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:32:37.448107Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2547:2872], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:37.451000Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:37.454093Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:32:37.454224Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 2814749767 ... 6224037894] EvPropagateStatisticsResponse, cookie: 4 2026-01-04T02:32:55.067297Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 18 ], ReplyToActorId[ [2:4321:3705]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:32:55.067582Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18 ] 2026-01-04T02:32:55.067627Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 18, ReplyToActorId = [2:4321:3705], StatRequests.size() = 1 2026-01-04T02:32:56.010383Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 19 ], ReplyToActorId[ [2:4357:3722]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:32:56.010707Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 19 ] 2026-01-04T02:32:56.010749Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 19, ReplyToActorId = [2:4357:3722], StatRequests.size() = 1 2026-01-04T02:32:56.918102Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 20 ], ReplyToActorId[ [2:4389:3733]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:32:56.918501Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2026-01-04T02:32:56.918551Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:4389:3733], StatRequests.size() = 1 2026-01-04T02:32:57.851809Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:32:57.873943Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:4421:3748]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:32:57.874238Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-04T02:32:57.874292Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:4421:3748], StatRequests.size() = 1 2026-01-04T02:32:58.873150Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:4453:3763]], StatType[ 0 ], StatRequestsCount[ 1 ] 
2026-01-04T02:32:58.873637Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-04T02:32:58.873694Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:4453:3763], StatRequests.size() = 1 2026-01-04T02:32:59.722922Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:32:59.723114Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2026-01-04T02:32:59.723652Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:32:59.723775Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-04T02:32:59.735065Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:4481:3774]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:32:59.735486Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-04T02:32:59.735537Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:4481:3774], StatRequests.size() = 1 2026-01-04T02:33:00.720155Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:4515:3790]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:00.720632Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-04T02:33:00.720685Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:4515:3790], StatRequests.size() = 1 2026-01-04T02:33:01.557071Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:4545:3801]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:01.557460Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-04T02:33:01.557513Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:4545:3801], StatRequests.size() = 1 2026-01-04T02:33:02.633840Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:02.685077Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:4584:3816]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:02.685422Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-04T02:33:02.685471Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:4584:3816], StatRequests.size() = 1 2026-01-04T02:33:02.849882Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:33:02.849959Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:33:02.850002Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:33:02.850050Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, 
at schemeshard: 72057594046644480 2026-01-04T02:33:04.019803Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:4621:3832]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:04.020209Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-04T02:33:04.020261Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4621:3832], StatRequests.size() = 1 2026-01-04T02:33:05.252132Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:33:05.252545Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2026-01-04T02:33:05.252802Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-04T02:33:05.252844Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-04T02:33:05.263955Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4652:3844]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:05.264254Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-04T02:33:05.264290Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:4652:3844], StatRequests.size() = 1 2026-01-04T02:33:05.307656Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:33:05.307742Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:33:05.307968Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:33:05.322364Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:06.458943Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4686:3861]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:06.459295Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-04T02:33:06.459337Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:4686:3861], StatRequests.size() = 1 2026-01-04T02:33:07.030326Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2026-01-04T02:33:07.030401Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.988000s, at schemeshard: 72075186224037899 2026-01-04T02:33:07.030574Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-04T02:33:07.044991Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:07.626168Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: 
[TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4716:3874]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:07.626448Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-04T02:33:07.626498Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4716:3874], StatRequests.size() = 1 2026-01-04T02:33:08.735139Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:08.790728Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4750:3888]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:08.791048Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-04T02:33:08.791099Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4750:3888], StatRequests.size() = 1 2026-01-04T02:33:09.917388Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4788:3904]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:09.917589Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-04T02:33:09.917619Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4788:3904], StatRequests.size() = 1 2026-01-04T02:33:10.883030Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:33:10.883234Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 7 2026-01-04T02:33:10.883372Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-04T02:33:10.883627Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:33:10.939150Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4818:3916]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:10.939436Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-04T02:33:10.939482Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4818:3916], StatRequests.size() = 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessDbs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:20.915413Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:21.031567Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:21.041029Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:21.041430Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:21.041605Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:21.492050Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:21.584786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:21.584922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:21.632509Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:21.708330Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:22.466508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:22.467562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:22.467610Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:22.467642Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:22.468030Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:22.534740Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:23.103563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:32:26.669362Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:26.678278Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:32:26.682709Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:26.719526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:32:26.719673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:26.750048Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:26.752092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:26.950821Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.951653Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.952429Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.953336Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.953677Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.954001Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.954263Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.954456Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.954555Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:27.142439Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-04T02:32:27.154428Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:27.154526Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:27.179248Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:27.401602Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:27.471780Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:32:27.471898Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:32:27.500435Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:32:27.500763Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:32:27.501183Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:32:27.501253Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:32:27.501329Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:32:27.501398Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:32:27.501454Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:32:27.501514Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:32:27.502372Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:32:27.713740Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2223:2620] 2026-01-04T02:32:27.714299Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-04T02:32:27.717045Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Describe result: PathErrorUnknown 2026-01-04T02:32:27.717416Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Creating table 2026-01-04T02:32:27.717517Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:32:27.721507Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2627], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:27.727937Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2234:2625] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:32:27.732634Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:32:27.826618Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:32:27.853829Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:27.853966Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2302:2660], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:27.876877Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2328:2683] 2026-01-04T02:32:27.879610Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2328:2683], schemeshard id = 72075186224037897 2026-01-04T02:32:28.322898Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. 
Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:32:28.324906Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2594:2886], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:28.328630Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:28.386331Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:32:28.386490Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on create table tx: 2814749767 ... ce_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2026-01-04T02:32:56.867296Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:5356:4266], StatRequests.size() = 1 2026-01-04T02:32:57.911108Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:5385:4278]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:32:57.911439Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-04T02:32:57.911486Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:5385:4278], StatRequests.size() = 1 2026-01-04T02:32:58.518801Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-04T02:32:58.519033Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2026-01-04T02:32:58.519635Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:32:58.519870Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-04T02:32:59.001622Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:5414:4290]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:32:59.002071Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-04T02:32:59.002124Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:5414:4290], StatRequests.size() = 1 2026-01-04T02:33:00.131959Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:5451:4309]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:00.132366Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-04T02:33:00.132416Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:5451:4309], StatRequests.size() = 1 2026-01-04T02:33:01.180425Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: 
[TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:5486:4323]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:01.180826Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-04T02:33:01.180883Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:5486:4323], StatRequests.size() = 1 2026-01-04T02:33:01.938396Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:33:01.938492Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:33:01.938546Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:33:01.938593Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-04T02:33:02.058794Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:02.639369Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:5526:4342]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:02.639742Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-04T02:33:02.639792Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:5526:4342], StatRequests.size() = 1 2026-01-04T02:33:03.849370Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:5559:4356]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:03.849780Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-04T02:33:03.849852Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:5559:4356], StatRequests.size() = 1 2026-01-04T02:33:04.767969Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-04T02:33:04.768234Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 6 2026-01-04T02:33:04.768876Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:33:04.769023Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-04T02:33:04.964657Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:33:04.964743Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:33:04.965023Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:33:04.982231Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:05.562960Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], 
ReplyToActorId[ [2:5595:4374]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:05.563285Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-04T02:33:05.563329Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:5595:4374], StatRequests.size() = 1 2026-01-04T02:33:06.681463Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2026-01-04T02:33:06.681525Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.049000s, at schemeshard: 72075186224037899 2026-01-04T02:33:06.681707Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-04T02:33:06.698325Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:06.844256Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:5628:4390]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:06.844655Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-04T02:33:06.844712Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:5628:4390], StatRequests.size() = 1 2026-01-04T02:33:07.980410Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:5657:4402]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:07.980710Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-04T02:33:07.980755Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:5657:4402], StatRequests.size() = 1 2026-01-04T02:33:08.453953Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037905 2026-01-04T02:33:08.454024Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.441000s, at schemeshard: 72075186224037905 2026-01-04T02:33:08.454221Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037905, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-04T02:33:08.468841Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:08.757657Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:09.318262Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:5702:4425]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:09.318585Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-04T02:33:09.318622Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:5702:4425], StatRequests.size() = 1 2026-01-04T02:33:10.577001Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:5731:4437]], StatType[ 0 ], 
StatRequestsCount[ 1 ] 2026-01-04T02:33:10.577280Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-04T02:33:10.577315Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:5731:4437], StatRequests.size() = 1 2026-01-04T02:33:11.204364Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-04T02:33:11.204933Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 7 2026-01-04T02:33:11.205285Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-04T02:33:11.205360Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-04T02:33:11.303156Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:33:11.303209Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:33:11.303392Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:33:11.316622Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:11.732485Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:5764:4453]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:11.732800Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-04T02:33:11.732845Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:5764:4453], StatRequests.size() = 1 2026-01-04T02:33:11.733314Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:5766:4455]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:11.735855Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-04T02:33:11.735900Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:5766:4455], StatRequests.size() = 1 >> TxUsage::WriteToTopic_Demo_14_Table |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_errors/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_export/unittest >> TExportToS3Tests::CorruptedDecimalValue [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:32:33.300322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:32:33.300399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:33.300431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:32:33.300460Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:32:33.300493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:32:33.300516Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:32:33.300582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:33.300673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:32:33.301356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:32:33.301589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:32:33.385348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:32:33.385409Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:33.400080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:32:33.400253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:32:33.400401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:32:33.409613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:32:33.409911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:32:33.410708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:33.410962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:32:33.419864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:33.420137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:33.421521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:33.421601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:33.421749Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:33.421822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:33.421874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:33.422125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:32:33.567085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:33.570988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:33.571151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:33.571221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:33.571283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:33.571375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:33.571456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:33.571547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:33.571640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:33.571730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:33.571829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:33.571934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:33.572006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:33.572078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:33.572157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... INATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710759 at step: 5000041 FAKE_COORDINATOR: Send Plan to tablet 72075186233409547 for txId: 281474976710759 at step: 5000041 2026-01-04T02:33:11.178799Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:33:11.178886Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710759 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 30064773232 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:33:11.178930Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 281474976710759:0 HandleReply TEvOperationPlan, stepId: 5000041, at schemeshard: 72057594046678944 2026-01-04T02:33:11.179040Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710759:0 128 -> 129 2026-01-04T02:33:11.179184Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 REQUEST: PUT /Backup1/metadata.json HTTP/1.1 HEADERS: Host: localhost:1749 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1307A47C-DFB4-4DE2-9054-25EB66F7146F amz-sdk-request: attempt=1 content-length: 119 content-md5: oX2vUQoa+8sc65ouCpRQqg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /Backup1/metadata.json / / 119 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000041 2026-01-04T02:33:11.209224Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:33:11.209298Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 281474976710759, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-04T02:33:11.209609Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:33:11.209653Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:211:2212], at schemeshard: 72057594046678944, txId: 281474976710759, path id: 40 2026-01-04T02:33:11.210282Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710759:0, at 
schemeshard: 72057594046678944 2026-01-04T02:33:11.210349Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 281474976710759:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:33:11.211074Z node 7 :DATASHARD_BACKUP ERROR: export_scan.cpp:208: [Export] [scanner] Error read data from table: Invalid Decimal binary representation REQUEST: PUT /Backup1/metadata.json.sha256 HTTP/1.1 HEADERS: Host: localhost:1749 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 9500C808-E91E-44BC-AEA0-D8F0793227F2 amz-sdk-request: attempt=1 content-length: 78 content-md5: Vj/U8z9C9WvNxxNxUoW5sg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /Backup1/metadata.json.sha256 / / 78 2026-01-04T02:33:11.219841Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2026-01-04T02:33:11.219963Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 281474976710759 2026-01-04T02:33:11.219999Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 281474976710759 2026-01-04T02:33:11.220037Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976710759, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-01-04T02:33:11.220079Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-01-04T02:33:11.220211Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 281474976710759 2026-01-04T02:33:11.223713Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710759 2026-01-04T02:33:11.236671Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 853 RawX2: 30064773892 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid Decimal binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2026-01-04T02:33:11.236728Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976710759, tablet: 72075186233409547, partId: 0 2026-01-04T02:33:11.236835Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944, message: Source { RawX1: 853 RawX2: 30064773892 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid Decimal binary 
representation" BytesProcessed: 0 RowsProcessed: 0 } 2026-01-04T02:33:11.236980Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 281474976710759:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 853 RawX2: 30064773892 } Origin: 72075186233409547 State: 2 TxId: 281474976710759 Step: 0 Generation: 2 OpResult { Success: false Explain: "Invalid Decimal binary representation" BytesProcessed: 0 RowsProcessed: 0 } 2026-01-04T02:33:11.237091Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976710759:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:33:11.237147Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2026-01-04T02:33:11.237212Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710759:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-04T02:33:11.237265Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710759:0 129 -> 240 2026-01-04T02:33:11.237419Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 281474976710759:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:33:11.239515Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2026-01-04T02:33:11.239787Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710759:0, at schemeshard: 72057594046678944 2026-01-04T02:33:11.239831Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710759:0 ProgressState 2026-01-04T02:33:11.239972Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710759:0 progress is 1/1 2026-01-04T02:33:11.240008Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2026-01-04T02:33:11.240069Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710759:0 progress is 1/1 2026-01-04T02:33:11.240104Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2026-01-04T02:33:11.240154Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710759, ready parts: 1/1, is published: true 2026-01-04T02:33:11.240229Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:127:2152] message: TxId: 281474976710759 2026-01-04T02:33:11.240283Z node 7 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710759 ready parts: 1/1 2026-01-04T02:33:11.240323Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710759:0 2026-01-04T02:33:11.240356Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710759:0 2026-01-04T02:33:11.240505Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-04T02:33:11.242179Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710759 2026-01-04T02:33:11.242233Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710759 2026-01-04T02:33:11.242421Z node 7 :EXPORT NOTICE: schemeshard_export__create.cpp:729: TExport::TTxProgress: issues during backing up, cancelling, info# { Id: 102 Uid: '' Kind: S3 DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1] ExportPathId: [OwnerId: 72057594046678944, LocalPathId: 39] UserSID: '(empty maybe)' PeerName: '' State: Transferring WaitTxId: 0 Issue: '' Items: 1 PendingItems: 0 PendingDropItems: 0 }, item# { Idx: 0 SourcePathName: '/MyRoot/Table1' SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 38] SourcePathType: EPathTypeTable State: Done SubState: Proposed WaitTxId: 0 Issue: 'shard: 72057594046678944:2, error: Invalid Decimal binary representation' } 2026-01-04T02:33:11.243965Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:33:11.244029Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:884:2843] TestWaitNotification: OK eventTxId 102 |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |87.4%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |87.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_export/unittest |87.4%| [LD] {RESULT} $(B)/ydb/core/cms/ut_sentinel/ydb-core-cms-ut_sentinel |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher |87.4%| [LD] {RESULT} $(B)/ydb/core/cms/console/ut_configs_dispatcher/ydb-core-cms-console-ut_configs_dispatcher |87.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... results_accumulator.log} |87.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_errors/test-results/unittest/{meta.json ... 
results_accumulator.log} |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/federated_query/ut_service/slow/ydb-core-kqp-federated_query-ut_service-slow |87.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/federated_query/ut_service/slow/ydb-core-kqp-federated_query-ut_service-slow |87.4%| [LD] {RESULT} $(B)/ydb/core/kqp/federated_query/ut_service/slow/ydb-core-kqp-federated_query-ut_service-slow >> KqpBatchUpdate::SimpleOnePartition |87.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |87.5%| [LD] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/core-tx-replication-controller-ut_assign_tx_id |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots >> TExtSubDomainTest::GenericCases >> TSubDomainTest::FailIfAffectedSetNotInterior >> TSubDomainTest::CreateDummyTabletsInDifferentDomains |87.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_incremental_restore_reboots/schemeshard-ut_incremental_restore_reboots >> KqpBatchDelete::MultiStatement >> KqpBatchDelete::HasTxControl >> TSubDomainTest::Boot-EnableRealSystemViewPaths-false >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable >> TSubDomainTest::LsLs |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |87.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sequence/ydb-core-tx-schemeshard-ut_sequence >> TSubDomainTest::StartAndStopTenanNode |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut |87.5%| [LD] {RESULT} $(B)/ydb/core/http_proxy/ut/ydb-core-http_proxy-ut >> TSubDomainTest::CreateTablet |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |87.5%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/ydb-core-tx-schemeshard-ut_base |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |87.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut >> TModifyUserTest::ModifyUser >> KqpBatchDelete::DisableFlags-UseSink-UseBatchUpdates >> KqpBatchUpdate::UnknownColumn >> TxUsage::Sinks_Oltp_WriteToTopic_2_Query >> KqpBatchUpdate::ManyPartitions_2 |87.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |87.5%| [LD] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/ydb-core-kqp-proxy_service-ut |87.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |87.5%| [TA] {RESULT} $(B)/ydb/tests/olap/scenario/test-results/py3test/{meta.json ... 
results_accumulator.log} >> KqpBatchDelete::Large_2 >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Table [GOOD] >> KqpBatchDelete::ManyPartitions_2 >> KqpBatchUpdate::UpdateOn >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::CreateTableInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:234:2060] recipient: [1:228:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:234:2060] recipient: [1:228:2145] Leader for TabletID 72057594046678944 is [1:244:2155] sender: [1:245:2060] recipient: [1:228:2145] 2026-01-04T02:32:47.011482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:32:47.011624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:47.011669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:32:47.011705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:32:47.011780Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:32:47.011815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:32:47.011875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:47.011956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:32:47.012842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:32:47.013147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:32:47.107381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:32:47.107439Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:47.120493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:32:47.120967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:32:47.121164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:32:47.135543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:32:47.135776Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:32:47.136552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:47.136817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:32:47.140223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:47.140469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:47.141690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:47.141766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:47.141963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:47.142027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:47.142079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:47.142246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:32:47.319078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.320114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.320249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.320349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.320434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.320496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.320591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.320669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.320731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.320814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.320902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.321044Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.321142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.321217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.321277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:330:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
rts: 2/3 2026-01-04T02:33:37.073270Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:2 progress is 2/3 2026-01-04T02:33:37.073297Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 2/3 2026-01-04T02:33:37.073329Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2026-01-04T02:33:37.075387Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:33:37.075423Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:33:37.075444Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 104:0 2026-01-04T02:33:37.075505Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:1237:3018] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 at schemeshard: 72057594046678944 2026-01-04T02:33:37.075612Z node 7 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552132, Sender [7:244:2156], Recipient [7:1237:3018]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 104 2026-01-04T02:33:37.075648Z node 7 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2026-01-04T02:33:37.075675Z node 7 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 104 datashard 72075186233409549 state Ready 2026-01-04T02:33:37.075721Z node 7 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186233409549 Got TEvSchemaChangedResult from SS at 72075186233409549 2026-01-04T02:33:37.075905Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [7:244:2156], Recipient [7:244:2156]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T02:33:37.075945Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T02:33:37.076004Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:33:37.076042Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-04T02:33:37.076125Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:33:37.076152Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-01-04T02:33:37.076178Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-04T02:33:37.076209Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-01-04T02:33:37.076234Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-04T02:33:37.076262Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 
2026-01-04T02:33:37.076334Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:988:2806] message: TxId: 104 2026-01-04T02:33:37.076390Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-04T02:33:37.076440Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-04T02:33:37.076473Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-04T02:33:37.076595Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 4 2026-01-04T02:33:37.076639Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:1 2026-01-04T02:33:37.076664Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:1 2026-01-04T02:33:37.076704Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 3 2026-01-04T02:33:37.076751Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:2 2026-01-04T02:33:37.076774Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:2 2026-01-04T02:33:37.076817Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 3 2026-01-04T02:33:37.078890Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:33:37.078988Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:33:37.079045Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:988:2806] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 104 at schemeshard: 72057594046678944 2026-01-04T02:33:37.079147Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:33:37.079180Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [7:1289:3056] 2026-01-04T02:33:37.079354Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:1291:3058], Recipient [7:244:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:33:37.079384Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:33:37.079412Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 TestModificationResults wait txId: 105 2026-01-04T02:33:37.081738Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [8:962:2104], Recipient [7:244:2156] 2026-01-04T02:33:37.081786Z node 7 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-04T02:33:37.083965Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/tmp" OperationType: ESchemeOpCreateIndexedTable CreateIndexedTable { TableDescription { Name: "NotTempTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Utf8" } KeyColumnNames: "key" } IndexDescription { Name: "ValueIndex" KeyColumnNames: "value" } } AllowCreateInTempDir: false } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:33:37.084314Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 105:0, explain: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-04T02:33:37.084353Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-04T02:33:37.084525Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:33:37.086347Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/tmp\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 105 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:33:37.086562Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/tmp', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE TABLE WITH INDEXES, path: /MyRoot/tmp/NotTempTable 2026-01-04T02:33:37.086618Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-04T02:33:37.086949Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-04T02:33:37.086981Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-04T02:33:37.087261Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [7:1364:3131], Recipient [7:244:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:33:37.087301Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:33:37.087327Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-04T02:33:37.087434Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received 
event# 271124996, Sender [7:988:2806], Recipient [7:244:2156]: NKikimrScheme.TEvNotifyTxCompletion TxId: 105 2026-01-04T02:33:37.087457Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-04T02:33:37.087516Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-04T02:33:37.087592Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:33:37.087624Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [7:1362:3129] 2026-01-04T02:33:37.087793Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:1364:3131], Recipient [7:244:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:33:37.087818Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:33:37.087840Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 105 |87.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> BasicUsage::WaitEventBlocksBeforeDiscovery [GOOD] >> BasicUsage::SimpleHandlers >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Query |87.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} >> BasicUsage::CreateTopicWithSharedConsumer_DisabledDeadLetterPolicy [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_False |87.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |87.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |87.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TModifyUserTest::ModifyUser [GOOD] >> TModifyUserTest::ModifyLdapUser >> TSubDomainTest::Boot-EnableRealSystemViewPaths-false [GOOD] >> TSubDomainTest::Boot-EnableRealSystemViewPaths-true >> TSubDomainTest::LsLs [GOOD] >> TOlap::StoreStatsQuota [GOOD] >> TOlapNaming::AlterColumnStoreFailed >> TSubDomainTest::LsAltered |87.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::WriteToTopic_Demo_11_Table [GOOD] >> BasicStatistics::NotFullStatisticsDatashard [GOOD] >> TxUsage::WriteToTopic_Demo_11_Query >> TSubDomainTest::UserAttributes >> KqpBatchUpdate::DisableFlags-UseSink-UseBatchUpdates >> TSubDomainTest::DeleteTableAndThenForceDeleteSubDomain [GOOD] >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped >> TOlapNaming::AlterColumnStoreFailed [GOOD] >> TExtSubDomainTest::GenericCases [GOOD] >> KqpBatchDelete::DisableFlags+UseSink-UseBatchUpdates >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsDatashard [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:32:20.666326Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:20.802101Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:20.814070Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:20.814651Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:20.814849Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:21.307821Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:21.404661Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:21.404830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:21.453769Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:21.540682Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:22.248580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:22.249203Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:22.249258Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:22.249296Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:22.249976Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:22.314383Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:22.922505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:32:26.572030Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:26.580906Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:32:26.584932Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:26.617316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:32:26.617454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:26.646554Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:26.648555Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:26.832627Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:26.832755Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:26.834179Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.834833Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.835495Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.836584Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.836830Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.836937Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.837113Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.837232Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.837293Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:26.864962Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:27.144948Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:27.175803Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:32:27.175927Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:32:27.213755Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:32:27.215263Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:32:27.215524Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:32:27.215609Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:32:27.215682Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:32:27.215738Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:32:27.215807Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:32:27.215859Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:32:27.216523Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:32:27.216885Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:32:27.219982Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:32:27.230621Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:32:27.230710Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:32:27.230806Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:27.247662Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:27.261538Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:32:27.272708Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:32:27.277174Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:27.277300Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:27.314975Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:32:27.315485Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:32:27.467078Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:32:27.692581Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:32:27.915196Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:27.917477Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:27.921163Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:27.925343Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:32:27.925452Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... 44Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4118:3549], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:32:52.869001Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:32:52.869498Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:32:52.869592Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4103:3544], StatRequests.size() = 1 2026-01-04T02:32:52.869944Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 ... unblocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR 2026-01-04T02:32:52.990309Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-04T02:32:52.990394Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:32:53.100930Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:4113:3548], schemeshard count = 1 ... 
waiting for stats update from SchemeShard 2026-01-04T02:32:56.231818Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:32:58.446375Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:32:58.446794Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 7 2026-01-04T02:32:58.447096Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-04T02:32:58.447224Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-04T02:32:58.526588Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:32:58.526691Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:32:58.526972Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-04T02:32:58.545324Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete ... waiting for stats update from SchemeShard (done) ... waiting for TEvPropagateStatistics 2026-01-04T02:33:01.520303Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:03.778438Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:33:03.778840Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 8 2026-01-04T02:33:03.779191Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 ... waiting for TEvPropagateStatistics (done) 2026-01-04T02:33:03.779564Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 8 2026-01-04T02:33:03.791113Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:104: [72075186224037894] EvServerDisconnected, pipe server id = [2:2216:2651] 2026-01-04T02:33:03.919754Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:03.936776Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:33:03.936917Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:4459:3675], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:33:03.945937Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4464:3680] 2026-01-04T02:33:03.946561Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:4464:3680], schemeshard id = 72075186224037897 ... waiting for TEvPeriodicTableStats2 ... blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 ... waiting for TEvPeriodicTableStats2 (done) ... 
waiting for stats update from SchemeShard 2026-01-04T02:33:07.081232Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:09.228735Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:33:09.229256Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 9 2026-01-04T02:33:09.229710Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-04T02:33:09.229920Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 9 2026-01-04T02:33:12.126326Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:14.085440Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:33:14.085625Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 10 2026-01-04T02:33:14.085891Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 10 2026-01-04T02:33:14.086106Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 ... blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 2026-01-04T02:33:17.009100Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:18.884608Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:33:18.884931Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 11 2026-01-04T02:33:18.885203Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-04T02:33:18.885286Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 11 2026-01-04T02:33:18.896131Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:33:18.896216Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:21.780912Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:22.047073Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:33:22.047149Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:33:22.047183Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:33:22.047215Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-04T02:33:24.788692Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:33:24.788993Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 12 2026-01-04T02:33:24.789184Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 
2026-01-04T02:33:24.789254Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 ... blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 2026-01-04T02:33:28.868232Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:30.812131Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:33:30.812446Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 13 2026-01-04T02:33:30.812688Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 13 2026-01-04T02:33:30.812736Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-04T02:33:33.592444Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:35.666556Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:33:35.666768Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 14 2026-01-04T02:33:35.666896Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 14 2026-01-04T02:33:35.667119Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:33:35.700102Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-04T02:33:35.700218Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:33:35.700442Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-04T02:33:35.724440Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete ... waiting for stats update from SchemeShard (done) ... waiting for TEvPropagateStatistics ... blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_DATASHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 2026-01-04T02:33:38.867814Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:41.258340Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:33:41.258707Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 15 2026-01-04T02:33:41.258993Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 ... 
waiting for TEvPropagateStatistics (done) 2026-01-04T02:33:41.259313Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5486:4037]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:41.259535Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 15 2026-01-04T02:33:41.259682Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:33:41.259730Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5486:4037], StatRequests.size() = 1 |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TSubDomainTest::StartAndStopTenanNode [GOOD] >> TSubDomainTest::StartTenanNodeAndStopAtDestructor >> KqpBatchDelete::HasTxControl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::GracefulShardRestartNoEarlyReadSetAck [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:28:46.558337Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:28:46.686526Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:28:46.686915Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:28:46.686971Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:28:47.162905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:28:47.163084Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:28:47.301308Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767493723267726 != 1767493723267729 2026-01-04T02:28:47.317612Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:47.387541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:28:47.532916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:28:47.866710Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:28:48.721186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:28:48.893242Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-04T02:28:48.893329Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:28:48.893478Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:865:2748] 2026-01-04T02:28:49.244925Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:865:2748] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T02:28:49.245038Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:865:2748] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:28:49.245728Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T02:28:49.245881Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:865:2748] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:28:49.246308Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:28:49.246458Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:865:2748] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:28:49.246645Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:865:2748] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:28:49.248615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:28:49.249152Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:28:49.249918Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:865:2748] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:28:49.249983Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:865:2748] txid# 281474976715657 SEND to# [1:847:2730] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:28:49.283053Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:881:2763], Recipient [1:890:2769]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:28:49.284053Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:881:2763], Recipient [1:890:2769]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:28:49.284361Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:890:2769] 2026-01-04T02:28:49.284622Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:28:49.331367Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:881:2763], Recipient [1:890:2769]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:28:49.332115Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:28:49.332263Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:28:49.333911Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:28:49.333981Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:28:49.334033Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:28:49.334430Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:28:49.334551Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:28:49.334632Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:905:2769] in generation 1 2026-01-04T02:28:49.345506Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:28:49.415096Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:28:49.415331Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:28:49.415448Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:907:2779] 2026-01-04T02:28:49.415486Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:28:49.415520Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:28:49.415555Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:28:49.415815Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:890:2769], Recipient [1:890:2769]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:28:49.415893Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:28:49.416263Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:28:49.416372Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:28:49.416452Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:28:49.416502Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:28:49.416550Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:28:49.416586Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:28:49.416629Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:28:49.416661Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:28:49.416716Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:28:49.416851Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2770], Recipient [1:890:2769]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:28:49.416886Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:28:49.416935Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:887:2767], serverId# [1:892:2770], sessionId# [0:0:0] 2026-01-04T02:28:49.417028Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2770] 
2026-01-04T02:28:49.417066Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:28:49.417169Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:28:49.417397Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:28:49.417449Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:28:49.417545Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:28:49.417601Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status ... 37: Add [0:7] at 72075186224037889 to execution unit ExecuteRead 2026-01-04T02:33:37.321832Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037889 on unit ExecuteRead 2026-01-04T02:33:37.321936Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2503 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2026-01-04T02:33:37.322145Z node 29 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v2503/18446744073709551615 2026-01-04T02:33:37.322190Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[29:1297:3056], 1} after executionsCount# 1 2026-01-04T02:33:37.322229Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[29:1297:3056], 1} sends rowCount# 1, bytes# 32, quota rows left# 999, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:33:37.322293Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[29:1297:3056], 1} finished in read 2026-01-04T02:33:37.322340Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037889 is Executed 2026-01-04T02:33:37.322368Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037889 executing on unit ExecuteRead 2026-01-04T02:33:37.322394Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037889 to execution unit CompletedOperations 2026-01-04T02:33:37.322420Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037889 on unit CompletedOperations 2026-01-04T02:33:37.322461Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037889 is Executed 2026-01-04T02:33:37.322486Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037889 executing on unit CompletedOperations 2026-01-04T02:33:37.322510Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037889 has finished 2026-01-04T02:33:37.322539Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute 
with status# Executed at tablet# 72075186224037889 2026-01-04T02:33:37.322620Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:16} Tx{33, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T02:33:37.322666Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037889:1:16} Tx{33, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:33:37.322698Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-04T02:33:37.323417Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72075186224037889] send [29:1146:2944] 2026-01-04T02:33:37.323456Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037889] push event to server [29:1146:2944] 2026-01-04T02:33:37.323564Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [29:1297:3056], Recipient [29:927:2787]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-04T02:33:37.323608Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 } 2026-01-04T02:33:37.323755Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037890] ::Bootstrap [29:1300:3059] 2026-01-04T02:33:37.323844Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037890] lookup [29:1300:3059] 2026-01-04T02:33:37.324005Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72075186224037890] queue send [29:1300:3059] 2026-01-04T02:33:37.324172Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72075186224037890] forward result local node, try to connect [29:1300:3059] 2026-01-04T02:33:37.324215Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037890]::SendEvent [29:1300:3059] 2026-01-04T02:33:37.324390Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [29:1301:3060], Recipient [29:1253:3028]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:33:37.324425Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:33:37.324464Z node 29 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037890, clientId# [29:1300:3059], serverId# [29:1301:3060], sessionId# [0:0:0] 2026-01-04T02:33:37.324506Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037890] connected with status OK role: Leader [29:1300:3059] 2026-01-04T02:33:37.324546Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037890] send queued [29:1300:3059] 2026-01-04T02:33:37.324576Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037890] push event to server [29:1300:3059] 2026-01-04T02:33:37.324752Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [29:1297:3056], Recipient [29:1253:3028]: NKikimrTxDataShard.TEvRead ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2503 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2026-01-04T02:33:37.324841Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, 
NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2026-01-04T02:33:37.324893Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:33:37.324962Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 0 2026-01-04T02:33:37.325020Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit CheckRead 2026-01-04T02:33:37.325080Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-04T02:33:37.325109Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit CheckRead 2026-01-04T02:33:37.325134Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit BuildAndWaitDependencies 2026-01-04T02:33:37.325179Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit BuildAndWaitDependencies 2026-01-04T02:33:37.325235Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:1] at 72075186224037890 2026-01-04T02:33:37.325277Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-04T02:33:37.325307Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit BuildAndWaitDependencies 2026-01-04T02:33:37.325332Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit ExecuteRead 2026-01-04T02:33:37.325361Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2026-01-04T02:33:37.325460Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037890 Execute read# 1, request: { ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2503 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2026-01-04T02:33:37.325685Z node 29 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037890 promoting UnprotectedReadEdge to v2503/18446744073709551615 2026-01-04T02:33:37.325734Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037890 Complete read# {[29:1297:3056], 2} after executionsCount# 1 2026-01-04T02:33:37.325773Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037890 read iterator# {[29:1297:3056], 2} sends rowCount# 1, bytes# 32, quota rows left# 998, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:33:37.325850Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037890 read iterator# {[29:1297:3056], 2} finished in read 2026-01-04T02:33:37.325898Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-04T02:33:37.325925Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit ExecuteRead 
2026-01-04T02:33:37.325953Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit CompletedOperations 2026-01-04T02:33:37.325981Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit CompletedOperations 2026-01-04T02:33:37.326025Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-04T02:33:37.326051Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit CompletedOperations 2026-01-04T02:33:37.326077Z node 29 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1] at 72075186224037890 has finished 2026-01-04T02:33:37.326106Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2026-01-04T02:33:37.326188Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{17, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T02:33:37.326238Z node 29 :TABLET_EXECUTOR DEBUG: Leader{72075186224037890:2:4} Tx{13, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:33:37.326272Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2026-01-04T02:33:37.326722Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72075186224037890] send [29:1300:3059] 2026-01-04T02:33:37.326761Z node 29 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037890] push event to server [29:1300:3059] 2026-01-04T02:33:37.326862Z node 29 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [29:1297:3056], Recipient [29:1253:3028]: NKikimrTxDataShard.TEvReadCancel ReadId: 2 2026-01-04T02:33:37.326903Z node 29 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037890 ReadCancel: { ReadId: 2 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 11 } items { uint32_value: 111 } }, { items { uint32_value: 21 } items { uint32_value: 21 } } >> KqpBatchDelete::MultiStatement [GOOD] >> TModifyUserTest::ModifyLdapUser [GOOD] >> TModifyUserTest::ModifyUserIsEnabled |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_volatile/unittest >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Query >> TSubDomainTest::FailIfAffectedSetNotInterior [GOOD] >> TSubDomainTest::GenericCases >> TSubDomainTest::CreateTablet [GOOD] >> TSubDomainTest::CreateTabletForUnknownDomain >> TSubDomainTest::Boot-EnableRealSystemViewPaths-true [GOOD] >> TSubDomainTest::CheckAccessCopyTable >> KqpBatchUpdate::UnknownColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnStoreFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:29:17.887967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 
60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:29:17.888106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:29:17.888173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:29:17.888210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:29:17.888248Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:29:17.888282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:29:17.888342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:29:17.888429Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:29:17.889364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:29:17.889666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:29:18.177216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:29:18.177286Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:29:18.210135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:29:18.226477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:29:18.226668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:29:18.239557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:29:18.239824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:29:18.240602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:29:18.240891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:29:18.250200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:18.250458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:29:18.251846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2026-01-04T02:29:18.251915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:29:18.252060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:29:18.252107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:29:18.252183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:29:18.252323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:29:18.621874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.623192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.623351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.623478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.623541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.623610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.623674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.623739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.623822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.623907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.624004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.624092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.624177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.624270Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:29:18.624345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... nStep5000039 State->FrontStep: 5000039 2026-01-04T02:33:43.095396Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:33:43.095463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:33:43.095741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:33:43.095941Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:33:43.095998Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-04T02:33:43.096052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 39 2026-01-04T02:33:43.096153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:33:43.096216Z node 2 :FLAT_TX_SCHEMESHARD INFO: create_table.cpp:461: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-04T02:33:43.096287Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: create_table.cpp:487: TCreateColumnTable TProposedWaitParts operationId# 102:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-01-04T02:33:43.097544Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:33:43.097681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:33:43.097726Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:33:43.097773Z node 2 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-04T02:33:43.097954Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:33:43.099648Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:33:43.099746Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:33:43.099781Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:33:43.099817Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-04T02:33:43.099854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:33:43.099946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-04T02:33:43.101906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-01-04T02:33:43.102006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:39 msg type: 268697639 2026-01-04T02:33:43.102099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 0, tablet: 72057594037968897 2026-01-04T02:33:43.102489Z node 2 :HIVE INFO: tablet_helpers.cpp:1623: [72057594037968897] TEvUpdateTabletsObject, msg: ObjectId: 7726343884038809171 TabletIds: 72075186233409546 TxId: 102 TxPartId: 0 2026-01-04T02:33:43.102589Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6412: Update tablets object reply, message: Status: OK TxId: 102 TxPartId: 0, at schemeshard: 72057594046678944 2026-01-04T02:33:43.102717Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Status: OK TxId: 102 TxPartId: 0 2026-01-04T02:33:43.103628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:33:43.105467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:33:43.106721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply 
complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:33:43.120247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 2026-01-04T02:33:43.120336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:33:43.120485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:33:43.123527Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:33:43.123743Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:33:43.123802Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:33:43.123951Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:33:43.123997Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:33:43.124047Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:33:43.124105Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:33:43.124151Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:33:43.124260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:745:2721] message: TxId: 102 2026-01-04T02:33:43.124323Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:33:43.124384Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:33:43.124425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:33:43.124601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:33:43.127484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:33:43.127558Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:809:2777] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2026-01-04T02:33:43.131735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "mess age" 
Type: "Utf8" } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:33:43.132026Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: alter_store.cpp:465: TAlterOlapStore Propose, path: /MyRoot/OlapStore, opId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:33:43.132405Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2026-01-04T02:33:43.135899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:33:43.136220Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-04T02:33:43.136640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:33:43.136694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:33:43.137171Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:33:43.137286Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:33:43.137329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:847:2815] TestWaitNotification: OK eventTxId 103 |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::GenericCases [GOOD] Test command err: 2026-01-04T02:33:37.638083Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329002079955656:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:37.639109Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:37.898853Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:37.971901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:37.972015Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:37.983782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:38.101330Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:38.160582Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:33:38.462301Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591329002079955882:2126] Handle TEvNavigate describe path dc-1 2026-01-04T02:33:38.462363Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591329006374923664:2448] HANDLE EvNavigateScheme dc-1 2026-01-04T02:33:38.462485Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591329002079955893:2131], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:38.462630Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591329002079956122:2282][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591329002079955893:2131], cookie# 1 2026-01-04T02:33:38.465945Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329002079956141:2282][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329002079956138:2282], cookie# 1 2026-01-04T02:33:38.466033Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329002079956142:2282][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329002079956139:2282], cookie# 1 2026-01-04T02:33:38.466061Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329002079956143:2282][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329002079956140:2282], cookie# 1 2026-01-04T02:33:38.466119Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329002079955555:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329002079956141:2282], cookie# 1 2026-01-04T02:33:38.466155Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329002079955558:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329002079956142:2282], cookie# 1 2026-01-04T02:33:38.466189Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329002079955561:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329002079956143:2282], cookie# 1 2026-01-04T02:33:38.466259Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329002079956141:2282][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329002079955555:2050], cookie# 1 2026-01-04T02:33:38.466280Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329002079956142:2282][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329002079955558:2053], cookie# 1 2026-01-04T02:33:38.466293Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329002079956143:2282][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329002079955561:2056], cookie# 1 2026-01-04T02:33:38.466336Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329002079956122:2282][/dc-1] 
Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329002079956138:2282], cookie# 1 2026-01-04T02:33:38.466364Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591329002079956122:2282][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:33:38.466382Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329002079956122:2282][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329002079956139:2282], cookie# 1 2026-01-04T02:33:38.466415Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591329002079956122:2282][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:33:38.466456Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329002079956122:2282][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329002079956140:2282], cookie# 1 2026-01-04T02:33:38.466469Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591329002079956122:2282][/dc-1] Sync cookie mismatch: sender# [1:7591329002079956140:2282], cookie# 1, current cookie# 0 2026-01-04T02:33:38.466526Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591329002079955893:2131], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:33:38.473529Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591329002079955893:2131], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591329002079956122:2282] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:33:38.473715Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591329002079955893:2131], cacheItem# { Subscriber: { Subscriber: [1:7591329002079956122:2282] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:33:38.477072Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591329006374923665:2449], recipient# [1:7591329006374923664:2448], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 
0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:33:38.477184Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591329006374923664:2448] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:33:38.521949Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591329006374923664:2448] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:33:38.525627Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591329006374923664:2448] Handle TEvDescribeSchemeResult Forward to# [1:7591329006374923663:2447] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:33:38.534346Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591329002079955882:2126] Handle TEvProposeTransaction 2026-01-04T02:33:38.534378Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591329002079955882:2126] TxId# 281474976710657 ProcessProposeTransaction 2026-01-04T02:33:38.534471Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591329002079955882:2126] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7591329006374923671:2454] 2026-01-04T02:33:38.657995Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:38.705431Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591329002079955893:2131], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Oper ... 
tion: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2026-01-04T02:33:41.714506Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591329002079955555:2050] Subscribe: subscriber# [1:7591329019259826376:3034], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:33:41.714541Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591329002079955558:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [1:7591329019259826377:3034] 2026-01-04T02:33:41.714549Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591329002079955558:2053] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2026-01-04T02:33:41.714567Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591329002079955558:2053] Subscribe: subscriber# [1:7591329019259826377:3034], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:33:41.714590Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591329002079955561:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [1:7591329019259826378:3034] 2026-01-04T02:33:41.714597Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591329002079955561:2056] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2026-01-04T02:33:41.714616Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591329002079955561:2056] Subscribe: subscriber# [1:7591329019259826378:3034], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:33:41.714681Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7591329019259826376:3034][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7591329002079955555:2050] 2026-01-04T02:33:41.714700Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7591329019259826377:3034][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7591329002079955558:2053] 2026-01-04T02:33:41.714720Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7591329019259826378:3034][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7591329002079955561:2056] 2026-01-04T02:33:41.714752Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591329019259826372:3034][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7591329019259826373:3034] 2026-01-04T02:33:41.714807Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
subscriber.cpp:814: [main][1:7591329019259826372:3034][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7591329019259826374:3034] 2026-01-04T02:33:41.714836Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591329019259826372:3034][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7591329002079955893:2131], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:41.714857Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591329019259826372:3034][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7591329019259826375:3034] 2026-01-04T02:33:41.714899Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7591329019259826372:3034][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [1:7591329002079955893:2131], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:41.714938Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591329002079955555:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7591329019259826376:3034] 2026-01-04T02:33:41.714957Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591329002079955558:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7591329019259826377:3034] 2026-01-04T02:33:41.714977Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591329002079955561:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7591329019259826378:3034] 2026-01-04T02:33:41.715028Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591329002079955893:2131], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-04T02:33:41.715091Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591329002079955893:2131], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7591329019259826372:3034] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:33:41.715153Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591329002079955893:2131], cacheItem# { Subscriber: { Subscriber: [1:7591329019259826372:3034] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false 
SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:41.715221Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591329019259826380:3035], recipient# [1:7591329019259826369:2331], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:42.641769Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329002079955656:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:42.641896Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:33:42.717046Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591329002079955893:2131], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:42.717219Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591329002079955893:2131], cacheItem# { Subscriber: { Subscriber: [1:7591329019259826372:3034] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:42.717279Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591329002079955893:2131], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:42.717356Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591329002079955893:2131], cacheItem# { Subscriber: { Subscriber: [1:7591329006374923677:2456] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: 
KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:42.717414Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591329023554793700:3041], recipient# [1:7591329023554793698:2333], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:42.717461Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591329023554793701:3042], recipient# [1:7591329023554793699:2334], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> KqpBatchUpdate::UpdateOn [GOOD] >> KqpBatchDelete::DisableFlags-UseSink-UseBatchUpdates [GOOD] >> TSubDomainTest::LsAltered [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::HasTxControl [GOOD] Test command err: Trying to start YDB, gRPC: 29041, MsgBus: 26364 2026-01-04T02:33:37.638463Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329000460137421:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:37.638625Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:37.909528Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:37.918578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:37.918692Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:37.922056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:38.032247Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329000460137392:2081] 1767494017636484 != 1767494017636487 2026-01-04T02:33:38.046674Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:38.173696Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:38.173733Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:38.173743Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:38.173903Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-04T02:33:38.177929Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:38.650513Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:38.747297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:38.753432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:33:38.809584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:38.968203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:39.158039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:39.244332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.208933Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329017640008450:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.209149Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.213950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329017640008460:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.214046Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.590032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.634142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.707425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.766846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.807834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.854725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.905609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.967615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.059934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329021934976624:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.060021Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.060336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329021934976629:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.060384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329021934976630:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.060422Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.064823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:42.080463Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329021934976633:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:33:42.182268Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329021934976686:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:42.639289Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329000460137421:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:42.639402Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:33:43.802421Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OTY2YTIxMTItNzgxMjM2N2QtN2EwOWM1Yy0yOGExMjIxYw==, ActorId: [1:7591329026229944284:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3dmyf9b8v8wbhh3f7p51fd, Create QueryResponse for error on request, msg: BATCH operation can be executed only in the implicit transaction mode. status# PRECONDITION_FAILED issues# trace_id# >> TSubDomainTest::UserAttributes [GOOD] >> TSubDomainTest::UserAttributesApplyIf ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::MultiStatement [GOOD] Test command err: Trying to start YDB, gRPC: 13923, MsgBus: 5471 2026-01-04T02:33:37.707042Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328999800091953:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:37.707313Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:37.983684Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:37.991609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:37.991696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:38.041605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:38.112640Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328999800091722:2081] 1767494017696774 != 1767494017696777 2026-01-04T02:33:38.113973Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:38.174463Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:38.239844Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:38.239867Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:38.239876Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:38.239947Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:38.701915Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:38.731607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:38.740363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:33:38.799201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:39.004481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:39.183597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:39.257149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.004869Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329012684995489:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.005032Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.005397Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329016979962795:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.005460Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.330907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.364121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.390439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.419795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.458968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.522233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.577195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.665949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.753038Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329016979963678:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.753151Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.753496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329016979963684:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.753517Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329016979963683:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.753558Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.757538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:41.772975Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329016979963687:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:33:41.860070Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329016979963738:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:42.709954Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328999800091953:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:42.717489Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:33:43.698326Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591329025569898644:2537], status: GENERIC_ERROR, issues:
:4:32: Error: BATCH can't be used with multiple writes or reads. 2026-01-04T02:33:43.699806Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YWI1YjRmYWUtZDA1ZmVkNGItZDVmNWJjZS1mZTdlZmNi, ActorId: [1:7591329025569898635:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3dmyebagqvmy1zq267c79k, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 32 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 32 } severity: 1 } tx_id# trace_id# 2026-01-04T02:33:43.727995Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591329025569898648:2539], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2026-01-04T02:33:43.729945Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YWI1YjRmYWUtZDA1ZmVkNGItZDVmNWJjZS1mZTdlZmNi, ActorId: [1:7591329025569898635:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3dmygt44gmjgaz2y8gyvd1, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 17 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 17 } severity: 1 } tx_id# trace_id# 2026-01-04T02:33:43.775940Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591329025569898654:2542], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2026-01-04T02:33:43.777778Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YWI1YjRmYWUtZDA1ZmVkNGItZDVmNWJjZS1mZTdlZmNi, ActorId: [1:7591329025569898635:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3dmyhq6txwrffmv3x4m1rf, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 17 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 17 } severity: 1 } tx_id# trace_id# 2026-01-04T02:33:43.802952Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591329025569898660:2545], status: GENERIC_ERROR, issues:
:4:29: Error: BATCH can't be used with multiple writes or reads. 2026-01-04T02:33:43.804384Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YWI1YjRmYWUtZDA1ZmVkNGItZDVmNWJjZS1mZTdlZmNi, ActorId: [1:7591329025569898635:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3dmyk9avvbfkzm974psczs, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 29 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 29 } severity: 1 } tx_id# trace_id# 2026-01-04T02:33:43.819039Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591329025569898664:2547], status: GENERIC_ERROR, issues:
:3:29: Error: BATCH can't be used with multiple writes or reads. 2026-01-04T02:33:43.819288Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YWI1YjRmYWUtZDA1ZmVkNGItZDVmNWJjZS1mZTdlZmNi, ActorId: [1:7591329025569898635:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3dmym1acvg9jnewfe0tnm8, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 3 column: 29 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 3 column: 29 } severity: 1 } tx_id# trace_id# |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::UnknownColumn [GOOD] Test command err: Trying to start YDB, gRPC: 15278, MsgBus: 8583 2026-01-04T02:33:38.141112Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329003305107374:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:38.141249Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:38.462970Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:38.463122Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:38.518314Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:38.551644Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:38.554793Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329003305107271:2081] 1767494018132520 != 1767494018132523 2026-01-04T02:33:38.563448Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:38.670441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:38.670472Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:38.670480Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:38.670612Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:38.704784Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:39.145222Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:39.199916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:39.212720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:33:39.285753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:39.419546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:39.566592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:39.639848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.884619Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329016190011034:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.884761Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.885387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329016190011044:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.885455Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.392161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.428998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.464464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.505311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.575711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.609103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.645130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.707394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.776158Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329020484979214:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.776242Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.776724Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329020484979219:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.776761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329020484979220:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.776870Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.780512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:42.792867Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329020484979223:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:33:42.864049Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329020484979276:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:43.141974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329003305107374:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:43.142082Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:33:44.482716Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591329029074914188:2539], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:34: Error: At lambda, At function: Coalesce
:4:41: Error: At function: ==
:4:27: Error: At function: Member
:4:27: Error: Member not found: UnknownColumn 2026-01-04T02:33:44.483228Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YmZiYzE4MjItMjcwNGY2ZmEtYTk3Mjc5MzMtMzRiYzY1ZjA=, ActorId: [1:7591329029074914179:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3dmz6w2kg4vbrw16bzzyqj, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 34 } message: "At lambda, At function: Coalesce" end_position { row: 3 column: 34 } severity: 1 issues { position { row: 4 column: 41 } message: "At function: ==" end_position { row: 4 column: 41 } severity: 1 issues { position { row: 4 column: 27 } message: "At function: Member" end_position { row: 4 column: 27 } severity: 1 issues { position { row: 4 column: 27 } message: "Member not found: UnknownColumn" end_position { row: 4 column: 27 } severity: 1 } } } } } tx_id# trace_id# 2026-01-04T02:33:44.558050Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591329029074914197:2543], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:4:43: Error: At function: KiUpdateTable!
:4:43: Error: Column 'UnknownColumn' does not exist in table '/Root/Test'., code: 2017 2026-01-04T02:33:44.560072Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YmZiYzE4MjItMjcwNGY2ZmEtYTk3Mjc5MzMtMzRiYzY1ZjA=, ActorId: [1:7591329029074914179:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3dmz9jbhd95wg9p32r5zbe, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 4 column: 43 } message: "At function: KiUpdateTable!" end_position { row: 4 column: 43 } severity: 1 issues { position { row: 4 column: 43 } message: "Column \'UnknownColumn\' does not exist in table \'/Root/Test\'." end_position { row: 4 column: 43 } issue_code: 2017 severity: 1 } } } tx_id# trace_id# |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleDropIndex [GOOD] >> TSchemeshardBackgroundCleaningTest::TempInTemp >> KqpBatchDelete::SimplePartitions >> TxUsage::WriteToTopic_Demo_14_Table [GOOD] |87.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::UpdateOn [GOOD] Test command err: Trying to start YDB, gRPC: 13279, MsgBus: 6571 2026-01-04T02:33:38.650424Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329005455074241:2186];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:38.650692Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:38.687554Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:38.997901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:39.007952Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:39.008037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:39.105333Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:39.149796Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:39.245399Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:39.330378Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:39.330402Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:39.330407Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:39.330716Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:39.649933Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:39.767694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:39.776974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:33:39.837277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:39.985960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:40.158353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:40.237343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.187741Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329022634945153:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.187857Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.188338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329022634945163:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.188377Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.564004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.603062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.636155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.683730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.722635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.770016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.800751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.845441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.931607Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329022634946045:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.931687Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.931935Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329022634946050:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.931967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329022634946051:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.932009Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.936233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:42.947624Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329022634946054:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:33:43.053100Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329026929913401:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:43.650166Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329005455074241:2186];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:43.650237Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:33:44.770616Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591329031224881015:2539], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH UPDATE is unsupported with ON 2026-01-04T02:33:44.772668Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZjJhNWU5M2YtNzIzZWM5NTItZDM1ZmYwZDEtYTA2NDM1MzU=, ActorId: [1:7591329031224881006:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3dmzhhcyk28fpjpeehm426, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 2 column: 22 } message: "BATCH UPDATE is unsupported with ON" end_position { row: 2 column: 22 } severity: 1 } tx_id# trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::LsAltered [GOOD] Test command err: 2026-01-04T02:33:37.754534Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329002669809095:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:37.754661Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:38.167788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:38.167911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:38.173747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:38.220127Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:38.254759Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:38.392642Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:33:38.432945Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591329002669809322:2105] Handle TEvNavigate describe path dc-1 2026-01-04T02:33:38.433024Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591329006964776926:2268] HANDLE EvNavigateScheme dc-1 2026-01-04T02:33:38.433123Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591329002669809328:2107], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:38.433216Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591329006964776793:2196][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591329002669809328:2107], cookie# 1 2026-01-04T02:33:38.439879Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329006964776846:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329006964776843:2196], cookie# 1 2026-01-04T02:33:38.439944Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: 
[replica][1:7591329006964776847:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329006964776844:2196], cookie# 1 2026-01-04T02:33:38.439965Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329006964776848:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329006964776845:2196], cookie# 1 2026-01-04T02:33:38.440004Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329002669809026:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329006964776846:2196], cookie# 1 2026-01-04T02:33:38.440031Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329002669809029:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329006964776847:2196], cookie# 1 2026-01-04T02:33:38.440048Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329002669809032:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329006964776848:2196], cookie# 1 2026-01-04T02:33:38.440123Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329006964776846:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329002669809026:2049], cookie# 1 2026-01-04T02:33:38.440144Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329006964776847:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329002669809029:2052], cookie# 1 2026-01-04T02:33:38.440181Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329006964776848:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329002669809032:2055], cookie# 1 2026-01-04T02:33:38.440235Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329006964776793:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329006964776843:2196], cookie# 1 2026-01-04T02:33:38.440271Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591329006964776793:2196][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:33:38.440293Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329006964776793:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329006964776844:2196], cookie# 1 2026-01-04T02:33:38.440318Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591329006964776793:2196][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:33:38.440366Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329006964776793:2196][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329006964776845:2196], cookie# 1 2026-01-04T02:33:38.440382Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591329006964776793:2196][/dc-1] Sync cookie mismatch: sender# [1:7591329006964776845:2196], cookie# 1, current cookie# 0 2026-01-04T02:33:38.440439Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591329002669809328:2107], notify# 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:33:38.446759Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591329002669809328:2107], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591329006964776793:2196] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:33:38.446906Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591329002669809328:2107], cacheItem# { Subscriber: { Subscriber: [1:7591329006964776793:2196] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:33:38.449631Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591329006964776927:2269], recipient# [1:7591329006964776926:2268], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:33:38.449700Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591329006964776926:2268] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:33:38.501174Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591329006964776926:2268] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:33:38.511045Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591329006964776926:2268] Handle TEvDescribeSchemeResult Forward to# [1:7591329006964776925:2267] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 
Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:33:38.519334Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591329002669809322:2105] Handle TEvProposeTransaction 2026-01-04T02:33:38.519383Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591329002669809322:2105] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:33:38.519471Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591329002669809322:2105] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7591329006964776932:2273] 2026-01-04T02:33:38.629838Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591329006964776932:2273] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-01-04T02:33:38.629905Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:7591329006964776932:2273] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatab ... 
Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:33:42.790752Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7591329023432958518:2335] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:33:42.790813Z node 2 :TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7591329023432958518:2335] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } 2026-01-04T02:33:42.791462Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7591329023432958518:2335] Handle TEvDescribeSchemeResult Forward to# [2:7591329023432958517:2334] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1767494022308 ParentPathId: 1 PathState: EPathStateAlter Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046644480 2026-01-04T02:33:42.793963Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [2:7591329019137990807:2106] Handle TEvNavigate describe path /dc-1 2026-01-04T02:33:42.794008Z node 2 :TX_PROXY DEBUG: describe.cpp:270: Actor# [2:7591329023432958521:2338] HANDLE EvNavigateScheme /dc-1 2026-01-04T02:33:42.794075Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7591329019137990833:2119], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:42.794154Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][2:7591329019137991028:2221][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7591329019137990833:2119], cookie# 4 2026-01-04T02:33:42.794219Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: 
[replica][2:7591329019137991044:2221][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7591329019137991041:2221], cookie# 4 2026-01-04T02:33:42.794239Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7591329019137991045:2221][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7591329019137991042:2221], cookie# 4 2026-01-04T02:33:42.794254Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7591329019137991046:2221][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7591329019137991043:2221], cookie# 4 2026-01-04T02:33:42.794281Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7591329019137990511:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7591329019137991044:2221], cookie# 4 2026-01-04T02:33:42.794307Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7591329019137990514:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7591329019137991045:2221], cookie# 4 2026-01-04T02:33:42.794346Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7591329019137990517:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [2:7591329019137991046:2221], cookie# 4 2026-01-04T02:33:42.794398Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7591329019137991044:2221][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7591329019137990511:2049], cookie# 4 2026-01-04T02:33:42.794415Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7591329019137991045:2221][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7591329019137990514:2052], cookie# 4 2026-01-04T02:33:42.794431Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7591329019137991046:2221][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7591329019137990517:2055], cookie# 4 2026-01-04T02:33:42.794485Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7591329019137991028:2221][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7591329019137991041:2221], cookie# 4 2026-01-04T02:33:42.794506Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:7591329019137991028:2221][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:33:42.794525Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7591329019137991028:2221][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7591329019137991042:2221], cookie# 4 2026-01-04T02:33:42.794554Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][2:7591329019137991028:2221][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:33:42.794586Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7591329019137991028:2221][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [2:7591329019137991043:2221], cookie# 4 2026-01-04T02:33:42.794599Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][2:7591329019137991028:2221][/dc-1] 
Sync cookie mismatch: sender# [2:7591329019137991043:2221], cookie# 4, current cookie# 0 2026-01-04T02:33:42.794637Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:7591329019137990833:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:33:42.794718Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7591329019137990833:2119], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7591329019137991028:2221] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767494022280 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:33:42.794809Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7591329019137990833:2119], cacheItem# { Subscriber: { Subscriber: [2:7591329019137991028:2221] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767494022280 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2026-01-04T02:33:42.794956Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7591329023432958522:2339], recipient# [2:7591329023432958521:2338], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:33:42.794985Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7591329023432958521:2338] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:33:42.795039Z node 2 :TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7591329023432958521:2338] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:33:42.795630Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7591329023432958521:2338] Handle TEvDescribeSchemeResult Forward to# [2:7591329023432958520:2337] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1767494022280 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 
0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags-UseSink-UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 21167, MsgBus: 20401 2026-01-04T02:33:38.219909Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329006280240429:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:38.220018Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:38.490581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:38.490673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:38.547113Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:38.579890Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:38.580477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:38.662047Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:38.662069Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:38.662083Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:38.662180Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:38.748707Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:39.109871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:39.192145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:39.215899Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639258 Duration# 0.005036s 2026-01-04T02:33:39.223172Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:39.381176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:39.562806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:39.643110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.598363Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329019165144073:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.598499Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.598842Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329019165144083:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.598886Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.026801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.104445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.145182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.188693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.231700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.281341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.323360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.380836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.460162Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329023460112253:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.460234Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.460382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329023460112258:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.460417Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329023460112259:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.460722Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.464501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:42.475198Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329023460112262:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:33:42.541050Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329023460112313:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:43.217832Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329006280240429:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:43.217906Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:33:44.663310Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591329032050047191:2530], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029 2026-01-04T02:33:44.664380Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NzcyYTAyZDMtZTE1MGY1NDEtNTRjZWFmNzAtMmRlNThkYg==, ActorId: [1:7591329032050047180:2523], ActorState: ExecuteState, LegacyTraceId: 01ke3dmzcj58xtm32c3q487n55, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } } tx_id# trace_id# |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TxUsage::WriteToTopic_Demo_14_Query >> KqpBatchUpdate::SimplePartitions >> KqpBatchDelete::ManyPartitions_3 >> KqpBatchUpdate::Large_1 >> TModifyUserTest::ModifyUserIsEnabled [GOOD] >> KqpBatchUpdate::Large_3 >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable [GOOD] >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 >> TSchemeShardTest::CreateAlterTableWithCodec >> BasicUsage::PreferredDatabaseNoFallback [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartBeforeCommit_Query [GOOD] >> TSubDomainTest::CreateTabletForUnknownDomain [GOOD] >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain >> KqpProxy::InvalidSessionID >> TxUsage::Sinks_Oltp_WriteToTopic_2_Query [GOOD] >> TableCreation::ConcurrentTableCreation >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TModifyUserTest::ModifyUserIsEnabled [GOOD] Test command err: 2026-01-04T02:33:37.966711Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329000949465365:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:37.966759Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:38.000614Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:38.285006Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:38.285121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:38.311201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:38.350795Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:38.389126Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:38.406026Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification 
cookie mismatch for subscription [1:7591329000949465327:2081] 1767494017963905 != 1767494017963908 2026-01-04T02:33:38.573785Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591329000949465593:2106] Handle TEvNavigate describe path dc-1 2026-01-04T02:33:38.573861Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591329005244433173:2264] HANDLE EvNavigateScheme dc-1 2026-01-04T02:33:38.573978Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591329005244432915:2119], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:38.574081Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591329005244433119:2226][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591329005244432915:2119], cookie# 1 2026-01-04T02:33:38.582121Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591329005244432915:2119], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/script_execution_leases TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/result_sets TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:38.582189Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7591329005244432915:2119], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480 2026-01-04T02:33:38.582250Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7591329005244432915:2119], path# /dc-1/.metadata/script_execution_leases, domainOwnerId# 72057594046644480 2026-01-04T02:33:38.582276Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7591329005244432915:2119], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480 2026-01-04T02:33:38.582513Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7591329005244433174:2265][/dc-1/.metadata/script_executions] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:33:38.582906Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7591329005244433175:2266][/dc-1/.metadata/script_execution_leases] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:33:38.583198Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7591329005244433176:2267][/dc-1/.metadata/result_sets] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], 
[1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:33:38.586487Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329005244433131:2226][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329005244433128:2226], cookie# 1 2026-01-04T02:33:38.586545Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329005244433132:2226][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329005244433129:2226], cookie# 1 2026-01-04T02:33:38.586565Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329005244433133:2226][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329005244433130:2226], cookie# 1 2026-01-04T02:33:38.586601Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591329000949465298:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7591329005244433181:2265] 2026-01-04T02:33:38.586614Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591329000949465298:2052] Upsert description: path# /dc-1/.metadata/script_executions 2026-01-04T02:33:38.586677Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591329000949465298:2052] Subscribe: subscriber# [1:7591329005244433181:2265], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:33:38.586723Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591329000949465298:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_execution_leases DomainOwnerId: 72057594046644480 }: sender# [1:7591329005244433187:2266] 2026-01-04T02:33:38.586731Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591329000949465298:2052] Upsert description: path# /dc-1/.metadata/script_execution_leases 2026-01-04T02:33:38.586764Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591329000949465298:2052] Subscribe: subscriber# [1:7591329005244433187:2266], path# /dc-1/.metadata/script_execution_leases, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:33:38.586806Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591329000949465298:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/result_sets DomainOwnerId: 72057594046644480 }: sender# [1:7591329005244433193:2267] 2026-01-04T02:33:38.586814Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591329000949465298:2052] Upsert description: path# /dc-1/.metadata/result_sets 2026-01-04T02:33:38.586846Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591329000949465298:2052] Subscribe: subscriber# [1:7591329005244433193:2267], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:33:38.586875Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329000949465298:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329005244433132:2226], cookie# 1 2026-01-04T02:33:38.586903Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591329000949465301:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7591329005244433182:2265] 2026-01-04T02:33:38.586909Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591329000949465301:2055] Upsert description: path# 
/dc-1/.metadata/script_executions 2026-01-04T02:33:38.586926Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591329000949465301:2055] Subscribe: subscriber# [1:7591329005244433182:2265], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:33:38.586949Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591329000949465301:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_execution_leases DomainOwnerId: 72057594046644480 }: sender# [1:7591329005244433188:2266] 2026-01-04T02:33:38.586954Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591329000949465301:2055] Upsert description: path# /dc-1/.metadata/script_execution_leases 2026-01-04T02:33:38.586970Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591329000949465301:2055] Subscribe: subscriber# [1:7591329005244433188:2266], path# /dc-1/.metadata/script_execution_leases, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:33:38.586988Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591329000949465301:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/result_sets DomainOwnerId: 72057594046644480 }: sender# [1:7591329005244433194:2267] 2026-01-04T02:33:38.586994Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591329000949465301:2055] Upsert description: path# /dc-1/.metadata/result_sets 2026-01-04T02:33:38.587010Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591329000949465301:2055] Subscribe: subscriber# [1:7591329005244433194:2267], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:33:38.587048Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329000949465301:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329005244433133:2226], cookie# 1 2026-01-04T02:33:38.587081Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7591329005244433181:2265][/dc-1/.metadata/script_executions] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/script_executions Version: 0 }: sender# [1:7591329000949465298:2052] 2026-01-04T02:33:38.587115Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7591329005244433182:2265][/dc-1/.metadata/script_executions] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/script_executions Version: 0 }: sender# [1:7591329000949465301:2055] 2026-01-04T02:33:38.587163Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591329005244433174:2265][/dc-1/.metadata/script_executions] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/script_executions Version: 0 }: sender# [1:7591329005244433178:2265] 2026-01-04T02:33:38.587205Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591329005244433174:2265][/dc-1/.metadata/script_executions] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/script_executions Version: 0 }: sender# [1:7591329005244433179:2265] 2026-01-04T02:33:38.587238Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591329005244433174:2265][/dc-1/.metadata/script_executions] Set up state: owner# [1:7591329005244 ... 
[OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } }] } 2026-01-04T02:33:45.388112Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7591329030457334735:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7591329030457335078:2125], cookie# 9 2026-01-04T02:33:45.388141Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [3:7591329034752302706:2339] txid# 281474976710662 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:33:45.388181Z node 3 :TX_PROXY ERROR: schemereq.cpp:1228: Actor# [3:7591329034752302706:2339] txid# 281474976710662, Access denied for user2 on path /dc-1, with access AlterSchema 2026-01-04T02:33:45.388255Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591329034752302706:2339] txid# 281474976710662, issues: { message: "Access denied for user2 on path /dc-1" issue_code: 200000 severity: 1 } 2026-01-04T02:33:45.388281Z node 3 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [3:7591329034752302706:2339] txid# 281474976710662 SEND to# [3:7591329034752302705:2338] Source {TEvProposeTransactionStatus Status# 5} 2026-01-04T02:33:45.388363Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:7591329030457335078:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7591329030457334735:2049], cookie# 9 2026-01-04T02:33:45.388388Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:7591329030457335060:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7591329030457335074:2125], cookie# 9 2026-01-04T02:33:45.388401Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:7591329030457335060:2125][/dc-1] Sync cookie mismatch: sender# [3:7591329030457335074:2125], cookie# 9, current cookie# 0 2026-01-04T02:33:45.389746Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [3:7591329030457334910:2099] Handle TEvProposeTransaction 2026-01-04T02:33:45.389780Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [3:7591329030457334910:2099] TxId# 281474976710663 ProcessProposeTransaction 2026-01-04T02:33:45.389827Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [3:7591329030457334910:2099] Cookie# 0 userReqId# "" txid# 281474976710663 SEND to# [3:7591329034752302709:2342] 2026-01-04T02:33:45.392587Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [3:7591329034752302709:2342] txid# 281474976710663 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/dc-1" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "user2" Password: "password" CanLogin: false } } } } UserToken: 
"\n\005user2\022\030\022\026\n\024all-users@well-known\032\322\003eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2NzUzNzIyNSwiaWF0IjoxNzY3NDk0MDI1LCJzdWIiOiJ1c2VyMiJ9.e0psE9dzLtnXjfCtIVbFbStXzQJQQtAx4ygVmmHeWb_1BW5dNIF4R0GPd3QZ61LkzzzyKbAAX-2Xx7pDYpaDnAH_a0i8JXHVNRsT6Sc-Bnty36h5B_UNcYCz542cEJ15BREFFn7Lc3cq9NNWivXs8EZY4gNwSpaB7seqLb_IAzVdJdRocYLpOJbIcLOQMvx54Uzvn4AAs0TQtEZBXQixoFuuJOGjfviaCI0wdXvS1K8Sb7KN5HGpURk-AtgFpv8DYlDEZf56kEbPTrCycStOHasx4CShBRSoSenVqar-YaYQvvd3UjtRpvibMsGVGTx5LpGD0R6TY6kYUjWR9VAFhA\"\005Login*~eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9kYy0xIl0sImV4cCI6MTc2NzUzNzIyNSwiaWF0IjoxNzY3NDk0MDI1LCJzdWIiOiJ1c2VyMiJ9.**0\000" PeerName: "" 2026-01-04T02:33:45.392634Z node 3 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [3:7591329034752302709:2342] txid# 281474976710663 Bootstrap, UserSID: user2 CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:33:45.392646Z node 3 :TX_PROXY DEBUG: schemereq.cpp:656: Actor# [3:7591329034752302709:2342] txid# 281474976710663 Bootstrap, UserSID: user2 IsClusterAdministrator: 1 2026-01-04T02:33:45.392689Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [3:7591329034752302709:2342] txid# 281474976710663 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:33:45.392753Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591329030457334929:2104], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:45.392824Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][3:7591329030457335060:2125][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [3:7591329030457334929:2104], cookie# 10 2026-01-04T02:33:45.392871Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:7591329030457335078:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7591329030457335074:2125], cookie# 10 2026-01-04T02:33:45.392887Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:7591329030457335081:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7591329030457335075:2125], cookie# 10 2026-01-04T02:33:45.392901Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][3:7591329030457335082:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7591329030457335076:2125], cookie# 10 2026-01-04T02:33:45.392943Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7591329030457334741:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7591329030457335082:2125], cookie# 10 2026-01-04T02:33:45.393003Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:7591329030457335082:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7591329030457334741:2055], cookie# 10 2026-01-04T02:33:45.393039Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:7591329030457335060:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7591329030457335076:2125], cookie# 10 2026-01-04T02:33:45.393058Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
subscriber.cpp:986: [main][3:7591329030457335060:2125][/dc-1] Sync is in progress: cookie# 10, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:33:45.393075Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7591329030457334735:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7591329030457335078:2125], cookie# 10 2026-01-04T02:33:45.393093Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7591329030457334738:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [3:7591329030457335081:2125], cookie# 10 2026-01-04T02:33:45.393118Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:7591329030457335078:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7591329030457334735:2049], cookie# 10 2026-01-04T02:33:45.393134Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][3:7591329030457335081:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7591329030457334738:2052], cookie# 10 2026-01-04T02:33:45.393152Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:7591329030457335060:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7591329030457335074:2125], cookie# 10 2026-01-04T02:33:45.393171Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][3:7591329030457335060:2125][/dc-1] Sync is done in the ring group: cookie# 10, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:33:45.393195Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][3:7591329030457335060:2125][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 8 Partial: 0 Cluster State: { } }: sender# [3:7591329030457335075:2125], cookie# 10 2026-01-04T02:33:45.393206Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][3:7591329030457335060:2125][/dc-1] Sync cookie mismatch: sender# [3:7591329030457335075:2125], cookie# 10, current cookie# 0 2026-01-04T02:33:45.393253Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7591329030457334929:2104], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:33:45.393327Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7591329030457334929:2104], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [3:7591329030457335060:2125] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767494025185 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:33:45.393420Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591329030457334929:2104], cacheItem# { Subscriber: { Subscriber: [3:7591329030457335060:2125] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767494025185 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: 
dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2026-01-04T02:33:45.393563Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591329034752302710:2343], recipient# [3:7591329034752302709:2342], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [{ Sid: user2 },{ Sid: user1 }] Groups: [] } }] } 2026-01-04T02:33:45.393604Z node 3 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [3:7591329034752302709:2342] txid# 281474976710663 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:33:45.393645Z node 3 :TX_PROXY ERROR: schemereq.cpp:1228: Actor# [3:7591329034752302709:2342] txid# 281474976710663, Access denied for user2 on path /dc-1, with access AlterSchema 2026-01-04T02:33:45.393725Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591329034752302709:2342] txid# 281474976710663, issues: { message: "Access denied for user2 on path /dc-1" issue_code: 200000 severity: 1 } 2026-01-04T02:33:45.393746Z node 3 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [3:7591329034752302709:2342] txid# 281474976710663 SEND to# [3:7591329034752302708:2341] Source {TEvProposeTransactionStatus Status# 5} |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> ScriptExecutionsTest::RunCheckLeaseStatus >> KqpBatchUpdate::DisableFlags-UseSink-UseBatchUpdates [GOOD] >> ScriptExecutionsTest::RestartQueryWithGetOperation >> TSubDomainTest::UserAttributesApplyIf [GOOD] >> TSubDomainTest::CreateDummyTabletsInDifferentDomains [GOOD] >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain >> TxUsage::Sinks_Oltp_WriteToTopic_3_Table >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_2_Query [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_False [GOOD] >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_True ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::PreferredDatabaseNoFallback [GOOD] Test command err: 2026-01-04T02:32:50.976979Z :GetAllStartPartitionSessions INFO: Random seed for debugging is 1767493970976946 2026-01-04T02:32:51.215938Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328805204528928:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:51.216053Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:51.288631Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:51.339284Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:51.585893Z node 2 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:51.586057Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:51.603453Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:51.633907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:51.634025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:51.636944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:51.637003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:51.680913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:51.703031Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:51.703933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:51.713727Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:51.857846Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:51.867100Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:51.898233Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/002ad6/r3tmp/yandexwZPUo3.tmp 2026-01-04T02:32:51.898264Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/002ad6/r3tmp/yandexwZPUo3.tmp 2026-01-04T02:32:51.898467Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/002ad6/r3tmp/yandexwZPUo3.tmp 2026-01-04T02:32:51.898579Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:51.950317Z INFO: TTestServer started on Port 20547 GrpcPort 27606 PQClient connected to localhost:27606 2026-01-04T02:32:52.237764Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:52.253161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:52.337777Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:55.371714Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328822167494590:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:55.371773Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328822167494582:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:55.372045Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:55.373209Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328822167494598:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:55.373287Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:55.378570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:32:55.405699Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591328822167494596:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-04T02:32:55.720718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:55.725050Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591328822167494626:2141] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:55.733541Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328822384399508:2337], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:32:55.734053Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MTI4MGUxNTYtZTI4MjkwMTItMTdmMTJlZmYtZjBhNzcyZGE=, ActorId: [1:7591328822384399483:2330], ActorState: ExecuteState, LegacyTraceId: 01ke3dkfbz725wxbmjd4azfb2m, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:32:55.739633Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:32:55.757701Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591328822167494635:2312], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:32:55.758149Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZWQ4ZmMyYTktMjkxZTlmOTEtOTY4YjBmNDEtOTQ3ZTRhZDY=, ActorId: [2:7591328822167494579:2301], ActorState: ExecuteState, LegacyTraceId: 01ke3dkfa96yt9dcsv96gee64m, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:32:55.758621Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:32:55.928346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemesh ... 
PendingWrites: 0 2026-01-04T02:33:46.490828Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:46.593971Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:46.594013Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:46.594032Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:46.594055Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:46.594069Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:46.697967Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:46.698006Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:46.698023Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:46.698046Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:46.698061Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:46.801946Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:46.801984Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:46.802004Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:46.802025Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:46.802039Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:46.905466Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:46.905503Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:46.905521Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:46.905541Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:46.905559Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:47.010702Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:47.010739Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 
0 2026-01-04T02:33:47.010756Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:47.010777Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:47.010792Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:47.113950Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:47.113987Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:47.114002Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:47.114021Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:47.114034Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:47.215067Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:47.215104Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:47.215119Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:47.215137Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:47.215150Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:47.317929Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:47.317969Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:47.317985Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:47.318003Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:47.318018Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:47.355490Z :INFO: [/Root] [] [] Start federated write session to database 'dc2' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "fancy_datacenter" DbInfos: [ { name: "dc1" path: "/Root" id: "account-dc1" endpoint: "localhost:15270" location: "dc1" status: AVAILABLE weight: 1000 } { name: "dc2" path: "/Root" id: "account-dc2" endpoint: "localhost:15270" location: "dc2" status: AVAILABLE weight: 500 } { name: "dc3" path: "/Root" id: "account-dc3" endpoint: "localhost:15270" location: "dc3" status: AVAILABLE weight: 500 } ] ControlPlaneEndpoint: cp.logbroker-federation:2135 } === Closing the session 2026-01-04T02:33:47.377024Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: try to update token 
2026-01-04T02:33:47.377474Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Start write session. Will connect to nodeId: 0 2026-01-04T02:33:47.390123Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: close. Timeout 0.000000s 2026-01-04T02:33:47.390181Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session will now close 2026-01-04T02:33:47.390275Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: aborting 2026-01-04T02:33:47.390436Z :INFO: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: gracefully shut down, all writes complete 2026-01-04T02:33:47.390497Z :DEBUG: [/Root] TraceId [] SessionId [] MessageGroupId [src_id] Write session: destroy 2026-01-04T02:33:47.417991Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:47.418041Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:47.418074Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:47.418099Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:47.418121Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:47.521952Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:47.521991Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:47.522006Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:47.522027Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:47.522042Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:47.625996Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:47.626036Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:47.626053Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:47.626075Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:47.626089Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:47.725928Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:47.725965Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:47.725982Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx 
pending commits 2026-01-04T02:33:47.726002Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:47.726016Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:47.817885Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1259} ActorId: [3:7591329043576679216:2718] TxId: 281474976715700. Ctx: { TraceId: 01ke3dn1y5dteatt1jx2cgp2k2, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ZTk2YjBiNzktOTI1YWRkNzYtYTM4YzMwN2QtODkyYzIzNWU=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 trace_id# 2026-01-04T02:33:47.818683Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7591329043576679220:2718], TxId: 281474976715700, task: 3. Ctx: { CheckpointId : . TraceId : 01ke3dn1y5dteatt1jx2cgp2k2. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZTk2YjBiNzktOTI1YWRkNzYtYTM4YzMwN2QtODkyYzIzNWU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7591329043576679216:2718], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |87.6%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> TSubDomainTest::CheckAccessCopyTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::StartTenanNodeAndStopAtDestructor [GOOD] Test command err: 2026-01-04T02:33:38.187877Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329004735569530:2158];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:38.187947Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:38.533909Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:38.562140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:38.562274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:38.572550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:38.690385Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:38.778332Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:33:39.046901Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591329004735569674:2137] Handle TEvNavigate describe path dc-1 2026-01-04T02:33:39.046959Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591329009030537435:2442] HANDLE EvNavigateScheme dc-1 2026-01-04T02:33:39.047063Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591329004735569681:2140], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:39.047155Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591329004735569903:2285][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591329004735569681:2140], cookie# 1 2026-01-04T02:33:39.048682Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329004735569969:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004735569966:2285], cookie# 1 2026-01-04T02:33:39.048713Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329004735569970:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004735569967:2285], cookie# 1 2026-01-04T02:33:39.048731Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329004735569971:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# 
[1:7591329004735569968:2285], cookie# 1 2026-01-04T02:33:39.048767Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329000440602039:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004735569971:2285], cookie# 1 2026-01-04T02:33:39.048823Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329004735569971:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329000440602039:2056], cookie# 1 2026-01-04T02:33:39.048871Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329004735569903:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329004735569968:2285], cookie# 1 2026-01-04T02:33:39.048901Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591329004735569903:2285][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:33:39.048930Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329000440602033:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004735569969:2285], cookie# 1 2026-01-04T02:33:39.048957Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329000440602036:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004735569970:2285], cookie# 1 2026-01-04T02:33:39.048981Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329004735569969:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329000440602033:2050], cookie# 1 2026-01-04T02:33:39.048997Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329004735569970:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329000440602036:2053], cookie# 1 2026-01-04T02:33:39.049021Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329004735569903:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329004735569966:2285], cookie# 1 2026-01-04T02:33:39.049056Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591329004735569903:2285][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:33:39.049087Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329004735569903:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329004735569967:2285], cookie# 1 2026-01-04T02:33:39.049100Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591329004735569903:2285][/dc-1] Sync cookie mismatch: sender# [1:7591329004735569967:2285], cookie# 1, current cookie# 0 2026-01-04T02:33:39.049170Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591329004735569681:2140], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:33:39.077519Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591329004735569681:2140], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: 
[1:7591329004735569903:2285] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:33:39.077654Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591329004735569681:2140], cacheItem# { Subscriber: { Subscriber: [1:7591329004735569903:2285] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:33:39.080300Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591329009030537436:2443], recipient# [1:7591329009030537435:2442], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:33:39.080353Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591329009030537435:2442] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:33:39.136576Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591329009030537435:2442] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:33:39.147969Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591329009030537435:2442] Handle TEvDescribeSchemeResult Forward to# [1:7591329009030537434:2441] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { 
ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:33:39.154842Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591329004735569674:2137] Handle TEvProposeTransaction 2026-01-04T02:33:39.154871Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591329004735569674:2137] TxId# 281474976710657 ProcessProposeTransaction 2026-01-04T02:33:39.154959Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591329004735569674:2137] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7591329009030537442:2448] 2026-01-04T02:33:39.189554Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591329004735569681:2140], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:39.189664Z nod ... mrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7591329031196648821:2053] 2026-01-04T02:33:48.423444Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7591329048376518993:2562][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7591329031196648824:2056] 2026-01-04T02:33:48.423485Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591329048376518986:2562][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7591329048376518988:2562] 2026-01-04T02:33:48.423532Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591329048376518986:2562][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7591329048376518989:2562] 2026-01-04T02:33:48.423570Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:7591329048376518986:2562][/dc-1/.metadata/workload_manager/delayed_requests] Set up state: owner# [3:7591329031196649156:2133], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:48.423596Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591329048376518986:2562][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7591329048376518990:2562] 2026-01-04T02:33:48.423622Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: 
[main][3:7591329048376518986:2562][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7591329031196649156:2133], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:48.423644Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7591329048376518997:2563][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7591329031196648818:2050] 2026-01-04T02:33:48.423664Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7591329048376518998:2563][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7591329031196648821:2053] 2026-01-04T02:33:48.423682Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][3:7591329048376518999:2563][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7591329031196648824:2056] 2026-01-04T02:33:48.423711Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591329048376518987:2563][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7591329048376518994:2563] 2026-01-04T02:33:48.423761Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591329048376518987:2563][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7591329048376518995:2563] 2026-01-04T02:33:48.423791Z node 3 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][3:7591329048376518987:2563][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [3:7591329031196649156:2133], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:48.423812Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591329048376518987:2563][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7591329048376518996:2563] 2026-01-04T02:33:48.423833Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591329048376518987:2563][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7591329031196649156:2133], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:48.423857Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7591329031196648818:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7591329048376518991:2562] 2026-01-04T02:33:48.423873Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7591329031196648818:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7591329048376518997:2563] 2026-01-04T02:33:48.423887Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7591329031196648821:2053] Handle 
NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7591329048376518992:2562] 2026-01-04T02:33:48.423901Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7591329031196648821:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7591329048376518998:2563] 2026-01-04T02:33:48.423915Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7591329031196648824:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7591329048376518993:2562] 2026-01-04T02:33:48.423927Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7591329031196648824:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [3:7591329048376518999:2563] 2026-01-04T02:33:48.423976Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7591329031196649156:2133], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2026-01-04T02:33:48.424042Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7591329031196649156:2133], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7591329048376518986:2562] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:33:48.424164Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591329031196649156:2133], cacheItem# { Subscriber: { Subscriber: [3:7591329048376518986:2562] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:48.424193Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [3:7591329031196649156:2133], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-01-04T02:33:48.424232Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [3:7591329031196649156:2133], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [3:7591329048376518987:2563] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:33:48.424278Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591329031196649156:2133], cacheItem# { Subscriber: { Subscriber: [3:7591329048376518987:2563] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:48.424369Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591329048376519000:2564], recipient# [3:7591329048376518982:2305], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:48.654352Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591329031196649156:2133], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:48.654504Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591329031196649156:2133], cacheItem# { Subscriber: { Subscriber: [3:7591329035491617032:2545] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:48.654619Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591329048376519002:2565], recipient# [3:7591329048376519001:2308], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::UserAttributesApplyIf [GOOD] Test command err: 2026-01-04T02:33:43.048570Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329026273087535:2134];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:43.048656Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:43.326185Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed 
to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:43.333193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:43.333469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:43.335943Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:43.470702Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:43.477942Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329026273087441:2081] 1767494023045206 != 1767494023045209 2026-01-04T02:33:43.510768Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:33:43.645474Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591329026273087705:2105] Handle TEvNavigate describe path dc-1 2026-01-04T02:33:43.645521Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591329026273088009:2267] HANDLE EvNavigateScheme dc-1 2026-01-04T02:33:43.645624Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591329026273087712:2108], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:43.645707Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591329026273087878:2200][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591329026273087712:2108], cookie# 1 2026-01-04T02:33:43.647492Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329026273087923:2200][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329026273087920:2200], cookie# 1 2026-01-04T02:33:43.647537Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329026273087924:2200][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329026273087921:2200], cookie# 1 2026-01-04T02:33:43.647551Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329026273087925:2200][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329026273087922:2200], cookie# 1 2026-01-04T02:33:43.647577Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329026273087409:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329026273087923:2200], cookie# 1 2026-01-04T02:33:43.647606Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329026273087412:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329026273087924:2200], cookie# 1 2026-01-04T02:33:43.647629Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329026273087415:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329026273087925:2200], cookie# 1 2026-01-04T02:33:43.647681Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329026273087923:2200][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329026273087409:2049], cookie# 1 2026-01-04T02:33:43.647712Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329026273087924:2200][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329026273087412:2052], cookie# 1 2026-01-04T02:33:43.647731Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329026273087925:2200][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329026273087415:2055], cookie# 1 2026-01-04T02:33:43.647769Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329026273087878:2200][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329026273087920:2200], cookie# 1 2026-01-04T02:33:43.647813Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591329026273087878:2200][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:33:43.647834Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329026273087878:2200][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329026273087921:2200], cookie# 1 2026-01-04T02:33:43.647854Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591329026273087878:2200][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:33:43.647953Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329026273087878:2200][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329026273087922:2200], cookie# 1 2026-01-04T02:33:43.647966Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591329026273087878:2200][/dc-1] Sync cookie mismatch: sender# [1:7591329026273087922:2200], cookie# 1, current cookie# 0 2026-01-04T02:33:43.648018Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591329026273087712:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:33:43.654057Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591329026273087712:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591329026273087878:2200] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:33:43.654211Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591329026273087712:2108], cacheItem# { Subscriber: { Subscriber: [1:7591329026273087878:2200] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 
72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:33:43.656835Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591329026273088010:2268], recipient# [1:7591329026273088009:2267], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:33:43.656924Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591329026273088009:2267] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:33:43.704127Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591329026273088009:2267] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:33:43.707522Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591329026273088009:2267] Handle TEvDescribeSchemeResult Forward to# [1:7591329026273088008:2266] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:33:43.709245Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591329026273087705:2105] Handle 
TEvProposeTransaction 2026-01-04T02:33:43.709274Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591329026273087705:2105] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:33:43.709328Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591329026273087705:2105] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7591329026273088015:2272] 2026-01-04T02:33:43.801867Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591329026273088015:2272] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" ... Z node 2 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [2:7591329041346466613:2246] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046644480 Generation: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 6 }: sender# [2:7591329041346466619:2252], cookie# 281474976710662 2026-01-04T02:33:46.974905Z node 2 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [2:7591329041346466613:2246] Ack for unknown update (already acked?): sender# [2:7591329041346466619:2252], cookie# 281474976710662 2026-01-04T02:33:46.974981Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:7591329041346466706:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 6 }: sender# [2:7591329041346466054:2049] 2026-01-04T02:33:46.975018Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7591329041346466702:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Version: 6 }: sender# [2:7591329041346466703:2309] 2026-01-04T02:33:46.975057Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7591329041346466702:2309][/dc-1/USER_0] Path was already updated: owner# [2:7591329041346466372:2120], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 2], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 2], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:46.975102Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [2:7591329041346466054:2049] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 6 }: sender# [2:7591329041346466706:2309] 2026-01-04T02:33:46.975147Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710662 2026-01-04T02:33:46.977852Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2026-01-04T02:33:46.978537Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [2:7591329041346466344:2104] Handle TEvNavigate describe path /dc-1/USER_0 2026-01-04T02:33:46.978566Z node 2 :TX_PROXY DEBUG: describe.cpp:270: Actor# [2:7591329041346466770:2361] HANDLE EvNavigateScheme /dc-1/USER_0 2026-01-04T02:33:46.978641Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7591329041346466372:2120], request# { ErrorCount: 0 DatabaseName: 
DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:46.978715Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][2:7591329041346466702:2309][/dc-1/USER_0] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [2:7591329041346466372:2120], cookie# 10 2026-01-04T02:33:46.978773Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7591329041346466706:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7591329041346466703:2309], cookie# 10 2026-01-04T02:33:46.978787Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7591329041346466707:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7591329041346466704:2309], cookie# 10 2026-01-04T02:33:46.978800Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][2:7591329041346466708:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7591329041346466705:2309], cookie# 10 2026-01-04T02:33:46.978831Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7591329041346466054:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7591329041346466706:2309], cookie# 10 2026-01-04T02:33:46.978852Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7591329041346466057:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7591329041346466707:2309], cookie# 10 2026-01-04T02:33:46.978867Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [2:7591329041346466060:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1/USER_0 }: sender# [2:7591329041346466708:2309], cookie# 10 2026-01-04T02:33:46.978907Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7591329041346466706:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7591329041346466054:2049], cookie# 10 2026-01-04T02:33:46.978925Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7591329041346466707:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7591329041346466057:2052], cookie# 10 2026-01-04T02:33:46.978941Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][2:7591329041346466708:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7591329041346466060:2055], cookie# 10 2026-01-04T02:33:46.978971Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7591329041346466702:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7591329041346466703:2309], cookie# 10 2026-01-04T02:33:46.978994Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][2:7591329041346466702:2309][/dc-1/USER_0] Sync is in progress: cookie# 10, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:33:46.979019Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7591329041346466702:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 
Cluster State: { } }: sender# [2:7591329041346466704:2309], cookie# 10 2026-01-04T02:33:46.979037Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][2:7591329041346466702:2309][/dc-1/USER_0] Sync is done in the ring group: cookie# 10, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:33:46.979059Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][2:7591329041346466702:2309][/dc-1/USER_0] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 6 Partial: 0 Cluster State: { } }: sender# [2:7591329041346466705:2309], cookie# 10 2026-01-04T02:33:46.979070Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][2:7591329041346466702:2309][/dc-1/USER_0] Sync cookie mismatch: sender# [2:7591329041346466705:2309], cookie# 10, current cookie# 0 2026-01-04T02:33:46.979113Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:7591329041346466372:2120], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2026-01-04T02:33:46.979183Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:7591329041346466372:2120], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [2:7591329041346466702:2309] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1767494026914 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:33:46.979263Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7591329041346466372:2120], cacheItem# { Subscriber: { Subscriber: [2:7591329041346466702:2309] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 10 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1767494026914 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 10 IsSync: true Partial: 0 } 2026-01-04T02:33:46.979384Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7591329041346466771:2362], recipient# [2:7591329041346466770:2361], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:33:46.979407Z node 2 :TX_PROXY DEBUG: describe.cpp:354: Actor# [2:7591329041346466770:2361] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:33:46.979457Z node 2 :TX_PROXY DEBUG: describe.cpp:433: Actor# [2:7591329041346466770:2361] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath 
Path: "/dc-1/USER_0" Options { ShowPrivateTable: true } 2026-01-04T02:33:46.979963Z node 2 :TX_PROXY DEBUG: describe.cpp:446: Actor# [2:7591329041346466770:2361] Handle TEvDescribeSchemeResult Forward to# [2:7591329041346466769:2360] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "/dc-1/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046644480 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1767494026914 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 4 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrA3" Value: "ValA3" } } PathId: 2 PathOwnerId: 72057594046644480 >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Table >> KqpBatchDelete::DisableFlags+UseSink-UseBatchUpdates [GOOD] |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TxUsage::WriteToTopic_Demo_24_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags-UseSink-UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 29892, MsgBus: 18426 2026-01-04T02:33:43.392742Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329028944086426:2247];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:43.392905Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:43.692318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:43.766997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:43.767087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-04T02:33:43.768930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:43.876886Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:43.887110Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329028944086217:2081] 1767494023373201 != 1767494023373204 2026-01-04T02:33:43.898311Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:43.945987Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:43.946009Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:43.946021Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:43.946128Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:44.375491Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:44.448462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:44.527289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:44.696015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:44.857913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:44.958785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:46.785208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329041828989981:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:46.785350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:46.785725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329041828989991:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:46.785773Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:47.274673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:47.334055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:47.421622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:47.520276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:47.629054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:47.690088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:47.753856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:47.816595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:47.959963Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329046123958161:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:47.960043Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:47.960686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329046123958166:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:47.960737Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329046123958167:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:47.960993Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:47.965227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:47.975834Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329046123958170:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:33:48.081160Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329050418925517:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:48.392415Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329028944086426:2247];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:48.392481Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:33:50.151433Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591329059008860396:2530], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029 2026-01-04T02:33:50.153854Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NzJmNGQ5MjYtNzU3MjhkOGQtMzc4ZmVkOTUtYTNiYjBmM2U=, ActorId: [1:7591329054713893091:2524], ActorState: ExecuteState, LegacyTraceId: 01ke3dn4pg2v4ant2hsax53jh1, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } } tx_id# trace_id# |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TSubDomainTest::GenericCases [GOOD] >> TSchemeShardTest::CreateAlterTableWithCodec [GOOD] >> TSchemeShardTest::CreateAlterTableWithCacheMode >> TxUsage::WriteToTopic_Demo_42_Query [GOOD] >> Describe::DescribePartitionPermissions [GOOD] >> DirectReadWithServer::KillPQTablet |87.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |87.6%| [LD] {RESULT} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut |87.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/sharding/ut/ydb-core-tx-sharding-ut >> TableCreation::TableCreationWithAcl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CheckAccessCopyTable [GOOD] Test command err: 2026-01-04T02:33:37.726013Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328999933765762:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:37.726580Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:37.761562Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:38.068847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:38.069016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:38.071348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:38.168061Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:38.173933Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:38.333774Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591328999933765969:2105] Handle TEvNavigate describe path dc-1 2026-01-04T02:33:38.333839Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591329004228733548:2263] HANDLE EvNavigateScheme dc-1 2026-01-04T02:33:38.333958Z node 1 :TX_PROXY_SCHEME_CACHE 
DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591328999933765977:2108], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:38.334059Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591329004228733511:2235][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591328999933765977:2108], cookie# 1 2026-01-04T02:33:38.335803Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329004228733527:2235][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004228733524:2235], cookie# 1 2026-01-04T02:33:38.335842Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329004228733528:2235][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004228733525:2235], cookie# 1 2026-01-04T02:33:38.335860Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329004228733529:2235][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004228733526:2235], cookie# 1 2026-01-04T02:33:38.335906Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328999933765673:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004228733527:2235], cookie# 1 2026-01-04T02:33:38.335953Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328999933765676:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004228733528:2235], cookie# 1 2026-01-04T02:33:38.335974Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328999933765679:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004228733529:2235], cookie# 1 2026-01-04T02:33:38.336010Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329004228733527:2235][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328999933765673:2049], cookie# 1 2026-01-04T02:33:38.336026Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329004228733528:2235][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328999933765676:2052], cookie# 1 2026-01-04T02:33:38.336041Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329004228733529:2235][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328999933765679:2055], cookie# 1 2026-01-04T02:33:38.336080Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329004228733511:2235][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329004228733524:2235], cookie# 1 2026-01-04T02:33:38.336121Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591329004228733511:2235][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:33:38.336149Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329004228733511:2235][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { 
Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329004228733525:2235], cookie# 1 2026-01-04T02:33:38.336171Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591329004228733511:2235][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:33:38.336221Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329004228733511:2235][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329004228733526:2235], cookie# 1 2026-01-04T02:33:38.336236Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591329004228733511:2235][/dc-1] Sync cookie mismatch: sender# [1:7591329004228733526:2235], cookie# 1, current cookie# 0 2026-01-04T02:33:38.336308Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591328999933765977:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:33:38.346549Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591328999933765977:2108], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591329004228733511:2235] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:33:38.346689Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591328999933765977:2108], cacheItem# { Subscriber: { Subscriber: [1:7591329004228733511:2235] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:33:38.357729Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591329004228733549:2264], recipient# [1:7591329004228733548:2263], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:33:38.357835Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591329004228733548:2263] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:33:38.399494Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591329004228733548:2263] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath 
Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:33:38.402840Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591329004228733548:2263] Handle TEvDescribeSchemeResult Forward to# [1:7591329004228733547:2262] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:33:38.414117Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591328999933765969:2105] Handle TEvProposeTransaction 2026-01-04T02:33:38.414144Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591328999933765969:2105] TxId# 281474976710657 ProcessProposeTransaction 2026-01-04T02:33:38.414200Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591328999933765969:2105] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7591329004228733555:2269] 2026-01-04T02:33:38.466076Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591328999933765977:2108], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/script_execution_leases TableId: [18446744073 ... 
nown DomainInfo }] } 2026-01-04T02:33:51.364925Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:33:51.370259Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7591329060032681816:2236][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7591329060032681831:2236] 2026-01-04T02:33:51.370326Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591329060032681816:2236][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7591329038557845059:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:51.471521Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7591329038557845059:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:51.471662Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7591329038557845059:2109], cacheItem# { Subscriber: { Subscriber: [4:7591329060032681814:2234] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:51.471714Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7591329038557845059:2109], cacheItem# { Subscriber: { Subscriber: [4:7591329060032681815:2235] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:51.471834Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7591329060032681876:2241], recipient# [4:7591329060032681809:2312], result# { ErrorCount: 2 
DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:51.475639Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:7591329060032681809:2312], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:33:51.525489Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7591329060032681815:2235][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7591329060032681823:2235] 2026-01-04T02:33:51.525576Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591329060032681815:2235][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7591329038557845059:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:51.525623Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7591329060032681816:2236][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7591329060032681829:2236] 2026-01-04T02:33:51.525651Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7591329060032681815:2235][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7591329060032681824:2235] 2026-01-04T02:33:51.525676Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591329060032681815:2235][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7591329038557845059:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:51.525693Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591329060032681816:2236][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7591329038557845059:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:51.525696Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7591329060032681815:2235][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [4:7591329060032681825:2235] 2026-01-04T02:33:51.525720Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591329060032681815:2235][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [4:7591329038557845059:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:51.525722Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7591329060032681816:2236][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# 
[4:7591329060032681830:2236] 2026-01-04T02:33:51.525831Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7591329060032681814:2234][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7591329060032681817:2234] 2026-01-04T02:33:51.525864Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591329060032681814:2234][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7591329038557845059:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:51.525885Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7591329060032681814:2234][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7591329060032681818:2234] 2026-01-04T02:33:51.525917Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591329060032681814:2234][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7591329038557845059:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:51.525939Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7591329060032681814:2234][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [4:7591329060032681819:2234] 2026-01-04T02:33:51.525963Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591329060032681814:2234][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [4:7591329038557845059:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:51.526022Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591329060032681816:2236][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7591329038557845059:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:51.530907Z node 4 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][4:7591329060032681816:2236][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7591329060032681831:2236] 2026-01-04T02:33:51.530980Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591329060032681816:2236][/dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7591329038557845059:2109], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: 
DomainId: AbandonedSchemeShards: there are 0 elements } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::GenericCases [GOOD] Test command err: 2026-01-04T02:33:37.785708Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329002295973278:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:37.785773Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:37.845009Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:38.099563Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:38.145958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:38.147034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:38.171627Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:38.264501Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:38.309099Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:33:38.489281Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591329002295973515:2138] Handle TEvNavigate describe path dc-1 2026-01-04T02:33:38.489340Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591329006590941272:2441] HANDLE EvNavigateScheme dc-1 2026-01-04T02:33:38.489525Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591329002295973543:2152], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:38.489674Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591329006590941048:2290][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591329002295973543:2152], cookie# 1 2026-01-04T02:33:38.491469Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329006590941077:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329006590941074:2290], cookie# 1 2026-01-04T02:33:38.491545Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329006590941078:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329006590941075:2290], cookie# 1 2026-01-04T02:33:38.491571Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: 
[replica][1:7591329006590941079:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329006590941076:2290], cookie# 1 2026-01-04T02:33:38.491609Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329002295973170:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329006590941077:2290], cookie# 1 2026-01-04T02:33:38.491633Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329002295973173:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329006590941078:2290], cookie# 1 2026-01-04T02:33:38.491649Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329002295973176:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329006590941079:2290], cookie# 1 2026-01-04T02:33:38.491724Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329006590941077:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329002295973170:2051], cookie# 1 2026-01-04T02:33:38.491743Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329006590941079:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329002295973176:2057], cookie# 1 2026-01-04T02:33:38.491759Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329006590941078:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329002295973173:2054], cookie# 1 2026-01-04T02:33:38.491803Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329006590941048:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329006590941074:2290], cookie# 1 2026-01-04T02:33:38.491828Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591329006590941048:2290][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:33:38.491864Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329006590941048:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329006590941076:2290], cookie# 1 2026-01-04T02:33:38.491889Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591329006590941048:2290][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:33:38.491937Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329006590941048:2290][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329006590941075:2290], cookie# 1 2026-01-04T02:33:38.491971Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591329006590941048:2290][/dc-1] Sync cookie mismatch: sender# [1:7591329006590941075:2290], cookie# 1, current cookie# 0 2026-01-04T02:33:38.492027Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591329002295973543:2152], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:33:38.509205Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591329002295973543:2152], notify# 
NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591329006590941048:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:33:38.509336Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591329002295973543:2152], cacheItem# { Subscriber: { Subscriber: [1:7591329006590941048:2290] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:33:38.512289Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591329006590941274:2443], recipient# [1:7591329006590941272:2441], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:33:38.512370Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591329006590941272:2441] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:33:38.542841Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591329006590941272:2441] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:33:38.546475Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591329006590941272:2441] Handle TEvDescribeSchemeResult Forward to# [1:7591329006590941271:2440] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 
ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:33:38.558369Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591329002295973515:2138] Handle TEvProposeTransaction 2026-01-04T02:33:38.558409Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591329002295973515:2138] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:33:38.558487Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591329002295973515:2138] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7591329006590941279:2447] 2026-01-04T02:33:38.710605Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591329006590941279:2447] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: ... 1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [4:7591329054131598426:3035] 2026-01-04T02:33:49.238218Z node 4 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][4:7591329054131598422:3035][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [4:7591329036951727987:2131], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:49.238276Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [4:7591329036951727987:2131], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-04T02:33:49.238358Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [4:7591329036951727987:2131], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [4:7591329054131598422:3035] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:33:49.238453Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7591329036951727987:2131], cacheItem# { Subscriber: { Subscriber: [4:7591329054131598422:3035] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:49.238546Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7591329054131598430:3037], recipient# [4:7591329054131598421:2325], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:49.621614Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7591329036951727987:2131], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:49.621728Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7591329036951727987:2131], cacheItem# { Subscriber: { Subscriber: [4:7591329041246695915:2562] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:49.621830Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7591329054131598432:3038], recipient# [4:7591329054131598431:2326], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:50.242080Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7591329036951727987:2131], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:50.242266Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7591329036951727987:2131], cacheItem# { Subscriber: { Subscriber: [4:7591329054131598422:3035] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: 
dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:50.242361Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7591329058426565739:3042], recipient# [4:7591329058426565738:2327], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:50.623800Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7591329036951727987:2131], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:50.623951Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7591329036951727987:2131], cacheItem# { Subscriber: { Subscriber: [4:7591329041246695915:2562] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:50.624051Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7591329058426565743:3045], recipient# [4:7591329058426565742:2328], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:51.242322Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7591329036951727987:2131], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:51.242473Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7591329036951727987:2131], cacheItem# { Subscriber: { Subscriber: [4:7591329054131598422:3035] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 
IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:51.242538Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7591329062721533052:3051], recipient# [4:7591329062721533051:2329], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:51.626670Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7591329036951727987:2131], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:51.626815Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7591329036951727987:2131], cacheItem# { Subscriber: { Subscriber: [4:7591329041246695915:2562] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:51.626912Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7591329062721533054:3052], recipient# [4:7591329062721533053:2330], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags+UseSink-UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 21407, MsgBus: 21636 2026-01-04T02:33:44.006312Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329032409434562:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:44.006402Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:44.050072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to 
scheme cache: ActorUnknown 2026-01-04T02:33:44.310081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:44.310218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:44.312229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:44.369261Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:44.428451Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329028114467238:2081] 1767494024005730 != 1767494024005733 2026-01-04T02:33:44.433638Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:44.538497Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:44.555885Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:44.555909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:44.555918Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:44.556004Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:45.020120Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:45.071684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:45.173311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:45.392123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:45.654690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:45.758843Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:48.578745Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329049589305610:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:48.578894Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:48.579293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329049589305620:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:48.579342Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:48.989053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:49.006733Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329032409434562:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:49.006800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:33:49.036136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:49.085737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:49.165196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:49.204510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:49.254718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:49.301196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:49.365836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:49.481221Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329053884273798:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:49.481313Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:49.481527Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329053884273800:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:49.481571Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:49.482084Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329053884273805:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:49.485892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:49.498964Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329053884273807:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:33:49.568131Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329053884273860:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:51.673732Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591329062474208792:2541], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029 2026-01-04T02:33:51.676771Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NjhjNWNiYTctZTIwMGEwNWUtZWU5ZmY4MTUtZDNlZGJkNmY=, ActorId: [1:7591329062474208783:2535], ActorState: ExecuteState, LegacyTraceId: 01ke3dn65j7pnptag9agjwp0hw, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } } tx_id# trace_id# |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |87.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TxUsage::WriteToTopic_Demo_43_Table >> TxUsage::WriteToTopic_Demo_21_RestartBeforeCommit_Query [GOOD] >> HttpRequest::ProbeBaseStatsServerless [GOOD] >> KqpProxy::InvalidSessionID [GOOD] >> KqpProxy::LoadedMetadataAfterCompilationTimeout >> TableCreation::MultipleTablesCreation >> TableCreation::SimpleTableCreation >> KqpProxy::CalcPeerStats [GOOD] >> KqpProxy::CreatesScriptExecutionsTable >> TSchemeShardTest::CreateAlterTableWithCacheMode [GOOD] >> TSchemeShardTest::CopyTableTwiceSimultaneously |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |87.7%| [LD] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/ydb-core-tx-columnshard-ut_schema |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |87.7%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_table_writer/ydb-core-backup-impl-ut_table_writer >> KqpProxy::PassErrroViaSessionActor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeBaseStatsServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:23.760331Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:23.895104Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:23.903972Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:23.904412Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:23.904625Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:24.423611Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:24.580034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:24.580266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:24.640573Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:24.785330Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:25.622560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:25.623526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:25.623569Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:25.623599Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:25.623941Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:25.690456Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:26.367538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:32:29.554972Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:29.564304Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:32:29.568970Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:29.608425Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:32:29.608584Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:29.638874Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:29.640798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:29.824322Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:29.825229Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:29.826167Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:29.827137Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:29.827428Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:29.827695Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:29.827913Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:29.828181Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:29.828321Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:30.006960Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-04T02:32:30.036608Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:30.036742Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:30.060766Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:30.265799Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:30.341736Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:32:30.341879Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:32:30.379163Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:32:30.379547Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:32:30.379800Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:32:30.379874Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:32:30.379933Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:32:30.380004Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:32:30.380068Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:32:30.380124Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:32:30.380948Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:32:30.548403Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2223:2620] 2026-01-04T02:32:30.548895Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-04T02:32:30.551399Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Describe result: PathErrorUnknown 2026-01-04T02:32:30.551454Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Creating table 2026-01-04T02:32:30.551546Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:32:30.555919Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2627], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:30.559964Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2234:2625] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:32:30.564665Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:32:30.633973Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:32:30.655420Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:30.655544Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2302:2660], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:30.674945Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2328:2683] 2026-01-04T02:32:30.676577Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2328:2683], schemeshard id = 72075186224037897 2026-01-04T02:32:31.067137Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. 
Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:32:31.069539Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2594:2886], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:31.073218Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:31.077354Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:32:31.077466Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on create table tx: 2814749767 ... sult] RequestId[ 51 ] 2026-01-04T02:33:33.857326Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 51, ReplyToActorId = [2:6294:5054], StatRequests.size() = 1 2026-01-04T02:33:35.044465Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 52 ], ReplyToActorId[ [2:6328:5070]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:35.044809Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 52 ] 2026-01-04T02:33:35.044845Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 52, ReplyToActorId = [2:6328:5070], StatRequests.size() = 1 2026-01-04T02:33:35.559668Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:33:35.559744Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:33:35.560015Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:33:35.574107Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:36.339279Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 53 ], ReplyToActorId[ [2:6366:5091]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:36.339726Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 53 ] 2026-01-04T02:33:36.339782Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 53, ReplyToActorId = [2:6366:5091], StatRequests.size() = 1 2026-01-04T02:33:37.468765Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:37.593137Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 54 ], ReplyToActorId[ [2:6398:5106]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:37.593540Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 54 ] 2026-01-04T02:33:37.593592Z 
node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 54, ReplyToActorId = [2:6398:5106], StatRequests.size() = 1 2026-01-04T02:33:38.153777Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 1, at schemeshard: 72075186224037899 2026-01-04T02:33:38.153872Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.866000s, at schemeshard: 72075186224037899 2026-01-04T02:33:38.154101Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 25, entries count: 1, are all stats full: 0 2026-01-04T02:33:38.171101Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:38.896254Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 55 ], ReplyToActorId[ [2:6434:5123]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:38.896625Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 55 ] 2026-01-04T02:33:38.896672Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 55, ReplyToActorId = [2:6434:5123], StatRequests.size() = 1 2026-01-04T02:33:39.631220Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:33:39.631286Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:33:39.631318Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:33:39.631350Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-04T02:33:40.810940Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 56 ], ReplyToActorId[ [2:6479:5144]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:40.811381Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 56 ] 2026-01-04T02:33:40.811428Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 56, ReplyToActorId = [2:6479:5144], StatRequests.size() = 1 2026-01-04T02:33:41.585553Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:33:41.585826Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 11 2026-01-04T02:33:41.586507Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 11 2026-01-04T02:33:42.543937Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 57 ], ReplyToActorId[ [2:6515:5161]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:42.544383Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 57 ] 2026-01-04T02:33:42.544437Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 57, ReplyToActorId = [2:6515:5161], StatRequests.size() = 1 2026-01-04T02:33:43.727019Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, 
at schemeshard: 72075186224037897 2026-01-04T02:33:43.727083Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:33:43.727357Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:33:43.746244Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:45.259601Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 58 ], ReplyToActorId[ [2:6553:5182]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:45.259958Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 58 ] 2026-01-04T02:33:45.260005Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 58, ReplyToActorId = [2:6553:5182], StatRequests.size() = 1 2026-01-04T02:33:47.226261Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 59 ], ReplyToActorId[ [2:6585:5197]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:47.226599Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 59 ] 2026-01-04T02:33:47.226647Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 59, ReplyToActorId = [2:6585:5197], StatRequests.size() = 1 2026-01-04T02:33:49.090441Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:33:49.237223Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 60 ], ReplyToActorId[ [2:6627:5222]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:49.237518Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 60 ] 2026-01-04T02:33:49.237562Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 60, ReplyToActorId = [2:6627:5222], StatRequests.size() = 1 2026-01-04T02:33:49.612494Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2026-01-04T02:33:49.612579Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.501000s, at schemeshard: 72075186224037899 2026-01-04T02:33:49.612829Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 27, entries count: 1, are all stats full: 1 2026-01-04T02:33:49.628880Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:50.807004Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 61 ], ReplyToActorId[ [2:6667:5243]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:50.807367Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 61 ] 2026-01-04T02:33:50.807417Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 61, ReplyToActorId = [2:6667:5243], StatRequests.size() = 1 2026-01-04T02:33:52.278249Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 62 ], ReplyToActorId[ [2:6701:5258]], StatType[ 0 ], StatRequestsCount[ 1 ] 
2026-01-04T02:33:52.278662Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 62 ] 2026-01-04T02:33:52.278715Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 62, ReplyToActorId = [2:6701:5258], StatRequests.size() = 1 2026-01-04T02:33:52.837797Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:33:52.838070Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 12 2026-01-04T02:33:52.838558Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 2026-01-04T02:33:52.928232Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:33:52.928318Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:33:52.928591Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:33:52.947483Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:33:53.786883Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 63 ], ReplyToActorId[ [2:6741:5281]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:53.787200Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 63 ] 2026-01-04T02:33:53.787245Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 63, ReplyToActorId = [2:6741:5281], StatRequests.size() = 1 2026-01-04T02:33:53.788151Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 64 ], ReplyToActorId[ [2:6744:5284]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:53.788390Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 64 ] 2026-01-04T02:33:53.788440Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 64, ReplyToActorId = [2:6744:5284], StatRequests.size() = 1 Answer: 'HTTP/1.1 200 Ok Content-Type: application/json Connection: Close { "row_count":1000, "bytes_size":11296 }' |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> BasicUsage::SimpleHandlers [GOOD] >> TSchemeShardTest::CopyTableTwiceSimultaneously [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentChanges >> TSchemeShardCheckProposeSize::CopyTable >> TSchemeShardTest::RmDirTwice >> TableCreation::UpdateTableWithAclRollback >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartBeforeCommit_Query [GOOD] >> TxUsage::WriteToTopic_Demo_14_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::TempInTemp [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:233:2060] recipient: [1:227:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:233:2060] recipient: [1:227:2145] Leader for TabletID 72057594046678944 is [1:244:2156] sender: 
[1:245:2060] recipient: [1:227:2145] 2026-01-04T02:32:48.762068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:32:48.762156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:48.762239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:32:48.762289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:32:48.762326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:32:48.762361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:32:48.762421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:48.762480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:32:48.763339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:32:48.763658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:32:48.874163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:32:48.874235Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:48.900728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:32:48.900853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:32:48.901031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:32:48.922559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:32:48.922868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:32:48.923592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:48.924209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:32:48.962862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:48.963082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:48.971375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:48.971461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:48.971552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:48.971595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:48.971639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:48.971885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:32:49.159151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:49.159966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:49.160100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:49.160197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:49.160275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:49.160360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:49.160425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:49.160529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:49.160617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:49.160679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:49.160779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:49.160847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:49.160912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:49.161009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:49.161104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... tModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-04T02:33:58.595657Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-04T02:33:58.595704Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-04T02:33:58.596063Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [7:1090:2909], Recipient [7:244:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:33:58.596137Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:33:58.596183Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-04T02:33:58.596322Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [7:983:2802], Recipient [7:244:2156]: NKikimrScheme.TEvNotifyTxCompletion TxId: 106 2026-01-04T02:33:58.596362Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-04T02:33:58.596427Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-04T02:33:58.596529Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-04T02:33:58.596570Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [7:1088:2907] 2026-01-04T02:33:58.596742Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:1090:2909], Recipient [7:244:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:33:58.596778Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:33:58.596818Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 2026-01-04T02:33:58.597225Z node 
7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [8:957:2104], Recipient [7:244:2156] 2026-01-04T02:33:58.597269Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-04T02:33:58.599909Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 957 RawX2: 34359740472 } AllowCreateInTempDir: false } TxId: 107 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:33:58.600225Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-04T02:33:58.600369Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 107:1, propose status:StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeDir, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-04T02:33:58.600583Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:33:58.602834Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 107, response: Status: StatusPreconditionFailed Reason: "Check failed: path: \'/MyRoot/test/tmp/a/b\', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeDir, state: EPathStateNoChanges)" TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:33:58.603172Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Check failed: path: '/MyRoot/test/tmp/a/b', error: path is temporary (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeDir, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2026-01-04T02:33:58.603244Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 2026-01-04T02:33:58.603670Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2026-01-04T02:33:58.603718Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2026-01-04T02:33:58.604086Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [7:1096:2915], Recipient [7:244:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:33:58.604149Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:33:58.604189Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-04T02:33:58.604318Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [7:983:2802], Recipient [7:244:2156]: 
NKikimrScheme.TEvNotifyTxCompletion TxId: 107 2026-01-04T02:33:58.604350Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-04T02:33:58.604419Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2026-01-04T02:33:58.604520Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-04T02:33:58.604563Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [7:1094:2913] 2026-01-04T02:33:58.604753Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:1096:2915], Recipient [7:244:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:33:58.604791Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:33:58.604828Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2026-01-04T02:33:58.605241Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [8:957:2104], Recipient [7:244:2156] 2026-01-04T02:33:58.605298Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-04T02:33:58.608277Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/test/tmp/a/b" OperationType: ESchemeOpMkDir MkDir { Name: "tmp2" } TempDirOwnerActorId { RawX1: 957 RawX2: 34359740472 } AllowCreateInTempDir: true } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:33:58.608593Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/test/tmp/a/b/tmp2, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-04T02:33:58.608811Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can't be created in another temporary directory., at schemeshard: 72057594046678944 2026-01-04T02:33:58.609044Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:33:58.611431Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusPreconditionFailed Reason: "Can\'t create temporary directory while flag AllowCreateInTempDir is set. Temporary directory can\'t be created in another temporary directory." TxId: 108 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:33:58.611771Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Can't create temporary directory while flag AllowCreateInTempDir is set. 
Temporary directory can't be created in another temporary directory., operation: CREATE DIRECTORY, path: /MyRoot/test/tmp/a/b/tmp2 2026-01-04T02:33:58.611837Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2026-01-04T02:33:58.612287Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2026-01-04T02:33:58.612336Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2026-01-04T02:33:58.612753Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [7:1102:2921], Recipient [7:244:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:33:58.612814Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:33:58.612861Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-04T02:33:58.612995Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [7:983:2802], Recipient [7:244:2156]: NKikimrScheme.TEvNotifyTxCompletion TxId: 108 2026-01-04T02:33:58.613028Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-04T02:33:58.613106Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2026-01-04T02:33:58.613213Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-04T02:33:58.613254Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:1100:2919] 2026-01-04T02:33:58.613436Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:1102:2921], Recipient [7:244:2156]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:33:58.613475Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:33:58.613521Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 108 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::SimpleHandlers [GOOD] Test command err: 2026-01-04T02:32:48.988814Z :WaitEventBlocksBeforeDiscovery INFO: Random seed for debugging is 1767493968988780 2026-01-04T02:32:49.539505Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328794429498683:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:49.539662Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:49.619801Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:49.639776Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:49.638899Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328795078804820:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:49.638947Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:49.741409Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:50.166984Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:50.235608Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:50.310011Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:50.310121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:50.310346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:50.310406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:50.326768Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:50.326900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:50.357842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:50.530993Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:50.536481Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:50.549773Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:50.550697Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:50.739568Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:50.858938Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/002da2/r3tmp/yandex4OTLqt.tmp 2026-01-04T02:32:50.858965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
/home/runner/.ya/build/build_root/c5zo/002da2/r3tmp/yandex4OTLqt.tmp 2026-01-04T02:32:50.867774Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/002da2/r3tmp/yandex4OTLqt.tmp 2026-01-04T02:32:50.867891Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:50.963216Z INFO: TTestServer started on Port 64951 GrpcPort 7979 PQClient connected to localhost:7979 2026-01-04T02:32:51.262004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:54.312970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328815904336441:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.313121Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.313355Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328815904336453:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.313449Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328815904336456:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.313738Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.318564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:32:54.362130Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328815904336458:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-04T02:32:54.539376Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328794429498683:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:54.539451Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:32:54.641899Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591328795078804820:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:54.641960Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:32:54.659436Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328815904336544:3023] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:54.724024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:54.740430Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328815904336561:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:32:54.743236Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MTg2ZmE0MzgtNTFiMjE4NzQtMWYxNTViZDMtZWJjMDY5NTE=, ActorId: [1:7591328815904336439:2330], ActorState: ExecuteState, LegacyTraceId: 01ke3dke97d6bm4xv2yhw5a6cg, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:32:54.746414Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:32:54.752503Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591328816553641646:2308], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:32:54.754268Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=NGZlZjRjNmMtYzIwOWVlOS1hNWNjN2IwZi00OTk0YTZhMg==, ActorId: [2:7591328816553641605:2302], ActorState: ExecuteState, LegacyTraceId: 01ke3dkeaad0va8jbh1h3jmyk1, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At funct ... 208-3d7b0c] Closing read session. Close timeout: 0.000000s 2026-01-04T02:33:54.963642Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:299:0 2026-01-04T02:33:54.963674Z :INFO: [/Root] [/Root] [9bee4226-fe81bc3f-d36cc208-3d7b0c] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1935 BytesRead: 4936800 MessagesRead: 300 BytesReadCompressed: 4936800 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:33:54.963716Z :NOTICE: [/Root] [/Root] [9bee4226-fe81bc3f-d36cc208-3d7b0c] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:33:54.963211Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer shared/user session shared/user_3_2_16888007410599802486_v1 grpc read done: success# 0, data# { } 2026-01-04T02:33:54.963239Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 2 consumer shared/user session shared/user_3_2_16888007410599802486_v1 grpc read failed 2026-01-04T02:33:54.963266Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 2 consumer shared/user session shared/user_3_2_16888007410599802486_v1 grpc closed 2026-01-04T02:33:54.963290Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 2 consumer shared/user session shared/user_3_2_16888007410599802486_v1 is DEAD 2026-01-04T02:33:54.964138Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 3 consumer shared/user session shared/user_3_3_4046596484087256739_v1 grpc read done: success# 0, data# { } 2026-01-04T02:33:54.964150Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 3 consumer shared/user session shared/user_3_3_4046596484087256739_v1 grpc read failed 2026-01-04T02:33:54.964166Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 3 consumer shared/user session shared/user_3_3_4046596484087256739_v1 grpc closed 2026-01-04T02:33:54.964182Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 3 consumer shared/user session shared/user_3_3_4046596484087256739_v1 is DEAD 2026-01-04T02:33:54.965175Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: src_id|2feb05d5-4d7b3e4e-ed179671-7389146_0 grpc read done: success: 0 data: 2026-01-04T02:33:54.965193Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: src_id|2feb05d5-4d7b3e4e-ed179671-7389146_0 grpc read failed 2026-01-04T02:33:54.965220Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 2 sessionId: src_id|2feb05d5-4d7b3e4e-ed179671-7389146_0 grpc closed 2026-01-04T02:33:54.965233Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: src_id|2feb05d5-4d7b3e4e-ed179671-7389146_0 is DEAD 2026-01-04T02:33:54.966481Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:33:54.966577Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591329070488639353:2481] disconnected. 2026-01-04T02:33:54.966601Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591329070488639353:2481] disconnected; active server actors: 1 2026-01-04T02:33:54.966636Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591329070488639353:2481] client user disconnected session shared/user_3_2_16888007410599802486_v1 2026-01-04T02:33:54.966682Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2026-01-04T02:33:54.966732Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1259: [72075186224037893][rt3.dc1--test-topic] consumer user balancing. 
Sessions=2, Families=1, UnreadableFamilies=0 [], RequireBalancing=0 [] 2026-01-04T02:33:54.966758Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1326: [72075186224037893][rt3.dc1--test-topic] consumer user start rebalancing. familyCount=1, sessionCount=2, desiredFamilyCount=0, allowPlusOne=1 2026-01-04T02:33:54.966786Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1404: [72075186224037893][rt3.dc1--test-topic] consumer user balancing duration: 0.000033s 2026-01-04T02:33:54.966847Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_6538808989369146330_v1 grpc read done: success# 0, data# { } 2026-01-04T02:33:54.966856Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_3_1_6538808989369146330_v1 grpc read failed 2026-01-04T02:33:54.966873Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_3_1_6538808989369146330_v1 grpc closed 2026-01-04T02:33:54.966910Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_3_1_6538808989369146330_v1 is DEAD 2026-01-04T02:33:54.967812Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [3:7591329070488639404:2492] destroyed 2026-01-04T02:33:54.967869Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-04T02:33:54.967913Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:54.967930Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:54.967946Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:54.967965Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:54.967979Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:54.968631Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591329070488639356:2482] disconnected. 2026-01-04T02:33:54.968658Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591329070488639356:2482] disconnected; active server actors: 1 2026-01-04T02:33:54.968677Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591329070488639356:2482] client user disconnected session shared/user_3_3_4046596484087256739_v1 2026-01-04T02:33:54.968708Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2026-01-04T02:33:54.968738Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591329070488639357:2480] disconnected. 
2026-01-04T02:33:54.968752Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591329070488639357:2480] disconnected; active server actors: 1 2026-01-04T02:33:54.968769Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591329070488639357:2480] client user disconnected session shared/user_3_1_6538808989369146330_v1 2026-01-04T02:33:54.970022Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_6538808989369146330_v1 2026-01-04T02:33:54.970093Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [3:7591329070488639365:2493] destroyed 2026-01-04T02:33:54.970157Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_1_6538808989369146330_v1 2026-01-04T02:33:55.034120Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:55.034162Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:55.034178Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:55.034204Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:55.034223Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:55.125938Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:33:55.125973Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:55.133927Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:55.133970Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:55.133983Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:55.134003Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:55.134018Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:55.237927Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:33:55.237965Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:55.237980Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:33:55.237999Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:33:55.238012Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:33:55.305960Z node 3 :KQP_EXECUTER ERROR: 
{KQPEX@kqp_executer_impl.h:1259} ActorId: [3:7591329079078574140:2506] TxId: 281474976710678. Ctx: { TraceId: 01ke3dn9dd40gn7wd2z7rx3tmj, Database: /Root, SessionId: ydb://session/3?node_id=3&id=NzI2YzM2OC03MTA3MTYxMC1iMjY1MDEwMy0zMTJmYmFiZQ==, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 4 trace_id# 2026-01-04T02:33:55.310550Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7591329079078574144:2506], TxId: 281474976710678, task: 3. Ctx: { CheckpointId : . TraceId : 01ke3dn9dd40gn7wd2z7rx3tmj. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=NzI2YzM2OC03MTA3MTYxMC1iMjY1MDEwMy0zMTJmYmFiZQ==. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7591329079078574140:2506], status: UNAVAILABLE, reason: {
: Error: Terminate execution } >> TSchemeShardTest::InitRootAgain |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_background_cleaning/unittest |87.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> TSchemeShardTest::CopyTableAndConcurrentChanges [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplit >> TSubDomainTest::CreateTableInsideAndForceDeleteSubDomain [GOOD] >> TSubDomainTest::CreateTableInsideSubDomain >> TxUsage::WriteToTopic_Demo_16_Table >> TSchemeShardCheckProposeSize::CopyTable [GOOD] >> TSchemeShardCheckProposeSize::CopyTables >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Table >> KqpProxy::PassErrroViaSessionActor [GOOD] >> KqpProxy::NodeDisconnectedTest >> TSchemeShardTest::RmDirTwice [GOOD] >> TSchemeShardTest::TopicMeteringMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsidetThenStopTenantAndForceDeleteSubDomain [GOOD] Test command err: 2026-01-04T02:33:38.279791Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329005692631474:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:38.283459Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:38.316463Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:38.334623Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591329003400847716:2262];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:38.347819Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:38.714055Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:38.718203Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:38.714158Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:33:38.786088Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:38.967822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:38.967916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:38.968334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: 
Unknown -> Disconnected 2026-01-04T02:33:38.968399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:38.972990Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:38.973080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:38.985414Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:33:38.985634Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:38.986885Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:38.989691Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-04T02:33:38.992867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:39.087570Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:33:39.115859Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:39.122997Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:33:39.142221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:33:39.289371Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:39.341971Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:39.367743Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:39.554564Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591329005692631678:2146] Handle TEvNavigate describe path dc-1 2026-01-04T02:33:39.554620Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591329009987599467:2470] HANDLE EvNavigateScheme dc-1 2026-01-04T02:33:39.554680Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591329005692631696:2150], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:39.554751Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591329005692631930:2311][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# 
[1:7591329005692631696:2150], cookie# 1 2026-01-04T02:33:39.556295Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329005692631978:2311][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329005692631975:2311], cookie# 1 2026-01-04T02:33:39.556350Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329005692631979:2311][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329005692631976:2311], cookie# 1 2026-01-04T02:33:39.556378Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329005692631980:2311][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329005692631977:2311], cookie# 1 2026-01-04T02:33:39.556409Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329005692631326:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329005692631979:2311], cookie# 1 2026-01-04T02:33:39.556471Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329005692631329:2059] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329005692631980:2311], cookie# 1 2026-01-04T02:33:39.556515Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329005692631979:2311][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329005692631326:2056], cookie# 1 2026-01-04T02:33:39.556533Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329005692631980:2311][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329005692631329:2059], cookie# 1 2026-01-04T02:33:39.556617Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329005692631930:2311][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329005692631976:2311], cookie# 1 2026-01-04T02:33:39.556640Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591329005692631930:2311][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:33:39.556662Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329005692631930:2311][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329005692631977:2311], cookie# 1 2026-01-04T02:33:39.556683Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591329005692631930:2311][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:33:39.556755Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591329005692631696:2150], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:33:39.557872Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329005692631323:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329005692631978:2311], cookie# 1 2026-01-04T02:33:39.557964Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329005692631978:2311][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329005692631323:2053], cookie# 1 2026-01-04T02:33:39.557994Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329005692631930:2311][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329005692631975:2311], cookie# 1 2026-01-04T02:33:39.558008Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591329005692631930:2311][/dc-1] Sync cookie mismatch: sender# [1:7591329005692631975:2311], cookie# 1, current cookie# 0 2026-01-04T02:33:39.563467Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591329005692631696:2150], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591329005692631930:2311] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:33:39.563798Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591329005692631696:2150], cacheItem# { Subscriber: { Subscriber: [1:7591329005692631930:2311] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:33:39.566293Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591329009987599468:2471], recipient# [1:7591329009987599467:2470], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: ... 
ndle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [7:7591329056246689423:2056] 2026-01-04T02:33:56.981873Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7591329056246689417:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7591329082016494535:2871] 2026-01-04T02:33:56.981905Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7591329056246689417:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7591329082016494541:2872] 2026-01-04T02:33:56.981923Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7591329056246689417:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7591329082016494547:2873] 2026-01-04T02:33:56.981944Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7591329056246689420:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7591329082016494536:2871] 2026-01-04T02:33:56.981958Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7591329056246689420:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7591329082016494542:2872] 2026-01-04T02:33:56.981972Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7591329056246689420:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7591329082016494548:2873] 2026-01-04T02:33:56.981987Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7591329056246689423:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7591329082016494537:2871] 2026-01-04T02:33:56.982025Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7591329056246689423:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7591329082016494543:2872] 2026-01-04T02:33:56.982046Z node 7 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [7:7591329056246689423:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [7:7591329082016494549:2873] 2026-01-04T02:33:56.982113Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [7:7591329060541657010:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2026-01-04T02:33:56.982209Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [7:7591329060541657010:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [7:7591329082016494529:2871] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:33:56.982329Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7591329060541657010:2129], cacheItem# { Subscriber: { Subscriber: [7:7591329082016494529:2871] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 
2026-01-04T02:33:56.982364Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [7:7591329060541657010:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-01-04T02:33:56.982425Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [7:7591329060541657010:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [7:7591329082016494530:2872] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:33:56.982486Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7591329060541657010:2129], cacheItem# { Subscriber: { Subscriber: [7:7591329082016494530:2872] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:56.982688Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7591329082016494550:2874], recipient# [7:7591329082016494525:2310], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:56.989824Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][7:7591329082016494531:2873][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [7:7591329082016494544:2873] 2026-01-04T02:33:56.990001Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][7:7591329082016494531:2873][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [7:7591329082016494545:2873] 2026-01-04T02:33:56.990045Z node 7 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][7:7591329082016494531:2873][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [7:7591329060541657010:2129], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:56.990089Z node 7 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: 
[main][7:7591329082016494531:2873][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [7:7591329082016494546:2873] 2026-01-04T02:33:56.990123Z node 7 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][7:7591329082016494531:2873][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [7:7591329060541657010:2129], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:56.990231Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [7:7591329060541657010:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-04T02:33:56.990332Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [7:7591329060541657010:2129], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [7:7591329082016494531:2873] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:33:56.990462Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7591329060541657010:2129], cacheItem# { Subscriber: { Subscriber: [7:7591329082016494531:2873] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:56.990564Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7591329082016494551:2875], recipient# [7:7591329082016494527:2312], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:57.142796Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [7:7591329060541657010:2129], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:57.142960Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [7:7591329060541657010:2129], cacheItem# { 
Subscriber: { Subscriber: [7:7591329064836624822:2449] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:33:57.143042Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7591329086311461849:2876], recipient# [7:7591329086311461848:2314], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |87.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |87.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_user_attributes_reboots/core-tx-schemeshard-ut_user_attributes_reboots >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Table >> TSchemeShardTest::InitRootAgain [GOOD] >> TSchemeShardTest::InitRootWithOwner >> TSchemeShardTest::AlterTableDropColumnReCreateSplit >> TableCreation::ConcurrentTableCreation [GOOD] >> TableCreation::ConcurrentMultipleTablesCreation >> TSchemeShardTest::TopicMeteringMode [GOOD] >> TSchemeShardTest::Restart >> TSchemeShardTest::CopyTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentMerge >> TSchemeShardTest::Boot |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |87.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/ydb-core-tx-schemeshard-ut_continuous_backup |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |87.7%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/ydb-core-tx-datashard-ut_incremental_restore_scan >> TSchemeShardTest::InitRootWithOwner [GOOD] >> TSchemeShardTest::DropTableTwice >> TableCreation::TableCreationWithAcl [GOOD] >> TableCreation::UpdateTableWithAclModification >> TSubDomainTest::CoordinatorRunAtSubdomainNodeWhenAvailable2 [GOOD] >> TSubDomainTest::ConsistentCopyTable >> KqpBatchDelete::Large_2 [GOOD] |87.7%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |87.7%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots |87.7%| [LD] {BAZEL_UPLOAD, SKIPPED} 
$(B)/ydb/core/tx/schemeshard/ut_filestore_reboots/ydb-core-tx-schemeshard-ut_filestore_reboots >> TSchemeShardTest::Boot [GOOD] >> TSchemeShardTest::CacheEffectiveACL [GOOD] >> TSchemeShardTest::ConsistentCopyTable >> BasicUsage::CreateTopicWithSharedConsumer_KeepMessagesOrder_True [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_MoveDeadLetterPolicy >> TableCreation::SimpleTableCreation [GOOD] >> TableCreation::SimpleUpdateTable >> TSchemeShardTest::AlterTableDropColumnReCreateSplit [GOOD] >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate >> TSchemeShardTest::Restart [GOOD] >> TSchemeShardTest::SchemeErrors >> ScriptExecutionsTest::RunCheckLeaseStatus [GOOD] >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring >> TSchemeShardTest::CopyTableAndConcurrentMerge [GOOD] >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge >> TSchemeShardTest::DropTableTwice [GOOD] >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously >> TxUsage::WriteToTopic_Demo_24_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_2 [GOOD] Test command err: Trying to start YDB, gRPC: 2775, MsgBus: 18118 2026-01-04T02:33:38.413888Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329006502542322:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:38.413964Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:38.473625Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:38.752960Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:38.753064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:38.757005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:38.822238Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:38.873393Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:38.876410Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329006502542296:2081] 1767494018412352 != 1767494018412355 2026-01-04T02:33:38.990165Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:38.990217Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:38.990225Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:38.990330Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:39.019884Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed 
to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:39.431419Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:39.464956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:39.471304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:33:39.506374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:39.625291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:39.772481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:39.842110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.575482Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329019387446055:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.575633Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.576148Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329019387446065:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.576212Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.959159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.996338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.033590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.070172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.106716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.157279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.235465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.305734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.401474Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329023682414237:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.401571Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.401917Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329023682414242:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.401966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329023682414243:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.402004Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.406820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:42.421033Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329023682414246:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:33:42.504216Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329023682414299:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:43.417939Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329006502542322:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:43.418023Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence ... 46131:7762515]; 2026-01-04T02:33:51.069165Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:51.069313Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:51.309902Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:51.332700Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:51.369936Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329057866464131:2081] 1767494030947295 != 1767494030947298 2026-01-04T02:33:51.376775Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:51.376862Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:51.381292Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:51.637912Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:51.650375Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:51.650398Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:51.650411Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:51.650479Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:52.025879Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:52.198045Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:52.232182Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:52.336841Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:52.539640Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:52.634181Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.976826Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329057866464241:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:55.976919Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:33:56.071217Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329083636269771:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:56.071320Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:56.071874Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329083636269781:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:56.071949Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:56.195966Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:56.250632Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:56.296947Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:56.347313Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:56.401473Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:56.469672Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:56.542194Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:56.602224Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:56.697374Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329083636270659:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:56.697487Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:56.698181Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329083636270664:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:56.698238Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329083636270665:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:56.698277Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:56.702102Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:56.715879Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329083636270668:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:33:56.816087Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329083636270719:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:59.569462Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TSchemeShardTest::SchemeErrors [GOOD] >> TSchemeShardTest::SerializedCellVec >> TxUsage::Sinks_Oltp_WriteToTopic_3_Table [GOOD] >> TSchemeShardTest::SerializedCellVec [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate >> TxUsage::WriteToTopic_Demo_24_Query >> TSchemeShardTest::DropIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::IgnoreUserColumnIds |87.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator >> TSchemeShardTest::AlterTableDropColumnSplitThenReCreate [GOOD] >> TSchemeShardTest::AlterTableKeyColumns |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator |87.8%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/ydb-core-tx-scheme_board-ut_populator >> TableCreation::MultipleTablesCreation [GOOD] >> TableCreation::ConcurrentUpdateTable >> TxUsage::Sinks_Oltp_WriteToTopic_3_Query >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldNotUpdate [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate >> KqpProxy::LoadedMetadataAfterCompilationTimeout [GOOD] >> KqpProxy::NoLocalSessionExecution >> TSchemeShardTest::CopyTableAndConcurrentSplitMerge [GOOD] >> TSchemeShardTest::CopyTableWithAlterConfig >> TSchemeShardTest::ConsistentCopyTable [GOOD] >> TSchemeShardTest::ConsistentCopyTableAwait >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] >> TSchemeShardTest::IgnoreUserColumnIds [GOOD] >> TSchemeShardTest::DropTableAndConcurrentSplit >> TableCreation::UpdateTableWithAclRollback [GOOD] >> TestScriptExecutionsUtils::TestRetryPolicyItem [GOOD] >> TestScriptExecutionsUtils::TestRetryLimiter [GOOD] >> TSchemeShardTest::AlterTableKeyColumns [GOOD] >> TSchemeShardTest::AlterTableConfig |87.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |87.8%| [LD] {RESULT} $(B)/ydb/core/engine/ut/ydb-core-engine-ut |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/engine/ut/ydb-core-engine-ut >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false >> TSchemeShardTest::UpdateChannelsBindingSolomonShouldUpdate [GOOD] >> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig >> TSchemeShardTest::ConsistentCopyTableAwait [GOOD] >> TSchemeShardTest::ConsistentCopyTableRejects >> KqpProxy::CreatesScriptExecutionsTable [GOOD] >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag >> DirectReadWithServer::KillPQTablet [GOOD] >> DirectReadWithServer::KillPQRBTablet [GOOD] >> LocalPartition::Restarts ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/proxy_service/ut/unittest >> TestScriptExecutionsUtils::TestRetryLimiter [GOOD] Test command err: 2026-01-04T02:33:59.187328Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329096804793192:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:59.187385Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:34:00.034933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:34:00.035042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:34:00.102958Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:34:00.274010Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329092509825671:2081] 1767494039004978 != 1767494039004981 2026-01-04T02:34:00.365062Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:00.373373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:34:01.090544Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:34:01.090567Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:34:01.090574Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:34:01.090658Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:34:01.213445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:34:01.220997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:34:04.177994Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329096804793192:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:34:04.178068Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:34:04.300103Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:34:04.352725Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:34:04.352777Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 
2026-01-04T02:34:04.352814Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:34:04.356090Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7591329118279630295:2497] Owner: [1:7591329118279630293:2496]. Describe result: PathErrorUnknown 2026-01-04T02:34:04.356104Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7591329118279630295:2497] Owner: [1:7591329118279630293:2496]. Creating table 2026-01-04T02:34:04.356154Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7591329118279630295:2497] Owner: [1:7591329118279630293:2496]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-04T02:34:04.356350Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7591329118279630296:2498] Owner: [1:7591329118279630293:2496]. Describe result: PathErrorUnknown 2026-01-04T02:34:04.356371Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7591329118279630296:2498] Owner: [1:7591329118279630293:2496]. Creating table 2026-01-04T02:34:04.356386Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7591329118279630296:2498] Owner: [1:7591329118279630293:2496]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-04T02:34:04.356416Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7591329118279630297:2499] Owner: [1:7591329118279630293:2496]. Describe result: PathErrorUnknown 2026-01-04T02:34:04.356420Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7591329118279630297:2499] Owner: [1:7591329118279630293:2496]. Creating table 2026-01-04T02:34:04.356435Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7591329118279630297:2499] Owner: [1:7591329118279630293:2496]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-04T02:34:04.361965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:04.364360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:04.372397Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7591329118279630297:2499] Owner: [1:7591329118279630293:2496]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-04T02:34:04.372461Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7591329118279630297:2499] Owner: [1:7591329118279630293:2496]. 
Subscribe on create table tx: 281474976715660 2026-01-04T02:34:04.377303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:04.380921Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7591329118279630296:2498] Owner: [1:7591329118279630293:2496]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-04T02:34:04.380968Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7591329118279630296:2498] Owner: [1:7591329118279630293:2496]. Subscribe on create table tx: 281474976715659 2026-01-04T02:34:04.391845Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7591329118279630295:2497] Owner: [1:7591329118279630293:2496]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-04T02:34:04.391881Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7591329118279630295:2497] Owner: [1:7591329118279630293:2496]. Subscribe on create table tx: 281474976715658 2026-01-04T02:34:04.394052Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7591329118279630295:2497] Owner: [1:7591329118279630293:2496]. Subscribe on tx: 281474976715658 registered 2026-01-04T02:34:04.394125Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7591329118279630297:2499] Owner: [1:7591329118279630293:2496]. Subscribe on tx: 281474976715660 registered 2026-01-04T02:34:04.394321Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7591329118279630296:2498] Owner: [1:7591329118279630293:2496]. Subscribe on tx: 281474976715659 registered 2026-01-04T02:34:04.515961Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7591329118279630297:2499] Owner: [1:7591329118279630293:2496]. Request: create. Transaction completed: 281474976715660. Doublechecking... 2026-01-04T02:34:04.590309Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7591329118279630297:2499] Owner: [1:7591329118279630293:2496]. Table already exists, number of columns: 7, has SecurityObject: true 2026-01-04T02:34:04.590351Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7591329118279630297:2499] Owner: [1:7591329118279630293:2496]. Column diff is empty, finishing 2026-01-04T02:34:04.591387Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7591329118279630297:2499] Owner: [1:7591329118279630293:2496]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2026-01-04T02:34:04.592091Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7591329118279630296:2498] Owner: [1:7591329118279630293:2496]. Request: create. Transaction completed: 281474976715659. Doublechecking... 
2026-01-04T02:34:04.592557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:34:04.593627Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7591329118279630295:2497] Owner: [1:7591329118279630293:2496]. Request: create. Transaction completed: 281474976715658. Doublechecking... 2026-01-04T02:34:04.593870Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7591329118279630297:2499] Owner: [1:7591329118279630293:2496]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976715661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-04T02:34:04.593910Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table result_sets updater. SelfId: [1:7591329118279630297:2499] Owner: [1:7591329118279630293:2496]. Successful alter request: ExecComplete 2026-01-04T02:34:04.662193Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7591329118279630295:2497] Owner: [1:7591329118279630293:2496]. Table already exists, number of columns: 33, has SecurityObject: true 2026-01-04T02:34:04.662321Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7591329118279630295:2497] Owner: [1:7591329118279630293:2496]. Column diff is empty, finishing 2026-01-04T02:34:04.662425Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7591329118279630295:2497] Owner: [1:7591329118279630293:2496]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/s ... DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [1:7591329135459500486:2973], ActorId: [1:7591329135459500487:2974], TraceId: ExecutionId: c4c1855b-84886781-11737785-cc602f32, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2026-01-04T02:34:08.336949Z node 1 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [1:7591329135459500485:2972], ActorId: [1:7591329135459500486:2973], TraceId: ExecutionId: c4c1855b-84886781-11737785-cc602f32, RequestDatabase: /dc-1, Got response [1:7591329135459500487:2974] SUCCESS 2026-01-04T02:34:08.336998Z node 1 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [1:7591329135459500484:2971] ActorId: [1:7591329135459500485:2972] Database: /dc-1 ExecutionId: c4c1855b-84886781-11737785-cc602f32. Extracted script execution operation [1:7591329135459500487:2974], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [1:7591329118279630572:2679], LeaseGeneration: 0 2026-01-04T02:34:08.337047Z node 1 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [1:7591329135459500484:2971] ActorId: [1:7591329135459500485:2972] Database: /dc-1 ExecutionId: c4c1855b-84886781-11737785-cc602f32. 
Reply success 2026-01-04T02:34:08.337375Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=ODM3YzNkNTQtN2JmMGMxZDItNGNkZjlhNjgtYmQ4NTc3MTI=, workerId: [1:7591329135459500489:2530], local sessions count: 0 2026-01-04T02:34:08.386732Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3dnpm21xr5tfe27r08vt8m", Request has 18444976579661.164914s seconds to be completed 2026-01-04T02:34:08.388725Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3dnpm21xr5tfe27r08vt8m", Created new session, sessionId: ydb://session/3?node_id=1&id=YTJmZTA4ZjQtOGIyYTU3YTctMWVlOTY4NmEtNTMyMWY4Y2Q=, workerId: [1:7591329135459500525:2545], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-04T02:34:08.388891Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3dnpm21xr5tfe27r08vt8m 2026-01-04T02:34:08.409017Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3dnpmr9v2km6v03nh6bmfd, Database: /dc-1, SessionId: ydb://session/3?node_id=1&id=YTJmZTA4ZjQtOGIyYTU3YTctMWVlOTY4NmEtNTMyMWY4Y2Q=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 44, targetId: [1:7591329135459500525:2545] 2026-01-04T02:34:08.409065Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 44 timeout: 600.000000s actor id: [1:7591329135459500528:2984] 2026-01-04T02:34:08.433189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:34:08.440160Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3dnpmr9v2km6v03nh6bmfd", Forwarded response to sender actor, requestId: 44, sender: [1:7591329135459500527:2546], selfId: [1:7591329096804793231:2266], source: [1:7591329135459500525:2545] --------------------------- INIT FINISHED --------------------------- 2026-01-04T02:34:08.453756Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. SelfId: [1:7591329135459500580:3032] Owner: [1:7591329135459500579:3031]. Describe result: PathErrorUnknown 2026-01-04T02:34:08.453778Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table test_table updater. SelfId: [1:7591329135459500580:3032] Owner: [1:7591329135459500579:3031]. Creating table 2026-01-04T02:34:08.453843Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [1:7591329135459500580:3032] Owner: [1:7591329135459500579:3031]. Created ESchemeOpCreateTable transaction for path: /dc-1/test/test_table 2026-01-04T02:34:08.456502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715688:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:08.457573Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [1:7591329135459500580:3032] Owner: [1:7591329135459500579:3031]. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715688 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 46 } 2026-01-04T02:34:08.457605Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [1:7591329135459500580:3032] Owner: [1:7591329135459500579:3031]. Subscribe on create table tx: 281474976715688 2026-01-04T02:34:08.460950Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [1:7591329135459500580:3032] Owner: [1:7591329135459500579:3031]. Subscribe on tx: 281474976715688 registered 2026-01-04T02:34:08.533564Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [1:7591329135459500580:3032] Owner: [1:7591329135459500579:3031]. Request: create. Transaction completed: 281474976715688. Doublechecking... 2026-01-04T02:34:08.585312Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [1:7591329135459500580:3032] Owner: [1:7591329135459500579:3031]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-04T02:34:08.585348Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [1:7591329135459500580:3032] Owner: [1:7591329135459500579:3031]. Column diff is empty, finishing 2026-01-04T02:34:08.619016Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3dnpva9nfgaa7qjxg7n4nk", Request has 18444976579660.932633s seconds to be completed 2026-01-04T02:34:08.621043Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3dnpva9nfgaa7qjxg7n4nk", Created new session, sessionId: ydb://session/3?node_id=1&id=OTEwZjA2NGQtYjQ4Y2VkMzQtYjdlOGQ4OTktYTc2MmFlN2Q=, workerId: [1:7591329135459500671:2555], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-04T02:34:08.626039Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3dnpva9nfgaa7qjxg7n4nk 2026-01-04T02:34:08.657017Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [1:7591329135459500677:3096] Owner: [1:7591329135459500676:3095]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-04T02:34:08.657052Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:444: Table test_table updater. SelfId: [1:7591329135459500677:3096] Owner: [1:7591329135459500676:3095]. Adding columns. New columns: col4, col5. Existing columns: col1, col2, col3 2026-01-04T02:34:08.657103Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [1:7591329135459500677:3096] Owner: [1:7591329135459500676:3095]. Created ESchemeOpAlterTable transaction for path: /dc-1/test/test_table 2026-01-04T02:34:08.659914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715689:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:34:08.661352Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [1:7591329135459500677:3096] Owner: [1:7591329135459500676:3095]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715689 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 } 2026-01-04T02:34:08.661388Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [1:7591329135459500677:3096] Owner: [1:7591329135459500676:3095]. 
Subscribe on create table tx: 281474976715689 2026-01-04T02:34:08.661685Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=OTEwZjA2NGQtYjQ4Y2VkMzQtYjdlOGQ4OTktYTc2MmFlN2Q=, workerId: [1:7591329135459500671:2555], local sessions count: 1 2026-01-04T02:34:08.666763Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [1:7591329135459500677:3096] Owner: [1:7591329135459500676:3095]. Subscribe on tx: 281474976715689 registered 2026-01-04T02:34:08.684287Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [1:7591329135459500677:3096] Owner: [1:7591329135459500676:3095]. Request: alter. Transaction completed: 281474976715689. Doublechecking... 2026-01-04T02:34:08.742843Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [1:7591329135459500677:3096] Owner: [1:7591329135459500676:3095]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:34:08.742875Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [1:7591329135459500677:3096] Owner: [1:7591329135459500676:3095]. Column diff is empty, finishing 2026-01-04T02:34:08.742950Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [1:7591329135459500677:3096] Owner: [1:7591329135459500676:3095]. Created ESchemeOpModifyACL transaction for path: /dc-1/test/test_table 2026-01-04T02:34:08.743804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715690:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:34:08.745474Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [1:7591329135459500677:3096] Owner: [1:7591329135459500676:3095]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976715690 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-04T02:34:08.745488Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table test_table updater. SelfId: [1:7591329135459500677:3096] Owner: [1:7591329135459500676:3095]. 
Successful alter request: ExecComplete 2026-01-04T02:34:08.786162Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3dnq0b0cttpxpw6q6xa26m", Request has 18444976579660.765493s seconds to be completed 2026-01-04T02:34:08.788215Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3dnq0b0cttpxpw6q6xa26m", Created new session, sessionId: ydb://session/3?node_id=1&id=NjlhNzdjYWMtMzQ5NDJlNzEtMzE4NDFlNGMtMjE3ZTYwYzE=, workerId: [1:7591329135459500720:2563], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-04T02:34:08.788406Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3dnq0b0cttpxpw6q6xa26m 2026-01-04T02:34:08.834921Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=NjlhNzdjYWMtMzQ5NDJlNzEtMzE4NDFlNGMtMjE3ZTYwYzE=, workerId: [1:7591329135459500720:2563], local sessions count: 1 2026-01-04T02:34:08.839485Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=1&id=YTJmZTA4ZjQtOGIyYTU3YTctMWVlOTY4NmEtNTMyMWY4Y2Q=, workerId: [1:7591329135459500525:2545], local sessions count: 0 |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::CreateTableInsideSubDomain [GOOD] Test command err: 2026-01-04T02:33:37.828298Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328999769345136:2158];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:37.828369Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:37.876811Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:38.195783Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:38.225966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:38.226108Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:38.233506Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:38.326879Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:38.492715Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:33:38.537046Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591328999769345269:2137] Handle TEvNavigate describe path dc-1 2026-01-04T02:33:38.537108Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591329004064313036:2449] HANDLE EvNavigateScheme dc-1 2026-01-04T02:33:38.537206Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle 
TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591328999769345297:2152], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:38.537299Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591329004064312832:2308][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591328999769345297:2152], cookie# 1 2026-01-04T02:33:38.539011Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329004064312864:2308][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004064312861:2308], cookie# 1 2026-01-04T02:33:38.539054Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329004064312865:2308][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004064312862:2308], cookie# 1 2026-01-04T02:33:38.539069Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329004064312866:2308][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004064312863:2308], cookie# 1 2026-01-04T02:33:38.539118Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328999769344925:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004064312864:2308], cookie# 1 2026-01-04T02:33:38.539151Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328999769344928:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004064312865:2308], cookie# 1 2026-01-04T02:33:38.539234Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591328999769344931:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004064312866:2308], cookie# 1 2026-01-04T02:33:38.539300Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329004064312864:2308][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328999769344925:2051], cookie# 1 2026-01-04T02:33:38.539339Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329004064312865:2308][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328999769344928:2054], cookie# 1 2026-01-04T02:33:38.539359Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329004064312866:2308][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591328999769344931:2057], cookie# 1 2026-01-04T02:33:38.539395Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329004064312832:2308][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329004064312861:2308], cookie# 1 2026-01-04T02:33:38.539432Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591329004064312832:2308][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:33:38.539455Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329004064312832:2308][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster 
State: { } }: sender# [1:7591329004064312862:2308], cookie# 1 2026-01-04T02:33:38.539479Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591329004064312832:2308][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:33:38.539523Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329004064312832:2308][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329004064312863:2308], cookie# 1 2026-01-04T02:33:38.539554Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591329004064312832:2308][/dc-1] Sync cookie mismatch: sender# [1:7591329004064312863:2308], cookie# 1, current cookie# 0 2026-01-04T02:33:38.539611Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591328999769345297:2152], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:33:38.554572Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591328999769345297:2152], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591329004064312832:2308] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:33:38.554741Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591328999769345297:2152], cacheItem# { Subscriber: { Subscriber: [1:7591329004064312832:2308] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:33:38.557677Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591329004064313037:2450], recipient# [1:7591329004064313036:2449], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:33:38.557768Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591329004064313036:2449] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:33:38.614128Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591329004064313036:2449] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { 
ShowPrivateTable: true } 2026-01-04T02:33:38.617297Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591329004064313036:2449] Handle TEvDescribeSchemeResult Forward to# [1:7591329004064313035:2448] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:33:38.627610Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591328999769345269:2137] Handle TEvProposeTransaction 2026-01-04T02:33:38.627663Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591328999769345269:2137] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:33:38.627752Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591328999769345269:2137] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7591329004064313043:2455] 2026-01-04T02:33:38.770197Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591329004064313043:2455] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: ... 
rsion: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:34:07.625793Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7591329109225601186:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7591329130700438884:2751] 2026-01-04T02:34:07.625834Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7591329109225601189:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7591329130700438885:2751] 2026-01-04T02:34:07.625851Z node 6 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [6:7591329109225601192:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [6:7591329130700438886:2751] 2026-01-04T02:34:07.625909Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [6:7591329109225601499:2124], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-04T02:34:07.626000Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [6:7591329109225601499:2124], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [6:7591329130700438880:2751] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:34:07.626121Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7591329109225601499:2124], cacheItem# { Subscriber: { Subscriber: [6:7591329130700438880:2751] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:34:07.626223Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7591329130700438887:2752], recipient# [6:7591329130700438879:2311], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:08.130181Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7591329109225601499:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:08.130349Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7591329109225601499:2124], cacheItem# { Subscriber: { Subscriber: [6:7591329113520569385:2529] 
DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:34:08.130468Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7591329134995406193:2758], recipient# [6:7591329134995406192:2312], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:08.191043Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7591329109225601499:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:08.191203Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7591329109225601499:2124], cacheItem# { Subscriber: { Subscriber: [6:7591329113520569385:2529] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:34:08.191317Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7591329134995406195:2759], recipient# [6:7591329134995406194:2313], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:08.631064Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7591329109225601499:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:08.631246Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7591329109225601499:2124], cacheItem# { Subscriber: { Subscriber: [6:7591329130700438880:2751] 
DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:34:08.631407Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7591329134995406200:2760], recipient# [6:7591329134995406199:2314], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:09.131344Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7591329109225601499:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:09.131508Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7591329109225601499:2124], cacheItem# { Subscriber: { Subscriber: [6:7591329113520569385:2529] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:34:09.131615Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7591329139290373504:2764], recipient# [6:7591329139290373503:2315], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:09.192079Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7591329109225601499:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:09.192241Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7591329109225601499:2124], cacheItem# { Subscriber: { Subscriber: 
[6:7591329113520569385:2529] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:34:09.192335Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7591329139290373506:2765], recipient# [6:7591329139290373505:2316], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSchemeShardTest::DropTableAndConcurrentSplit [GOOD] >> TSchemeShardTest::DropTable >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestReboot [GOOD] >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true >> TSchemeShardTest::UpdateChannelsBindingSolomonStorageConfig [GOOD] >> TSchemeShardTest::RejectAlterSolomon >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Table [GOOD] >> TSchemeShardTest::CopyTableWithAlterConfig [GOOD] >> TSchemeShardTest::AlterTableConfig [GOOD] >> TSchemeShardTest::AlterTableFollowers >> TSchemeShardTest::CopyTableOmitFollowers >> TableCreation::ConcurrentMultipleTablesCreation [GOOD] >> TableCreation::ConcurrentTableCreationWithDifferentVersions >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Query >> TSchemeShardPgTypesInTables::CreateTableWithPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false >> ScriptExecutionsTest::RestartQueryWithGetOperation [GOOD] >> ScriptExecutionsTest::BackgroundOperationRestart >> TSchemeShardTest::RejectAlterSolomon [GOOD] >> TSchemeShardTest::SimultaneousDropForceDrop >> TSchemeShardTest::MkRmDir >> KqpProxy::NodeDisconnectedTest [GOOD] >> KqpProxy::PingNotExistedSession >> TSchemeShardTest::ConsistentCopyTableRejects [GOOD] >> TSchemeShardTest::ConsistentCopyTableToDeletedPath >> TSchemeShardTest::CreateTableWithUnknownNamedConfig >> TSchemeShardTest::CopyTableOmitFollowers [GOOD] >> TSchemeShardTest::CopyTableForBackup >> TableCreation::UpdateTableWithAclModification [GOOD] >> TableCreation::UpdateTableAcl >> TxUsage::WriteToTopic_Demo_43_Table [GOOD] >> TSchemeShardTest::AlterTableFollowers [GOOD] >> TSchemeShardTest::AlterTableSizeToSplit >> TSubDomainTest::ConsistentCopyTable [GOOD] >> TSchemeShardTest::DropTable [GOOD] >> TSchemeShardTest::DropTableById >> TSchemeShardTest::SimultaneousDropForceDrop [GOOD] >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-false >> TSchemeShardTest::MkRmDir [GOOD] >> TSchemeShardTest::PathName >> KqpProxy::NoLocalSessionExecution [GOOD] >> KqpProxy::NoUserAccessToScriptExecutionsTable >> 
TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-false [GOOD] >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true >> TableCreation::SimpleUpdateTable [GOOD] >> TableCreation::RollbackTableAcl >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Table [GOOD] >> TSchemeShardTest::CreateTableWithUnknownNamedConfig [GOOD] >> TSchemeShardTest::DependentOps >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-false [GOOD] >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-true >> TxUsage::WriteToTopic_Demo_16_Table [GOOD] >> TSchemeShardTest::DropTableById [GOOD] >> TSchemeShardTest::DropPQ >> TSchemeShardTest::PathName [GOOD] >> TSchemeShardTest::PathName_SetLocale >> TableCreation::ConcurrentUpdateTable [GOOD] >> TableCreation::CreateOldTable >> TSchemeShardTest::AlterTableSizeToSplit [GOOD] >> TSchemeShardTest::AlterTableSplitSchema >> TxUsage::WriteToTopic_Demo_16_Query >> TSchemeShardTest::RejectSystemViewPath-EnableSystemNamesProtection-true [GOOD] >> TSchemeShardTest::SplitKey [GOOD] >> TSchemeShardTest::SplitAlterCopy >> TSchemeShardTest::DependentOps [GOOD] >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Query >> TSchemeShardTest::ConsistentCopyTableToDeletedPath [GOOD] >> TSchemeShardTest::CopyIndexedTable >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Table [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_MoveDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_DisableDeadLetterPolicy >> TSchemeShardPgTypesInTables::AlterTableAddPgTypeColumn-EnableTablePgTypes-true [GOOD] >> TSchemeShardTest::AlterTableAndConcurrentSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::ConsistentCopyTable [GOOD] Test command err: 2026-01-04T02:33:38.144799Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007400s 2026-01-04T02:33:38.214809Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329005645112946:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:38.214852Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:38.589889Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:38.622280Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:38.624815Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:38.639857Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:38.714736Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:38.832309Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 
2026-01-04T02:33:39.214951Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591329005645113160:2147] Handle TEvNavigate describe path dc-1 2026-01-04T02:33:39.215033Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591329009940080935:2467] HANDLE EvNavigateScheme dc-1 2026-01-04T02:33:39.215164Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591329005645113166:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:39.215280Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591329005645113380:2291][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591329005645113166:2149], cookie# 1 2026-01-04T02:33:39.217685Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329005645113441:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329005645113438:2291], cookie# 1 2026-01-04T02:33:39.217729Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329005645113442:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329005645113439:2291], cookie# 1 2026-01-04T02:33:39.217747Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329005645113443:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329005645113440:2291], cookie# 1 2026-01-04T02:33:39.217791Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329001350145503:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329005645113441:2291], cookie# 1 2026-01-04T02:33:39.217835Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329001350145506:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329005645113442:2291], cookie# 1 2026-01-04T02:33:39.217913Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329001350145509:2060] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329005645113443:2291], cookie# 1 2026-01-04T02:33:39.218302Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329005645113441:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329001350145503:2054], cookie# 1 2026-01-04T02:33:39.218339Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329005645113442:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329001350145506:2057], cookie# 1 2026-01-04T02:33:39.218356Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329005645113443:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329001350145509:2060], cookie# 1 2026-01-04T02:33:39.218420Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329005645113380:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329005645113438:2291], cookie# 1 2026-01-04T02:33:39.218445Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
subscriber.cpp:986: [main][1:7591329005645113380:2291][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:33:39.218464Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329005645113380:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329005645113439:2291], cookie# 1 2026-01-04T02:33:39.218488Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591329005645113380:2291][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:33:39.218516Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329005645113380:2291][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329005645113440:2291], cookie# 1 2026-01-04T02:33:39.218533Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591329005645113380:2291][/dc-1] Sync cookie mismatch: sender# [1:7591329005645113440:2291], cookie# 1, current cookie# 0 2026-01-04T02:33:39.218612Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591329005645113166:2149], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:39.218657Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7591329005645113166:2149], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2026-01-04T02:33:39.218766Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591329005645113166:2149], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:33:39.222133Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7591329009940080937:2468][/dc-1/.metadata/initialization/migrations] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:33:39.222612Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591329001350145503:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7591329009940080941:2468] 2026-01-04T02:33:39.222628Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591329001350145503:2054] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-04T02:33:39.222692Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591329001350145503:2054] Subscribe: subscriber# [1:7591329009940080941:2468], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:33:39.222727Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591329001350145506:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7591329009940080942:2468] 2026-01-04T02:33:39.222733Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591329001350145506:2057] Upsert description: 
path# /dc-1/.metadata/initialization/migrations 2026-01-04T02:33:39.222752Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591329001350145506:2057] Subscribe: subscriber# [1:7591329009940080942:2468], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:33:39.222777Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591329001350145509:2060] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7591329009940080943:2468] 2026-01-04T02:33:39.222783Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591329001350145509:2060] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-04T02:33:39.222799Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591329001350145509:2060] Subscribe: subscriber# [1:7591329009940080943:2468], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:33:39.222849Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7591329009940080941:2468][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7591329001350145503:2054] 2026-01-04T02:33:39.222900Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7591329009940080942:2468][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7591329001350145506:2057] 2026-01-04T02:33:39.222919Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7591329009940080943:2468][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7591329001350145509:2060] 2026-01-04T02:33:39.222958Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591329009940080937:2468][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7591329009940080938:2468] 2026-01-04T02:33:39.222997Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591329009940080937:2468][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7591329009940080939:2468] 2026-01-04T02:33:39.223034Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591329009940080937:2468][/dc-1/.metadata/initialization/migrations] Set up state: owner# [1:7591329005645113166:2149], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:33:39.223053Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591329009940080937:2468][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7591329009940080940:2468 ... 
Notify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [14:7591329161679438179:2247] 2026-01-04T02:34:15.371117Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7591329161679438169:2247][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [14:7591329135909634127:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:34:15.371136Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7591329161679438169:2247][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [14:7591329161679438180:2247] 2026-01-04T02:34:15.371161Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7591329161679438169:2247][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [14:7591329135909634127:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:34:15.371607Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7591329161679438167:2246][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [14:7591329135909634127:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:34:15.371647Z node 14 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][14:7591329161679438167:2246][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [14:7591329161679438173:2246] 2026-01-04T02:34:15.371675Z node 14 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][14:7591329161679438167:2246][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [14:7591329135909634127:2108], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:34:15.418462Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [14:7591329135909634127:2108], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:15.418711Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [14:7591329135909634127:2108], cacheItem# { Subscriber: { Subscriber: [14:7591329140204601639:2231] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations 
TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:34:15.418883Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [14:7591329165974405562:2258], recipient# [14:7591329165974405561:2321], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:15.452838Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [14:7591329135909634127:2108], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:15.453008Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [14:7591329135909634127:2108], cacheItem# { Subscriber: { Subscriber: [14:7591329140204601639:2231] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:34:15.453136Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [14:7591329165974405564:2259], recipient# [14:7591329165974405563:2322], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:15.963564Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [14:7591329135909634127:2108], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:15.963742Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [14:7591329135909634127:2108], cacheItem# { 
Subscriber: { Subscriber: [14:7591329161679438167:2246] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:34:15.963804Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [14:7591329135909634127:2108], cacheItem# { Subscriber: { Subscriber: [14:7591329161679438169:2247] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:34:15.963932Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [14:7591329165974405565:2260], recipient# [14:7591329161679438162:2311], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:15.964479Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:7591329161679438162:2311], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:34:16.093415Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [14:7591329135909634127:2108], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:16.093575Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [14:7591329135909634127:2108], cacheItem# { Subscriber: { Subscriber: [14:7591329161679438170:2248] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:34:16.093680Z node 14 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [14:7591329170269372863:2261], recipient# [14:7591329170269372862:2323], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:16.094121Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; |87.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> KqpProxy::ExecuteScriptFailsWithoutFeatureFlag [GOOD] >> KqpProxy::DatabasesCacheForServerless >> TSchemeShardTest::PathName_SetLocale [GOOD] >> TSchemeShardTest::ModifyACL |87.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |87.8%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection |87.8%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_failure_injection/ydb-core-tx-schemeshard-ut_failure_injection >> ScriptExecutionsTest::UpdatesLeaseAfterExpiring [GOOD] >> ScriptExecutionsTest::TestSecureScriptExecutions >> TSchemeShardTest::ModifyACL [GOOD] >> TSchemeShardTest::NameFormat >> TSchemeShardTest::SplitAlterCopy [GOOD] >> TSchemeShardTest::TopicReserveSize >> TSchemeShardTest::AlterTableSplitSchema [GOOD] >> TSchemeShardTest::AlterTableSettings >> TSchemeShardTest::DefaultColumnFamiliesWithNonCanonicName [GOOD] >> TSchemeShardTest::DropBlockStoreVolume >> TSchemeShardTest::CopyIndexedTable [GOOD] >> TSchemeShardTest::CopyTable >> KqpProxy::PingNotExistedSession [GOOD] >> ScriptExecutionsTest::AttemptToUpdateDeletedLease >> TSchemeShardTest::AlterTableAndConcurrentSplit 
[GOOD] >> TSchemeShardTest::AlterTable |87.8%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |87.8%| [LD] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/backup/impl/ut_local_partition_reader/ydb-core-backup-impl-ut_local_partition_reader |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |87.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_export_reboots_s3/ydb-core-tx-schemeshard-ut_export_reboots_s3 >> TSchemeShardTest::DropBlockStoreVolume [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions >> TSchemeShardTest::NameFormat [GOOD] >> TSchemeShardTest::ParallelCreateTable >> TSchemeShardTest::DropPQ [GOOD] >> TSchemeShardTest::DropPQFail >> TSchemeShardTest::CopyTable [GOOD] >> TSchemeShardTest::ConsistentCopyTablesForBackup >> TSchemeShardTest::DropBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::DropBlockStoreVolume2 >> TxUsage::WriteToTopic_Demo_24_Query [GOOD] >> TxUsage::WriteToTopic_Demo_43_Query >> TSchemeShardTest::AlterTable [GOOD] >> TSchemeShardTest::AlterTableById >> TSchemeShardTest::TopicReserveSize [GOOD] >> TSchemeShardTest::TopicWithAutopartitioningReserveSize >> TxUsage::Sinks_Oltp_WriteToTopic_3_Query [GOOD] >> TableCreation::ConcurrentTableCreationWithDifferentVersions [GOOD] >> TSchemeShardTest::DropBlockStoreVolume2 [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] >> Sharding::XXUsage >> TxUsage::WriteToTopic_Demo_27_Table >> Sharding::XXUsage [GOOD] >> TSchemeShardTest::AlterTableSettings [GOOD] >> TSchemeShardTest::AssignBlockStoreVolume >> TSchemeShardTest::ParallelCreateTable [GOOD] >> TSchemeShardTest::ParallelCreateSameTable >> TxUsage::Sinks_Oltp_WriteToTopic_4_Table >> BasicUsage::RetryDiscoveryWithCancel [GOOD] >> BasicUsage::RecreateObserver >> TxUsage::WriteToTopic_Demo_11_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest >> Sharding::XXUsage [GOOD] Test command err: 17099506110185566985 13394009851024707258 8579012584329618622 8029731107092843497 98787222198356733 3605118096075377207 13686068701507014407 10092542322083336958 738289969770883286 12311130774861307203 12992238638986043772 5763950885143052443 12547043371892659603 6647008692816054474 11789144766474650014 18359439922950049899 4337909301970187725 15778743670993772837 10999273915276955774 59851744722436480 6462434379212690445 10860369530547396695 18028424575292188899 6652372074869989151 9825881490287977478 17949297083984485094 5266892329324926967 13425067848468668795 6228251083271008755 10170108051214191929 7944457645031362548 12664888582375705847 3832696829749855853 11614313400335609079 1564913822986586617 4941093415140956041 9354075268803891513 9237207097215951184 6128051767688441440 10379078307025562124 9962287286179718960 4074629484264669106 14979170403844370432 18230774754224489425 1776365197486923695 6907519376617187649 2848846807772214366 
1096913688214513250 11650516694104001510 16166310179139919515 1440108869279352788 11121035849889936804 15243017756570744116 2159423431408936479 16923150347782138830 16758325277178437329 14254502719861409293 13261870369882536873 4370458115114690622 14819202119728014963 4836601571719512743 10934885695438051501 1809793935715313737 1396318749914413983 6209425874958316405 410971920260095392 17596365256340679480 9502548479072749294 18373960715474761024 4842758779577242380 12035792738368783173 577300324317534146 5435312351383134057 16239427529194763545 2289643015670829356 2238020319668806195 12408527518245347690 1893716201898139140 10328052421638871683 4410703824957442269 13551459357967668168 8247228904979568563 18366832006521283197 11386609365401584757 228469804542247041 6423800834967001893 10673210513524271372 16304187799904423756 7916849804599831493 10620038764209357669 |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest >> TxUsage::WriteToTopic_Demo_22_RestartNo_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::ConcurrentTableCreationWithDifferentVersions [GOOD] Test command err: 2026-01-04T02:33:50.960122Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329056061351852:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:50.993848Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:51.576409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:51.576533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:51.593512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:51.901912Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329056061351800:2081] 1767494030944667 != 1767494030944670 2026-01-04T02:33:51.934011Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:52.098037Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:52.574374Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:52.574405Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:52.574418Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:52.574534Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:52.623293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:52.630572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:33:55.962027Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329056061351852:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:55.965467Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:33:56.191723Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:33:56.200840Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:33:56.200886Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-04T02:33:56.200914Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:33:56.210838Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7591329081831156428:2503] Owner: [1:7591329081831156426:2501]. Describe result: PathErrorUnknown 2026-01-04T02:33:56.210882Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7591329081831156428:2503] Owner: [1:7591329081831156426:2501]. Creating table 2026-01-04T02:33:56.210937Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7591329081831156428:2503] Owner: [1:7591329081831156426:2501]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-04T02:33:56.211478Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7591329081831156427:2502] Owner: [1:7591329081831156426:2501]. Describe result: PathErrorUnknown 2026-01-04T02:33:56.211487Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7591329081831156427:2502] Owner: [1:7591329081831156426:2501]. Creating table 2026-01-04T02:33:56.211515Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7591329081831156427:2502] Owner: [1:7591329081831156426:2501]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-04T02:33:56.211807Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7591329081831156429:2504] Owner: [1:7591329081831156426:2501]. Describe result: PathErrorUnknown 2026-01-04T02:33:56.211815Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7591329081831156429:2504] Owner: [1:7591329081831156426:2501]. Creating table 2026-01-04T02:33:56.211886Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7591329081831156429:2504] Owner: [1:7591329081831156426:2501]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-04T02:33:56.216589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:56.218698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:56.221423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:56.226630Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7591329081831156428:2503] Owner: [1:7591329081831156426:2501]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-04T02:33:56.226703Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7591329081831156428:2503] Owner: [1:7591329081831156426:2501]. Subscribe on create table tx: 281474976710658 2026-01-04T02:33:56.226704Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7591329081831156429:2504] Owner: [1:7591329081831156426:2501]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-04T02:33:56.226735Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7591329081831156429:2504] Owner: [1:7591329081831156426:2501]. Subscribe on create table tx: 281474976710660 2026-01-04T02:33:56.226807Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7591329081831156427:2502] Owner: [1:7591329081831156426:2501]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-04T02:33:56.226827Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7591329081831156427:2502] Owner: [1:7591329081831156426:2501]. Subscribe on create table tx: 281474976710659 2026-01-04T02:33:56.228428Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7591329081831156429:2504] Owner: [1:7591329081831156426:2501]. Subscribe on tx: 281474976710660 registered 2026-01-04T02:33:56.229834Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7591329081831156427:2502] Owner: [1:7591329081831156426:2501]. Subscribe on tx: 281474976710659 registered 2026-01-04T02:33:56.229908Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7591329081831156428:2503] Owner: [1:7591329081831156426:2501]. 
Subscribe on tx: 281474976710658 registered 2026-01-04T02:33:56.323493Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7591329081831156428:2503] Owner: [1:7591329081831156426:2501]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-01-04T02:33:56.363738Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7591329081831156427:2502] Owner: [1:7591329081831156426:2501]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-01-04T02:33:56.387692Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7591329081831156429:2504] Owner: [1:7591329081831156426:2501]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-01-04T02:33:56.410377Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_execution_leases updater. SelfId: [1:7591329081831156428:2503] Owner: [1:7591329081831156426:2501]. Table already exists, number of columns: 6, has SecurityObject: true 2026-01-04T02:33:56.410427Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_execution_leases updater. SelfId: [1:7591329081831156428:2503] Owner: [1:7591329081831156426:2501]. Column diff is empty, finishing 2026-01-04T02:33:56.411516Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7591329081831156428:2503] Owner: [1:7591329081831156426:2501]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-04T02:33:56.412837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:33:56.414546Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7591329081831156428:2503] Owner: [1:7591329081831156426:2501]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-04T02:33:56.414576Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_execution_leases updater. SelfId: [1:7591329081831156428:2503] Owner: [1:7591329081831156426:2501]. Successful alter request: ExecComplete 2026-01-04T02:33:56.432981Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7591329081831156427:2502] Owner: [1:7591329081831156426:2501]. Table already exists, number of columns: 33, has SecurityObject: true 2026-01-04T02:33:56.433064Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7591329081831156427:2502] Owner: [1:7591329081831156426:2501]. Column diff is empty, finishing 2026-01-04T02:33:56.433158Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7591329081831156427:2502] Owner: [1:7591329081831156426:2 ... table 2026-01-04T02:34:23.962374Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7591329197808631502:2963] Owner: [3:7591329197808631501:2962]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-04T02:34:23.962394Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. 
SelfId: [3:7591329197808631502:2963] Owner: [3:7591329197808631501:2962]. Column diff is empty, finishing 2026-01-04T02:34:23.962611Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7591329197808631496:2957] Owner: [3:7591329197808631495:2956]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-04T02:34:23.962628Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:444: Table test_table updater. SelfId: [3:7591329197808631496:2957] Owner: [3:7591329197808631495:2956]. Adding columns. New columns: col4, col5. Existing columns: col1, col2, col3 2026-01-04T02:34:23.962651Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7591329197808631496:2957] Owner: [3:7591329197808631495:2956]. Created ESchemeOpAlterTable transaction for path: /dc-1/test/test_table 2026-01-04T02:34:23.962856Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7591329197808631492:2953] Owner: [3:7591329197808631491:2952]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-04T02:34:23.962873Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:444: Table test_table updater. SelfId: [3:7591329197808631492:2953] Owner: [3:7591329197808631491:2952]. Adding columns. New columns: col4, col5. Existing columns: col1, col2, col3 2026-01-04T02:34:23.962897Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7591329197808631492:2953] Owner: [3:7591329197808631491:2952]. Created ESchemeOpAlterTable transaction for path: /dc-1/test/test_table 2026-01-04T02:34:23.963190Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7591329197808631506:2967] Owner: [3:7591329197808631505:2966]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-04T02:34:23.963204Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7591329197808631506:2967] Owner: [3:7591329197808631505:2966]. Column diff is empty, finishing 2026-01-04T02:34:23.965534Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591329197808631729:3153] txid# 281474976710696, issues: { message: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } 2026-01-04T02:34:23.965773Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7591329197808631504:2965] Owner: [3:7591329197808631503:2964]. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710696 Issues { message: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2026-01-04T02:34:23.966618Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591329197808631732:3156] txid# 281474976710697, issues: { message: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } 2026-01-04T02:34:23.966782Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:293: Table test_table updater. 
SelfId: [3:7591329197808631504:2965] Owner: [3:7591329197808631503:2964]. Unable to subscribe to concurrent transaction, falling back 2026-01-04T02:34:23.966784Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7591329197808631496:2957] Owner: [3:7591329197808631495:2956]. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710697 Issues { message: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2026-01-04T02:34:23.966795Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:293: Table test_table updater. SelfId: [3:7591329197808631496:2957] Owner: [3:7591329197808631495:2956]. Unable to subscribe to concurrent transaction, falling back 2026-01-04T02:34:23.967189Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591329197808631734:3158] txid# 281474976710698, issues: { message: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } 2026-01-04T02:34:23.967316Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7591329197808631492:2953] Owner: [3:7591329197808631491:2952]. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710698 Issues { message: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2026-01-04T02:34:23.967326Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:293: Table test_table updater. SelfId: [3:7591329197808631492:2953] Owner: [3:7591329197808631491:2952]. Unable to subscribe to concurrent transaction, falling back 2026-01-04T02:34:23.982412Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7591329197808631508:2969] Owner: [3:7591329197808631507:2968]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-04T02:34:23.982455Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:444: Table test_table updater. SelfId: [3:7591329197808631508:2969] Owner: [3:7591329197808631507:2968]. Adding columns. New columns: col4, col5. Existing columns: col1, col2, col3 2026-01-04T02:34:23.982499Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7591329197808631508:2969] Owner: [3:7591329197808631507:2968]. Created ESchemeOpAlterTable transaction for path: /dc-1/test/test_table 2026-01-04T02:34:23.985596Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591329197808631757:3178] txid# 281474976710699, issues: { message: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } 2026-01-04T02:34:23.986055Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. 
SelfId: [3:7591329197808631508:2969] Owner: [3:7591329197808631507:2968]. TEvProposeTransactionStatus: { Status: 52 TxId: 281474976710699 Issues { message: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" severity: 1 } SchemeShardStatus: 8 SchemeShardReason: "Check failed: path: \'/dc-1/test/test_table\', error: path is under operation (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeTable, state: EPathStateAlter)" SchemeShardTabletId: 72057594046644480 } 2026-01-04T02:34:23.986072Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:293: Table test_table updater. SelfId: [3:7591329197808631508:2969] Owner: [3:7591329197808631507:2968]. Unable to subscribe to concurrent transaction, falling back 2026-01-04T02:34:23.986288Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7591329197808631500:2961] Owner: [3:7591329197808631499:2960]. Request: alter. Transaction completed: 281474976710695. Doublechecking... 2026-01-04T02:34:24.086195Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7591329197808631492:2953] Owner: [3:7591329197808631491:2952]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:34:24.086242Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7591329197808631492:2953] Owner: [3:7591329197808631491:2952]. Column diff is empty, finishing 2026-01-04T02:34:24.086298Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7591329197808631496:2957] Owner: [3:7591329197808631495:2956]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:34:24.086318Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7591329197808631496:2957] Owner: [3:7591329197808631495:2956]. Column diff is empty, finishing 2026-01-04T02:34:24.126185Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7591329197808631504:2965] Owner: [3:7591329197808631503:2964]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:34:24.126230Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7591329197808631504:2965] Owner: [3:7591329197808631503:2964]. Column diff is empty, finishing 2026-01-04T02:34:24.149130Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7591329197808631508:2969] Owner: [3:7591329197808631507:2968]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:34:24.149182Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7591329197808631508:2969] Owner: [3:7591329197808631507:2968]. Column diff is empty, finishing 2026-01-04T02:34:24.178133Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7591329197808631500:2961] Owner: [3:7591329197808631499:2960]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:34:24.178180Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7591329197808631500:2961] Owner: [3:7591329197808631499:2960]. 
Column diff is empty, finishing 2026-01-04T02:34:24.263897Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3dp647fng3zabrqj55btd2", Request has 18444976579645.287750s seconds to be completed 2026-01-04T02:34:24.267159Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3dp647fng3zabrqj55btd2", Created new session, sessionId: ydb://session/3?node_id=3&id=NGY4MWMyZWMtM2NhODdmOTctYzc3MmMyYzktYWZmYTIzNmM=, workerId: [3:7591329202103599077:2526], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-04T02:34:24.267392Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3dp647fng3zabrqj55btd2 2026-01-04T02:34:24.359737Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=NGY4MWMyZWMtM2NhODdmOTctYzc3MmMyYzktYWZmYTIzNmM=, workerId: [3:7591329202103599077:2526], local sessions count: 1 2026-01-04T02:34:24.360031Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=YTViOTRjNmUtNWY4MzNkZTktMTUwNDE2NWYtNWVhY2IwNDI=, workerId: [3:7591329197808631476:2515], local sessions count: 0 |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> TSchemeShardTest::AlterTableById [GOOD] >> TSchemeShardTest::AlterTableCompactionPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::NoUserAccessToScriptExecutionsTable [GOOD] Test command err: 2026-01-04T02:33:50.711174Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329058981892003:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:50.711583Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:51.257123Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:51.257229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:51.289271Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:51.357715Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:51.373990Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329058981891965:2081] 1767494030651205 != 1767494030651208 2026-01-04T02:33:51.399193Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:51.548009Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:33:51.678151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:51.686305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:33:51.781966Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:55.012883Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:33:55.018819Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1454: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2026-01-04T02:33:55.022426Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: BAD_REQUEST, issues: {
: Error: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 } 2026-01-04T02:33:55.022592Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:33:55.022647Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-04T02:33:55.022664Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:33:55.022816Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 2, sender: [1:7591329063276860051:2484], selfId: [1:7591329058981892217:2266], source: [1:7591329058981892217:2266] 2026-01-04T02:33:55.026883Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1454: Failed to parse session id: unknown://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 2026-01-04T02:33:55.026935Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: BAD_REQUEST, issues: {
: Error: Failed to parse session id: unknown://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=1234&node_id=12345 } 2026-01-04T02:33:55.027010Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 3, sender: [1:7591329063276860051:2484], selfId: [1:7591329058981892217:2266], source: [1:7591329058981892217:2266] 2026-01-04T02:33:55.027190Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329080456729272:2303], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.027305Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.028236Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1454: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=eqweq 2026-01-04T02:33:55.028277Z node 1 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: BAD_REQUEST, issues: {
: Error: Failed to parse session id: ydb://session/1?id=ZjY5NWRlM2EtYWMyYjA5YWEtNzQ0MTVlYTMtM2Q4ZDgzOWQ=&node_id=eqweq } 2026-01-04T02:33:55.028342Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 4, sender: [1:7591329063276860051:2484], selfId: [1:7591329058981892217:2266], source: [1:7591329058981892217:2266] 2026-01-04T02:33:55.033786Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329080456729305:2305], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.033897Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.034332Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329080456729307:2306], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.034375Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ... waiting for SysViewsRoster update finished 2026-01-04T02:33:59.180188Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:59.193577Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:33:59.202128Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:33:59.202422Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:33:59.202623Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:33:59.658806Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:59.658934Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:59.696432Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767494035772449 != 1767494035772453 2026-01-04T02:33:59.702991Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:59.758552Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:59.844578Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:34:00.271669Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:34:00.297371Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:34:00.298640Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:307:2350], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:00.302209Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:307:2350], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:34:00.305139Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [2:307:2350], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/initialization/migrations PathId: Strong: 1 } 2026-01-04T02:34:00.305287Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [2:307:2350], notify# 
NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /Root/.metadata/initialization/migrations PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [2:840:2735] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:34:00.305426Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:307:2350], cacheItem# { Subscriber: { Subscriber: [2:840:2735] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: R ... _service.cpp:1193: Scheduled timeout timer for requestId: 5 timeout: 300.000000s actor id: [5:7591329198215021109:3322] 2026-01-04T02:34:24.046458Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 5, sender: [5:7591329198215021108:2357], selfId: [5:7591329176740182943:2269], source: [5:7591329198215021107:2356] 2026-01-04T02:34:24.047839Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TCreateScriptOperationQuery] OwnerId: [5:7591329198215021094:3313], ActorId: [5:7591329198215021101:3317], TraceId: ExecutionId: 330d0920-d784078-bd8d32aa-c23d05ef, RequestDatabase: /Root, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=5&id=NWQzODdlZWQtNTc0YTMwYzAtYWM3MGQ0ZjQtNmNjNjQ0YQ==, TxId: 2026-01-04T02:34:24.047874Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TCreateScriptOperationQuery] OwnerId: [5:7591329198215021094:3313], ActorId: [5:7591329198215021101:3317], TraceId: ExecutionId: 330d0920-d784078-bd8d32aa-c23d05ef, RequestDatabase: /Root, Finish with SUCCESS, SessionId: ydb://session/3?node_id=5&id=NWQzODdlZWQtNTc0YTMwYzAtYWM3MGQ0ZjQtNmNjNjQ0YQ==, TxId: 2026-01-04T02:34:24.047958Z node 5 :KQP_PROXY DEBUG: kqp_script_executions.cpp:490: [ScriptExecutions] [TQueryBase] [TCreateScriptOperationQuery] OwnerId: [5:7591329198215021094:3313], ActorId: [5:7591329198215021101:3317], TraceId: ExecutionId: 330d0920-d784078-bd8d32aa-c23d05ef, RequestDatabase: /Root, Create script execution operation, RetryState: , has PhysicalGraph: 0, Result: SUCCESS, Issues: 2026-01-04T02:34:24.048190Z node 5 :KQP_PROXY DEBUG: kqp_script_executions.cpp:589: [ScriptExecutions] [TCreateScriptExecutionActor] OwnerId: [5:7591329198215020713:2340] ActorId: [5:7591329198215021094:3313] Database: /Root ExecutionId: 330d0920-d784078-bd8d32aa-c23d05ef. 
Create script operation [5:7591329198215021101:3317] succeeded, RunScriptActorId: [5:7591329198215021098:3314] 2026-01-04T02:34:24.048664Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=5&id=NWQzODdlZWQtNTc0YTMwYzAtYWM3MGQ0ZjQtNmNjNjQ0YQ==, workerId: [5:7591329198215021107:2356], local sessions count: 0 2026-01-04T02:34:24.050896Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=5&id=YzhkMjg3NWUtZDBlMjllYmEtY2YyYzc3NmYtMjg0MWI3Yzc=, workerId: [5:7591329202509988476:2366], database: /Root, longSession: 1, local sessions count: 1 2026-01-04T02:34:24.051099Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-04T02:34:24.051545Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] [TScriptProgressActor] OwnerId: [5:7591329198215021098:3314], ActorId: [5:7591329202509988477:3360], TraceId: ExecutionId: 330d0920-d784078-bd8d32aa-c23d05ef, RequestDatabase: /Root, LeaseGeneration: 1, Bootstrap. Database: /Root, IsSystemUser: 1, run create session 2026-01-04T02:34:24.051868Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3dp58sckqypp6yzrt44g99, Database: /Root, SessionId: ydb://session/3?node_id=5&id=YzhkMjg3NWUtZDBlMjllYmEtY2YyYzc3NmYtMjg0MWI3Yzc=, PoolId: , DatabaseId: , CustomerSuppliedId: 01ke3dp58sckqypp6yzrt44g99, CurrentExecutionId: 330d0920-d784078-bd8d32aa-c23d05ef, RunScriptActorId: [5:7591329198215021098:3314], IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 604800.000000s timeout: 604800.000000s cancelAfter: 0.000000s. Send request to target, requestId: 7, targetId: [5:7591329202509988476:2366] 2026-01-04T02:34:24.051896Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 7 timeout: 604800.000000s actor id: [5:7591329202509988479:3361] 2026-01-04T02:34:24.052238Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976579645.499392s seconds to be completed 2026-01-04T02:34:24.052891Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591329202509988480:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:24.052966Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:24.053638Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591329202509988485:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:24.053683Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591329202509988486:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:24.053887Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:24.054444Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=5&id=Y2RkNDI4YjAtYWJiNWIyYTctNTE0NThlYTQtMTQ5MjZhNjM=, workerId: [5:7591329202509988491:2374], database: /Root, longSession: 1, local sessions count: 2 2026-01-04T02:34:24.054634Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-04T02:34:24.055372Z node 5 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TScriptProgressActor] OwnerId: [5:7591329198215021098:3314], ActorId: [5:7591329202509988477:3360], TraceId: ExecutionId: 330d0920-d784078-bd8d32aa-c23d05ef, RequestDatabase: /Root, LeaseGeneration: 1, RunDataQuery with SessionId: ydb://session/3?node_id=5&id=Y2RkNDI4YjAtYWJiNWIyYTctNTE0NThlYTQtMTQ5MjZhNjM=, TxId: , text: -- TScriptProgressActor::OnRunQuery DECLARE $execution_id AS Text; DECLARE $database AS Text; DECLARE $plan_compressed AS Optional; DECLARE $plan_compression_method AS Optional; DECLARE $execution_status AS Int32; DECLARE $lease_generation AS Int64; UPDATE `.metadata/script_executions` SET plan_compressed = $plan_compressed, plan_compression_method = $plan_compression_method, execution_status = $execution_status WHERE database = $database AND execution_id = $execution_id AND (lease_generation IS NULL OR lease_generation = $lease_generation); 2026-01-04T02:34:24.056408Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /Root, SessionId: ydb://session/3?node_id=5&id=Y2RkNDI4YjAtYWJiNWIyYTctNTE0NThlYTQtMTQ5MjZhNjM=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 9, targetId: [5:7591329202509988491:2374] 2026-01-04T02:34:24.056437Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 9 timeout: 300.000000s actor id: [5:7591329202509988508:3370] 2026-01-04T02:34:24.057973Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710666:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:34:24.074328Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3dp5y975b5qqse5eeef0h2", Request has 18444976579645.477310s seconds to be completed 2026-01-04T02:34:24.076515Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3dp5y975b5qqse5eeef0h2", Created new session, sessionId: ydb://session/3?node_id=5&id=ZDQwMDUwN2EtOTFkNzVkNmUtZjBhYjI0ODEtZjk0ZGZmYjQ=, workerId: [5:7591329202509988530:2379], database: /Root, longSession: 1, local sessions count: 3 2026-01-04T02:34:24.076713Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3dp5y975b5qqse5eeef0h2 2026-01-04T02:34:24.101702Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3dp5z59sbehzkxjp4zba32, Database: /Root, SessionId: ydb://session/3?node_id=5&id=ZDQwMDUwN2EtOTFkNzVkNmUtZjBhYjI0ODEtZjk0ZGZmYjQ=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 11, targetId: [5:7591329202509988530:2379] 2026-01-04T02:34:24.101763Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 11 timeout: 300.000000s actor id: [5:7591329202509988552:3406] 2026-01-04T02:34:24.110644Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591329202509988489:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710666 completed, doublechecking } 2026-01-04T02:34:24.136345Z node 5 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [5:7591329202509988584:3424], for# user@builtin, access# DescribeSchema 2026-01-04T02:34:24.136385Z node 5 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [5:7591329202509988584:3424], for# user@builtin, access# DescribeSchema 2026-01-04T02:34:24.144425Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7591329202509988576:2385], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/script_executions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:34:24.147217Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=ZDQwMDUwN2EtOTFkNzVkNmUtZjBhYjI0ODEtZjk0ZGZmYjQ=, ActorId: [5:7591329202509988530:2379], ActorState: ExecuteState, LegacyTraceId: 01ke3dp5z59sbehzkxjp4zba32, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/script_executions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:34:24.147651Z node 5 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3dp5z59sbehzkxjp4zba32", Forwarded response to sender actor, requestId: 11, sender: [5:7591329202509988550:2380], selfId: [5:7591329176740182943:2269], source: [5:7591329202509988530:2379] 2026-01-04T02:34:24.232157Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591329202509988587:3426] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |87.9%| [LD] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/replication/ydb_proxy/ut/ydb-core-tx-replication-ydb_proxy-ut >> TxUsage::The_TxWriteInfo_Is_Deleted_After_The_Immediate_Transaction >> TableCreation::UpdateTableAcl [GOOD] >> TSchemeShardTest::DropBlockStoreVolumeWithFillGeneration [GOOD] >> TSchemeShardTest::CreateWithIntermediateDirs |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |87.9%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minikql/ydb-core-tx-datashard-ut_minikql >> TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] >> TSchemeShardTest::ConsistentCopyTablesForBackup [GOOD] >> TSchemeShardTest::CopyLockedTableForBackup >> TSchemeShardTest::AssignBlockStoreVolume [GOOD] >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool |87.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/ydb-core-tx-schemeshard-ut_resource_pool >> TSchemeShardTest::DropPQFail [GOOD] >> TSchemeShardTest::DropPQAbort ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> 
TSchemeShardTest::TopicWithAutopartitioningReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:33:58.967195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:33:58.967289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:33:58.967349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:33:58.967391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:33:58.967431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:33:58.967462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:33:58.967531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:33:58.967623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:33:58.968584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:33:58.968923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:33:59.110591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:33:59.110643Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:59.145928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:33:59.146629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:33:59.146993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:33:59.165183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:33:59.165491Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:33:59.166432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:33:59.166715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot 
DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:33:59.175424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:33:59.175632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:33:59.177132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:33:59.177193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:33:59.177245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:33:59.177296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:33:59.177355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:33:59.177571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:33:59.826078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:59.827125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:59.827255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:59.827323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:59.827388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:59.827473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:59.827557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:59.827618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:59.827732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:59.827807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:59.827875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:59.827979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:33:59.828048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:59.828130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:59.828199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 2, partId: 0 2026-01-04T02:34:28.027783Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409552 Status: COMPLETE TxId: 104 Step: 5000041 2026-01-04T02:34:28.027845Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409552 Status: COMPLETE TxId: 104 Step: 5000041 2026-01-04T02:34:28.027890Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:7, shard: 72075186233409552, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-04T02:34:28.027926Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-04T02:34:28.028139Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-04T02:34:28.028431Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 FAKE_COORDINATOR: Erasing txId 104 2026-01-04T02:34:28.041751Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:34:28.042100Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:34:28.042185Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:34:28.042258Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:34:28.042323Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:34:28.045521Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:34:28.045872Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:28.045923Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:34:28.046220Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:28.046267Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [14:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-04T02:34:28.046929Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:34:28.046998Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-04T02:34:28.047226Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:34:28.047301Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:34:28.047375Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:34:28.047444Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:34:28.047520Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-04T02:34:28.047590Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:34:28.047673Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-04T02:34:28.047737Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-04T02:34:28.048093Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 10 2026-01-04T02:34:28.048173Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2026-01-04T02:34:28.048236Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-04T02:34:28.049555Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:34:28.049669Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:34:28.049728Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:34:28.049822Z node 14 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-04T02:34:28.049903Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 2026-01-04T02:34:28.050056Z node 14 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-04T02:34:28.054650Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-04T02:34:28.055127Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-04T02:34:28.057152Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-04T02:34:28.058070Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-04T02:34:28.058256Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:34:28.058332Z node 14 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [14:1442:3267] TestWaitNotification: OK eventTxId 104 2026-01-04T02:34:28.059340Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:28.059641Z node 14 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 349us result status StatusSuccess 2026-01-04T02:34:28.060573Z node 14 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 4 
} ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 6 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY PartitionStrategy { MinPartitionCount: 1 MaxPartitionCount: 7 PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 3 ChildPartitionIds: 4 } Partitions { PartitionId: 2 TabletId: 72075186233409549 KeyRange { FromBound: "A" } Status: Inactive ParentPartitionIds: 0 ChildPartitionIds: 5 } Partitions { PartitionId: 3 TabletId: 72075186233409550 KeyRange { ToBound: "0" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 4 TabletId: 72075186233409551 KeyRange { FromBound: "0" ToBound: "A" } Status: Inactive ParentPartitionIds: 1 ChildPartitionIds: 5 } Partitions { PartitionId: 5 TabletId: 72075186233409552 KeyRange { FromBound: "0" } Status: Active ParentPartitionIds: 2 ParentPartitionIds: 4 } AlterVersion: 4 BalancerTabletID: 72075186233409547 NextPartitionId: 6 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 494 AccountSize: 494 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots |87.9%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots >> TableCreation::RollbackTableAcl [GOOD] >> TSchemeShardTest::ParallelCreateSameTable [GOOD] >> TSchemeShardTest::MultipleColumnFamilies |87.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query_reboots/core-tx-schemeshard-ut_streaming_query_reboots ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::UpdateTableAcl [GOOD] Test command err: 2026-01-04T02:33:54.917187Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329072038029789:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:54.917259Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:55.520168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:55.520287Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:55.520544Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329072038029751:2081] 1767494034915074 != 1767494034915077 2026-01-04T02:33:55.545312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:55.593161Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:55.825188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:55.825215Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:55.825223Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:55.825349Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:55.879316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:55.932943Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:58.689167Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:33:58.711143Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:33:58.711197Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-04T02:33:58.711216Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:33:58.715928Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7591329089217899786:2493] Owner: [1:7591329089217899785:2492]. Describe result: PathErrorUnknown 2026-01-04T02:33:58.715964Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7591329089217899786:2493] Owner: [1:7591329089217899785:2492]. Creating table 2026-01-04T02:33:58.716018Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7591329089217899786:2493] Owner: [1:7591329089217899785:2492]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-04T02:33:58.716265Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7591329089217899787:2494] Owner: [1:7591329089217899785:2492]. Describe result: PathErrorUnknown 2026-01-04T02:33:58.716271Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7591329089217899787:2494] Owner: [1:7591329089217899785:2492]. 
Creating table 2026-01-04T02:33:58.716290Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7591329089217899787:2494] Owner: [1:7591329089217899785:2492]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-04T02:33:58.716429Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7591329089217899788:2495] Owner: [1:7591329089217899785:2492]. Describe result: PathErrorUnknown 2026-01-04T02:33:58.716434Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7591329089217899788:2495] Owner: [1:7591329089217899785:2492]. Creating table 2026-01-04T02:33:58.716451Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7591329089217899788:2495] Owner: [1:7591329089217899785:2492]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-04T02:33:58.721146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:58.724028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:58.731129Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7591329089217899787:2494] Owner: [1:7591329089217899785:2492]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-04T02:33:58.731208Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7591329089217899787:2494] Owner: [1:7591329089217899785:2492]. Subscribe on create table tx: 281474976710659 2026-01-04T02:33:58.737468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:58.739623Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7591329089217899788:2495] Owner: [1:7591329089217899785:2492]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-04T02:33:58.739665Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7591329089217899788:2495] Owner: [1:7591329089217899785:2492]. Subscribe on create table tx: 281474976710660 2026-01-04T02:33:58.742082Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7591329089217899787:2494] Owner: [1:7591329089217899785:2492]. Subscribe on tx: 281474976710659 registered 2026-01-04T02:33:58.743146Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. 
SelfId: [1:7591329089217899788:2495] Owner: [1:7591329089217899785:2492]. Subscribe on tx: 281474976710660 registered 2026-01-04T02:33:58.743322Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7591329089217899786:2493] Owner: [1:7591329089217899785:2492]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-04T02:33:58.743340Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7591329089217899786:2493] Owner: [1:7591329089217899785:2492]. Subscribe on create table tx: 281474976710658 2026-01-04T02:33:58.746869Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7591329089217899786:2493] Owner: [1:7591329089217899785:2492]. Subscribe on tx: 281474976710658 registered 2026-01-04T02:33:58.919434Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7591329089217899787:2494] Owner: [1:7591329089217899785:2492]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-01-04T02:33:58.995307Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7591329089217899786:2493] Owner: [1:7591329089217899785:2492]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-01-04T02:33:59.005245Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_execution_leases updater. SelfId: [1:7591329089217899787:2494] Owner: [1:7591329089217899785:2492]. Table already exists, number of columns: 6, has SecurityObject: true 2026-01-04T02:33:59.005295Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_execution_leases updater. SelfId: [1:7591329089217899787:2494] Owner: [1:7591329089217899785:2492]. Column diff is empty, finishing 2026-01-04T02:33:59.006298Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7591329089217899787:2494] Owner: [1:7591329089217899785:2492]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-04T02:33:59.010599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:33:59.012434Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7591329089217899787:2494] Owner: [1:7591329089217899785:2492]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-04T02:33:59.012449Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_execution_leases updater. SelfId: [1:7591329089217899787:2494] Owner: [1:7591329089217899785:2492]. Successful alter request: ExecComplete 2026-01-04T02:33:59.015739Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7591329089217899788:2495] Owner: [1:7591329089217899785:2492]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-01-04T02:33:59.065211Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7591329089217899786:2493] Owner: [1:7591329089217899785:2492]. 
Table already exists, number of columns: 33, has SecurityObject: true 2026-01-04T02:33:59.065305Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7591329089217899786:2493] Owner: [1:7591329089217899785:2492]. Column diff is empty, finishing 2026-01-04T02:33:59.065424Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7591329089217899786:2493] Owner: [1:7591329089217899785:2492]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2026-01-04T02:33:59.066371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:33:59.067832Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7591329089217899786:2493] Owner: ... RE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-01-04T02:34:26.785767Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=YmJmYjQ1YmEtZTAwODc2NDQtZTdlNmNjMTUtNjM1MGMwNTM=, workerId: [3:7591329191870680044:2337], local sessions count: 1 2026-01-04T02:34:26.785969Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=YmM5MTZlNjctMmI1M2Y4ZTAtMTAxMGE4MTEtMjYwYjNiNmQ=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 40, targetId: [3:7591329213345517172:2518] 2026-01-04T02:34:26.786005Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 40 timeout: 300.000000s actor id: [3:7591329213345517182:2957] 2026-01-04T02:34:26.802513Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 40, sender: [3:7591329213345517180:2520], selfId: [3:7591329170395842446:2073], source: [3:7591329213345517172:2518] 2026-01-04T02:34:26.803259Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7591329213345517169:2950], ActorId: [3:7591329213345517170:2951], TraceId: ExecutionId: aa2398a8-3c16f05a-6f172f96-9148bc90, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=YmM5MTZlNjctMmI1M2Y4ZTAtMTAxMGE4MTEtMjYwYjNiNmQ=, TxId: 2026-01-04T02:34:26.803820Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7591329213345517169:2950], ActorId: [3:7591329213345517170:2951], TraceId: ExecutionId: aa2398a8-3c16f05a-6f172f96-9148bc90, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=YmM5MTZlNjctMmI1M2Y4ZTAtMTAxMGE4MTEtMjYwYjNiNmQ=, TxId: 2026-01-04T02:34:26.803849Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7591329213345517169:2950], ActorId: [3:7591329213345517170:2951], TraceId: ExecutionId: aa2398a8-3c16f05a-6f172f96-9148bc90, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2026-01-04T02:34:26.803944Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7591329213345517168:2949], ActorId: [3:7591329213345517169:2950], TraceId: ExecutionId: aa2398a8-3c16f05a-6f172f96-9148bc90, RequestDatabase: /dc-1, Got response [3:7591329213345517170:2951] SUCCESS 2026-01-04T02:34:26.804011Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7591329213345517167:2948] ActorId: [3:7591329213345517168:2949] Database: /dc-1 ExecutionId: aa2398a8-3c16f05a-6f172f96-9148bc90. Extracted script execution operation [3:7591329213345517170:2951], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [3:7591329191870679997:2673], LeaseGeneration: 0 2026-01-04T02:34:26.804038Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7591329213345517167:2948] ActorId: [3:7591329213345517168:2949] Database: /dc-1 ExecutionId: aa2398a8-3c16f05a-6f172f96-9148bc90. 
Reply success 2026-01-04T02:34:26.804875Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=YmM5MTZlNjctMmI1M2Y4ZTAtMTAxMGE4MTEtMjYwYjNiNmQ=, workerId: [3:7591329213345517172:2518], local sessions count: 0 2026-01-04T02:34:26.886502Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3dp8p44r0j3b8thwahjrzg", Request has 18444976579642.665144s seconds to be completed 2026-01-04T02:34:26.888852Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3dp8p44r0j3b8thwahjrzg", Created new session, sessionId: ydb://session/3?node_id=3&id=OGUwOTZkNWEtMzE3NjUxNTEtMzljODg1MzktNzZjZWJiYzA=, workerId: [3:7591329213345517214:2533], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-04T02:34:26.889047Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3dp8p44r0j3b8thwahjrzg 2026-01-04T02:34:26.914410Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3dp8py2m5m2pdr0z35ren2, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=OGUwOTZkNWEtMzE3NjUxNTEtMzljODg1MzktNzZjZWJiYzA=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 42, targetId: [3:7591329213345517214:2533] 2026-01-04T02:34:26.914462Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 42 timeout: 600.000000s actor id: [3:7591329213345517217:2965] 2026-01-04T02:34:26.936001Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:34:26.957127Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3dp8py2m5m2pdr0z35ren2", Forwarded response to sender actor, requestId: 42, sender: [3:7591329213345517216:2534], selfId: [3:7591329170395842446:2073], source: [3:7591329213345517214:2533] --------------------------- INIT FINISHED --------------------------- 2026-01-04T02:34:26.970514Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. SelfId: [3:7591329213345517276:3019] Owner: [3:7591329213345517275:3018]. Describe result: PathErrorUnknown 2026-01-04T02:34:26.970541Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:211: Table test_table updater. SelfId: [3:7591329213345517276:3019] Owner: [3:7591329213345517275:3018]. Creating table 2026-01-04T02:34:26.970577Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7591329213345517276:3019] Owner: [3:7591329213345517275:3018]. Created ESchemeOpCreateTable transaction for path: /dc-1/test/test_table 2026-01-04T02:34:26.973760Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715687:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:26.977078Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7591329213345517276:3019] Owner: [3:7591329213345517275:3018]. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715687 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 46 } 2026-01-04T02:34:26.977126Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7591329213345517276:3019] Owner: [3:7591329213345517275:3018]. Subscribe on create table tx: 281474976715687 2026-01-04T02:34:26.980084Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7591329213345517276:3019] Owner: [3:7591329213345517275:3018]. Subscribe on tx: 281474976715687 registered 2026-01-04T02:34:27.016515Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7591329213345517276:3019] Owner: [3:7591329213345517275:3018]. Request: create. Transaction completed: 281474976715687. Doublechecking... 2026-01-04T02:34:27.098319Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7591329213345517276:3019] Owner: [3:7591329213345517275:3018]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-04T02:34:27.098358Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7591329213345517276:3019] Owner: [3:7591329213345517275:3018]. Column diff is empty, finishing 2026-01-04T02:34:27.137646Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3dp8y100y0awfhhdfv21av", Request has 18444976579642.414003s seconds to be completed 2026-01-04T02:34:27.140309Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3dp8y100y0awfhhdfv21av", Created new session, sessionId: ydb://session/3?node_id=3&id=MWU1NmY4ZjAtY2UxZTVjM2MtNTY4ZjI3NWUtNWI3NTg1YmE=, workerId: [3:7591329217640484657:2543], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-04T02:34:27.140550Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3dp8y100y0awfhhdfv21av 2026-01-04T02:34:27.175546Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7591329217640484663:3079] Owner: [3:7591329217640484662:3078]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-04T02:34:27.175582Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7591329217640484663:3079] Owner: [3:7591329217640484662:3078]. Column diff is empty, finishing 2026-01-04T02:34:27.175673Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7591329217640484663:3079] Owner: [3:7591329217640484662:3078]. Created ESchemeOpModifyACL transaction for path: /dc-1/test/test_table 2026-01-04T02:34:27.176688Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715688:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:34:27.178128Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7591329217640484663:3079] Owner: [3:7591329217640484662:3078]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976715688 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-04T02:34:27.178147Z node 3 :KQP_PROXY INFO: table_creator.cpp:361: Table test_table updater. SelfId: [3:7591329217640484663:3079] Owner: [3:7591329217640484662:3078]. 
Successful alter request: ExecComplete 2026-01-04T02:34:27.186594Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=MWU1NmY4ZjAtY2UxZTVjM2MtNTY4ZjI3NWUtNWI3NTg1YmE=, workerId: [3:7591329217640484657:2543], local sessions count: 1 2026-01-04T02:34:27.188811Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3dp8zme97694bgdybzvf5f", Request has 18444976579642.362833s seconds to be completed 2026-01-04T02:34:27.191305Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3dp8zme97694bgdybzvf5f", Created new session, sessionId: ydb://session/3?node_id=3&id=NzhiODQ4NzEtMjRhN2M3My1kY2I5N2M1NC0yNTUyODZmNg==, workerId: [3:7591329217640484675:2547], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-04T02:34:27.191521Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3dp8zme97694bgdybzvf5f 2026-01-04T02:34:27.235814Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=NzhiODQ4NzEtMjRhN2M3My1kY2I5N2M1NC0yNTUyODZmNg==, workerId: [3:7591329217640484675:2547], local sessions count: 1 2026-01-04T02:34:27.238686Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=OGUwOTZkNWEtMzE3NjUxNTEtMzljODg1MzktNzZjZWJiYzA=, workerId: [3:7591329213345517214:2533], local sessions count: 0 |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> TSchemeShardTest::AlterTableCompactionPolicy [GOOD] >> TSchemeShardTest::AlterPersQueueGroup |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> KqpBatchUpdate::SimpleOnePartition [GOOD] |87.9%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest >> TSchemeShardTest::AssignBlockStoreVolumeDuringAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter >> TSchemeShardTest::CreateWithIntermediateDirs [GOOD] >> TSchemeShardTest::DocumentApiVersion |87.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |87.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |87.9%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/service/ydb-core-kqp-ut-service |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TableCreation::CreateOldTable [GOOD] >> TSchemeShardTest::CopyLockedTableForBackup [GOOD] >> TSchemeShardTest::ConfigColumnFamily ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::RollbackTableAcl [GOOD] Test command err: 2026-01-04T02:33:56.299528Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329081567622285:2213];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:56.299812Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:56.744245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:56.744355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:56.760471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:56.901951Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329081567622095:2081] 1767494036268756 != 1767494036268759 2026-01-04T02:33:57.026099Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:57.317103Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:57.370555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:57.370589Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:57.370598Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:57.370712Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:57.463058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:34:00.627575Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:34:00.638091Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:34:00.638132Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-04T02:34:00.638171Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:34:00.639549Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7591329098747492139:2500] Owner: [1:7591329098747492131:2495]. Describe result: PathErrorUnknown 2026-01-04T02:34:00.639569Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7591329098747492136:2497] Owner: [1:7591329098747492131:2495]. Describe result: PathErrorUnknown 2026-01-04T02:34:00.639578Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7591329098747492139:2500] Owner: [1:7591329098747492131:2495]. Creating table 2026-01-04T02:34:00.639578Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7591329098747492136:2497] Owner: [1:7591329098747492131:2495]. Creating table 2026-01-04T02:34:00.639622Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7591329098747492139:2500] Owner: [1:7591329098747492131:2495]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-04T02:34:00.639630Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7591329098747492136:2497] Owner: [1:7591329098747492131:2495]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-04T02:34:00.639801Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7591329098747492138:2499] Owner: [1:7591329098747492131:2495]. Describe result: PathErrorUnknown 2026-01-04T02:34:00.639810Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7591329098747492138:2499] Owner: [1:7591329098747492131:2495]. Creating table 2026-01-04T02:34:00.639827Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7591329098747492138:2499] Owner: [1:7591329098747492131:2495]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-04T02:34:00.644728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:00.646944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:00.649402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:00.664559Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7591329098747492136:2497] Owner: [1:7591329098747492131:2495]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-04T02:34:00.664637Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7591329098747492136:2497] Owner: [1:7591329098747492131:2495]. Subscribe on create table tx: 281474976710660 2026-01-04T02:34:00.665921Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7591329098747492139:2500] Owner: [1:7591329098747492131:2495]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-04T02:34:00.665949Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7591329098747492139:2500] Owner: [1:7591329098747492131:2495]. Subscribe on create table tx: 281474976710658 2026-01-04T02:34:00.666015Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7591329098747492138:2499] Owner: [1:7591329098747492131:2495]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-04T02:34:00.666026Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7591329098747492138:2499] Owner: [1:7591329098747492131:2495]. 
Subscribe on create table tx: 281474976710659 2026-01-04T02:34:00.669711Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7591329098747492136:2497] Owner: [1:7591329098747492131:2495]. Subscribe on tx: 281474976710660 registered 2026-01-04T02:34:00.669728Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7591329098747492138:2499] Owner: [1:7591329098747492131:2495]. Subscribe on tx: 281474976710659 registered 2026-01-04T02:34:00.669737Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7591329098747492139:2500] Owner: [1:7591329098747492131:2495]. Subscribe on tx: 281474976710658 registered 2026-01-04T02:34:00.763615Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7591329098747492139:2500] Owner: [1:7591329098747492131:2495]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-01-04T02:34:00.813321Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7591329098747492136:2497] Owner: [1:7591329098747492131:2495]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-01-04T02:34:00.827588Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7591329098747492138:2499] Owner: [1:7591329098747492131:2495]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-01-04T02:34:00.842781Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7591329098747492139:2500] Owner: [1:7591329098747492131:2495]. Table already exists, number of columns: 7, has SecurityObject: true 2026-01-04T02:34:00.842826Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7591329098747492139:2500] Owner: [1:7591329098747492131:2495]. Column diff is empty, finishing 2026-01-04T02:34:00.843905Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7591329098747492139:2500] Owner: [1:7591329098747492131:2495]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2026-01-04T02:34:00.845023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:34:00.847443Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7591329098747492139:2500] Owner: [1:7591329098747492131:2495]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-04T02:34:00.847480Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table result_sets updater. SelfId: [1:7591329098747492139:2500] Owner: [1:7591329098747492131:2495]. Successful alter request: ExecComplete 2026-01-04T02:34:00.910264Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7591329098747492136:2497] Owner: [1:7591329098747492131:2495]. Table already exists, number of columns: 33, has SecurityObject: true 2026-01-04T02:34:00.910342Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7591329098747492136:2497] Owner: [1:7591329098747492131:2495]. 
Column diff is empty, finishing 2026-01-04T02:34:00.910454Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7591329098747492136:2497] Owner: [1:7591329098747492131:2495]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2026-01-04T02:34:00.911334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:34:00.913031Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7591329098747492136:2497] Owner: [1:7591329098747492131:2495]. TEvProposeTransactionStatus: { Status: 48 ... (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline, lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-01-04T02:34:28.495000Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=M2MyMjNhMy04NGNkNzk5ZS03YzgxZjA0NS05Zjg5NGRkOA==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 42, targetId: [3:7591329221800988822:2527] 2026-01-04T02:34:28.495039Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 42 timeout: 300.000000s actor id: [3:7591329221800988824:2970] 2026-01-04T02:34:28.505333Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 42, sender: [3:7591329221800988823:2528], selfId: [3:7591329178851314152:2163], source: [3:7591329221800988822:2527] 2026-01-04T02:34:28.506151Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7591329221800988819:2968], ActorId: [3:7591329221800988820:2969], TraceId: ExecutionId: 17f98601-98d5c120-a0acc2e0-2482f5bf, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=M2MyMjNhMy04NGNkNzk5ZS03YzgxZjA0NS05Zjg5NGRkOA==, TxId: 2026-01-04T02:34:28.560133Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7591329221800988819:2968], ActorId: [3:7591329221800988820:2969], TraceId: ExecutionId: 17f98601-98d5c120-a0acc2e0-2482f5bf, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=M2MyMjNhMy04NGNkNzk5ZS03YzgxZjA0NS05Zjg5NGRkOA==, TxId: 2026-01-04T02:34:28.560416Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7591329221800988819:2968], ActorId: [3:7591329221800988820:2969], TraceId: ExecutionId: 17f98601-98d5c120-a0acc2e0-2482f5bf, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2026-01-04T02:34:28.560580Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7591329221800988818:2967], ActorId: [3:7591329221800988819:2968], TraceId: ExecutionId: 17f98601-98d5c120-a0acc2e0-2482f5bf, RequestDatabase: /dc-1, Got response 
[3:7591329221800988820:2969] SUCCESS 2026-01-04T02:34:28.560646Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7591329221800988817:2966] ActorId: [3:7591329221800988818:2967] Database: /dc-1 ExecutionId: 17f98601-98d5c120-a0acc2e0-2482f5bf. Extracted script execution operation [3:7591329221800988820:2969], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [3:7591329200326151608:2674], LeaseGeneration: 0 2026-01-04T02:34:28.560675Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7591329221800988817:2966] ActorId: [3:7591329221800988818:2967] Database: /dc-1 ExecutionId: 17f98601-98d5c120-a0acc2e0-2482f5bf. Reply success 2026-01-04T02:34:28.562701Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=M2MyMjNhMy04NGNkNzk5ZS03YzgxZjA0NS05Zjg5NGRkOA==, workerId: [3:7591329221800988822:2527], local sessions count: 0 2026-01-04T02:34:28.608430Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3dpabz9bzhg2868cf9xf93", Request has 18444976579640.943215s seconds to be completed 2026-01-04T02:34:28.611240Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3dpabz9bzhg2868cf9xf93", Created new session, sessionId: ydb://session/3?node_id=3&id=NzcxMTdiZGItM2ZhOTczZDEtYWVhZDc3NWMtMWY2MDBjYWI=, workerId: [3:7591329221800988857:2541], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-04T02:34:28.611486Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3dpabz9bzhg2868cf9xf93 2026-01-04T02:34:28.629130Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3dpacm1vhhq3v0j82z2b7d, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=NzcxMTdiZGItM2ZhOTczZDEtYWVhZDc3NWMtMWY2MDBjYWI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 44, targetId: [3:7591329221800988857:2541] 2026-01-04T02:34:28.629185Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 44 timeout: 600.000000s actor id: [3:7591329221800988860:2979] 2026-01-04T02:34:28.646901Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:34:28.654095Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3dpacm1vhhq3v0j82z2b7d", Forwarded response to sender actor, requestId: 44, sender: [3:7591329221800988859:2542], selfId: [3:7591329178851314152:2163], source: [3:7591329221800988857:2541] --------------------------- INIT FINISHED --------------------------- 2026-01-04T02:34:28.672275Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. SelfId: [3:7591329221800988871:2986] Owner: [3:7591329221800988870:2985]. Describe result: PathErrorUnknown 2026-01-04T02:34:28.677844Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:211: Table test_table updater. SelfId: [3:7591329221800988871:2986] Owner: [3:7591329221800988870:2985]. 
Creating table 2026-01-04T02:34:28.677938Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7591329221800988871:2986] Owner: [3:7591329221800988870:2985]. Created ESchemeOpCreateTable transaction for path: /dc-1/test/test_table 2026-01-04T02:34:28.690470Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:28.694751Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7591329221800988871:2986] Owner: [3:7591329221800988870:2985]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710688 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 46 } 2026-01-04T02:34:28.694806Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7591329221800988871:2986] Owner: [3:7591329221800988870:2985]. Subscribe on create table tx: 281474976710688 2026-01-04T02:34:28.696802Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7591329221800988871:2986] Owner: [3:7591329221800988870:2985]. Subscribe on tx: 281474976710688 registered 2026-01-04T02:34:28.740308Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7591329221800988871:2986] Owner: [3:7591329221800988870:2985]. Request: create. Transaction completed: 281474976710688. Doublechecking... 2026-01-04T02:34:28.823881Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7591329221800988871:2986] Owner: [3:7591329221800988870:2985]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-04T02:34:28.823924Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7591329221800988871:2986] Owner: [3:7591329221800988870:2985]. Column diff is empty, finishing 2026-01-04T02:34:28.860019Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3dpakvadyzpj2tf5fwqasp", Request has 18444976579640.691637s seconds to be completed 2026-01-04T02:34:28.862929Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3dpakvadyzpj2tf5fwqasp", Created new session, sessionId: ydb://session/3?node_id=3&id=ZWZhMDlhN2ItYjliNjBiZWYtNzQwNjUwZmUtMTNiMTI5ZTc=, workerId: [3:7591329221800989006:2553], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-04T02:34:28.863175Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3dpakvadyzpj2tf5fwqasp 2026-01-04T02:34:28.907369Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7591329221800989012:3093] Owner: [3:7591329221800989011:3092]. Table already exists, number of columns: 3, has SecurityObject: true 2026-01-04T02:34:28.907407Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7591329221800989012:3093] Owner: [3:7591329221800989011:3092]. Column diff is empty, finishing 2026-01-04T02:34:28.907519Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7591329221800989012:3093] Owner: [3:7591329221800989011:3092]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/test/test_table 2026-01-04T02:34:28.908588Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710689:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:34:28.910625Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7591329221800989012:3093] Owner: [3:7591329221800989011:3092]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710689 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-04T02:34:28.910645Z node 3 :KQP_PROXY INFO: table_creator.cpp:361: Table test_table updater. SelfId: [3:7591329221800989012:3093] Owner: [3:7591329221800989011:3092]. Successful alter request: ExecComplete 2026-01-04T02:34:28.925862Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=ZWZhMDlhN2ItYjliNjBiZWYtNzQwNjUwZmUtMTNiMTI5ZTc=, workerId: [3:7591329221800989006:2553], local sessions count: 1 2026-01-04T02:34:28.928265Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3dpanz3c439saq6k0xq9m4", Request has 18444976579640.623379s seconds to be completed 2026-01-04T02:34:28.930982Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3dpanz3c439saq6k0xq9m4", Created new session, sessionId: ydb://session/3?node_id=3&id=NjdhZjQ2NDMtOWU2OWViNWUtZTNjY2JhZTYtZGFiMGJiOTU=, workerId: [3:7591329221800989024:2557], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-04T02:34:28.931199Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3dpanz3c439saq6k0xq9m4 2026-01-04T02:34:28.967921Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=NjdhZjQ2NDMtOWU2OWViNWUtZTNjY2JhZTYtZGFiMGJiOTU=, workerId: [3:7591329221800989024:2557], local sessions count: 1 2026-01-04T02:34:28.968941Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=NzcxMTdiZGItM2ZhOTczZDEtYWVhZDc3NWMtMWY2MDBjYWI=, workerId: [3:7591329221800988857:2541], local sessions count: 0 |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest |88.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |88.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_restore/ydb-core-tx-schemeshard-ut_restore >> TableWriter::Backup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::SimpleOnePartition [GOOD] Test command err: Trying to start YDB, gRPC: 14027, MsgBus: 2791 2026-01-04T02:33:37.500543Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328999946402653:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:37.500625Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:37.732506Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:37.732605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:37.771607Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:37.807723Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:37.834974Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:37.839738Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328999946402625:2081] 1767494017498699 != 1767494017498702 2026-01-04T02:33:37.946433Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:37.946460Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:37.946466Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:37.946564Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:38.038493Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:38.371123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:38.460436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:38.510996Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:38.621123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:38.865824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:38.997980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:40.892067Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329012831306387:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:40.892255Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:40.893298Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329012831306397:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:40.893379Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.176926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.220162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.260860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.305024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.355106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.435869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.474245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.526173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.664123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329017126274564:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.664208Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.664667Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329017126274569:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.664710Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329017126274570:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.664973Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.669256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:41.685377Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329017126274573:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:33:41.783520Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329017126274624:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:42.502587Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328999946402653:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:42.502661Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 63486, MsgBus: 13744 2026-01-04T02:33:50.105860Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:50.106023Z node 2 :METADATA_PROVIDER ER ... lt\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 16957, MsgBus: 10189 2026-01-04T02:34:15.795469Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591329165078951067:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:34:15.795937Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:34:15.835911Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:34:15.945387Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:34:15.945729Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:34:15.960378Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:34:15.961980Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591329165078950946:2081] 1767494055789227 != 1767494055789230 2026-01-04T02:34:15.976406Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:16.086441Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:34:16.086465Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:34:16.086475Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:34:16.086558Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:34:16.121026Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:34:16.815299Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:34:16.843391Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:34:16.860276Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:34:16.880805Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:17.043517Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:17.230271Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:17.316580Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:20.795819Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591329165078951067:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:34:20.795900Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:34:21.098096Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329190848756608:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:21.098166Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:21.098449Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329190848756617:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:21.098489Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:21.247525Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:21.291547Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:21.331381Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:21.375875Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:21.415258Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:21.478968Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:21.554210Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:21.632756Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:21.734579Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329190848757486:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:21.734656Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:21.734923Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329190848757491:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:21.734965Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329190848757492:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:21.735226Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:21.739520Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:34:21.753463Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591329190848757495:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:34:21.829793Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591329190848757548:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TSchemeShardTest::AssignBlockStoreCheckVersionInAlter [GOOD] >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter >> TableWriter::Restore [GOOD] >> TSchemeShardTest::DocumentApiVersion [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Dir |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Backup [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TableWriter::Restore [GOOD] |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest >> TSchemeShardCheckProposeSize::CopyTables [GOOD] >> TSchemeShardCountersTest::PathsCounterDecrementsOnFail >> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb >> IncrementalRestoreScan::ChangeSenderEmpty ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> TableCreation::CreateOldTable [GOOD] Test command err: 2026-01-04T02:33:55.918285Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329076782053668:2251];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:55.918342Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:56.253379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:56.253498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:56.256370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:56.328640Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329076782053455:2081] 1767494035906394 != 1767494035906397 2026-01-04T02:33:56.361265Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:56.790325Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:56.790348Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:56.790367Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:56.790445Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:56.918028Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:56.949638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:56.966821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:34:00.918325Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329076782053668:2251];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:34:00.918410Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:34:01.326626Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:34:01.357309Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:34:01.357356Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-04T02:34:01.357383Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:34:01.359100Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7591329102551858070:2498] Owner: [1:7591329102551858069:2497]. Describe result: PathErrorUnknown 2026-01-04T02:34:01.359126Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7591329102551858070:2498] Owner: [1:7591329102551858069:2497]. Creating table 2026-01-04T02:34:01.359173Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7591329102551858070:2498] Owner: [1:7591329102551858069:2497]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-04T02:34:01.360029Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7591329102551858072:2500] Owner: [1:7591329102551858069:2497]. Describe result: PathErrorUnknown 2026-01-04T02:34:01.360035Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7591329102551858072:2500] Owner: [1:7591329102551858069:2497]. Creating table 2026-01-04T02:34:01.360061Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7591329102551858072:2500] Owner: [1:7591329102551858069:2497]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-04T02:34:01.360154Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7591329102551858071:2499] Owner: [1:7591329102551858069:2497]. Describe result: PathErrorUnknown 2026-01-04T02:34:01.360172Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7591329102551858071:2499] Owner: [1:7591329102551858069:2497]. Creating table 2026-01-04T02:34:01.360196Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7591329102551858071:2499] Owner: [1:7591329102551858069:2497]. 
Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-04T02:34:01.373325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:01.375967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:01.377639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:01.396134Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7591329102551858070:2498] Owner: [1:7591329102551858069:2497]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-04T02:34:01.396203Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7591329102551858070:2498] Owner: [1:7591329102551858069:2497]. Subscribe on create table tx: 281474976715658 2026-01-04T02:34:01.397995Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7591329102551858072:2500] Owner: [1:7591329102551858069:2497]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-04T02:34:01.398026Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7591329102551858072:2500] Owner: [1:7591329102551858069:2497]. Subscribe on create table tx: 281474976715659 2026-01-04T02:34:01.398128Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7591329102551858071:2499] Owner: [1:7591329102551858069:2497]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976715660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-04T02:34:01.398139Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7591329102551858071:2499] Owner: [1:7591329102551858069:2497]. Subscribe on create table tx: 281474976715660 2026-01-04T02:34:01.399783Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7591329102551858070:2498] Owner: [1:7591329102551858069:2497]. Subscribe on tx: 281474976715658 registered 2026-01-04T02:34:01.399994Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7591329102551858072:2500] Owner: [1:7591329102551858069:2497]. Subscribe on tx: 281474976715659 registered 2026-01-04T02:34:01.400826Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7591329102551858071:2499] Owner: [1:7591329102551858069:2497]. 
Subscribe on tx: 281474976715660 registered 2026-01-04T02:34:01.589913Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7591329102551858070:2498] Owner: [1:7591329102551858069:2497]. Request: create. Transaction completed: 281474976715658. Doublechecking... 2026-01-04T02:34:01.647603Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_executions updater. SelfId: [1:7591329102551858070:2498] Owner: [1:7591329102551858069:2497]. Table already exists, number of columns: 33, has SecurityObject: true 2026-01-04T02:34:01.647701Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_executions updater. SelfId: [1:7591329102551858070:2498] Owner: [1:7591329102551858069:2497]. Column diff is empty, finishing 2026-01-04T02:34:01.648796Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7591329102551858070:2498] Owner: [1:7591329102551858069:2497]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_executions 2026-01-04T02:34:01.649868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:34:01.651704Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7591329102551858070:2498] Owner: [1:7591329102551858069:2497]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976715661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-04T02:34:01.651719Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table script_executions updater. SelfId: [1:7591329102551858070:2498] Owner: [1:7591329102551858069:2497]. Successful alter request: ExecComplete 2026-01-04T02:34:01.684147Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7591329102551858072:2500] Owner: [1:7591329102551858069:2497]. Request: create. Transaction completed: 281474976715659. Doublechecking... 2026-01-04T02:34:01.689779Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7591329102551858071:2499] Owner: [1:7591329102551858069:2497]. Request: create. Transaction completed: 281474976715660. Doublechecking... 2026-01-04T02:34:01.774528Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_execution_leases updater. SelfId: [1:7591329102551858071:2499] Owner: [1:7591329102551858069:2497]. Table already exists, number of columns: 6, has SecurityObject: true 2026-01-04T02:34:01.774566Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_execution_leases updater. SelfId: [1:7591329102551858071:2499] Owner: [1:7591329102551858069:2497]. Column diff is empty, finishing 2026-01-04T02:34:01.774661Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7591329102551858071:2499] Owner: [1:7591329102551858069:2497]. Created ESch ... ExecutionId: a83e9c4f-76b13511-56bfb01b-f585ad8b, RequestDatabase: /dc-1, Bootstrap. 
Database: /dc-1, IsSystemUser: 1, run create session 2026-01-04T02:34:29.587935Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976579639.963708s seconds to be completed 2026-01-04T02:34:29.590894Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=3&id=ZGM4ZDdhZDEtMTcwMzBmYWEtZjQ5YWJjNC1iNmE1NzFmYQ==, workerId: [3:7591329225919084804:2520], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-04T02:34:29.591171Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-04T02:34:29.591661Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7591329225919084801:2959], ActorId: [3:7591329225919084802:2960], TraceId: ExecutionId: a83e9c4f-76b13511-56bfb01b-f585ad8b, RequestDatabase: /dc-1, RunDataQuery with SessionId: ydb://session/3?node_id=3&id=ZGM4ZDdhZDEtMTcwMzBmYWEtZjQ5YWJjNC1iNmE1NzFmYQ==, TxId: , text: -- TGetScriptExecutionOperationQueryActor::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT run_script_actor_id, operation_status, execution_status, finalization_status, query_text, syntax, execution_mode, result_set_metas, plan, plan_compressed, plan_compression_method, issues, transient_issues, stats, ast, ast_compressed, ast_compression_method, graph_compressed IS NOT NULL AS has_graph, retry_state, user_token FROM `.metadata/script_executions` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); SELECT lease_deadline, lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-01-04T02:34:29.592095Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=ZGM4ZDdhZDEtMTcwMzBmYWEtZjQ5YWJjNC1iNmE1NzFmYQ==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 40, targetId: [3:7591329225919084804:2520] 2026-01-04T02:34:29.592139Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 40 timeout: 300.000000s actor id: [3:7591329225919084806:2961] 2026-01-04T02:34:29.608285Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 40, sender: [3:7591329225919084805:2521], selfId: [3:7591329182969410090:2083], source: [3:7591329225919084804:2520] 2026-01-04T02:34:29.609612Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7591329225919084801:2959], ActorId: [3:7591329225919084802:2960], TraceId: ExecutionId: a83e9c4f-76b13511-56bfb01b-f585ad8b, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=ZGM4ZDdhZDEtMTcwMzBmYWEtZjQ5YWJjNC1iNmE1NzFmYQ==, TxId: 2026-01-04T02:34:29.610413Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7591329225919084801:2959], ActorId: [3:7591329225919084802:2960], TraceId: ExecutionId: a83e9c4f-76b13511-56bfb01b-f585ad8b, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=ZGM4ZDdhZDEtMTcwMzBmYWEtZjQ5YWJjNC1iNmE1NzFmYQ==, TxId: 2026-01-04T02:34:29.610457Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7591329225919084801:2959], ActorId: [3:7591329225919084802:2960], TraceId: ExecutionId: a83e9c4f-76b13511-56bfb01b-f585ad8b, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2026-01-04T02:34:29.610600Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7591329225919084800:2958], ActorId: [3:7591329225919084801:2959], TraceId: ExecutionId: a83e9c4f-76b13511-56bfb01b-f585ad8b, RequestDatabase: /dc-1, Got response [3:7591329225919084802:2960] SUCCESS 2026-01-04T02:34:29.610665Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7591329225919084799:2957] ActorId: [3:7591329225919084800:2958] Database: /dc-1 ExecutionId: a83e9c4f-76b13511-56bfb01b-f585ad8b. Extracted script execution operation [3:7591329225919084802:2960], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [3:7591329204444247620:2675], LeaseGeneration: 0 2026-01-04T02:34:29.610696Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7591329225919084799:2957] ActorId: [3:7591329225919084800:2958] Database: /dc-1 ExecutionId: a83e9c4f-76b13511-56bfb01b-f585ad8b. 
Reply success 2026-01-04T02:34:29.614477Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=ZGM4ZDdhZDEtMTcwMzBmYWEtZjQ5YWJjNC1iNmE1NzFmYQ==, workerId: [3:7591329225919084804:2520], local sessions count: 0 2026-01-04T02:34:29.652561Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3dpbcmbv62etjtecwv55js", Request has 18444976579639.899088s seconds to be completed 2026-01-04T02:34:29.655042Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3dpbcmbv62etjtecwv55js", Created new session, sessionId: ydb://session/3?node_id=3&id=MzUzYTk5MmQtMjI0ZjBlZmUtNmQwNDlkNDUtY2Q5NGMyNg==, workerId: [3:7591329225919084841:2534], database: /dc-1, longSession: 1, local sessions count: 1 2026-01-04T02:34:29.655270Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3dpbcmbv62etjtecwv55js 2026-01-04T02:34:29.673163Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3dpbd7btpr93qakf6w48f4, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=MzUzYTk5MmQtMjI0ZjBlZmUtNmQwNDlkNDUtY2Q5NGMyNg==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 42, targetId: [3:7591329225919084841:2534] 2026-01-04T02:34:29.673224Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 42 timeout: 600.000000s actor id: [3:7591329225919084844:2970] 2026-01-04T02:34:29.689657Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:34:29.697511Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3dpbd7btpr93qakf6w48f4", Forwarded response to sender actor, requestId: 42, sender: [3:7591329225919084843:2535], selfId: [3:7591329182969410090:2083], source: [3:7591329225919084841:2534] --------------------------- INIT FINISHED --------------------------- 2026-01-04T02:34:29.712766Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:191: Table test_table updater. SelfId: [3:7591329225919084855:2977] Owner: [3:7591329225919084854:2976]. Describe result: PathErrorUnknown 2026-01-04T02:34:29.712796Z node 3 :KQP_PROXY NOTICE: table_creator.cpp:211: Table test_table updater. SelfId: [3:7591329225919084855:2977] Owner: [3:7591329225919084854:2976]. Creating table 2026-01-04T02:34:29.712840Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:112: Table test_table updater. SelfId: [3:7591329225919084855:2977] Owner: [3:7591329225919084854:2976]. Created ESchemeOpCreateTable transaction for path: /dc-1/test/test_table 2026-01-04T02:34:29.728843Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:29.733195Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:229: Table test_table updater. SelfId: [3:7591329225919084855:2977] Owner: [3:7591329225919084854:2976]. 
TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710687 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 46 } 2026-01-04T02:34:29.736464Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:304: Table test_table updater. SelfId: [3:7591329225919084855:2977] Owner: [3:7591329225919084854:2976]. Subscribe on create table tx: 281474976710687 2026-01-04T02:34:29.737086Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:329: Table test_table updater. SelfId: [3:7591329225919084855:2977] Owner: [3:7591329225919084854:2976]. Subscribe on tx: 281474976710687 registered 2026-01-04T02:34:29.782538Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:334: Table test_table updater. SelfId: [3:7591329225919084855:2977] Owner: [3:7591329225919084854:2976]. Request: create. Transaction completed: 281474976710687. Doublechecking... 2026-01-04T02:34:29.845057Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7591329225919084855:2977] Owner: [3:7591329225919084854:2976]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:34:29.845112Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7591329225919084855:2977] Owner: [3:7591329225919084854:2976]. Column diff is empty, finishing 2026-01-04T02:34:29.845877Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:221: Table test_table updater. SelfId: [3:7591329225919084981:3077] Owner: [3:7591329225919084980:3076]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:34:29.845916Z node 3 :KQP_PROXY DEBUG: table_creator.cpp:441: Table test_table updater. SelfId: [3:7591329225919084981:3077] Owner: [3:7591329225919084980:3076]. Column diff is empty, finishing 2026-01-04T02:34:29.871367Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3dpbkedme9d9v24v7awzw1", Request has 18444976579639.680280s seconds to be completed 2026-01-04T02:34:29.874140Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3dpbkedme9d9v24v7awzw1", Created new session, sessionId: ydb://session/3?node_id=3&id=ZDgyNGRkZWQtNjAxZDExYzQtNjBlMmU0ZDUtMmUxODRmYTc=, workerId: [3:7591329225919084987:2544], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-04T02:34:29.874389Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3dpbkedme9d9v24v7awzw1 2026-01-04T02:34:29.906426Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=ZDgyNGRkZWQtNjAxZDExYzQtNjBlMmU0ZDUtMmUxODRmYTc=, workerId: [3:7591329225919084987:2544], local sessions count: 1 2026-01-04T02:34:29.906752Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=MzUzYTk5MmQtMjI0ZjBlZmUtNmQwNDlkNDUtY2Q5NGMyNg==, workerId: [3:7591329225919084841:2534], local sessions count: 0 |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> TMiniKQLProtoTestYdb::TestExportOptionalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListTypeYdb >> TMiniKQLEngineFlatTest::TestEmptyProgram >> TSchemeShardTest::ConfigColumnFamily [GOOD] >> TSchemeShardTest::BlockStoreVolumeLimits >> TMiniKQLProtoTestYdb::TestExportListTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportIntegralYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb >> TMiniKQLEngineFlatTest::TestEmptyProgram [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRow [GOOD] >> 
TMiniKQLEngineFlatTest::TestEraseRowNullKey >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb >> TMiniKQLEngineFlatTest::TestEraseRowNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowManyShards >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalNotEmptyYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalYdb >> TMiniKQLEngineFlatTest::TestEraseRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Success >> TMiniKQLProtoTestYdb::TestExportOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportListYdb >> TMiniKQLEngineFlatTest::TestCASBoth2Success [GOOD] >> TMiniKQLEngineFlatTest::TestEraseRowNoShards >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMiniKQLProtoTestYdb::TestExportListYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb >> TMiniKQLEngineFlatTest::TestEraseRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestDiagnostics >> TSchemeShardTest::MultipleColumnFamilies [GOOD] >> TSchemeShardTest::MultipleColumnFamiliesWithStorage >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb >> TMiniKQLEngineFlatTest::TestDiagnostics [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb >> TMiniKQLEngineFlatTest::TestCombineByKeyPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown >> TSchemeShardCountersTest::PathsCounterDecrementsOnFail [GOOD] >> TSchemeShardDecimalTypesInTables::Parameterless >> TSchemeShardTest::AssignBlockStoreCheckFillGenerationInAlter [GOOD] >> TSchemeShardTest::BlockStoreNonreplVolumeLimits >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportMultipleOptionalVariantOptionalNotNullYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType >> TMiniKQLEngineFlatTest::TestCombineByKeyNoPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestLengthPushdown >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] >> TMiniKQLEngineFlatTest::TestLengthPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestInternalResult [GOOD] >> TMiniKQLEngineFlatTest::TestIndependentSelects >> TMiniKQLEngineFlatTest::TestIndependentSelects [GOOD] >> TMiniKQLEngineFlatTest::TestCrossTableRs |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TMiniKQLProgramBuilderTest::TestEraseRowStaticKey [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Dir [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Table >> BasicUsage::AlterTopicWithSharedConsumer_DisableDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_SetDeleteDeadLetterPolicy >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMiniKQLProgramBuilderTest::TestEraseRowPartialDynamicKey [GOOD] >> 
TMiniKQLProgramBuilderTest::TestSelectRow >> TMiniKQLProgramBuilderTest::TestSelectRow [GOOD] >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey >> TMiniKQLProgramBuilderTest::TestUpdateRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange >> TMiniKQLProgramBuilderTest::TestSelectFromExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange >> TMiniKQLProgramBuilderTest::TestSelectToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportOptionalVariantOptionalYdbType [GOOD] |88.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName >> TMiniKQLProgramBuilderTest::TestSelectToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange >> TMiniKQLProgramBuilderTest::TestSelectBothFromInclusiveToInclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange |88.0%| [LD] {RESULT} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/mind/hive/ut/ydb-core-mind-hive-ut |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestSelectBothFromExclusiveToExclusiveRange [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterName [GOOD] >> TMiniKQLProgramBuilderTest::TestInvalidParameterType |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] >> KqpBatchUpdate::Large_1 [GOOD] >> TSchemeShardTest::AlterPersQueueGroup [GOOD] >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestCrossTableRs [GOOD] Test command err: SetProgram (370): ydb/core/engine/mkql_engine_flat.cpp:183: ExtractResultType(): requirement !label.StartsWith(TxInternalResultPrefix) failed. 
Label can't be used in SetResult as it's reserved for internal purposes: __cantuse PrepareShardPrograms (491): too many shard readsets (1 > 0), src tables: [200:301:0], dst tables: [200:302:0] Type { Kind: Struct } |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest |88.0%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |88.0%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query |88.0%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/ydb-core-tx-schemeshard-ut_streaming_query >> TSchemeShardTest::BlockStoreVolumeLimits [GOOD] >> TSchemeShardTest::BlockStoreSystemVolumeLimits >> TSchemeShardDecimalTypesInTables::Parameterless [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false >> TSchemeShardTest::BlockStoreNonreplVolumeLimits [GOOD] >> TSchemeShardTest::AlterTableWithCompactionStrategies >> TSchemeShardTest::CopyTableForBackup [GOOD] >> TSchemeShardTest::CreateBlockStoreVolume |88.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestInvalidParameterType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." 
ErrorCode: BAD_REQUEST } |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TSchemeShardTest::MultipleColumnFamiliesWithStorage [GOOD] >> TSchemeShardTest::ParallelModifying >> TResourcePoolTest::CreateResourcePool >> TSchemeShardTest::DisablePublicationsOfDropping_Table [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable >> TSchemeShardTest::AlterPersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::AlterBlockStoreVolume >> TSchemeShardTest::CreateBlockStoreVolume [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_1 [GOOD] Test command err: Trying to start YDB, gRPC: 24381, MsgBus: 25502 2026-01-04T02:33:48.989879Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329047495205607:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:48.992500Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:49.298028Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:49.310364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:49.310467Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:49.361110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:49.449469Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:49.562565Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:49.562587Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:49.562599Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:49.562691Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:49.603951Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:49.998116Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:50.582012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:50.601791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:33:50.712700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:50.920297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:51.129102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:51.245979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:53.985914Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329047495205607:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:53.985984Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:33:54.463471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329073265011133:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:54.463598Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:54.478563Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329073265011143:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:54.478671Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:54.878827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:54.931090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:54.970443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.020530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.065133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.128534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.206735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.280237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.394988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329077559979311:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.395092Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.395522Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329077559979317:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.395566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329077559979316:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.395842Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.403059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:55.430707Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329077559979320:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:33:55.528313Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329077559979373:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:57.365183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... error=scheme_cache_undelivered_message; 2026-01-04T02:34:24.183364Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:34:24.526041Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:34:24.570004Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591329203196829463:2081] 1767494064037328 != 1767494064037331 2026-01-04T02:34:24.584449Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:24.585607Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:34:24.585697Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:34:24.658128Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:34:25.010070Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:34:25.010097Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:34:25.010107Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:34:25.010203Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:34:25.061743Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:34:25.083316Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:34:26.073765Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:34:26.106694Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:34:26.117787Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:26.247615Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:26.475702Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:26.648320Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:29.046865Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591329203196829498:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:34:29.046953Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:34:29.109313Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329224671667801:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:29.109429Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:29.110525Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329224671667811:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:29.110590Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:29.193947Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:29.283795Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:29.341837Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:29.397159Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:29.444232Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:29.499112Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:29.557146Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:29.616662Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:29.716886Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329224671668684:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:29.717029Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:29.717433Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329224671668689:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:29.717487Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329224671668690:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:29.717531Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:29.721996Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:34:29.741754Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591329224671668693:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:34:29.836990Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591329224671668744:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:34:31.731380Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpProxy::DatabasesCacheForServerless [GOOD] >> TxUsage::WriteToTopic_Demo_19_RestartAfterCommit_Query [GOOD] >> ScriptExecutionsTest::TestSecureScriptExecutions [GOOD] >> TResourcePoolTest::ReadOnlyMode >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true >> TResourcePoolTest::ParallelCreateResourcePool >> TSchemeShardTest::AlterTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::AlterTopicOverDiskSpaceQuotas >> TResourcePoolTest::DropResourcePoolTwice >> TxUsage::WriteToTopic_Demo_16_Query [GOOD] >> TResourcePoolTest::ParallelAlterResourcePool >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] >> TResourcePoolTest::CreateResourcePoolWithProperties >> TSchemeShardTest::CreateBlockStoreVolumeWithVolumeChannelsProfiles [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions >> ScriptExecutionsTest::BackgroundOperationRestart [GOOD] >> ScriptExecutionsTest::BackgroundOperationFinalization >> TSchemeShardTest::BlockStoreSystemVolumeLimits [GOOD] >> TSchemeShardTest::ConsistentCopyAfterDropIndexes >> TxUsage::WriteToTopic_Demo_20_RestartNo_Table >> TResourcePoolTest::DropResourcePool >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_3_Query [GOOD] >> TResourcePoolTest::ParallelCreateSameResourcePool >> TSchemeShardTest::AlterBlockStoreVolume [GOOD] >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions >> TResourcePoolTest::CreateResourcePool [GOOD] >> TSchemeShardTest::CreateBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> KqpProxy::DatabasesCacheForServerless [GOOD] Test command err: 2026-01-04T02:33:57.137199Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329087212014065:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:57.141651Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:57.270371Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.020420s 2026-01-04T02:33:57.344761Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329088188415919:2221];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:57.344808Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:57.530269Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:57.450185Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591329085777860765:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:57.457828Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:57.476889Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:57.477198Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591329086991827308:2281];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:57.477266Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:57.654500Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591329084849351939:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:57.662950Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:58.250410Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:58.418039Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:58.426073Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:58.586074Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:58.875332Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:58.890047Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:33:58.909130Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:33:58.913940Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:58.918376Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:58.918522Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:33:58.917872Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:58.918014Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:33:58.950587Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:59.006013Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:59.006114Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:33:59.185860Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:59.186222Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:59.186308Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:59.186456Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:59.654033Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:59.744437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:59.744568Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> 
Connecting 2026-01-04T02:33:59.748589Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:59.748662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:59.748812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:59.748859Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:59.748954Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:59.748984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:59.749162Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:59.749199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:59.769845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:59.919458Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:59.929760Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:59.947770Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:33:59.947940Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:33:59.949904Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:59.949940Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-04T02:33:59.949961Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-01-04T02:33:59.949975Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:33:59.949988Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-04T02:33:59.990698Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:34:00.065867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-04T02:34:00.068346Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:00.069920Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:34:00.073404Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:34:00.073666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:34:00.073785Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Nod ... 4;actor=TTableExistsActor;event=undelivered;self_id=[10:7591329219905871281:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:34:28.843197Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:34:28.875831Z node 10 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.009652s 2026-01-04T02:34:28.918606Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:34:28.919007Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:34:28.919096Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:34:28.927606Z node 9 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2026-01-04T02:34:28.931226Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:34:28.956060Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:34:29.003400Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:34:29.003572Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:34:29.003723Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:34:29.003916Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:34:29.003976Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:34:29.004062Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:34:29.004130Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:34:29.004253Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:34:29.004352Z node 10 :HIVE WARN: tx__create_tablet.cpp:348: 
HIVE#72075186224038889 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:34:29.025680Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/test-shared/.metadata/script_executions 2026-01-04T02:34:29.034237Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:34:29.034367Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:34:29.050519Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72075186224038889 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:34:29.271119Z node 10 :STATISTICS WARN: tx_init.cpp:298: [72075186224038895] TTxInit::Complete. EnableColumnStatistics=false 2026-01-04T02:34:29.272188Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:29.488246Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:34:29.488273Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:34:29.488281Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:34:29.488409Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:34:29.513910Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:34:29.640089Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:29.760606Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:566: [WorkloadService] [TDatabaseFetcherActor] ActorId: [10:7591329224200839822:2880], Database: /Root/test-serverless, Start database fetching 2026-01-04T02:34:29.760796Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: scheme_actors.cpp:592: [WorkloadService] [TDatabaseFetcherActor] ActorId: [10:7591329224200839822:2880], Database: /Root/test-serverless, Database info successfully fetched, serverless: 1 2026-01-04T02:34:29.859400Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:34:32.953904Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7591329216492319484:2166];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:34:32.954303Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-dedicated/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:34:33.491551Z node 11 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-04T02:34:33.493047Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7591329242262124216:2356], Start check tables existence, number paths: 2 
2026-01-04T02:34:33.494923Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-04T02:34:33.494972Z node 11 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-04T02:34:33.498320Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7591329242262124216:2356], Describe table /Root/test-dedicated/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-04T02:34:33.498397Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7591329242262124216:2356], Describe table /Root/test-dedicated/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-04T02:34:33.498431Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [11:7591329242262124216:2356], Successfully finished 2026-01-04T02:34:33.498502Z node 11 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-04T02:34:33.498533Z node 11 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-04T02:34:33.826910Z node 10 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:447: [WorkloadService] [Service] Started workload service initialization 2026-01-04T02:34:33.827729Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:241: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7591329241380709235:2389], Start check tables existence, number paths: 2 2026-01-04T02:34:33.828001Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:102: [WorkloadService] [Service] Subscribed for config changes 2026-01-04T02:34:33.828021Z node 10 :KQP_WORKLOAD_SERVICE INFO: kqp_workload_service.cpp:113: [WorkloadService] [Service] Resource pools was enabled 2026-01-04T02:34:33.831543Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7591329241380709235:2389], Describe table /Root/test-shared/.metadata/workload_manager/delayed_requests status PathErrorUnknown 2026-01-04T02:34:33.831610Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:182: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7591329241380709235:2389], Describe table /Root/test-shared/.metadata/workload_manager/running_requests status PathErrorUnknown 2026-01-04T02:34:33.831650Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: table_queries.cpp:289: [WorkloadService] [TCleanupTablesActor] ActorId: [10:7591329241380709235:2389], Successfully finished 2026-01-04T02:34:33.831728Z node 10 :KQP_WORKLOAD_SERVICE DEBUG: kqp_workload_service.cpp:374: [WorkloadService] [Service] Cleanup completed, tables exists: 0 2026-01-04T02:34:33.832581Z node 10 :KQP_WORKLOAD_SERVICE TRACE: kqp_workload_service.cpp:132: [WorkloadService] [Service] Updated node info, node count: 1 2026-01-04T02:34:33.849952Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7591329219905871281:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:34:33.850039Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/test-shared/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:34:34.773112Z node 9 :HIVE 
WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 11 2026-01-04T02:34:34.773588Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-04T02:34:34.773735Z node 9 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 10 2026-01-04T02:34:34.773966Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-04T02:34:34.792821Z node 9 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:2885} SessionId: ydb://session/3?node_id=9&id=YTI0ZjA0M2ItMjI3ZWIyZjMtZWJiNTVmYzAtZGU0MTlmMDg=, ActorId: [9:7591329216186620903:2335], ActorState: ReadyState, Session closed due to explicit close event trace_id# 2026-01-04T02:34:34.792932Z node 9 :KQP_SESSION INFO: {KQPSA@kqp_session_actor.cpp:3045} SessionId: ydb://session/3?node_id=9&id=YTI0ZjA0M2ItMjI3ZWIyZjMtZWJiNTVmYzAtZGU0MTlmMDg=, ActorId: [9:7591329216186620903:2335], ActorState: ReadyState, Cleanup start is_final# true has_cleanup_ctx# false transactions_to_be_aborted_size# 0 worker_id# [0:0:0] workload_service_cleanup# false trace_id# 2026-01-04T02:34:34.792971Z node 9 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3122} SessionId: ydb://session/3?node_id=9&id=YTI0ZjA0M2ItMjI3ZWIyZjMtZWJiNTVmYzAtZGU0MTlmMDg=, ActorId: [9:7591329216186620903:2335], ActorState: ReadyState, EndCleanup is_final# true trace_id# 2026-01-04T02:34:34.793003Z node 9 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3136} SessionId: ydb://session/3?node_id=9&id=YTI0ZjA0M2ItMjI3ZWIyZjMtZWJiNTVmYzAtZGU0MTlmMDg=, ActorId: [9:7591329216186620903:2335], ActorState: unknown state, Cleanup temp tables temp_tables_size# 0 trace_id# 2026-01-04T02:34:34.793079Z node 9 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:3230} SessionId: ydb://session/3?node_id=9&id=YTI0ZjA0M2ItMjI3ZWIyZjMtZWJiNTVmYzAtZGU0MTlmMDg=, ActorId: [9:7591329216186620903:2335], ActorState: unknown state, Session actor destroyed trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::TestSecureScriptExecutions [GOOD] Test command err: 2026-01-04T02:33:51.202513Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329060353194611:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:51.202748Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:51.620171Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:51.620292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:51.817584Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329060353194476:2081] 1767494031144164 != 1767494031144167 2026-01-04T02:33:51.828677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:51.842818Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:52.429919Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:52.475168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:52.475192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:52.475198Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:52.475277Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:52.571278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:52.582064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:33:55.799620Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:33:55.813508Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:33:55.813592Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-04T02:33:55.813610Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:33:55.818388Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. SelfId: [1:7591329077533064499:2494] Owner: [1:7591329077533064496:2493]. Describe result: PathErrorUnknown 2026-01-04T02:33:55.818414Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7591329077533064499:2494] Owner: [1:7591329077533064496:2493]. Creating table 2026-01-04T02:33:55.818469Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7591329077533064499:2494] Owner: [1:7591329077533064496:2493]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-04T02:33:55.818661Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7591329077533064500:2495] Owner: [1:7591329077533064496:2493]. Describe result: PathErrorUnknown 2026-01-04T02:33:55.818668Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7591329077533064500:2495] Owner: [1:7591329077533064496:2493]. Creating table 2026-01-04T02:33:55.818683Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7591329077533064500:2495] Owner: [1:7591329077533064496:2493]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-04T02:33:55.818728Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7591329077533064501:2496] Owner: [1:7591329077533064496:2493]. Describe result: PathErrorUnknown 2026-01-04T02:33:55.818736Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7591329077533064501:2496] Owner: [1:7591329077533064496:2493]. 
Creating table 2026-01-04T02:33:55.818750Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7591329077533064501:2496] Owner: [1:7591329077533064496:2493]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-04T02:33:55.824638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.826732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.829584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.844754Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7591329077533064500:2495] Owner: [1:7591329077533064496:2493]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-04T02:33:55.844829Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7591329077533064500:2495] Owner: [1:7591329077533064496:2493]. Subscribe on create table tx: 281474976710659 2026-01-04T02:33:55.845493Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7591329077533064501:2496] Owner: [1:7591329077533064496:2493]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-04T02:33:55.845531Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7591329077533064501:2496] Owner: [1:7591329077533064496:2493]. Subscribe on create table tx: 281474976710660 2026-01-04T02:33:55.846163Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7591329077533064499:2494] Owner: [1:7591329077533064496:2493]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-04T02:33:55.846195Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7591329077533064499:2494] Owner: [1:7591329077533064496:2493]. Subscribe on create table tx: 281474976710658 2026-01-04T02:33:55.846741Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7591329077533064500:2495] Owner: [1:7591329077533064496:2493]. Subscribe on tx: 281474976710659 registered 2026-01-04T02:33:55.850615Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7591329077533064501:2496] Owner: [1:7591329077533064496:2493]. 
Subscribe on tx: 281474976710660 registered 2026-01-04T02:33:55.850643Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7591329077533064499:2494] Owner: [1:7591329077533064496:2493]. Subscribe on tx: 281474976710658 registered 2026-01-04T02:33:56.011160Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7591329077533064501:2496] Owner: [1:7591329077533064496:2493]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-01-04T02:33:56.061218Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7591329077533064500:2495] Owner: [1:7591329077533064496:2493]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-01-04T02:33:56.066621Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7591329077533064501:2496] Owner: [1:7591329077533064496:2493]. Table already exists, number of columns: 7, has SecurityObject: true 2026-01-04T02:33:56.066683Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7591329077533064501:2496] Owner: [1:7591329077533064496:2493]. Column diff is empty, finishing 2026-01-04T02:33:56.067953Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7591329077533064501:2496] Owner: [1:7591329077533064496:2493]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2026-01-04T02:33:56.072004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:33:56.075681Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7591329077533064501:2496] Owner: [1:7591329077533064496:2493]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-04T02:33:56.075719Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table result_sets updater. SelfId: [1:7591329077533064501:2496] Owner: [1:7591329077533064496:2493]. Successful alter request: ExecComplete 2026-01-04T02:33:56.079173Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7591329077533064499:2494] Owner: [1:7591329077533064496:2493]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-01-04T02:33:56.125470Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_execution_leases updater. SelfId: [1:7591329077533064500:2495] Owner: [1:7591329077533064496:2493]. Table already exists, number of columns: 6, has SecurityObject: true 2026-01-04T02:33:56.125514Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_execution_leases updater. SelfId: [1:7591329077533064500:2495] Owner: [1:7591329077533064496:2493]. Column diff is empty, finishing 2026-01-04T02:33:56.125589Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7591329077533064500:2495] Owner: [1:7591329077533064496:2493]. 
Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-04T02:33:56.126594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02: ... lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-01-04T02:34:35.702363Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=Y2ZkNDFhMzQtOGQ0OWU5MDMtZDBhNDk2YjEtZTQ0MmEzODE=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 93, targetId: [3:7591329249249234139:2716] 2026-01-04T02:34:35.702436Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 93 timeout: 300.000000s actor id: [3:7591329249249234141:3286] 2026-01-04T02:34:35.712835Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 93, sender: [3:7591329249249234140:2717], selfId: [3:7591329189119689653:2266], source: [3:7591329249249234139:2716] 2026-01-04T02:34:35.713421Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7591329249249234136:3284], ActorId: [3:7591329249249234137:3285], TraceId: ExecutionId: 915db419-f56590c4-66cc038c-91378325, RequestDatabase: /dc-1, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=3&id=Y2ZkNDFhMzQtOGQ0OWU5MDMtZDBhNDk2YjEtZTQ0MmEzODE=, TxId: 2026-01-04T02:34:35.714132Z node 3 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7591329249249234136:3284], ActorId: [3:7591329249249234137:3285], TraceId: ExecutionId: 915db419-f56590c4-66cc038c-91378325, RequestDatabase: /dc-1, Finish with SUCCESS, SessionId: ydb://session/3?node_id=3&id=Y2ZkNDFhMzQtOGQ0OWU5MDMtZDBhNDk2YjEtZTQ0MmEzODE=, TxId: 2026-01-04T02:34:35.714162Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2345: [ScriptExecutions] [TQueryBase] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7591329249249234136:3284], ActorId: [3:7591329249249234137:3285], TraceId: ExecutionId: 915db419-f56590c4-66cc038c-91378325, RequestDatabase: /dc-1, Finish, OperationStatus: SUCCESS, FinalizationStatus: -1, LeaseStatus: -1 2026-01-04T02:34:35.714269Z node 3 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TGetScriptExecutionOperationQueryActor] OwnerId: [3:7591329249249234135:3283], ActorId: [3:7591329249249234136:3284], TraceId: ExecutionId: 915db419-f56590c4-66cc038c-91378325, RequestDatabase: /dc-1, Got response [3:7591329249249234137:3285] SUCCESS 2026-01-04T02:34:35.714340Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2458: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7591329249249234134:3282] ActorId: [3:7591329249249234135:3283] Database: /dc-1 ExecutionId: 915db419-f56590c4-66cc038c-91378325. 
Extracted script execution operation [3:7591329249249234137:3285], Status: SUCCESS, Issues: , Ready: 1, LeaseExpired: 0, WaitRetry: 0, RunScriptActorId: [3:7591329244954266622:3210], LeaseGeneration: 0 2026-01-04T02:34:35.714367Z node 3 :KQP_PROXY DEBUG: kqp_script_executions.cpp:2484: [ScriptExecutions] [TGetScriptExecutionOperationActor] OwnerId: [3:7591329249249234134:3282] ActorId: [3:7591329249249234135:3283] Database: /dc-1 ExecutionId: 915db419-f56590c4-66cc038c-91378325. Reply success 2026-01-04T02:34:35.715028Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=Y2ZkNDFhMzQtOGQ0OWU5MDMtZDBhNDk2YjEtZTQ0MmEzODE=, workerId: [3:7591329249249234139:2716], local sessions count: 1 2026-01-04T02:34:36.729280Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3dpj9p88c88bh1m2yr02vv, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=NzI1ODY4NmMtYzFiNmU5NDYtOTZiOGYzMjYtNmRlNDdkMTk=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 94, targetId: [3:7591329232069364201:2541] 2026-01-04T02:34:36.729332Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 94 timeout: 300.000000s actor id: [3:7591329253544201475:3300] 2026-01-04T02:34:36.749469Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7591329253544201479:3302], for# root@builtin, access# DescribeSchema 2026-01-04T02:34:36.749520Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7591329253544201479:3302], for# root@builtin, access# DescribeSchema 2026-01-04T02:34:36.752554Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7591329253544201476:2730], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[/dc-1/.metadata/script_executions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:34:36.752988Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=NzI1ODY4NmMtYzFiNmU5NDYtOTZiOGYzMjYtNmRlNDdkMTk=, ActorId: [3:7591329232069364201:2541], ActorState: ExecuteState, LegacyTraceId: 01ke3dpj9p88c88bh1m2yr02vv, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 21 } message: "At function: KiReadTable!" end_position { row: 2 column: 21 } severity: 1 issues { position { row: 2 column: 21 } message: "Cannot find table \'db.[/dc-1/.metadata/script_executions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 21 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:34:36.753170Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3dpj9p88c88bh1m2yr02vv", Forwarded response to sender actor, requestId: 94, sender: [3:7591329253544201474:2729], selfId: [3:7591329189119689653:2266], source: [3:7591329232069364201:2541] 2026-01-04T02:34:36.770233Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3dpjb14y9081c5k274cmhj, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=NzI1ODY4NmMtYzFiNmU5NDYtOTZiOGYzMjYtNmRlNDdkMTk=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 95, targetId: [3:7591329232069364201:2541] 2026-01-04T02:34:36.770284Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 95 timeout: 300.000000s actor id: [3:7591329253544201482:3303] 2026-01-04T02:34:36.789897Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7591329253544201486:3305], for# root@builtin, access# DescribeSchema 2026-01-04T02:34:36.789930Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7591329253544201486:3305], for# root@builtin, access# DescribeSchema 2026-01-04T02:34:36.792426Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7591329253544201483:2733], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[/dc-1/.metadata/script_execution_leases]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:34:36.792957Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=NzI1ODY4NmMtYzFiNmU5NDYtOTZiOGYzMjYtNmRlNDdkMTk=, ActorId: [3:7591329232069364201:2541], ActorState: ExecuteState, LegacyTraceId: 01ke3dpjb14y9081c5k274cmhj, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 21 } message: "At function: KiReadTable!" end_position { row: 2 column: 21 } severity: 1 issues { position { row: 2 column: 21 } message: "Cannot find table \'db.[/dc-1/.metadata/script_execution_leases]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 21 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:34:36.793122Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3dpjb14y9081c5k274cmhj", Forwarded response to sender actor, requestId: 95, sender: [3:7591329253544201481:2732], selfId: [3:7591329189119689653:2266], source: [3:7591329232069364201:2541] 2026-01-04T02:34:36.806745Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3dpjc6emb6tnesnpxt3cwp, Database: /dc-1, SessionId: ydb://session/3?node_id=3&id=NzI1ODY4NmMtYzFiNmU5NDYtOTZiOGYzMjYtNmRlNDdkMTk=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 96, targetId: [3:7591329232069364201:2541] 2026-01-04T02:34:36.806800Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 96 timeout: 300.000000s actor id: [3:7591329253544201489:3306] 2026-01-04T02:34:36.831885Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7591329253544201493:3308], for# root@builtin, access# DescribeSchema 2026-01-04T02:34:36.831930Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:7591329253544201493:3308], for# root@builtin, access# DescribeSchema 2026-01-04T02:34:36.834427Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7591329253544201490:2736], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:21: Error: At function: KiReadTable!
:2:21: Error: Cannot find table 'db.[/dc-1/.metadata/result_sets]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:34:36.836482Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=NzI1ODY4NmMtYzFiNmU5NDYtOTZiOGYzMjYtNmRlNDdkMTk=, ActorId: [3:7591329232069364201:2541], ActorState: ExecuteState, LegacyTraceId: 01ke3dpjc6emb6tnesnpxt3cwp, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 21 } message: "At function: KiReadTable!" end_position { row: 2 column: 21 } severity: 1 issues { position { row: 2 column: 21 } message: "Cannot find table \'db.[/dc-1/.metadata/result_sets]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 21 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:34:36.836649Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3dpjc6emb6tnesnpxt3cwp", Forwarded response to sender actor, requestId: 96, sender: [3:7591329253544201488:2735], selfId: [3:7591329189119689653:2266], source: [3:7591329232069364201:2541] 2026-01-04T02:34:36.850733Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=3&id=NzI1ODY4NmMtYzFiNmU5NDYtOTZiOGYzMjYtNmRlNDdkMTk=, workerId: [3:7591329232069364201:2541], local sessions count: 0 2026-01-04T02:34:36.999089Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:34:36.999128Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> TResourcePoolTest::SchemeErrors >> TResourcePoolTest::DropResourcePoolTwice [GOOD] >> TResourcePoolTest::ParallelCreateResourcePool [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Table >> TSchemeShardDecimalTypesInTables::Parameters_22_9-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false >> TResourcePoolTest::CreateResourcePoolWithProperties [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::CreateResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:34:37.863921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:37.864060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:37.864106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 
0.010000s 2026-01-04T02:34:37.864147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:37.864227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:37.864277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:37.864349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:37.864430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:37.865609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:37.866002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:37.977844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:37.977917Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:37.993277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:37.993497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:37.993689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:38.006014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:38.006280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:38.007101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:38.007430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:38.014531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:38.014805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:38.016132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:38.016205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:38.016335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:38.016413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, 
schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:38.016467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:38.016681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:38.215222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.216292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.216413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.216515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.216587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.216656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.216731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 
281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.216795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.216858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.216962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.217026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.217101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.217185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.217260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.217357Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:39.115259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_resource_pool.cpp:19: [72057594046678944] TCreateResourcePool TPropose, operationId: 102:0, HandleReply TEvOperationPlan: step# 5000039 2026-01-04T02:34:39.115398Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-04T02:34:39.115580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:34:39.115662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-04T02:34:39.117557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:34:39.119961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:34:39.120199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:34:39.120768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:39.120824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-04T02:34:39.121039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-04T02:34:39.121143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-04T02:34:39.121252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:39.121307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 40 2026-01-04T02:34:39.121357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 41 2026-01-04T02:34:39.121385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], 
at schemeshard: 72057594046678944, txId: 102, path id: 41 2026-01-04T02:34:39.121655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:34:39.121705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:34:39.121864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:34:39.121913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:34:39.121959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:34:39.122000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:34:39.122055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-04T02:34:39.122115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:34:39.122161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:34:39.122198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:34:39.122280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-01-04T02:34:39.122325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-04T02:34:39.122357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 40], 5 2026-01-04T02:34:39.122389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 41], 2 2026-01-04T02:34:39.123561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:39.123660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:39.123717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:34:39.123761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 5 2026-01-04T02:34:39.123807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 
2026-01-04T02:34:39.125119Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:39.125246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:39.125302Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:34:39.125343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 2 2026-01-04T02:34:39.125385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-04T02:34:39.125477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:34:39.128242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:34:39.129525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:34:39.129749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:34:39.129795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:34:39.130240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:34:39.130332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:34:39.130369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:754:2744] TestWaitNotification: OK eventTxId 102 2026-01-04T02:34:39.130891Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:39.131128Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 259us result status StatusSuccess 2026-01-04T02:34:39.131551Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathDescription { 
Self { Name: "MyResourcePool" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderEmpty [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:34:36.570952Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:34:36.685858Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:34:36.686432Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:34:36.686517Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:34:36.976417Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:34:36.976594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:34:37.087844Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494073543672 != 1767494073543675 2026-01-04T02:34:37.107067Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:37.158388Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:34:37.268988Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:34:37.733062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-01-04T02:34:37.733292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:34:37.733541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-04T02:34:37.733590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-04T02:34:37.733776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:34:37.733882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:34:37.734663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-04T02:34:37.734845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //Root 2026-01-04T02:34:37.735074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:34:37.735135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-04T02:34:37.735213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:34:37.735249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:34:37.735912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:34:37.735964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:34:37.736005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:34:37.736564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:34:37.736610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:34:37.736660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-01-04T02:34:37.736713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:34:37.750009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:34:37.750878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:34:37.751051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:34:37.751660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2026-01-04T02:34:37.751706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2026-01-04T02:34:37.751771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2026-01-04T02:34:37.764652Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:34:37.871198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: 
TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:34:37.871407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-04T02:34:37.871477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2026-01-04T02:34:37.871739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:34:37.871803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2026-01-04T02:34:37.871984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-04T02:34:37.872078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-04T02:34:37.873083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T02:34:37.873146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:34:37.873388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-04T02:34:37.873435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:559:2493], at schemeshard: 72057594046644480, txId: 1, path id: 1 2026-01-04T02:34:37.873684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:34:37.873745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046644480] TDone opId# 1:0 ProgressState 2026-01-04T02:34:37.883853Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:34:37.883955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:34:37.884009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:34:37.884078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:34:37.884141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:34:37.884193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:34:37.884234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:34:37.884271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:34:37.884370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2026-01-04T02:34:37.884411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2026-01-04T02:34:37.884448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 6 2026-01-04T02:34:37.888279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshar ... ountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 
IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046644480 2026-01-04T02:34:38.669571Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:68:2115] Handle TEvNavigate describe path /Root/IncrBackupTable 2026-01-04T02:34:38.669732Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:1024:2865] HANDLE EvNavigateScheme /Root/IncrBackupTable 2026-01-04T02:34:38.670295Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:1024:2865] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:34:38.670403Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:1024:2865] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" 2026-01-04T02:34:38.671669Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:1024:2865] Handle TEvDescribeSchemeResult Forward to# [1:829:2724] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 2500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: 
"compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-04T02:34:38.672376Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:65: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1026:2867] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:39:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: 
[OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:34:38.672560Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:131: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1026:2867] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:34:38.672796Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:227: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1026:2867] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-04T02:34:38.672957Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:176: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1026:2867] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData |88.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> TSchemeShardTest::DisablePublicationsOfDropping_IndexedTable [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Pq >> TResourcePoolTest::ParallelAlterResourcePool [GOOD] >> TSchemeShardTest::AlterBlockStoreVolumeWithNonReplicatedPartitions [GOOD] >> TSchemeShardTest::AdoptDropSolomon ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::AttemptToUpdateDeletedLease [GOOD] Test command err: 2026-01-04T02:33:57.121522Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329085927821037:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:57.121592Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:57.541923Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:57.638972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:57.639072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:57.724981Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:57.810743Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:57.828508Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:33:58.134515Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:58.285029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:58.305986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:34:01.190758Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:34:01.207591Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=1&id=ODEzYzIyZGUtZWM2NGM1MGUtMjAyZWFlZGEtNGZjOWIxNjE=, workerId: [1:7591329103107691014:2303], database: , longSession: 0, local sessions count: 1 2026-01-04T02:34:01.207937Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: , SessionId: ydb://session/3?node_id=1&id=ODEzYzIyZGUtZWM2NGM1MGUtMjAyZWFlZGEtNGZjOWIxNjE=, PoolId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 0.010000s timeout: 0.010000s cancelAfter: 0.000000s. Send request to target, requestId: 2, targetId: [1:7591329103107691014:2303] 2026-01-04T02:34:01.207959Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 2 timeout: 0.010000s actor id: [0:0:0] 2026-01-04T02:34:01.208018Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:34:01.208046Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-04T02:34:01.208081Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:34:01.214337Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2718} SessionId: ydb://session/3?node_id=1&id=ODEzYzIyZGUtZWM2NGM1MGUtMjAyZWFlZGEtNGZjOWIxNjE=, ActorId: [1:7591329103107691014:2303], ActorState: ReadyState, Reply query error, msg:
: Error: SomeUniqTextForUt proxy_request_id# 2 trace_id# 2026-01-04T02:34:01.214727Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 2, sender: [1:7591329090222789090:2486], selfId: [1:7591329085927821257:2266], source: [1:7591329103107691014:2303] 2026-01-04T02:34:01.233921Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1212: Handle TEvPrivate::TEvOnRequestTimeout(2) 2026-01-04T02:34:01.233951Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1215: Invalid request info while on request timeout handle. RequestId: 2 2026-01-04T02:34:01.266662Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329103107691015:2304], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:01.266825Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:01.273982Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329103107691040:2306], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:01.274116Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ... waiting for SysViewsRoster update finished 2026-01-04T02:34:09.185489Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:34:09.185749Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:34:09.197143Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:34:09.198761Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:34:09.200472Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:500:2404], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:34:09.200717Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:34:09.200885Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:34:09.203439Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:493:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:34:09.203860Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:34:09.203911Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:34:09.519461Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:09.632264Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:34:09.632402Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:34:09.633228Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:34:09.633301Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:34:09.693585Z node 2 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-04T02:34:09.694334Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:34:09.694777Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:34:09.816528Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:34:09.853026Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) KQP PROXY1 [2:8678280833929343339:121] KQP PROXY2 [3:8678280833929343339:121] SENDER [2:1499:2983] 2026-01-04T02:34:10.671925Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=3&id=YjBkMmQ2MzItY2Q3NmY4Y2MtNmNhOTdiYjYtZDNjMGRhNA==, workerId: [3:1500:2378], database: , longSession: 1, local sessions count: 1 2026-01-04T02:34:10.672201Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: Created session ydb://session/3?node_id=3&id=YjBkMmQ2MzItY2Q3NmY4Y2MtNmNhOTdiYjYtZDNjMGRhNA== 2026-01-04T02:34:10.672852Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: , SessionId: ydb://session/3?node_id=3&id=YjBkMmQ2MzItY2Q3NmY4Y2MtNmNhOTdiYjYtZDNjMGRhNA==, PoolId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. 
Send request to target, requestId: 2, targetId: [3:8678280833929343339:121] 2026-01-04T02:34:10.672925Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 2 timeout: 0.001000s actor id: [0:0:0] 2026-01-04T02:34:10.673680Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: , SessionId: ydb://session/3?node_id=3&id=YjBkMmQ2MzItY2Q3NmY4Y2MtNmNhOTdiYjYtZDNjMGRhNA==, PoolId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 0.001000s timeout: 0.001000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [3:1500:2378] 2026-01-04T02:34:10.673723Z node 3 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 0.001000s actor id: [0:0:0] 2026-01-04T02:34:10.979703Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1501:2984], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:10.979856Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:10.980452Z ... 2026-01-04T02:34:36.173531Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=YTk1NzE0NjUtYmVhN2I5ZjEtNDMzYjNmYmQtZDA4MDgxODU=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 50, targetId: [7:7591329255981278612:2576] 2026-01-04T02:34:36.173564Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 50 timeout: 300.000000s actor id: [7:7591329255981278637:3078] 2026-01-04T02:34:36.190137Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 50, sender: [7:7591329255981278636:2583], selfId: [7:7591329195851734685:2263], source: [7:7591329255981278612:2576] 2026-01-04T02:34:36.190825Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [7:7591329255981278608:2573], ActorId: [7:7591329255981278609:2574], TraceId: ExecutionId: bbd8c241-9cf62dcb-8048c5e1-e7b68693, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, DataQuery #2 finished SUCCESS, Issues: [ {
:20:21: Warning: Symbol $retry_deadline is not used, code: 4527 } {
:21:21: Warning: Symbol $lease_state is not used, code: 4527 } ], SessionId: ydb://session/3?node_id=7&id=YTk1NzE0NjUtYmVhN2I5ZjEtNDMzYjNmYmQtZDA4MDgxODU=, TxId: 2026-01-04T02:34:36.190918Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [7:7591329255981278608:2573], ActorId: [7:7591329255981278609:2574], TraceId: ExecutionId: bbd8c241-9cf62dcb-8048c5e1-e7b68693, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=7&id=YTk1NzE0NjUtYmVhN2I5ZjEtNDMzYjNmYmQtZDA4MDgxODU=, TxId: 2026-01-04T02:34:36.190995Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4311: [ScriptExecutions] [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [7:7591329255981278608:2573], ActorId: [7:7591329255981278609:2574], TraceId: ExecutionId: bbd8c241-9cf62dcb-8048c5e1-e7b68693, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish script execution operation. Status: UNAVAILABLE. Issues: {
: Error: Lease expired } 2026-01-04T02:34:36.191181Z node 7 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TSaveScriptFinalStatusActor] OwnerId: [7:7591329255981278607:2572], ActorId: [7:7591329255981278608:2573], TraceId: ExecutionId: bbd8c241-9cf62dcb-8048c5e1-e7b68693, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [7:7591329255981278609:2574] SUCCESS 2026-01-04T02:34:36.192027Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=7&id=YTk1NzE0NjUtYmVhN2I5ZjEtNDMzYjNmYmQtZDA4MDgxODU=, workerId: [7:7591329255981278612:2576], local sessions count: 1 2026-01-04T02:34:36.192195Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1443: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [7:7591329251686311269:3053] ActorId: [7:7591329251686311270:3054] Database: /dc-1 ExecutionId: bbd8c241-9cf62dcb-8048c5e1-e7b68693. Successfully finalized script execution operation, WaitingRetry: 0 2026-01-04T02:34:36.192247Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1789: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [7:7591329251686311269:3053] ActorId: [7:7591329251686311270:3054] Database: /dc-1 ExecutionId: bbd8c241-9cf62dcb-8048c5e1-e7b68693. Reply success 2026-01-04T02:34:36.205615Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3dphsdct0t6k12gr1f8wwp, Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=ZWUxMzczM2UtNmU4Njg5Mi01MjNjNmJmYS04ZmJiMWNkMQ==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 51, targetId: [7:7591329243096376540:2533] 2026-01-04T02:34:36.205666Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 51 timeout: 300.000000s actor id: [7:7591329255981278664:3086] 2026-01-04T02:34:37.192666Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3dphsdct0t6k12gr1f8wwp", Forwarded response to sender actor, requestId: 51, sender: [7:7591329255981278663:2588], selfId: [7:7591329195851734685:2263], source: [7:7591329243096376540:2533] 2026-01-04T02:34:37.198349Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:833: [ScriptExecutions] [TScriptLeaseUpdateActor] OwnerId: [7:7591329260276246002:3103] ActorId: [7:7591329260276246003:3104] Database: /dc-1 ExecutionId: bbd8c241-9cf62dcb-8048c5e1-e7b68693. Bootstrap. Start TLeaseUpdateRetryActor [7:7591329260276246004:3105] 2026-01-04T02:34:37.198443Z node 7 :KQP_PROXY DEBUG: query_actor.h:292: [TQueryRetryActor] [TScriptLeaseUpdater] OwnerId: [7:7591329260276246003:3104], ActorId: [7:7591329260276246004:3105], TraceId: ExecutionId: bbd8c241-9cf62dcb-8048c5e1-e7b68693, RequestDatabase: /dc-1, LeaseGeneration: 1, Starting query actor #1 [7:7591329260276246005:3106] 2026-01-04T02:34:37.198482Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:135: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7591329260276246004:3105], ActorId: [7:7591329260276246005:3106], TraceId: ExecutionId: bbd8c241-9cf62dcb-8048c5e1-e7b68693, RequestDatabase: /dc-1, LeaseGeneration: 1, Bootstrap. 
Database: /dc-1, IsSystemUser: 1, run create session 2026-01-04T02:34:37.198671Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: Request has 18444976579632.352960s seconds to be completed 2026-01-04T02:34:37.200812Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: Created new session, sessionId: ydb://session/3?node_id=7&id=ODAxNTk2MjEtNWQwOWQ0ZDktYWM2NGRmZjktZTNmMjQyZA==, workerId: [7:7591329260276246007:2602], database: /dc-1, longSession: 1, local sessions count: 2 2026-01-04T02:34:37.200999Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 2026-01-04T02:34:37.201370Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:695: [ScriptExecutions] [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7591329260276246004:3105], ActorId: [7:7591329260276246005:3106], TraceId: ExecutionId: bbd8c241-9cf62dcb-8048c5e1-e7b68693, RequestDatabase: /dc-1, LeaseGeneration: 1, Update lease on duration: 1.000000s 2026-01-04T02:34:37.201495Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7591329260276246004:3105], ActorId: [7:7591329260276246005:3106], TraceId: ExecutionId: bbd8c241-9cf62dcb-8048c5e1-e7b68693, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, RunDataQuery with SessionId: ydb://session/3?node_id=7&id=ODAxNTk2MjEtNWQwOWQ0ZDktYWM2NGRmZjktZTNmMjQyZA==, TxId: , text: -- TScriptLeaseUpdater::OnRunQuery DECLARE $database AS Text; DECLARE $execution_id AS Text; SELECT lease_generation, lease_state FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-01-04T02:34:37.201854Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=ODAxNTk2MjEtNWQwOWQ0ZDktYWM2NGRmZjktZTNmMjQyZA==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 53, targetId: [7:7591329260276246007:2602] 2026-01-04T02:34:37.201891Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 53 timeout: 300.000000s actor id: [7:7591329260276246009:3107] 2026-01-04T02:34:37.543486Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 53, sender: [7:7591329260276246008:2603], selfId: [7:7591329195851734685:2263], source: [7:7591329260276246007:2602] 2026-01-04T02:34:37.544065Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7591329260276246004:3105], ActorId: [7:7591329260276246005:3106], TraceId: ExecutionId: bbd8c241-9cf62dcb-8048c5e1-e7b68693, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=7&id=ODAxNTk2MjEtNWQwOWQ0ZDktYWM2NGRmZjktZTNmMjQyZA==, TxId: 01ke3dpk2y173wrtms0n02xwmh 2026-01-04T02:34:37.544222Z node 7 :KQP_PROXY WARN: query_actor.cpp:376: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7591329260276246004:3105], ActorId: [7:7591329260276246005:3106], TraceId: ExecutionId: bbd8c241-9cf62dcb-8048c5e1-e7b68693, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, Finish with NOT_FOUND, Issues: {
: Error: No such execution }, SessionId: ydb://session/3?node_id=7&id=ODAxNTk2MjEtNWQwOWQ0ZDktYWM2NGRmZjktZTNmMjQyZA==, TxId: 01ke3dpk2y173wrtms0n02xwmh 2026-01-04T02:34:37.544278Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:432: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7591329260276246004:3105], ActorId: [7:7591329260276246005:3106], TraceId: ExecutionId: bbd8c241-9cf62dcb-8048c5e1-e7b68693, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, Rollback transaction: 01ke3dpk2y173wrtms0n02xwmh in session: ydb://session/3?node_id=7&id=ODAxNTk2MjEtNWQwOWQ0ZDktYWM2NGRmZjktZTNmMjQyZA== 2026-01-04T02:34:37.544442Z node 7 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TScriptLeaseUpdater] OwnerId: [7:7591329260276246003:3104], ActorId: [7:7591329260276246004:3105], TraceId: ExecutionId: bbd8c241-9cf62dcb-8048c5e1-e7b68693, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [7:7591329260276246005:3106] NOT_FOUND 2026-01-04T02:34:37.544545Z node 7 :KQP_PROXY DEBUG: kqp_script_executions.cpp:843: [ScriptExecutions] [TScriptLeaseUpdateActor] OwnerId: [7:7591329260276246002:3103] ActorId: [7:7591329260276246003:3104] Database: /dc-1 ExecutionId: bbd8c241-9cf62dcb-8048c5e1-e7b68693. Lease update [7:7591329260276246005:3106] finished NOT_FOUND, issues: {
: Error: No such execution } 2026-01-04T02:34:37.544837Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=7&id=ODAxNTk2MjEtNWQwOWQ0ZDktYWM2NGRmZjktZTNmMjQyZA==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 54, targetId: [7:7591329260276246007:2602] 2026-01-04T02:34:37.544867Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 54 timeout: 600.000000s actor id: [7:7591329260276246037:3119] 2026-01-04T02:34:37.545885Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 54, sender: [7:7591329260276246036:2612], selfId: [7:7591329195851734685:2263], source: [7:7591329260276246007:2602] 2026-01-04T02:34:37.546242Z node 7 :KQP_PROXY DEBUG: query_actor.cpp:441: [TQueryBase] [TScriptLeaseUpdater] OwnerId: [7:7591329260276246004:3105], ActorId: [7:7591329260276246005:3106], TraceId: ExecutionId: bbd8c241-9cf62dcb-8048c5e1-e7b68693, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get lease info, RollbackTransactionResult: SUCCESS. Issues: 2026-01-04T02:34:37.546788Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=7&id=ODAxNTk2MjEtNWQwOWQ0ZDktYWM2NGRmZjktZTNmMjQyZA==, workerId: [7:7591329260276246007:2602], local sessions count: 1 2026-01-04T02:34:37.567548Z node 7 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=7&id=ZWUxMzczM2UtNmU4Njg5Mi01MjNjNmJmYS04ZmJiMWNkMQ==, workerId: [7:7591329243096376540:2533], local sessions count: 0 >> TResourcePoolTest::ReadOnlyMode [GOOD] |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest >> TSchemeShardTest::AlterTopicOverDiskSpaceQuotas [GOOD] >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false >> TResourcePoolTest::DropResourcePool [GOOD] >> TResourcePoolTest::ParallelCreateSameResourcePool [GOOD] >> TSchemeShardTest::CreateAlterBlockStoreVolumeWithInvalidPoolKinds [GOOD] >> TSchemeShardTest::CreateDropKesus ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ParallelAlterResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:34:38.953443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:38.953533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:38.953582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:38.953628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:38.953674Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:38.953703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:38.953769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:38.954300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:38.955364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:38.955672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:39.054227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:39.054301Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:39.088697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:39.088945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:39.089206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:39.099074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:39.099315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:39.100350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:39.100643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:39.103792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:39.104023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:39.105198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:39.105257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:39.105387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:39.105444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:39.105486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:39.105701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:39.251316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.252401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.252532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.252624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.252696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.252768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.252830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.252888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.252949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.253061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.253144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.253228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.253305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.253376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.253476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... pp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.191934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [1:842:2832] 2026-01-04T02:34:40.192000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.192019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:842:2832] 2026-01-04T02:34:40.192112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2026-01-04T02:34:40.192229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.192250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:842:2832] 2026-01-04T02:34:40.192330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2026-01-04T02:34:40.192401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2026-01-04T02:34:40.192532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2026-01-04T02:34:40.192607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-01-04T02:34:40.192702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.192724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [1:842:2832] 2026-01-04T02:34:40.192782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-01-04T02:34:40.192881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.192902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [1:842:2832] 2026-01-04T02:34:40.192971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.193004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [1:842:2832] 2026-01-04T02:34:40.193059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.193079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [1:842:2832] 2026-01-04T02:34:40.193142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, 
txId: 127, at schemeshard: 72057594046678944 2026-01-04T02:34:40.193256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 128, at schemeshard: 72057594046678944 2026-01-04T02:34:40.193347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.193370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [1:842:2832] 2026-01-04T02:34:40.193417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2026-01-04T02:34:40.193548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.193570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [1:842:2832] 2026-01-04T02:34:40.193614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 130, at schemeshard: 72057594046678944 2026-01-04T02:34:40.193705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.193730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [1:842:2832] 2026-01-04T02:34:40.193972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.193998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:842:2832] 2026-01-04T02:34:40.194118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.194143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:842:2832] 2026-01-04T02:34:40.194227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 131, at schemeshard: 72057594046678944 2026-01-04T02:34:40.194319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.194354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:842:2832] 2026-01-04T02:34:40.194455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 132, at schemeshard: 72057594046678944 2026-01-04T02:34:40.194538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.194559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:842:2832] 2026-01-04T02:34:40.194709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 128: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.194733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [1:842:2832] 2026-01-04T02:34:40.194792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.194813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:842:2832] 2026-01-04T02:34:40.194961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.194985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [1:842:2832] 2026-01-04T02:34:40.195179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.195202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [1:842:2832] 2026-01-04T02:34:40.195344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 132: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.195370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 132: satisfy waiter [1:842:2832] TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 TestWaitNotification: OK eventTxId 132 2026-01-04T02:34:40.199250Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:40.199509Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 302us result status StatusSuccess 2026-01-04T02:34:40.200014Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathDescription { Self { Name: "MyResourcePool" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 2 Properties { Properties { key: "concurrent_query_limit" value: "20" } Properties { key: "query_cancel_after_seconds" value: "60" } Properties { key: "query_count_limit" value: "50" } } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::DropResourcePoolTwice [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:34:38.854770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:38.854875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:38.854929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:38.854972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:38.855013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:38.855044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:38.855110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:38.855183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, 
Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:38.856088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:38.856370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:38.954645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:38.954715Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:38.970889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:38.971098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:38.971301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:38.980307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:38.980540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:38.981333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:38.981642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:38.985308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:38.985558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:38.986760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:38.986823Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:38.986964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:38.987018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:38.987066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:38.987251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:39.152828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
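
The repeated "SysViewsRosterUpdate# ... Send TEvModifySchemeTransaction" records that follow show the schemeshard bootstrapping the built-in system views: first an ESchemeOpMkDir for "/MyRoot/.sys" (TxId 281474976710657), then one ESchemeOpCreateSysView per (Name, Type) pair with consecutive transaction ids. The C++ snippet below is a minimal, self-contained model of that fan-out written only from what is visible in this log; the struct and function names are hypothetical and it is not the actual schemeshard code.

// Simplified model of the sys-view roster fan-out seen in the log above.
// All names here are hypothetical; this is not YDB code.
#include <cstdint>
#include <iostream>
#include <string>
#include <vector>

struct TSysViewSpec {
    std::string Name;   // e.g. "auth_permissions"
    std::string Type;   // e.g. "EAuthPermissions"
};

// Emits one "create sys view" request per spec with consecutive txIds,
// mirroring the TxId: 281474976710658, 281474976710659, ... sequence above.
void SendCreateSysViewBatch(const std::vector<TSysViewSpec>& specs, uint64_t firstTxId) {
    uint64_t txId = firstTxId;
    for (const auto& spec : specs) {
        std::cout << "Send TEvModifySchemeTransaction: WorkingDir=/MyRoot/.sys"
                  << " Op=ESchemeOpCreateSysView Name=" << spec.Name
                  << " Type=" << spec.Type
                  << " TxId=" << txId++ << "\n";
    }
}

int main() {
    std::vector<TSysViewSpec> specs = {
        {"auth_permissions", "EAuthPermissions"},
        {"auth_effective_permissions", "EAuthEffectivePermissions"},
        {"auth_users", "EAuthUsers"},
        {"pg_tables", "EPgTables"},
        {"ds_pdisks", "EPDisks"},
    };
    // The ".sys" directory itself is created first (MkDir, TxId 281474976710657),
    // so the sys views start at the next id.
    SendCreateSysViewBatch(specs, 281474976710658ULL);
}
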
2026-01-04T02:34:39.154254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.154430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.154516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.154586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.154644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.154709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.154763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.154819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.154923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.154998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.155059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.155135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.155215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.155303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
HEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:39.902545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:34:39.902715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-04T02:34:39.902781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-04T02:34:39.902939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:39.902984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-04T02:34:39.903043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 40 2026-01-04T02:34:39.903074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 41 2026-01-04T02:34:39.903389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:34:39.903438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-04T02:34:39.903550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:34:39.903586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:34:39.903640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:34:39.903683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:34:39.903724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-04T02:34:39.903778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:34:39.903824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:34:39.903859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:34:39.903941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-04T02:34:39.903990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2026-01-04T02:34:39.904024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 7 2026-01-04T02:34:39.904085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 40], 7 2026-01-04T02:34:39.904128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 41], 18446744073709551615 2026-01-04T02:34:39.904913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:34:39.905013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:34:39.905050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:34:39.905090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 18446744073709551615 2026-01-04T02:34:39.905136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-04T02:34:39.905479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:34:39.905522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-04T02:34:39.905591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-04T02:34:39.905748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:34:39.905878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:34:39.905912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:34:39.905939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-01-04T02:34:39.905993Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:34:39.907017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:34:39.907106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:34:39.907145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:34:39.907177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 7 2026-01-04T02:34:39.907206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:34:39.907302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-04T02:34:39.911356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:34:39.911908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:34:39.912060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:34:39.912514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-04T02:34:39.912858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:34:39.912912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:34:39.913340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:34:39.913466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:34:39.913508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:786:2776] TestWaitNotification: OK eventTxId 103 2026-01-04T02:34:39.914062Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:39.914288Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 241us result status StatusPathDoesNotExist 2026-01-04T02:34:39.914512Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/MyResourcePool\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.metadata/workload_manager/pools\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.metadata/workload_manager/pools" LastExistedPrefixPathId: 40 LastExistedPrefixDescription { Self { Name: "pools" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::CreateResourcePoolWithProperties [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:34:39.088721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:39.088834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:39.088886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:39.088952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:39.089000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:39.089034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:39.089092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:39.089178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:39.090133Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:39.090438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:39.185475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:39.185555Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:39.200365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:39.200578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:39.200750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:39.208967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:39.209207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:39.209911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:39.210197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:39.213229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:39.213458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:39.214692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:39.214753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:39.214896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:39.214952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:39.214997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:39.215193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:39.371160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.372190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.372349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.372454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.372530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.372594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.372660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.372726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.372788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.372910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.372996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.373075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.373142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.373284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.373401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
d: 72057594046678944 2026-01-04T02:34:40.075394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_resource_pool.cpp:19: [72057594046678944] TCreateResourcePool TPropose, operationId: 102:0, HandleReply TEvOperationPlan: step# 5000039 2026-01-04T02:34:40.075482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-04T02:34:40.075605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:34:40.075660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-04T02:34:40.076764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:34:40.079019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:34:40.079215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:34:40.079707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:40.079736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-04T02:34:40.079889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-04T02:34:40.079991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-04T02:34:40.080078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:40.080102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 40 2026-01-04T02:34:40.080137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 41 2026-01-04T02:34:40.080152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 41 2026-01-04T02:34:40.080373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:34:40.080409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:34:40.080492Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:34:40.080532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:34:40.080566Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:34:40.080595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:34:40.080627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-04T02:34:40.080667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:34:40.080695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:34:40.080721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:34:40.080772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-01-04T02:34:40.080809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-04T02:34:40.080839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 40], 5 2026-01-04T02:34:40.080861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 41], 2 2026-01-04T02:34:40.082068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:40.082155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:40.082221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:34:40.082332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 5 2026-01-04T02:34:40.082373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-04T02:34:40.083471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:40.083582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:40.083632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:34:40.083663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 2 2026-01-04T02:34:40.083700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-04T02:34:40.083785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:34:40.086232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:34:40.087210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:34:40.087388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:34:40.087421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:34:40.087802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:34:40.087884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.087917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:754:2744] TestWaitNotification: OK eventTxId 102 2026-01-04T02:34:40.088387Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:40.088614Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 232us result status StatusSuccess 2026-01-04T02:34:40.089055Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathDescription { Self { Name: "MyResourcePool" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { Properties { key: "concurrent_query_limit" value: "10" } Properties { key: "query_cancel_after_seconds" value: "60" } } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ParallelCreateResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:34:38.722265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:38.722380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:38.722433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:38.722474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:38.722515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:38.722551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:38.722605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:38.722675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:38.723666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:38.723971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:38.822796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:38.822874Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:38.841003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:38.841208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:38.841426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:38.855328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:38.855579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:38.856352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:38.856653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:38.864578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:38.864843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:38.866162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:38.866229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:38.866399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:38.866456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:38.866504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:38.866706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:39.074013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.074985Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.075197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.075285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.075350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.075407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.075470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.075531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.075593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.075727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.075804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.075866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.075950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.076061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.076152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
t@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool1" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:39.830885Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:39.831095Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" took 216us result status StatusSuccess 2026-01-04T02:34:39.831388Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" PathDescription { Self { Name: "MyResourcePool2" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 
MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool2" PathId { OwnerId: 72057594046678944 LocalId: 42 } Version: 1 Properties { } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:39.832213Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:39.832413Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools" took 212us result status StatusSuccess 2026-01-04T02:34:39.832866Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools" PathDescription { Self { Name: "pools" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "MyResourcePool1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyResourcePool2" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:39.833427Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool1" 
Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:39.833637Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool1" took 181us result status StatusSuccess 2026-01-04T02:34:39.834058Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool1" PathDescription { Self { Name: "MyResourcePool1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool1" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:39.834602Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:39.834771Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" took 174us result status StatusSuccess 2026-01-04T02:34:39.835081Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool2" PathDescription { Self { Name: "MyResourcePool2" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "MyResourcePool2" PathId { OwnerId: 72057594046678944 LocalId: 42 } Version: 1 Properties { } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:34:38.641393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:38.641506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:38.641551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:38.641589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:38.641654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:38.641697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:38.641784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:38.641883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:38.642930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:38.643280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:38.760350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:38.760418Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:38.775978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:38.776202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:38.776394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:38.784618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:38.784843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:38.785571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:38.785885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:38.788970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:38.789206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:38.790418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:38.790476Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:38.790582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:38.790648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:38.790697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:38.790882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:38.946772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.947773Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.947958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.948073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.948147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.948205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.948276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.948335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.948397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.948509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.948575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.948648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.948767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.948841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:38.948920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard: 72057594046678944 2026-01-04T02:34:40.186920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2026-01-04T02:34:40.187019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:34:40.187805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:34:40.187919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:34:40.187967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2026-01-04T02:34:40.188006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-01-04T02:34:40.188089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-04T02:34:40.189011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 43 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:34:40.189097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 43 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:34:40.189126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2026-01-04T02:34:40.189191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 2 2026-01-04T02:34:40.189247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-04T02:34:40.189324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2026-01-04T02:34:40.191581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2026-01-04T02:34:40.191750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 129 at step: 5000041 
FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000041 2026-01-04T02:34:40.193155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-01-04T02:34:40.194203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:40.194338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:40.194391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000041, at schemeshard: 72057594046678944 2026-01-04T02:34:40.194548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 129:0 128 -> 240 2026-01-04T02:34:40.194727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:34:40.194789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-04T02:34:40.195191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 FAKE_COORDINATOR: Erasing txId 129 2026-01-04T02:34:40.197173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:40.197213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:40.197377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-01-04T02:34:40.197460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:40.197494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:899:2857], at schemeshard: 72057594046678944, txId: 129, path id: 1 2026-01-04T02:34:40.197533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:899:2857], at schemeshard: 72057594046678944, txId: 129, path id: 43 2026-01-04T02:34:40.197845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2026-01-04T02:34:40.197898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 129:0 ProgressState 2026-01-04T02:34:40.198002Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-01-04T02:34:40.198054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-04T02:34:40.198101Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-01-04T02:34:40.198136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-04T02:34:40.198189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2026-01-04T02:34:40.198249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-04T02:34:40.198284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 129:0 2026-01-04T02:34:40.198316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 129:0 2026-01-04T02:34:40.198394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-04T02:34:40.198449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2026-01-04T02:34:40.198480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 13 2026-01-04T02:34:40.198509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 43], 3 2026-01-04T02:34:40.199271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:34:40.199368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:34:40.199401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2026-01-04T02:34:40.199455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2026-01-04T02:34:40.199498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-04T02:34:40.200265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 43 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:34:40.200355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 4 LocalPathId: 43 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:34:40.200394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2026-01-04T02:34:40.200432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 3 2026-01-04T02:34:40.200464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-04T02:34:40.200524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2026-01-04T02:34:40.205100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-01-04T02:34:40.215309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129 |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TSchemeShardTest::ConsistentCopyAfterDropIndexes [GOOD] >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-false >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-false [GOOD] >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::ParallelCreateSameResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:34:39.570451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:39.570562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:39.570626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:39.570666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:39.570709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:39.570745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:39.570824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: 
Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:39.570917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:39.572151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:39.572473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:39.665571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:39.665622Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:39.679063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:39.682640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:39.682851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:39.688754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:39.688959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:39.689704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:39.689962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:39.692563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:39.692750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:39.693710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:39.693781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:39.693928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:39.693968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:39.694005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:39.694129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:39.851840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.852945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.853106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.853204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.853301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.853379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.853447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.853542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.853639Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.853719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.853823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.853909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.853991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.854092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.854174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
01-04T02:34:40.697251Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" took 309us result status StatusSuccess 2026-01-04T02:34:40.697692Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:40.698237Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:40.698410Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" took 174us result status StatusSuccess 2026-01-04T02:34:40.698729Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2026-01-04T02:34:40.699021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2026-01-04T02:34:40.699066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2026-01-04T02:34:40.699167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2026-01-04T02:34:40.699192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2026-01-04T02:34:40.699266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2026-01-04T02:34:40.699292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 127 2026-01-04T02:34:40.699805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-01-04T02:34:40.699983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.700026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:754:2744] 2026-01-04T02:34:40.700196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-01-04T02:34:40.700283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2026-01-04T02:34:40.700332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.700356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:754:2744] 2026-01-04T02:34:40.700423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.700510Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:754:2744] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2026-01-04T02:34:40.701055Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:40.701259Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" took 190us result status StatusSuccess 2026-01-04T02:34:40.701585Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeResourcePool CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ResourcePoolVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ResourcePoolDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 41 } Version: 1 Properties { } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 128 2026-01-04T02:34:40.704909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "NilNoviSubLuna" } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:34:40.705178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 128:0, path# /MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna 2026-01-04T02:34:40.705364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 
128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-04T02:34:40.708233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" TxId: 128 SchemeshardId: 72057594046678944 PathId: 41 PathCreateTxId: 125, at schemeshard: 72057594046678944 2026-01-04T02:34:40.708544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges), operation: CREATE RESOURCE POOL, path: NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::DropResourcePool [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:34:39.521044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:39.521127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:39.521163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:39.521194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:39.521234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:39.521261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:39.521338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:39.521403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:39.522263Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:39.522528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:39.602445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:39.602513Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:39.616897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:39.619874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:39.620052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:39.624512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:39.624686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:39.625349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:39.625616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:39.628201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:39.628413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:39.629622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:39.629715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:39.629879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:39.629938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:39.629984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:39.630135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:39.803248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.804357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.804484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.804590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.804736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.804826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.804906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.804968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.805040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.805111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.805186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.805269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.805418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.805523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:39.805595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
HEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:40.673765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:34:40.673916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-04T02:34:40.673996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-04T02:34:40.674161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:40.674209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:208:2209], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-04T02:34:40.674267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:208:2209], at schemeshard: 72057594046678944, txId: 103, path id: 40 2026-01-04T02:34:40.674293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:208:2209], at schemeshard: 72057594046678944, txId: 103, path id: 41 2026-01-04T02:34:40.674361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:34:40.674405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-04T02:34:40.674494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:34:40.674530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:34:40.674576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:34:40.674611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:34:40.674647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-04T02:34:40.674696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:34:40.674744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:34:40.674845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:34:40.674940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-04T02:34:40.674985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 3, subscribers: 0 2026-01-04T02:34:40.675042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 7 2026-01-04T02:34:40.675074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 40], 7 2026-01-04T02:34:40.675104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 41], 18446744073709551615 2026-01-04T02:34:40.675912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:34:40.676009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:34:40.676060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:34:40.676106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 18446744073709551615 2026-01-04T02:34:40.676161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-04T02:34:40.676429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:34:40.676475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-04T02:34:40.676551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-04T02:34:40.677594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:34:40.677676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:34:40.677709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:34:40.677779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-01-04T02:34:40.677891Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:34:40.678936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:34:40.679015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:34:40.679049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:34:40.679091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 7 2026-01-04T02:34:40.679123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:34:40.679213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-04T02:34:40.681861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:34:40.682466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:34:40.683855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:34:40.683948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-04T02:34:40.684184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:34:40.684236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:34:40.684700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:34:40.684796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:34:40.684834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:779:2769] TestWaitNotification: OK eventTxId 103 2026-01-04T02:34:40.685407Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:40.685700Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" took 288us result status StatusPathDoesNotExist 2026-01-04T02:34:40.685969Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/MyResourcePool\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.metadata/workload_manager/pools\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/.metadata/workload_manager/pools/MyResourcePool" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.metadata/workload_manager/pools" LastExistedPrefixPathId: 40 LastExistedPrefixDescription { Self { Name: "pools" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 39 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest |88.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots |88.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots >> TSchemeShardTest::AdoptDropSolomon [GOOD] >> TSchemeShardTest::AlterTableAndAfterSplit >> TResourcePoolTest::SchemeErrors [GOOD] |88.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_reboots/ydb-core-tx-schemeshard-ut_reboots >> TxUsage::WriteToTopic_Demo_22_RestartNo_Table [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_4_Table [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartNo_Table >> TSchemeShardTest::CreateDropKesus [GOOD] >> TSchemeShardTest::CreateAlterKesus >> TSchemeShardTest::ParallelModifying [GOOD] >> TSchemeShardTest::PQGroupExplicitChannels >> TTxDataShardMiniKQL::WriteEraseRead >> IndexBuildTest::CancellationNotEnoughRetriesUniq [GOOD] >> IndexBuildTest::CancellationNoTable >> TResourcePoolTest::AlterResourcePool ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:34:40.386779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:40.386890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:40.386933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: 
StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:40.386980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:40.387025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:40.387060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:40.387126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:40.387218Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:40.388274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:40.388565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:40.514843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:40.514918Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:40.540936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:40.546659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:40.546949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:40.574947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:40.575237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:40.576155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:40.576499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:40.580565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:40.580814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:40.582216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:40.582305Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:40.582460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:40.582526Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:40.582579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:40.582743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:40.793171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:40.795343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:40.795520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:40.795619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:40.795719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:40.795803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:40.795883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:40.795957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:40.796051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:40.796137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:40.796209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:40.796295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:40.796383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:40.796524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 
281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:40.796599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:34:41.789817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 124 2026-01-04T02:34:41.789913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 124 2026-01-04T02:34:41.789954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 124 2026-01-04T02:34:41.789986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 124, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-01-04T02:34:41.790017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:34:41.790090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 124, subscribers: 0 2026-01-04T02:34:41.792884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 2026-01-04T02:34:41.793102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 2026-01-04T02:34:41.793320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 2026-01-04T02:34:41.794237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 124 TestModificationResult got TxId: 124, wait until txId: 124 TestWaitNotification wait txId: 124 2026-01-04T02:34:41.794489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 124: send EvNotifyTxCompletion 2026-01-04T02:34:41.794528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 124 2026-01-04T02:34:41.794875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2026-01-04T02:34:41.795009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 
2026-01-04T02:34:41.795050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:719:2709] TestWaitNotification: OK eventTxId 124 TestModificationResults wait txId: 125 2026-01-04T02:34:41.798157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "AnotherDir/MyResourcePool" } } TxId: 125 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:34:41.798417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/AnotherDir, operationId: 125:0, at schemeshard: 72057594046678944 2026-01-04T02:34:41.798563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: AnotherDir, child id: [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-04T02:34:41.798630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 0 2026-01-04T02:34:41.798674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 125:0 type: TxMkDir target path: [OwnerId: 72057594046678944, LocalPathId: 41] source path: 2026-01-04T02:34:41.798746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 125:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:34:41.798807Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 125:1, path# /MyRoot/AnotherDir/MyResourcePool 2026-01-04T02:34:41.798901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 125:2, propose status:StatusSchemeError, reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, at schemeshard: 72057594046678944 2026-01-04T02:34:41.801346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:148: Abort operation: IgniteOperation fail to propose a part, opId: 125:1, at schemeshard: 72057594046678944, already accepted parts: 1, propose result status: StatusSchemeError, with reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, tx message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "AnotherDir/MyResourcePool" } } TxId: 125 TabletId: 72057594046678944 2026-01-04T02:34:41.801513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:275: MkDir AbortPropose, opId: 125:0, at schemeshard: 72057594046678944 2026-01-04T02:34:41.803741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 125, response: Status: StatusSchemeError Reason: "Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools" TxId: 125 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:41.803953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 125, database: /MyRoot, subject: , status: StatusSchemeError, reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, operation: CREATE 
RESOURCE POOL, path: AnotherDir/MyResourcePool TestModificationResult got TxId: 125, wait until txId: 125 TestModificationResults wait txId: 126 2026-01-04T02:34:41.807013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "AnotherDir/MyResourcePool" } } TxId: 126 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:34:41.807300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/.metadata/workload_manager/pools/AnotherDir, operationId: 126:0, at schemeshard: 72057594046678944 2026-01-04T02:34:41.807460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 40], parent name: pools, child name: AnotherDir, child id: [OwnerId: 72057594046678944, LocalPathId: 42], at schemeshard: 72057594046678944 2026-01-04T02:34:41.807536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 0 2026-01-04T02:34:41.807578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 126:0 type: TxMkDir target path: [OwnerId: 72057594046678944, LocalPathId: 42] source path: 2026-01-04T02:34:41.807662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:34:41.807715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 126:1, path# /MyRoot/.metadata/workload_manager/pools/AnotherDir/MyResourcePool 2026-01-04T02:34:41.807849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:2, propose status:StatusSchemeError, reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, at schemeshard: 72057594046678944 2026-01-04T02:34:41.810230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:148: Abort operation: IgniteOperation fail to propose a part, opId: 126:1, at schemeshard: 72057594046678944, already accepted parts: 1, propose result status: StatusSchemeError, with reason: Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools, tx message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "AnotherDir/MyResourcePool" } } TxId: 126 TabletId: 72057594046678944 2026-01-04T02:34:41.810359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:275: MkDir AbortPropose, opId: 126:0, at schemeshard: 72057594046678944 2026-01-04T02:34:41.814244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Resource pools shoud be placed in /MyRoot/.metadata/workload_manager/pools" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:41.814514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Resource pools shoud be placed in 
/MyRoot/.metadata/workload_manager/pools, operation: CREATE RESOURCE POOL, path: AnotherDir/MyResourcePool TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2026-01-04T02:34:41.817480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpCreateResourcePool CreateResourcePool { Name: "" } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:34:41.817643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046678944] TCreateResourcePool Propose: opId# 127:0, path# /MyRoot/.metadata/workload_manager/pools/ 2026-01-04T02:34:41.817792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2026-01-04T02:34:41.827076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/\', error: path part shouldn\'t be empty" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:41.827378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/', error: path part shouldn't be empty, operation: CREATE RESOURCE POOL, path: TestModificationResult got TxId: 127, wait until txId: 127 |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TSchemeShardTest::DisablePublicationsOfDropping_Pq [GOOD] >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon >> TxUsage::WriteToTopic_Demo_22_RestartNo_Query >> TTxDataShardMiniKQL::CrossShard_1_Cycle >> TxUsage::Sinks_Oltp_WriteToTopic_4_Query >> TTxDataShardMiniKQL::WriteEraseRead [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMultipleShards >> TStreamingQueryTest::ParallelCreateStreamingQuery >> TSchemeShardDecimalTypesInTables::Parameters_35_6-EnableParameterizedDecimal-true [GOOD] >> TSchemeShardDecimalTypesInTables::CopyTableShouldNotFailOnDisabledFeatureFlag >> TStreamingQueryTest::AlterStreamingQuery >> TStreamingQueryTest::CreateStreamingQueryOrReplace >> IndexBuildTest::CancellationNoTable [GOOD] >> IndexBuildTest::CancellationNoTableUniq |88.1%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source |88.1%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source >> TSchemeShardTest::CreateAlterKesus [GOOD] >> TSchemeShardTest::CreateAlterDropSolomon >> TResourcePoolTest::AlterResourcePool [GOOD] >> TResourcePoolTest::AlterResourcePoolShouldFailIfSuchEntityNotExists >> TStreamingQueryTest::ParallelCreateSameStreamingQuery |88.1%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/ydb-core-tx-schemeshard-ut_external_data_source >> TStreamingQueryTest::CreateStreamingQueryOrReplaceFailNameConflict >> TSchemeShardTest::BackupBackupCollection-WithIncremental-false [GOOD] >> 
TSchemeShardTest::BackupBackupCollection-WithIncremental-true >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-false [GOOD] >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-true >> LocalPartition::Restarts [GOOD] >> LocalPartition::WithoutPartitionWithRestart >> BasicUsage::RecreateObserver [GOOD] >> TSchemeShardTest::AlterTableAndAfterSplit [GOOD] >> TSchemeShardTest::AlterIndexTableDirectly >> TSchemeShardTest::PQGroupExplicitChannels [GOOD] >> TSchemeShardTest::ReadOnlyMode >> TSchemeShardTest::DisablePublicationsOfDropping_Solomon [GOOD] >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas >> TTxDataShardMiniKQL::WriteAndReadMultipleShards [GOOD] >> TTxDataShardMiniKQL::WriteAndReadMany >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] >> TStreamingQueryTest::CreateStreamingQueryOrReplace [GOOD] >> TStreamingQueryTest::CreateStreamingQueryFailAlreadyExists >> TStreamingQueryTest::ParallelAlterStreamingQuery >> IndexBuildTest::CancellationNoTableUniq [GOOD] >> IndexBuildTest::CheckLimitWithDroppedIndex >> TStreamingQueryTest::AlterStreamingQuery [GOOD] >> TStreamingQueryTest::AlterStreamingQueryFailNameConflict >> TStreamingQueryTest::ParallelCreateStreamingQuery [GOOD] >> TStreamingQueryTest::CreateStreamingQueryOrReplaceFailNameConflict [GOOD] >> TStreamingQueryTest::CreateStreamingQuerySchemeErrors >> TStreamingQueryTest::ParallelCreateSameStreamingQuery [GOOD] >> TResourcePoolTest::AlterResourcePoolShouldFailIfSuchEntityNotExists [GOOD] >> TxUsage::The_TxWriteInfo_Is_Deleted_After_The_Immediate_Transaction [GOOD] >> TSchemeShardDecimalTypesInTables::CopyTableShouldNotFailOnDisabledFeatureFlag [GOOD] >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters >> TSchemeShardTest::CreateTopicOverDiskSpaceQuotas [GOOD] >> TSchemeShardTest::DefaultStorageConfig >> TxUsage::WriteToTopic_Demo_43_Query [GOOD] >> TStreamingQueryTest::CreateStreamingQueryFailAlreadyExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ParallelCreateStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:34:44.628104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:44.628210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:44.628273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:44.628330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:44.628374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2026-01-04T02:34:44.628408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:44.628486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:44.628552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:44.629456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:44.629763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:44.722117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:44.722197Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:44.737072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:44.737277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:44.737456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:44.745642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:44.745919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:44.746644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:44.746920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:44.749968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:44.750185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:44.751282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:44.751338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:44.751458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:44.751507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:44.751552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:44.751717Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:44.990864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:44.995101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:44.995296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:44.995397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:44.995506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:44.995572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:44.995644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:44.995708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:44.995777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:44.995907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:44.995991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:44.996116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:44.996186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:44.996274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:44.996380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... th "/MyRoot/DirA/MyStreamingQuery1" took 232us result status StatusSuccess 2026-01-04T02:34:46.105747Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery1" PathDescription { Self { Name: "MyStreamingQuery1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery1" Properties { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:46.106527Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyStreamingQuery2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:46.106758Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyStreamingQuery2" took 189us result status StatusSuccess 2026-01-04T02:34:46.107048Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery2" PathDescription { Self { Name: "MyStreamingQuery2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 
ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery2" Properties { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:46.107675Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:46.107826Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 154us result status StatusSuccess 2026-01-04T02:34:46.108237Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "MyStreamingQuery1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyStreamingQuery2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 
MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:46.108695Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyStreamingQuery1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:46.108838Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyStreamingQuery1" took 140us result status StatusSuccess 2026-01-04T02:34:46.109111Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery1" PathDescription { Self { Name: "MyStreamingQuery1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery1" Properties { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:46.109558Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyStreamingQuery2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:46.109751Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyStreamingQuery2" took 172us result status StatusSuccess 2026-01-04T02:34:46.110376Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyStreamingQuery2" PathDescription { Self { Name: "MyStreamingQuery2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery2" Properties { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TStreamingQueryTest::AlterStreamingQueryFailNameConflict [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ParallelCreateSameStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:34:45.211914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:45.212054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:45.212102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:45.212142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:45.212190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:45.212250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:45.212359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:45.212447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:45.213462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:45.213906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:45.323680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:45.323762Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:45.338774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:45.339406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:45.339590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:45.348901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:45.349198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:45.350099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:45.350368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:45.353975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:45.354186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:45.355471Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:45.355888Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:45.355982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:45.356060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:45.356107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:45.356308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:45.519855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:34:45.520998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.521166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.521274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.521359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.523184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.523367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.523446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.523515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.523649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.523740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.523820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.523897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.524017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.524148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
escribe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:46.384124Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 259us result status StatusSuccess 2026-01-04T02:34:46.384499Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "NilNoviSubLuna" Properties { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:46.384980Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:46.385182Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 146us result status StatusSuccess 2026-01-04T02:34:46.385509Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "NilNoviSubLuna" Properties { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 101 2026-01-04T02:34:46.385913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:34:46.385963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-04T02:34:46.386058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:34:46.386079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-01-04T02:34:46.386131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:34:46.386150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:34:46.386614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:34:46.386785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:34:46.386825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:711:2701] 2026-01-04T02:34:46.387063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:34:46.387185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:34:46.387233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:34:46.387259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:711:2701] 2026-01-04T02:34:46.387399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:34:46.387426Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:711:2701] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2026-01-04T02:34:46.387913Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:46.388136Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 229us result status StatusSuccess 2026-01-04T02:34:46.388495Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "NilNoviSubLuna" Properties { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2026-01-04T02:34:46.392533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "NilNoviSubLuna" } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:34:46.392792Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_streaming_query.cpp:290: [72057594046678944] CreateNewStreamingQuery, opId# 104:0, tx# WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery FailOnExist: false CreateStreamingQuery { Name: "NilNoviSubLuna" } 2026-01-04T02:34:46.392863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_streaming_query.cpp:230: [72057594046678944] TCreateStreamingQuery Propose: opId# 104:0, path# /MyRoot/NilNoviSubLuna 
2026-01-04T02:34:46.393038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 104:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-04T02:34:46.395628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges)" TxId: 104 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-04T02:34:46.395837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), operation: CREATE STREAMING QUERY, path: NilNoviSubLuna TestModificationResult got TxId: 104, wait until txId: 104 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_resource_pool/unittest >> TResourcePoolTest::AlterResourcePoolShouldFailIfSuchEntityNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:34:43.165043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:43.165171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:43.165231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:43.165281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:43.165328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:43.165363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:43.165420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:43.165501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# 
false 2026-01-04T02:34:43.166651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:43.166997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:43.252152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:43.252231Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:43.267573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:43.267814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:43.268016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:43.279663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:43.279919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:43.280730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:43.281067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:43.285902Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:43.286145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:43.287327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:43.287400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:43.287583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:43.287646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:43.287700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:43.287907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:43.436475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:43.437527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:43.437660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:43.437724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:43.437773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:43.437853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:43.437939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:43.438021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:43.438105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:43.438173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:43.438226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:43.438273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:43.438320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:43.438368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:43.438436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
is done id#101:0 progress is 3/3 2026-01-04T02:34:46.289658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-04T02:34:46.289687Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-01-04T02:34:46.289712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-04T02:34:46.289738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: false 2026-01-04T02:34:46.289769Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-04T02:34:46.289860Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:34:46.289907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:34:46.289982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:34:46.290020Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2026-01-04T02:34:46.290043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:1 2026-01-04T02:34:46.290075Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:34:46.290097Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 2026-01-04T02:34:46.290117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:2 2026-01-04T02:34:46.290146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-04T02:34:46.290175Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 4, subscribers: 0 2026-01-04T02:34:46.290212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-04T02:34:46.290245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 6 2026-01-04T02:34:46.290294Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 39], 5 2026-01-04T02:34:46.290318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 40], 3 2026-01-04T02:34:46.293110Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:34:46.293219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:34:46.293257Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:34:46.293303Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-04T02:34:46.293373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:34:46.294206Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:34:46.294291Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:34:46.294331Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:34:46.294361Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 6 2026-01-04T02:34:46.294395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:34:46.295766Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:34:46.295869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:34:46.295906Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:34:46.295933Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 5 2026-01-04T02:34:46.295974Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:34:46.297004Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:34:46.297080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:34:46.297108Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:34:46.297135Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-01-04T02:34:46.297165Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:34:46.297239Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:34:46.299990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:34:46.300121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:34:46.302256Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:34:46.302434Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:34:46.302657Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:34:46.302698Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:34:46.303101Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:34:46.303214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:34:46.303255Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:724:2713] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-04T02:34:46.306831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.metadata/workload_manager/pools" OperationType: ESchemeOpAlterResourcePool CreateResourcePool { Name: "MyResourcePool" Properties { Properties { key: "concurrent_query_limit" value: "20" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:34:46.307090Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_resource_pool.cpp:123: [72057594046678944] TAlterResourcePool Propose: opId# 102:0, path# /MyRoot/.metadata/workload_manager/pools/MyResourcePool 2026-01-04T02:34:46.307294Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/MyResourcePool', error: path hasn't been resolved, nearest resolved path: '/MyRoot/.metadata/workload_manager/pools' (id: [OwnerId: 72057594046678944, LocalPathId: 40]), at schemeshard: 72057594046678944 2026-01-04T02:34:46.310321Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.metadata/workload_manager/pools/MyResourcePool\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.metadata/workload_manager/pools\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:46.310620Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/.metadata/workload_manager/pools/MyResourcePool', error: path hasn't been resolved, nearest resolved path: '/MyRoot/.metadata/workload_manager/pools' (id: [OwnerId: 72057594046678944, LocalPathId: 40]), operation: ALTER RESOURCE POOL, path: MyResourcePool TestModificationResult got TxId: 102, wait until txId: 102 |88.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_background_cleaning/unittest >> TSchemeshardBackgroundCleaningTest::SchemeshardBackgroundCleaningTestSimpleCleanIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:232:2060] recipient: [1:226:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:232:2060] recipient: [1:226:2145] Leader for TabletID 72057594046678944 is [1:244:2157] sender: [1:245:2060] recipient: [1:226:2145] 2026-01-04T02:32:47.531982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:32:47.532074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:47.532217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:32:47.532271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:32:47.532313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:32:47.532345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:32:47.532470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:32:47.532557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:32:47.533557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:32:47.538048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:32:47.705876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:32:47.705948Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:47.731942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:32:47.732075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:32:47.732297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:32:47.750304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:32:47.750662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:32:47.751473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:32:47.751860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:32:47.760119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:47.760338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:32:47.761732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:32:47.761821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:32:47.762027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:32:47.762078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:32:47.762141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:32:47.762287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:32:47.929382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:32:47.930475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.930619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.930713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.930792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.930874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.930942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.931030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.931105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.931182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.931276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.931341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.931436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.931530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:32:47.931597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:329:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
rd_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:245:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:34:42.390540Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:34:42.390616Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:245:2156], Recipient [7:245:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:34:42.390648Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:34:42.799257Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:245:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:34:42.799332Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:34:42.799407Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:245:2156], Recipient [7:245:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:34:42.799437Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:34:43.122967Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:245:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:34:43.123037Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:34:43.123100Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:245:2156], Recipient [7:245:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:34:43.123130Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:34:43.497015Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:245:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:34:43.497108Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:34:43.497192Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:245:2156], Recipient [7:245:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:34:43.497224Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:34:43.866711Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:245:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:34:43.866789Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event 
TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:34:43.866862Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:245:2156], Recipient [7:245:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:34:43.866892Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:34:44.251197Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:245:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:34:44.251290Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:34:44.251368Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:245:2156], Recipient [7:245:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:34:44.251401Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:34:44.631833Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:245:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:34:44.631912Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:34:44.631984Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:245:2156], Recipient [7:245:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:34:44.632016Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:34:45.074233Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [7:245:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:34:45.074318Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:34:45.074394Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [7:245:2156], Recipient [7:245:2156]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:34:45.074425Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:34:45.166180Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [7:1496:3244], Recipient [7:245:2156]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-04T02:34:45.166280Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:34:45.166432Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/tmp/TempTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:34:45.166679Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable" took 245us result status StatusPathDoesNotExist 2026-01-04T02:34:45.166863Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:34:45.167412Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [7:1497:3245], Recipient [7:245:2156]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-04T02:34:45.167474Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:34:45.167634Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:34:45.167811Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/tmp" took 204us result status StatusPathDoesNotExist 2026-01-04T02:34:45.167972Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:34:45.168455Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [7:1498:3246], Recipient [7:245:2156]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true } 2026-01-04T02:34:45.168513Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event 
TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:34:45.168628Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/tmp/TempTable/ValueIndex" Options { ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:34:45.168805Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/tmp/TempTable/ValueIndex" took 181us result status StatusPathDoesNotExist 2026-01-04T02:34:45.168969Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/tmp/TempTable/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/tmp/TempTable/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_resource_pool/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateAlterDropSolomon [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:33:50.120339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:33:50.120437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:33:50.120478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:33:50.120518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:33:50.120556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:33:50.120585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:33:50.120672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:33:50.120768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:33:50.121627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:33:50.123141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:33:50.244588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:33:50.244651Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:50.263061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:33:50.267349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:33:50.267561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:33:50.273932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:33:50.274236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:33:50.274964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:33:50.275253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:33:50.280934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:33:50.281184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:33:50.282696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:33:50.282764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:33:50.282906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:33:50.282959Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:33:50.282999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:33:50.283135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:33:50.547577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:33:50.548790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:50.548927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:50.549012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:50.549104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:50.549181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:50.549273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:50.549349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:50.549442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:50.549524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:50.549591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:50.549685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:50.549752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:50.552368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:50.552503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
SHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:34:45.619238Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:34:45.619269Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:34:45.620577Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:34:45.621818Z node 17 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-01-04T02:34:45.623404Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-04T02:34:45.623924Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409548 2026-01-04T02:34:45.625165Z node 17 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-04T02:34:45.625396Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:45.625688Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-04T02:34:45.627030Z node 17 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-04T02:34:45.627278Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-04T02:34:45.627489Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-04T02:34:45.629287Z node 17 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409546 2026-01-04T02:34:45.629535Z node 17 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-04T02:34:45.629763Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 Forgetting tablet 72075186233409549 2026-01-04T02:34:45.631273Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:34:45.631362Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:34:45.631493Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 Forgetting tablet 72075186233409547 2026-01-04T02:34:45.632073Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:34:45.634631Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-04T02:34:45.634725Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-04T02:34:45.637247Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:34:45.637299Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-04T02:34:45.637404Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-04T02:34:45.637434Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-04T02:34:45.637501Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:34:45.637565Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-04T02:34:45.637828Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-04T02:34:45.638203Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:34:45.638282Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:34:45.638884Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:34:45.639013Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got 
EvNotifyTxCompletionResult 2026-01-04T02:34:45.639090Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [17:542:2494] TestWaitNotification: OK eventTxId 103 2026-01-04T02:34:45.639857Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Solomon" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:45.640150Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Solomon" took 337us result status StatusPathDoesNotExist 2026-01-04T02:34:45.640387Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Solomon\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Solomon" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 wait until 72075186233409546 is deleted wait until 72075186233409547 is deleted wait until 72075186233409548 is deleted wait until 72075186233409549 is deleted 2026-01-04T02:34:45.640998Z node 17 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409546 2026-01-04T02:34:45.641116Z node 17 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409547 2026-01-04T02:34:45.641156Z node 17 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409548 2026-01-04T02:34:45.641216Z node 17 :HIVE INFO: tablet_helpers.cpp:1658: [72057594037968897] TEvSubscribeToTabletDeletion, 72075186233409549 Deleted tabletId 72075186233409546 Deleted tabletId 72075186233409547 Deleted tabletId 72075186233409548 Deleted tabletId 72075186233409549 2026-01-04T02:34:45.641874Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:45.642133Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 326us result status StatusSuccess 2026-01-04T02:34:45.642707Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TxUsage::Sinks_Oltp_WriteToTopic_5_Table |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_background_cleaning/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::RecreateObserver [GOOD] Test command err: 2026-01-04T02:32:49.535845Z :RetryDiscoveryWithCancel INFO: Random seed for debugging is 1767493969535812 2026-01-04T02:32:49.979120Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328793132610154:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:49.979168Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:50.050019Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.009296s 2026-01-04T02:32:50.100844Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:50.160817Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328799019672285:2165];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:50.161396Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:50.174944Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:50.183355Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:50.609088Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:50.609877Z node 2 :KQP_PROXY 
WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:50.642088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:50.642207Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:50.673623Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:50.673703Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:50.723897Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:50.744362Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:50.747547Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:50.752106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:50.850213Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:50.870702Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/002d8d/r3tmp/yandexD8DPRc.tmp 2026-01-04T02:32:50.870740Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/002d8d/r3tmp/yandexD8DPRc.tmp 2026-01-04T02:32:50.870949Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/002d8d/r3tmp/yandexD8DPRc.tmp 2026-01-04T02:32:50.871071Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:50.907492Z INFO: TTestServer started on Port 28270 GrpcPort 21441 2026-01-04T02:32:50.912870Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:51.005399Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:21441 2026-01-04T02:32:51.093410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:51.159476Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:54.066865Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7591328816199541696:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.066962Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328816199541685:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.067312Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.070675Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328816199541700:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.070765Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.075441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:32:54.143413Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591328816199541699:2306], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-04T02:32:54.457928Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591328816199541731:2139] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:54.490768Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591328816199541746:2312], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:32:54.492811Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZjAyMzUzZjEtNDQyNmY4NjgtZjg4ZTlkMWMtN2Y2ZjE3Y2E=, ActorId: [2:7591328816199541682:2301], ActorState: ExecuteState, LegacyTraceId: 01ke3dke1f5x00b6rbx70sy39s, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:32:54.490082Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328814607447985:2337], status: SCHEME_ERROR, issues:
<main>: Error: Type annotation, code: 1030
<main>:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:32:54.491411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:54.493900Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NmRhZGU1MzYtOWFmMjQyMDctYmQzYWVjNGItNWQ5ZmI3OTA=, ActorId: [1:7591328814607447901:2328], ActorState: ExecuteState, LegacyTraceId: 01ke3dke2wb17g26cjx5m84xy0, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:32:54.495512Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:32:54.495723Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { po ... sing read session. Close timeout: 0.000000s 2026-01-04T02:34:42.802796Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-04T02:34:42.802830Z :INFO: [/Root] [/Root] [51c1e88-bef4d8a1-f1eb0af5-1c4595ba] Counters: { Errors: 0 CurrentSessionLifetimeMs: 52 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:34:42.802877Z :NOTICE: [/Root] [/Root] [51c1e88-bef4d8a1-f1eb0af5-1c4595ba] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-04T02:34:42.802902Z :DEBUG: [/Root] [/Root] [51c1e88-bef4d8a1-f1eb0af5-1c4595ba] [] Abort session to cluster 2026-01-04T02:34:42.802842Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:34:42.802867Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:34:42.802875Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:34:42.802888Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:34:42.802897Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:34:42.803107Z :INFO: [/Root] [/Root] [fd808027-e53bb0cd-47b59ea6-8e527335] Closing read session. Close timeout: 0.000000s 2026-01-04T02:34:42.803161Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-04T02:34:42.803191Z :INFO: [/Root] [/Root] [fd808027-e53bb0cd-47b59ea6-8e527335] Counters: { Errors: 0 CurrentSessionLifetimeMs: 50 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:34:42.803230Z :NOTICE: [/Root] [/Root] [fd808027-e53bb0cd-47b59ea6-8e527335] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-04T02:34:42.803253Z :DEBUG: [/Root] [/Root] [fd808027-e53bb0cd-47b59ea6-8e527335] [] Abort session to cluster 2026-01-04T02:34:42.803449Z :INFO: [/Root] [/Root] [fd808027-e53bb0cd-47b59ea6-8e527335] Closing read session. Close timeout: 0.000000s 2026-01-04T02:34:42.803503Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-04T02:34:42.803541Z :INFO: [/Root] [/Root] [fd808027-e53bb0cd-47b59ea6-8e527335] Counters: { Errors: 0 CurrentSessionLifetimeMs: 51 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:34:42.803607Z :NOTICE: [/Root] [/Root] [fd808027-e53bb0cd-47b59ea6-8e527335] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:34:42.803723Z :INFO: [/Root] [/Root] [51c1e88-bef4d8a1-f1eb0af5-1c4595ba] Closing read session. Close timeout: 0.000000s 2026-01-04T02:34:42.803770Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): 2026-01-04T02:34:42.803795Z :INFO: [/Root] [/Root] [51c1e88-bef4d8a1-f1eb0af5-1c4595ba] Counters: { Errors: 0 CurrentSessionLifetimeMs: 53 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:34:42.803831Z :NOTICE: [/Root] [/Root] [51c1e88-bef4d8a1-f1eb0af5-1c4595ba] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:34:42.803881Z :INFO: [/Root] [/Root] [e62623b-416b9bde-aa442fef-db46b099] Closing read session. Close timeout: 0.000000s 2026-01-04T02:34:42.803933Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:0:0 2026-01-04T02:34:42.803959Z :INFO: [/Root] [/Root] [e62623b-416b9bde-aa442fef-db46b099] Counters: { Errors: 0 CurrentSessionLifetimeMs: 61 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:34:42.803996Z :NOTICE: [/Root] [/Root] [e62623b-416b9bde-aa442fef-db46b099] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:34:42.814525Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_9953475812132749846_v1 grpc read done: success# 0, data# { } 2026-01-04T02:34:42.814553Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_3_1_9953475812132749846_v1 grpc read failed 2026-01-04T02:34:42.814571Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_3_1_9953475812132749846_v1 grpc closed 2026-01-04T02:34:42.814600Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_3_1_9953475812132749846_v1 is DEAD 2026-01-04T02:34:42.815259Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 2 consumer shared/user session shared/user_3_2_1999824921949733259_v1 grpc read done: success# 0, data# { } 2026-01-04T02:34:42.815266Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 2 consumer shared/user session shared/user_3_2_1999824921949733259_v1 grpc read failed 2026-01-04T02:34:42.815277Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 2 consumer shared/user session shared/user_3_2_1999824921949733259_v1 grpc closed 2026-01-04T02:34:42.815287Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 2 consumer shared/user session shared/user_3_2_1999824921949733259_v1 is DEAD 2026-01-04T02:34:42.815566Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 3 consumer shared/user session shared/user_3_3_7443546787196305114_v1 grpc read done: success# 0, data# { } 2026-01-04T02:34:42.815573Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 3 consumer shared/user session shared/user_3_3_7443546787196305114_v1 grpc read failed 2026-01-04T02:34:42.815585Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 3 consumer shared/user session shared/user_3_3_7443546787196305114_v1 grpc closed 2026-01-04T02:34:42.815594Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 3 consumer shared/user session shared/user_3_3_7443546787196305114_v1 is DEAD 2026-01-04T02:34:42.816800Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591329282106907024:2490] disconnected. 
2026-01-04T02:34:42.816828Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591329282106907024:2490] disconnected; active server actors: 1 2026-01-04T02:34:42.818857Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591329282106907024:2490] client user disconnected session shared/user_3_1_9953475812132749846_v1 2026-01-04T02:34:42.818564Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_3_1_9953475812132749846_v1 2026-01-04T02:34:42.818609Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [3:7591329282106907031:2500] destroyed 2026-01-04T02:34:42.822031Z node 4 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_1_9953475812132749846_v1 2026-01-04T02:34:42.821533Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037893][rt3.dc1--test-topic] consumer user rebalancing was scheduled 2026-01-04T02:34:42.821605Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591329282106907020:2491] disconnected. 2026-01-04T02:34:42.821626Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591329282106907020:2491] disconnected; active server actors: 1 2026-01-04T02:34:42.821646Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591329282106907020:2491] client user disconnected session shared/user_3_2_1999824921949733259_v1 2026-01-04T02:34:42.821677Z node 3 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591329282106907023:2494] disconnected. 
2026-01-04T02:34:42.821691Z node 3 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591329282106907023:2494] disconnected; active server actors: 1 2026-01-04T02:34:42.821707Z node 3 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [3:7591329282106907023:2494] client user disconnected session shared/user_3_3_7443546787196305114_v1 2026-01-04T02:34:42.904433Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:34:42.904468Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:34:42.904482Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:34:42.904503Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:34:42.904516Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:34:43.004878Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:34:43.004917Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:34:43.004933Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:34:43.004951Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:34:43.004965Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:34:43.105155Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:34:43.105193Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:34:43.105207Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:34:43.105225Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:34:43.105238Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |88.2%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> TSchemeShardTest::ReadOnlyMode [GOOD] >> TSchemeShardTest::PathErrors >> TStreamingQueryTest::CreateStreamingQuerySchemeErrors [GOOD] |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> BasicUsage::AlterTopicWithSharedConsumer_SetDeleteDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_SetMoveDeadLetterPolicy >> TStreamingQueryTest::ParallelAlterStreamingQuery [GOOD] >> TSchemeShardTest::CopyTableAccessToPrivatePaths-EnableAccessToIndexImplTables-true [GOOD] >> TSchemeShardTest::CannotAddChannelProfileIdToStorageConfigTable >> TxUsage::WriteToTopic_Demo_45_Table ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::CreateStreamingQueryFailAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:34:44.673144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:44.673236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:44.673272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:44.673325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:44.673379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:44.673420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:44.673486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:44.673565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:44.674554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:44.674903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:44.786804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:44.786868Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:44.803592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:44.808000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:44.808264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:44.815017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:44.815313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:44.816000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:44.816308Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:44.820398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:44.820779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:44.822062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:44.822135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:44.822251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:44.822309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:44.822347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:44.822499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:45.003256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.004301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.004433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.004497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.004608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.004681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.004742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.004814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.004918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.005014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.005091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.005158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 
281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.005262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.005344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.005450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:34:47.042900Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:34:47.042942Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-04T02:34:47.042995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:34:47.043556Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:34:47.043630Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:34:47.043657Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:34:47.043682Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-04T02:34:47.043710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:34:47.043775Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication 
complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:34:47.046088Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:34:47.046186Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:34:47.046374Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:34:47.046416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:34:47.046785Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:34:47.046908Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.046946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:702:2691] TestWaitNotification: OK eventTxId 101 2026-01-04T02:34:47.047322Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:47.047526Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 229us result status StatusSuccess 2026-01-04T02:34:47.047956Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyStreamingQuery" PathDescription { Self { Name: "MyStreamingQuery" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" 
MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO Output SELECT * FROM Input" } Properties { key: "run" value: "true" } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-01-04T02:34:47.051176Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } Properties { key: "resource_pool" value: "my_pool" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:34:47.051471Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_streaming_query.cpp:290: [72057594046678944] CreateNewStreamingQuery, opId# 102:0, tx# WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery FailOnExist: false CreateStreamingQuery { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } Properties { key: "resource_pool" value: "my_pool" } } } 2026-01-04T02:34:47.051566Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_streaming_query.cpp:230: [72057594046678944] TCreateStreamingQuery Propose: opId# 102:0, path# /MyRoot/MyStreamingQuery 2026-01-04T02:34:47.051690Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-04T02:34:47.053836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-04T02:34:47.054030Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeStreamingQuery, state: EPathStateNoChanges), operation: CREATE STREAMING QUERY, path: MyStreamingQuery TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:34:47.054318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:34:47.054359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:34:47.054683Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:34:47.054759Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.054796Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:710:2699] TestWaitNotification: OK eventTxId 102 2026-01-04T02:34:47.055257Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:47.055421Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 220us result status StatusSuccess 2026-01-04T02:34:47.055787Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyStreamingQuery" PathDescription { Self { Name: "MyStreamingQuery" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO Output SELECT * FROM Input" } Properties { key: "run" value: "true" } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::AlterStreamingQueryFailNameConflict [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:34:44.801299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, 
RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:44.801388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:44.801426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:44.801462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:44.801521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:44.801569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:44.801653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:44.801744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:44.802640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:44.802929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:44.909426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:44.909488Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:44.923042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:44.927322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:44.927537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:44.938925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:44.939229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:44.939980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:44.940271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:44.943693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:44.943915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:44.945205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2026-01-04T02:34:44.945283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:44.945401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:44.945457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:44.945499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:44.945655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:45.139694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.140910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.141069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.141161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.141263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.141334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.141403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.141466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.141541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.141641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.141728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.141799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.145554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.145682Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.145790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... T_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-04T02:34:47.149003Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-04T02:34:47.149447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:34:47.149507Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:34:47.149619Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:34:47.149658Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:34:47.149699Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:34:47.149730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:34:47.149769Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-04T02:34:47.149863Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:34:47.149904Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:34:47.149941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:34:47.150012Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:34:47.150059Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-04T02:34:47.150096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-04T02:34:47.150127Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 
72057594046678944, LocalPathId: 38], 2 2026-01-04T02:34:47.150655Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:34:47.150753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:34:47.150798Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:34:47.150857Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-04T02:34:47.150904Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:34:47.151511Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:34:47.151584Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:34:47.151618Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:34:47.151650Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-04T02:34:47.151709Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:34:47.151786Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:34:47.154763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:34:47.155555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:34:47.155765Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:34:47.155807Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:34:47.156153Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 
2026-01-04T02:34:47.156243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.156282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:708:2697] TestWaitNotification: OK eventTxId 101 2026-01-04T02:34:47.156725Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/UniqueName" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:47.156936Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/UniqueName" took 228us result status StatusSuccess 2026-01-04T02:34:47.157345Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-01-04T02:34:47.160798Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterStreamingQuery CreateStreamingQuery { Name: "UniqueName" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:34:47.160985Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_streaming_query.cpp:208: [72057594046678944] TAlterStreamingQuery Propose: opId# 102:0, path# /MyRoot/UniqueName 2026-01-04T02:34:47.161151Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeStreamingQuery, at schemeshard: 72057594046678944 2026-01-04T02:34:47.163304Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeStreamingQuery" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-04T02:34:47.163517Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeStreamingQuery, operation: ALTER STREAMING QUERY, path: UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:34:47.163825Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:34:47.163872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:34:47.164282Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:34:47.164382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.164423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:716:2705] TestWaitNotification: OK eventTxId 102 |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TSchemeShardDecimalTypesInTables::CreateWithWrongParameters [GOOD] >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters |88.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} |88.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::CreateStreamingQuerySchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:34:45.196148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:45.196222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:45.196253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:45.196285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:45.196318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:45.196344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:45.196382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:45.196446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:45.197144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:45.197355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:45.292449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:45.292507Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:45.315541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:45.315760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:45.315979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:45.325308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:45.325541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:45.326266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2026-01-04T02:34:45.326553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:45.329740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:45.329983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:45.331191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:45.331272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:45.331382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:45.331429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:45.331479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:45.331675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:45.480312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.481281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.481401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.481482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.481555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.481620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.481683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.481742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.481879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.482005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.482073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.482149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.482229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.482302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:45.482407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... emeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:34:47.489684Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:47.489918Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:34:47.490146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:47.490212Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:34:47.490264Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:34:47.490329Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:34:47.492860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:47.492946Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:34:47.492995Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:34:47.495145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:47.495206Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose 
ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:47.495258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:34:47.495331Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:34:47.495469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:34:47.497322Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:34:47.497502Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-04T02:34:47.497908Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:47.498042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 8589936750 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:47.498092Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:34:47.498379Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:34:47.498444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:34:47.498618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:34:47.498800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:34:47.500874Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:47.500931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:47.501157Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2026-01-04T02:34:47.501210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-04T02:34:47.501644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:47.501695Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-04T02:34:47.501831Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:34:47.501878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:34:47.501920Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:34:47.501964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:34:47.502010Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:34:47.502055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:34:47.502098Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:34:47.502160Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:34:47.502239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:34:47.502283Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-04T02:34:47.502325Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-04T02:34:47.502783Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:34:47.502883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:34:47.502926Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:34:47.502979Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:34:47.503044Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:34:47.503143Z node 2 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:34:47.506158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:34:47.506648Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-04T02:34:47.507167Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:678:2667] Bootstrap 2026-01-04T02:34:47.508376Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:678:2667] Become StateWork (SchemeCache [2:683:2672]) 2026-01-04T02:34:47.511504Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:34:47.511702Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_streaming_query.cpp:290: [72057594046678944] CreateNewStreamingQuery, opId# 101:0, tx# WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateStreamingQuery CreateStreamingQuery { Name: "" } 2026-01-04T02:34:47.511781Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_streaming_query.cpp:230: [72057594046678944] TCreateStreamingQuery Propose: opId# 101:0, path# /MyRoot/ 2026-01-04T02:34:47.511893Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2026-01-04T02:34:47.512768Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:678:2667] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:34:47.515598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:47.515827Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, operation: CREATE STREAMING QUERY, path: 2026-01-04T02:34:47.516229Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ParallelAlterStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:34:46.507314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact 
single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:46.507410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:46.507454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:46.507521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:46.507570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:46.507636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:46.507727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:46.507829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:46.509097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:46.509547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:46.610838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:46.610910Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:46.627425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:46.632115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:46.632777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:46.638904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:46.639170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:46.639960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:46.640280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:46.643492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:46.643705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:46.645006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2026-01-04T02:34:46.645121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:46.645262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:46.645314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:46.645366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:46.645535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:46.802936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:46.804196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:46.804388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:46.804484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:46.804622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:46.804714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:46.804799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:46.804902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:46.805003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:46.805094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:46.805187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:46.805287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:46.805374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:34:46.805477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:46.805549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... transaction, txId: 119, at schemeshard: 72057594046678944 2026-01-04T02:34:47.717981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 120, at schemeshard: 72057594046678944 2026-01-04T02:34:47.718076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 113: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.718118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 113: satisfy waiter [1:789:2779] 2026-01-04T02:34:47.718314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2026-01-04T02:34:47.718418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.718453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [1:789:2779] 2026-01-04T02:34:47.718557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2026-01-04T02:34:47.718680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2026-01-04T02:34:47.718741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.718764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [1:789:2779] 2026-01-04T02:34:47.718899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.718928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [1:789:2779] 2026-01-04T02:34:47.719012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2026-01-04T02:34:47.719212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.719242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [1:789:2779] 2026-01-04T02:34:47.719341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, 
unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-01-04T02:34:47.719400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.719426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [1:789:2779] 2026-01-04T02:34:47.719520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-01-04T02:34:47.719658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.719684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [1:789:2779] 2026-01-04T02:34:47.719884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2026-01-04T02:34:47.719965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.719990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [1:789:2779] 2026-01-04T02:34:47.720112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 128, at schemeshard: 72057594046678944 2026-01-04T02:34:47.720169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.720192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [1:789:2779] 2026-01-04T02:34:47.720342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.720371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [1:789:2779] 2026-01-04T02:34:47.720487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2026-01-04T02:34:47.720550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.720574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [1:789:2779] 2026-01-04T02:34:47.720693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 130, at schemeshard: 72057594046678944 2026-01-04T02:34:47.720762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.720805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [1:789:2779] 2026-01-04T02:34:47.720961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 131, at schemeshard: 72057594046678944 2026-01-04T02:34:47.721043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.721070Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:789:2779] 2026-01-04T02:34:47.721167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.721184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:789:2779] 2026-01-04T02:34:47.721267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.721307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:789:2779] 2026-01-04T02:34:47.721464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 128: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.721494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [1:789:2779] 2026-01-04T02:34:47.721649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.721675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [1:789:2779] 2026-01-04T02:34:47.721823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.721849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [1:789:2779] 2026-01-04T02:34:47.721994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2026-01-04T02:34:47.722024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [1:789:2779] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 2026-01-04T02:34:47.725041Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2026-01-04T02:34:47.725306Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 252us result status StatusSuccess 2026-01-04T02:34:47.725828Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyStreamingQuery" PathDescription { Self { Name: "MyStreamingQuery" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeStreamingQuery CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 StreamingQueryVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } StreamingQueryDescription { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } Properties { key: "resource_pool" value: "my_pool" } Properties { key: "run" value: "true" } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest |88.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} |88.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_background_cleaning/test-results/unittest/{meta.json ... results_accumulator.log} |88.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json ... results_accumulator.log} |88.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview/ydb-core-tx-schemeshard-ut_sysview |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TSchemeShardTest::DefaultStorageConfig [GOOD] >> TSchemeShardTest::DefaultStorageConfigTableWithChannelProfileIdBuildIndex |88.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_resource_pool/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTest::DropPQAbort [GOOD] >> TSchemeShardTest::ManyDirs >> TStreamingQueryTest::CreateStreamingQuery >> TSchemeShardTest::PathErrors [GOOD] >> TSchemeShardTest::NestedDirs >> TStreamingQueryTest::ReadOnlyModeAndCreateStreamingQuery >> TStreamingQueryTest::CreateStreamingQueryWithProperties |88.2%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |88.2%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots |88.2%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/tx-schemeshard-ut_continuous_backup_reboots >> TSchemeShardTest::CannotAddChannelProfileIdToStorageConfigTable [GOOD] >> IndexBuildTest::CheckLimitWithDroppedIndex [GOOD] >> IndexBuildTest::CancelBuildUniq >> TSchemeShardDecimalTypesInTables::AlterWithWrongParameters [GOOD] >> TSchemeShardInfoTypesTest::EmptyFamilies [GOOD] >> TSchemeShardInfoTypesTest::LostId [GOOD] >> TSchemeShardInfoTypesTest::DeduplicationOrder [GOOD] >> TSchemeShardInfoTypesTest::MultipleDeduplications [GOOD] >> TSchemeShardInfoTypesTest::IndexBuildInfoAddParent [GOOD] >> TSchemeShardInfoTypesTest::FillItemsFromSchemaMappingTest >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] >> TStreamingQueryTest::DropStreamingQueryTwice >> TxUsage::WriteToTopic_Demo_27_Table [GOOD] >> TStreamingQueryTest::CreateStreamingQueryWithProperties [GOOD] >> TStreamingQueryTest::DropStreamingQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CannotAddChannelProfileIdToStorageConfigTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:34:04.825693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:04.825776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:04.825953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:04.825993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:04.826031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:04.826072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:04.826151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:04.826226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:04.827089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:04.827379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:04.998681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:04.998741Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:05.034085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:05.039099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:05.039297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:05.065271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:05.065659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:05.066552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:05.066865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:05.070782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:05.070999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:05.072616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:05.072680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:05.072800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:05.072846Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:05.072892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:05.073042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:05.298572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:05.299670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:05.299796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:05.299859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:05.300013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:05.300121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:05.300217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:05.300317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:05.300404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:05.300494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:05.300560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:05.300643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:05.300714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:05.300783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:05.300865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
end, to populator: [16:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-04T02:34:49.578487Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [16:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-04T02:34:49.580097Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:34:49.580230Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:34:49.582362Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:49.582543Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:49.582620Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:34:49.582683Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-04T02:34:49.582765Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:34:49.584024Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:49.584157Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:49.584194Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:34:49.584238Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:34:49.584277Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:34:49.584374Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-04T02:34:49.585111Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 
DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 3133 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-04T02:34:49.585160Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:34:49.585309Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 3133 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-04T02:34:49.585851Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 3133 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 2026-01-04T02:34:49.587374Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 734 RawX2: 68719479455 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:34:49.587462Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:34:49.587684Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 734 RawX2: 68719479455 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:34:49.587807Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:34:49.587991Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 734 RawX2: 68719479455 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:34:49.588156Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:49.588247Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:34:49.588321Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:34:49.588399Z node 16 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-04T02:34:49.593223Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:34:49.593652Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:34:49.594500Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:34:49.595073Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:34:49.595542Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:34:49.595613Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:34:49.595846Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:34:49.595927Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:34:49.596008Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:34:49.596093Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:34:49.596172Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:34:49.596304Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [16:702:2691] message: TxId: 102 2026-01-04T02:34:49.596405Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:34:49.596489Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:34:49.596551Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:34:49.596781Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:34:49.599863Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:34:49.599992Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [16:762:2739] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2026-01-04T02:34:49.604802Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table1" PartitionConfig { ChannelProfileId: 0 } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:34:49.605176Z node 16 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/Table1, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:34:49.605680Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Setting ChannelProfileId to 0 for tables with storage config is not allowed, at schemeshard: 72057594046678944 2026-01-04T02:34:49.609520Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Setting ChannelProfileId to 0 for tables with storage config is not allowed" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:49.610010Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Setting ChannelProfileId to 0 for tables with storage config is not allowed, operation: ALTER TABLE, path: /MyRoot/Table1 TestModificationResult got TxId: 103, wait until txId: 103 >> TStreamingQueryTest::CreateStreamingQuery [GOOD] >> TStreamingQueryTest::AlterStreamingQueryFailOnNotExists >> TSchemeShardTest::DefaultStorageConfigTableWithChannelProfileIdBuildIndex [GOOD] |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> TStreamingQueryTest::ReadOnlyModeAndCreateStreamingQuery [GOOD] >> TSchemeShardTest::NestedDirs [GOOD] >> TSchemeShardTest::NewOwnerOnDatabase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::BackupBackupCollection-WithIncremental-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:34:04.287385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:04.287476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:04.287520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:04.287556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:04.287592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:04.287621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:04.287687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:04.287777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:04.288692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:04.288975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:04.385529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:04.385589Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:04.400294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:04.404601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:04.404800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:04.410770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:04.411100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:04.411857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:04.412224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:04.415448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:04.415673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:04.417178Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:04.417251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:04.417378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:04.417435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:04.417503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:04.417699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:04.580966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:34:04.584900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:04.585068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:04.585143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:04.585249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:04.585333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:04.585477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:04.585562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:04.585648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:04.585726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:04.585785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:04.585934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:04.586021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:04.586092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:04.586170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
6 ParentPathId: 80 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 } ChildrenExist: true } Children { Name: "DirB" PathId: 86 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "Table2" PathId: 91 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 85 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:50.113770Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/Table2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:50.115788Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/Table2" took 2.02ms result status StatusSuccess 2026-01-04T02:34:50.117824Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/Table2" PathDescription { Self { Name: "Table2" PathId: 91 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value1" Type: "Utf8" TypeId: 4608 
Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 91 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:50.121548Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:50.122736Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB" took 1.24ms result status StatusSuccess 2026-01-04T02:34:50.123341Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB" PathDescription { Self { Name: "DirB" PathId: 86 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 85 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "Table3" PathId: 94 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 
CreateStep: 5000046 ParentPathId: 86 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 86 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:50.124751Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB/Table3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:50.125144Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB/Table3" took 424us result status StatusSuccess 2026-01-04T02:34:50.125729Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.backups/collections/MyCollection1/19700101000001Z_incremental/DirA/DirB/Table3" PathDescription { Self { Name: "Table3" PathId: 94 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 109 CreateStep: 5000046 ParentPathId: 86 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value0" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false 
LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 103 PathsLimit: 10000 ShardsInside: 42 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 12 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 94 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TxUsage::WriteToTopic_Demo_27_Query |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> ScriptExecutionsTest::BackgroundOperationFinalization [GOOD] >> ScriptExecutionsTest::BackgroundChecksStartAfterRestart >> TSchemeShardInfoTypesTest::FillItemsFromSchemaMappingTest [GOOD] >> TStreamingQueryTest::DropStreamingQueryTwice [GOOD] >> TStreamingQueryTest::DropStreamingQueryFailOnNotExists >> TStreamingQueryTest::DropStreamingQuery [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::ReadOnlyModeAndCreateStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:34:50.213347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:50.213435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:50.213477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:50.213541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:50.213581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:50.213611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:50.213688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:50.213785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:50.214722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:50.215064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:50.306536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:50.306607Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:50.322444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:50.326839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:50.327093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:50.332793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:50.333066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:50.333742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:50.334061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:50.336968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:50.337169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:50.338408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:50.338490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:50.338624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:50.338676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:50.338716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:50.338894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:50.488425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.489469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.489604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.489687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.489782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.489871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.489934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.490020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.490095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.490181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.490267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.490336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.490408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.490498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.490571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard: 72057594046678944 2026-01-04T02:34:51.719897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 105 ready parts: 1/1 2026-01-04T02:34:51.720087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 105 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:34:51.722057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:34:51.722229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:34:51.722281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:34:51.722337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-04T02:34:51.722386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-04T02:34:51.723694Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 40 Version: 2 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:34:51.723785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 40 Version: 2 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:34:51.723816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:34:51.723854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 2 2026-01-04T02:34:51.723887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-04T02:34:51.724003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 0/1, is published: true 2026-01-04T02:34:51.726356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 105:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:105 msg type: 269090816 2026-01-04T02:34:51.726556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 105, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 105 at step: 5000040 
FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000040 2026-01-04T02:34:51.728229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:34:51.733476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:51.733592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 105 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:51.733657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 105:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000040, at schemeshard: 72057594046678944 2026-01-04T02:34:51.733797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-04T02:34:51.734014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:34:51.734096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:34:51.737227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 FAKE_COORDINATOR: Erasing txId 105 2026-01-04T02:34:51.739807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:51.739861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:51.740110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-04T02:34:51.740238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:51.740281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:860:2818], at schemeshard: 72057594046678944, txId: 105, path id: 1 2026-01-04T02:34:51.740336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:860:2818], at schemeshard: 72057594046678944, txId: 105, path id: 40 2026-01-04T02:34:51.740744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:34:51.740808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-04T02:34:51.740936Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-04T02:34:51.740975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:34:51.741025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-04T02:34:51.741108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:34:51.741175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-04T02:34:51.741240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:34:51.741286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-04T02:34:51.741322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-04T02:34:51.741450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-04T02:34:51.741503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2026-01-04T02:34:51.741565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 11 2026-01-04T02:34:51.741620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 40], 3 2026-01-04T02:34:51.742515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:34:51.742620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 11 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:34:51.742669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:34:51.742715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 11 2026-01-04T02:34:51.742763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-04T02:34:51.743700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:34:51.743799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:34:51.743832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:34:51.743876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-01-04T02:34:51.743913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:34:51.743997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-01-04T02:34:51.748058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:34:51.748194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 TestModificationResult got TxId: 105, wait until txId: 105 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::DefaultStorageConfigTableWithChannelProfileIdBuildIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:34:16.623486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:16.623599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:16.623646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:16.623691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:16.623745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:16.623778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:16.623827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:16.623911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-04T02:34:16.624955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:16.625334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:16.747888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:16.747948Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:16.792134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:16.799009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:16.799230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:16.817120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:16.817422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:16.818238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:16.818531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:16.827593Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:16.827842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:16.829350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:16.829415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:16.829517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:16.829583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:16.829628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:16.829829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:17.008816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:17.010120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:17.010280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:17.010356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:17.010467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:17.010565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:17.010663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:17.010751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:17.010848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:17.010971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:17.011038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:17.011151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:17.011354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:17.011451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:17.011514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
status: StatusAccepted 2026-01-04T02:34:51.380440Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-01-04T02:34:51.380505Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:30: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 ProgressState 2026-01-04T02:34:51.380580Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976710760 ready parts: 1/1 2026-01-04T02:34:51.380729Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 281474976710760 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:34:51.383053Z node 17 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking 2026-01-04T02:34:51.383198Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:821:2791], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:34:51.383317Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710760:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:281474976710760 msg type: 269090816 2026-01-04T02:34:51.383443Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710760, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 281474976710760 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710760 at step: 5000043 2026-01-04T02:34:51.383760Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710760, at schemeshard: 72057594046678944 2026-01-04T02:34:51.383801Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 0/1, is published: true 2026-01-04T02:34:51.383843Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: 
NotifyTxCompletion transaction is registered, txId: 281474976710760, at schemeshard: 72057594046678944 2026-01-04T02:34:51.383952Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:51.384066Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 73014446190 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:51.384142Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000043 2026-01-04T02:34:51.384215Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710760:0 128 -> 240 2026-01-04T02:34:51.386744Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-01-04T02:34:51.386802Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2026-01-04T02:34:51.386898Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-04T02:34:51.386933Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-04T02:34:51.386982Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-04T02:34:51.387014Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-04T02:34:51.387060Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-01-04T02:34:51.387141Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [17:129:2154] message: TxId: 281474976710760 2026-01-04T02:34:51.387205Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-04T02:34:51.387247Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-01-04T02:34:51.387279Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710760:0 2026-01-04T02:34:51.387353Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-01-04T02:34:51.389447Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-01-04T02:34:51.389516Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710760 2026-01-04T02:34:51.389580Z node 17 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxReply : TEvNotifyTxCompletionResult, id# 103, txId# 281474976710760 2026-01-04T02:34:51.389705Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:821:2791], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-01-04T02:34:51.391540Z node 17 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking 2026-01-04T02:34:51.391682Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Unlocking TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:821:2791], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:34:51.391737Z node 17 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-04T02:34:51.393509Z node 17 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Done 2026-01-04T02:34:51.393657Z node 17 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 103 Done TBuildInfo{ IndexBuildId: 103, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobal, IndexName: Index, IndexColumn: value, State: Done, SubState: None, 
IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [17:821:2791], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000041, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:34:51.393727Z node 17 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 103, subscribers count# 1 2026-01-04T02:34:51.393926Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:34:51.394001Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [17:912:2871] TestWaitNotification: OK eventTxId 103 |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> TStreamingQueryTest::AlterStreamingQueryFailOnNotExists [GOOD] >> TCutHistoryRestrictions::BasicTest [GOOD] >> TCutHistoryRestrictions::EmptyAllowList [GOOD] >> TCutHistoryRestrictions::EmptyDenyList [GOOD] >> TCutHistoryRestrictions::SameTabletInBothLists [GOOD] >> TCutHistoryRestrictions::BothListsEmpty [GOOD] >> THeavyPerfTest::TTestLoadEverything >> TxUsage::WriteToTopic_Demo_20_RestartNo_Table [GOOD] >> THiveTest::TestDrain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::DropStreamingQuery [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:34:50.292512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:50.292603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:50.292636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:50.292671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:50.292708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:50.292756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing 
config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:50.292835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:50.292911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:50.293755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:50.294161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:50.394477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:50.394552Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:50.426947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:50.431432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:50.431675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:50.437254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:50.437492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:50.438180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:50.438424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:50.441189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:50.441378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:50.442603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:50.442670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:50.442784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:50.442832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:50.442870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:50.443022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:50.596557Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.597651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.597785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.597910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.598013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.598087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.598152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.598222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.598335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.598431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.598507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.598578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.598674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.598769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.598869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
tion RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2026-01-04T02:34:52.605989Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:52.606117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 8589936750 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:52.606175Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_streaming_query.cpp:22: [72057594046678944] TDropStreamingQuery TPropose, operationId: 102:0, HandleReply TEvOperationPlan: step# 5000039 2026-01-04T02:34:52.606271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:34:52.606365Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-04T02:34:52.606538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:34:52.606659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:34:52.607747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:34:52.608260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:34:52.609924Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:52.609968Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:52.610145Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:34:52.610337Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:52.610398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-04T02:34:52.610447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-04T02:34:52.610751Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:34:52.610804Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:34:52.610928Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:34:52.610972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:34:52.611021Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:34:52.611061Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:34:52.611104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-04T02:34:52.611154Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:34:52.611198Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:34:52.611240Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:34:52.611339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:34:52.611393Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-04T02:34:52.611446Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-04T02:34:52.611501Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-04T02:34:52.611979Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:52.612086Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:52.612133Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:34:52.612188Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-04T02:34:52.612243Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:34:52.612685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:34:52.612741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:34:52.612831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:34:52.613187Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:52.613267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:52.613308Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:34:52.613342Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-04T02:34:52.613382Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:34:52.613494Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:34:52.617546Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:34:52.617697Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:34:52.617788Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:34:52.618045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:34:52.618097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:34:52.618531Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:34:52.618638Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:34:52.618681Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:736:2725] TestWaitNotification: OK eventTxId 102 2026-01-04T02:34:52.619226Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyStreamingQuery" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:52.619432Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyStreamingQuery" took 247us result status StatusPathDoesNotExist 2026-01-04T02:34:52.619664Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyStreamingQuery" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TargetTrackingScaleRecommenderPolicy::ScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleOut [GOOD] >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleIn >> TargetTrackingScaleRecommenderPolicy::BigNumbersScaleIn [GOOD] >> TargetTrackingScaleRecommenderPolicy::SpikeResistance [GOOD] >> TargetTrackingScaleRecommenderPolicy::NearTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::AtTarget [GOOD] >> TargetTrackingScaleRecommenderPolicy::Fluctuations [GOOD] >> TargetTrackingScaleRecommenderPolicy::FluctuationsBigNumbers [GOOD] >> TargetTrackingScaleRecommenderPolicy::ScaleInToMaxSeen [GOOD] >> TargetTrackingScaleRecommenderPolicy::Idle [GOOD] >> TScaleRecommenderTest::BasicTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::AlterStreamingQueryFailOnNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:34:50.024830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:50.024925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:50.024972Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:50.025009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default 
configuration 2026-01-04T02:34:50.025055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:50.025094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:50.025146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:50.025207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:50.026178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:50.026515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:50.116981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:50.117062Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:50.133977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:50.134821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:50.135036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:50.151936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:50.152350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:50.153068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:50.153357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:50.162699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:50.162951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:50.164141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:50.164207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:50.164334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:50.164388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:50.164433Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:50.164605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:50.320594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.321585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.321701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.321785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.321877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.321938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.322006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.322066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.322185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.322268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.322348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.322431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.322520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.322599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:50.322691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... :34:52.707986Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:34:52.710035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:52.710114Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:34:52.710158Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:34:52.712007Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:52.712085Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:52.712134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:34:52.712188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:34:52.712326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:34:52.713921Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:34:52.714070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-04T02:34:52.714367Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:52.714512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 8589936750 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:52.714569Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:34:52.714842Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:34:52.714895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:34:52.715097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:34:52.715189Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:34:52.717124Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:52.717188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:52.717413Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:52.717467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-04T02:34:52.717829Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:52.717910Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-04T02:34:52.718043Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:34:52.718083Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:34:52.718130Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:34:52.718167Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:34:52.718212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:34:52.718261Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:34:52.718301Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:34:52.718339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:34:52.718437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:34:52.718495Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-04T02:34:52.718539Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-04T02:34:52.719145Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:34:52.719262Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:34:52.719310Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:34:52.719358Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:34:52.719410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:34:52.719506Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:34:52.722452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:34:52.722942Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-04T02:34:52.723923Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:670:2659] Bootstrap 2026-01-04T02:34:52.725150Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:670:2659] Become StateWork (SchemeCache [2:675:2664]) 2026-01-04T02:34:52.728618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterStreamingQuery CreateStreamingQuery { Name: "MyStreamingQuery" Properties { Properties { key: "query_text" value: "INSERT INTO OtherSink SELECT * FROM OtherSource" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:34:52.728830Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_streaming_query.cpp:208: [72057594046678944] TAlterStreamingQuery Propose: opId# 101:0, path# /MyRoot/MyStreamingQuery 2026-01-04T02:34:52.728984Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2026-01-04T02:34:52.731137Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:670:2659] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:34:52.736486Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at 
schemeshard: 72057594046678944 2026-01-04T02:34:52.736735Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: ALTER STREAMING QUERY, path: MyStreamingQuery 2026-01-04T02:34:52.744590Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:34:52.744855Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:34:52.744910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:34:52.745326Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:34:52.745430Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:34:52.745470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:685:2674] TestWaitNotification: OK eventTxId 101 |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TExternalDataSourceTest::SchemeErrors >> TStreamingQueryTest::DropStreamingQueryFailOnNotExists [GOOD] >> THiveTest::TestServerlessMigration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardInfoTypesTest::FillItemsFromSchemaMappingTest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:33:58.475579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:33:58.475669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:33:58.475738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:33:58.475774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:33:58.475813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:33:58.475868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:33:58.475929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:33:58.476001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:33:58.476869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:33:58.477174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:33:58.593464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:33:58.593527Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:58.617123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:33:58.621465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:33:58.621695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:33:58.635768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:33:58.636122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:33:58.636917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:33:58.637226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:33:58.650817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:33:58.651105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:33:58.652744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:33:58.652824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:33:58.652954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:33:58.653033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:33:58.653086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:33:58.653266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:33:58.842271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:58.846677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:58.846896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:58.846992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:58.847115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:58.847209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:58.847307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:58.847394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:33:58.847483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:58.847582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:58.847667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:58.847773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:58.847848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:58.847913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:33:58.847976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
shard: 72057594046678944 Send TEvRosterUpdateFinished 2026-01-04T02:34:52.226314Z node 12 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 2026-01-04T02:34:52.241989Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:34:52.242304Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:52.242589Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:34:52.242654Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:34:52.242923Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:34:52.243029Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:34:52.246677Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:52.246948Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:34:52.247284Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:52.247383Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:34:52.247450Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:34:52.247504Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:34:52.250285Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:52.250379Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:34:52.250449Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:34:52.252791Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2026-01-04T02:34:52.252866Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:52.252947Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:34:52.253040Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:34:52.253273Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:34:52.255483Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:34:52.255700Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-04T02:34:52.256221Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:52.256419Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 51539609710 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:52.256503Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:34:52.256892Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:34:52.256996Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:34:52.257320Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:34:52.257443Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:34:52.260225Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:52.260310Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2026-01-04T02:34:52.260639Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:52.260707Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [12:211:2212], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-04T02:34:52.260854Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:52.260934Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-04T02:34:52.261143Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:34:52.261205Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:34:52.261282Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:34:52.261345Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:34:52.261408Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:34:52.261480Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:34:52.261534Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:34:52.261588Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:34:52.261703Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:34:52.261771Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-04T02:34:52.261851Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-04T02:34:52.263061Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:34:52.263245Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:34:52.263317Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:34:52.263389Z node 12 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:34:52.263453Z node 12 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:34:52.263592Z node 12 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:34:52.267501Z node 12 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:34:52.268247Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:52.272964Z node 12 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [12:676:2666] Bootstrap 2026-01-04T02:34:52.275044Z node 12 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [12:676:2666] Become StateWork (SchemeCache [12:682:2672]) 2026-01-04T02:34:52.275988Z node 12 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [12:676:2666] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:34:52.278993Z node 12 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::NewOwnerOnDatabase [GOOD] >> TSchemeShardTest::PreserveColumnOrder |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |88.3%| [LD] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/ydb-core-tx-tx_proxy-ut_encrypted_storage >> TxUsage::WriteToTopic_Demo_20_RestartNo_Query >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_streaming_query/unittest >> TStreamingQueryTest::DropStreamingQueryFailOnNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:34:51.416965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:51.417088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:51.417150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:51.417191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:51.417229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:51.417264Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:51.417319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:51.417387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:51.422498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:51.422898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:51.550838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:51.550913Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:51.571918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:51.572133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:51.572305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:51.585611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:51.585851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:51.586549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:51.586822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:51.589892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:51.590091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:51.591145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:51.591212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:51.591338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:51.591387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:51.591428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:51.591588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at 
schemeshard: 72057594046678944 2026-01-04T02:34:51.729712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:51.730555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:51.730665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:51.730723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:51.730770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:51.730806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:51.730845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:51.730879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView 
Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:51.730942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:51.731011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:51.731060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:51.731110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:51.731166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:51.731217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:51.731279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:34:54.109591Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:34:54.111814Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:54.111889Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:34:54.111943Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:34:54.118370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:54.118457Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:54.118505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:34:54.118560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:34:54.118724Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:34:54.122396Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:34:54.122574Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-04T02:34:54.122958Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:54.123101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 8589936750 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:54.123153Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:34:54.123447Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:34:54.123505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2026-01-04T02:34:54.123691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:34:54.123776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:34:54.127820Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:54.127891Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:54.128164Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:54.128220Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-04T02:34:54.128699Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:54.128764Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-04T02:34:54.128889Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:34:54.128934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:34:54.128988Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:34:54.129031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:34:54.129076Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:34:54.129127Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:34:54.129172Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:34:54.129212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:34:54.129314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:34:54.129382Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-04T02:34:54.129426Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-04T02:34:54.130030Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:34:54.130158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:34:54.130207Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:34:54.130250Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:34:54.130298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:34:54.130408Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:34:54.133938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:34:54.134465Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-04T02:34:54.134959Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:678:2667] Bootstrap 2026-01-04T02:34:54.135934Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:678:2667] Become StateWork (SchemeCache [2:683:2672]) 2026-01-04T02:34:54.138153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropStreamingQuery Drop { Name: "MyStreamingQuery" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:34:54.138334Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_streaming_query.cpp:182: [72057594046678944] TDropStreamingQuery Propose: opId# 101:0, path# /MyRoot/MyStreamingQuery 2026-01-04T02:34:54.138465Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), at schemeshard: 72057594046678944 2026-01-04T02:34:54.139229Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:678:2667] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:34:54.142098Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyStreamingQuery\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:54.142310Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathDoesNotExist, reason: Check 
failed: path: '/MyRoot/MyStreamingQuery', error: path hasn't been resolved, nearest resolved path: '/MyRoot' (id: [OwnerId: 72057594046678944, LocalPathId: 1]), operation: DROP STREAMING QUERY, path: MyStreamingQuery 2026-01-04T02:34:54.142712Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:34:54.142962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:34:54.143045Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:34:54.143463Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:34:54.143585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:34:54.143632Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:693:2682] TestWaitNotification: OK eventTxId 101 |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_streaming_query/unittest |88.3%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |88.3%| [LD] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica |88.3%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/scheme_board/ut_replica/ydb-core-tx-scheme_board-ut_replica >> TExternalDataSourceTest::DropTableTwice >> TExternalDataSourceTest::SchemeErrors [GOOD] >> TExternalDataSourceTest::ReadOnlyMode >> TScaleRecommenderTest::BasicTest [GOOD] >> TScaleRecommenderTest::RollingRestart |88.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... results_accumulator.log} |88.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTest::PreserveColumnOrder [GOOD] >> TExternalDataSourceTest::CreateExternalDataSource >> TxUsage::WriteToTopic_Demo_22_RestartNo_Query [GOOD] >> THiveTest::TestServerlessMigration [GOOD] >> THiveTest::TestUpdateChannelValues >> TExternalDataSourceTest::ParallelCreateSameExternalDataSource [GOOD] >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::SchemeErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:113:2144] 2026-01-04T02:34:54.565651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:54.565770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:54.565841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:54.565878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:54.565930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:54.565963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:54.566069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:54.566146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:54.567055Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:54.567381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:54.679926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:34:54.680039Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:54.680885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:54.699910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:54.700256Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:54.700472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:54.706291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:54.706565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:54.707280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:54.707538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:54.710806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:54.711033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:54.712197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:54.712269Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:54.712428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:54.712487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:54.712535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:54.712715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:54.890100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:54.891169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:54.891316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" 
Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:54.891390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:54.891454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:54.891543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:54.891624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:54.891701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:54.891787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:54.891858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:54.891921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:54.892001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:54.892100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:54.892250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:54.892333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModif ... 
79Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 126:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" } 2026-01-04T02:34:55.692099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 126:0, path# /MyRoot/DirA/MyExternalDataSource 2026-01-04T02:34:55.692308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 126:1, propose status:StatusSchemeError, reason: Authorization method isn't specified, at schemeshard: 72057594046678944 2026-01-04T02:34:55.706919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 126, response: Status: StatusSchemeError Reason: "Authorization method isn\'t specified" TxId: 126 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:55.707226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 126, database: /MyRoot, subject: , status: StatusSchemeError, reason: Authorization method isn't specified, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 126, wait until txId: 126 TestModificationResults wait txId: 127 2026-01-04T02:34:55.711125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 127 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:34:55.711518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 127:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2026-01-04T02:34:55.711612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 127:0, path# /MyRoot/DirA/MyExternalDataSource 2026-01-04T02:34:55.711785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 127:1, propose status:StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2026-01-04T02:34:55.718980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 127, response: Status: StatusSchemeError Reason: "Maximum length of location must be less or equal equal to 1000 but got 1001" TxId: 127 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:55.719311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 127, database: /MyRoot, subject: , status: StatusSchemeError, reason: Maximum length of location must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 127, wait until txId: 127 TestModificationResults wait txId: 128 2026-01-04T02:34:55.727578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: 
"aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:34:55.727992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Installation: "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" Auth { None { } } } 2026-01-04T02:34:55.728138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 128:0, path# /MyRoot/DirA/MyExternalDataSource 2026-01-04T02:34:55.728302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 128:1, propose status:StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, at schemeshard: 72057594046678944 2026-01-04T02:34:55.735402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusSchemeError Reason: "Maximum length of installation must be less or equal equal to 1000 but got 1001" TxId: 128 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:55.735769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, 
subject: , status: StatusSchemeError, reason: Maximum length of installation must be less or equal equal to 1000 but got 1001, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/MyExternalDataSource TestModificationResult got TxId: 128, wait until txId: 128 TestModificationResults wait txId: 129 2026-01-04T02:34:55.743556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } } TxId: 129 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:34:55.743886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 129:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot/DirA" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } } 2026-01-04T02:34:55.744002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 129:0, path# /MyRoot/DirA/ 2026-01-04T02:34:55.744173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 129:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2026-01-04T02:34:55.750850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 129, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/DirA/\', error: path part shouldn\'t be empty" TxId: 129 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:55.751190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 129, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/DirA/', error: path part shouldn't be empty, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/DirA/ TestModificationResult got TxId: 129, wait until txId: 129 |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists >> TxUsage::WriteToTopic_Demo_18_RestartNo_Table [GOOD] >> TExternalDataSourceTest::DropTableTwice [GOOD] >> TExternalDataSourceTest::ParallelCreateExternalDataSource >> IndexBuildTest::CancelBuildUniq [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Table >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::PreserveColumnOrder [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:34:16.177955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, 
InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:16.178059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:16.178105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:16.178155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:16.178199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:16.178233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:16.182164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:16.182288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:16.183308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:16.183633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:16.286825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:16.286900Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:16.302194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:16.308354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:16.308801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:16.316244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:16.316548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:16.317316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:16.317628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:16.321058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:16.321289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:16.322980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:16.323053Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:16.323178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:16.323257Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:16.323316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:16.323483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:16.333515Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:241:2058] recipient: [1:15:2062] 2026-01-04T02:34:16.512703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:34:16.512998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:16.513285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:34:16.513350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:34:16.513636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:34:16.513746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:34:16.526300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:16.526608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:34:16.526875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:16.526952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:34:16.527001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:34:16.527063Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:34:16.530708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:16.530814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:34:16.530867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:34:16.533252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:16.533342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:16.533406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:34:16.533481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:34:16.537757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:34:16.554780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:34:16.555091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:34:16.556525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:16.556699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:16.556779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:34:16.557157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:34:16.557237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2026-01-04T02:34:16.557442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:34:16.557536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:34:16.560328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:16.560417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... ecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2293 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-04T02:34:56.143345Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 2293 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-04T02:34:56.144713Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 714 RawX2: 68719479435 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:34:56.144802Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-04T02:34:56.145042Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 714 RawX2: 68719479435 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:34:56.145170Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:34:56.145356Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 714 RawX2: 68719479435 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:34:56.145480Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:56.145558Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:34:56.145626Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send 
schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:34:56.145698Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-04T02:34:56.146728Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:34:56.148386Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:34:56.150646Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:34:56.150838Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:34:56.151275Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:34:56.151341Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:34:56.151568Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:34:56.151646Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:34:56.151717Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:34:56.151791Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:34:56.151856Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-04T02:34:56.151992Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [16:740:2717] message: TxId: 101 2026-01-04T02:34:56.152091Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:34:56.152163Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:34:56.152227Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:34:56.152422Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:34:56.154781Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:34:56.154871Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [16:741:2718] TestWaitNotification: OK eventTxId 101 2026-01-04T02:34:56.155647Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false 
ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:56.156163Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 562us result status StatusSuccess 2026-01-04T02:34:56.157329Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "col01" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "col02" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "col03" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "col04" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } Columns { Name: "col05" Type: "Utf8" TypeId: 4608 Id: 5 NotNull: false IsBuildInProgress: false } Columns { Name: "col06" Type: "Utf8" TypeId: 4608 Id: 6 NotNull: false IsBuildInProgress: false } Columns { Name: "col07" Type: "Utf8" TypeId: 4608 Id: 7 NotNull: false IsBuildInProgress: false } Columns { Name: "col08" Type: "Utf8" TypeId: 4608 Id: 8 NotNull: false IsBuildInProgress: false } Columns { Name: "col09" Type: "Utf8" TypeId: 4608 Id: 9 NotNull: false IsBuildInProgress: false } Columns { Name: "col10" Type: "Utf8" TypeId: 4608 Id: 10 NotNull: false IsBuildInProgress: false } Columns { Name: "col11" Type: "Utf8" TypeId: 4608 Id: 11 NotNull: false IsBuildInProgress: false } Columns { Name: "col12" Type: "Utf8" TypeId: 4608 Id: 12 NotNull: false IsBuildInProgress: false } Columns { Name: "col13" Type: "Utf8" TypeId: 4608 Id: 13 NotNull: false IsBuildInProgress: false } Columns { Name: "col14" Type: "Utf8" TypeId: 4608 Id: 14 NotNull: false IsBuildInProgress: false } Columns { Name: "col15" Type: "Utf8" TypeId: 4608 Id: 15 NotNull: false IsBuildInProgress: false } Columns { Name: "col16" Type: "Utf8" TypeId: 4608 Id: 16 NotNull: false IsBuildInProgress: false } Columns { Name: "col17" Type: "Utf8" TypeId: 4608 Id: 17 NotNull: false IsBuildInProgress: false } Columns { Name: "col18" Type: "Utf8" TypeId: 4608 Id: 18 NotNull: false IsBuildInProgress: false } Columns { Name: "col19" Type: "Utf8" TypeId: 4608 Id: 19 NotNull: false IsBuildInProgress: false } Columns { Name: "col20" Type: "Utf8" TypeId: 4608 Id: 20 NotNull: false IsBuildInProgress: false } Columns { Name: "col21" Type: "Utf8" TypeId: 4608 Id: 21 NotNull: false IsBuildInProgress: false } Columns { Name: "col22" Type: "Utf8" TypeId: 4608 Id: 22 NotNull: false IsBuildInProgress: false } Columns { Name: "col23" Type: "Utf8" TypeId: 4608 Id: 23 NotNull: false IsBuildInProgress: false } Columns { Name: "col24" Type: "Utf8" TypeId: 4608 Id: 24 NotNull: false IsBuildInProgress: false } Columns { Name: "col25" Type: "Utf8" TypeId: 4608 Id: 25 NotNull: false IsBuildInProgress: false } Columns { Name: "col26" Type: "Utf8" TypeId: 4608 Id: 26 NotNull: false IsBuildInProgress: false } Columns { Name: 
"col27" Type: "Utf8" TypeId: 4608 Id: 27 NotNull: false IsBuildInProgress: false } Columns { Name: "col28" Type: "Utf8" TypeId: 4608 Id: 28 NotNull: false IsBuildInProgress: false } Columns { Name: "col29" Type: "Utf8" TypeId: 4608 Id: 29 NotNull: false IsBuildInProgress: false } KeyColumnNames: "col01" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> TxUsage::Sinks_Oltp_WriteToTopic_5_Table [GOOD] >> THiveTest::TestUpdateChannelValues [GOOD] >> THiveTest::TestStorageBalancer >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartNo_Query >> TxUsage::Sinks_Oltp_WriteToTopic_4_Query [GOOD] >> TExternalDataSourceTest::ReadOnlyMode [GOOD] >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources >> TExternalDataSourceTest::CreateExternalDataSource [GOOD] >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists >> TScaleRecommenderTest::RollingRestart [GOOD] >> TScaleRecommenderTest::RollingRestartNoLastRecommendation >> TxUsage::WriteToTopic_Demo_45_Table [GOOD] >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopic_5_Query >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 
72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:113:2144] 2026-01-04T02:34:57.339003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:57.339107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:57.339164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:57.339206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:57.339241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:57.339275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:57.339342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:57.339415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:57.340343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:57.340638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:57.482808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:34:57.482884Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:57.483806Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:57.504682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:57.504941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:57.505131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:57.510576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:57.510821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:57.511585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:57.511820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at 
schemeshard: 72057594046678944 2026-01-04T02:34:57.514695Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:57.514858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:57.515747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:57.515799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:57.515948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:57.515996Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:57.516047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:57.516179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:57.686749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.687870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.688011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.688094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.688153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.688209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.688266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.688414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.688478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.688561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.688637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.688728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.688820Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.688947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.689025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModif ... : 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:34:58.351768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-04T02:34:58.352105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:58.352240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:58.352290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:34:58.352551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:34:58.352609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:34:58.352775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:34:58.352842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:34:58.354731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:58.354776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 
72057594046678944, LocalPathId: 1] 2026-01-04T02:34:58.354975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:58.355034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-04T02:34:58.355364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:34:58.355407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-04T02:34:58.355498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:34:58.355532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:34:58.355578Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:34:58.355610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:34:58.355644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:34:58.355684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:34:58.355717Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:34:58.355765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:34:58.355837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:34:58.355873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-04T02:34:58.355905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-04T02:34:58.356501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:34:58.356592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:34:58.356630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:34:58.356666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:34:58.356717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:34:58.356822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:34:58.359366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:34:58.359813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-04T02:34:58.361076Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:676:2666] Bootstrap 2026-01-04T02:34:58.362185Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:676:2666] Become StateWork (SchemeCache [1:681:2671]) 2026-01-04T02:34:58.365071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:34:58.365361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 101:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2026-01-04T02:34:58.365452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 101:0, explain: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2026-01-04T02:34:58.365515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2026-01-04T02:34:58.366497Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:676:2666] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:34:58.369301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:58.369574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalDataSource request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource 2026-01-04T02:34:58.370247Z node 1 :TX_PROXY 
DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:34:58.370477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:34:58.370521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:34:58.370896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:34:58.370980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:34:58.371014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:691:2681] TestWaitNotification: OK eventTxId 101 2026-01-04T02:34:58.371475Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:58.371702Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 213us result status StatusPathDoesNotExist 2026-01-04T02:34:58.371883Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-04T02:34:55.301090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:55.301183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:55.301220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:55.301251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:55.301279Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:55.301298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:55.301343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:55.301390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:55.302240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:55.302480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:55.458520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:34:55.458653Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:55.459538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:55.479084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:55.489065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:55.489262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:55.500347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:55.500605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:55.501311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:55.501600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:55.528320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:55.528550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:55.530098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:55.530172Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:55.530280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:55.530336Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:55.530382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:55.530513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:55.716452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:55.717621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:55.717786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:55.717912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:55.718005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:55.718110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } 
TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:55.718179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:55.718251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:55.718329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:55.718402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:55.718508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:55.718652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:55.718773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:55.718882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:55.719051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModif ... onResult 2026-01-04T02:34:58.407618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 113: satisfy waiter [2:805:2794] 2026-01-04T02:34:58.407682Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 119, at schemeshard: 72057594046678944 2026-01-04T02:34:58.407779Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 120, at schemeshard: 72057594046678944 2026-01-04T02:34:58.407852Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 121, at schemeshard: 72057594046678944 2026-01-04T02:34:58.407905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-01-04T02:34:58.407932Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [2:805:2794] 2026-01-04T02:34:58.408106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 115: got EvNotifyTxCompletionResult 2026-01-04T02:34:58.408138Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 115: satisfy waiter [2:805:2794] 2026-01-04T02:34:58.408271Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 122, at schemeshard: 72057594046678944 2026-01-04T02:34:58.408337Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 123, at schemeshard: 72057594046678944 2026-01-04T02:34:58.408393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 116: got EvNotifyTxCompletionResult 2026-01-04T02:34:58.408421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 116: satisfy waiter [2:805:2794] 2026-01-04T02:34:58.408534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 117: got EvNotifyTxCompletionResult 2026-01-04T02:34:58.408559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 117: satisfy waiter [2:805:2794] 2026-01-04T02:34:58.408625Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 124, at schemeshard: 72057594046678944 2026-01-04T02:34:58.408721Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-01-04T02:34:58.408797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 118: got EvNotifyTxCompletionResult 2026-01-04T02:34:58.408830Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: 
tests -- TTxNotificationSubscriber for txId 118: satisfy waiter [2:805:2794] 2026-01-04T02:34:58.408931Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-01-04T02:34:58.409001Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 119: got EvNotifyTxCompletionResult 2026-01-04T02:34:58.409028Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 119: satisfy waiter [2:805:2794] 2026-01-04T02:34:58.409094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 120: got EvNotifyTxCompletionResult 2026-01-04T02:34:58.409119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 120: satisfy waiter [2:805:2794] 2026-01-04T02:34:58.409186Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2026-01-04T02:34:58.409271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 121: got EvNotifyTxCompletionResult 2026-01-04T02:34:58.409295Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 121: satisfy waiter [2:805:2794] 2026-01-04T02:34:58.409394Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 128, at schemeshard: 72057594046678944 2026-01-04T02:34:58.409493Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 122: got EvNotifyTxCompletionResult 2026-01-04T02:34:58.409526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 122: satisfy waiter [2:805:2794] 2026-01-04T02:34:58.409612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 123: got EvNotifyTxCompletionResult 2026-01-04T02:34:58.409637Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 123: satisfy waiter [2:805:2794] 2026-01-04T02:34:58.409741Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 129, at schemeshard: 72057594046678944 2026-01-04T02:34:58.409851Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 130, at schemeshard: 72057594046678944 2026-01-04T02:34:58.409907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 124: got EvNotifyTxCompletionResult 2026-01-04T02:34:58.409934Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 124: satisfy waiter [2:805:2794] 2026-01-04T02:34:58.410030Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 131, at schemeshard: 72057594046678944 2026-01-04T02:34:58.410097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-01-04T02:34:58.410125Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [2:805:2794] 2026-01-04T02:34:58.410258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 
2026-01-04T02:34:58.410288Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [2:805:2794] 2026-01-04T02:34:58.410435Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-01-04T02:34:58.410463Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [2:805:2794] 2026-01-04T02:34:58.410572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 128: got EvNotifyTxCompletionResult 2026-01-04T02:34:58.410598Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 128: satisfy waiter [2:805:2794] 2026-01-04T02:34:58.410740Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 129: got EvNotifyTxCompletionResult 2026-01-04T02:34:58.410774Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 129: satisfy waiter [2:805:2794] 2026-01-04T02:34:58.410860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 130: got EvNotifyTxCompletionResult 2026-01-04T02:34:58.410888Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 130: satisfy waiter [2:805:2794] 2026-01-04T02:34:58.410990Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 131: got EvNotifyTxCompletionResult 2026-01-04T02:34:58.411017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 131: satisfy waiter [2:805:2794] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 TestWaitNotification: OK eventTxId 106 TestWaitNotification: OK eventTxId 107 TestWaitNotification: OK eventTxId 108 TestWaitNotification: OK eventTxId 109 TestWaitNotification: OK eventTxId 110 TestWaitNotification: OK eventTxId 111 TestWaitNotification: OK eventTxId 112 TestWaitNotification: OK eventTxId 113 TestWaitNotification: OK eventTxId 114 TestWaitNotification: OK eventTxId 115 TestWaitNotification: OK eventTxId 116 TestWaitNotification: OK eventTxId 117 TestWaitNotification: OK eventTxId 118 TestWaitNotification: OK eventTxId 119 TestWaitNotification: OK eventTxId 120 TestWaitNotification: OK eventTxId 121 TestWaitNotification: OK eventTxId 122 TestWaitNotification: OK eventTxId 123 TestWaitNotification: OK eventTxId 124 TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 TestWaitNotification: OK eventTxId 128 TestWaitNotification: OK eventTxId 129 TestWaitNotification: OK eventTxId 130 TestWaitNotification: OK eventTxId 131 2026-01-04T02:34:58.414576Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:58.414822Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 286us result status StatusSuccess 2026-01-04T02:34:58.415243Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/other_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Table >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists >> TxUsage::WriteToTopic_Demo_45_Query >> TExternalDataSourceTest::CreateExternalDataSourceWithProperties [GOOD] >> TExternalDataSourceTest::DropExternalDataSource |88.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_streaming_query/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataSourceIfNotExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:113:2144] 2026-01-04T02:34:57.762689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:57.762776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:57.762815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:57.762850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:57.762905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:57.762957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:57.763062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:57.763126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:57.763981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:57.764290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:57.899051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:34:57.899121Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:57.899921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:57.923681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:57.923949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:57.924119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:57.935169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:57.935427Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:57.936121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:57.936361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:57.940072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:57.940278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:57.941297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:57.941353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:57.941459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:57.941503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:57.941565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:57.941729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:58.095642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.096598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.096747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.096818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.096898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.096963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.097017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.097089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.097178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.097238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.097292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.097354Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.097531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.097620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.097707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModif ... eshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 140 RawX2: 4294969457 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:58.942662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_external_data_source.cpp:34: [72057594046678944] TAlterExternalDataSource TPropose, operationId: 102:0HandleReply TEvOperationPlan: step# 5000039 2026-01-04T02:34:58.942789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-04T02:34:58.942970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:34:58.943030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:34:58.943988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:34:58.946185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:34:58.948650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:58.948688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:58.948862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: 
TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:34:58.948973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:34:58.949047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:58.949074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-04T02:34:58.949100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-04T02:34:58.949115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:209:2210], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-04T02:34:58.949310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:34:58.949342Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:34:58.949425Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:34:58.949477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:34:58.949508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:34:58.949541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:34:58.949570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-04T02:34:58.949605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:34:58.949632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:34:58.949654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:34:58.949717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:34:58.949745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-04T02:34:58.949769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-04T02:34:58.949793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 3 
2026-01-04T02:34:58.952391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:58.952460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:58.952502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:34:58.952539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-04T02:34:58.952598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:34:58.954531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:58.954638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:34:58.954672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:34:58.954700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:34:58.954731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:34:58.954808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:34:58.958133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:34:58.959589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:34:58.959836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:34:58.959893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:34:58.960263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:34:58.960364Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:34:58.960413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:739:2729] TestWaitNotification: OK eventTxId 102 2026-01-04T02:34:58.960869Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:58.961069Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 219us result status StatusSuccess 2026-01-04T02:34:58.961416Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 2 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> LocalPartition::WithoutPartitionWithRestart [GOOD] >> LocalPartition::WithoutPartitionUnknownEndpoint |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ParallelCreateExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] 
sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:113:2144] 2026-01-04T02:34:56.364542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:56.364640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:56.364686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:56.364725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:56.364762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:56.364818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:56.364991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:56.365075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:56.366042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:56.366385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:56.517984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:34:56.518078Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:56.519026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:56.541160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:56.541439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:56.541635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:56.547726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:56.548049Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:56.548803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:56.549072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:56.552381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:56.552582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:56.553759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:56.553847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:56.553980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:56.554028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:56.554072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:56.554265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:56.713123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:56.714170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:56.714300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:56.714375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:56.714447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:56.714523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:56.714585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:56.714669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:56.714744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:56.714805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:56.714889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:56.714957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:34:56.715053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:56.715192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:56.715256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModif ... ingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:59.127547Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:59.127779Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 247us result status StatusSuccess 2026-01-04T02:34:59.128200Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:59.129042Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:59.129231Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 204us result status StatusSuccess 2026-01-04T02:34:59.129638Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 124 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: "MyExternalDataSource1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "MyExternalDataSource2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 
39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:59.132271Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:59.132492Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource1" took 258us result status StatusSuccess 2026-01-04T02:34:59.132850Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource1" PathDescription { Self { Name: "MyExternalDataSource1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 125 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource1" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:34:59.133401Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/MyExternalDataSource2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:59.133606Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/MyExternalDataSource2" took 222us result status StatusSuccess 2026-01-04T02:34:59.133975Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/MyExternalDataSource2" PathDescription { Self { Name: "MyExternalDataSource2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 126 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource2" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> THiveImplTest::BootQueueSpeed |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] >> THiveTest::TestDrainWithMaxTabletsScheduled >> THiveTest::TestLocalDisconnect >> TExternalDataSourceTest::RemovingReferencesFromDataSources >> TExternalDataSourceTest::DropExternalDataSource [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::CreateExternalDataSourceShouldFailIfSuchEntityAlreadyExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-04T02:34:57.441051Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:57.441168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:57.441213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:57.441251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:57.441292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:57.441322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:57.441397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:57.441467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:57.442435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:57.442771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:57.595490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:34:57.595625Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:57.596557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:57.621274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:57.625378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:57.625598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:57.630541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:57.630793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:57.631475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:57.631731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:57.636621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:57.636804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:57.638149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:57.638208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:57.638296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:57.638339Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:57.638377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:57.638528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:57.828063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.829241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.829389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.829502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.829593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.829666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.829762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.830515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.830654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.830736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.830845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.830933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.831084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.831167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.831277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModif ... MESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:35:00.284549Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-04T02:35:00.284606Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:35:00.285898Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:35:00.285981Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:35:00.286010Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:35:00.286040Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-04T02:35:00.286072Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:35:00.286156Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:35:00.289375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:35:00.290518Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:35:00.290777Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: 
tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:35:00.290836Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:35:00.291284Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:35:00.291423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:35:00.291470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:715:2704] TestWaitNotification: OK eventTxId 101 2026-01-04T02:35:00.291956Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:35:00.292214Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 309us result status StatusSuccess 2026-01-04T02:35:00.292630Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-01-04T02:35:00.300445Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: 
Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:35:00.300850Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "MyExternalDataSource" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_new_bucket" Auth { None { } } } 2026-01-04T02:35:00.300953Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_data_source.cpp:233: [72057594046678944] TCreateExternalDataSource Propose: opId# 102:0, path# /MyRoot/MyExternalDataSource 2026-01-04T02:35:00.301128Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-04T02:35:00.308520Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeExternalDataSource, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-04T02:35:00.308855Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/MyExternalDataSource', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeExternalDataSource, state: EPathStateNoChanges), operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/MyExternalDataSource TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:35:00.309339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:35:00.309393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:35:00.309888Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:35:00.310005Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:35:00.310046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:723:2712] TestWaitNotification: OK eventTxId 102 2026-01-04T02:35:00.310600Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:35:00.310823Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 271us result status StatusSuccess 2026-01-04T02:35:00.311223Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyExternalDataSource" PathDescription { Self { Name: "MyExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "MyExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> THiveTest::TestNoMigrationToSelf ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::PreventDeletionOfDependentDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:113:2144] 2026-01-04T02:34:56.733063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:56.733174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-04T02:34:56.733232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:56.733273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:56.733310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:56.733360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:56.733438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:56.733513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:56.737728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:56.738128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:56.885424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:34:56.885511Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:56.886373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:56.914137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:56.914476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:56.914680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:56.922157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:56.922419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:56.923157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:56.923409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:56.931841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:56.932075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:56.933199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:56.933267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: 
TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:56.933398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:56.933454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:56.933495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:56.933669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:57.144778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.149075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.149265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.149344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.149409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.149499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:34:57.149566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.149657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.149731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.149869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.149941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.150006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.150090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.150250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] 
at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:57.150319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModif ... fCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:35:00.114547Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:35:00.114635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:35:00.114668Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:35:00.114701Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-04T02:35:00.114738Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:35:00.115578Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:35:00.115659Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:35:00.115690Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:35:00.115722Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-04T02:35:00.115754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:35:00.115823Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:35:00.117479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:35:00.119107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at 
schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:35:00.119219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:35:00.119452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:35:00.119505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:35:00.119932Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:35:00.120054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:35:00.120096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:741:2730] TestWaitNotification: OK eventTxId 101 2026-01-04T02:35:00.120573Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:35:00.120803Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 272us result status StatusSuccess 2026-01-04T02:35:00.121163Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" 
DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2026-01-04T02:35:00.125031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpDropExternalDataSource Drop { Name: "ExternalDataSource" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:35:00.125222Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_drop_external_data_source.cpp:116: [72057594046678944] TDropExternalDataSource Propose: opId# 103:0, path# /MyRoot/ExternalDataSource 2026-01-04T02:35:00.125666Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, at schemeshard: 72057594046678944 2026-01-04T02:35:00.128098Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusSchemeError Reason: "Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:35:00.128389Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusSchemeError, reason: Other entities depend on this data source, please remove them at the beginning: /MyRoot/ExternalTable, operation: DROP EXTERNAL DATA SOURCE, path: /MyRoot/ExternalDataSource TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-04T02:35:00.128731Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:35:00.128780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:35:00.129252Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:35:00.129357Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:35:00.129398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:749:2738] TestWaitNotification: OK eventTxId 103 2026-01-04T02:35:00.129921Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:35:00.130141Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 258us result status StatusSuccess 2026-01-04T02:35:00.130525Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: 
"ExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { References { Path: "/MyRoot/ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } } } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> BasicUsage::AlterTopicWithSharedConsumer_SetMoveDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_AlterMoveDeadLetterPolicy >> THiveTest::TestLocalDisconnect [GOOD] >> THiveTest::TestLocalReplacement >> TScaleRecommenderTest::RollingRestartNoLastRecommendation [GOOD] >> TStorageBalanceTest::TestScenario1 |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |88.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> THiveTest::TestCreateTablet >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::DropExternalDataSource [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:127:2152] sender: [1:129:2058] recipient: [1:113:2144] 2026-01-04T02:34:58.629275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2026-01-04T02:34:58.629402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:58.629448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:58.629482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:58.629519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:58.629572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:58.629704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:58.629768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:58.630675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:58.630984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:58.763301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:34:58.763377Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:58.764228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:58.785480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:58.785793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:58.785995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:58.791124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:58.791337Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:58.792036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:58.792252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:58.794496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:58.794642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:58.795542Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:58.795587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:58.795659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:58.795691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:58.795728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:58.795859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:58.960930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.962014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.962146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.962213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.962299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.962365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.962423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.962507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.962574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.962638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.962732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.962953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.963041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 
281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.963178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:58.963244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:213:2214] at schemeshard: 72057594046678944 Send TEvModif ... nByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2026-01-04T02:35:01.406277Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:35:01.406440Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 8589936752 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:35:01.406505Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_external_data_source.cpp:40: [72057594046678944] TDropExternalDataSource TPropose opId# 102:0 HandleReply TEvOperationPlan: step# 5000039 2026-01-04T02:35:01.406650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:35:01.406745Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-04T02:35:01.406944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:35:01.407018Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:35:01.407965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:35:01.408628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:35:01.415213Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:35:01.415273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:35:01.415444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, 
path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:35:01.415626Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:35:01.415673Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:212:2212], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-04T02:35:01.415717Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:212:2212], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-04T02:35:01.415801Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:35:01.415884Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:35:01.416011Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:35:01.416070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:35:01.416115Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:35:01.416154Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:35:01.416200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-04T02:35:01.416245Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:35:01.416290Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:35:01.416332Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:35:01.416432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:35:01.416484Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-04T02:35:01.416526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-04T02:35:01.416563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-04T02:35:01.417346Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:35:01.417443Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 
PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:35:01.417479Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:35:01.417617Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-04T02:35:01.417673Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:35:01.418140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:35:01.418194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:35:01.418305Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:35:01.418645Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:35:01.418726Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:35:01.418758Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:35:01.418789Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-04T02:35:01.418831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:35:01.418909Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:35:01.423471Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:35:01.423644Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:35:01.423780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:35:01.424061Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 
2026-01-04T02:35:01.424117Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:35:01.424584Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:35:01.424702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:35:01.424745Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:743:2732] TestWaitNotification: OK eventTxId 102 2026-01-04T02:35:01.425633Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:35:01.425873Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyExternalDataSource" took 248us result status StatusPathDoesNotExist 2026-01-04T02:35:01.426070Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] |88.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::ReplaceExternalDataStoreShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:35:00.436648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:35:00.436745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:35:00.436801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: 
StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:35:00.436845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:35:00.436894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:35:00.436930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:35:00.437002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:35:00.437076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:35:00.438012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:35:00.438341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:35:00.551087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:35:00.551159Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:00.566414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:35:00.570845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:35:00.571125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:35:00.578421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:35:00.578704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:35:00.579481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:35:00.579767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:35:00.583309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:35:00.583528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:35:00.584940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:35:00.585014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:35:00.585146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:35:00.585200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:35:00.585245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:35:00.585438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:35:00.981466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:00.983361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:00.983526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:00.983645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:00.983739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:00.983808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:00.983872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: 
false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:00.983939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:00.986077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:00.986174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:00.986235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:00.986301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:00.986376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:00.986478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:35:00.986596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... Id: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:35:02.375066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:35:02.375173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:35:02.375213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:35:02.375267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:35:02.375307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:35:02.375347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-04T02:35:02.375391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:35:02.375433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:35:02.375463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:35:02.375548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:35:02.375599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-04T02:35:02.375662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-04T02:35:02.375689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-04T02:35:02.376604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:35:02.376718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:35:02.376768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:35:02.376824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 1], version: 7 2026-01-04T02:35:02.376875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:35:02.378895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:35:02.379009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:35:02.379053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:35:02.379118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-04T02:35:02.379152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:35:02.379247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:35:02.386769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:35:02.386930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:35:02.387205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:35:02.387247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:35:02.387642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:35:02.387771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:35:02.387810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:701:2691] TestWaitNotification: OK eventTxId 101 2026-01-04T02:35:02.388260Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/UniqueName" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:35:02.388527Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/UniqueName" took 272us result status StatusSuccess 2026-01-04T02:35:02.389279Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/UniqueName" PathDescription { Self { Name: "UniqueName" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "UniqueName" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-01-04T02:35:02.393440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:35:02.393748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_data_source.cpp:337: [72057594046678944] CreateNewExternalDataSource, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalDataSource FailOnExist: false CreateExternalDataSource { Name: "UniqueName" SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Auth { None { } } ReplaceIfExists: true } 2026-01-04T02:35:02.410076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_external_data_source.cpp:202: [72057594046678944] TAlterExternalDataSource Propose: opId# 102:0, path# /MyRoot/UniqueName 2026-01-04T02:35:02.410390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, at schemeshard: 72057594046678944 2026-01-04T02:35:02.415234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: 
TTxOperationPropose Complete, txId: 102, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-04T02:35:02.415570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalDataSource, operation: CREATE EXTERNAL DATA SOURCE, path: /MyRoot/UniqueName TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:35:02.415954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:35:02.416005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:35:02.416435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:35:02.416543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:35:02.416579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:709:2699] TestWaitNotification: OK eventTxId 102 |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> THiveTest::TestLocalReplacement [GOOD] >> THiveTest::TestHiveRestart |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> THiveTest::TestNoMigrationToSelf [GOOD] >> THiveTest::TestReCreateTablet >> THiveTest::TestHiveBalancer >> THiveTest::TestCreateTablet [GOOD] >> THiveTest::TestCreate100Tablets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TExternalDataSourceTest::RemovingReferencesFromDataSources [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-04T02:35:01.929666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:35:01.929769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:35:01.933981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:35:01.934081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:35:01.934127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:35:01.934159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:35:01.934264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:35:01.934342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:35:01.935323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:35:01.935683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:35:02.156226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:35:02.156365Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:02.157320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:35:02.174311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:35:02.178704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:35:02.178933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:35:02.185350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:35:02.185609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:35:02.186493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:35:02.186772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:35:02.200138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:35:02.200423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:35:02.202073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 
2026-01-04T02:35:02.202158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:35:02.202267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:35:02.202323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:35:02.202372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:35:02.202557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:35:02.395206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:02.396437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:02.396594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:02.396688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:02.396774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:02.396865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:02.396958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:02.397040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:02.397109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:02.397206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:02.397296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:02.397382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:02.397520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:02.397607Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:02.397708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModif ... rRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000041 2026-01-04T02:35:03.642751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:35:03.642892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:35:03.642950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_external_data_source.cpp:40: [72057594046678944] TDropExternalDataSource TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000041 2026-01-04T02:35:03.643083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:35:03.643162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-04T02:35:03.643361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:35:03.643440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:35:03.644328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:35:03.644467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 FAKE_COORDINATOR: Erasing txId 104 2026-01-04T02:35:03.646754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:35:03.646794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:35:03.646913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:35:03.647015Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:35:03.647037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:206:2207], at schemeshard: 72057594046678944, txId: 104, path id: 1 2026-01-04T02:35:03.647060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:206:2207], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-04T02:35:03.647317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:35:03.647354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-04T02:35:03.647439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:35:03.647464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:35:03.647493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:35:03.647518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:35:03.647545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-04T02:35:03.647582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:35:03.647610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-04T02:35:03.647634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-04T02:35:03.647699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:35:03.647730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-04T02:35:03.647765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-04T02:35:03.647804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-04T02:35:03.648128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:35:03.648179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:35:03.648201Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:35:03.648230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-04T02:35:03.648260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:35:03.648472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:35:03.648510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:35:03.648585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:35:03.648726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:35:03.648778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:35:03.648802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:35:03.648818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-04T02:35:03.648840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:35:03.648892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-04T02:35:03.652069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:35:03.652167Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:35:03.652225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-04T02:35:03.652423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-04T02:35:03.652458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- 
TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-04T02:35:03.652834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-04T02:35:03.652904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:35:03.652928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:787:2777] TestWaitNotification: OK eventTxId 104 2026-01-04T02:35:03.653389Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:35:03.653602Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 237us result status StatusPathDoesNotExist 2026-01-04T02:35:03.653787Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalDataSource\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalDataSource" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_data_source/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Query >> THiveTest::TestHiveRestart [GOOD] >> THiveTest::TestLimitedNodeList |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::TakeIncrementalBackup >> THiveTest::TestReCreateTablet [GOOD] >> THiveTest::TestReCreateTabletError |88.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} |88.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... results_accumulator.log} |88.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_data_source/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TReplicaTest::UpdateWithoutHandshake |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |88.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/idx_test/ydb-core-kqp-ut-idx_test >> TReplicaTest::Merge >> TReplicaTest::UpdateWithoutHandshake [GOOD] >> TReplicaTest::UpdateWithStaleGeneration >> TReplicaTest::Merge [GOOD] >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers >> TReplicaTest::Unsubscribe >> TReplicaTest::IdempotencyUpdatesWithoutSubscribers [GOOD] >> TTxDataShardMiniKQL::CrossShard_1_Cycle [GOOD] >> TReplicaTest::StrongNotificationAfterCommit >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy >> THiveTest::TestReCreateTabletError [GOOD] >> THiveTest::TestNodeDisconnect >> TReplicaTest::UpdateWithStaleGeneration [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsDomainRoot [GOOD] >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk >> TReplicaTest::StrongNotificationAfterCommit [GOOD] |88.4%| [LD] {RESULT} $(B)/ydb/core/persqueue/ut/ut_with_sdk/ydb-core-persqueue-ut-ut_with_sdk >> TReplicaTest::Unsubscribe [GOOD] >> TReplicaTest::UnsubscribeUnknownPath >> TReplicaTest::Subscribe >> THiveTest::TestLimitedNodeList [GOOD] >> THiveTest::TestHiveFollowersWithChangingDC >> TReplicaCombinationTest::UpdatesCombinationsMigratedPath [GOOD] >> TReplicaCombinationTest::MigratedPathRecreation >> TReplicaTest::UnsubscribeUnknownPath [GOOD] >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] >> TReplicaTest::CommitWithoutHandshake >> TReplicaTest::Subscribe [GOOD] >> TReplicaTest::SubscribeUnknownPath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UpdateWithStaleGeneration [GOOD] Test command err: 2026-01-04T02:35:06.700405Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:06.700480Z node 1 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:806: [1:7:2054] Reject update from unknown populator: sender# [1:8:2055], owner# 1, generation# 1 2026-01-04T02:35:06.700561Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2026-01-04T02:35:06.700603Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# path 2026-01-04T02:35:06.700747Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-04T02:35:06.700905Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:8:2055] 2026-01-04T02:35:06.700981Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:8:2055], path# path 2026-01-04T02:35:06.701040Z node 1 :SCHEME_BOARD_REPLICA 
DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:8:2055] 2026-01-04T02:35:06.701073Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:06.701130Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-04T02:35:06.701193Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:8:2055] 2026-01-04T02:35:06.701231Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:8:2055], path# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:07.066536Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2026-01-04T02:35:07.066607Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:07.066760Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 0 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:07.066806Z node 2 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:814: [2:7:2054] Reject update from stale populator: sender# [2:8:2055], owner# 1, generation# 0, pending generation# 1 2026-01-04T02:35:07.066924Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:8:2055] 2026-01-04T02:35:07.066959Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# path 2026-01-04T02:35:07.067021Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-04T02:35:07.067143Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] 2026-01-04T02:35:07.067200Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:7:2054] Unsubscribe: subscriber# [2:8:2055], path# path 2026-01-04T02:35:07.067256Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:8:2055] 2026-01-04T02:35:07.067288Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:07.067359Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-04T02:35:07.067443Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:8:2055] 2026-01-04T02:35:07.067486Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:7:2054] Unsubscribe: subscriber# [2:8:2055], path# [OwnerId: 1, LocalPathId: 1] |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::StrongNotificationAfterCommit [GOOD] Test command err: 2026-01-04T02:35:06.968534Z node 1 :SCHEME_BOARD_REPLICA 
DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:9:2056] 2026-01-04T02:35:06.968608Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# path 2026-01-04T02:35:06.968729Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:9:2056], path# path, domainOwnerId# 0, capabilities# 2026-01-04T02:35:06.968841Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:10:2057] 2026-01-04T02:35:06.968896Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:06.968973Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:10:2057], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-04T02:35:06.969075Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-04T02:35:06.969119Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:06.969246Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:06.969286Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-04T02:35:06.976140Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-04T02:35:06.976475Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 40 2026-01-04T02:35:06.976526Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-04T02:35:06.976619Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [1:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:07.080821Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2026-01-04T02:35:07.080885Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:07.080980Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:9:2056] 2026-01-04T02:35:07.081015Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:07.081092Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-04T02:35:07.081246Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle 
NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:07.081345Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-04T02:35:07.081405Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-04T02:35:07.081529Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 40 2026-01-04T02:35:07.081559Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-04T02:35:07.081592Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [2:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:07.081686Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [2:9:2056] 2026-01-04T02:35:07.081741Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:7:2054] Unsubscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:07.085917Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:07.086029Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-04T02:35:07.086093Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [2:7:2054] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:07.086228Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:07.086268Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2026-01-04T02:35:07.086336Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-04T02:35:07.086480Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:10:2057] 2026-01-04T02:35:07.086542Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:10:2057], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2026-01-04T02:35:07.304822Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 1 }: sender# [3:9:2056] 2026-01-04T02:35:07.304883Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7:2054] Upsert description: 
path# path 2026-01-04T02:35:07.304950Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 1, capabilities# 2026-01-04T02:35:07.305065Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-04T02:35:07.305104Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:07.305173Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-04T02:35:07.305208Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:7:2054] Commit generation: owner# 1, generation# 1 2026-01-04T02:35:07.305307Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1006: [3:7:2054] Handle NKikimr::NSchemeBoard::TReplica::TEvPrivate::TEvSendStrongNotifications { Owner: 1 } |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::CommitWithoutHandshake [GOOD] >> TReplicaTest::CommitWithStaleGeneration >> TReplicaTest::SubscribeUnknownPath [GOOD] >> THiveTest::TestStorageBalancer [GOOD] >> THiveTest::TestRestartsWithFollower >> TReplicaTest::SyncVersion ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeUnknownPath [GOOD] Test command err: 2026-01-04T02:35:07.212335Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-04T02:35:07.212422Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:07.212545Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:9:2056] 2026-01-04T02:35:07.212602Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7:2054] Upsert description: path# path 2026-01-04T02:35:07.212755Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:9:2056], path# path, domainOwnerId# 0, capabilities# 2026-01-04T02:35:07.212867Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:10:2057] 2026-01-04T02:35:07.212923Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:10:2057], path# path, domainOwnerId# 0, capabilities# 2026-01-04T02:35:07.213092Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:07.213132Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-04T02:35:07.223445Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-04T02:35:07.236241Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:9:2056] 
2026-01-04T02:35:07.236346Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:9:2056], path# path 2026-01-04T02:35:07.236468Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 40 2026-01-04T02:35:07.236514Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-04T02:35:07.236552Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [1:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:07.519314Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::HandshakeWithStaleGeneration >> TReplicaTest::CommitWithStaleGeneration [GOOD] >> TReplicaTest::Delete >> TReplicaTest::SyncVersion [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaCombinationTest::MigratedPathRecreation [GOOD] Test command err: 2026-01-04T02:35:06.644970Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:8:2055] 2026-01-04T02:35:06.645056Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 800, generation# 1 2026-01-04T02:35:06.645191Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:8:2055] 2026-01-04T02:35:06.645235Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:7:2054] Commit generation: owner# 800, generation# 1 2026-01-04T02:35:06.645320Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:9:2056] 2026-01-04T02:35:06.645355Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 800, generation# 1 2026-01-04T02:35:06.645437Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:9:2056] 2026-01-04T02:35:06.645477Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:7:2054] Commit generation: owner# 800, generation# 1 2026-01-04T02:35:06.645692Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 103 2026-01-04T02:35:06.645754Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2026-01-04T02:35:06.652236Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-04T02:35:06.652455Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:9:2056], cookie# 0, event size# 103 
2026-01-04T02:35:06.652511Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2026-01-04T02:35:06.652584Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-04T02:35:06.652722Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:10:2057] 2026-01-04T02:35:06.652850Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:10:2057], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2026-01-04T02:35:06.778467Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:11:2058] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:12:2059] 2026-01-04T02:35:06.778524Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:11:2058] Successful handshake: owner# 800, generation# 1 2026-01-04T02:35:06.778605Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:11:2058] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:12:2059] 2026-01-04T02:35:06.778658Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:11:2058] Commit generation: owner# 800, generation# 1 2026-01-04T02:35:06.778738Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:11:2058] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [1:13:2060] 2026-01-04T02:35:06.778773Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:11:2058] Successful handshake: owner# 900, generation# 1 2026-01-04T02:35:06.778856Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:11:2058] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [1:13:2060] 2026-01-04T02:35:06.778890Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:11:2058] Commit generation: owner# 900, generation# 1 2026-01-04T02:35:06.778994Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:11:2058] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:12:2059], cookie# 0, event size# 103 2026-01-04T02:35:06.779051Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:11:2058] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2026-01-04T02:35:06.779108Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:11:2058] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-04T02:35:06.779212Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:11:2058] Handle NKikimrSchemeBoard.TEvUpdate { 
Owner: 900 Generation: 1 }: sender# [1:13:2060], cookie# 0, event size# 103 2026-01-04T02:35:06.779254Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:11:2058] Update description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], deletion# false 2026-01-04T02:35:06.779299Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:893: [1:11:2058] Replace GSS by TSS description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], domainId# [OwnerId: 800, LocalPathId: 2], curPathId# [OwnerId: 800, LocalPathId: 2], curDomainId# [OwnerId: 800, LocalPathId: 2] 2026-01-04T02:35:06.779379Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:11:2058] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 900, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 900, LocalPathId: 1], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-04T02:35:06.779488Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:11:2058] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:14:2061] 2026-01-04T02:35:06.779539Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:11:2058] Subscribe: subscriber# [1:14:2061], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 1 PathOwnerId: 900 2026-01-04T02:35:06.779929Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:15:2062] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:16:2063] 2026-01-04T02:35:06.779968Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:15:2062] Successful handshake: owner# 800, generation# 1 2026-01-04T02:35:06.780047Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:15:2062] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:16:2063] 2026-01-04T02:35:06.780090Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:15:2062] Commit generation: owner# 800, generation# 1 2026-01-04T02:35:06.780149Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:15:2062] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:17:2064] 2026-01-04T02:35:06.780178Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:15:2062] Successful handshake: owner# 800, generation# 1 2026-01-04T02:35:06.780228Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:15:2062] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:17:2064] 2026-01-04T02:35:06.780279Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:15:2062] Commit generation: owner# 800, generation# 1 2026-01-04T02:35:06.780354Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:15:2062] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:16:2063], cookie# 0, event size# 103 2026-01-04T02:35:06.780385Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:15:2062] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2026-01-04T02:35:06.780434Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:15:2062] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 
800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-04T02:35:06.780535Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:15:2062] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [1:17:2064], cookie# 0, event size# 103 2026-01-04T02:35:06.780567Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:15:2062] Update description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], deletion# false 2026-01-04T02:35:06.780611Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:15:2062] Upsert description: path# /Root/Tenant, pathId# [OwnerId: 800, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path /Root/Tenant, PathId [OwnerId: 800, LocalPathId: 2], PathVersion 2, SubdomainPathId [OwnerId: 800, LocalPathId: 2], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 60} 2026-01-04T02:35:06.780704Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:15:2062] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant DomainOwnerId: 0 }: sender# [1:18:2065] 2026-01-04T02:35:06.780747Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:15:2062] Subscribe: subscriber# [1:18:2065], path# /Root/Tenant, domainOwnerId# 0, capabilities# =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 =========== Path: "/Root/Tenant" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 2 } } } PathId: 2 PathOwnerId: 800 2026-01-04T02:35:06.781095Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:19:2066] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [1:20:2067] 2026-01-04T02:35:06.781143Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:19:2066] Successful handshake: owner# 800, generation# 1 2026-01-04T02:35:06.781202Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:19:2066] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [1:20:2067] 2026-01-04T02:35:06.781228Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:19:2066] Commit generation: owner# 800, generation# 1 2026-01-04T02:35:06.781279Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:19:2066] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Gener ... 
DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2026-01-04T02:35:07.556934Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:399:2446] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:400:2447] 2026-01-04T02:35:07.556997Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:399:2446] Successful handshake: owner# 910, generation# 1 2026-01-04T02:35:07.557062Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:399:2446] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:400:2447] 2026-01-04T02:35:07.557109Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:399:2446] Commit generation: owner# 910, generation# 1 2026-01-04T02:35:07.557178Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:399:2446] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:401:2448] 2026-01-04T02:35:07.557203Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:399:2446] Successful handshake: owner# 910, generation# 1 2026-01-04T02:35:07.557258Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:399:2446] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:401:2448] 2026-01-04T02:35:07.557281Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:399:2446] Commit generation: owner# 910, generation# 1 2026-01-04T02:35:07.557366Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:399:2446] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:400:2447], cookie# 0, event size# 64 2026-01-04T02:35:07.557403Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:399:2446] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2026-01-04T02:35:07.557430Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:399:2446] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2026-01-04T02:35:07.557536Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:399:2446] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:401:2448], cookie# 0, event size# 130 2026-01-04T02:35:07.557567Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:399:2446] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# false 2026-01-04T02:35:07.557595Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [2:399:2446] Path was explicitly deleted, ignoring: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9] 2026-01-04T02:35:07.557688Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:399:2446] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:402:2449] 2026-01-04T02:35:07.557716Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:399:2446] Upsert description: path# /Root/Tenant/table_inside 2026-01-04T02:35:07.557763Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:399:2446] Subscribe: subscriber# [2:402:2449], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey 
{ SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 2 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2026-01-04T02:35:07.560506Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:403:2450] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:404:2451] 2026-01-04T02:35:07.560583Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:403:2450] Successful handshake: owner# 910, generation# 1 2026-01-04T02:35:07.560642Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:403:2450] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:404:2451] 2026-01-04T02:35:07.560677Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:403:2450] Commit generation: owner# 910, generation# 1 2026-01-04T02:35:07.560741Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:403:2450] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 910 Generation: 1 }: sender# [2:405:2452] 2026-01-04T02:35:07.560775Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:403:2450] Successful handshake: owner# 910, generation# 1 2026-01-04T02:35:07.560835Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:403:2450] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 910 Generation: 1 }: sender# [2:405:2452] 2026-01-04T02:35:07.560866Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:403:2450] Commit generation: owner# 910, generation# 1 2026-01-04T02:35:07.560963Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:403:2450] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:404:2451], cookie# 0, event size# 64 2026-01-04T02:35:07.560996Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:403:2450] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2026-01-04T02:35:07.561022Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:403:2450] Upsert description: path# [OwnerId: 910, LocalPathId: 9] 2026-01-04T02:35:07.561095Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:403:2450] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 910 Generation: 1 }: sender# [2:405:2452], cookie# 0, event size# 64 2026-01-04T02:35:07.561123Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:403:2450] Update description: path# /Root/Tenant/table_inside, pathId# [OwnerId: 910, LocalPathId: 9], deletion# true 2026-01-04T02:35:07.561189Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:403:2450] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /Root/Tenant/table_inside DomainOwnerId: 0 }: sender# [2:406:2453] 2026-01-04T02:35:07.561218Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:403:2450] Upsert description: path# /Root/Tenant/table_inside 2026-01-04T02:35:07.561297Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:403:2450] Subscribe: subscriber# [2:406:2453], path# /Root/Tenant/table_inside, domainOwnerId# 0, capabilities# =========== Left ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 
333 } } } PathId: 9 PathOwnerId: 910 =========== Right ==Path: "/Root/Tenant/table_inside" PathDescription { Self { PathVersion: 18446744073709551615 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 333 } } } PathId: 9 PathOwnerId: 910 =========== super id == DomainId: [OwnerId: 800, LocalPathId: 333] IsDeletion: 1 PathId: [OwnerId: 910, LocalPathId: 9] Verions: 18446744073709551615 =========== WIN ==/Root/Tenant/table_inside PathID: [OwnerId: 0, LocalPathId: 0] deleted: 1 version: 0 domainId: [OwnerId: 0, LocalPathId: 0] 2026-01-04T02:35:07.751016Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 800 Generation: 1 }: sender# [3:8:2055] 2026-01-04T02:35:07.751087Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 800, generation# 1 2026-01-04T02:35:07.751200Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 800 Generation: 1 }: sender# [3:8:2055] 2026-01-04T02:35:07.751251Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:7:2054] Commit generation: owner# 800, generation# 1 2026-01-04T02:35:07.751333Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 900 Generation: 1 }: sender# [3:9:2056] 2026-01-04T02:35:07.751372Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 900, generation# 1 2026-01-04T02:35:07.751430Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [3:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 900 Generation: 1 }: sender# [3:9:2056] 2026-01-04T02:35:07.751467Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [3:7:2054] Commit generation: owner# 900, generation# 1 2026-01-04T02:35:07.751591Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 800 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 118 2026-01-04T02:35:07.751631Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], deletion# false 2026-01-04T02:35:07.751786Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 800, LocalPathId: 1111], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2026-01-04T02:35:07.751931Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 900 Generation: 1 }: sender# [3:9:2056], cookie# 0, event size# 117 2026-01-04T02:35:07.751975Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], deletion# false 2026-01-04T02:35:07.752040Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:893: [3:7:2054] Update description by newest path form tenant schemeshard: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], domainId# [OwnerId: 800, LocalPathId: 1], curPathId# [OwnerId: 800, LocalPathId: 1111], curDomainId# [OwnerId: 800, LocalPathId: 1] 2026-01-04T02:35:07.752080Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# 
/root/db/dir_inside, pathId# [OwnerId: 800, LocalPathId: 1111] 2026-01-04T02:35:07.752145Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# /root/db/dir_inside, pathId# [OwnerId: 900, LocalPathId: 11], pathDescription# {Status StatusSuccess, Path /root/db/dir_inside, PathId [OwnerId: 900, LocalPathId: 11], PathVersion 1, SubdomainPathId [OwnerId: 800, LocalPathId: 1], PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 67} 2026-01-04T02:35:07.752247Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /root/db/dir_inside DomainOwnerId: 0 }: sender# [3:10:2057] 2026-01-04T02:35:07.752309Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:10:2057], path# /root/db/dir_inside, domainOwnerId# 0, capabilities# =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 1111 PathOwnerId: 800 =========== Path: "/root/db/dir_inside" PathDescription { Self { PathVersion: 1 } DomainDescription { DomainKey { SchemeShard: 800 PathId: 1 } } } PathId: 11 PathOwnerId: 900 =========== DomainId: [OwnerId: 800, LocalPathId: 1] IsDeletion: 0 PathId: [OwnerId: 900, LocalPathId: 11] Versions: 1 |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> THiveTest::TestNodeDisconnect [GOOD] >> THiveTest::TestReassignGroupsWithRecreateTablet >> TReplicaTest::HandshakeWithStaleGeneration [GOOD] >> TReplicaTest::IdempotencyUpdatesAliveSubscriber >> TReplicaTest::Delete [GOOD] >> THiveTest::TestCreate100Tablets [GOOD] >> THiveTest::TestCreateSubHiveCreateTablet >> TReplicaTest::Handshake >> TReplicaTest::IdempotencyUpdatesAliveSubscriber [GOOD] >> TReplicaTest::IdempotencyUpdatesVariant2 >> THiveTest::TestDrain [GOOD] >> THiveTest::TestDownAfterDrain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::SyncVersion [GOOD] Test command err: 2026-01-04T02:35:07.755414Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-04T02:35:07.755502Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:07.755689Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:07.755739Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-04T02:35:07.762629Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-04T02:35:07.762828Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2026-01-04T02:35:07.762955Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-04T02:35:07.763125Z node 1 :SCHEME_BOARD_REPLICA 
DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 40 2026-01-04T02:35:07.763173Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-04T02:35:07.763226Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [1:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:08.070149Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:8:2055] 2026-01-04T02:35:08.070261Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# path 2026-01-04T02:35:08.070392Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-04T02:35:08.422038Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-04T02:35:08.422135Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:08.422299Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 76 2026-01-04T02:35:08.422357Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-04T02:35:08.422434Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 100500, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 32} 2026-01-04T02:35:08.422532Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:8:2055] 2026-01-04T02:35:08.422603Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-04T02:35:08.422701Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [3:7:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: path }: sender# [3:8:2055], cookie# 1 |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::Handshake [GOOD] >> TReplicaTest::DoubleUnsubscribe >> THiveImplTest::BootQueueSpeed [GOOD] >> THiveImplTest::BalancerSpeedAndDistribution >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::Delete [GOOD] Test command err: 2026-01-04T02:35:08.065047Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-04T02:35:08.065128Z node 1 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:978: [1:7:2054] Reject commit from unknown populator: sender# [1:8:2055], owner# 1, generation# 1 2026-01-04T02:35:08.065202Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 
2026-01-04T02:35:08.065248Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:08.378170Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 0 }: sender# [2:8:2055] 2026-01-04T02:35:08.378233Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 0 2026-01-04T02:35:08.378323Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:9:2056] 2026-01-04T02:35:08.378358Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:08.378451Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [2:9:2056] 2026-01-04T02:35:08.378482Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [2:7:2054] Commit generation: owner# 1, generation# 1 2026-01-04T02:35:08.378565Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [2:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 0 }: sender# [2:8:2055] 2026-01-04T02:35:08.378600Z node 2 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:988: [2:7:2054] Reject commit from stale populator: sender# [2:8:2055], owner# 1, generation# 0, pending generation# 1 2026-01-04T02:35:08.378640Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [2:8:2055] 2026-01-04T02:35:08.378668Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 2 2026-01-04T02:35:08.676253Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-04T02:35:08.676323Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:08.676494Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:08.676538Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# false 2026-01-04T02:35:08.683726Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 42, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-04T02:35:08.683930Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056] 2026-01-04T02:35:08.684062Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# 2026-01-04T02:35:08.684209Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:10:2057] 2026-01-04T02:35:08.684294Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:10:2057], path# 
[OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-04T02:35:08.684437Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 40 2026-01-04T02:35:08.684475Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 42, LocalPathId: 1], deletion# true 2026-01-04T02:35:08.684527Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 42, LocalPathId: 1] 2026-01-04T02:35:08.684660Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:11:2058] 2026-01-04T02:35:08.684706Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:11:2058], path# path, domainOwnerId# 0, capabilities# 2026-01-04T02:35:08.684812Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 42, LocalPathId: 1] DomainOwnerId: 0 }: sender# [3:12:2059] 2026-01-04T02:35:08.684858Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:12:2059], path# [OwnerId: 42, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-04T02:35:08.685005Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:13:2060] 2026-01-04T02:35:08.685071Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:13:2060], path# path, domainOwnerId# 0, capabilities# |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::DoubleUnsubscribe [GOOD] >> TReplicaTest::DoubleDelete >> ScriptExecutionsTest::BackgroundChecksStartAfterRestart [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartNo_Query [GOOD] >> THiveTest::TestReassignGroupsWithRecreateTablet [GOOD] >> THiveTest::TestReassignUseRelativeSpace >> TReplicaTest::DoubleDelete [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::IdempotencyUpdatesVariant2 [GOOD] Test command err: 2026-01-04T02:35:08.665981Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:8:2055] 2026-01-04T02:35:08.666057Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 2 2026-01-04T02:35:08.666139Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-04T02:35:08.666197Z node 1 :SCHEME_BOARD_REPLICA ERROR: replica.cpp:772: [1:7:2054] Reject handshake from stale populator: sender# [1:8:2055], owner# 1, generation# 1, pending generation# 2 2026-01-04T02:35:08.956823Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2026-01-04T02:35:08.956898Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:08.957022Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [2:9:2056] 2026-01-04T02:35:08.957060Z node 2 
:SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:08.957283Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:9:2056], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-04T02:35:08.957516Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:08.957566Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-04T02:35:08.968833Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-04T02:35:08.969094Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 40 2026-01-04T02:35:08.969128Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-04T02:35:08.969179Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [2:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:08.969273Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:08.969307Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-04T02:35:08.969341Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [2:7:2054] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:08.969400Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:08.969448Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2026-01-04T02:35:08.969561Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-04T02:35:08.969678Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 2] DomainOwnerId: 0 }: sender# [2:10:2057] 2026-01-04T02:35:08.969747Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:10:2057], path# [OwnerId: 1, LocalPathId: 2], domainOwnerId# 0, capabilities# 2026-01-04T02:35:09.256175Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-04T02:35:09.256231Z node 3 :SCHEME_BOARD_REPLICA NOTICE: 
replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:09.256353Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:09.256388Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-04T02:35:09.256435Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-04T02:35:09.256510Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:09.256551Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2026-01-04T02:35:09.256580Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:09.256619Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-04T02:35:09.256671Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 40 2026-01-04T02:35:09.256693Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# true 2026-01-04T02:35:09.256720Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 2] 2026-01-04T02:35:09.256775Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:09.256807Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-04T02:35:09.256839Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [3:7:2054] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:09.256897Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:09.256928Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2026-01-04T02:35:09.256952Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:843: [3:7:2054] Path was explicitly deleted, ignoring: path# path, pathId# [OwnerId: 1, LocalPathId: 2] |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::Update >> THiveTest::TestCreateSubHiveCreateTablet [GOOD] >> 
THiveTest::TestCheckSubHiveForwarding >> TReplicaTest::Update [GOOD] >> TReplicaTest::UnsubscribeWithoutSubscribe >> TReplicaTest::Commit >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] >> TReplicaTest::Commit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::DoubleDelete [GOOD] Test command err: 2026-01-04T02:35:09.247254Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-04T02:35:09.247358Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:09.596427Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2026-01-04T02:35:09.596500Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:09.596708Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:09.596747Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-04T02:35:09.603058Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-04T02:35:09.603221Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:8:2055] 2026-01-04T02:35:09.603321Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-04T02:35:09.603430Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] 2026-01-04T02:35:09.603490Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [2:7:2054] Unsubscribe: subscriber# [2:8:2055], path# path 2026-01-04T02:35:09.603565Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] 2026-01-04T02:35:09.892593Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-04T02:35:09.892698Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:09.892850Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056] 2026-01-04T02:35:09.892899Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7:2054] Upsert description: path# path 2026-01-04T02:35:09.892984Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# 2026-01-04T02:35:09.893130Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], 
cookie# 0, event size# 72 2026-01-04T02:35:09.893173Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-04T02:35:09.893240Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-04T02:35:09.893441Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 40 2026-01-04T02:35:09.893487Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-04T02:35:09.893523Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:09.893667Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:10:2057] 2026-01-04T02:35:09.893745Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:10:2057], path# path, domainOwnerId# 0, capabilities# 2026-01-04T02:35:09.893930Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 40 2026-01-04T02:35:09.893983Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::AckNotifications >> THiveTest::TestRestartsWithFollower [GOOD] >> THiveTest::TestStartTabletTwiceInARow >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] >> THiveTest::TestHiveBalancer [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC1 |88.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TReplicaTest::AckNotifications [GOOD] >> TReplicaTest::AckNotificationsUponPathRecreation >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::UnsubscribeWithoutSubscribe [GOOD] Test command err: 2026-01-04T02:35:10.395020Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-04T02:35:10.395106Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:10.395365Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [1:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [1:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:10.395417Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-04T02:35:10.402552Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [1:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, 
LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-04T02:35:10.402750Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [1:8:2055] 2026-01-04T02:35:10.402874Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# path, domainOwnerId# 0, capabilities# 2026-01-04T02:35:10.403029Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [1:8:2055] 2026-01-04T02:35:10.403088Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:8:2055], path# path 2026-01-04T02:35:10.403153Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { PathId: [OwnerId: 1, LocalPathId: 1] DomainOwnerId: 0 }: sender# [1:8:2055] 2026-01-04T02:35:10.403220Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7:2054] Subscribe: subscriber# [1:8:2055], path# [OwnerId: 1, LocalPathId: 1], domainOwnerId# 0, capabilities# 2026-01-04T02:35:10.403352Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [1:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { PathId: [OwnerId: 1, LocalPathId: 1] }: sender# [1:8:2055] 2026-01-04T02:35:10.403403Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [1:7:2054] Unsubscribe: subscriber# [1:8:2055], path# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:10.684673Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2026-01-04T02:35:10.684745Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:10.684872Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:10.684941Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-04T02:35:10.685004Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-04T02:35:10.685093Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [2:7:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: path }: sender# [2:8:2055] |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] |88.4%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |88.4%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |88.4%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/rbo/ydb-core-kqp-ut-rbo |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::CreateTableOutsideDatabaseFailToStartTabletsButDropIsOk [GOOD] |88.5%| [TM] 
{BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> THiveTest::TestCheckSubHiveForwarding [GOOD] >> THiveTest::TestCheckSubHiveDrain >> THiveTest::TestReassignUseRelativeSpace [GOOD] >> THiveTest::TestManyFollowersOnOneNode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/proxy_service/ut/unittest >> ScriptExecutionsTest::BackgroundChecksStartAfterRestart [GOOD] Test command err: 2026-01-04T02:33:51.284203Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329063078507659:2129];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:51.284231Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:51.740369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:51.740504Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:51.826062Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329063078507570:2081] 1767494031280714 != 1767494031280717 2026-01-04T02:33:51.900568Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:52.195116Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:52.315381Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:52.557324Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:52.557345Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:52.557365Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:52.557445Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:52.637065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:52.646723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:33:55.854078Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:33:55.858741Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:33:55.858784Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-04T02:33:55.858805Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 4 2026-01-04T02:33:55.864009Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_executions updater. 
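The TReplicaTest output above exercises the scheme board replica's owner/generation handshake: a populator announces an (owner, generation) pair, later commits that generation, and any handshake or commit carrying an older generation is rejected ("Reject handshake/commit from stale populator"). The snippet below is a minimal, hypothetical C++ sketch of that bookkeeping; the class and method names are invented for illustration and are not the replica.cpp API.

#include <cstdint>
#include <iostream>
#include <unordered_map>

// Hypothetical sketch of per-owner generation tracking, loosely following the
// "Successful handshake" / "Reject ... from stale populator" lines above.
class TReplicaGenerations {
public:
    // A handshake succeeds only if the generation is not older than the one
    // already pending for this owner.
    bool Handshake(uint64_t owner, uint64_t generation) {
        uint64_t& pending = Pending_[owner];
        if (generation < pending) {
            std::cout << "Reject handshake from stale populator: owner# " << owner
                      << ", generation# " << generation
                      << ", pending generation# " << pending << "\n";
            return false;
        }
        pending = generation;
        std::cout << "Successful handshake: owner# " << owner
                  << ", generation# " << generation << "\n";
        return true;
    }

    // A commit is accepted only for the generation that is currently pending.
    bool CommitGeneration(uint64_t owner, uint64_t generation) {
        auto it = Pending_.find(owner);
        if (it == Pending_.end() || generation != it->second) {
            std::cout << "Reject commit from stale populator: owner# " << owner
                      << ", generation# " << generation << "\n";
            return false;
        }
        Committed_[owner] = generation;
        std::cout << "Commit generation: owner# " << owner
                  << ", generation# " << generation << "\n";
        return true;
    }

private:
    std::unordered_map<uint64_t, uint64_t> Pending_;
    std::unordered_map<uint64_t, uint64_t> Committed_;
};

int main() {
    TReplicaGenerations replica;
    replica.Handshake(1, 0);        // first populator connects
    replica.Handshake(1, 1);        // newer populator takes over
    replica.CommitGeneration(1, 1); // accepted
    replica.CommitGeneration(1, 0); // rejected: stale generation
    replica.Handshake(1, 2);        // a newer generation is still accepted
}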
SelfId: [1:7591329080258377587:2491] Owner: [1:7591329080258377586:2490]. Describe result: PathErrorUnknown 2026-01-04T02:33:55.864030Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_executions updater. SelfId: [1:7591329080258377587:2491] Owner: [1:7591329080258377586:2490]. Creating table 2026-01-04T02:33:55.864091Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_executions updater. SelfId: [1:7591329080258377587:2491] Owner: [1:7591329080258377586:2490]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_executions 2026-01-04T02:33:55.864233Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table script_execution_leases updater. SelfId: [1:7591329080258377588:2492] Owner: [1:7591329080258377586:2490]. Describe result: PathErrorUnknown 2026-01-04T02:33:55.864239Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table script_execution_leases updater. SelfId: [1:7591329080258377588:2492] Owner: [1:7591329080258377586:2490]. Creating table 2026-01-04T02:33:55.864251Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7591329080258377588:2492] Owner: [1:7591329080258377586:2490]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-04T02:33:55.864292Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:191: Table result_sets updater. SelfId: [1:7591329080258377589:2493] Owner: [1:7591329080258377586:2490]. Describe result: PathErrorUnknown 2026-01-04T02:33:55.864297Z node 1 :KQP_PROXY NOTICE: table_creator.cpp:211: Table result_sets updater. SelfId: [1:7591329080258377589:2493] Owner: [1:7591329080258377586:2490]. Creating table 2026-01-04T02:33:55.864310Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7591329080258377589:2493] Owner: [1:7591329080258377586:2490]. Created ESchemeOpCreateTable transaction for path: /dc-1/.metadata/result_sets 2026-01-04T02:33:55.869140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.871820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.879876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.910428Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7591329080258377589:2493] Owner: [1:7591329080258377586:2490]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 } 2026-01-04T02:33:55.910491Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table result_sets updater. SelfId: [1:7591329080258377589:2493] Owner: [1:7591329080258377586:2490]. 
Subscribe on create table tx: 281474976710660 2026-01-04T02:33:55.910931Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_executions updater. SelfId: [1:7591329080258377587:2491] Owner: [1:7591329080258377586:2490]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 41 } 2026-01-04T02:33:55.910951Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_executions updater. SelfId: [1:7591329080258377587:2491] Owner: [1:7591329080258377586:2490]. Subscribe on create table tx: 281474976710658 2026-01-04T02:33:55.913895Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table script_execution_leases updater. SelfId: [1:7591329080258377588:2492] Owner: [1:7591329080258377586:2490]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976710659 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 40 } 2026-01-04T02:33:55.913922Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:304: Table script_execution_leases updater. SelfId: [1:7591329080258377588:2492] Owner: [1:7591329080258377586:2490]. Subscribe on create table tx: 281474976710659 2026-01-04T02:33:55.916358Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_executions updater. SelfId: [1:7591329080258377587:2491] Owner: [1:7591329080258377586:2490]. Subscribe on tx: 281474976710658 registered 2026-01-04T02:33:55.916381Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table script_execution_leases updater. SelfId: [1:7591329080258377588:2492] Owner: [1:7591329080258377586:2490]. Subscribe on tx: 281474976710659 registered 2026-01-04T02:33:55.916389Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:329: Table result_sets updater. SelfId: [1:7591329080258377589:2493] Owner: [1:7591329080258377586:2490]. Subscribe on tx: 281474976710660 registered 2026-01-04T02:33:56.052318Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table result_sets updater. SelfId: [1:7591329080258377589:2493] Owner: [1:7591329080258377586:2490]. Request: create. Transaction completed: 281474976710660. Doublechecking... 2026-01-04T02:33:56.108861Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_execution_leases updater. SelfId: [1:7591329080258377588:2492] Owner: [1:7591329080258377586:2490]. Request: create. Transaction completed: 281474976710659. Doublechecking... 2026-01-04T02:33:56.110162Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:334: Table script_executions updater. SelfId: [1:7591329080258377587:2491] Owner: [1:7591329080258377586:2490]. Request: create. Transaction completed: 281474976710658. Doublechecking... 2026-01-04T02:33:56.142171Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table result_sets updater. SelfId: [1:7591329080258377589:2493] Owner: [1:7591329080258377586:2490]. Table already exists, number of columns: 7, has SecurityObject: true 2026-01-04T02:33:56.142219Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table result_sets updater. SelfId: [1:7591329080258377589:2493] Owner: [1:7591329080258377586:2490]. Column diff is empty, finishing 2026-01-04T02:33:56.143302Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table result_sets updater. SelfId: [1:7591329080258377589:2493] Owner: [1:7591329080258377586:2490]. 
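The table_creator lines around here (script_executions, script_execution_leases, result_sets updaters) follow a create-if-missing-then-verify pattern: describe the path, get PathErrorUnknown, propose ESchemeOpCreateTable, subscribe on the transaction, double-check after completion, and finish once the column diff is empty. The following C++ sketch illustrates that sequence under invented names and is not the actual ydb table_creator.cpp interface.

#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

// Hypothetical state for one metadata table; only what the sketch needs.
enum class EDescribeStatus { PathErrorUnknown, Success };

struct TTableState {
    EDescribeStatus Status = EDescribeStatus::PathErrorUnknown;
    std::vector<std::string> Columns;
};

// Returns true once the table exists with all required columns.
bool EnsureTable(TTableState& table, const std::vector<std::string>& required) {
    if (table.Status == EDescribeStatus::PathErrorUnknown) {
        std::cout << "Describe result: PathErrorUnknown -> creating table\n";
        table.Status = EDescribeStatus::Success;   // pretend the create tx completed
        table.Columns = required;
        std::cout << "Transaction completed. Doublechecking...\n";
    }
    // Column diff: anything required but not present would trigger an alter.
    std::vector<std::string> missing;
    for (const auto& col : required) {
        if (std::find(table.Columns.begin(), table.Columns.end(), col) == table.Columns.end()) {
            missing.push_back(col);
        }
    }
    if (missing.empty()) {
        std::cout << "Column diff is empty, finishing\n";
        return true;
    }
    std::cout << "Altering table to add " << missing.size() << " column(s)\n";
    table.Columns.insert(table.Columns.end(), missing.begin(), missing.end());
    return false;
}

int main() {
    TTableState scriptExecutions;
    const std::vector<std::string> required = {"database", "execution_id", "operation_status"};
    EnsureTable(scriptExecutions, required);   // creates, then verifies
    EnsureTable(scriptExecutions, required);   // already exists, diff empty
}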
Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/result_sets 2026-01-04T02:33:56.144281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:33:56.154945Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:229: Table result_sets updater. SelfId: [1:7591329080258377589:2493] Owner: [1:7591329080258377586:2490]. TEvProposeTransactionStatus: { Status: 48 TxId: 281474976710661 SchemeShardStatus: 0 SchemeShardTabletId: 72057594046644480 } 2026-01-04T02:33:56.154979Z node 1 :KQP_PROXY INFO: table_creator.cpp:361: Table result_sets updater. SelfId: [1:7591329080258377589:2493] Owner: [1:7591329080258377586:2490]. Successful alter request: ExecComplete 2026-01-04T02:33:56.177166Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:221: Table script_execution_leases updater. SelfId: [1:7591329080258377588:2492] Owner: [1:7591329080258377586:2490]. Table already exists, number of columns: 6, has SecurityObject: true 2026-01-04T02:33:56.177234Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:441: Table script_execution_leases updater. SelfId: [1:7591329080258377588:2492] Owner: [1:7591329080258377586:2490]. Column diff is empty, finishing 2026-01-04T02:33:56.177370Z node 1 :KQP_PROXY DEBUG: table_creator.cpp:112: Table script_execution_leases updater. SelfId: [1:7591329080258377588:2492] Owner: [1:7591329080258377586:2490]. Created ESchemeOpModifyACL transaction for path: /dc-1/.metadata/script_execution_leases 2026-01-04T02:33:56.178538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02: ... WHERE database = $database AND execution_id = $execution_id AND (expire_at > CurrentUtcTimestamp() OR expire_at IS NULL); 2026-01-04T02:35:07.834039Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=MjNkMmQ4NjEtZjllOTU0MmQtM2ZlNTFmODItNzIzNjI4YTM=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 58, targetId: [4:7591329389442600296:2618] 2026-01-04T02:35:07.834081Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 58 timeout: 300.000000s actor id: [4:7591329389442600298:3129] 2026-01-04T02:35:07.846019Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 58, sender: [4:7591329389442600297:2619], selfId: [4:7591329325018088865:2233], source: [4:7591329389442600296:2618] 2026-01-04T02:35:07.846433Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7591329389442600293:2615], ActorId: [4:7591329389442600294:2616], TraceId: ExecutionId: d6dea43a-77f241e8-44f5cc25-88ef7adc, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get operation info, DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=MjNkMmQ4NjEtZjllOTU0MmQtM2ZlNTFmODItNzIzNjI4YTM=, TxId: 01ke3dqgnte9a68tz9pb3gqwx8 2026-01-04T02:35:07.846996Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4237: [ScriptExecutions] [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7591329389442600293:2615], ActorId: [4:7591329389442600294:2616], TraceId: ExecutionId: d6dea43a-77f241e8-44f5cc25-88ef7adc, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Get operation info, Do finalization with status UNAVAILABLE, exec status: EXEC_STATUS_ABORTED, finalization status (applicate effect: 0): 1, issues: {
: Error: Lease expired }, retry deadline (wait retry: 0): 2026-01-04T02:35:07.846865Z, lease state: 1 2026-01-04T02:35:07.847502Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:201: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7591329389442600293:2615], ActorId: [4:7591329389442600294:2616], TraceId: ExecutionId: d6dea43a-77f241e8-44f5cc25-88ef7adc, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, RunDataQuery with SessionId: ydb://session/3?node_id=4&id=MjNkMmQ4NjEtZjllOTU0MmQtM2ZlNTFmODItNzIzNjI4YTM=, TxId: 01ke3dqgnte9a68tz9pb3gqwx8, text: -- TSaveScriptFinalStatusActor::FinishScriptExecution DECLARE $database AS Text; DECLARE $execution_id AS Text; DECLARE $operation_status AS Int32; DECLARE $execution_status AS Int32; DECLARE $finalization_status AS Int32; DECLARE $issues AS JsonDocument; DECLARE $plan_compressed AS Optional; DECLARE $plan_compression_method AS Optional; DECLARE $stats AS JsonDocument; DECLARE $ast_compressed AS Optional; DECLARE $ast_compression_method AS Optional; DECLARE $operation_ttl AS Interval; DECLARE $customer_supplied_id AS Text; DECLARE $script_sinks AS Optional; DECLARE $script_secret_names AS Optional; DECLARE $applicate_script_external_effect_required AS Bool; DECLARE $retry_state AS JsonDocument; DECLARE $retry_deadline AS Timestamp; DECLARE $lease_state AS Int32; UPDATE `.metadata/script_executions` SET operation_status = $operation_status, execution_status = $execution_status, finalization_status = IF($applicate_script_external_effect_required, $finalization_status, NULL), issues = $issues, plan_compressed = $plan_compressed, plan_compression_method = $plan_compression_method, end_ts = CurrentUtcTimestamp(), stats = $stats, ast_compressed = $ast_compressed, ast_compression_method = $ast_compression_method, expire_at = IF($operation_ttl > CAST(0 AS Interval), CurrentUtcTimestamp() + $operation_ttl, NULL), customer_supplied_id = IF($applicate_script_external_effect_required, $customer_supplied_id, NULL), script_sinks = IF($applicate_script_external_effect_required, $script_sinks, NULL), script_secret_names = IF($applicate_script_external_effect_required, $script_secret_names, NULL), retry_state = $retry_state WHERE database = $database AND execution_id = $execution_id; DELETE FROM `.metadata/script_execution_leases` WHERE database = $database AND execution_id = $execution_id; 2026-01-04T02:35:07.848082Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: , Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=MjNkMmQ4NjEtZjllOTU0MmQtM2ZlNTFmODItNzIzNjI4YTM=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 59, targetId: [4:7591329389442600296:2618] 2026-01-04T02:35:07.848122Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 59 timeout: 300.000000s actor id: [4:7591329389442600321:3136] 2026-01-04T02:35:07.855624Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3dqgped8aagyx7thyqsatd, Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=YTUyNWVjZjMtYmE4MmQzNjUtOWFjYTVjMjctZDhiMDg0ZDk=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
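The TSaveScriptFinalStatusActor lines above show what happens when a script execution's lease has expired: the operation is finalized with status UNAVAILABLE, exec status EXEC_STATUS_ABORTED and a "Lease expired" issue, and the lease row is deleted in the same transaction that writes the final status. Below is a small, hypothetical C++ sketch of just the expiry decision; the type names are invented and the persistence step is only described in a comment.

#include <chrono>
#include <cstdint>
#include <iostream>
#include <optional>
#include <string>

using TClock = std::chrono::system_clock;

struct TLease {
    std::string ExecutionId;
    TClock::time_point Deadline;
    uint64_t Generation = 1;
};

struct TFinalStatus {
    std::string OperationStatus;   // e.g. "UNAVAILABLE"
    std::string ExecStatus;        // e.g. "EXEC_STATUS_ABORTED"
    std::string Issue;
};

// Returns a final status only if the lease deadline has already passed.
std::optional<TFinalStatus> CheckLease(const TLease& lease, TClock::time_point now) {
    if (now <= lease.Deadline) {
        return std::nullopt;           // lease still valid, nothing to finalize
    }
    return TFinalStatus{"UNAVAILABLE", "EXEC_STATUS_ABORTED", "Lease expired"};
}

int main() {
    TLease lease{"d6dea43a-77f241e8-44f5cc25-88ef7adc",
                 TClock::now() - std::chrono::seconds(30)};
    if (auto status = CheckLease(lease, TClock::now())) {
        // In the real flow this is where script_executions is updated and the
        // script_execution_leases row is deleted in one transaction.
        std::cout << "Finalize " << lease.ExecutionId << " with "
                  << status->OperationStatus << ": " << status->Issue << "\n";
    }
}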
Send request to target, requestId: 60, targetId: [4:7591329372262730803:2542] 2026-01-04T02:35:07.855682Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 60 timeout: 300.000000s actor id: [4:7591329389442600340:3139] 2026-01-04T02:35:07.876533Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 59, sender: [4:7591329389442600320:2625], selfId: [4:7591329325018088865:2233], source: [4:7591329389442600296:2618] 2026-01-04T02:35:07.877694Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:244: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7591329389442600293:2615], ActorId: [4:7591329389442600294:2616], TraceId: ExecutionId: d6dea43a-77f241e8-44f5cc25-88ef7adc, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, DataQuery #2 finished SUCCESS, Issues: [ {
:20:21: Warning: Symbol $retry_deadline is not used, code: 4527 } {
:21:21: Warning: Symbol $lease_state is not used, code: 4527 } ], SessionId: ydb://session/3?node_id=4&id=MjNkMmQ4NjEtZjllOTU0MmQtM2ZlNTFmODItNzIzNjI4YTM=, TxId: 2026-01-04T02:35:07.877871Z node 4 :KQP_PROXY DEBUG: query_actor.cpp:371: [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7591329389442600293:2615], ActorId: [4:7591329389442600294:2616], TraceId: ExecutionId: d6dea43a-77f241e8-44f5cc25-88ef7adc, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=MjNkMmQ4NjEtZjllOTU0MmQtM2ZlNTFmODItNzIzNjI4YTM=, TxId: 2026-01-04T02:35:07.877928Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4311: [ScriptExecutions] [TQueryBase] [TSaveScriptFinalStatusActor] OwnerId: [4:7591329389442600293:2615], ActorId: [4:7591329389442600294:2616], TraceId: ExecutionId: d6dea43a-77f241e8-44f5cc25-88ef7adc, RequestDatabase: /dc-1, LeaseGeneration: 1, State: Update final status, Finish script execution operation. Status: UNAVAILABLE. Issues: {
: Error: Lease expired } 2026-01-04T02:35:07.878163Z node 4 :KQP_PROXY DEBUG: query_actor.h:311: [TQueryRetryActor] [TSaveScriptFinalStatusActor] OwnerId: [4:7591329389442600291:2614], ActorId: [4:7591329389442600293:2615], TraceId: ExecutionId: d6dea43a-77f241e8-44f5cc25-88ef7adc, RequestDatabase: /dc-1, LeaseGeneration: 1, Got response [4:7591329389442600294:2616] SUCCESS 2026-01-04T02:35:07.878814Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=MjNkMmQ4NjEtZjllOTU0MmQtM2ZlNTFmODItNzIzNjI4YTM=, workerId: [4:7591329389442600296:2618], local sessions count: 1 2026-01-04T02:35:07.879841Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1443: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [4:7591329385147632858:3075] ActorId: [4:7591329385147632904:3093] Database: /dc-1 ExecutionId: d6dea43a-77f241e8-44f5cc25-88ef7adc. Successfully finalized script execution operation, WaitingRetry: 0 2026-01-04T02:35:07.879894Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:1789: [ScriptExecutions] [TCheckLeaseStatusActor] OwnerId: [4:7591329385147632858:3075] ActorId: [4:7591329385147632904:3093] Database: /dc-1 ExecutionId: d6dea43a-77f241e8-44f5cc25-88ef7adc. Reply success 2026-01-04T02:35:07.880000Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4833: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [4:7591329380852665530:3059] ActorId: [4:7591329385147632858:3075]. Lease check #0 [4:7591329385147632906:3095] successfully completed, OperationsToCheck: 0 2026-01-04T02:35:07.880044Z node 4 :KQP_PROXY DEBUG: kqp_script_executions.cpp:4845: [ScriptExecutions] [TRefreshScriptExecutionLeasesActor] OwnerId: [4:7591329380852665530:3059] ActorId: [4:7591329385147632858:3075]. Finish, success: 1, issues: 2026-01-04T02:35:07.880110Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:78: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Refresh successfully completed 2026-01-04T02:35:08.067278Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3dqgped8aagyx7thyqsatd", Forwarded response to sender actor, requestId: 60, sender: [4:7591329389442600339:2629], selfId: [4:7591329325018088865:2233], source: [4:7591329372262730803:2542] 2026-01-04T02:35:08.090935Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3dqgxtf4tyjamj7m3tfqdg, Database: /dc-1, SessionId: ydb://session/3?node_id=4&id=YTUyNWVjZjMtYmE4MmQzNjUtOWFjYTVjMjctZDhiMDg0ZDk=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 300.000000s timeout: 0.000000s cancelAfter: 0.000000s. 
Send request to target, requestId: 61, targetId: [4:7591329372262730803:2542] 2026-01-04T02:35:08.090993Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 61 timeout: 300.000000s actor id: [4:7591329393737567663:3152] 2026-01-04T02:35:08.138578Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:35:08.138609Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:08.690826Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:94: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Do ScheduleRefreshScriptExecutions (WaitRefreshScriptExecutions: 0), next refresh after 1.000000s 2026-01-04T02:35:08.690885Z node 4 :KQP_PROXY DEBUG: kqp_check_script_lease_actor.cpp:109: [ScriptExecutions] [TScriptExecutionLeaseCheckActor] Schedule lease check after 0.887478s 2026-01-04T02:35:09.135476Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3dqgxtf4tyjamj7m3tfqdg", Forwarded response to sender actor, requestId: 61, sender: [4:7591329393737567662:2634], selfId: [4:7591329325018088865:2233], source: [4:7591329372262730803:2542] 2026-01-04T02:35:09.153219Z node 4 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=4&id=YTUyNWVjZjMtYmE4MmQzNjUtOWFjYTVjMjctZDhiMDg0ZDk=, workerId: [4:7591329372262730803:2542], local sessions count: 0 |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/proxy_service/ut/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_replica/unittest >> TReplicaTest::AckNotificationsUponPathRecreation [GOOD] Test command err: 2026-01-04T02:35:10.733430Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-04T02:35:10.733495Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:10.733577Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:967: [1:7:2054] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 1 Generation: 1 }: sender# [1:8:2055] 2026-01-04T02:35:10.733618Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:994: [1:7:2054] Commit generation: owner# 1, generation# 1 2026-01-04T02:35:10.733653Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [1:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 2 }: sender# [1:8:2055] 2026-01-04T02:35:10.733676Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [1:7:2054] Successful handshake: owner# 1, generation# 2 2026-01-04T02:35:11.036441Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [2:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [2:9:2056] 2026-01-04T02:35:11.036513Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [2:7:2054] Upsert description: path# path 2026-01-04T02:35:11.036673Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [2:7:2054] Subscribe: subscriber# [2:9:2056], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2026-01-04T02:35:11.036828Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [2:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [2:8:2055] 2026-01-04T02:35:11.036874Z node 2 :SCHEME_BOARD_REPLICA 
NOTICE: replica.cpp:778: [2:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:11.037063Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:11.037100Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-04T02:35:11.047818Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [2:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-04T02:35:11.048073Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [2:7:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [2:9:2056] 2026-01-04T02:35:11.048186Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [2:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [2:8:2055], cookie# 0, event size# 40 2026-01-04T02:35:11.048222Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# true 2026-01-04T02:35:11.048256Z node 2 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [2:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:11.048345Z node 2 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [2:7:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 1 }: sender# [2:9:2056] 2026-01-04T02:35:11.380565Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:760: [3:7:2054] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 1 Generation: 1 }: sender# [3:8:2055] 2026-01-04T02:35:11.380638Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:778: [3:7:2054] Successful handshake: owner# 1, generation# 1 2026-01-04T02:35:11.380789Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:11.380842Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-04T02:35:11.380917Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 2, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-04T02:35:11.381027Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [3:7:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: path DomainOwnerId: 0 }: sender# [3:9:2056] 2026-01-04T02:35:11.381127Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2026-01-04T02:35:11.381233Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:11.381268Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], deletion# false 2026-01-04T02:35:11.381322Z node 3 
:SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 1], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 1], PathVersion 3, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-04T02:35:11.381498Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:791: [3:7:2054] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 1 Generation: 1 }: sender# [3:8:2055], cookie# 0, event size# 72 2026-01-04T02:35:11.381534Z node 3 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [3:7:2054] Update description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], deletion# false 2026-01-04T02:35:11.381588Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:584: [3:7:2054] Delete description: path# path, pathId# [OwnerId: 1, LocalPathId: 1] 2026-01-04T02:35:11.381671Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [3:7:2054] Upsert description: path# path 2026-01-04T02:35:11.381793Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [3:7:2054] Subscribe: subscriber# [3:9:2056], path# path, domainOwnerId# 0, capabilities# AckNotifications: true 2026-01-04T02:35:11.381952Z node 3 :SCHEME_BOARD_REPLICA INFO: replica.cpp:559: [3:7:2054] Upsert description: path# path, pathId# [OwnerId: 1, LocalPathId: 2], pathDescription# {Status StatusSuccess, Path path, PathId [OwnerId: 1, LocalPathId: 2], PathVersion 1, SubdomainPathId , PathAbandonedTenantsSchemeShards size 0, DescribeSchemeResultSerialized size 30} 2026-01-04T02:35:11.382114Z node 3 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [3:7:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 3 }: sender# [3:9:2056] |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_replica/unittest >> THiveTest::TestStartTabletTwiceInARow [GOOD] >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject >> THiveTest::TestDownAfterDrain [GOOD] >> THiveTest::TestDrainAndReconnect |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |88.5%| [LD] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/ydb-core-tx-tx_allocator_client-ut |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TxUsage::Sinks_Oltp_WriteToTopic_5_Query [GOOD] |88.5%| [TA] $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} |88.5%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} |88.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_replica/test-results/unittest/{meta.json ... results_accumulator.log} |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> HttpRequest::ProbeServerless [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Table [GOOD] |88.5%| [TA] $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> THiveTest::TestHiveFollowersWithChangingDC [GOOD] >> THiveTest::TestHiveBalancerWithSystemTablets |88.5%| [TA] {RESULT} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |88.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/proxy_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> THiveTest::TestManyFollowersOnOneNode [GOOD] >> THiveTest::TestNotEnoughResources >> TxUsage::Sinks_Oltp_WriteToTopics_1_Table |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Table [GOOD] >> THiveTest::TestCheckSubHiveDrain [GOOD] >> THiveTest::TestCheckSubHiveMigration >> TxUsage::WriteToTopic_Demo_45_Query [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartNo_Query [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn >> KqpBatchUpdate::Large_3 [GOOD] >> YdbIndexTable::MultiShardTableOneIndex >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex >> THiveImplTest::BalancerSpeedAndDistribution [GOOD] >> THiveImplTest::TestShortTabletTypes [GOOD] >> THiveImplTest::TestStDev [GOOD] >> THiveImplTest::BootQueueConfigurePriorities [GOOD] >> THiveTest::TestBlockCreateTablet >> THiveTest::TestDrainAndReconnect [GOOD] >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 >> YdbIndexTable::OnlineBuild >> YdbIndexTable::MultiShardTableOneUniqIndex >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Query >> TContinuousBackupWithRebootsTests::TakeSeveralIncrementalBackups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:37.079750Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:37.189113Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:37.197791Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:37.198221Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:37.198359Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:37.536748Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:37.620779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:37.620944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:37.668158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:37.757968Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:38.403854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:38.404463Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:38.404509Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:38.404539Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:38.405065Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:38.469696Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:39.002389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:32:42.279965Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:42.290798Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:32:42.296409Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:42.349999Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
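The TReplicaTest::AckNotificationsUponPathRecreation output a few lines above shows the replica keeping one description per path keyed by (pathId, PathVersion): updates for the same pathId keep the highest version, and when the path is re-created under a new pathId the description for the old pathId is deleted before the new one is upserted; subscribers then acknowledge notifications by version. The C++ sketch below illustrates only that upsert/delete bookkeeping under invented names; it is not the real replica interface and does not reproduce the exact ack versions from the log.

#include <algorithm>
#include <cstdint>
#include <iostream>
#include <map>
#include <string>
#include <tuple>

struct TPathId {
    uint64_t OwnerId = 0;
    uint64_t LocalPathId = 0;
    bool operator<(const TPathId& o) const {
        return std::tie(OwnerId, LocalPathId) < std::tie(o.OwnerId, o.LocalPathId);
    }
};

struct TDescription {
    TPathId PathId;
    uint64_t Version = 0;
};

class TReplicaDescriptions {
public:
    // Returns the version a subscriber would be notified with.
    uint64_t Upsert(const std::string& path, TPathId pathId, uint64_t version) {
        auto it = ByPath_.find(path);
        const bool samePathId = it != ByPath_.end()
            && !(it->second.PathId < pathId) && !(pathId < it->second.PathId);
        if (samePathId) {
            // Same pathId: keep the highest version seen so far.
            it->second.Version = std::max(it->second.Version, version);
            return it->second.Version;
        }
        if (it != ByPath_.end()) {
            std::cout << "Delete description for old pathId "
                      << it->second.PathId.LocalPathId << "\n";
        }
        ByPath_[path] = TDescription{pathId, version};
        return version;
    }

private:
    std::map<std::string, TDescription> ByPath_;
};

int main() {
    TReplicaDescriptions replica;
    std::cout << "notify version " << replica.Upsert("path", {1, 1}, 2) << "\n";
    std::cout << "notify version " << replica.Upsert("path", {1, 1}, 3) << "\n";
    std::cout << "notify version " << replica.Upsert("path", {1, 2}, 1) << "\n"; // recreation
}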
2026-01-04T02:32:42.350144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:42.384542Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:42.387497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:42.605860Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:42.606012Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:42.610520Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:42.611512Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:42.612188Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:42.613628Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:42.614217Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:42.614363Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:42.614659Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:42.614839Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:42.614984Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:42.647173Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:42.887012Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:42.956903Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:32:42.957025Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:32:42.988420Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:32:42.989732Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:32:42.989975Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:32:42.990035Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:32:42.990107Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:32:42.990164Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:32:42.990213Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:32:42.990267Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:32:42.990890Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:32:42.991203Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:32:42.993914Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-04T02:32:43.006096Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:32:43.006191Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:32:43.006292Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:32:43.022095Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:43.033965Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:32:43.044090Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:32:43.048168Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:43.048278Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:43.079762Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:32:43.080262Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:32:43.220060Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:32:43.253119Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-04T02:32:43.596982Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:32:43.599729Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:43.603965Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:43.608772Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:32:43.608894Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 2814749767 ... ry with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-04T02:33:08.369856Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5977:2462], ActorId: [2:5987:5103], Start read next stream part 2026-01-04T02:33:08.673220Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:6014:5117]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:08.673499Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:33:08.673758Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-01-04T02:33:08.673843Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:6017:5120] 2026-01-04T02:33:08.673918Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:6017:5120] 2026-01-04T02:33:08.674498Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6017:5120], server id = [2:6018:5121], tablet id = 72075186224037894, status = OK 2026-01-04T02:33:08.674565Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:6018:5121] 2026-01-04T02:33:08.674695Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:6018:5121], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:33:08.674768Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:33:08.674942Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:33:08.675034Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:6014:5117], StatRequests.size() = 1 2026-01-04T02:33:08.675123Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:34:09.382520Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5977:2462], ActorId: [2:5987:5103], StreamQueryResultPart #1 finished SUCCESS, Issues: 
2026-01-04T02:34:09.382819Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5977:2462], ActorId: [2:5987:5103], Start read next stream part 2026-01-04T02:34:09.383395Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3dkw0h3m6ygh27v6zhte86", SessionId: ydb://session/3?node_id=2&id=YmFmYTVhNDYtYzExMmZmZTgtYTUwZTFkMjgtZmM2YjgzYjY=, Slow query, duration: 60.958856s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:34:09.385254Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5977:2462], ActorId: [2:6276:5294], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:34:09.388345Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5977:2462], ActorId: [2:5987:5103], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:34:09.388426Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5977:2462], ActorId: [2:5987:5103], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzRlYmIwMTEtNzVkYWM3MzUtNTU5NzhlMzgtNTU0NTMyZWI=, TxId: 2026-01-04T02:34:09.388564Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33000, txId: 18446744073709551615] shutting down 2026-01-04T02:34:09.388911Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:5977:2462], ActorId: [2:6276:5294], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-04T02:34:09.389024Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5977:2462], ActorId: [2:6276:5294], Start read next stream part 2026-01-04T02:34:09.598375Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6294:5309]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:34:09.598811Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:34:09.598877Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:6294:5309], StatRequests.size() = 1 2026-01-04T02:35:11.298010Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5977:2462], ActorId: [2:6276:5294], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:35:11.298178Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5977:2462], ActorId: [2:6276:5294], Start read next stream part 2026-01-04T02:35:11.304702Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:6515:5449], ActorId: [2:6516:5450], Starting query actor #1 [2:6517:5451] 2026-01-04T02:35:11.304818Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:6516:5450], ActorId: [2:6517:5451], Bootstrap. 
Database: /Root/Shared, IsSystemUser: 1, run create session 2026-01-04T02:35:11.308636Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3dnqkcbwcc2h5ae2rsq9p9", SessionId: ydb://session/3?node_id=2&id=NDA5MzRlZmItMmQzMDg1YWYtZDg0OTUwZDUtOTRjYmQwZWM=, Slow query, duration: 61.904074s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:35:11.313016Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:6516:5450], ActorId: [2:6517:5451], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MWY3MTdkNDctYjg5ZjNkNzQtMjg4ZGMwNjEtNjBkNTBiZjE=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:35:11.314145Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5977:2462], ActorId: [2:6276:5294], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:35:11.314213Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5977:2462], ActorId: [2:6276:5294], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTQ3ZGZhYzQtNTMzYjk3ZTUtNGVlZmNhOS00ZmEzZWQwOA==, TxId: 2026-01-04T02:35:11.314334Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33540, txId: 18446744073709551615] shutting down 2026-01-04T02:35:11.441354Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:6534:5465]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:35:11.441768Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:35:11.442317Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2026-01-04T02:35:11.442393Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:35:11.442642Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:35:11.442738Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:6534:5465], StatRequests.size() = 1 2026-01-04T02:35:11.443112Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:35:11.727058Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:6516:5450], ActorId: [2:6517:5451], 
DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MWY3MTdkNDctYjg5ZjNkNzQtMjg4ZGMwNjEtNjBkNTBiZjE=, TxId: 2026-01-04T02:35:11.727188Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6516:5450], ActorId: [2:6517:5451], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWY3MTdkNDctYjg5ZjNkNzQtMjg4ZGMwNjEtNjBkNTBiZjE=, TxId: 2026-01-04T02:35:11.740509Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:6515:5449], ActorId: [2:6516:5450], Got response [2:6517:5451] SUCCESS 2026-01-04T02:35:11.742118Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:35:11.771515Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 32] 2026-01-04T02:35:11.771612Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=2 -U{, ActorId=[1:4873:3692] 2026-01-04T02:35:11.773208Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:6553:4254]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-04T02:35:11.773526Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:35:11.773598Z node 1 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:35:11.773970Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:35:11.774030Z node 1 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-04T02:35:11.774104Z node 1 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-04T02:35:11.813442Z node 1 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table[Value]=100' |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TxUsage::WriteToTopic_Demo_46_Table >> TStorageTenantTest::Empty [GOOD] >> THiveTest::TestCreateTabletsWithRaceForStoragePoolsKIKIMR_9659 [GOOD] >> THiveTest::TestDeleteTablet >> TSchemeShardTest::AlterIndexTableDirectly [GOOD] >> TSchemeShardTest::AlterMixedStorageConfigAndChannelProfileIdTable >> THiveTest::TestBlockCreateTablet [GOOD] >> THiveTest::DrainWithHiveRestart >> TContinuousBackupWithRebootsTests::Basic |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |88.5%| [LD] {RESULT} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/grpc_streaming/ut/ydb-core-grpc_streaming-ut |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> TStorageTenantTest::Empty [GOOD] >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Query >> TxUsage::WriteToTopic_Demo_27_Query [GOOD] |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_encrypted_storage/unittest >> THiveTest::TestSpreadNeighboursWithUpdateTabletsObject [GOOD] >> THiveTest::TestSpreadNeighboursDifferentOwners |88.5%| [TM] 
{asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_3 [GOOD] Test command err: Trying to start YDB, gRPC: 13135, MsgBus: 12763 2026-01-04T02:33:49.467162Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329051621793279:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:49.473943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:50.051577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:50.051677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:50.129238Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:50.174233Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329051621793249:2081] 1767494029445568 != 1767494029445571 2026-01-04T02:33:50.174915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:50.176894Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:50.341070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:50.502154Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:50.580209Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:50.580231Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:50.580241Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:50.580348Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:51.324917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:51.365547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-04T02:33:51.557443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:51.865433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:51.974959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:54.472601Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329051621793279:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:54.472684Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:33:55.131795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329077391598914:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.131905Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.132421Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329077391598924:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.132467Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.553260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.594667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.634928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.714732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.754639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.812369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.870494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.954752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:56.033502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329081686567092:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:56.033598Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:56.033914Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329081686567097:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:56.033944Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:56.033955Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329081686567098:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:56.038626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:56.051150Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329081686567101:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:33:56.137535Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329081686567154:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:57.968944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... _message; 2026-01-04T02:34:30.610036Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:34:30.696732Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:30.706694Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:34:30.706792Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:34:30.749514Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:34:30.818490Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:34:30.818525Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:34:30.818533Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:34:30.818635Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:34:30.859496Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:34:31.361035Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:34:31.381013Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:31.491449Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-04T02:34:31.636891Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:34:31.739232Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:31.841186Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:35.550466Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329250561978949:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:35.550558Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:35.550933Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329250561978959:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:35.550991Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:35.570808Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329229087140646:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:34:35.570895Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:34:35.628064Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:35.670239Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:35.714312Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:35.744578Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:35.776909Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:35.820214Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:35.863579Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:35.919849Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:36.009014Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329254856947131:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:36.009128Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:36.009489Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329254856947136:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:36.009530Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329254856947137:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:36.009800Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:34:36.014247Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:34:36.059010Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329254856947140:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:34:36.157386Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329254856947191:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:34:38.559826Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:34:45.669140Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:34:45.669184Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:11.276799Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3dq88mfvt24r1yh37ct7n5", SessionId: ydb://session/3?node_id=2&id=NTZmNDkxOTUtYjgxNWU4ODEtNzNhZmYyZDctZDczMTIwMjE=, Slow query, duration: 12.050336s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n BATCH UPDATE LargeTable\n SET Data = 2\n WHERE Key >= 2000;\n ", parameters: 0b |88.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> THiveTest::TestDeleteTablet [GOOD] >> THiveTest::TestDeleteOwnerTablets >> THiveTest::TestCheckSubHiveMigration [GOOD] >> THiveTest::TestCheckSubHiveMigrationManyTablets |88.5%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut |88.5%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |88.5%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut |88.5%| [LD] {RESULT} $(B)/ydb/library/yql/dq/runtime/ut/ydb-library-yql-dq-runtime-ut |88.6%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... results_accumulator.log} |88.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_encrypted_storage/test-results/unittest/{meta.json ... 
results_accumulator.log} >> BasicUsage::AlterTopicWithSharedConsumer_AlterMoveDeadLetterPolicy [GOOD] >> BasicUsage::AlterTopicWithSharedConsumer_DeleteDeadLetterPolicy_AlterMoveDeadLetterPolicy >> THiveTest::TestHiveBalancerWithPrefferedDC1 [GOOD] >> THiveTest::TestHiveBalancerWithPrefferedDC2 >> THiveTest::TestNotEnoughResources [GOOD] >> THiveTest::TestRestartTablets >> TxUsage::WriteToTopic_Demo_38_Table >> THiveTest::TestHiveBalancerWithSystemTablets [GOOD] >> THiveTest::TestHiveNoBalancingWithLowResourceUsage >> BasicUsage::WriteSessionSwitchDatabases [GOOD] >> TSchemeShardTest::AlterMixedStorageConfigAndChannelProfileIdTable [GOOD] >> TSchemeShardTest::AlterFamilyWithMultipleStoragePools >> THiveTest::TestDeleteOwnerTablets [GOOD] >> THiveTest::TestDeleteOwnerTabletsMany |88.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Table >> KqpRboPg::Select >> KqpRboYql::OlapPredicatePushdown |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |88.6%| [LD] {RESULT} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/kqp/ut/perf/ydb-core-kqp-ut-perf >> THiveTest::TestSpreadNeighboursDifferentOwners [GOOD] >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics >> KqpRboPg::TestCrossInnerJoin-ColumnStore >> THiveTest::TestRestartTablets [GOOD] >> THiveTest::TestLockTabletExecutionTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::WriteSessionSwitchDatabases [GOOD] Test command err: 2026-01-04T02:32:49.466638Z :WriteSessionNoAvailableDatabase INFO: Random seed for debugging is 1767493969466596 2026-01-04T02:32:50.001543Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328793969723013:2263];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:50.001716Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:50.165380Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328799284306040:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:50.165424Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:50.239457Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:50.261324Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:50.577217Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:50.576793Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:50.727640Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:50.727758Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:50.731100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:50.731271Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:50.775442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:50.784416Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:50.799205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:50.806502Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:50.857929Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:50.865446Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:51.006878Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:51.150933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/002d0e/r3tmp/yandexFvuF5h.tmp 2026-01-04T02:32:51.150960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/002d0e/r3tmp/yandexFvuF5h.tmp 2026-01-04T02:32:51.151097Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/002d0e/r3tmp/yandexFvuF5h.tmp 2026-01-04T02:32:51.151174Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:51.238345Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:51.294824Z INFO: TTestServer started on Port 25525 GrpcPort 10675 PQClient connected to localhost:10675 2026-01-04T02:32:51.517772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976720657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:54.637734Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328815444560640:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.637889Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.638268Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328815444560661:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.638315Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328815444560662:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.638675Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.648437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:32:54.661113Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328815444560697:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.666283Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.666754Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328815444560700:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.666807Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.692747Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328816464175556:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.693386Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328816464175529:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.693485Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.694238Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328816464175567:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.694454Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:54.819008Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328815444560665:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720661 completed, doublechecking } 2026-01-04T02:32:54.990052Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591328816464175566:2130] txid# 281474976715657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:32:54.993575Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328815444560755:3000] txid# 281474976720662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:55.015272Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328793969723013:2263];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:55.015946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:32:55.020847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:55.168601Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591328799284306040:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:55.168811Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T0 ... 
d tx events 2026-01-04T02:35:15.759412Z node 4 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:35:15.759443Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:35:15.759456Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:35:15.759475Z node 4 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:35:15.759583Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob processing sourceId '\0src_id' seqNo 3 partNo 0 2026-01-04T02:35:15.760696Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72075186224037892][Partition][0][StateIdle] Topic 'rt3.dc1--test-topic' partition 0 part blob complete sourceId '\0src_id' seqNo 3 partNo 0 FormedBlobsCount 0 NewHead: Offset 2 PartNo 0 PackedSize 172 count 1 nextOffset 3 batches 1 2026-01-04T02:35:15.760740Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:35:15.760751Z node 4 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:35:15.760766Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:35:15.761185Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72075186224037892][Partition][0][StateIdle] Add new write blob: topic 'rt3.dc1--test-topic' partition 0 compactOffset 2,1 HeadOffset 2 endOffset 2 curOffset 3 d0000000000_00000000000000000002_00000_0000000001_00000? size 160 WTime 1767494115761 2026-01-04T02:35:15.761373Z node 4 :PERSQUEUE DEBUG: read.h:275: [72075186224037892][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:35:15.761489Z node 4 :PERSQUEUE DEBUG: read.h:313: [72075186224037892][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 2 partNo 0 count 1 size 160 2026-01-04T02:35:15.768888Z node 4 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 2 count 1 size 160 actorID [4:7591328905176925176:2399] 2026-01-04T02:35:15.768972Z node 4 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037892][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:35:15.769044Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037892][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 105 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:35:15.769085Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:60: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyWrite. 
Partition: 0 2026-01-04T02:35:15.769123Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:365: [72075186224037892][Partition][0][StateIdle] Answering for message sourceid: '\0src_id', Topic: 'rt3.dc1--test-topic', Partition: 0, SeqNo: 3, partNo: 0, Offset: 2 is stored on disk 2026-01-04T02:35:15.769280Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:35:15.769294Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:35:15.769308Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:35:15.769328Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:35:15.769343Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:35:15.769377Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037892][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:35:15.769413Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1199: [PQ: 72075186224037892] Topic 'rt3.dc1--test-topic' counters. CacheSize 480 CachedBlobs 3 2026-01-04T02:35:15.769443Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 1 requestId: cookie: 1 2026-01-04T02:35:15.769591Z node 4 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72075186224037892' partition 0 offset 2 partno 0 count 1 parts 0 suffix '63' size 160 2026-01-04T02:35:15.769943Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::IEventHandle 2026-01-04T02:35:15.771617Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d3c3ee39-4a7dbcbb-5cb43aa0-a821b671_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 0 2026-01-04T02:35:15.771742Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d3c3ee39-4a7dbcbb-5cb43aa0-a821b671_0] MessageGroupId [src_id] Write session got write response: acks { seq_no: 3 written { offset: 2 } } write_statistics { persisting_time { nanos: 10000000 } min_queue_wait_time { } max_queue_wait_time { } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-01-04T02:35:15.771763Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d3c3ee39-4a7dbcbb-5cb43aa0-a821b671_0] MessageGroupId [src_id] OnAck: seqNo=1, txId=? 2026-01-04T02:35:15.771780Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d3c3ee39-4a7dbcbb-5cb43aa0-a821b671_0] MessageGroupId [src_id] Write session: acknoledged message 1 2026-01-04T02:35:15.778430Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d3c3ee39-4a7dbcbb-5cb43aa0-a821b671_0] MessageGroupId [src_id] Write session: OnReadDone gRpcStatusCode: 1, Msg: Cancelled on the server side, Details: , InternalError: 0 2026-01-04T02:35:15.778555Z :ERROR: [/Root] TraceId [] SessionId [src_id|d3c3ee39-4a7dbcbb-5cb43aa0-a821b671_0] MessageGroupId [src_id] Got error. Status: CLIENT_CANCELLED, Description:
: Error: GRpc error: (1): Cancelled on the server side 2026-01-04T02:35:15.778603Z :ERROR: [/Root] TraceId [] SessionId [src_id|d3c3ee39-4a7dbcbb-5cb43aa0-a821b671_0] MessageGroupId [src_id] Write session will not restart after a fatal error 2026-01-04T02:35:15.778640Z :INFO: [/Root] TraceId [] SessionId [src_id|d3c3ee39-4a7dbcbb-5cb43aa0-a821b671_0] MessageGroupId [src_id] Write session will now close 2026-01-04T02:35:15.778712Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d3c3ee39-4a7dbcbb-5cb43aa0-a821b671_0] MessageGroupId [src_id] Write session: aborting 2026-01-04T02:35:15.777928Z node 3 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 4 sessionId: src_id|d3c3ee39-4a7dbcbb-5cb43aa0-a821b671_0 grpc read done: success: 0 data: 2026-01-04T02:35:15.777959Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 4 sessionId: src_id|d3c3ee39-4a7dbcbb-5cb43aa0-a821b671_0 grpc read failed 2026-01-04T02:35:15.778864Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [3:7591329335338534295:3261] destroyed 2026-01-04T02:35:15.777983Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 4 sessionId: src_id|d3c3ee39-4a7dbcbb-5cb43aa0-a821b671_0 grpc closed 2026-01-04T02:35:15.778914Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-04T02:35:15.777998Z node 3 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 4 sessionId: src_id|d3c3ee39-4a7dbcbb-5cb43aa0-a821b671_0 is DEAD 2026-01-04T02:35:15.778952Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:35:15.778968Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:35:15.778412Z node 3 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:35:15.778982Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:35:15.778997Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:35:15.779011Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:35:15.799481Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:35:15.799516Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:35:15.799532Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:35:15.799551Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:35:15.799567Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:35:15.816431Z :DEBUG: [/Root] TraceId [] SessionId [src_id|d3c3ee39-4a7dbcbb-5cb43aa0-a821b671_0] MessageGroupId [src_id] Write session: destroy 2026-01-04T02:35:15.899620Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:35:15.899675Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:35:15.899691Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:35:15.899713Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:35:15.899727Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:35:16.002023Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:35:16.002062Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:35:16.002079Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:35:16.002101Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:35:16.002116Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:35:16.101904Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:35:16.101936Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:35:16.101948Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:35:16.101963Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:35:16.101974Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |88.6%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> TSchemeShardTest::AlterFamilyWithMultipleStoragePools [GOOD] >> LocalPartition::WithoutPartitionUnknownEndpoint [GOOD] >> LocalPartition::WithoutPartitionPartitionRelocation >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> KqpRboYql::Filter+ColumnStore >> THiveTest::DrainWithHiveRestart [GOOD] >> THiveTest::PipeAlivenessOfDeadTablet >> THiveTest::TestUpdateTabletsObjectUpdatesMetrics [GOOD] >> THiveTest::TestServerlessComputeResourcesMode ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::AlterFamilyWithMultipleStoragePools [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:34:11.636856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, 
InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:11.636930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:11.636975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:11.637008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:11.637036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:11.637057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:11.637103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:11.637183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:11.638192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:11.638402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:11.733322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:11.733381Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:11.755208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:11.759483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:11.759672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:11.765468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:11.765777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:11.766533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:11.766786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:11.769601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:11.769831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:11.771268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:11.771329Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:11.771436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:11.771493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:11.771533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:11.772388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:11.942095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:11.943215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:11.943339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:11.943425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:11.943532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:11.943611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:11.943709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:11.943806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:11.943884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:11.943972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:11.944036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:11.944158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:11.944232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:11.944302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:11.944376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... rd__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-04T02:35:19.944995Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-04T02:35:19.945510Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:35:19.945597Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:35:19.947749Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:35:19.947887Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:35:19.947940Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:35:19.948024Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-04T02:35:19.948093Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:35:19.948651Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:35:19.948717Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:35:19.948739Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:35:19.948763Z node 17 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:35:19.948792Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:35:19.948856Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-04T02:35:19.950056Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1917 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-04T02:35:19.950089Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:35:19.950207Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1917 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-04T02:35:19.950348Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1917 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 2026-01-04T02:35:19.951345Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 740 RawX2: 73014446757 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:35:19.951406Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:35:19.951557Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 740 RawX2: 73014446757 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:35:19.951637Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:35:19.951771Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 740 RawX2: 73014446757 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 
Generation: 2 2026-01-04T02:35:19.951868Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:35:19.951925Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:35:19.951982Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:35:19.952055Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-04T02:35:19.954065Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:35:19.954235Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:35:19.956139Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:35:19.956362Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:35:19.956875Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:35:19.956941Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:35:19.957164Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:35:19.957232Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:35:19.957296Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:35:19.957356Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:35:19.957421Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:35:19.957542Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [17:706:2695] message: TxId: 102 2026-01-04T02:35:19.957628Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:35:19.957697Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:35:19.957752Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:35:19.957954Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path 
for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:35:19.960306Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:35:19.960388Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [17:768:2745] TestWaitNotification: OK eventTxId 102 TestModificationResults wait txId: 103 2026-01-04T02:35:19.964940Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table1" Columns { Name: "value" FamilyName: "new_family" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:35:19.965334Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/Table1, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:35:19.965741Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Column families require StorageConfig specification, at schemeshard: 72057594046678944 2026-01-04T02:35:19.971388Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Column families require StorageConfig specification" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:35:19.971814Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column families require StorageConfig specification, operation: ALTER TABLE, path: /MyRoot/Table1 TestModificationResult got TxId: 103, wait until txId: 103 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> THiveTest::PipeAlivenessOfDeadTablet [GOOD] >> THiveTest::TestAsyncReassign ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::CancelBuildUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:28:52.557119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:28:52.557250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:28:52.557301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:28:52.557345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:28:52.557387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type 
TxSplitTablePartition, limit 10000 2026-01-04T02:28:52.557422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:28:52.557532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:28:52.557642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:28:52.559177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:28:52.559578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:28:52.678002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:28:52.678086Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:28:52.730561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:28:52.730875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:28:52.731113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:28:52.764401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:28:52.764674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:28:52.765555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:28:52.765871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:28:52.776771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:52.777001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:28:52.778636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:28:52.778721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:28:52.778902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:28:52.778968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:28:52.779103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:28:52.779396Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:28:53.101174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:53.106258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:53.106471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:53.106576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:53.106681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:53.106794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:53.106867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:53.106982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:53.107068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:53.107142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:53.107220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:53.107290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:53.107390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:53.107536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:28:53.107639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... fy, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-01-04T02:34:57.486733Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:128:2153] message: TxId: 281474976710760 2026-01-04T02:34:57.486794Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-04T02:34:57.486840Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-01-04T02:34:57.486877Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710760:0 2026-01-04T02:34:57.486955Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 13 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-01-04T02:34:57.489379Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-01-04T02:34:57.489449Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710760 2026-01-04T02:34:57.489519Z node 6 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2026-01-04T02:34:57.489666Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalUnique, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [6:1568:3430], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-01-04T02:34:57.491698Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking 2026-01-04T02:34:57.491856Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalUnique, IndexName: index1, IndexColumn: index, State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 
1, Issue: , SubscribersCount: 1, CreateSender: [6:1568:3430], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:34:57.491931Z node 6 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2026-01-04T02:34:57.493907Z node 6 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled 2026-01-04T02:34:57.494057Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Cancelled TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalUnique, IndexName: index1, IndexColumn: index, State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [6:1568:3430], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:34:57.494110Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2026-01-04T02:34:57.494290Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:34:57.494361Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:1667:3518] TestWaitNotification: OK eventTxId 102 2026-01-04T02:34:57.498930Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2026-01-04T02:34:57.499240Z node 6 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: 
NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 State: STATE_CANCELLED Settings { source_path: "/MyRoot/Table" index { name: "index1" index_columns: "index" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2026-01-04T02:34:57.503372Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:34:57.503697Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 352us result status StatusSuccess 2026-01-04T02:34:57.504227Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 11 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2026-01-04T02:34:57.508336Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/index1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:34:57.508659Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/index1" took 331us result status StatusPathDoesNotExist 2026-01-04T02:34:57.508872Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/index1\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeTableIndex, state: EPathStateNotExist), drop stepId: 5000041, drop txId: 281474976710759" Path: "/MyRoot/Table/index1" PathId: 39 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> KqpRboPg::Filter >> THiveTest::TestLockTabletExecutionTimeout [GOOD] >> THiveTest::TestLockTabletExecutionReconnect >> THiveTest::TestHiveBalancerWithPrefferedDC2 [GOOD] >> THiveTest::TestHiveBalancerWithPreferredDC3 >> THiveTest::TestAsyncReassign [GOOD] >> THiveTest::TestAlterFollower >> KqpRboPg::Select [GOOD] >> KqpRboPg::TestCrossInnerJoin+ColumnStore >> THiveTest::TestServerlessComputeResourcesMode [GOOD] >> THiveTest::TestSkipBadNode >> TTxDataShardMiniKQL::CrossShard_2_SwapAndCopy [GOOD] >> TTxDataShardMiniKQL::CrossShard_3_AllToOne >> KqpRboPg::ConstantFolding >> TTxAllocatorClientTest::AllocateOverTheEdge >> KqpRboPg::TestCrossInnerJoin-ColumnStore [GOOD] >> KqpRboPg::UnionAll >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] >> THiveTest::TestLockTabletExecutionReconnect [GOOD] >> THiveTest::TestLockTabletExecutionReconnectExpire ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::AllocateOverTheEdge [GOOD] Test command err: 2026-01-04T02:35:25.009114Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-04T02:35:25.009710Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-04T02:35:25.010641Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-04T02:35:25.012596Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:35:25.013169Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-04T02:35:25.024708Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:35:25.024851Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:35:25.024982Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-04T02:35:25.025121Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:35:25.025218Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:35:25.025316Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-04T02:35:25.025449Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-04T02:35:25.026264Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-01-04T02:35:25.026801Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:35:25.026880Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:35:25.027001Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2026-01-04T02:35:25.027066Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 5000 2026-01-04T02:35:25.027258Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-04T02:35:25.027444Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-04T02:35:25.027631Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-04T02:35:25.027854Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. 
Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-04T02:35:25.027991Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-01-04T02:35:25.028462Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:35:25.028544Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:35:25.028638Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 5000 Reserved to# 10000 2026-01-04T02:35:25.028689Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 5000 to# 10000 2026-01-04T02:35:25.028875Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-04T02:35:25.029037Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-04T02:35:25.029219Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 2500 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-04T02:35:25.029500Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. Requested: 1000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 2026-01-04T02:35:25.029638Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-01-04T02:35:25.030113Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:35:25.030209Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:35:25.030293Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 10000 Reserved to# 15000 2026-01-04T02:35:25.030334Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 10000 to# 15000 2026-01-04T02:35:25.030585Z node 1 :TX_ALLOCATOR_CLIENT WARN: client.cpp:38: AllocateTxIds: requested many txIds. Just a warning, request is processed. 
Requested: 3000 TxAllocators count: 1 RequestPerAllocator: 5000 MaxCapacity: 5000 BatchAllocationWarning: 500 >> THiveTest::TestSkipBadNode [GOOD] >> THiveTest::TestStopTenant >> THiveTest::TestHiveBalancerWithPreferredDC3 [GOOD] >> THiveTest::TestHiveBalancerWithFollowers |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_1_Query [GOOD] >> THiveTest::TestAlterFollower [GOOD] >> THiveTest::TestBootProgress >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Table [GOOD] >> THiveTest::TestDeleteOwnerTabletsMany [GOOD] >> THiveTest::TestDeleteTabletWithFollowers >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Table |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |88.6%| [LD] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/schemeshard/ut_view/ydb-core-tx-schemeshard-ut_view >> THiveTest::TestBootProgress [GOOD] >> THiveTest::TestBridgeCreateTablet >> THiveTest::TestStopTenant [GOOD] >> THiveTest::TestTabletAvailability >> THiveTest::TestLockTabletExecutionReconnectExpire [GOOD] >> THiveTest::TestLockTabletExecutionStealLock >> KqpRboPg::Filter [GOOD] >> KqpRboPg::OrderBy |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest >> KqpRboYql::OlapPredicatePushdown [GOOD] >> KqpRboYql::OrderBy >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_4_Query [GOOD] >> THiveTest::TestTabletAvailability [GOOD] >> THiveTest::TestSetDomain >> TxUsage::WriteToTopic_Demo_46_Table [GOOD] >> THiveTest::TestDeleteTabletWithFollowers [GOOD] >> THiveTest::TestCreateTabletBeforeLocal >> KqpRboPg::ConstantFolding [GOOD] >> KqpRboPg::ExpressionSubquery >> THiveTest::TestLockTabletExecutionStealLock [GOOD] >> THiveTest::TestProgressWithMaxTabletsScheduled >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Table >> TxUsage::WriteToTopic_Demo_46_Query >> KqpRboPg::UnionAll [GOOD] >> KqpRboYql::Aggregation+ColumnStore >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Query >> THiveTest::TestCreateTabletBeforeLocal [GOOD] >> TStorageBalanceTest::TestScenario1 [GOOD] >> TStorageBalanceTest::TestScenario2 >> THiveTest::TestCreateTabletReboots >> THiveTest::TestSetDomain [GOOD] >> THiveTest::TestSetDomainAlready |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest >> THiveTest::TestDrainWithMaxTabletsScheduled [GOOD] >> THiveTest::TestFollowers >> TxUsage::WriteToTopic_Demo_22_RestartBeforeCommit_Query [GOOD] >> THiveTest::TestSetDomainAlready [GOOD] >> THiveTest::TestSetDomainError >> THiveTest::TestBridgeCreateTablet [GOOD] >> THiveTest::TestBridgeDisconnect >> BasicUsage::AlterTopicWithSharedConsumer_DeleteDeadLetterPolicy_AlterMoveDeadLetterPolicy [GOOD] >> BasicUsage::AlterDeadLetterPolicy_StreamingConsumer >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Table >> TxUsage::WriteToTopic_Demo_38_Table [GOOD] >> KqpRboYql::Filter+ColumnStore [GOOD] >> KqpRboYql::Filter-ColumnStore >> THiveTest::TestProgressWithMaxTabletsScheduled [GOOD] >> THiveTest::TestResetServerlessComputeResourcesMode >> THiveTest::TestSetDomainError [GOOD] >> THiveTest::TestTabletsStartingCounter |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |88.6%| [TM] {BAZEL_UPLOAD} 
ydb/core/tx/tx_allocator_client/ut/unittest >> KqpRboPg::OrderBy [GOOD] >> KqpRboPg::LeftJoinToKqpOpJoin >> THiveTest::TestFollowers [GOOD] >> THiveTest::TestFollowersReconfiguration >> THiveTest::TestHiveNoBalancingWithLowResourceUsage [GOOD] >> THiveTest::TestLockTabletExecution >> TxUsage::WriteToTopic_Demo_38_Query >> TxUsage::Sinks_Oltp_WriteToTopics_1_Table [GOOD] >> ColumnStatistics::EqWidthHistogramSmallParamTypes [GOOD] >> KqpRboPg::ExpressionSubquery [GOOD] >> KqpRboPg::FallbackToYqlEnabled >> THiveTest::TestTabletsStartingCounter [GOOD] >> THiveTest::TestTabletsStartingCounterExternalBoot >> TxUsage::Sinks_Oltp_WriteToTopics_1_Query >> THiveTest::TestBridgeDisconnect [GOOD] >> THiveTest::TestBridgeDisconnectWithReboots |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |88.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_minstep/ydb-core-tx-datashard-ut_minstep >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Table [GOOD] >> THiveTest::TestResetServerlessComputeResourcesMode [GOOD] >> THiveTest::TestReassignNonexistentTablet >> THiveTest::TestTabletsStartingCounterExternalBoot [GOOD] >> THiveTest::TestCreateTabletReboots [GOOD] >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk >> THiveTest::TestFollowersReconfiguration [GOOD] >> THiveTest::TestFollowerPromotion |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::EqWidthHistogramSmallParamTypes [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:29.232242Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:29.328428Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:29.336698Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:29.337075Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:29.337263Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:29.773198Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:29.883354Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:29.883523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:29.933210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:30.030238Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:30.733477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:30.734768Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:30.734818Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:30.734850Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:30.735248Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:30.804481Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:31.379303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:32:35.083087Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:35.092733Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:32:35.097590Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:35.133501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:32:35.133622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:35.162399Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:35.163967Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:35.333514Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:35.334385Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:35.335187Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:35.336044Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:35.336320Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:35.336499Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:35.336643Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:35.336770Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:35.336822Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:35.513062Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:32:35.524819Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:35.524940Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:35.584797Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:35.724361Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:35.783674Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:32:35.783797Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:32:35.815106Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:32:35.815409Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:32:35.815654Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:32:35.815742Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:32:35.815806Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:32:35.815876Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:32:35.815938Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:32:35.815995Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:32:35.816794Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:32:35.966554Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2223:2620] 2026-01-04T02:32:35.966919Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:32:35.968691Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Describe result: PathErrorUnknown 2026-01-04T02:32:35.968747Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Creating table 2026-01-04T02:32:35.968820Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:35.972117Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2627], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:35.975500Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2234:2625] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:32:35.978983Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:32:36.045598Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:32:36.064801Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:36.064895Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2302:2660], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:36.078710Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2328:2683] 2026-01-04T02:32:36.080210Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2328:2683], schemeshard id = 72075186224037897 2026-01-04T02:32:36.551723Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:36.553966Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2594:2886], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:36.557585Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:36.561453Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:32:36.561567Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on create table tx: 28 ... type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:12.127951Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4663:4126]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:33:12.128179Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:33:12.128241Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4665:4128] 2026-01-04T02:33:12.128299Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4665:4128] 2026-01-04T02:33:12.128579Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4666:4129] 2026-01-04T02:33:12.128667Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4666:4129], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:33:12.128725Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:33:12.128814Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4665:4128], server id = [2:4666:4129], tablet id = 72075186224037894, status = OK 2026-01-04T02:33:12.128875Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:33:12.128953Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4663:4126], StatRequests.size() = 1 2026-01-04T02:33:12.129025Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:34:15.537426Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4516:2468], ActorId: [2:4526:4049], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:34:15.537724Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4516:2468], ActorId: [2:4526:4049], Start read next stream part 2026-01-04T02:34:15.556917Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3dkz4w20p5094hzb9ny5j5", SessionId: 
ydb://session/3?node_id=2&id=YzRiZjhlOC0xNmM4NjNkNi1iMjQ2YmViOS0xMmRhMTQ2NA==, Slow query, duration: 63.971938s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value),min(Value),max(Value) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-04T02:34:15.557628Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4516:2468], ActorId: [2:4862:4248], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:34:15.573945Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32310, txId: 18446744073709551615] shutting down 2026-01-04T02:34:15.574115Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4516:2468], ActorId: [2:4862:4248], RunStreamQuery with text: $f1 = ($p0,$p1,$p2) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::EWHMerge, StatisticsInternal::EWHFinalize, StatisticsInternal::EWHSerialize, StatisticsInternal::EWHDeserialize, ) }; $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)),AGGREGATE_BY(Value,$f1(2,-1l,1l)) FROM `/Root/Database/Table1` 2026-01-04T02:34:15.574252Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4516:2468], ActorId: [2:4862:4248], Start read next stream part 2026-01-04T02:34:15.591831Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4516:2468], ActorId: [2:4526:4049], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:34:15.598169Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4516:2468], ActorId: [2:4526:4049], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OGY1ZGZmZDQtZDBmZDg3ODEtY2IwNDAzYWItZWQzMDlmZTQ=, TxId: 2026-01-04T02:34:15.996298Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4889:4271]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:34:15.996604Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:34:15.996652Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4889:4271], StatRequests.size() = 1 2026-01-04T02:35:31.836552Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4516:2468], ActorId: [2:4862:4248], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:35:31.836768Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4516:2468], ActorId: [2:4862:4248], Start read next stream part 2026-01-04T02:35:31.837797Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3dnxmpak79rcdb8jpbfr8r", SessionId: ydb://session/3?node_id=2&id=MjE3MDIyNzItODA5OTdlY2EtZGY4MzI4N2UtNDUxMTRkNTQ=, Slow query, duration: 76.238406s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, 
results: 0b, text: "$f1 = ($p0,$p1,$p2) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::EWHMerge,\n StatisticsInternal::EWHFinalize,\n StatisticsInternal::EWHSerialize,\n StatisticsInternal::EWHDeserialize,\n)\n};\n$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)),AGGREGATE_BY(Value,$f1(2,-1l,1l)) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-04T02:35:31.840922Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5066:4373], ActorId: [2:5067:4374], Starting query actor #1 [2:5068:4375] 2026-01-04T02:35:31.841011Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5067:4374], ActorId: [2:5068:4375], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:35:31.844331Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32380, txId: 18446744073709551615] shutting down 2026-01-04T02:35:31.845553Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5067:4374], ActorId: [2:5068:4375], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NWI5Zjg3NjUtYzQxMmIyMTYtZDBlYjllYWItNTg3ODFhM2U=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:35:31.846725Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4516:2468], ActorId: [2:4862:4248], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:35:31.846787Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4516:2468], ActorId: [2:4862:4248], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTg3YjRkNzUtZWM1MjQ4NTctNjI2MzA4YWEtZDNlYjVlNw==, TxId: 2026-01-04T02:35:31.937453Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5085:4389]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:35:31.937726Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:35:31.937764Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:5085:4389], StatRequests.size() = 1 2026-01-04T02:35:32.116350Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5067:4374], ActorId: [2:5068:4375], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWI5Zjg3NjUtYzQxMmIyMTYtZDBlYjllYWItNTg3ODFhM2U=, 
TxId: 2026-01-04T02:35:32.116457Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5067:4374], ActorId: [2:5068:4375], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWI5Zjg3NjUtYzQxMmIyMTYtZDBlYjllYWItNTg3ODFhM2U=, TxId: 2026-01-04T02:35:32.116864Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5066:4373], ActorId: [2:5067:4374], Got response [2:5068:4375] SUCCESS 2026-01-04T02:35:32.117285Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:35:32.170054Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:35:32.170146Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3585:3467] 2026-01-04T02:35:32.171071Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:5103:4399]], StatType[ 3 ], StatRequestsCount[ 1 ] 2026-01-04T02:35:32.171532Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-04T02:35:32.171607Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:35:32.171984Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-04T02:35:32.172081Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 4 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:35:32.172153Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 3 ], ColumnTag[ 2 ] 2026-01-04T02:35:32.180597Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> THiveTest::TestLockTabletExecution [GOOD] >> THiveTest::TestLockTabletExecutionBadOwner |88.6%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |88.6%| [LD] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp |88.6%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/tx/datashard/ut_kqp/ydb-core-tx-datashard-ut_kqp >> THiveTest::TestCreateTabletWithWrongSPoolsAndReassignGroupsFailButDeletionIsOk [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestTabletsStartingCounterExternalBoot [GOOD] Test command err: 2026-01-04T02:34:54.702134Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:34:54.734770Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { 
VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:34:54.735092Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:34:54.735992Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:34:54.736407Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:34:54.737440Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:49:2075] ControllerId# 72057594037932033 2026-01-04T02:34:54.737488Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:34:54.737598Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-04T02:34:54.737758Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:34:54.749654Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:34:54.749719Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:34:54.752975Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:58:2079] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.753184Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:59:2080] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.753351Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:60:2081] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.753479Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:61:2082] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.753616Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:62:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.753741Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:63:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.753921Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:64:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.753956Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:34:54.754075Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:49:2075] 2026-01-04T02:34:54.754116Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:49:2075] 2026-01-04T02:34:54.754226Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-04T02:34:54.754344Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-04T02:34:54.754838Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:34:54.757791Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 
2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:34:54.757984Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:34:54.758357Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:34:54.758701Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-04T02:34:54.759914Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-04T02:34:54.759991Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:34:54.760924Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:73:2077] ControllerId# 72057594037932033 2026-01-04T02:34:54.760960Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:34:54.761033Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-04T02:34:54.761186Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:34:54.775000Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:34:54.775081Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:34:54.777010Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:81:2082] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.777176Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:82:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.777314Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:83:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.777493Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:84:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.777662Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:85:2086] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.777845Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:86:2087] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.778015Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:87:2088] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.778044Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:34:54.778109Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:73:2077] 2026-01-04T02:34:54.778138Z node 
1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:73:2077] 2026-01-04T02:34:54.778179Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-04T02:34:54.778224Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-04T02:34:54.779268Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-04T02:34:54.779514Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:34:54.781507Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:73:2077] 2026-01-04T02:34:54.781580Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:34:54.781744Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-04T02:34:54.781926Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:34:54.782357Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:49:2075] 2026-01-04T02:34:54.782406Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:34:54.782711Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:34:54.782754Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-04T02:34:54.793397Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:34:54.795867Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:34:54.796069Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:99:2093] 2026-01-04T02:34:54.796110Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:99:2093] 2026-01-04T02:34:54.796269Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:34:54.796625Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:34:54.796803Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:34:54.796942Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:73:2077] 2026-01-04T02:34:54.796984Z node 1 :BS_NODE DEBUG: 
{NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 1 2026-01-04T02:34:54.797021Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-04T02:34:54.797197Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [2:102:2089] 2026-01-04T02:34:54.797256Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [2:102:2089] 2026-01-04T02:34:54.797329Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-04T02:34:54.79 ... age_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:35:34.691934Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-04T02:35:34.692042Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-04T02:35:34.692077Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-04T02:35:34.692141Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:34.692231Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:34.692270Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:34.692642Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [25:325:2304] 2026-01-04T02:35:34.692704Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [25:325:2304] 2026-01-04T02:35:34.692870Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:34.693015Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:35:34.693150Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-04T02:35:34.693224Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-04T02:35:34.693281Z node 25 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-04T02:35:34.693347Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:34.693426Z node 25 :STATESTORAGE DEBUG: 
statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:34.693466Z node 25 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:34.693552Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [0:0:0] followers: 0 2026-01-04T02:35:34.693653Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [25:325:2304] 2026-01-04T02:35:34.693714Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [25:325:2304] 2026-01-04T02:35:34.694128Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [25:328:2306] 2026-01-04T02:35:34.694189Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [25:328:2306] 2026-01-04T02:35:34.694594Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [25:274:2265] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:34.694686Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 25 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [25:274:2265] 2026-01-04T02:35:34.694798Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [25:328:2306] 2026-01-04T02:35:34.694888Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [25:328:2306] 2026-01-04T02:35:34.694973Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [25:328:2306] 2026-01-04T02:35:34.695052Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [25:328:2306] 2026-01-04T02:35:34.695184Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [25:328:2306] 2026-01-04T02:35:34.695336Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [25:328:2306] 2026-01-04T02:35:34.695411Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [25:328:2306] 2026-01-04T02:35:34.695470Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [25:328:2306] 2026-01-04T02:35:34.695547Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [25:328:2306] 2026-01-04T02:35:34.695613Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [25:328:2306] 2026-01-04T02:35:34.695701Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [25:327:2305] EventType# 268697624 2026-01-04T02:35:34.695979Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} queued, type NKikimr::NHive::TTxStartTablet 2026-01-04T02:35:34.696080Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} 
Tx{6, NKikimr::NHive::TTxStartTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:35:34.696341Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} hope 1 -> done Change{6, redo 144b alter 0b annex 0, ~{ 1, 16 } -{ }, 0 gb} 2026-01-04T02:35:34.696442Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{6, NKikimr::NHive::TTxStartTablet} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:35:34.707794Z node 25 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [aeed6b7f2709b4c0] bootstrap ActorId# [25:331:2309] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:5:0:0:126:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-04T02:35:34.708007Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [aeed6b7f2709b4c0] Id# [72057594037927937:2:5:0:0:126:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:35:34.708097Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [aeed6b7f2709b4c0] restore Id# [72057594037927937:2:5:0:0:126:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-04T02:35:34.708201Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [aeed6b7f2709b4c0] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:5:0:0:126:1] Marker# BPG33 2026-01-04T02:35:34.708288Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [aeed6b7f2709b4c0] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:5:0:0:126:1] Marker# BPG32 2026-01-04T02:35:34.708507Z node 25 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [25:40:2083] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:5:0:0:126:1] FDS# 126 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:35:34.710581Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [aeed6b7f2709b4c0] received {EvVPutResult Status# OK ID# [72057594037927937:2:5:0:0:126:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 20 } Cost# 80992 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 21 }}}} from# [0:1:0:0:0] Marker# BPP01 2026-01-04T02:35:34.710735Z node 25 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [aeed6b7f2709b4c0] Result# TEvPutResult {Id# [72057594037927937:2:5:0:0:126:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2026-01-04T02:35:34.710842Z node 25 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [aeed6b7f2709b4c0] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:5:0:0:126:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-04T02:35:34.711109Z node 25 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:5:0:0:126:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-04T02:35:34.711274Z node 25 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:6} commited cookie 1 for step 5 2026-01-04T02:35:34.711713Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [25:333:2311] 2026-01-04T02:35:34.711775Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [25:333:2311] 2026-01-04T02:35:34.711894Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle 
TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [25:274:2265] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:34.711971Z node 25 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 25 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [25:274:2265] 2026-01-04T02:35:34.712075Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [25:333:2311] 2026-01-04T02:35:34.712141Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [25:333:2311] 2026-01-04T02:35:34.712212Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [25:333:2311] 2026-01-04T02:35:34.712296Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [25:333:2311] 2026-01-04T02:35:34.712443Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [25:333:2311] 2026-01-04T02:35:34.712627Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [25:333:2311] 2026-01-04T02:35:34.712703Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [25:333:2311] 2026-01-04T02:35:34.712759Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [25:333:2311] 2026-01-04T02:35:34.712847Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [25:333:2311] 2026-01-04T02:35:34.712907Z node 25 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [25:333:2311] 2026-01-04T02:35:34.712989Z node 25 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [25:332:2310] EventType# 268830214 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> THiveTest::TestReassignNonexistentTablet [GOOD] >> THiveTest::TestLockedTabletsMustNotRestart >> TTxDataShardMiniKQL::CrossShard_3_AllToOne [GOOD] >> TTxDataShardMiniKQL::CrossShard_4_OneToAll >> THiveTest::TestCreateTabletAndReassignGroups3 [GOOD] >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots >> THiveTest::TestLockTabletExecutionBadOwner [GOOD] >> THiveTest::TestLockTabletExecutionRebootTimeout >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Query |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest >> KqpRboYql::Filter-ColumnStore [GOOD] >> KqpRboYql::ExpressionSubquery >> THiveTest::TestFollowerPromotion [GOOD] >> THiveTest::TestFollowerPromotionFollowerDies >> THiveTest::TestLockedTabletsMustNotRestart [GOOD] >> TGRpcStreamingTest::WriteAndFinishWorks >> KqpRboPg::LeftJoinToKqpOpJoin [GOOD] >> KqpRboPg::FiveJoinsCBO >> Channels20::IcChannelLateBinding >> THiveTest::TestHiveBalancerWithFollowers [GOOD] >> THiveTest::TestHiveBalancerWithLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLockedTabletsMustNotRestart [GOOD] Test command err: 2026-01-04T02:35:02.668407Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:35:02.765053Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} 
ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:35:02.785582Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:35:02.786583Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:35:02.786917Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:35:02.787875Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:49:2075] ControllerId# 72057594037932033 2026-01-04T02:35:02.787919Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:35:02.788008Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-04T02:35:02.788158Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:35:02.827959Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:35:02.828015Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:35:02.843320Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:58:2079] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:02.843502Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:59:2080] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:02.843641Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:60:2081] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:02.843776Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:61:2082] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:02.843899Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:62:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:02.844011Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:63:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:02.844169Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:64:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:02.844200Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:35:02.844297Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:49:2075] 2026-01-04T02:35:02.844330Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:49:2075] 2026-01-04T02:35:02.844387Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected 
Marker# DSP42 2026-01-04T02:35:02.844458Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-04T02:35:02.844836Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:35:02.859858Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:35:02.860009Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:35:02.860372Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:35:02.860632Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-04T02:35:02.861718Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-04T02:35:02.861777Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:35:02.870137Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:73:2077] ControllerId# 72057594037932033 2026-01-04T02:35:02.870185Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:35:02.870260Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-04T02:35:02.870399Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:35:02.922991Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:35:02.923060Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:35:02.924865Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:81:2082] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:02.925022Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:82:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:02.925148Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:83:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:02.925318Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:84:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:02.925467Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:85:2086] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:02.925625Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:86:2087] targetNodeId# 1 Marker# DSP01 
2026-01-04T02:35:02.925759Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:87:2088] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:02.925785Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:35:02.938012Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:73:2077] 2026-01-04T02:35:02.938065Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:73:2077] 2026-01-04T02:35:02.938109Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-04T02:35:02.938156Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-04T02:35:02.939176Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-04T02:35:02.939384Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:02.939572Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:73:2077] 2026-01-04T02:35:02.939619Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:35:02.939741Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-04T02:35:02.939848Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:02.946152Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:49:2075] 2026-01-04T02:35:02.946241Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:35:02.946556Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:35:02.946594Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-04T02:35:02.986877Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:35:02.988970Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:35:02.989104Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:99:2093] 2026-01-04T02:35:02.989135Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:99:2093] 2026-01-04T02:35:02.989251Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:35:02.989522Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 
2026-01-04T02:35:02.989657Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:02.989833Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:73:2077] 2026-01-04T02:35:02.989874Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 1 2026-01-04T02:35:02.989907Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-04T02:35:02.990096Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [2:102:2089] 2026-01-04T02:35:02.990147Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [2:102:2089] 2026-01-04T02:35:02.990201Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-04T02:35:02.99 ... TabletID: 72075186224037888 ProposedGeneration: 2 ProposedStep: 0} 2026-01-04T02:35:37.970847Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{29, NKikimr::NHive::TTxUpdateTabletStatus} queued, type NKikimr::NHive::TTxUpdateTabletStatus 2026-01-04T02:35:37.970889Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{29, NKikimr::NHive::TTxUpdateTabletStatus} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:35:37.971305Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{29, NKikimr::NHive::TTxUpdateTabletStatus} hope 1 -> done Change{18, redo 164b alter 0b annex 0, ~{ 1 } -{ }, 0 gb} 2026-01-04T02:35:37.971357Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:11} Tx{29, NKikimr::NHive::TTxUpdateTabletStatus} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:35:37.971609Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{30, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-01-04T02:35:37.971683Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{30, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:35:37.971789Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{30, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{19, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T02:35:37.971869Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{30, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:35:37.972270Z node 28 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [5f02f1e02bd9be6f] received {EvVPutResult Status# OK ID# [72075186224037888:2:1:1:28672:89:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 1 } Cost# 80700 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 2 }}}} from# [80000001:1:0:0:0] Marker# BPP01 2026-01-04T02:35:37.972352Z node 28 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [5f02f1e02bd9be6f] Result# TEvPutResult {Id# [72075186224037888:2:1:1:28672:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 2147483649 Marker# BPP12 2026-01-04T02:35:37.972396Z node 28 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [5f02f1e02bd9be6f] 
SendReply putResult# TEvPutResult {Id# [72075186224037888:2:1:1:28672:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-04T02:35:37.972555Z node 28 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72075186224037888:2:1:1:28672:89:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-04T02:35:37.972678Z node 28 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72075186224037888 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-04T02:35:37.972798Z node 28 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:2:2} commited cookie 2 for step 1 2026-01-04T02:35:37.972997Z node 28 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:179: [b0b963de30650cd1] bootstrap ActorId# [28:547:2210] Group# 2147483648 TabletId# 72075186224037888 Channel# 0 RecordGeneration# 2 PerGenerationCounter# 1 Deadline# 586524-01-19T08:01:49.551615Z CollectGeneration# 2 CollectStep# 0 Collect# true Hard# false IgnoreBlock# false RestartCounter# 0 Marker# DSPC03 2026-01-04T02:35:37.973106Z node 28 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [28:490:2163] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[72075186224037888:2:1:0] collect=[2:0] cookie# 0 2026-01-04T02:35:37.973295Z node 28 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:179: [4d73cb573d4ef239] bootstrap ActorId# [28:548:2211] Group# 2147483649 TabletId# 72075186224037888 Channel# 1 RecordGeneration# 2 PerGenerationCounter# 1 Deadline# 586524-01-19T08:01:49.551615Z CollectGeneration# 2 CollectStep# 0 Collect# true Hard# false IgnoreBlock# false RestartCounter# 0 Marker# DSPC03 2026-01-04T02:35:37.973339Z node 28 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:182: [4d73cb573d4ef239] Keep# [72075186224037888:1:2:1:8192:289:0] Marker# DSPC04 2026-01-04T02:35:37.973426Z node 28 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [28:509:2179] NKikimr::TEvBlobStorage::TEvVCollectGarbage# {TEvVCollectGarbage for [tablet:gen:cnt:channel]=[72075186224037888:2:1:1] collect=[2:0] Keep: [72075186224037888:1:2:1:8192:289:0] cookie# 0 2026-01-04T02:35:37.977207Z node 28 :BS_PROXY_COLLECT DEBUG: dsproxy_collect.cpp:45: [b0b963de30650cd1] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037888 RecordGeneration# 2 Channel# 0 VDisk# [80000000:1:0:0:0]} Marker# DSPC01 2026-01-04T02:35:37.977328Z node 28 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:113: [b0b963de30650cd1] Result# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 0 Status# OK} Marker# DSPC02 2026-01-04T02:35:37.978672Z node 28 :BS_PROXY_COLLECT DEBUG: dsproxy_collect.cpp:45: [4d73cb573d4ef239] received TEvVCollectGarbageResult# {EvVCollectGarbageResult Status# OK TabletId# 72075186224037888 RecordGeneration# 2 Channel# 1 VDisk# [80000001:1:0:0:0]} Marker# DSPC01 2026-01-04T02:35:37.978726Z node 28 :BS_PROXY_COLLECT INFO: dsproxy_collect.cpp:113: [4d73cb573d4ef239] Result# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 1 Status# OK} Marker# DSPC02 2026-01-04T02:35:37.979085Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [27:549:2317] 2026-01-04T02:35:37.979155Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [27:549:2317] 2026-01-04T02:35:37.979266Z 
node 27 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [27:331:2201] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:37.979338Z node 27 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 27 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [27:331:2201] 2026-01-04T02:35:37.979416Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [27:549:2317] 2026-01-04T02:35:37.979488Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [27:549:2317] 2026-01-04T02:35:37.979549Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [27:549:2317] 2026-01-04T02:35:37.979622Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [27:549:2317] 2026-01-04T02:35:37.979762Z node 27 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [27:549:2317] 2026-01-04T02:35:37.979909Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [27:549:2317] 2026-01-04T02:35:37.979971Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [27:549:2317] 2026-01-04T02:35:37.980042Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [27:549:2317] 2026-01-04T02:35:37.980112Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [27:549:2317] 2026-01-04T02:35:37.980163Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [27:549:2317] 2026-01-04T02:35:37.980244Z node 27 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [27:463:2297] EventType# 268959750 2026-01-04T02:35:37.980480Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{31, NKikimr::NHive::TTxSyncTablets} queued, type NKikimr::NHive::TTxSyncTablets 2026-01-04T02:35:37.980562Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{31, NKikimr::NHive::TTxSyncTablets} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:35:37.980674Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{31, NKikimr::NHive::TTxSyncTablets} hope 1 -> done Change{19, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T02:35:37.980750Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{31, NKikimr::NHive::TTxSyncTablets} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:35:37.980915Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{32, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-01-04T02:35:37.980996Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{32, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:35:37.981082Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{32, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{19, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T02:35:37.981151Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:12} Tx{32, 
NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:35:37.981497Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [27:552:2320] 2026-01-04T02:35:37.981548Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [27:552:2320] 2026-01-04T02:35:37.981647Z node 27 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [27:331:2201] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:37.981720Z node 27 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 27 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [27:331:2201] 2026-01-04T02:35:37.981792Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [27:552:2320] 2026-01-04T02:35:37.981905Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [27:552:2320] 2026-01-04T02:35:37.981964Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [27:552:2320] 2026-01-04T02:35:37.982033Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [27:552:2320] 2026-01-04T02:35:37.982142Z node 27 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [27:552:2320] 2026-01-04T02:35:37.982327Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [27:552:2320] 2026-01-04T02:35:37.982389Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [27:552:2320] 2026-01-04T02:35:37.982432Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [27:552:2320] 2026-01-04T02:35:37.982496Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [27:552:2320] 2026-01-04T02:35:37.982541Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [27:552:2320] 2026-01-04T02:35:37.982613Z node 27 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [27:551:2319] EventType# 268697616 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> THiveTest::TestFollowerPromotionFollowerDies [GOOD] >> THiveTest::TestFollowersCrossDC_Easy >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink >> KqpRboPg::TestCrossInnerJoin+ColumnStore [GOOD] >> KqpRboPg::PredicatePushdownLeftJoin >> KqpRboYql::OrderBy [GOOD] >> KqpRboYql::LeftJoins >> KqpRboPg::FallbackToYqlEnabled [GOOD] >> KqpRboPg::FallbackToYqlDisabled >> LocalPartition::WithoutPartitionPartitionRelocation [GOOD] >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission >> THiveTest::TestHiveBalancerWithLimit [GOOD] >> THiveTest::TestHiveBalancerIgnoreTablet >> KqpQueryPerf::IdxLookupJoin+QueryService >> THiveTest::TestCreateTabletAndReassignGroupsWithReboots [GOOD] >> THiveTest::TestDeleteTabletError >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Table [GOOD] >> TxUsage::WriteToTopic_Demo_46_Query [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Query ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WriteAndFinishWorks [GOOD] Test command err: 2026-01-04T02:35:38.480516Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329521359761724:2262];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:38.480691Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:38.781794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:38.781924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:38.808679Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:38.833129Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:38.843802Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:38.960681Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d0502a0e680] stream accepted Name# Session ok# true peer# ipv6:[::1]:34338 2026-01-04T02:35:38.961683Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d0502a0e680] facade attach Name# Session actor# [1:7591329521359762159:2394] peer# ipv6:[::1]:34338 2026-01-04T02:35:38.961711Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:404: [0x7d0502a0e680] facade write Name# Session data# peer# ipv6:[::1]:34338 2026-01-04T02:35:38.962075Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:399: [0x7d0502a0e680] facade write Name# Session data# peer# ipv6:[::1]:34338 grpc status# (0) message# 2026-01-04T02:35:38.962664Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7d0502a0e680] write finished Name# Session ok# true peer# ipv6:[::1]:34338 2026-01-04T02:35:38.963075Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7d0502a0e680] write finished Name# Session ok# true peer# ipv6:[::1]:34338 2026-01-04T02:35:38.963121Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7d0502a0e680] stream finished Name# Session ok# true peer# ipv6:[::1]:34338 grpc status# (0) message# 2026-01-04T02:35:38.963168Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d0502a0e680] stream done notification Name# Session ok# true peer# ipv6:[::1]:34338 2026-01-04T02:35:38.963225Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d0502a0e680] deregistering request Name# Session peer# ipv6:[::1]:34338 (finish done) 2026-01-04T02:35:38.963955Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:347: Received TEvWriteFinished, success = 1 2026-01-04T02:35:38.963964Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:347: Received TEvWriteFinished, success = 1 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest >> KqpRboPg::FiveJoinsCBO [GOOD] >> TxUsage::WriteToTopic_Demo_47_Table >> THiveTest::TestFollowersCrossDC_Easy [GOOD] >> THiveTest::TestFollowers_LocalNodeOnly >> THiveTest::TestDeleteTabletError [GOOD] >> THiveTest::TestDeleteTabletWithRestartAndRetry >> TxUsage::WriteToTopic_Demo_20_RestartBeforeCommit_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::FiveJoinsCBO [GOOD] Test command err: Trying to start YDB, gRPC: 17731, MsgBus: 1094 2026-01-04T02:35:23.005026Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329454524295172:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:23.005098Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:23.063555Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:35:23.314116Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:23.314212Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:23.319230Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:23.378379Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:23.430758Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:23.431983Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329450229327850:2081] 1767494123004302 != 1767494123004305 2026-01-04T02:35:23.510792Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:23.510813Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:23.510818Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:23.510882Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:23.639472Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:23.975801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:23.988403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:35:24.014467Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:26.098683Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329467409197925:2324], DatabaseId: /Root, PoolId: default, Failed 
to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:26.098830Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:26.099196Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329467409197935:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:26.099304Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:26.362815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:26.523230Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329467409198036:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:26.523344Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:26.523515Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329467409198041:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:26.523602Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329467409198043:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:26.523641Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:26.527149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:26.537635Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329467409198045:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:35:26.634843Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329467409198096:2602] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 29993, MsgBus: 3179 2026-01-04T02:35:27.999141Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329472847083087:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:27.999242Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:28.018234Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:28.120538Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:28.152163Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:28.152247Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:28.170542Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:28.295857Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:28.320675Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:28.320701Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:28.320708Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:28.320801Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:28.799657Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:28.807237Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:35:29.000499Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:31.253589Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329490026952912:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:31.253700Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:31.254118Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329490026952922:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:31.254190Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:31.280431Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:31.315351Z node 2 :FLAT_ ... ation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:36.798618Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329511610474507:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:36.798713Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:36.799197Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329511610474512:2364], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:36.799244Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329511610474513:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:36.799501Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:36.803629Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:36.816498Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591329511610474516:2366], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-04T02:35:36.913939Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591329511610474567:2756] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 32175, MsgBus: 2447 2026-01-04T02:35:38.423097Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591329519434657204:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:38.423157Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:38.451167Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:38.549039Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:38.561903Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591329519434657176:2081] 1767494138422608 != 1767494138422611 2026-01-04T02:35:38.564200Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:38.564293Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:38.585084Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:38.702496Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:38.702519Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:38.702526Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:38.702614Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:38.725911Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:39.188296Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:39.193082Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:35:39.438042Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:42.123393Z node 4 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329536614527242:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:42.123535Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:42.124069Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329536614527252:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:42.124150Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:42.183227Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:42.243181Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:42.281683Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:42.318821Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:42.358620Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:42.398190Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:42.512594Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329536614527698:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:42.512699Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:42.513170Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329536614527703:2375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:42.513219Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329536614527704:2376], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:42.513485Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:42.518399Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710664:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:42.534477Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591329536614527707:2377], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710664 completed, doublechecking } 2026-01-04T02:35:42.605858Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591329536614527759:2823] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::PredicatePushdownLeftJoin [GOOD] >> KqpRboPg::TPCH >> KqpQueryPerf::IndexInsert-QueryService-UseSink >> BasicUsage::AlterDeadLetterPolicy_StreamingConsumer [GOOD] >> BasicUsage::ConflictingWrites >> KqpRboPg::FallbackToYqlDisabled [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Table >> THiveTest::TestDeleteTabletWithRestartAndRetry [GOOD] >> THiveTest::TestCreateTabletChangeToExternal >> TContinuousBackupWithRebootsTests::Basic [GOOD] >> Channels20::IcChannelLateBinding [GOOD] >> Channels20::IcChannelAsyncRead >> THiveTest::TestHiveBalancerIgnoreTablet [GOOD] >> THiveTest::TestHiveBalancerNodeRestarts >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Table [GOOD] >> TxUsage::WriteToTopic_Demo_38_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::FallbackToYqlDisabled [GOOD] Test command err: Trying to start YDB, gRPC: 11476, MsgBus: 20735 2026-01-04T02:35:24.397546Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329458641094873:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:24.397785Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:24.794697Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:24.801645Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:24.801762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:24.845774Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329458641094751:2081] 1767494124381801 != 1767494124381804 2026-01-04T02:35:24.854231Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:24.857529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:24.930308Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:24.930354Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:24.930362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:24.930473Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:25.032155Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:25.404396Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:25.414328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:25.423662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:35:27.671415Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329471525997536:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:27.671545Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:27.671846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329471525997546:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:27.671925Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:27.988881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:28.192241Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329475820964947:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:28.192354Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:28.192756Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329475820964952:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:28.192824Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:28.192980Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329475820964953:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:28.197383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:28.215314Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329475820964956:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:35:28.357539Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329475820965007:2610] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 17843, MsgBus: 7268 2026-01-04T02:35:29.311595Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329480768703724:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:29.311654Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:29.333342Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:29.429449Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:29.445066Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:29.445151Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:29.460789Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:29.513640Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:29.513673Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:29.513681Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:29.513780Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:29.587952Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:29.975407Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:30.318236Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:32.694971Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329493653606458:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:32.695075Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:32.695407Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329493653606468:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:32.695456Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:32.742147Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:32.800024Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:32.897441Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [ ... source pool default not found or you don't have access permissions } 2026-01-04T02:35:37.869179Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329516779095870:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:37.869216Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:37.874615Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:37.889076Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591329516779095874:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:35:37.956524Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591329516779095925:2594] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:35:38.032632Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:662: Compilation with new RBO failed, retrying with YQL optimizer, self: [3:7591329516779095941:2346], database: /Root, text: "\n --!syntax_pg\n SET TablePathPrefix = \"/Root/\";\n select sum(distinct t1.c), sum(distinct t1.a) from t1 group by t1.b order by t1.b;\n " 2026-01-04T02:35:39.029933Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591329503894193104:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:39.030020Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:35:40.098790Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:662: Compilation with new RBO failed, retrying with YQL optimizer, self: [3:7591329525369030597:2370], database: /Root, text: "\n INSERT INTO `/Root/t1` (a, b, c) VALUES (1, 2, 3);\n " Trying to start YDB, gRPC: 20603, MsgBus: 32581 2026-01-04T02:35:41.078067Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591329534476321599:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:41.090047Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:41.121651Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:41.231362Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:41.231480Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:41.254328Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:41.258650Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:41.277300Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591329534476321562:2081] 1767494141073671 != 1767494141073674 2026-01-04T02:35:41.346345Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:41.346369Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:41.346377Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:41.346484Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-04T02:35:41.350399Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:41.803296Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:41.814318Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:35:42.106028Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:44.740959Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329547361224334:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:44.741117Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:44.741599Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329547361224344:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:44.741661Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:44.762658Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:44.874039Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329547361224441:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:44.874121Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:44.874297Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329547361224446:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:44.877938Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329547361224448:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:44.878022Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:44.879612Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:44.895388Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591329547361224449:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:35:44.961442Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591329547361224501:2594] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:35:45.031463Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7591329547361224510:2346], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
:4:1: Fatal: ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:1110: Multiple distinct is not supported, code: 1 2026-01-04T02:35:45.034229Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=YWRlNWExZTktZjM5Yzc2YmMtZjMwNTAyMjQtMjE0NTUzM2Y=, ActorId: [4:7591329547361224438:2335], ActorState: ExecuteState, LegacyTraceId: 01ke3drmv94dehzfpfyy2zhx65, ReplyQueryCompileError, remove tx status# INTERNAL_ERROR issues# { message: "Execution" issue_code: 1060 issues { position { row: 4 column: 1 } message: "ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:1110: Multiple distinct is not supported" end_position { row: 4 column: 1 } issue_code: 1 } } tx_id# trace_id# 2026-01-04T02:35:45.110730Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7591329551656191831:2356], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
: Fatal: yql/essentials/core/expr_nodes_gen/yql_expr_nodes_gen.h:294 TCallable(): requirement Match(node.Get()) failed, code: 1 2026-01-04T02:35:45.113559Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=MzQzZGYyN2MtYTczMmI4YTEtYzJlYzFlYjAtZjc2NzM5NmE=, ActorId: [4:7591329551656191823:2351], ActorState: ExecuteState, LegacyTraceId: 01ke3drn1hamvx1jzw703ppask, ReplyQueryCompileError, remove tx status# INTERNAL_ERROR issues# { message: "Execution" issue_code: 1060 issues { message: "yql/essentials/core/expr_nodes_gen/yql_expr_nodes_gen.h:294 TCallable(): requirement Match(node.Get()) failed" issue_code: 1 } } tx_id# trace_id# |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest >> THiveTest::TestCreateTabletChangeToExternal [GOOD] >> THiveTest::TestDownCounterDeleteNode >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD] >> KqpQueryPerf::MultiRead-QueryService >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Query >> TxUsage::WriteToTopic_Demo_40_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::Basic [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:122:2058] recipient: [1:117:2146] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:122:2058] recipient: [1:117:2146] Leader for TabletID 72057594046678944 is [1:139:2160] sender: [1:142:2058] recipient: [1:117:2146] 2026-01-04T02:35:17.177196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:35:17.177269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:35:17.177307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:35:17.177342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:35:17.177380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:35:17.177402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:35:17.177452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:35:17.177522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:35:17.178201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:35:17.178409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 
2026-01-04T02:35:17.291430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:35:17.291519Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:17.292423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:35:17.304803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:35:17.305046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:35:17.305201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:35:17.316696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:35:17.316982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:35:17.317845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:35:17.318109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:35:17.321051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:35:17.321256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:35:17.322639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:35:17.322721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:35:17.322956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:35:17.323038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:35:17.323096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:35:17.323197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:35:17.513449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:17.514690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:17.514831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:17.514910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:17.515021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:17.515096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:17.515179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:17.515257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:17.515325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:17.515388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:17.515452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:17.515529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:17.515624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:17.515717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:17.515788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# ... 
_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:35:46.727444Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 6 2026-01-04T02:35:46.727482Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:35:46.728035Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:35:46.728075Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:35:46.728112Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-01-04T02:35:46.728150Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:35:46.728226Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2026-01-04T02:35:46.728693Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:35:46.728732Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:35:46.728764Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 2/3, is published: true 2026-01-04T02:35:46.730895Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:35:46.734587Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-04T02:35:46.734713Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:35:46.734985Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:35:46.735825Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:35:46.736007Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:35:46.737075Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:35:46.737136Z node 16 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-04T02:35:46.737301Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-01-04T02:35:46.737355Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-04T02:35:46.737411Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-01-04T02:35:46.737458Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-04T02:35:46.737508Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2026-01-04T02:35:46.737565Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-04T02:35:46.737623Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-04T02:35:46.737665Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-04T02:35:46.737847Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:35:46.737918Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:1 2026-01-04T02:35:46.737952Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:1 2026-01-04T02:35:46.737996Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-04T02:35:46.738028Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:2 2026-01-04T02:35:46.738056Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:2 2026-01-04T02:35:46.738127Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-04T02:35:46.738696Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:35:46.738760Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-04T02:35:46.738851Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:35:46.738908Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-04T02:35:46.738959Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 39] was 3 2026-01-04T02:35:46.739948Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:35:46.740274Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:35:46.740326Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:35:46.743519Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-04T02:35:46.743883Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-04T02:35:46.743944Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-04T02:35:46.744547Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-04T02:35:46.744654Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:35:46.744705Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [16:1155:3070] TestWaitNotification: OK eventTxId 104 2026-01-04T02:35:46.745743Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:35:46.746020Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl" took 316us result status StatusPathDoesNotExist 2026-01-04T02:35:46.746313Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:35:46.746972Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false 
ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:35:46.747210Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl/streamImpl" took 269us result status StatusPathDoesNotExist 2026-01-04T02:35:46.747381Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> THiveTest::TestFollowers_LocalNodeOnly [GOOD] >> THiveTest::TestFollowersCrossDC_Tight ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19582, MsgBus: 19498 2026-01-04T02:35:40.162484Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329529288534085:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:40.162636Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:40.414831Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:40.422276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:40.422384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:40.504926Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:40.505466Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:40.603032Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:40.603055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:40.603065Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:40.603152Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:40.680726Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:40.971971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:40.984244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:35:41.061151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:41.166934Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:41.249215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:41.462033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:41.557621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:43.435140Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329542173437715:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:43.435312Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:43.435922Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329542173437725:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:43.435974Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:43.829742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:43.864648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:43.900802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:43.947450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:44.012076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:44.049645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:44.088372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:44.152330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:44.261275Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329546468405895:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:44.261366Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:44.261519Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329546468405900:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:44.261684Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329546468405901:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:44.261715Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:44.266017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:44.284450Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329546468405904:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:35:44.350947Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329546468405955:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:35:45.166416Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329529288534085:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:45.166496Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoin+QueryService [GOOD] >> KqpQueryPerf::IdxLookupJoin-QueryService >> THiveTest::TestDownCounterDeleteNode [GOOD] |88.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestDownCounterDeleteNode [GOOD] Test command err: 2026-01-04T02:34:54.388860Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:34:54.431660Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:34:54.431980Z node 3 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 3 PDiskId# 1 Path# "SectorMap:2:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:34:54.432936Z node 3 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:34:54.433326Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:34:54.434454Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:73:2076] ControllerId# 72057594037932033 2026-01-04T02:34:54.434508Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:34:54.434626Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-04T02:34:54.434772Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:34:54.445550Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:34:54.445623Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 
IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:34:54.448402Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:80:2080] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.448596Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:81:2081] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.448747Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:82:2082] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.448877Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:83:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.449055Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:84:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.449207Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:85:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.449339Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:86:2086] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.449377Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:34:54.449473Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [3:73:2076] 2026-01-04T02:34:54.449516Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [3:73:2076] 2026-01-04T02:34:54.449570Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-04T02:34:54.449634Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-04T02:34:54.450512Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-04T02:34:54.450678Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:34:54.455703Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:34:54.455963Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:34:54.456336Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:34:54.456663Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-04T02:34:54.458261Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-04T02:34:54.458360Z node 1 :BS_NODE 
DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:34:54.459388Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:97:2078] ControllerId# 72057594037932033 2026-01-04T02:34:54.459440Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:34:54.459535Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-04T02:34:54.459675Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:34:54.471979Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:34:54.472041Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:34:54.473846Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:105:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.473974Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:106:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.474090Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:107:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.474174Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:108:2086] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.474278Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:109:2087] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.474410Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:110:2088] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.474550Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:111:2089] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.474578Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:34:54.474668Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:97:2078] 2026-01-04T02:34:54.474702Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:97:2078] 2026-01-04T02:34:54.474767Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-04T02:34:54.474819Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-04T02:34:54.475649Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-04T02:34:54.475737Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:34:54.478699Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 
VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:34:54.478838Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:34:54.479172Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:34:54.479406Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:34:54.480204Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:122:2077] ControllerId# 72057594037932033 2026-01-04T02:34:54.480254Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:34:54.480328Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-04T02:34:54.480469Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:34:54.488745Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:34:54.488826Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:34:54.490621Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:129:2081] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.490748Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:130:2082] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.490857Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:131:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.490962Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:132:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.491089Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:133:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.491217Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:134:2086] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.491327Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:135:2087] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.491361Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:34:54.491416Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] :: ... 
ID: 72057594037936131 Cookie: 2} 2026-01-04T02:35:49.300025Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:49.300112Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:49.300176Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:49.300232Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037936131 leader: [0:0:0] followers: 0 2026-01-04T02:35:49.300281Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936131] forward result error, check reconnect [30:199:2162] 2026-01-04T02:35:49.300311Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037936131] schedule retry [30:199:2162] 2026-01-04T02:35:49.332335Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72057594037936129] client retry [31:53:2064] 2026-01-04T02:35:49.332407Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [31:53:2064] 2026-01-04T02:35:49.332496Z node 31 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StNormal leader: [0:0:0] (known problem) followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:49.332577Z node 31 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 31 selfDC 2 leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:35:49.332773Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:35:49.333128Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2026-01-04T02:35:49.333196Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2026-01-04T02:35:49.333255Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2026-01-04T02:35:49.333459Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:49.333564Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:49.333675Z node 31 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:49.333782Z node 31 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037936129 leader: [0:0:0] followers: 0 2026-01-04T02:35:49.333862Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936129] forward result error, 
check reconnect [31:53:2064] 2026-01-04T02:35:49.333901Z node 31 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037936129] schedule retry [31:53:2064] 2026-01-04T02:35:49.344412Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72057594037936129] client retry [30:77:2064] 2026-01-04T02:35:49.344503Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [30:77:2064] 2026-01-04T02:35:49.344605Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StNormal leader: [0:0:0] (known problem) followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:49.344643Z node 30 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 30 selfDC 1 leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:35:49.344803Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:35:49.344926Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2026-01-04T02:35:49.345008Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2026-01-04T02:35:49.345056Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2026-01-04T02:35:49.345114Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:49.345224Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:49.345311Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:49.345375Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037936129 leader: [0:0:0] followers: 0 2026-01-04T02:35:49.345435Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936129] forward result error, check reconnect [30:77:2064] 2026-01-04T02:35:49.345462Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037936129] schedule retry [30:77:2064] 2026-01-04T02:35:49.426086Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:426: TClient[72057594037936131] client retry [30:199:2162] 2026-01-04T02:35:49.426173Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936131] lookup [30:199:2162] 2026-01-04T02:35:49.426273Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936131 entry.State: StNormal leader: [0:0:0] (known problem) followers: 0 ev: {EvForward TabletID: 72057594037936131 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:49.426347Z node 30 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 30 selfDC 1 leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:35:49.426522Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit 
ringGroup:0 ev: {EvLookup TabletID: 72057594037936131 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:35:49.426651Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 0} 2026-01-04T02:35:49.426731Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 1} 2026-01-04T02:35:49.426771Z node 30 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936131 Cookie: 2} 2026-01-04T02:35:49.426822Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:49.426899Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:49.426970Z node 30 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936131 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:49.427057Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037936131 leader: [0:0:0] followers: 0 2026-01-04T02:35:49.427142Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037936131] forward result error, check reconnect [30:199:2162] 2026-01-04T02:35:49.427184Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037936131] schedule retry [30:199:2162] 2026-01-04T02:35:49.497192Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} queued, type NKikimr::NHive::TTxDeleteNode 2026-01-04T02:35:49.497312Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:35:49.497541Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} hope 1 -> done Change{10, redo 64b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-01-04T02:35:49.497651Z node 30 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:7} Tx{17, NKikimr::NHive::TTxDeleteNode} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:35:49.498660Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [30:537:2318] 2026-01-04T02:35:49.498738Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [30:537:2318] 2026-01-04T02:35:49.498871Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [30:452:2266] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:49.498987Z node 30 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 30 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [30:452:2266] 2026-01-04T02:35:49.499125Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [30:537:2318] 2026-01-04T02:35:49.499224Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [30:537:2318] 2026-01-04T02:35:49.499318Z node 30 
:PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [30:537:2318] 2026-01-04T02:35:49.499408Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [30:537:2318] 2026-01-04T02:35:49.499615Z node 30 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [30:537:2318] 2026-01-04T02:35:49.500093Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [30:537:2318] 2026-01-04T02:35:49.500188Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [30:537:2318] 2026-01-04T02:35:49.500254Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [30:537:2318] 2026-01-04T02:35:49.500354Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [30:537:2318] 2026-01-04T02:35:49.500419Z node 30 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [30:537:2318] 2026-01-04T02:35:49.500516Z node 30 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [30:536:2317] EventType# 268830214 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Table [GOOD] >> TSchemeShardViewTest::EmptyQueryText >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateDifferentViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:35:51.251275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:35:51.251408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:35:51.251477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:35:51.251530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:35:51.251582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:35:51.251613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:35:51.251692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:35:51.251771Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:35:51.252824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:35:51.253196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:35:51.329536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:35:51.329618Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:51.342016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:35:51.342246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:35:51.342489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:35:51.352851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:35:51.353098Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:35:51.353892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:35:51.354171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:35:51.357537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:35:51.357777Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:35:51.359014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:35:51.359081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:35:51.359217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:35:51.359262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:35:51.359304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:35:51.359470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:35:51.500311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: 
false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:51.501215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:51.501404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:51.501506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:51.501576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:51.501649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:51.501717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:51.501783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:51.501864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:51.501980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:51.502073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:51.502145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:51.502225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:51.502308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:51.502461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:35:52.234288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:35:52.234315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-04T02:35:52.234347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:35:52.234411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:35:52.237387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:35:52.238609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2026-01-04T02:35:52.239116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:35:52.239173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-04T02:35:52.239298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:35:52.239359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-01-04T02:35:52.239442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:35:52.239465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:35:52.239908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:35:52.240081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:35:52.240121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:742:2732] 2026-01-04T02:35:52.240413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:35:52.240476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 
2026-01-04T02:35:52.240508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:742:2732] 2026-01-04T02:35:52.240586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:35:52.240664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:35:52.240684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:742:2732] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2026-01-04T02:35:52.241151Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:35:52.241611Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir" took 222us result status StatusSuccess 2026-01-04T02:35:52.242135Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir" PathDescription { Self { Name: "SomeDir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "FirstView" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "SecondView" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 
2026-01-04T02:35:52.242734Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/FirstView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:35:52.242968Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/FirstView" took 205us result status StatusSuccess 2026-01-04T02:35:52.243316Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/FirstView" PathDescription { Self { Name: "FirstView" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "FirstView" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 QueryText: "First query" CapturedContext { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:35:52.243859Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDir/SecondView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:35:52.244038Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDir/SecondView" took 169us result status StatusSuccess 2026-01-04T02:35:52.244335Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDir/SecondView" PathDescription { Self { Name: "SecondView" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 103 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "SecondView" PathId { OwnerId: 72057594046678944 LocalId: 40 } Version: 1 QueryText: "Second query" CapturedContext { } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyQueryText [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_1_Query [GOOD] >> THiveTest::TestFollowersCrossDC_Tight [GOOD] >> THiveTest::TestFollowersCrossDC_MovingLeader >> THiveTest::TestHiveBalancerNodeRestarts [GOOD] >> THiveTest::TestHiveBalancerDifferentResources ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyQueryText [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:35:52.178954Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:35:52.179014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:35:52.179041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:35:52.179064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:35:52.179120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:35:52.179143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:35:52.179186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2026-01-04T02:35:52.179230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:35:52.180077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:35:52.180402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:35:52.257779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:35:52.257861Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:52.272875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:35:52.273110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:35:52.273296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:35:52.282080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:35:52.282303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:35:52.283008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:35:52.283278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:35:52.286368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:35:52.286617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:35:52.287807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:35:52.287868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:35:52.288078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:35:52.288147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:35:52.288212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:35:52.288396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:35:52.443304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir 
MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:52.444072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:52.444161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:52.444235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:52.444278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:52.444326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:52.444360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:52.444398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:52.444433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:52.444502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:52.444555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:52.444610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:52.444657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:52.444699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:52.444765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
:2663] Become StateWork (SchemeCache [1:678:2668]) 2026-01-04T02:35:53.170043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "MyView" QueryText: "" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:35:53.170263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0 2026-01-04T02:35:53.170318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/MyView, opId: 101:0, viewDescription: Name: "MyView" QueryText: "" 2026-01-04T02:35:53.170443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: MyView, child id: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:35:53.170525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 0 2026-01-04T02:35:53.170611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 101:0 type: TxCreateView target path: [OwnerId: 72057594046678944, LocalPathId: 38] source path: 2026-01-04T02:35:53.170680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:35:53.171772Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:673:2663] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:35:53.174061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusAccepted TxId: 101 SchemeshardId: 72057594046678944 PathId: 38, at schemeshard: 72057594046678944 2026-01-04T02:35:53.174331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/MyView 2026-01-04T02:35:53.176898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:35:53.176977Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:30: [72057594046678944] TCreateView::TPropose, opId: 101:0 ProgressState 2026-01-04T02:35:53.177050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2026-01-04T02:35:53.177211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:35:53.179832Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2026-01-04T02:35:53.180475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet 
strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2026-01-04T02:35:53.180646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 2026-01-04T02:35:53.181043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:35:53.181195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:35:53.181274Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038 2026-01-04T02:35:53.181447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 240 2026-01-04T02:35:53.181685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:35:53.181752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 101 2026-01-04T02:35:53.184296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:35:53.184355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:35:53.184618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:35:53.184725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:35:53.184776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-04T02:35:53.184854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-04T02:35:53.185157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:35:53.185207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:35:53.185316Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:35:53.185356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:35:53.185436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:35:53.185485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:35:53.185529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-04T02:35:53.185596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:35:53.185635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:35:53.185669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:35:53.185784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:35:53.185857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-04T02:35:53.185895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-04T02:35:53.185929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-04T02:35:53.187047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:35:53.187162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:35:53.187213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:35:53.187251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-04T02:35:53.187296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:35:53.188070Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:35:53.188148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:35:53.188196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:35:53.188247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-04T02:35:53.188282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:35:53.188360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:35:53.191551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:35:53.192388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs >> TxUsage::WriteToTopic_Demo_18_RestartBeforeCommit_Query [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_2_Table >> KqpRboYql::ExpressionSubquery [GOOD] >> LocalPartition::DirectWriteWithoutDescribeResourcesPermission [GOOD] >> LocalPartition::WithoutPartitionWithSplit >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_5_Query [GOOD] >> KqpQueryPerf::MultiRead-QueryService [GOOD] >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] >> KqpStreamLookup::ReadTableDuringSplit >> Channels20::IcChannelAsyncRead [GOOD] >> Channels20::IcChannelEarlyFinish >> KqpQueryPerf::IndexInsert-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexInsert-QueryService+UseSink >> THiveTest::TestBridgeDisconnectWithReboots [GOOD] >> THiveTest::TestBridgeDemotion >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Table >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] >> KqpStreamLookup::ReadTableWithIndexDuringSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboYql::ExpressionSubquery [GOOD] Test command err: Trying to start YDB, gRPC: 62466, MsgBus: 15241 2026-01-04T02:35:21.188812Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329446486830601:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:21.188937Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:21.439586Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:21.439734Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:21.486908Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-01-04T02:35:21.547451Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:21.547578Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:21.653262Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:21.654857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:21.654894Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:21.655046Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:21.739201Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:22.070302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:22.186945Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:24.475930Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329459371733245:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:24.476057Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:24.476463Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329459371733255:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:24.476515Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:24.813651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:35:25.438584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591329463666701104:2330];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:35:25.438835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591329463666701104:2330];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:35:25.439066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591329463666701104:2330];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:35:25.439173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591329463666701104:2330];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:35:25.439281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591329463666701104:2330];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:35:25.439382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591329463666701104:2330];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:35:25.439478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591329463666701104:2330];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:35:25.439575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591329463666701104:2330];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:35:25.439669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591329463666701104:2330];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:35:25.439786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591329463666701104:2330];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:35:25.439893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591329463666701104:2330];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 
2026-01-04T02:35:25.439988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591329463666701104:2330];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:35:25.440107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591329463666701104:2330];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:35:25.461355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7591329463666701120:2334];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:35:25.461474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7591329463666701120:2334];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:35:25.461662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7591329463666701120:2334];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:35:25.461781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7591329463666701120:2334];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:35:25.461902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7591329463666701120:2334];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:35:25.462009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7591329463666701120:2334];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:35:25.462105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7591329463666701120:2334];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:35:25.462204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7591329463666701120:2334];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:35:25.462303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7591329463666701120:2334];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:35:25.462392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7591329463666701120:2334];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:35:25.462476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037931;self_id=[1:7591329463666701120:2334];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:35:25.462578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7591329463666701120:2334];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:35:25.462691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7591329463666701120:2334];tablet_id=72075186224037931;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:35:25.480349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;self_id=[1:7591329463666701133:2339];tablet_id=72075186224037940;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:35:25.480446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;self_id=[1:7591329463666701133:2339];tablet_id=72075186224037940;pr ... gressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.235907Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.244771Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.244839Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.244857Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.245047Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037994;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.245102Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037994;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.245121Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037994;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.255433Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.255510Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037978;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.255528Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037978;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.258099Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.258165Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.258181Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.264717Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037986;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.264785Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037986;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.264804Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037986;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.267409Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037996;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.267476Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037996;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.267493Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037996;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.274407Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.274499Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.274517Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.277645Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.277739Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.277759Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.283706Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037979;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.283782Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037979;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.283807Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037979;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.288539Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.288647Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.288665Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.293379Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.293442Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.293459Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=39;result=not_found; 2026-01-04T02:35:47.412548Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329561281170092:3008], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:47.412634Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:47.412788Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329561281170097:3011], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:47.413244Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329561281170099:3012], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:47.413316Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:47.417323Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:47.440423Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591329561281170100:3013], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-04T02:35:47.525151Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591329561281170154:5246] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:35:52.044690Z node 3 :TX_COLUMNSHARD_SCAN WARN: actor.cpp:152: Scan [3:7591329578461042726:5620] got AbortExecution txId: 281474976715665 scanId: 76 gen: 1 tablet: 72075186224037946 code: CANCELLED reason: {
: Error: Send abort execution from compute actor, message: {
: Error: COMPUTE_STATE_FINISHED } } 2026-01-04T02:35:52.778085Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:35:52.778118Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::CrossShard_4_OneToAll [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:118:2057] recipient: [1:112:2143] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:118:2057] recipient: [1:112:2143] Leader for TabletID 9437184 is [1:133:2156] sender: [1:136:2057] recipient: [1:112:2143] 2026-01-04T02:34:43.975655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:43.975720Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:43.982563Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:34:43.998782Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:34:43.999242Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:34:43.999588Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:34:44.051151Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:34:44.065794Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:34:44.067060Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:34:44.070212Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:34:44.070371Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:34:44.070436Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:34:44.070924Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:34:44.071033Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:34:44.071158Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:203:2156] in generation 2 Leader for TabletID 9437184 is [1:133:2156] sender: [1:214:2057] recipient: [1:14:2061] 2026-01-04T02:34:44.184210Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:34:44.221524Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:34:44.221764Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:34:44.221982Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-04T02:34:44.222048Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 
2026-01-04T02:34:44.222101Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:34:44.222150Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:34:44.222425Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:34:44.222476Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:34:44.222709Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:34:44.222804Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:34:44.222928Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:34:44.222969Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:34:44.223030Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:34:44.223071Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:34:44.223116Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:34:44.223168Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:34:44.223220Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:34:44.223334Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:215:2214], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:34:44.223382Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:34:44.223424Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:215:2214], sessionId# [0:0:0] 2026-01-04T02:34:44.231568Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:34:44.231671Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:34:44.231763Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:34:44.232053Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:34:44.232122Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:34:44.232175Z node 1 :TX_DATASHARD DEBUG: 
check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:34:44.232234Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:34:44.232271Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:34:44.232319Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:34:44.232361Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:34:44.232710Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:34:44.232750Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:34:44.232787Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:34:44.232822Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:34:44.232897Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:34:44.232930Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:34:44.232964Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:34:44.232999Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:34:44.233029Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:34:44.251042Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:34:44.251120Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:34:44.251161Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:34:44.251222Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:34:44.251333Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:34:44.252222Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:34:44.252279Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:34:44.252328Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-04T02:34:44.252469Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 
2026-01-04T02:34:44.252505Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:34:44.252672Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2026-01-04T02:34:44.252721Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [2:1] at 9437184 is Executed 2026-01-04T02:34:44.252778Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:34:44.252822Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [2:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:34:44.256845Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:34:44.256927Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:34:44.262060Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:34:44.262152Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:34:44.262244Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:34:44.262297Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:34:44.262339Z node 1 :TX_DATASHARD TRACE: datashard_pipelin ... 
HARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2026-01-04T02:35:55.417205Z node 37 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:35:55.417235Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-01-04T02:35:55.417268Z node 37 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [7:6] in PlanQueue unit at 9437186 2026-01-04T02:35:55.417298Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit PlanQueue 2026-01-04T02:35:55.417329Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-04T02:35:55.417357Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit PlanQueue 2026-01-04T02:35:55.417385Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit LoadTxDetails 2026-01-04T02:35:55.417414Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit LoadTxDetails 2026-01-04T02:35:55.418182Z node 37 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437186 loaded tx from db 7:6 keys extracted: 1 2026-01-04T02:35:55.418237Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-04T02:35:55.418267Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit LoadTxDetails 2026-01-04T02:35:55.418300Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit FinalizeDataTxPlan 2026-01-04T02:35:55.418331Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit FinalizeDataTxPlan 2026-01-04T02:35:55.418378Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-04T02:35:55.418405Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit FinalizeDataTxPlan 2026-01-04T02:35:55.418481Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit BuildAndWaitDependencies 2026-01-04T02:35:55.418512Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit BuildAndWaitDependencies 2026-01-04T02:35:55.418562Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [7:6] is the new logically complete end at 9437186 2026-01-04T02:35:55.418595Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [7:6] is the new logically incomplete end at 9437186 2026-01-04T02:35:55.418635Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [7:6] at 9437186 2026-01-04T02:35:55.418679Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-04T02:35:55.418707Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit BuildAndWaitDependencies 2026-01-04T02:35:55.418737Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit BuildDataTxOutRS 2026-01-04T02:35:55.418769Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] 
at 9437186 on unit BuildDataTxOutRS 2026-01-04T02:35:55.418819Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-04T02:35:55.418847Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit BuildDataTxOutRS 2026-01-04T02:35:55.418874Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit StoreAndSendOutRS 2026-01-04T02:35:55.418902Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit StoreAndSendOutRS 2026-01-04T02:35:55.418934Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-04T02:35:55.418961Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit StoreAndSendOutRS 2026-01-04T02:35:55.418996Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit PrepareDataTxInRS 2026-01-04T02:35:55.419023Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit PrepareDataTxInRS 2026-01-04T02:35:55.419054Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-04T02:35:55.419080Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit PrepareDataTxInRS 2026-01-04T02:35:55.419106Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit LoadAndWaitInRS 2026-01-04T02:35:55.419135Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit LoadAndWaitInRS 2026-01-04T02:35:55.419164Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-04T02:35:55.419191Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit LoadAndWaitInRS 2026-01-04T02:35:55.419221Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit BlockFailPoint 2026-01-04T02:35:55.419248Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit BlockFailPoint 2026-01-04T02:35:55.419275Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-04T02:35:55.419300Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit BlockFailPoint 2026-01-04T02:35:55.419326Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit ExecuteDataTx 2026-01-04T02:35:55.419354Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit ExecuteDataTx 2026-01-04T02:35:55.419714Z node 37 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [7:6] at tablet 9437186 with status COMPLETE 2026-01-04T02:35:55.419776Z node 37 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [7:6] at 9437186: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 1, SelectRowBytes: 10, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-04T02:35:55.419831Z node 37 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-04T02:35:55.419864Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit ExecuteDataTx 2026-01-04T02:35:55.419894Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit CompleteOperation 2026-01-04T02:35:55.419924Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit CompleteOperation 2026-01-04T02:35:55.420149Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is DelayComplete 2026-01-04T02:35:55.420183Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit CompleteOperation 2026-01-04T02:35:55.420213Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [7:6] at 9437186 to execution unit CompletedOperations 2026-01-04T02:35:55.420245Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [7:6] at 9437186 on unit CompletedOperations 2026-01-04T02:35:55.420284Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [7:6] at 9437186 is Executed 2026-01-04T02:35:55.420313Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [7:6] at 9437186 executing on unit CompletedOperations 2026-01-04T02:35:55.420343Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [7:6] at 9437186 has finished 2026-01-04T02:35:55.420376Z node 37 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:35:55.420406Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-01-04T02:35:55.420441Z node 37 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2026-01-04T02:35:55.420472Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2026-01-04T02:35:55.436957Z node 37 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 7 txid# 6} 2026-01-04T02:35:55.437024Z node 37 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 7} 2026-01-04T02:35:55.437084Z node 37 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-04T02:35:55.437138Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [7:6] at 9437185 on unit CompleteOperation 2026-01-04T02:35:55.437200Z node 37 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [7 : 6] from 9437185 at tablet 9437185 send result to client [37:103:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-04T02:35:55.437248Z node 37 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-01-04T02:35:55.437666Z node 37 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437186 step# 7 txid# 6} 2026-01-04T02:35:55.437712Z node 37 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437186 step# 7} 2026-01-04T02:35:55.437756Z node 37 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-04T02:35:55.437788Z node 37 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [7:6] at 9437186 on unit CompleteOperation 2026-01-04T02:35:55.437900Z node 37 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [7 : 6] from 9437186 at tablet 9437186 send result to client [37:103:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-04T02:35:55.437941Z node 37 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:35:55.438333Z node 37 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 7 txid# 6} 2026-01-04T02:35:55.438376Z node 37 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 7} 2026-01-04T02:35:55.438415Z node 37 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:35:55.438450Z node 37 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [7:6] at 9437184 on unit CompleteOperation 2026-01-04T02:35:55.438530Z node 37 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [7 : 6] from 9437184 at tablet 9437184 send result to client [37:103:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-04T02:35:55.438569Z node 37 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 25383, MsgBus: 6284 2026-01-04T02:35:48.270140Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329561573837271:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:48.270203Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:48.601088Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:48.646729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:48.646833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:48.685545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:48.796921Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:48.805929Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:48.914539Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:48.914565Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:48.914573Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty 
maybe) 2026-01-04T02:35:48.914670Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:49.281526Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:49.372832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:49.445766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:49.636636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:49.814413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:49.886132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:51.718345Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329574458740907:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:51.718537Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:51.719012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329574458740917:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:51.719098Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:52.053887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:52.084250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:52.114466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:52.144611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:52.178370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:52.211573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:52.244010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:52.307842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:52.406183Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329578753709084:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:52.406294Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:52.407855Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329578753709089:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:52.407885Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329578753709090:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:52.407946Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:52.412451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:52.426311Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329578753709093:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:35:52.489762Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329578753709144:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:35:53.269201Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329561573837271:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:53.269277Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Table >> THiveTest::TestHiveBalancerDifferentResources [GOOD] >> THiveTest::TestHiveBalancerDifferentResources2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoin-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 20065, MsgBus: 11527 2026-01-04T02:35:41.945625Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329533449412428:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:41.945707Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:42.317700Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:42.317838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:42.320360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:42.349956Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:42.353404Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329533449412400:2081] 1767494141944820 != 1767494141944823 2026-01-04T02:35:42.353470Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:42.510930Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:42.510951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:42.510958Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:42.511083Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:42.558649Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-04T02:35:42.962382Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:42.982158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:43.038799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:43.213545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:43.374073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:43.450064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:45.570419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329550629283462:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:45.570632Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:45.571350Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329550629283472:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:45.571413Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:45.933240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:45.984117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:46.028994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:46.071950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:46.116524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:46.154860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:46.190764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:46.272525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:46.377486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329554924251646:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:46.377579Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:46.377970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329554924251651:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:46.378037Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329554924251652:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:46.378070Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:46.383315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:46.397310Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329554924251655:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:35:46.499518Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329554924251706:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:35:46.947241Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329533449412428:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:46.947296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10092, MsgBus: 30287 2026-01-04T02:35:49.853067Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329566903173612:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:49.854702Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:49.872215Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:49.977903Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329566903173584:2081] 1767494149842473 != 1767494149842476 2026-01-04T02:35:49.984017Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:50.015310Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:50.015399Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:50.023198Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:50.127657Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:50.146909Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:50.146937Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:50.146949Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:50.147067Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:50.656745Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:35:50.663578Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:35:50.680280Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:50.758325Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:50.867107Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:50.939060Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:51.079413Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:53.219516Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329584083044632:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:53.219605Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:53.219891Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329584083044641:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:53.219940Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:53.297044Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:53.328053Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:53.361780Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:53.392691Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:53.426782Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:53.472001Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:53.510181Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:53.562643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:53.654315Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329584083045512:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:53.654457Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:53.654798Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329584083045517:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:53.654812Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329584083045518:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:53.654889Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:53.661515Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:53.673050Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329584083045521:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:35:53.775633Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329584083045572:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:35:54.849985Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329566903173612:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:54.850075Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs >> KqpRboYql::Aggregation+ColumnStore [GOOD] >> KqpRboYql::Aggregation-ColumnStore >> TContinuousBackupWithRebootsTests::TakeIncrementalBackup [GOOD] >> TSchemeShardViewTest::CreateView |88.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncDropSameView ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::TakeIncrementalBackup [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:122:2058] recipient: [1:117:2146] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:122:2058] recipient: [1:117:2146] Leader for TabletID 72057594046678944 is [1:139:2160] sender: [1:142:2058] recipient: [1:117:2146] 2026-01-04T02:35:06.207895Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:35:06.207998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:35:06.208062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:35:06.208097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:35:06.208157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:35:06.208187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:35:06.208271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:35:06.208359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 
600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:35:06.209242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:35:06.209523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:35:06.344229Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:35:06.344340Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:06.345333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:35:06.353463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:35:06.353707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:35:06.353876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:35:06.358657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:35:06.358877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:35:06.359692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:35:06.359940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:35:06.362314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:35:06.362521Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:35:06.363836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:35:06.363902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:35:06.364130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:35:06.364214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:35:06.364273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:35:06.364377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:35:06.569550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction 
{ WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:06.570809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:06.570957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:06.571051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:06.571160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:06.571262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:06.571342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:06.571436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:06.571505Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:06.571578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:06.571647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:06.571739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:06.571853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:06.571946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:06.572066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# ... 
blish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710757 2026-01-04T02:35:58.056894Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:35:58.057131Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2026-01-04T02:35:58.057517Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710757 2026-01-04T02:35:58.057574Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710757 2026-01-04T02:35:58.057751Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2026-01-04T02:35:58.057816Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710757:0 ProgressState 2026-01-04T02:35:58.057965Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710757:0 progress is 3/3 2026-01-04T02:35:58.058012Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-01-04T02:35:58.058062Z node 16 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710757:0 progress is 3/3 2026-01-04T02:35:58.058100Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-01-04T02:35:58.058150Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710757, ready parts: 3/3, is published: true 2026-01-04T02:35:58.058241Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [16:1380:3204] message: TxId: 281474976710757 2026-01-04T02:35:58.058304Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-01-04T02:35:58.058354Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:0 2026-01-04T02:35:58.058398Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710757:0 2026-01-04T02:35:58.058547Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:35:58.058594Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:1 2026-01-04T02:35:58.058622Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710757:1 2026-01-04T02:35:58.058654Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-04T02:35:58.058680Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:2 2026-01-04T02:35:58.058702Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710757:2 2026-01-04T02:35:58.058763Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-04T02:35:58.059215Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:35:58.059270Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-04T02:35:58.059349Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:35:58.059398Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-04T02:35:58.059437Z node 16 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:35:58.062330Z node 16 :CONTINUOUS_BACKUP ERROR: schemeshard_backup_incremental__progress.cpp:189: TIncrementalBackup::TTxProgress: Incremental backup with id# 0 not found 2026-01-04T02:35:58.062516Z node 16 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:35:58.762570Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:35:58.762843Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl" took 323us result status StatusPathDoesNotExist 2026-01-04T02:35:58.763026Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:35:58.763723Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/Table/0_continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:35:58.763942Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl/streamImpl" took 241us result status StatusPathDoesNotExist 2026-01-04T02:35:58.764110Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 39])" Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 39 LastExistedPrefixDescription { Self { Name: "Table" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:35:58.764728Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:35:58.764953Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 252us result status StatusSuccess 2026-01-04T02:35:58.765465Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 
LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW+VolatileTxs [GOOD] >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TSchemeShardViewTest::CreateView [GOOD] >> TSchemeShardViewTest::EmptyName >> THiveTest::TestBridgeDemotion [GOOD] >> THiveTest::TestBridgeBalance >> TSchemeShardViewTest::AsyncDropSameView [GOOD] >> TSubDomainTest::DatashardNotRunAtAllWhenSubDomainNodesIsStopped [GOOD] >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::CreateView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:35:59.641789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:35:59.642036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:35:59.642075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:35:59.642110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:35:59.642148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:35:59.642193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-01-04T02:35:59.642293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:35:59.642362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:35:59.643247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:35:59.643593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:35:59.739894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:35:59.739965Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:59.753965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:35:59.758469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:35:59.758708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:35:59.764174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:35:59.764475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:35:59.765174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:35:59.765462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:35:59.768202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:35:59.768420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:35:59.769683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:35:59.769758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:35:59.769908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:35:59.769970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:35:59.770013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:35:59.770176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:35:59.933047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:59.934288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:59.934427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:59.934524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:59.934633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:59.934710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:59.934801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:59.934871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:59.934979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:59.935066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:59.935139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:59.935215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:59.935298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:59.935391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:59.935464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2026-01-04T02:36:00.600375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2026-01-04T02:36:00.600504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 2026-01-04T02:36:00.600840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:36:00.600965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:36:00.601028Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 101:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038 2026-01-04T02:36:00.601158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 240 2026-01-04T02:36:00.601335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:36:00.601403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 101 2026-01-04T02:36:00.603277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:36:00.603322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:36:00.603461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:36:00.603540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:00.603571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:208:2209], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-04T02:36:00.603622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:208:2209], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-04T02:36:00.603866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at 
schemeshard: 72057594046678944 2026-01-04T02:36:00.603916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:36:00.604040Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:36:00.604084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:36:00.604128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:36:00.604170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:36:00.604211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-04T02:36:00.604258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:36:00.604293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:36:00.604325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:36:00.604392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:36:00.604437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-04T02:36:00.604469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-04T02:36:00.604498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-04T02:36:00.605231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:36:00.605299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:36:00.605326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:36:00.605361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-04T02:36:00.605399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:36:00.605985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 
PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:36:00.606076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:36:00.606117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:36:00.606151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-04T02:36:00.606187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:36:00.606256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:36:00.614755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:36:00.614990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:36:00.615284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:36:00.615334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:36:00.615742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:36:00.615842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:36:00.615881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:701:2691] TestWaitNotification: OK eventTxId 101 2026-01-04T02:36:00.616344Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:36:00.616569Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 273us result status StatusSuccess 2026-01-04T02:36:00.616988Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: "MyView" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> THiveTest::TestHiveBalancerDifferentResources2 [GOOD] >> THiveTest::TestHiveBalancerUselessNeighbourMoves >> TSchemeShardViewTest::ReadOnlyMode >> TxUsage::WriteToTopic_Demo_47_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncDropSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:35:59.905141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:35:59.905256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:35:59.905303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:35:59.905348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:35:59.905387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:35:59.905416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:35:59.905478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:35:59.905543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:35:59.906610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:35:59.906947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:36:00.001237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:36:00.001318Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:00.017702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:36:00.018011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:36:00.018226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:36:00.031490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:36:00.031767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:36:00.032569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:36:00.033089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:36:00.036994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:00.037266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:36:00.038710Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:36:00.038783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:00.038925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:36:00.038975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:36:00.039020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:36:00.039244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:36:00.199303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:00.200344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:00.200507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:00.200598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:00.200667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:00.200734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:00.200801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:00.200865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:00.200923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:00.201029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:00.201113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:00.201207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:00.201273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:00.201345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:00.201447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
d [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:36:01.017281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:36:01.019676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:36:01.019726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:36:01.019941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:36:01.020097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:01.020158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-04T02:36:01.020218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-04T02:36:01.020592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:36:01.020647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:36:01.020743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:36:01.020776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:36:01.020812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:36:01.020842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:36:01.020891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-04T02:36:01.020941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:36:01.020977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:36:01.021029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:36:01.021122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:36:01.021170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-04T02:36:01.021222Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-04T02:36:01.021261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-04T02:36:01.022189Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:36:01.022301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:36:01.022360Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:36:01.022410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-04T02:36:01.022448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:36:01.023351Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:36:01.023435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:36:01.023466Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:36:01.023495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-04T02:36:01.023550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:36:01.023638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:36:01.024492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:36:01.024547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:36:01.024621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:36:01.026964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:36:01.028875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:36:01.029044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 102 2026-01-04T02:36:01.029369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:36:01.029420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-01-04T02:36:01.029502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:36:01.029526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 TestWaitNotification wait txId: 104 2026-01-04T02:36:01.029570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-04T02:36:01.029589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-04T02:36:01.030156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:36:01.030330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:36:01.030380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:737:2727] 2026-01-04T02:36:01.030546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-04T02:36:01.030622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:36:01.030702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:36:01.030740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:737:2727] 2026-01-04T02:36:01.030818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:36:01.030839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:737:2727] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 103 
2026-01-04T02:36:01.031326Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:36:01.031531Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 229us result status StatusPathDoesNotExist 2026-01-04T02:36:01.031712Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD] >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Table [GOOD] >> TSchemeShardViewTest::DropView >> TxUsage::WriteToTopic_Demo_47_Query ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:36:01.192160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:36:01.192276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:36:01.192314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:36:01.192351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:36:01.192389Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:36:01.192430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:36:01.192505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:36:01.192575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:36:01.193515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:36:01.193870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:36:01.283763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:36:01.283853Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:01.300296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:36:01.300921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:36:01.301115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:36:01.308897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:36:01.309179Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:36:01.309928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:36:01.310226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:36:01.313271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:01.313470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:36:01.314740Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:36:01.314810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:01.314885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:36:01.314929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:36:01.314972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:36:01.315176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:36:01.449114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:01.453287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:01.453507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:01.453626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:01.453706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:01.453775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:01.453862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:01.453934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:01.454026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:01.454142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:01.454219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:01.454317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:01.454414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:01.454481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:01.454574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
irst GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:02.150454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:36:02.150607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:36:02.150846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:36:02.150934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:36:02.150982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:36:02.151012Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:36:02.153209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:36:02.153272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:36:02.153313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:36:02.155265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:36:02.155320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:36:02.155361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:36:02.155414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:36:02.155569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:36:02.157900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:36:02.158042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-04T02:36:02.158449Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:36:02.158588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:36:02.158643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:36:02.158952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:36:02.162073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:36:02.162268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:36:02.162344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:36:02.164608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:36:02.164676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:36:02.164896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:02.164942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-04T02:36:02.165035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:36:02.165083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-04T02:36:02.165176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:36:02.165211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:36:02.165249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:36:02.165309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:36:02.165387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:36:02.165427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 
2026-01-04T02:36:02.165462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:36:02.165491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:36:02.165581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:36:02.165624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-04T02:36:02.165656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-04T02:36:02.166397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:36:02.166487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:36:02.166516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:36:02.166555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:36:02.166584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:36:02.166647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:36:02.170020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:36:02.170524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-04T02:36:02.171051Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:675:2665] Bootstrap 2026-01-04T02:36:02.171925Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:675:2665] Become StateWork (SchemeCache [1:680:2670]) 2026-01-04T02:36:02.174781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "" QueryText: "Some query" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:36:02.174950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0 2026-01-04T02:36:02.175011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/, opId: 101:0, 
viewDescription: Name: "" QueryText: "Some query" 2026-01-04T02:36:02.175090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2026-01-04T02:36:02.176134Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:675:2665] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:36:02.179799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:36:02.179991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/', error: path part shouldn't be empty, operation: CREATE VIEW, path: /MyRoot/ 2026-01-04T02:36:02.180557Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> TxUsage::WriteToTopic_Demo_22_RestartAfterCommit_Query [GOOD] >> TSchemeShardViewTest::ReadOnlyMode [GOOD] >> TSchemeShardViewTest::AsyncCreateSameView >> TSchemeShardViewTest::DropView [GOOD] >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] >> KqpQueryPerf::Update-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:36:02.366657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:36:02.366730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:36:02.366758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:36:02.366789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:36:02.366831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:36:02.366853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:36:02.366905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10 2026-01-04T02:36:02.366958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:36:02.367719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:36:02.368000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:36:02.450808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:36:02.450875Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:02.467509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:36:02.467788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:36:02.467999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:36:02.492849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:36:02.493121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:36:02.493904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:36:02.494210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:36:02.501444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:02.501721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:36:02.503015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:36:02.503091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:02.503197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:36:02.503263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:36:02.503317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:36:02.503514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:36:02.661100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:02.661998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:02.662127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:02.662232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:02.662303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:02.662372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:02.662434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:02.662495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:02.662582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:02.662690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:02.662760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:02.662855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:02.662925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:02.663002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:02.663118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
less_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:36:03.714143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 Leader for TabletID 72057594046678944 is [1:788:2757] sender: [1:844:2058] recipient: [1:15:2062] 2026-01-04T02:36:03.755193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateView CreateView { Name: "ThirdView" QueryText: "Some query" } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:36:03.755432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_view.cpp:118: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0 2026-01-04T02:36:03.755498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_view.cpp:124: [72057594046678944] TCreateView Propose, path: /MyRoot/ThirdView, opId: 103:0, viewDescription: Name: "ThirdView" QueryText: "Some query" 2026-01-04T02:36:03.755650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: ThirdView, child id: [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-04T02:36:03.755725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-04T02:36:03.755765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 103:0 type: TxCreateView target path: [OwnerId: 72057594046678944, LocalPathId: 39] source path: 2026-01-04T02:36:03.755836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:36:03.758639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 39, at schemeshard: 72057594046678944 2026-01-04T02:36:03.758871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE VIEW, path: /MyRoot/ThirdView 2026-01-04T02:36:03.759119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:36:03.759173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:30: [72057594046678944] TCreateView::TPropose, opId: 103:0 ProgressState 2026-01-04T02:36:03.759226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2026-01-04T02:36:03.759352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:36:03.761601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet 
strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-01-04T02:36:03.761773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000039 2026-01-04T02:36:03.762560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:36:03.762665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:36:03.762717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_view.cpp:45: [72057594046678944] TCreateView::TPropose, opId: 103:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000039 2026-01-04T02:36:03.762844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 240 2026-01-04T02:36:03.763020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:36:03.763111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 FAKE_COORDINATOR: Erasing txId 103 2026-01-04T02:36:03.765288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:36:03.765345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:36:03.765549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:36:03.765821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:03.765867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:838:2796], at schemeshard: 72057594046678944, txId: 103, path id: 1 2026-01-04T02:36:03.765911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:838:2796], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-04T02:36:03.765992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:36:03.766047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-04T02:36:03.766151Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:36:03.766187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:36:03.766227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:36:03.766255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:36:03.766293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-04T02:36:03.766348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:36:03.766381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:36:03.766413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:36:03.766484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:36:03.766525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2026-01-04T02:36:03.766581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-04T02:36:03.766610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-04T02:36:03.767568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:36:03.767670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:36:03.767729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:36:03.767775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-04T02:36:03.767828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:36:03.768869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:36:03.768972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 4 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:36:03.769006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:36:03.769034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-04T02:36:03.769061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:36:03.769146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-04T02:36:03.771727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:36:03.773237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> KqpRboPg::TPCH [GOOD] >> Channels20::IcChannelEarlyFinish [GOOD] >> Channels20::IcChannelBackPressure >> THiveTest::TestHiveBalancerUselessNeighbourMoves [GOOD] >> THiveTest::TestHiveBalancerWithImmovableTablets >> THiveTest::TestCheckSubHiveMigrationManyTablets [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTablets ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::DropView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:36:03.382997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:36:03.383106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:36:03.383147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:36:03.383186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:36:03.383244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:36:03.383274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:36:03.383338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:36:03.383405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:36:03.384315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:36:03.384630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:36:03.482879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:36:03.482934Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:03.501185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:36:03.501412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:36:03.501622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:36:03.520364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:36:03.520624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:36:03.521387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:36:03.521692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:36:03.527696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:03.527996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:36:03.529244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:36:03.529310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:03.529411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:36:03.529470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:36:03.529521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:36:03.529726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:36:03.681905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:03.684789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:03.684946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:03.685066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:03.685140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:03.685210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:03.685283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:03.685349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:03.685410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:03.685531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:03.685604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:03.685691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:03.685762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:03.686112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:03.686244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
tion.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:36:04.351244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2026-01-04T02:36:04.351384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2026-01-04T02:36:04.351767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:36:04.351902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:36:04.351963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_view.cpp:43: [72057594046678944] TDropView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000039 2026-01-04T02:36:04.352145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-04T02:36:04.352316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:36:04.352380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:36:04.354798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:36:04.354849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:36:04.355045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:36:04.355194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:04.355245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-04T02:36:04.355322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-04T02:36:04.355548Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:36:04.355600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:36:04.355701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:36:04.355733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:36:04.355767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:36:04.355808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:36:04.355842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-04T02:36:04.355885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:36:04.355920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:36:04.355992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:36:04.356080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:36:04.356119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-04T02:36:04.356151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-04T02:36:04.356192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-04T02:36:04.357507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:36:04.357626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:36:04.357685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:36:04.357726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-04T02:36:04.357764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:36:04.359081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle 
TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:36:04.359183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:36:04.359219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:36:04.359243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-04T02:36:04.359301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:36:04.359406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:36:04.359927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:36:04.359990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:36:04.360068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:36:04.362185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:36:04.363773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:36:04.363884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:36:04.364118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:36:04.364156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:36:04.364542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:36:04.364642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:36:04.364676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:731:2721] TestWaitNotification: OK eventTxId 102 2026-01-04T02:36:04.365141Z node 1 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:36:04.365341Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 218us result status StatusPathDoesNotExist 2026-01-04T02:36:04.365529Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyView\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/MyView" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Query >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] >> KqpQueryPerf::IndexInsert-QueryService+UseSink [GOOD] >> KqpQueryPerf::IndexUpsert+QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTableCompletesQuicklyRW-VolatileTxs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:35:55.403645Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:35:55.518119Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:35:55.518476Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:35:55.518526Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:35:55.773060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:55.773207Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:55.847798Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494152677254 != 1767494152677257 2026-01-04T02:35:55.861499Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:55.907529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:56.016811Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:35:56.308031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:35:56.308412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:35:56.310407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:56.311313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:35:56.313026Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2026-01-04T02:35:56.313076Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 1500 Status# 16 SEND to# [1:399:2398] Proxy marker# C1 2026-01-04T02:35:56.324356Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:56.349158Z node 1 :HIVE DEBUG: hive_impl.cpp:2519: HIVE#72057594037968897 ProcessTabletBalancer [(72057594046644480:1,0)] MaxUsage=0.000000000 on #1 MinUsage=0.000000000 on #1 Scatter=0.000000000 2026-01-04T02:35:56.349285Z node 1 :HIVE DEBUG: hive_impl.cpp:406: HIVE#72057594037968897 Handle BalancerOut 2026-01-04T02:35:56.425178Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2026-01-04T02:35:56.425304Z node 
1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2026-01-04T02:35:56.425617Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 2000 in 0.500000s at 1.950000s 2026-01-04T02:35:56.426067Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1500, txid# 1 marker# C2 2026-01-04T02:35:56.426145Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 1500 Status# 17 SEND EvProposeTransactionStatus to# [1:399:2398] Proxy 2026-01-04T02:35:56.426581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:35:56.427960Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-01-04T02:35:56.428132Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-01-04T02:35:56.428172Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 acknowledged 2026-01-04T02:35:56.428226Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:10] persistent tx 1 acknowledged 2026-01-04T02:35:56.428908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:35:56.428986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2026-01-04T02:35:56.430116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2026-01-04T02:35:56.433647Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-04T02:35:56.438661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:35:56.438765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:56.439756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2026-01-04T02:35:56.443287Z node 1 :HIVE DEBUG: hive_impl.cpp:55: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2026-01-04T02:35:56.451890Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard 
ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 38 BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-04T02:35:56.452015Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:354: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2026-01-04T02:35:56.452288Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:446: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2026-01-04T02:35:56.452347Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:449: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,38): {} 2026-01-04T02:35:56.452419Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:453: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2026-01-04T02:35:56.452573Z node 1 :HIVE DEBUG: hive_impl.cpp:2904: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2026-01-04T02:35:56.453195Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:173: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2026-01-04T02:35:56.453341Z node 1 :HIVE DEBUG: hive_impl.cpp:1108: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2026-01-04T02:35:56.453974Z node 1 :HIVE DEBUG: hive_impl.cpp:93: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2026-01-04T02:35:56.454309Z node 1 :HIVE DEBUG: hive_impl.cpp:461: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false GroupSizeInUnits: 0 } } 2026-01-04T02:35:56.454412Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:63: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{136418710901536}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2026-01-04T02:35:56.454507Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{136418710901536}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2026-01-04T02:35:56.454653Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{136418710901536}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2026-01-04T02:35:56.454745Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Unknown -> Stopped 2026-01-04T02:35:56.454820Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Stopped -> Booting 2026-01-04T02:35:56.454868Z node 1 :HIVE DEBUG: hive_impl.cpp:370: HIVE#72057594037968897 ProcessBootQueue (1) 2026-01-04T02:35:56.455086Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2026-01-04T02:35:56.455127Z node 1 :HIVE DEBUG: hive_impl.cpp:250: HIVE#72057594037968897 Handle ProcessBootQueue (size: 1) 
2026-01-04T02:35:56.455196Z node 1 :HIVE DEBUG: hive_impl.cpp:1254: HIVE#72057594037968897 [FBN] Finding best node for tablet DataShard.72075186224037888.Leader.0 2026-01-04T02:35:56.455308Z node 1 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 1) 2026-01-04T02:35:56.455427Z node 1 :HIVE DEBUG: hive_impl.cpp:330: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2026-01-04T02:35:56.455521Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:31: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2026-01-04T02:35:56.455720Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:73: HIVE#72057594037968897 THive::TTxStartTablet::Execute, Sending TEvBootTablet(DataShard.72075186224037888.Leader.1) to node 1 storage {Version# 1 TabletID# 72075186224037888 T ... reason: , at schemeshard: 72057594046644480 2026-01-04T02:36:04.136402Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715665, database: /Root, subject: , status: StatusAccepted, operation: DROP TABLE, path: /Root/table-2 2026-01-04T02:36:04.139702Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:36:04.139857Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715665 ssId 72057594046644480 seqNo 2:4 2026-01-04T02:36:04.139936Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715665 at tablet 72075186224037889 2026-01-04T02:36:04.141187Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:36:04.141356Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:36:04.152414Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:136: HIVE#72057594037968897 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10080003 [2:327:2368] NKikimrLocal.TEvStopTablet TabletId: 72075186224037888 FollowerId: 0 Generation: 1,0x10040206 [2:399:2398] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1 Actions: NKikimr::TTabletReqBlockBlobStorage} 2026-01-04T02:36:04.152751Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-04T02:36:04.154006Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-04T02:36:04.154159Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-04T02:36:04.157261Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:23: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037888 OK) 2026-01-04T02:36:04.157352Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:64: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037888 OK) 2026-01-04T02:36:04.157688Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-04T02:36:04.157774Z node 2 :HIVE DEBUG: hive_impl.cpp:508: 
HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2026-01-04T02:36:04.157847Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-04T02:36:04.157913Z node 2 :HIVE DEBUG: hive_impl.cpp:925: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2026-01-04T02:36:04.158361Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:26: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2026-01-04T02:36:04.169741Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-04T02:36:04.171315Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 281474976715665 HANDLE EvProposeTransaction marker# C0 2026-01-04T02:36:04.171389Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 281474976715665 step# 4500 Status# 16 SEND to# [2:399:2398] Proxy marker# C1 2026-01-04T02:36:04.182070Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:72: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2026-01-04T02:36:04.258604Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 281474976715665 has been planned 2026-01-04T02:36:04.258725Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 2026-01-04T02:36:04.258771Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 2026-01-04T02:36:04.259041Z node 2 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 5000 in 0.500000s at 4.950000s 2026-01-04T02:36:04.259513Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 4500, txid# 281474976715665 marker# C2 2026-01-04T02:36:04.259597Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 281474976715665 stepId# 4500 Status# 17 SEND EvProposeTransactionStatus to# [2:399:2398] Proxy 2026-01-04T02:36:04.260334Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715665 at step 4500 at tablet 72075186224037889 { Transactions { TxId: 281474976715665 AckTo { RawX1: 0 RawX2: 0 } } Step: 4500 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2026-01-04T02:36:04.260416Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:36:04.260711Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 4500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:36:04.261134Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:36:04.261192Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:36:04.261245Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [4500:281474976715665] in PlanQueue unit at 72075186224037889 2026-01-04T02:36:04.261449Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: 
LoadTxDetails at 72075186224037889 loaded tx from db 4500:281474976715665 keys extracted: 0 2026-01-04T02:36:04.261586Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:36:04.261898Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:36:04.264049Z node 2 :TX_DATASHARD INFO: drop_table_unit.cpp:72: Trying to DROP TABLE at 72075186224037889 2026-01-04T02:36:04.264546Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:36:04.267072Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 4500} 2026-01-04T02:36:04.267152Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:36:04.267521Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-01-04T02:36:04.267628Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-01-04T02:36:04.267680Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2026-01-04T02:36:04.267715Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 for mediator 72057594046382081 acknowledged 2026-01-04T02:36:04.267768Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:24] persistent tx 281474976715665 acknowledged 2026-01-04T02:36:04.268193Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:36:04.268277Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [4500 : 281474976715665] from 72075186224037889 at tablet 72075186224037889 send result to client [2:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:36:04.268345Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715665 state PreOffline TxInFly 0 2026-01-04T02:36:04.268431Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:36:04.268607Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 281474976715665, done: 0, blocked: 1 2026-01-04T02:36:04.274966Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715665 datashard 72075186224037889 state PreOffline 2026-01-04T02:36:04.275063Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-04T02:36:04.276558Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715665:0 2026-01-04T02:36:04.276724Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715665, publications: 2, subscribers: 1 2026-01-04T02:36:04.278330Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715665, subscribers: 1 2026-01-04T02:36:04.279176Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:36:04.296605Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:36:04.296947Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-01-04T02:36:04.298988Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:36:04.301906Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-01-04T02:36:04.302501Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:74: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186224037889 2026-01-04T02:36:04.302573Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:19: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2026-01-04T02:36:04.302706Z node 2 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2026-01-04T02:36:04.303021Z node 2 :HIVE DEBUG: tablet_info.cpp:518: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2026-01-04T02:36:04.303167Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:67: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minstep/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_view/unittest >> TSchemeShardViewTest::AsyncCreateSameView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:36:04.378380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:36:04.378469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:36:04.378518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:36:04.378565Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:36:04.378608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:36:04.378637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:36:04.378710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:36:04.378775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:36:04.379694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:36:04.380034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:36:04.483063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:36:04.483148Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:04.500731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:36:04.500963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:36:04.501157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:36:04.519533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:36:04.519779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:36:04.520513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:36:04.520788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:36:04.524131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:04.524395Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:36:04.525667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:36:04.525730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:04.525900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:36:04.525953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:36:04.525998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:36:04.526208Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:36:04.660536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:04.661580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:04.661720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:04.661848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:04.661917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:04.661987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:04.662055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:04.662120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:04.662181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:04.662285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:04.662370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:04.662475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:04.662559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:04.662638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:04.662754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... EBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 101 2026-01-04T02:36:05.315592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:36:05.315671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:36:05.315867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:36:05.316004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:05.316056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-04T02:36:05.316123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-04T02:36:05.316590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:36:05.316669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:36:05.316783Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:36:05.316825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:36:05.316868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:36:05.316904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:36:05.316943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-04T02:36:05.316989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:36:05.317029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:36:05.317075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:36:05.317180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 
2026-01-04T02:36:05.317239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-04T02:36:05.317275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-04T02:36:05.317318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-04T02:36:05.318245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:36:05.318359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:36:05.318405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:36:05.318526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-04T02:36:05.318589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:36:05.319619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:36:05.319734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:36:05.319774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:36:05.319807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-04T02:36:05.319851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:36:05.319950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:36:05.323746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:36:05.325151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 
TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 101 2026-01-04T02:36:05.325559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:36:05.325636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-04T02:36:05.325753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:36:05.325780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-01-04T02:36:05.325863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:36:05.325894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:36:05.326485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:36:05.326618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:36:05.326660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:713:2703] 2026-01-04T02:36:05.326943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:36:05.327031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:36:05.327072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:36:05.327109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:713:2703] 2026-01-04T02:36:05.327238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:36:05.327264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:713:2703] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2026-01-04T02:36:05.327810Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyView" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:36:05.328117Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyView" took 288us result status StatusSuccess 2026-01-04T02:36:05.328542Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MyView" PathDescription { Self { Name: 
"MyView" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ViewDescription { Name: "MyView" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 QueryText: "Some query" CapturedContext { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_view/unittest >> KqpRboYql::LeftJoins [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Table >> THiveTest::TestBridgeBalance [GOOD] >> THiveTest::TestBridgeFollowers >> THiveTest::TestFollowersCrossDC_MovingLeader [GOOD] >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower |88.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |88.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... results_accumulator.log} |88.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_view/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpWorkload::KV ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 62526, MsgBus: 2751 2026-01-04T02:35:45.831224Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329549498853015:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:45.831335Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:45.876739Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:35:46.104912Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:46.146876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:46.146966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:46.151585Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:46.185484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:46.366549Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:46.366578Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:46.366602Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:46.366709Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:46.429072Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:46.847769Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:47.013961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:47.033184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:35:47.123512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:47.279947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:47.453210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:47.537411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:49.526994Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329566678724018:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:49.527165Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:49.533987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329566678724028:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:49.534162Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:49.920328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:49.990468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:50.039225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:50.079539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:50.118381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:50.162797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:50.201884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:50.282817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:50.403337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329570973692211:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:50.403484Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:50.403945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329570973692216:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:50.404024Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329570973692217:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:50.404058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:50.408487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:50.422524Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329570973692220:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:35:50.501241Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329570973692273:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:35:50.832600Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329549498853015:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:50.832700Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:35:52.426430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itsel ... 2026-01-04T02:35:56.165084Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:56.165190Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:56.199742Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:56.208689Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:56.342418Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:56.342444Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:56.342455Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:56.342563Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:56.402631Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:56.845985Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:56.853061Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:35:56.864671Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:56.952312Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:57.110881Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:57.176562Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:57.270182Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:59.707278Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329611574013630:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:59.707409Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:59.707683Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329611574013639:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:59.707728Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:59.784640Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:59.822200Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:59.859417Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:59.891529Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:59.935743Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:59.991631Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:00.040238Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:00.120262Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:00.221000Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329615868981815:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:00.221109Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:00.225747Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329615868981821:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:00.225747Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329615868981820:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:00.225871Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:00.229896Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:00.251016Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329615868981824:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:36:00.315092Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329615868981875:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:01.051834Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329598689109913:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:01.051933Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:02.143885Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:02.194860Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:02.241110Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::TPCH [GOOD] Test command err: Trying to start YDB, gRPC: 1821, MsgBus: 19262 2026-01-04T02:35:19.702878Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329437531310129:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:19.702970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:19.945355Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:19.955762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:19.956634Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:19.982964Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:20.059993Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329437531310088:2081] 1767494119701787 != 1767494119701790 2026-01-04T02:35:20.075530Z node 1 
:IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:20.114188Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:20.198505Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:20.198533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:20.198541Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:20.198643Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:20.658873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:20.709075Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:22.946181Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329450416212879:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:22.946181Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329450416212871:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:22.946386Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:22.946781Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329450416212886:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:22.946890Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:22.951146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:22.967772Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329450416212885:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:35:23.087348Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329454711180235:2542] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 7443, MsgBus: 8664 2026-01-04T02:35:24.021848Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329459872627648:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:24.021911Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:24.057720Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:24.128070Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:24.130842Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329459872627618:2081] 1767494124020984 != 1767494124020987 2026-01-04T02:35:24.165836Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:24.165912Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:24.173220Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:24.250577Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:24.250604Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:24.250610Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:24.250736Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:24.347664Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:24.647108Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:25.028486Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:27.310889Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329472757530387:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:27.310991Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:27.311423Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329472757530397:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:27.311472Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:27.605121Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:35:28.198772Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[2:7591329472757530913:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:35:28.198778Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[2:7591329472757530940:2331];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:35:28.199051Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[2:7591329472757530940:2331];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:35:28.199308Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[2:7591329472757530940:2331];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:35:28.199460Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[2:7591329472757530940:2331];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:35:28.199494Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[2:7591329472757530913:2330];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:35:28.199598Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[2:7591329472757530940:2331];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:35:28.199695Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[2:7591329472757530913:2330];tablet_id=720751862240378 ... 
s_local=45;result=not_found; 2026-01-04T02:35:57.088158Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.088177Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.089629Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.089674Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.089692Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.098988Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.099092Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.099116Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.101870Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.101940Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.101962Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.109939Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.110010Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.110031Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.113016Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.113071Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.113091Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038014;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.120797Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.121174Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.121195Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.123268Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.123306Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.123320Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.130255Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.130323Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.130344Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.134555Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.134625Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.134646Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.140472Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.140545Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.140568Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.144416Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.144487Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.144509Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:35:57.206221Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329600236426296:3020], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:57.206345Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:57.206819Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329600236426301:3023], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:57.206872Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329600236426302:3024], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:57.207187Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:57.212105Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715666:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:57.226814Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715666, at schemeshard: 72057594046644480 2026-01-04T02:35:57.231282Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591329600236426305:3025], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715666 completed, doublechecking } 2026-01-04T02:35:57.298779Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591329600236426356:5392] txid# 281474976715667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 49], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:00.945219Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:36:00.945255Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest >> KqpQueryPerf::IndexReplace+QueryService-UseSink >> KqpRboYql::Aggregation-ColumnStore [GOOD] >> TxUsage::WriteToTopic_Demo_40_Table [GOOD] >> KqpQueryPerf::Replace-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboYql::LeftJoins [GOOD] Test command err: Trying to start YDB, gRPC: 31491, MsgBus: 4238 2026-01-04T02:35:19.776095Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329438200584367:2157];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:19.776721Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:20.017283Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:20.022971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:20.023224Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:20.104485Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:20.111588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:20.246422Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:20.246446Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:20.246454Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:20.246582Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:20.304359Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:20.702861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:20.782224Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:22.706815Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329451085487000:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:22.706983Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:22.707350Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329451085487010:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:22.707412Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:22.957132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:35:23.529482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7591329455380454893:2335];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:35:23.529779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7591329455380454893:2335];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:35:23.530033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7591329455380454893:2335];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:35:23.530170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7591329455380454893:2335];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:35:23.530332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7591329455380454893:2335];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:35:23.530449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7591329455380454893:2335];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:35:23.530575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7591329455380454893:2335];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:35:23.530697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7591329455380454893:2335];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:35:23.530818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7591329455380454893:2335];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:35:23.530956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7591329455380454893:2335];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:35:23.531085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7591329455380454893:2335];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 
2026-01-04T02:35:23.531209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7591329455380454893:2335];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:35:23.531341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7591329455380454893:2335];tablet_id=72075186224037930;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:35:23.532540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591329455380454889:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:35:23.532648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591329455380454889:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:35:23.532842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591329455380454889:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:35:23.532978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591329455380454889:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:35:23.533068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591329455380454889:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:35:23.533164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591329455380454889:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:35:23.533273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591329455380454889:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:35:23.533430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591329455380454889:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:35:23.533601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591329455380454889:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:35:23.533748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591329455380454889:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:35:23.533874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037895;self_id=[1:7591329455380454889:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:35:23.533966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591329455380454889:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:35:23.534096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591329455380454889:2332];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:35:23.574816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[1:7591329455380454890:2333];tablet_id=72075186224037950;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:35:23.574901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037950;self_id=[1:7591329455380454890:2333];tablet_id=72075186224037950;pro ... ProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.189060Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038133;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.196700Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038109;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.196777Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038109;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.196801Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038109;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.199018Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038125;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.199121Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038125;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.199158Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038125;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.206710Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038139;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.206790Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038139;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.206810Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038139;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.208429Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038141;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.208503Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038141;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.208523Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038141;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.216620Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038115;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.216693Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038115;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.216712Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038115;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.218322Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038143;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.218387Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038143;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.218406Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038143;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.228436Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038129;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.228434Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038137;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.228509Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038137;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.228515Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038129;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.228532Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038137;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.228535Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038129;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.239195Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038135;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.239194Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038111;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.239295Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038111;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.239301Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038135;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.239322Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038135;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.239323Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038111;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=41;result=not_found; 2026-01-04T02:35:58.295171Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3drn121gz1j736gmm4e748", SessionId: ydb://session/3?node_id=3&id=OGI0ZWRmYTQtZGJkODQ2MzQtNDM1YjNiNGQtMzU4ZDI5N2E=, Slow query, duration: 13.230881s, status: SUCCESS, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/t1` (\n a Int64 NOT NULL,\n b String,\n c Int64,\n primary key(a)\n ) with (Store = Column);\n\n CREATE TABLE `/Root/t2` (\n a Int64\tNOT NULL,\n b String,\n c Int64,\n primary key(a)\n ) with (Store = Column);\n\n CREATE TABLE `/Root/t3` (\n a Int64 NOT NULL,\n b String,\n c Int64,\n primary key(a)\n ) with (Store = Column);\n\n CREATE TABLE `/Root/t4` (\n a Int64 NOT NULL,\n b String,\n c Int64,\n primary key(a)\n ) with (Store = Column);\n ", parameters: 0b 2026-01-04T02:35:58.463329Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329606223293710:3715], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:58.463448Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:58.463536Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329606223293715:3718], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:58.463895Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329606223293717:3719], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:58.463945Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:58.468506Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:58.480801Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2026-01-04T02:35:58.486893Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591329606223293718:3720], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T02:35:58.585377Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591329606223293770:8031] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest >> KqpQueryPerf::Update-QueryService-UseSink >> YdbIndexTable::MultiShardTableOneIndexIndexOverlapDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexPkOverlap >> KqpStreamLookup::ReadTableDuringSplit [GOOD] >> THiveTest::TestFollowersCrossDC_KillingHiveAndFollower [GOOD] >> THiveTest::TestFollowerCompatability1 >> TxUsage::WriteToTopic_Demo_40_Query >> THiveTest::TestBridgeFollowers [GOOD] >> KqpQueryPerf::Upsert+QueryService+UseSink >> KqpQueryPerf::ComputeLength+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboYql::Aggregation-ColumnStore [GOOD] Test command err: Trying to start YDB, gRPC: 26830, MsgBus: 20457 2026-01-04T02:35:20.034463Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329442156581666:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:20.047202Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:20.301966Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:20.338955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:20.339072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:20.403894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:20.413242Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:20.477393Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:20.477419Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:20.477429Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:20.477517Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:20.485687Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:20.868944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:21.050351Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:23.200944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329455041484395:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:23.201065Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:23.202398Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329455041484405:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:23.202528Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:23.453134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:23.562243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329455041484498:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:23.562321Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:23.562639Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329455041484500:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:23.562701Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:23.581556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:23.667820Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329455041484591:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:23.667908Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:23.668044Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329455041484596:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:23.668388Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329455041484598:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:23.668501Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:23.671772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:23.692325Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329455041484599:2353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-04T02:35:23.807749Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329455041484651:2656] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 32489, MsgBus: 26950 2026-01-04T02:35:25.239226Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329462833790470:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:25.239266Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:25.258098Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:35:25.339222Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:25.351853Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:25.351940Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:25.352895Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:25.365179Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329462833790367:2081] 1767494125225703 != 1767494125225706 2026-01-04T02:35:25.389933Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:25.472269Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:25.472291Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:25.472299Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:25.472392Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:25.632120Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:25.868880Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:26.245637Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-04T02:35:28.282615Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329475718693138:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:28.282712Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service. ... /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:39.729783Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329524937831207:3125], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:39.729832Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:39.730081Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329524937831210:3127], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:39.730121Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:39.733465Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:39.744103Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591329524937831209:3126], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-04T02:35:39.836811Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591329524937831264:5246] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:35:45.165868Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:35:45.165905Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 3162, MsgBus: 28549 2026-01-04T02:35:58.931556Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591329604522963373:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:58.931674Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:58.959052Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:59.056972Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:59.059280Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:59.059382Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:59.061962Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591329604522963328:2081] 1767494158929369 != 1767494158929372 2026-01-04T02:35:59.088195Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:59.166498Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:59.166523Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:59.166536Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:59.166627Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:59.207467Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:59.608534Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:59.621857Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-01-04T02:35:59.941959Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:03.375806Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329625997800693:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:03.375927Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:03.376559Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329625997800703:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:03.376622Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:03.399857Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:03.541863Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329625997800795:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:03.542008Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:03.542546Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329625997800798:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:03.542630Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:03.575783Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:03.650132Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329625997800874:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:03.650252Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:03.651164Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329625997800879:2350], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:03.651232Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329625997800880:2351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:03.651400Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:03.656257Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:03.673699Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591329625997800883:2352], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-04T02:36:03.731537Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591329625997800934:2641] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:03.931857Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591329604522963373:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:03.931942Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableDuringSplit [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:35:58.675500Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:35:58.788846Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:35:58.789422Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:35:58.789503Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:35:59.103581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:59.103710Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:59.179315Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494155929934 != 1767494155929937 2026-01-04T02:35:59.192661Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:59.243420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:59.341221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:35:59.645712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:59.662051Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:59.767159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:00.090481Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:953:2811], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:00.090619Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:964:2816], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:00.090694Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:00.091873Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:969:2821], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:00.092361Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:00.096973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:00.231846Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:967:2819], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:36:00.303796Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1025:2858] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR --- split started --- --- split finished --- Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR Captured TEvDataShard::TEvRead from KQP_STREAM_LOOKUP_ACTOR to TX_DATASHARD_ACTOR |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp/unittest >> KqpQueryPerf::MultiDeleteFromTable+QueryService-UseSink >> THiveTest::TestHiveBalancerWithImmovableTablets [GOOD] >> THiveTest::TestHiveBalancerHighUsage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestBridgeFollowers [GOOD] Test command err: 2026-01-04T02:35:15.506450Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:35:15.542680Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:35:15.542990Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:35:15.543975Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:35:15.544435Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-04T02:35:15.545664Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-04T02:35:15.545755Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:35:15.546997Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2075] ControllerId# 72057594037932033 2026-01-04T02:35:15.547061Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:35:15.547210Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-04T02:35:15.547389Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:35:15.576901Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:35:15.576962Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 
IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:35:15.580309Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:15.580537Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:15.580742Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:15.580907Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:15.581087Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:40:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:15.581292Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:41:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:15.581443Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:42:2086] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:15.581491Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:35:15.581590Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:28:2075] 2026-01-04T02:35:15.581642Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:28:2075] 2026-01-04T02:35:15.581701Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-04T02:35:15.581761Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-04T02:35:15.582686Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-04T02:35:15.583258Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:15.603012Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-04T02:35:15.603115Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:35:15.616145Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:35:15.616221Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-04T02:35:15.616698Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:35:15.616847Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:35:15.617484Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-04T02:35:15.618069Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-04T02:35:15.618149Z node 1 :STATESTORAGE 
DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-04T02:35:15.618201Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-04T02:35:15.618252Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:15.618345Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:32:2063] 2026-01-04T02:35:15.618372Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [1:32:2063] 2026-01-04T02:35:15.619254Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:52:2092] 2026-01-04T02:35:15.619293Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:52:2092] 2026-01-04T02:35:15.619337Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:15.619469Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:15.619597Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2026-01-04T02:35:15.619626Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2026-01-04T02:35:15.619710Z node 1 :BS_NODE DEBUG: {NWDC13@distconf_binding.cpp:26} ApplyNewNodeList NewNodeList# [[::1:12001/1:DC=1/M=1/R=1/U=1/]] 2026-01-04T02:35:15.634752Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:509} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2026-01-04T02:35:15.634962Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:52:2092] 2026-01-04T02:35:15.634998Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:15.635103Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:15.636079Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2026-01-04T02:35:15.636130Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2026-01-04T02:35:15.636325Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037932033 leader: [0:0:0] followers: 0 2026-01-04T02:35:15.636390Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037932033] forward result error, check reconnect [1:28:2075] 2026-01-04T02:35:15.636441Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037932033] schedule retry [1:28:2075] 
2026-01-04T02:35:15.642526Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:35:15.642802Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:35:15.643017Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-04T02:35:15.643173Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2026-01-04T02:35:15.643403Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2026-01-04T02:35:15.643445Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2026-01-04T02:35:15.643476Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2026-01-04T02:35:15.643644Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:35:15.647216Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2026-01-04T02:35:15.647278Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2026-01-04T02:35:15.647311Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2026-01-04T02:35:15.647365Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 202 ... 
tatus: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [50:639:2158] CurrentLeaderTablet: [50:669:2180] CurrentGeneration: 1 CurrentStep: 0} 2026-01-04T02:36:08.853610Z node 50 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [50:639:2158] followers: 2 2026-01-04T02:36:08.853646Z node 50 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 50 selfDC 1 leaderDC 1 3:2:3 local 1 localDc 2 other 1 disallowed 3 2026-01-04T02:36:08.853705Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [50:887:2275] 2026-01-04T02:36:08.853733Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [50:887:2275] 2026-01-04T02:36:08.853932Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [50:889:2276] 2026-01-04T02:36:08.853962Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [50:889:2276] 2026-01-04T02:36:08.854018Z node 50 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal leader: [50:639:2158] followers: 2 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 3:2:3} 2026-01-04T02:36:08.854051Z node 50 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 50 selfDC 1 leaderDC 1 3:2:3 local 1 localDc 2 other 1 disallowed 3 2026-01-04T02:36:08.854144Z node 50 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:36:08.854382Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-04T02:36:08.854435Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-04T02:36:08.854475Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-04T02:36:08.854668Z node 50 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [50:639:2158] CurrentLeaderTablet: [50:669:2180] CurrentGeneration: 1 CurrentStep: 0} 2026-01-04T02:36:08.854741Z node 50 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [50:639:2158] CurrentLeaderTablet: [50:669:2180] CurrentGeneration: 1 CurrentStep: 0} 2026-01-04T02:36:08.854809Z node 50 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [50:639:2158] followers: 2 2026-01-04T02:36:08.854843Z node 50 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 50 selfDC 1 leaderDC 1 3:2:3 local 1 localDc 2 other 1 disallowed 3 2026-01-04T02:36:08.854901Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [50:889:2276] 2026-01-04T02:36:08.854928Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [50:889:2276] 
2026-01-04T02:36:08.855105Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [50:891:2277] 2026-01-04T02:36:08.855133Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [50:891:2277] 2026-01-04T02:36:08.855181Z node 50 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal leader: [50:639:2158] followers: 2 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 3:2:3} 2026-01-04T02:36:08.855215Z node 50 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 50 selfDC 1 leaderDC 1 3:2:3 local 1 localDc 2 other 1 disallowed 3 2026-01-04T02:36:08.855314Z node 50 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:36:08.855543Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-04T02:36:08.855590Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-04T02:36:08.855633Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-04T02:36:08.855798Z node 50 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [50:639:2158] CurrentLeaderTablet: [50:669:2180] CurrentGeneration: 1 CurrentStep: 0} 2026-01-04T02:36:08.855882Z node 50 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [50:639:2158] CurrentLeaderTablet: [50:669:2180] CurrentGeneration: 1 CurrentStep: 0} 2026-01-04T02:36:08.855969Z node 50 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [50:639:2158] followers: 2 2026-01-04T02:36:08.856019Z node 50 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 50 selfDC 1 leaderDC 1 3:2:3 local 1 localDc 2 other 1 disallowed 3 2026-01-04T02:36:08.856080Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [50:891:2277] 2026-01-04T02:36:08.856108Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [50:891:2277] 2026-01-04T02:36:08.856291Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [50:893:2278] 2026-01-04T02:36:08.856320Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [50:893:2278] 2026-01-04T02:36:08.856369Z node 50 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StNormal leader: [50:639:2158] followers: 2 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 3:2:3} 2026-01-04T02:36:08.856401Z node 50 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 50 selfDC 1 leaderDC 1 3:2:3 local 1 localDc 2 other 1 disallowed 3 2026-01-04T02:36:08.856501Z node 50 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: 
{EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:36:08.856730Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-04T02:36:08.856784Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-04T02:36:08.856829Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-04T02:36:08.857020Z node 50 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [50:639:2158] CurrentLeaderTablet: [50:669:2180] CurrentGeneration: 1 CurrentStep: 0} 2026-01-04T02:36:08.857091Z node 50 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [50:639:2158] CurrentLeaderTablet: [50:669:2180] CurrentGeneration: 1 CurrentStep: 0} 2026-01-04T02:36:08.857159Z node 50 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [50:639:2158] followers: 2 2026-01-04T02:36:08.857194Z node 50 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 50 selfDC 1 leaderDC 1 3:2:3 local 1 localDc 2 other 1 disallowed 3 2026-01-04T02:36:08.857245Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [50:893:2278] 2026-01-04T02:36:08.857273Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [50:893:2278] 2026-01-04T02:36:08.857573Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [51:896:2150] 2026-01-04T02:36:08.857632Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [51:896:2150] 2026-01-04T02:36:08.857797Z node 51 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 3:2:3} 2026-01-04T02:36:08.857981Z node 51 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:36:08.858351Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-04T02:36:08.858433Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-04T02:36:08.858475Z node 48 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-04T02:36:08.858732Z node 51 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [50:639:2158] CurrentLeaderTablet: [50:669:2180] CurrentGeneration: 1 CurrentStep: 0} 2026-01-04T02:36:08.858848Z node 51 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 
TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [50:639:2158] CurrentLeaderTablet: [50:669:2180] CurrentGeneration: 1 CurrentStep: 0} 2026-01-04T02:36:08.858962Z node 51 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [50:639:2158] followers: 2 2026-01-04T02:36:08.859056Z node 51 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 51 selfDC 2 leaderDC 1 3:2:3 local 1 localDc 1 other 2 disallowed 2 tabletId: 72075186224037888 followers: 2 countLeader 1 allowFollowers 1 winner: [51:759:2125] 2026-01-04T02:36:08.859153Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72075186224037888] forward result local node, try to connect [51:896:2150] 2026-01-04T02:36:08.859288Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037888]::SendEvent [51:896:2150] 2026-01-04T02:36:08.859482Z node 51 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037888] Accept Connect Originator# [51:896:2150] 2026-01-04T02:36:08.859718Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037888] connected with status OK role: Follower [51:896:2150] 2026-01-04T02:36:08.859781Z node 51 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037888] send queued [51:896:2150] |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> KqpQueryPerf::Update-QueryService+UseSink [GOOD] >> YdbIndexTable::MultiShardTableOneIndex [GOOD] >> YdbIndexTable::MultiShardTableOneIndexDataColumn >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] >> THiveTest::TestFollowerCompatability1 [GOOD] >> THiveTest::TestFollowerCompatability2 >> KqpQueryPerf::Update+QueryService-UseSink >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 25726, MsgBus: 30104 2026-01-04T02:36:04.939186Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329630510862409:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:04.940350Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:05.189051Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:05.189163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:05.189605Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:05.195370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:05.251327Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:05.370444Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:05.370492Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:05.370504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:05.370652Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:05.431816Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:05.885762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:05.906139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:36:05.954868Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:05.994366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:06.210631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:06.423013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:06.498511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:08.411864Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329647690733429:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:08.412072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:08.412491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329647690733439:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:08.412583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:08.747380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:08.785038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:08.822726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:08.856009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:08.893590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:08.956101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:08.992012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:09.037272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:09.120729Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329651985701612:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:09.120834Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:09.121102Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329651985701617:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:09.121145Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329651985701618:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:09.121278Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:09.125143Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:09.135733Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329651985701621:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:36:09.228789Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329651985701672:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:09.941942Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329630510862409:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:09.942051Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minstep/unittest >> TDataShardMinStepTest::TestDropTablePlanComesNotTooEarlyRW-VolatileTxs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:35:57.299447Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:35:57.419623Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:35:57.420113Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:35:57.420180Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:35:57.701533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:57.701675Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:57.820139Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494154324930 != 1767494154324933 2026-01-04T02:35:57.832605Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:57.882386Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:57.985850Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:35:58.288642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:35:58.289036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:35:58.290834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:58.291727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:35:58.293479Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2026-01-04T02:35:58.293531Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 1500 Status# 16 SEND to# [1:399:2398] Proxy marker# C1 2026-01-04T02:35:58.305155Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:58.329104Z node 1 :HIVE DEBUG: hive_impl.cpp:2519: HIVE#72057594037968897 ProcessTabletBalancer [(72057594046644480:1,0)] MaxUsage=0.000000000 on #1 MinUsage=0.000000000 on #1 Scatter=0.000000000 2026-01-04T02:35:58.329243Z node 1 :HIVE DEBUG: hive_impl.cpp:406: HIVE#72057594037968897 Handle BalancerOut 2026-01-04T02:35:58.404184Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2026-01-04T02:35:58.404298Z node 
1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2026-01-04T02:35:58.404589Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 2000 in 0.500000s at 1.950000s 2026-01-04T02:35:58.405009Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1500, txid# 1 marker# C2 2026-01-04T02:35:58.405083Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 1500 Status# 17 SEND EvProposeTransactionStatus to# [1:399:2398] Proxy 2026-01-04T02:35:58.405535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:35:58.406792Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-01-04T02:35:58.406970Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-01-04T02:35:58.407013Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 acknowledged 2026-01-04T02:35:58.407050Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:10] persistent tx 1 acknowledged 2026-01-04T02:35:58.407728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:35:58.407794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2026-01-04T02:35:58.408703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 1, subscribers: 1 2026-01-04T02:35:58.411472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/table-1, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-04T02:35:58.412960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:35:58.413045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:58.413876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/table-1 2026-01-04T02:35:58.417754Z node 1 :HIVE DEBUG: hive_impl.cpp:55: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(DataShard(72057594046644480,1)) 2026-01-04T02:35:58.430067Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046644480 OwnerIdx: 1 TabletType: DataShard 
ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 38 BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/Root:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-04T02:35:58.430194Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:354: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2026-01-04T02:35:58.430465Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:446: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type DataShard: {} 2026-01-04T02:35:58.430575Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:449: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for object (72057594046644480,38): {} 2026-01-04T02:35:58.430656Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:453: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2026-01-04T02:35:58.430868Z node 1 :HIVE DEBUG: hive_impl.cpp:2904: HIVE#72057594037968897 CreateTabletFollowers Tablet DataShard.72075186224037888.Leader.0 2026-01-04T02:35:58.431554Z node 1 :HIVE DEBUG: tx__create_tablet.cpp:173: HIVE#72057594037968897 THive::TTxCreateTablet::Execute TabletId: 72075186224037888 Status: OK 2026-01-04T02:35:58.431730Z node 1 :HIVE DEBUG: hive_impl.cpp:1108: HIVE#72057594037968897 THive::AssignTabletGroups TEvControllerSelectGroups tablet 72075186224037888 GroupParameters { StoragePoolSpecifier { Name: "/Root:test" } } ReturnAllMatchingGroups: true 2026-01-04T02:35:58.432487Z node 1 :HIVE DEBUG: hive_impl.cpp:93: HIVE#72057594037968897 Connected to tablet 72057594037932033 from tablet 72057594037968897 2026-01-04T02:35:58.432897Z node 1 :HIVE DEBUG: hive_impl.cpp:461: HIVE#72057594037968897 THive::Handle TEvControllerSelectGroupsResult: success Status: OK NewStyleQuerySupported: true MatchingGroups { Groups { ErasureSpecies: 0 GroupID: 2181038080 StoragePoolName: "/Root:test" AssuredResources { } CurrentResources { } PhysicalGroup: true Decommitted: false GroupSizeInUnits: 0 } } 2026-01-04T02:35:58.433024Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:63: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{137100699489056}(72075186224037888,HIVE_REASSIGN_REASON_NO,[]) 2026-01-04T02:35:58.433130Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{137100699489056}: tablet 72075186224037888 channel 0 assigned to group 2181038080 2026-01-04T02:35:58.433302Z node 1 :HIVE DEBUG: tx__update_tablet_groups.cpp:151: HIVE#72057594037968897 THive::TTxUpdateTabletGroups::Execute{137100699489056}: tablet 72075186224037888 channel 1 assigned to group 2181038080 2026-01-04T02:35:58.433397Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Unknown -> Stopped 2026-01-04T02:35:58.433479Z node 1 :HIVE DEBUG: tablet_info.cpp:125: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Stopped -> Booting 2026-01-04T02:35:58.433539Z node 1 :HIVE DEBUG: hive_impl.cpp:370: HIVE#72057594037968897 ProcessBootQueue (1) 2026-01-04T02:35:58.433884Z node 1 :HIVE DEBUG: tx__process_boot_queue.cpp:18: HIVE#72057594037968897 THive::TTxProcessBootQueue()::Execute 2026-01-04T02:35:58.433942Z node 1 :HIVE DEBUG: hive_impl.cpp:250: HIVE#72057594037968897 Handle ProcessBootQueue (size: 1) 
2026-01-04T02:35:58.433990Z node 1 :HIVE DEBUG: hive_impl.cpp:1254: HIVE#72057594037968897 [FBN] Finding best node for tablet DataShard.72075186224037888.Leader.0 2026-01-04T02:35:58.434075Z node 1 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037888.Leader.0) VolatileState: Booting -> Starting (Node 1) 2026-01-04T02:35:58.434163Z node 1 :HIVE DEBUG: hive_impl.cpp:330: HIVE#72057594037968897 ProcessBootQueue - BootQueue empty (WaitQueue: 0) 2026-01-04T02:35:58.434234Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:31: HIVE#72057594037968897 THive::TTxStartTablet::Execute Tablet (72075186224037888,0) 2026-01-04T02:35:58.434389Z node 1 :HIVE DEBUG: tx__start_tablet.cpp:73: HIVE#72057594037968897 THive::TTxStartTablet::Execute, Sending TEvBootTablet(DataShard.72075186224037888.Leader.1) to node 1 storage {Version# 1 TabletID# 72075186224037888 T ... :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-04T02:36:11.325637Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-04T02:36:11.325794Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-04T02:36:11.328855Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:23: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037888 OK) 2026-01-04T02:36:11.328943Z node 2 :HIVE DEBUG: tx__block_storage_result.cpp:64: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037888 OK) 2026-01-04T02:36:11.329141Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-04T02:36:11.329238Z node 2 :HIVE DEBUG: hive_impl.cpp:508: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037888 2026-01-04T02:36:11.329290Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-04T02:36:11.329363Z node 2 :HIVE DEBUG: hive_impl.cpp:925: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037888 2026-01-04T02:36:11.329869Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:26: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037888 OK) 2026-01-04T02:36:11.341034Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-04T02:36:11.351901Z node 2 :HIVE DEBUG: tx__delete_tablet_result.cpp:72: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037888)::Complete SideEffects {} 2026-01-04T02:36:11.373480Z node 2 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=DataShard 72075186224037889 is blocked by a schema operation;tx_id=281474976715662; 2026-01-04T02:36:11.373677Z node 2 :TX_DATASHARD INFO: datashard_pipeline.cpp:1318: Outdated Tx 281474976715662 is cleaned at tablet 72075186224037889 and outdatedStep# 34500 2026-01-04T02:36:11.373766Z node 2 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:38: Cleaned up old txs at 72075186224037889 TxInFly 0 2026-01-04T02:36:11.374021Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:36:11.374088Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 
txId 281474976715666 ssId 72057594046644480 seqNo 2:4 2026-01-04T02:36:11.374136Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715666 at tablet 72075186224037889 2026-01-04T02:36:11.374325Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:36:11.374491Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:36:11.374555Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:36:11.374587Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:36:11.374643Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 1 2026-01-04T02:36:11.386071Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:36:11.386196Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:36:11.387622Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 281474976715666 HANDLE EvProposeTransaction marker# C0 2026-01-04T02:36:11.387695Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 281474976715666 step# 35000 Status# 16 SEND to# [2:399:2398] Proxy marker# C1 2026-01-04T02:36:11.449724Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 281474976715666 has been planned 2026-01-04T02:36:11.449857Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 2026-01-04T02:36:11.449919Z node 2 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 2026-01-04T02:36:11.450192Z node 2 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 35500 in 0.500000s at 35.450000s 2026-01-04T02:36:11.450604Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 35000, txid# 281474976715666 marker# C2 2026-01-04T02:36:11.450676Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 281474976715666 stepId# 35000 Status# 17 SEND EvProposeTransactionStatus to# [2:399:2398] Proxy 2026-01-04T02:36:11.451098Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 35000, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:36:11.451845Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715666 at step 35000 at tablet 72075186224037889 { Transactions { TxId: 281474976715666 AckTo { RawX1: 0 RawX2: 0 } } Step: 35000 MediatorID: 72057594046382081 TabletID: 72075186224037889 } 2026-01-04T02:36:11.451902Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:36:11.452124Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 
2026-01-04T02:36:11.452186Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:36:11.452234Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [35000:281474976715666] in PlanQueue unit at 72075186224037889 2026-01-04T02:36:11.452434Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037889 loaded tx from db 35000:281474976715666 keys extracted: 0 2026-01-04T02:36:11.452550Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:36:11.452714Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:36:11.452778Z node 2 :TX_DATASHARD INFO: drop_table_unit.cpp:72: Trying to DROP TABLE at 72075186224037889 2026-01-04T02:36:11.453187Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:36:11.456029Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 35000} 2026-01-04T02:36:11.456150Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:36:11.456503Z node 2 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-01-04T02:36:11.456610Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-01-04T02:36:11.456657Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 for mediator 72057594046382081 tablet 72075186224037889 removed=1 2026-01-04T02:36:11.456686Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 for mediator 72057594046382081 acknowledged 2026-01-04T02:36:11.456727Z node 2 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:54] persistent tx 281474976715666 acknowledged 2026-01-04T02:36:11.457217Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:36:11.457297Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [35000 : 281474976715666] from 72075186224037889 at tablet 72075186224037889 send result to client [2:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:36:11.457350Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715666 state PreOffline TxInFly 0 2026-01-04T02:36:11.457425Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:36:11.457700Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 281474976715666, done: 0, blocked: 1 2026-01-04T02:36:11.498821Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 
281474976715666:0 2026-01-04T02:36:11.499134Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715666 datashard 72075186224037889 state PreOffline 2026-01-04T02:36:11.499187Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-04T02:36:11.499604Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:36:11.513460Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:36:11.513760Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-01-04T02:36:11.515814Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:36:11.516981Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-01-04T02:36:11.517434Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:74: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186224037889 2026-01-04T02:36:11.517505Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:19: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037889 2026-01-04T02:36:11.517612Z node 2 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037889.Leader.1) VolatileState: Running -> Stopped (Node 2) 2026-01-04T02:36:11.517764Z node 2 :HIVE DEBUG: tablet_info.cpp:518: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037889.Leader.1 gen 1) to node 2 2026-01-04T02:36:11.517900Z node 2 :HIVE DEBUG: tx__delete_tablet.cpp:67: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2 |88.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minstep/unittest |88.8%| [TA] $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |88.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} |88.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_minstep/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQueryPerf::DeleteOn+QueryService-UseSink >> THiveTest::TestHiveBalancerHighUsage [GOOD] >> THiveTest::TestHiveBalancerHighUsageAndColumnShards ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp/unittest >> KqpStreamLookup::ReadTableWithIndexDuringSplit [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:35:59.875071Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:36:00.006975Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:36:00.007525Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:36:00.007604Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:36:00.300031Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:00.300189Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:00.392954Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494156866519 != 1767494156866522 2026-01-04T02:36:00.419001Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:00.469103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:00.567249Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:36:00.882930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:00.897327Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:01.004450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:01.335867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1000:2848], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:01.336032Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1011:2853], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:01.336131Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:01.337309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1016:2858], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:01.337485Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:01.342596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:01.477148Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1014:2856], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:36:01.555363Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1073:2896] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:11.995488Z node 1 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3ds4xh5nd405knsq1s7y6a", SessionId: ydb://session/3?node_id=1&id=Mjc2YmM2MWUtNDlkM2Y5N2EtOWRhNjM2NDMtN2JjMzMzMTc=, Slow query, duration: 10.665687s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "UPSERT INTO `/Root/TestTable` (key, value) VALUES (0, 00), (1, 11), (2, 22), (3, 33), (4, 44), (5, 55), (6, 66), (7, 77), (8, 88), (9, 99), (10, 1010), (11, 1111), (12, 1212), (13, 1313), (14, 1414), (15, 1515), (16, 1616), (17, 1717), (18, 1818), (19, 1919), (20, 2020), (21, 2121), (22, 2222), (23, 2323), (24, 2424), (25, 2525), (26, 2626), (27, 2727), (28, 2828), (29, 2929), (30, 3030), (31, 3131), (32, 3232), (33, 3333), (34, 3434), (35, 3535), (36, 3636), (37, 3737), (38, 3838), (39, 3939), (40, 4040), (41, 4141), (42, 4242), (43, 4343), (44, 4444), (45, 4545), (46, 4646), (47, 4747), (48, 4848), (49, 4949), (50, 5050), (51, 5151), (52, 5252), (53, 5353), (54, 5454), (55, 5555), (56, 5656), (57, 5757), (58, 5858), (59, 5959), (60, 6060), (61, 6161), (62, 6262), (63, 6363), (64, 6464), (65, 6565), (66, 6666), (67, 6767), (68, 6868), (69, 6969), (70, 7070), (71, 7171), (72, 7272), (73, 7373), (74, 7474), (75, 7575), (76, 7676), (77, 7777), (78, 7878), (79, 7979), (80, 8080), (81, 8181), (82, 8282), (83, 8383), (84, 8484), (85, 8585), (86, 8686), (87, 8787), (88, 8888), (89, 8989), (90, 9090), (91, 9191), (92, 9292), (93, 9393), (94, 9494), (95, 9595), (96, 9696), (97, 9797), (98, 9898), (99, 9999), (100, 100100), (101, 101101), (102, 102102), (103, 103103), (104, 104104), (105, 105105), (106, 106106), (107, 107107), (108, 108108), (109, 109109), (110, 110110), (111, 111111), (112, 112112), (113, 113113), (114, 114114), (115, 115115), (116, 116116), (117, 117117), (118, 118118), (119, 119119), (120, 120120), (121, 121121), (122, 122122), (123, 123123), (124, 124124), (125, 125125), (126, 126126), (127, 127127), (128, 128128), (129, 129129), (130, 130130), (131, 131131), (132, 132132), (133, 133133), (134, 134134), (135, 135135), (136, 136136), (137, 137137), (138, 138138), (139, 139139), (140, 140140), (141, 141141), (142, 142142), (143, 143143), (144, 144144), (145, 145145), (146, 146146), (147, 147147), (148, 148148), (149, 149149), (150, 150150), (151, 151151), (152, 152152), (153, 153153), (154, 154154), (155, 155155), (156, 156156), (157, 157157), (158, 158158), (159, 159159), (160, 160160), (161, 161161), (162, 162162), (163, 163163), (164, 164164), (165, 165165), (166, 166166), (167, 167167), (168, 168168), (169, 169169), (170, 170170), (171, 171171), (172, 172172), (173, 173173), (174, 174174), (175, 175175), (176, 176176), (177, 177177), (178, 178178), (179, 179179), (180, 180180), (181, 181181), (182, 182182), (183, 183183), (184, 184184), (185, 185185), (186, 186186), (187, 187187), (188, 188188), (189, 189189), (190, 190190), (191, 191191), (192, 192192), (193, 193193), (194, 194194), (195, 195195), (196, 196196), (197, 197197), (198, 198198), (199, 199199), (200, 200200), (201, 201201), (202, 202202), (203, 203203), (204, 204204), 
(205, 205205), (206, 206206), (207, 207207), (208, 208208), (209, 209209), (210, 210210), (211, 211211), (212, 212212), (213, 213213), (214, 214214), (215, 215215), (216, 216216), (217, 217217), (218, 218218), (219, 219219), (220, 220220), (221, 221221), (222, 222222), (223, 223223), (224, 224224), (225, 225225), (226, 226226), (227, 227227), (228, 228228), (229, 229229), (230, 230230), (231, 231231), (232, 232232), (233, 233233), (234, 234234), (235, 235235), (236, 236236), (237, 237237), (238, 238238), (239, 239239), (240, 240240), (241, 241241), (242, 242242), (243, 243243), (244, 244244), (245, 245245), (246, 246246), (247, 247247), (248, 248248), (249, 249249), (250, 250250), (251, 251251), (252, 252252), (253, 253253), (254, 254254), (255, 255255), (256, 256256), (257, 257257), (258, 258258), (259, 259259), (260, 260260), (261, 261261), (262, 262262), (263, 263263), (264, 264264), (265, 265265), (266, 266266), (267, 267267), (268, 268268), (269, 269269), (270, 270270), (271, 271271), (272, 272272), (273, 273273), (274, 274274), (275, 275275), (276, 276276), (277, 277277), (278, 278278), (279, 279279), (280, 280280), (281, 281281), (282, 282282), (283, 283283), (284, 284284), (285, 285285), (286, 286286), (287, 287287), (288, 288288), (289, 289289), (290, 290290), (291, 291291), (292, 292292), (293, 293293), (294, 294294), (295, 295295), (296, 296296), (297, 297297), (298, 298298), (299, 299299), (300, 300300), (301, 301301), (302, 302302), (303, 303303), (304, 304304), (305, 305305), (306, 306306), (307, 307307), (308, 308308), (309, 309309), (310, 310310), (311, 311311), (312, 312312), (313, 313313), (314, 314314), (315, 315315), (316, 316316), (317, 317317), (318, 318318), (319, 319319), (320, 320320), (321, 321321), (322, 322322), (323, 323323), (324, 324324), (325, 325325), (326, 326326), (327, 327327), (328, 328328), (329, 329329), (330, 330330), (331, 331331), (332, 332332), (333, 333333), (334, 334334), (335, 335335), (336, 336336), (337, 337337), (338, 338338), (339, 339339), (340, 340340), (341, 341341), (342, 342342), (343, 343343), (344, 344344), (345, 345345), (346, 346346), (347, 347347), (348, 348348), (349, 349349), (350, 350350), (351, 351351), (352, 352352), (353, 353353), (354, 354354), (355, 355355), (356, 356356), (357, 357357), (358, 358358), (359, 359359), (360, 360360), (361, 361361), (362, 362362), (363, 363363), (364, 364364), (365, 365365), (366, 366366), (367, 367367), (368, 368368), (369, 369369), (370, 370370), (371, 371371), (372, 372372), (373, 373373), (374, 374374), (375, 375375), (376, 376376), (377, 377377), (378, 378378), (379, 379379), (380, 380380), (381, 381381), (382, 382382), (383, 383383), (384, 384384), (385, 385385), (386, 386386), (387, 387387), (388, 388388), (389, 389389), (390, 390390), (391, 391391), (392, 392392), (393, 393393), (394, 394394), (395, 395395), (396, 396396), (397, 397397), (398, 398398), (399, 399399), (400, 400400), (401, 401401), (402, 402402), (403, 403403), (404, 404404), (405, 405405), (406, 406406), (407, 407407), (408, 408408), (409, 409409), (410, 410410), (411, 411411), (412, 412412), (413, 413413), (414, 414414), (415, 415415), (416, 416416), (417, 417417), (418, 418418), (419, 419419), (420, 420420), (421, 421421), (422, 422422), (423, 423423), (424, 424424), (425, 425425), (426, 426426), (427, 427427), (428, 428428), (429, 429429), (430, 430430), (431, 431431), (432, 432432), (433, 433433), (434, 434434), (435, 435435), (436, 436436), (437, 437437), (438, 438438), (439, 439439), (440, 440440), (441, 441441), 
(442, 442442), (443, 443443), (444, 444444), (445, 445445), (446, 446446), (447, 447447), (448, 448448), (449, 449449), (450, 450450), (451, 451451), (452, 452452), (453, 453453), (454, 454454), (455, 455455), (456, 456456), (457, 457457), (458, 458458), (459, 459459), (460, 460460), (461, 461461), (462, 462462), (463, 463463), (464, 464464), (465, 465465), (466, 466466), (467, 467467), (468, 468468), (469, 469469), (470, 470470), (471, 471471), (472, 472472), (473, 473473), (474, 474474), (475, 475475), (476, 476476), (477, 477477), (478, 478478), (479, 479479), (480, 480480), (481, 481481), (482, 482482), (483, 483483), (484, 484484), (485, 485485), (486, 486486), (487, 487487), (488, 488488), (489, 489489), (490, 490490), (491, 491491), (492, 492492), (493, 493493), (494, 494494), (495, 495495), (496, 496496), (497, 497497), (498, 498498), (499, 499499), (500, 500500), (501, 501501), (502, 502502), (503, 503503), (504, 504504), (505, 505505), (506, 506506), (507, 507507), (508, 508508), (509, 509509), (510, 510510), (511, 511511), (512, 512512), (513, 513513), (514, 514514), (515, 515515), (516, 516516), (517, 517517), (518, 518518), (519, 519519), (520, 520520), (521, 521521), (522, 522522), (523, 523523), (524, 524524), (525, 525525), (526, 526526), (527, 527527), (528, 528528), (529, 529529), (530, 530530), (531, 531531), (532, 532532), (533, 533533), (534, 534534), (535, 535535), (536, 536536), (537, 537537), (538, 538538), (539, 539539), (540, 540540), (541, 541541), (542, 542542), (543, 543543), (544, 544544), (545, 545545), (546, 546546), (547, 547547), (548, 548548), (549, 549549), (550, 550550), (551, 551551), (552, 552552), (553, 553553), (554, 554554), (555, 555555), (556, 556556), (557, 557557), (558, 558558), (559, 559559), (560, 560560), (561, 561561), (562, 562562), (563, 563563), (564, 564564), (565, 565565), (566, 566566), (567, 567567), (568, 568568), (569, 569569), (570, 570570), (571, 571571), (572, 572572), (573, 573573), (574, 574574), (575, 575575), (576, 576576), (577, 577577), (578, 578578), (579, 579579), (580, 580580), (581, 581581), (582, 582582), (583, 583583), (584, 584584), (585, 585585), (586, 586586), (587, 587587), (588, 588588), (589, 589589), (590, 590590), (591, 591591), (592, 592592), (593, 593593), (594, 594594), (595, 595595), (596, 596596), (597, 597597), (598, 598598), (599, 599599), (600, 600600), (601, 601601), (602, 602602), (603, 603603), (604, 604604), (605, 605605), (606, 606606), (607, 607607), (608, 608608), (609, 609609), (610, 610610), (611, 611611), (612, 612612), (613, 613613), (614, 614614), (615, 615615), (616, 616616), (617, 617617), (618, 618618), (619, 619619), (620, 620620), (621, 621621), (622, 622622), (623, 623623), (624, 624624), (625, 625625), (626, 626626), (627, 627627), (628, 628628), (629, 629629), (630, 630630), (631, 631631), (632, 632632), (633, 633633), (634, 634634), (635, 635635), (636, 636636), (637, 637637), (638, 638638), (639, 639639), (640, 640640), (641, 641641), (642, 642642), (643, 643643), (644, 644644), (645, 645645), (646, 646646), (647, 647647), (648, 648648), (649, 649649), (650, 650650), (651, 651651), (652, 652652), (653, 653653), (654, 654654), (655, 655655), (656, 656656), (657, 657657), (658, 658658), (659, 659659), (660, 660660), (661, 661661), (662, 662662), (663, 663663), (664, 664664), (665, 665665), (666, 666666), (667, 667667), (668, 668668), (669, 669669), (670, 670670), (671, 671671), (672, 672672), (673, 673673), (674, 674674), (675, 675675), (676, 676676), (677, 677677), (678, 678678), 
(679, 679679), (680, 680680), (681, 681681), (682, 682682), (683, 683683), (684, 684684), (685, 685685), (686, 686686), (687, 687687), (688, 688688), (689, 689689), (690, 690690), (691, 691691), (692, 692692), (693, 693693), (694, 694694), (695, 695695), (696, 696696), (697, 697697), (698, 698698), (699, 699699), (700, 700700), (701, 701701), (702, 702702), (703, 703703), (704, 704704), (705, 705705), (706, 706706), (707, 707707), (708, 708708), (709, 709709), (710, 710710), (711, 711711), (712, 712712), (713, 713713), (714, 714714), (715, 715715), (716, 716716), (717, 717717), (718, 718718), (719, 719719), (720, 720720), (721, 721721), (722, 722722), (723, 723723), (724, 724724), (725, 725725), (726, 726726), (727, 727727), (728, 728728), (729, 729729), (730, 730730), (731, 731731), (732, 732732), (733, 733733), (734, 734734), (735, 735735), (736, 736736), (737, 737737), (738, 738738), (739, 739739), (740, 740740), (741, 741741), (742, 742742), (743, 743743), (744, 744744), (745, 745745), (746, 746746), (747, 747747), (748, 748748), (749, 749749), (750, 750750), (751, 751751), (752, 752752), (753, 753753), (754, 754754), (755, 755755), (756, 756756), (757, 757757), (758, 758758), (759, 759759), (760, 760760), (761, 761761), (762, 762762), (763, 763763), (764, 764764), (765, 765765), (766, 766766), (767, 767767), (768, 768768), (769, 769769), (770, 770770), (771, 771771), (772, 772772), (773, 773773), (774, 774774), (775, 775775), (776, 776776), (777, 777777), (778, 778778), (779, 779779), (780, 780780), (781, 781781), (782, 782782), (783, 783783), (784, 784784), (785, 785785), (786, 786786), (787, 787787), (788, 788788), (789, 789789), (790, 790790), (791, 791791), (792, 792792), (793, 793793), (794, 794794), (795, 795795), (796, 796796), (797, 797797), (798, 798798), (799, 799799), (800, 800800), (801, 801801), (802, 802802), (803, 803803), (804, 804804), (805, 805805), (806, 806806), (807, 807807), (808, 808808), (809, 809809), (810, 810810), (811, 811811), (812, 812812), (813, 813813), (814, 814814), (815, 815815), (816, 816816), (817, 817817), (818, 818818), (819, 819819), (820, 820820), (821, 821821), (822, 822822), (823, 823823), (824, 824824), (825, 825825), (826, 826826), (827, 827827), (828, 828828), (829, 829829), (830, 830830), (831, 831831), (832, 832832), (833, 833833), (834, 834834), (835, 835835), (836, 836836), (837, 837837), (838, 838838), (839, 839839), (840, 840840), (841, 841841), (842, 842842), (843, 843843), (844, 844844), (845, 845845), (846, 846846), (847, 847847), (848, 848848), (849, 849849), (850, 850850), (851, 851851), (852, 852852), (853, 853853), (854, 854854), (855, 855855), (856, 856856), (857, 857857), (858, 858858), (859, 859859), (860, 860860), (861, 861861), (862, 862862), (863, 863863), (864, 864864), (865, 865865), (866, 866866), (867, 867867), (868, 868868), (869, 869869), (870, 870870), (871, 871871), (872, 872872), (873, 873873), (874, 874874), (875, 875875), (876, 876876), (877, 877877), (878, 878878), (879, 879879), (880, 880880), (881, 881881), (882, 882882), (883, 883883), (884, 884884), (885, 885885), (886, 886886), (887, 887887), (888, 888888), (889, 889889), (890, 890890), (891, 891891), (892, 892892), (893, 893893), (894, 894894), (895, 895895), (896, 896896), (897, 897897), (898, 898898), (899, 899899), (900, 900900), (901, 901901), (902, 902902), (903, 903903), (904, 904904), (905, 905905), (906, 906906), (907, 907907), (908, 908908), (909, 909909), (910, 910910), (911, 911911), (912, 912912), (913, 913913), (914, 914914), (915, 915915), 
(916, 916916), (917, 917917), (918, 918918), (919, 919919), (920, 920920), (921, 921921), (922, 922922), (923, 923923), (924, 924924), (925, 925925), (926, 926926), (927, 927927), (928, 928928), (929, 929929), (930, 930930), (931, 931931), (932, 932932), (933, 933933), (934, 934934), (935, 935935), (936, 936936), (937, 937937), (938, 938938), (939, 939939), (940, 940940), (941, 941941), (942, 942942), (943, 943943), (944, 944944), (945, 945945), (946, 946946), (947, 947947), (948, 948948), (949, 949949), (950, 950950), (951, 951951), (952, 952952), (953, 953953), (954, 954954), (955, 955955), (956, 956956), (957, 957957), (958, 958958), (959, 959959), (960, 960960), (961, 961961), (962, 962962), (963, 963963), (964, 964964), (965, 965965), (966, 966966), (967, 967967), (968, 968968), (969, 969969), (970, 970970), (971, 971971), (972, 972972), (973, 973973), (974, 974974), (975, 975975), (976, 976976), (977, 977977), (978, 978978), (979, 979979), (980, 980980), (981, 981981), (982, 982982), (983, 983983), (984, 984984), (985, 985985), (986, 986986), (987, 987987), (988, 988988), (989, 989989), (990, 990990), (991, 991991), (992, 992992), (993, 993993), (994, 994994), (995, 995995), (996, 996996), (997, 997997), (998, 998998), (999, 999999), (10000, 10000);", parameters: 0b Captured TEvDataShard::TEvRead from KQP_SOURCE_READ_ACTOR to TX_DATASHARD_ACTOR |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp/unittest |88.8%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |88.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} |88.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_kqp/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Table [GOOD] >> THiveTest::TestFollowerCompatability2 [GOOD] >> THiveTest::TestFollowerCompatability3 >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Table [GOOD] >> KqpQueryPerf::RangeRead-QueryService >> KqpQueryPerf::IndexUpsert+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpsert+QueryService+UseSink >> Channels20::IcChannelBackPressure [GOOD] >> Channels20::CaIntegrationTrivial >> KqpQueryPerf::Delete-QueryService-UseSink >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Query >> KqpQueryPerf::Replace-QueryService+UseSink [GOOD] >> KqpQueryPerf::Update-QueryService-UseSink [GOOD] >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Query >> KqpQueryPerf::RangeRead+QueryService >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] >> KqpQueryPerf::ComputeLength+QueryService [GOOD] >> KqpQueryPerf::ComputeLength-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3345, MsgBus: 25279 2026-01-04T02:36:08.947018Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329650411073703:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:08.947084Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:09.253969Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-01-04T02:36:09.269321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:09.269416Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:09.302418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:09.354381Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:09.356103Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329650411073675:2081] 1767494168944280 != 1767494168944283 2026-01-04T02:36:09.455316Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:09.455342Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:09.455351Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:09.455413Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:09.511473Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:09.960312Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:09.981467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:10.033723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:10.227897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:10.385770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:10.462680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:12.366976Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329667590944735:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:12.367074Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:12.369641Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329667590944745:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:12.369726Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:12.757524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:12.790484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:12.820665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:12.855962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:12.892331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:12.929996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.000520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.080418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.170307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329671885912921:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.170415Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.171076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329671885912926:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.171142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329671885912927:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.171280Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.176091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:13.192855Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329671885912930:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:13.283745Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329671885912981:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:13.948126Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329650411073703:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:13.949794Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_2_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 17518, MsgBus: 5379 2026-01-04T02:36:09.247111Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329654006843148:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:09.249870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:09.499612Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:09.521729Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:09.521829Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:09.564497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:09.614940Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:09.615952Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329654006843117:2081] 1767494169245409 != 1767494169245412 2026-01-04T02:36:09.670834Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:09.688127Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:09.688156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:09.688167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:09.688310Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:10.112989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:10.118440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:10.175854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:10.264395Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:10.381966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:10.572886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:10.674546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:12.645790Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329666891746876:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:12.645970Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:12.646344Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329666891746886:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:12.646400Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:12.983694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.031877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.102410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.141162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.213108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.289841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.339888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.402727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.530179Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329671186715059:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.530260Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.530570Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329671186715064:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.530611Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329671186715065:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.530696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.534785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:13.548514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-04T02:36:13.548748Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329671186715068:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:13.615822Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329671186715119:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:14.247710Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329654006843148:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:14.247786Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexReplace+QueryService+UseSink >> TxUsage::Sinks_Oltp_WriteToTopics_2_Table [GOOD] >> THiveTest::TestFollowerCompatability3 [GOOD] >> THiveTest::TestGetStorageInfo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26354, MsgBus: 19735 2026-01-04T02:36:10.007739Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329658353836448:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:10.007982Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:10.305972Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:10.309019Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:10.309093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:10.359823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:10.474176Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329654058869021:2081] 1767494169970881 != 1767494169970884 2026-01-04T02:36:10.476058Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:10.484029Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:10.484057Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:10.484075Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:10.484218Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:10.528613Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:11.000994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:11.006033Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:11.010667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:11.059644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:11.209278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:11.388620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:11.460985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.400132Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329671238740078:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.400253Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.400654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329671238740088:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.400696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.820041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.852505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.888725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.927197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.982141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:14.047419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:14.082185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:14.129516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:14.219474Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329675533708257:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:14.219550Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:14.220015Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329675533708262:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:14.220053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329675533708263:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:14.220170Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:14.224228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:14.241997Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329675533708266:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:14.335868Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329675533708317:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:15.002403Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329658353836448:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:15.002498Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> BasicUsage::ConflictingWrites [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_2_Query >> THiveTest::TestGetStorageInfo [GOOD] >> THiveTest::TestExternalBoot >> KqpQueryPerf::MultiDeleteFromTable+QueryService-UseSink [GOOD] >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink >> LocalPartition::WithoutPartitionWithSplit [GOOD] >> TxUsage::ReadRuleGeneration >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Table ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPointsAndRanges 2026-01-04 02:36:06,518 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-04 02:36:06,655 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 421529 58.9M 58.2M 32.9M test_tool run_ut @/home/runner/.ya/build/build_root/c5zo/0039e7/ydb/core/tx/datashard/ut_order/test-results/unittest/testing_out_stuff/chunk28/testing_out_stuff/test_tool.args 421868 973M 966M 927M └─ ydb-core-tx-datashard-ut_order --trace-path-append /home/runner/.ya/build/build_root/c5zo/0039e7/ydb/core/tx/datashard/ut_order/test-results/unittest/testing_out_stuff/c Test command err: 2026-01-04T02:26:09.551198Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:26:09.553276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:26:09.553333Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:26:09.563268Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:26:09.563606Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:26:09.563869Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:26:09.613929Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:26:09.622226Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:26:09.622319Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:26:09.623775Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: 
LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:26:09.623911Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:26:09.623973Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:26:09.624332Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:26:09.625837Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:26:09.625918Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:26:09.700848Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:26:09.756009Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:26:09.756218Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:26:09.756340Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:26:09.756400Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:26:09.756459Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:26:09.756503Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:26:09.756728Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:09.756775Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:09.757047Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:26:09.757171Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:26:09.757241Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:26:09.757276Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:26:09.757308Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:26:09.757356Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:26:09.757393Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:26:09.757435Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:26:09.757481Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:26:09.757574Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:09.757605Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:09.757649Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:26:09.768201Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:26:09.768298Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:26:09.768406Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:26:09.768575Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:26:09.768647Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:26:09.768692Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:26:09.768764Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:26:09.768819Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:26:09.768855Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:26:09.768889Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:26:09.769177Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:26:09.769216Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:26:09.769269Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:26:09.769303Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:26:09.769354Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:26:09.769394Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:26:09.769433Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:26:09.769478Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:26:09.770389Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:26:09.787904Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:26:09.787978Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:26:09.788020Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:26:09.788072Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:26:09.788175Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:26:09.788760Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:09.788819Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:26:09.788860Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:26:09.789016Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:26:09.789057Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:26:09.789189Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:26:09.789234Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:26:09.789282Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:26:09.848573Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:26:09.852272Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:26:09.852358Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:26:09.852619Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:09.852664Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:26:09.852724Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:26:09.852775Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:26:09.852825Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:26:09.852863Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:26:09.852897Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... 
9871321 } } Transactions { TxId: 247 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 248 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 249 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 250 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 251 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 252 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 253 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 254 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 255 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 256 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 257 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 258 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 259 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 260 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 261 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 262 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 263 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 264 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 265 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 266 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 267 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 268 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 269 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 270 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 271 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 272 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 273 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 274 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 275 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 276 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 277 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 278 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 279 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 280 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 281 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 282 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 283 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 284 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 285 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 286 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 287 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 288 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 289 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 290 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 291 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 292 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 293 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 294 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 295 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 296 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 297 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 298 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 299 AckTo { RawX1: 104 RawX2: 
17179871321 } } Transactions { TxId: 300 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 301 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 302 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 303 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 304 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 305 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 306 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 307 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 308 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 309 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 310 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 311 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 312 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 313 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 314 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 315 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 316 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 317 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 318 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 319 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 320 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 321 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 322 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 323 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 324 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 325 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 326 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 327 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 328 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 329 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 330 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 331 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 332 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 333 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 334 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 335 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 336 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 337 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 338 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 339 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 340 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 341 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 342 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 343 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 344 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 345 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 346 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 347 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 348 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 349 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 350 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 351 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 352 AckTo { RawX1: 104 RawX2: 
17179871321 } } Transactions { TxId: 353 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 354 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 355 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 356 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 357 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 358 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 359 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 360 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 361 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 362 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 363 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 364 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 365 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 366 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 367 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 368 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 369 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 370 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 371 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 372 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 373 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 374 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 375 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 376 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 377 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 378 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 379 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 380 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 381 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 382 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 383 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 384 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 385 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 386 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 387 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 388 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 389 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 390 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 391 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 392 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 393 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 394 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 395 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 396 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 397 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 398 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 399 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 400 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 401 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 402 AckTo { RawX1: 104 RawX2: 17179871321 } } Transactions { TxId: 403 AckTo { RawX1: 104 RawX2: 17179871321 } } Step: 1000004 MediatorID: 0 TabletID: 9437185 } Traceback (most recent call last): File 
"library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/c5zo/0039e7/ydb/core/tx/datashard/ut_order/test-results/unittest/testing_out_stuff/chunk28/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/c5zo/0039e7/ydb/core/tx/datashard/ut_order/test-results/unittest/testing_out_stuff/chunk28/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) >> KqpQueryPerf::Insert+QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> BasicUsage::ConflictingWrites [GOOD] Test command err: 2026-01-04T02:33:02.200316Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328851821908812:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:02.200453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:02.293726Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:02.305279Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:33:02.648051Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:02.648229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:02.678396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:02.728715Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:02.729564Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:02.845559Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001967/r3tmp/yandexn4R2CC.tmp 2026-01-04T02:33:02.845581Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
/home/runner/.ya/build/build_root/c5zo/001967/r3tmp/yandexn4R2CC.tmp 2026-01-04T02:33:02.845748Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001967/r3tmp/yandexn4R2CC.tmp 2026-01-04T02:33:02.845855Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:02.876501Z INFO: TTestServer started on Port 18294 GrpcPort 13783 2026-01-04T02:33:03.027460Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:13783 2026-01-04T02:33:03.090634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:03.193900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:03.258441Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:05.529267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328864706811702:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:05.529277Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328864706811688:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:05.529528Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:05.531387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328864706811714:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:05.531490Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:05.535278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:05.556150Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328864706811713:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-04T02:33:05.788405Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328864706811779:2645] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:05.818231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:05.859319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:05.934053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:05.966635Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328864706811787:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:33:05.969271Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=N2JlMjVjNzQtZGI4ODU5OTctZWMwYzQ3M2EtZjhkOWE3YWQ=, ActorId: [1:7591328864706811680:2329], ActorState: ExecuteState, LegacyTraceId: 01ke3dks7petsw4y6tzxbavexd, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:33:05.972215Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7591328869001779369:2822] 2026-01-04T02:33:07.200603Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328851821908812:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:07.200669Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-04T02:33:12.230109Z :CreateTopicWithCustomName INFO: TTopicSdkTestSetup started 2026-01-04T02:33:12.242986Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-04T02:33:12.256478Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7591328894771583391:2933] connected; active server actors: 1 2026-01-04T02:33:12.256867Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic-1767493992] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-01-04T02:33:12.272969Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic-1767493992] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-04T02:33:12.273126Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic-1767493992] BALANCER INIT DONE for test-topic-1767493992: (0, 72075186224037892) 2026-01-04T02:33:12.275764Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:33:12.276430Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:33:12.276603Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-04T02:33:12.276647Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:33:12.276694Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72075186224037892] doesn't have tx writes info 2026-01-04T02:33:12.276841Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 720751862240 ... 8 requestId: cookie: 9 2026-01-04T02:36:17.302541Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0] PartitionId [0] Generation [11] Write session got write response: acks { seq_no: 98 written { offset: 97 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 47000000 } max_queue_wait_time { nanos: 47000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-01-04T02:36:17.301609Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'test-topic' partition: 0 messageNo: 19 requestId: cookie: 10 2026-01-04T02:36:17.301685Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2026-01-04T02:36:17.302565Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0] PartitionId [0] Generation [11] OnAck: seqNo=98, txId=? 2026-01-04T02:36:17.301716Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2026-01-04T02:36:17.302587Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0] PartitionId [0] Generation [11] Write session: acknoledged message 98 2026-01-04T02:36:17.301740Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NKikimr::TEvPersQueue::TEvResponse 2026-01-04T02:36:17.302094Z node 14 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. 
Tablet '72075186224037892' partition 0 offset 97 partno 0 count 3 parts 0 suffix '63' size 380 2026-01-04T02:36:17.305370Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0] PartitionId [0] Generation [11] Write session: OnReadDone gRpcStatusCode: 0 2026-01-04T02:36:17.305447Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0] PartitionId [0] Generation [11] Write session got write response: acks { seq_no: 99 written { offset: 98 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 47000000 } max_queue_wait_time { nanos: 47000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-01-04T02:36:17.305464Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0] PartitionId [0] Generation [11] OnAck: seqNo=99, txId=? 2026-01-04T02:36:17.305480Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0] PartitionId [0] Generation [11] Write session: acknoledged message 99 2026-01-04T02:36:17.305736Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0] PartitionId [0] Generation [11] Write session: OnReadDone gRpcStatusCode: 0 2026-01-04T02:36:17.305862Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0] PartitionId [0] Generation [11] Write session got write response: acks { seq_no: 100 written { offset: 99 } } write_statistics { persisting_time { nanos: 2000000 } min_queue_wait_time { nanos: 47000000 } max_queue_wait_time { nanos: 47000000 } partition_quota_wait_time { } topic_quota_wait_time { } } 2026-01-04T02:36:17.305892Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0] PartitionId [0] Generation [11] OnAck: seqNo=100, txId=? 
2026-01-04T02:36:17.305917Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0] PartitionId [0] Generation [11] Write session: acknoledged message 100 2026-01-04T02:36:17.326613Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:17.326677Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:17.326700Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:17.326736Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:17.326763Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:17.341916Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0] PartitionId [0] Generation [11] Write session will now close 2026-01-04T02:36:17.342040Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0] PartitionId [0] Generation [11] Write session: aborting 2026-01-04T02:36:17.342774Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0] PartitionId [0] Generation [11] Write session: gracefully shut down, all writes complete 2026-01-04T02:36:17.343229Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0] PartitionId [0] Generation [11] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2026-01-04T02:36:17.343271Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2026-01-04T02:36:17.343320Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0] PartitionId [0] Generation [11] Write session is aborting and will not restart 2026-01-04T02:36:17.345864Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 13 sessionId: test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0 grpc read done: success: 0 data: 2026-01-04T02:36:17.345904Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 13 sessionId: test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0 grpc read failed 2026-01-04T02:36:17.345928Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 13 sessionId: test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0 grpc closed 2026-01-04T02:36:17.345943Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 13 sessionId: test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0 is DEAD 2026-01-04T02:36:17.346712Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:36:17.350430Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [14:7591329677617463074:2684] destroyed 2026-01-04T02:36:17.350495Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-04T02:36:17.350545Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:17.350568Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:17.350593Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:17.350629Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:17.350651Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:17.353089Z node 14 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:149: new Describe topic request 2026-01-04T02:36:17.353330Z node 14 :PQ_READ_PROXY DEBUG: schema_actors.cpp:1197: Describe topic actor for path test-topic 2026-01-04T02:36:17.356168Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037892] server connected, pipe [14:7591329686207397722:2700], now have 1 active actors on pipe 2026-01-04T02:36:17.356255Z node 14 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][test-topic] pipe [14:7591329686207397723:2701] connected; active server actors: 1 2026-01-04T02:36:17.357091Z node 14 :PERSQUEUE DEBUG: partition.cpp:1025: [72075186224037892][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 2147483647 LifetimeSeconds: 3600 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } 2026-01-04T02:36:17.358106Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [14:7591329686207397722:2700] destroyed 2026-01-04T02:36:17.362769Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d876c207-7cd9b201-a9a633c5-8c0cefa1_0] PartitionId [0] Generation [11] Write session: destroy 2026-01-04T02:36:17.426572Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:17.426616Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:17.426639Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:17.426673Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:17.426700Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:17.527253Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:17.527300Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:17.527322Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:17.527359Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:17.527385Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:17.627697Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:17.628030Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:17.628059Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:17.628095Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:17.628122Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> THiveTest::TestExternalBoot [GOOD] >> THiveTest::TestExternalBootWhenLocked |88.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexInsert+QueryService-UseSink >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService >> KqpQueryPerf::IndexUpdateOn+QueryService-UseSink >> KqpQueryPerf::DeleteOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::DeleteOn+QueryService+UseSink >> THiveTest::TestHiveBalancerHighUsageAndColumnShards [GOOD] >> THiveTest::TestHiveBalancerOneTabletHighUsage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 17243, MsgBus: 19751 2026-01-04T02:36:13.060164Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329671323094441:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:13.060247Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:13.354070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:13.372991Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:13.373114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:13.447642Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-04T02:36:13.453542Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329671323094395:2081] 1767494173053267 != 1767494173053270 2026-01-04T02:36:13.464721Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:13.561395Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:13.587414Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:13.587433Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:13.587438Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:13.587505Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:14.056740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:14.073200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:14.080457Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:14.135964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:14.291669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:14.436738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:14.500523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:16.619757Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329684207998154:2399], DatabaseId: /Root, 
PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:16.619896Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:16.620393Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329684207998163:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:16.620445Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:17.019799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.058328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.091740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.133982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.190290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.222983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.268135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.318585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.415426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329688502966336:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:17.415538Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:17.416069Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329688502966341:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:17.416116Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329688502966342:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:17.416447Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:17.420591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:17.435495Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329688502966345:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:17.537472Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329688502966396:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:18.059740Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329671323094441:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:18.059826Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TxUsage::WriteToTopic_Demo_20_RestartAfterCommit_Query [GOOD] >> THiveTest::TestExternalBootWhenLocked [GOOD] >> THiveTest::TestExternalBootCounters >> TxUsage::WriteToTopic_Demo_47_Query [GOOD] >> KqpQueryPerf::RangeLimitRead-QueryService >> KqpQueryPerf::MultiRead+QueryService >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Table [GOOD] >> KqpQueryPerf::RangeRead-QueryService [GOOD] >> TxUsage::Write_And_Read_Small_Messages_1 >> KqpQueryPerf::Delete-QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete-QueryService+UseSink >> THiveTest::TestExternalBootCounters [GOOD] >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Query >> KqpQueryPerf::RangeRead+QueryService [GOOD] >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Query >> KqpQueryPerf::Upsert-QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 32582, MsgBus: 17651 2026-01-04T02:36:15.872948Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329678372145673:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:15.873049Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:16.153925Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:16.163522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:16.163674Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:16.231374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:16.289164Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:16.427128Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:16.456897Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2026-01-04T02:36:16.456939Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:16.456950Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:16.457030Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:16.886826Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:17.009782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:17.098834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.239815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.440185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.513119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:19.516272Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329695552016682:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:19.516412Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:19.519225Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329695552016692:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:19.519327Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:19.845669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:19.878668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:19.916876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:19.955263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.005348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.060066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.102621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.170766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.275290Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329699846984859:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:20.275392Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:20.275725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329699846984864:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:20.275791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329699846984865:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:20.275918Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:20.280251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:20.295032Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329699846984868:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:20.392585Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329699846984919:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:20.873961Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329678372145673:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:20.874014Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::ComputeLength-QueryService [GOOD] >> Channels20::CaIntegrationTrivial [GOOD] >> Channels20::CaIntegrationAgg ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestExternalBootCounters [GOOD] Test command err: 2026-01-04T02:35:01.348067Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:35:01.378544Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:35:01.378850Z node 3 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 3 PDiskId# 1 Path# "SectorMap:2:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:35:01.379771Z node 3 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:35:01.380133Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:35:01.381212Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:73:2076] ControllerId# 72057594037932033 2026-01-04T02:35:01.381255Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:35:01.381359Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-04T02:35:01.381480Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:35:01.392671Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:35:01.392735Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:35:01.399312Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:80:2080] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.399490Z 
node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:81:2081] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.399631Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:82:2082] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.399764Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:83:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.399926Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:84:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.400091Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:85:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.400222Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:86:2086] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.400257Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:35:01.400362Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [3:73:2076] 2026-01-04T02:35:01.400398Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [3:73:2076] 2026-01-04T02:35:01.400448Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-04T02:35:01.400513Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-04T02:35:01.401215Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-04T02:35:01.401354Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:35:01.404557Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:35:01.404715Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:35:01.405087Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:35:01.405376Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-04T02:35:01.414814Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-04T02:35:01.414934Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:35:01.415881Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# 
[1:97:2078] ControllerId# 72057594037932033 2026-01-04T02:35:01.415918Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:35:01.415994Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-04T02:35:01.416133Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:35:01.429710Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:35:01.429765Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:35:01.432224Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:105:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.432416Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:106:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.432563Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:107:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.432696Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:108:2086] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.432812Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:109:2087] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.432937Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:110:2088] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.433063Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:111:2089] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.433092Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:35:01.433177Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:97:2078] 2026-01-04T02:35:01.433208Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:97:2078] 2026-01-04T02:35:01.433296Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-04T02:35:01.433346Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-04T02:35:01.434267Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-04T02:35:01.434344Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:35:01.437482Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:35:01.437652Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# 
DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:35:01.438037Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:35:01.438257Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:35:01.439214Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:122:2077] ControllerId# 72057594037932033 2026-01-04T02:35:01.439266Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:35:01.439337Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-04T02:35:01.439489Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:35:01.449171Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:35:01.449233Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:35:01.451303Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:129:2081] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.451510Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:130:2082] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.451683Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:131:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.451842Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:132:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.452051Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:133:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.452237Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:134:2086] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.452392Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:135:2087] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.452432Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:35:01.452503Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] :: ... 
42: [cd7cbca5baaa92c4] Id# [72057594037927937:2:13:0:0:105:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:36:23.583391Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [cd7cbca5baaa92c4] restore Id# [72057594037927937:2:13:0:0:105:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-04T02:36:23.583491Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [cd7cbca5baaa92c4] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:13:0:0:105:1] Marker# BPG33 2026-01-04T02:36:23.583563Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [cd7cbca5baaa92c4] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:13:0:0:105:1] Marker# BPG32 2026-01-04T02:36:23.583755Z node 68 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [68:83:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:13:0:0:105:1] FDS# 105 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:36:23.593751Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [cd7cbca5baaa92c4] received {EvVPutResult Status# OK ID# [72057594037927937:2:13:0:0:105:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 28 } Cost# 80826 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 29 }}}} from# [0:1:0:0:0] Marker# BPP01 2026-01-04T02:36:23.593953Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [cd7cbca5baaa92c4] Result# TEvPutResult {Id# [72057594037927937:2:13:0:0:105:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2026-01-04T02:36:23.594059Z node 68 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [cd7cbca5baaa92c4] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:13:0:0:105:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-04T02:36:23.594339Z node 68 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:13:0:0:105:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-04T02:36:23.594536Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} commited cookie 1 for step 13 2026-01-04T02:36:23.594766Z node 68 :HIVE NOTICE: tx__unlock_tablet.cpp:91: HIVE#72057594037927937 THive::TTxUnlockTabletExecution::Complete TabletId: 72075186224037889 SideEffects: {Notifications: 0x1004020F [68:483:2316] NKikimrHive.TEvLockTabletExecutionLost TabletID: 72075186224037889 Reason: LOCK_LOST_REASON_UNLOCKED,0x1004020E [68:483:2316] NKikimrHive.TEvUnlockTabletExecutionResult TabletID: 72075186224037889 Status: OK StatusMessage: ""} 2026-01-04T02:36:23.595361Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [68:510:2342] 2026-01-04T02:36:23.595427Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [68:510:2342] 2026-01-04T02:36:23.595547Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [68:331:2201] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:36:23.595625Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 68 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 
72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [68:331:2201] 2026-01-04T02:36:23.595708Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [68:510:2342] 2026-01-04T02:36:23.595783Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [68:510:2342] 2026-01-04T02:36:23.595856Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [68:510:2342] 2026-01-04T02:36:23.595933Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [68:510:2342] 2026-01-04T02:36:23.596087Z node 68 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [68:510:2342] 2026-01-04T02:36:23.596301Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [68:510:2342] 2026-01-04T02:36:23.596373Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [68:510:2342] 2026-01-04T02:36:23.596434Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [68:510:2342] 2026-01-04T02:36:23.596508Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [68:510:2342] 2026-01-04T02:36:23.596570Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [68:510:2342] 2026-01-04T02:36:23.596652Z node 68 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [68:483:2316] EventType# 268697612 2026-01-04T02:36:23.597222Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{30, NKikimr::NHive::TTxUpdateTabletMetrics} queued, type NKikimr::NHive::TTxUpdateTabletMetrics 2026-01-04T02:36:23.597323Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{30, NKikimr::NHive::TTxUpdateTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:36:23.597748Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{30, NKikimr::NHive::TTxUpdateTabletMetrics} hope 1 -> done Change{22, redo 182b alter 0b annex 0, ~{ 1, 4 } -{ }, 0 gb} 2026-01-04T02:36:23.597860Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:14} Tx{30, NKikimr::NHive::TTxUpdateTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:36:23.598379Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:15} Tx{31, NKikimr::NHive::TTxProcessTabletMetrics} queued, type NKikimr::NHive::TTxProcessTabletMetrics 2026-01-04T02:36:23.598468Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:15} Tx{31, NKikimr::NHive::TTxProcessTabletMetrics} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:36:23.598728Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:15} Tx{31, NKikimr::NHive::TTxProcessTabletMetrics} hope 1 -> done Change{23, redo 514b alter 0b annex 0, ~{ 16 } -{ }, 0 gb} 2026-01-04T02:36:23.598822Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:15} Tx{31, NKikimr::NHive::TTxProcessTabletMetrics} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:36:23.610716Z node 68 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [667a4d0d3c364da9] bootstrap ActorId# [68:513:2345] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:14:0:0:321:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# 
BPP13 2026-01-04T02:36:23.610908Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [667a4d0d3c364da9] Id# [72057594037927937:2:14:0:0:321:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:36:23.611002Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [667a4d0d3c364da9] restore Id# [72057594037927937:2:14:0:0:321:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-04T02:36:23.611091Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [667a4d0d3c364da9] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:14:0:0:321:1] Marker# BPG33 2026-01-04T02:36:23.611169Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [667a4d0d3c364da9] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:14:0:0:321:1] Marker# BPG32 2026-01-04T02:36:23.611363Z node 68 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [68:83:2082] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:14:0:0:321:1] FDS# 321 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:36:23.614882Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [667a4d0d3c364da9] received {EvVPutResult Status# OK ID# [72057594037927937:2:14:0:0:321:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 29 } Cost# 82527 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 30 }}}} from# [0:1:0:0:0] Marker# BPP01 2026-01-04T02:36:23.615075Z node 68 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [667a4d0d3c364da9] Result# TEvPutResult {Id# [72057594037927937:2:14:0:0:321:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2026-01-04T02:36:23.615190Z node 68 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [667a4d0d3c364da9] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:14:0:0:321:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-04T02:36:23.615531Z node 68 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:14:0:0:321:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-04T02:36:23.615727Z node 68 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:15} commited cookie 1 for step 14 2026-01-04T02:36:23.616437Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [68:515:2347] 2026-01-04T02:36:23.616510Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [68:515:2347] 2026-01-04T02:36:23.616602Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [68:515:2347] 2026-01-04T02:36:23.616726Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [68:331:2201] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:36:23.616811Z node 68 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 68 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [68:331:2201] 2026-01-04T02:36:23.616903Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [68:515:2347] 
2026-01-04T02:36:23.616977Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [68:515:2347] 2026-01-04T02:36:23.617067Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [68:515:2347] 2026-01-04T02:36:23.617205Z node 68 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [68:515:2347] 2026-01-04T02:36:23.617408Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [68:515:2347] 2026-01-04T02:36:23.617488Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [68:515:2347] 2026-01-04T02:36:23.617550Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [68:515:2347] 2026-01-04T02:36:23.617632Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [68:515:2347] 2026-01-04T02:36:23.617704Z node 68 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [68:515:2347] 2026-01-04T02:36:23.617793Z node 68 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [68:514:2346] EventType# 268830214 |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 1269, MsgBus: 28065 2026-01-04T02:36:17.026783Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329687091353512:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:17.028175Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:17.059355Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:36:17.321893Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:17.378422Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:17.378524Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:17.380402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:17.450966Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:17.457938Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329687091353465:2081] 1767494177010537 != 1767494177010540 2026-01-04T02:36:17.539715Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-04T02:36:17.561162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:17.561178Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:17.561183Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:17.561253Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:17.974491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:17.980423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:18.029563Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:18.044766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:18.236422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:18.405970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:18.486030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.440143Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329699976257233:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:20.440256Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:20.440702Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329699976257243:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:20.440784Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:20.787496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.829915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.866659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.906284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.955877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.003659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.060205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.110071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.207509Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329704271225409:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:21.207603Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:21.207929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329704271225415:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:21.207957Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:21.208009Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329704271225414:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:21.212108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:21.221519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-04T02:36:21.222341Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329704271225418:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:21.326974Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329704271225469:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:22.019585Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329687091353512:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:22.019633Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::ComputeLength-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 6285, MsgBus: 15214 2026-01-04T02:36:10.060421Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329658339310771:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:10.060822Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:10.329877Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:10.368409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:10.368519Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:10.408551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:10.491146Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:10.498148Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329658339310655:2081] 1767494170039566 != 1767494170039569 2026-01-04T02:36:10.622426Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:10.766368Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:10.766397Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:10.766404Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:10.766481Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:11.070062Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for 
task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:11.228129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:11.233600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:11.281380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:11.441733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:11.610227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:11.680988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.603031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329671224214424:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.603128Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.603549Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329671224214434:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.603599Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.913400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.947371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.977975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:14.007528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:14.047175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:14.102460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:14.140865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:14.210743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:14.316173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329675519182604:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:14.316267Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:14.316786Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329675519182609:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:14.316844Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329675519182610:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:14.316999Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:14.321650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:14.334797Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329675519182613:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:14.427267Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329675519182664:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:15.044595Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329658339310771:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:15.044669Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27953, MsgBus: 14742 2026-01-04T02:36:17.770015Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329688172805806:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:17.770568Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:17.784411Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:17.878253Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:17.920964Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:17.921054Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:17.926667Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:18.012647Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:18.025534Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:18.025558Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:18.025566Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:18.025649Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:18.425391Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:18.433529Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-01-04T02:36:18.444222Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:18.539219Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:18.685404Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:18.743675Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:18.854068Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:21.257310Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329705352676796:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:21.257397Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:21.257914Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329705352676806:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:21.257966Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:21.327798Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.370836Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.429238Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.487807Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.559234Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.599816Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.653199Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.721206Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.811588Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329705352677676:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:21.811721Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:21.812364Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329705352677681:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:21.812416Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:21.812469Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329705352677682:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:21.816080Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:21.829677Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329705352677685:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:21.925410Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329705352677738:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:22.773899Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329688172805806:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:22.773954Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4528, MsgBus: 2656 2026-01-04T02:36:05.936810Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329637142811989:2197];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:05.937046Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:06.281297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:06.281401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:06.312266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:06.359807Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:06.406354Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:06.407201Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329637142811816:2081] 1767494165903662 != 1767494165903665 2026-01-04T02:36:06.486187Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:06.486223Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:06.486231Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:06.486341Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:06.535886Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:06.938261Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:06.981582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:06.994743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:07.065434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:07.230966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:07.399297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:07.477693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:09.460664Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329654322682881:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:09.460809Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:09.462115Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329654322682891:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:09.462212Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:09.808958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:09.847310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:09.875204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:09.907010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:09.957966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:10.003217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:10.042980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:10.099772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:10.193977Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329658617651061:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:10.194089Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:10.194176Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329658617651066:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:10.194371Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329658617651068:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:10.194441Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:10.198451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:10.214508Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329658617651070:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:10.278966Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329658617651121:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:10.933298Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329637142811989:2197];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:10.933367Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:12.184183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... :36:16.149020Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:16.154199Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329680312088023:2081] 1767494175935306 != 1767494175935309 2026-01-04T02:36:16.155409Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:16.155497Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:16.175456Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:16.366024Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:16.376595Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:16.376623Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:16.376643Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:16.376756Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:16.823644Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:16.841350Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:16.914681Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.021750Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:17.093282Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.183636Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:19.699058Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329697491959085:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:19.699146Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:19.699608Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329697491959095:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:19.699684Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:19.771311Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:19.809080Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:19.840966Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:19.875667Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:19.909918Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:19.954674Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:19.999178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.054167Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.155228Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329701786927263:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:20.155314Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:20.155585Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329701786927266:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:20.155631Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:20.155800Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329701786927270:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:20.160230Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:20.176230Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329701786927272:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:20.248194Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329701786927323:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:20.953394Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329680312088212:2202];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:20.953506Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:22.015513Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:22.072120Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:22.143349Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService-UseSink >> KqpQueryPerf::Insert+QueryService-UseSink [GOOD] >> KqpQueryPerf::Insert+QueryService+UseSink >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] >> KqpQueryPerf::Update+QueryService+UseSink >> KqpQueryPerf::DeleteOn+QueryService+UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService-UseSink >> KqpQueryPerf::IndexReplace+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29372, MsgBus: 8285 2026-01-04T02:36:11.645693Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329663311128401:2139];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:11.645822Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:11.877890Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:11.978517Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:36:11.978629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:11.979289Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:11.992387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:12.075841Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:12.116056Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:12.116085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:12.116097Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:12.116212Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:12.596223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:12.604560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:12.653317Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:12.671783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:12.778930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:12.933902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.005239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:15.186045Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] 
ActorId: [1:7591329680490999351:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:15.186197Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:15.190256Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329680490999361:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:15.190377Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:15.501782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:15.537346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:15.569311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:15.605692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:15.636511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:15.676441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:15.718518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:15.766244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:15.848508Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329680491000233:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:15.848599Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:15.848860Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329680491000238:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:15.848906Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:15.848953Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329680491000239:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:15.852667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:15.865839Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329680491000242:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:15.967431Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329680491000293:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:16.646080Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329663311128401:2139];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:16.646137Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7105, MsgBus: 14759 2026-01-04T02:36:19.758585Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329697882706639:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:19.760333Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:19.866180Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:19.882573Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:19.887286Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329697882706591:2081] 1767494179733263 != 1767494179733266 2026-01-04T02:36:19.898313Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:19.898405Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:19.925012Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:20.042302Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:20.042326Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:20.042335Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:20.042399Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:20.052213Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:20.502667Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:36:20.509068Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:36:20.514175Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.581153Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.772511Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:20.787861Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.871291Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:23.478907Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329715062577640:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:23.478985Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:23.479745Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329715062577650:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:23.479793Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:23.550751Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:23.597640Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:23.679170Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:23.723860Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:23.771957Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:23.826080Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:23.883411Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:23.962815Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.080244Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329719357545821:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.080361Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.081726Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329719357545826:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.081784Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329719357545827:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.081920Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.086913Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:24.106637Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329719357545830:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:36:24.196643Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329719357545881:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:24.761939Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329697882706639:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:24.762030Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 6016, MsgBus: 19814 2026-01-04T02:36:14.388362Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329674629219158:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:14.388640Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:14.407752Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:36:14.688756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:14.688879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:14.738295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:14.744939Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:14.764440Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:14.766434Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329674629219053:2081] 1767494174378160 != 1767494174378163 2026-01-04T02:36:14.890369Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:14.890391Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:14.890398Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:14.890470Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:15.013922Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:15.305903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:15.311338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:15.362318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:15.392581Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:15.535780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:15.685251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:15.762919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.860056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329687514122809:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:17.860156Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:17.860488Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329687514122819:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:17.860565Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:18.188472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:18.232563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:18.270140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:18.313405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:18.350178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:18.384170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:18.438649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:18.479201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:18.559536Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329691809090989:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:18.559604Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:18.559986Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329691809090994:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:18.559999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329691809090995:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:18.560034Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:18.563709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:18.572386Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329691809090998:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:18.667671Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329691809091049:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:19.387122Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329674629219158:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:19.387185Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 62302, MsgBus: 29171 2026-01-04T02:36:21.451107Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329705130112661:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:21.451245Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:21.490132Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:21.563166Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:21.568743Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329705130112558:2081] 1767494181435831 != 1767494181435834 2026-01-04T02:36:21.577830Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:21.577903Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:21.606888Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:21.765617Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:21.805111Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:21.805132Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:21.805141Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:21.805220Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:22.279192Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:36:22.290958Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:36:22.306623Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:22.392332Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:22.463410Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:22.567539Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:22.624707Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.010494Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329722309983612:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.010594Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.011061Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329722309983622:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.011117Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.092980Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.135100Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.176131Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.221978Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.265709Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.310213Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.365076Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.446646Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.523513Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329722309984494:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.523609Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.523918Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329722309984499:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.523979Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329722309984500:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.524079Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.528390Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:25.540418Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329722309984503:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:36:25.636370Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329722309984556:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:26.439693Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329705130112661:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:26.439792Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin+EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 4842, MsgBus: 28485 2026-01-04T02:36:13.388574Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329670154875176:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:13.389990Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:13.711335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:13.711559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:13.720135Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:13.782175Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:13.805343Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:13.809687Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329670154875144:2081] 1767494173377849 != 1767494173377852 2026-01-04T02:36:13.974551Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:13.974594Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:13.974610Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:13.974695Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:14.044022Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:14.406448Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:14.465553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:14.536105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:14.714977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:14.891239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:14.966880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.047354Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329687334746202:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:17.047484Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:17.047817Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329687334746212:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:17.047859Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:17.374756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.420418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.463664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.497329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.550533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.583397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.623602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.672486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.773031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329687334747080:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:17.773133Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:17.773705Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329687334747086:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:17.773758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329687334747085:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:17.773793Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:17.778859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:17.794530Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329687334747089:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:17.888327Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329687334747140:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:18.388958Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329670154875176:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:18.389021Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 2419, MsgBus: 8940 2026-01-04T02:36:20.955690Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329702853359897:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:20.974443Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:21.013904Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:21.094614Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:21.111097Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:21.111182Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:21.139481Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:21.202436Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:21.202462Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:21.202469Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:21.202545Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:21.289232Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:21.603711Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:21.610241Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 
2026-01-04T02:36:21.624855Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.709393Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.864744Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.923649Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:22.051914Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:24.593940Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329720033230880:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.594047Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.594393Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329720033230890:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.594444Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.652396Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.697623Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.728327Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.762593Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.797730Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.871128Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.957125Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.016770Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.117475Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329724328199058:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.117585Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.117784Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329724328199063:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.117869Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329724328199064:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.117956Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.123381Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:25.139349Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329724328199067:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:36:25.212538Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329724328199118:3762] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:25.991849Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329702853359897:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:25.991921Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::IndexInsert+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexInsert+QueryService+UseSink >> TContinuousBackupWithRebootsTests::TakeSeveralIncrementalBackups [GOOD] >> TxUsage::WriteToTopic_Demo_40_Query [GOOD] |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19327, MsgBus: 1207 2026-01-04T02:36:08.373435Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329648600698529:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:08.373614Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:08.621911Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:08.653746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:08.653984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:08.710450Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:08.722000Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:08.723081Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329648600698498:2081] 1767494168369326 != 1767494168369329 2026-01-04T02:36:08.813712Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:08.813733Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:08.813738Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:08.813859Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:08.846533Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:09.216461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:09.222341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:09.291663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:09.385781Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:09.434256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:09.609681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:09.684339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:11.630394Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329661485602260:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:11.630502Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:11.630804Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329661485602270:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:11.630866Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:11.997513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:12.032468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:12.066042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:12.112832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:12.142029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:12.177799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:12.221786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:12.270911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:12.348160Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329665780570440:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:12.348208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329665780570445:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:12.348258Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:12.351359Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329665780570448:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:12.351436Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:12.352635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:12.369351Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329665780570447:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:12.470267Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329665780570502:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:13.373558Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329648600698529:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:13.373613Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:14.262003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... e.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:18.100113Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:36:18.195080Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:18.196308Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329693442305745:2081] 1767494178061451 != 1767494178061454 2026-01-04T02:36:18.209439Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:18.209540Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:18.326613Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:18.326646Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:18.326656Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:18.326739Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:18.345903Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:18.347802Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:18.850677Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:18.866452Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:18.937487Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:19.097703Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:19.111532Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:19.189502Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.845419Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329706327209504:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:21.845543Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:21.845869Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329706327209513:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:21.845924Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:21.924823Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.977968Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:22.039905Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:22.128658Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:22.203031Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:22.247324Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:22.323256Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:22.391428Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:22.502338Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329710622177687:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:22.502477Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:22.503273Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329710622177692:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:22.503326Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329710622177693:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:22.503379Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:22.508229Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:22.524260Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329710622177696:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:36:22.625755Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329710622177747:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:24.642004Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.691151Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.728986Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpQueryPerf::UpdateOn-QueryService+UseSink >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] >> KqpQueryPerf::MultiRead+QueryService [GOOD] |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete-QueryService+UseSink [GOOD] >> THiveTest::TestHiveBalancerOneTabletHighUsage [GOOD] >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TContinuousBackupWithRebootsTests::TakeSeveralIncrementalBackups [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:122:2058] recipient: [1:117:2146] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:122:2058] recipient: [1:117:2146] Leader for TabletID 72057594046678944 is [1:139:2160] sender: [1:142:2058] recipient: [1:117:2146] 2026-01-04T02:35:16.615838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:35:16.615934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:35:16.615981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:35:16.616045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:35:16.616129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2026-01-04T02:35:16.616170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:35:16.616242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:35:16.616343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:35:16.617219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:35:16.617503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:35:16.740574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:35:16.740675Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:16.741542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:35:16.748737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:35:16.748918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:35:16.749060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:35:16.754619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:35:16.754831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:35:16.755578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:35:16.755826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:35:16.757937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:35:16.758127Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:35:16.759291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:35:16.759375Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:35:16.759571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:35:16.759618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2026-01-04T02:35:16.759658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:35:16.759750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:35:16.907215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:16.908414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:16.908540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:16.908638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:16.908760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:16.908839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:16.908932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:35:16.909007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:16.909071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:16.909132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:16.909190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:16.909283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:16.909381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:16.909473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:219:2219] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:35:16.909563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# ... 
83647 LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 1048576 BurstSize: 1048576 } TopicName: "3_continuousBackupImpl" TopicPath: "/MyRoot/Table/3_continuousBackupImpl/streamImpl" YdbDatabasePath: "/MyRoot" PartitionKeySchema { Name: "key" TypeId: 4 } MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409555 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409556 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 50 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:36:29.561505Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:36:29.561766Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl1" took 278us result status StatusSuccess 2026-01-04T02:36:29.562363Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl1" PathDescription { Self { Name: "IncrBackupImpl1" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl1" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 
RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:36:29.563693Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:36:29.563995Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl2" took 289us result status StatusSuccess 2026-01-04T02:36:29.564600Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl2" PathDescription { Self { Name: "IncrBackupImpl2" PathId: 46 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 104 CreateStep: 5000042 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl2" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 46 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:36:29.566084Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:36:29.566379Z node 16 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl3" took 291us result status StatusSuccess 2026-01-04T02:36:29.566951Z node 16 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl3" PathDescription { Self { Name: "IncrBackupImpl3" PathId: 49 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 5000043 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl3" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: 
false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 43 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 49 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> KqpQueryPerf::IndexUpdateOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 17053, MsgBus: 18769 2026-01-04T02:36:22.999227Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329709823838854:2134];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:22.999302Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:23.233962Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:23.273989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:23.274107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:23.363154Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329709823838760:2081] 1767494182989348 != 1767494182989351 2026-01-04T02:36:23.379958Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:23.380779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:23.473316Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:23.558655Z node 
1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:23.558677Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:23.558683Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:23.558784Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:24.017988Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:24.385731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:24.406939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:24.492384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.633403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.796652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.891899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:26.778589Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329727003709834:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:26.778674Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:26.779061Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329727003709844:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:26.779121Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.202799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.239975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.278460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.318069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.351231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.393386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.473134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.531483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.624795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329731298678020:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.624901Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.625180Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329731298678026:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.625415Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329731298678025:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.625437Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.628586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:27.644545Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329731298678029:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:27.718317Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329731298678080:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:27.999190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329709823838854:2134];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:27.999283Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 27463, MsgBus: 21330 2026-01-04T02:36:22.953431Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329708440484651:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:22.953492Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:23.201915Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:23.219356Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:23.219455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:23.275738Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:23.328360Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:23.330519Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329708440484615:2081] 1767494182950274 != 1767494182950277 2026-01-04T02:36:23.435891Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:23.435923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:23.435931Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:23.436019Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:23.494687Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:23.966832Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-04T02:36:24.258254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:24.270726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:24.369332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.545764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.687825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.761964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:26.650746Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329725620355667:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:26.650932Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:26.651404Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329725620355677:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:26.651482Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:26.977869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.024072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.060061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.107578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.144303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.187776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.232183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.278564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.374248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329729915323845:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.374356Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.374832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329729915323850:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.374871Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329729915323851:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.374912Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.379455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:27.394729Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329729915323854:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:27.462287Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329729915323905:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:27.954048Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329708440484651:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:27.954117Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> THiveTest::TestGetStorageInfoDeleteTabletBeforeAssigned [GOOD] >> THiveTest::TestHiveBalancerWithSpareNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3292, MsgBus: 7269 2026-01-04T02:36:16.195630Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329685846051484:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:16.198381Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:16.448241Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:16.515330Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:16.515426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:16.555158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:16.616256Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:16.720852Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:16.760882Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:16.760912Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:16.760923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:16.761024Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:17.210148Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:17.278121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:17.287332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:36:17.357244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.533764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.699252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:17.769407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:19.664166Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329698730955215:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:19.664288Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:19.664733Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329698730955225:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:19.664781Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:19.980838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.011197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.042147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.083626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.117703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.155558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.204605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.266759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.352830Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329703025923396:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:20.352927Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:20.353336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329703025923401:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:20.353379Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329703025923402:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:20.353407Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:20.357633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:20.372117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715670, at schemeshard: 72057594046644480 2026-01-04T02:36:20.372950Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329703025923405:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:36:20.465168Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329703025923456:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:21.195893Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329685846051484:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:21.197893Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13690, MsgBus: 27779 2026-01-04T02:36:23.444241Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329712898410626:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:23.444314Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:23.492219Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:23.573615Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:23.583889Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329712898410591:2081] 1767494183441939 != 1767494183441942 2026-01-04T02:36:23.594208Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:23.594298Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:23.622204Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:23.769908Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:23.786045Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:23.786069Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:23.786076Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:23.786150Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:24.226895Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:36:24.233057Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:24.247455Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.302120Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.457488Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:24.464810Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.534555Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.313972Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329730078281646:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.314104Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.320751Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329730078281656:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.320873Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.394173Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.430400Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.474623Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.510443Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.548720Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.594788Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.648156Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.706818Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:27.811262Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329730078282528:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.811352Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.811621Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329730078282533:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.811652Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329730078282534:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.811677Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:27.816149Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:27.831843Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329730078282537:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:27.890361Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329730078282588:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:28.444374Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329712898410626:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:28.444455Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TxUsage::WriteToTopic_Demo_41_Query >> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Query [GOOD] >> KqpQueryPerf::IndexUpdateOn-QueryService-UseSink >> KqpQueryPerf::Insert-QueryService-UseSink >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 20926, MsgBus: 7126 2026-01-04T02:36:24.850340Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329719111121324:2060];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:24.850371Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:24.905545Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:36:25.150940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:25.151067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:25.153323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:25.162668Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:25.200127Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:25.201719Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329719111121304:2081] 1767494184846620 != 1767494184846623 2026-01-04T02:36:25.220608Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-04T02:36:25.229059Z node 1 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-04T02:36:25.274374Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:25.274402Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:25.274409Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:25.274505Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:25.432814Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:25.798202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:25.818903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:25.882226Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:25.891853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:26.072318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:26.252460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:26.329776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:28.219857Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329736290992359:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:28.220038Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:28.227234Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329736290992369:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:28.227351Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:28.508312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:28.547590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:28.596920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:28.631205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:28.667208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:28.704829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:28.746838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:28.806095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:28.882156Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329736290993242:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:28.882279Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:28.882468Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329736290993247:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:28.882492Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329736290993248:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:28.882563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:28.886112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:28.898745Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329736290993251:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:28.972875Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329736290993302:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:29.851945Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329719111121324:2060];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:29.852016Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpBatchDelete::ManyPartitions_2 [GOOD] >> KqpQueryPerf::Replace+QueryService-UseSink >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Query [GOOD] >> KqpQueryPerf::DeleteOn-QueryService-UseSink >> KqpQueryPerf::Upsert+QueryService-UseSink >> KqpQueryPerf::Delete+QueryService-UseSink >> KqpQueryPerf::Insert+QueryService+UseSink [GOOD] >> KqpQueryPerf::UpdateOn+QueryService-UseSink [GOOD] |88.9%| [LD] {default-linux-x86_64, release, asan} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |88.9%| [LD] {RESULT} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut |88.9%| [LD] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/core/viewer/ut/ydb-core-viewer-ut ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::Sinks_Oltp_WriteToTopicAndTable_6_Query [GOOD] Test command err: 2026-01-04T02:32:55.076341Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328819237043468:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:55.080699Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:55.186581Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:55.461411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:55.461541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:55.484091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:55.541009Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:55.601733Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:55.605968Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328819237043337:2081] 1767493975044576 != 1767493975044579 2026-01-04T02:32:55.661409Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001ce3/r3tmp/yandex23v9HF.tmp 
2026-01-04T02:32:55.661442Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001ce3/r3tmp/yandex23v9HF.tmp 2026-01-04T02:32:55.661658Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001ce3/r3tmp/yandex23v9HF.tmp 2026-01-04T02:32:55.661771Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:55.693037Z INFO: TTestServer started on Port 27972 GrpcPort 63542 2026-01-04T02:32:55.744665Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:63542 2026-01-04T02:32:55.915929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:55.921211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:32:56.024548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:56.095968Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:58.584837Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328832121946266:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:58.584955Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:58.586509Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328832121946279:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:58.586559Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328832121946280:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:58.586653Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:58.598192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:32:58.617132Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328832121946283:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-04T02:32:58.946005Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328832121946347:2644] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:58.989423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:59.037670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:59.133242Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328832121946355:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:32:59.135807Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YzdhNzJiNzEtZmU3NjMxMjQtMjU4MTU4ZS1iODNlYzRiZA==, ActorId: [1:7591328832121946264:2330], ActorState: ExecuteState, LegacyTraceId: 01ke3dkje80p6x3a3hy7k1b4rh, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:32:59.138328Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:32:59.144040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7591328836416913944:2825] 2026-01-04T02:33:00.076682Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328819237043468:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:00.076772Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-04T02:33:04.568629Z :Sinks_Oltp_WriteToTopic_1_Table INFO: TTopicSdkTestSetup started 2026-01-04T02:33:04.596623Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-04T02:33:04.624819Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7591328857891750634:2922] connected; active server actors: 1 2026-01-04T02:33:04.625090Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-01-04T02:33:04.626838Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-04T02:33:04.626949Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-04T02:33:04.627460Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:33:04.628243Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:33:04.628394Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-04T02:33:04.628435Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:33:04.628469Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72075186224037 ... 0] Generation [1] Write session: close. Timeout 0.000000s 2026-01-04T02:36:32.834038Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|5716ce15-d8376e0b-e2a0fd39-4f8e3836_0] PartitionId [0] Generation [1] Write session will now close 2026-01-04T02:36:32.834097Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|5716ce15-d8376e0b-e2a0fd39-4f8e3836_0] PartitionId [0] Generation [1] Write session: aborting 2026-01-04T02:36:32.834988Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|5716ce15-d8376e0b-e2a0fd39-4f8e3836_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-01-04T02:36:32.835292Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|5716ce15-d8376e0b-e2a0fd39-4f8e3836_0] PartitionId [0] Generation [1] Write session: destroy 2026-01-04T02:36:32.835545Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 7 sessionId: test-message_group_id|5716ce15-d8376e0b-e2a0fd39-4f8e3836_0 grpc read done: success: 0 data: 2026-01-04T02:36:32.835581Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 7 sessionId: test-message_group_id|5716ce15-d8376e0b-e2a0fd39-4f8e3836_0 grpc read failed 2026-01-04T02:36:32.836019Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:824: session v1 closed cookie: 7 sessionId: test-message_group_id|5716ce15-d8376e0b-e2a0fd39-4f8e3836_0 2026-01-04T02:36:32.836048Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 7 sessionId: test-message_group_id|5716ce15-d8376e0b-e2a0fd39-4f8e3836_0 is DEAD 2026-01-04T02:36:32.836507Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:36:32.836557Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:36:32.836711Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037896] server disconnected, pipe [14:7591329736853289678:2519] destroyed 2026-01-04T02:36:32.836749Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037896][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-04T02:36:32.836775Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:32.836790Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:32.836804Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:32.836822Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:32.836835Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-04T02:36:32.836877Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037896] server disconnected, pipe [14:7591329736853289681:2519] destroyed 2026-01-04T02:36:32.841233Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:32.841270Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:32.841288Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:32.841309Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:32.841330Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:32.854333Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|aefb896a-76455f51-9396bed0-3552d6d7_0] PartitionId [0] Generation [1] Write session: close. 
Timeout 0.000000s 2026-01-04T02:36:32.854377Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|aefb896a-76455f51-9396bed0-3552d6d7_0] PartitionId [0] Generation [1] Write session will now close 2026-01-04T02:36:32.854412Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|aefb896a-76455f51-9396bed0-3552d6d7_0] PartitionId [0] Generation [1] Write session: aborting 2026-01-04T02:36:32.854833Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|aefb896a-76455f51-9396bed0-3552d6d7_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-01-04T02:36:32.854867Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|aefb896a-76455f51-9396bed0-3552d6d7_0] PartitionId [0] Generation [1] Write session: destroy 2026-01-04T02:36:32.878002Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|aefb896a-76455f51-9396bed0-3552d6d7_0 grpc read done: success: 0 data: 2026-01-04T02:36:32.878044Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|aefb896a-76455f51-9396bed0-3552d6d7_0 grpc read failed 2026-01-04T02:36:32.878103Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:824: session v1 closed cookie: 5 sessionId: test-message_group_id|aefb896a-76455f51-9396bed0-3552d6d7_0 2026-01-04T02:36:32.878124Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|aefb896a-76455f51-9396bed0-3552d6d7_0 is DEAD 2026-01-04T02:36:32.878615Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:36:32.878673Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:36:32.878972Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [14:7591329736853289635:2511] destroyed 2026-01-04T02:36:32.879016Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [14:7591329736853289638:2511] destroyed 2026-01-04T02:36:32.879046Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-04T02:36:32.879080Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:32.879101Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:32.879117Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:32.879137Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:32.879152Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:32.918049Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:32.918093Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:32.918112Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:32.918138Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:32.918158Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-04T02:36:32.924920Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:32.924954Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:32.924981Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:32.925016Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:32.925033Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:32.941772Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:32.941822Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:32.941841Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:32.941864Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:32.941889Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:33.018575Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:33.018615Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:33.018630Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-01-04T02:36:33.018650Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:33.018665Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-04T02:36:33.025220Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:33.025262Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:33.025279Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:33.025302Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:33.025328Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:33.043445Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:33.043475Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:33.043486Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:33.043501Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:33.043515Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_2 [GOOD] Test command err: Trying to start YDB, gRPC: 24969, MsgBus: 9224 2026-01-04T02:33:38.664520Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329007147919630:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:38.668076Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:39.049363Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:39.184533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:39.184634Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:39.193075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:39.193667Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:39.254745Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:39.356617Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:39.356641Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:39.356647Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:39.356735Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:39.672992Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:39.965476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:39.984021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:33:40.039123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:40.232608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:40.475210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:40.537314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.522382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329024327790642:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.522536Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.523129Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329024327790652:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.523193Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.824455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.867596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.902956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.934218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.979270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:43.031754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:43.075794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:43.131459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:43.227413Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329028622758817:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:43.227504Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:43.227715Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329028622758822:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:43.227745Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329028622758823:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:43.227855Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:43.232152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:43.245773Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329028622758826:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:33:43.314396Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329028622758877:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:43.665417Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329007147919630:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:43.666821Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:33:45.137220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:68 ... T02:36:18.469137Z node 12 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7591329693559688168:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:18.469222Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:18.534165Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:18.648171Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:18.666001Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:18.666134Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:18.704794Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:18.780352Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:18.791094Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:18.791123Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:18.791134Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:18.791260Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:19.477446Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:19.715258Z node 12 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:19.722720Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:19.731077Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:19.838751Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.125587Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:20.348166Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:23.471372Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7591329693559688168:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:23.471482Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:24.942062Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591329719329493785:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.942234Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.942876Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591329719329493795:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.942949Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.081285Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.146914Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.198394Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.274853Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.339023Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.416811Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.486477Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.673565Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.805383Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591329723624461989:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.805546Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.806075Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591329723624461994:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.806147Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591329723624461995:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.806249Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.814106Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:25.843464Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7591329723624461998:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:25.941186Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7591329723624462051:3790] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:29.062395Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpQueryPerf::Update+QueryService+UseSink [GOOD] |88.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpQueryPerf::IndexUpsert-QueryService-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 16828, MsgBus: 21715 2026-01-04T02:36:27.841238Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329733039101242:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:27.841314Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:28.093901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:28.122721Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:28.122814Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:28.178248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:28.192819Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329733039101220:2081] 1767494187840524 != 1767494187840527 2026-01-04T02:36:28.213181Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:28.276132Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:28.276169Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:28.276176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:28.276279Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:28.323271Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-04T02:36:28.810041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:28.817236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:28.855644Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:28.868957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:29.024283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:29.209790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:29.285785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:31.316743Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329750218972279:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:31.316854Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:31.317260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329750218972289:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:31.317378Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:31.654971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:31.696503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:31.737232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:31.778879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:31.821897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:31.874545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:31.930151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:31.986225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.097439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329754513940454:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:32.097546Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:32.097972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329754513940459:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:32.098025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329754513940460:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:32.098145Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:32.103088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:32.118751Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329754513940463:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:32.183127Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329754513940514:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:32.841705Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329733039101242:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:32.841823Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 19147, MsgBus: 61439 2026-01-04T02:36:20.288749Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329702092327439:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:20.288964Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:20.580858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:20.581023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:20.636754Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:20.659421Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:20.677906Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329702092327393:2081] 1767494180286692 != 1767494180286695 2026-01-04T02:36:20.678732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:20.838070Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:20.853573Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:20.853596Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:20.853605Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:20.853693Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:21.305411Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-04T02:36:21.413372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:21.422357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:21.471199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.672281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.845784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.920811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.035920Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329719272198445:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.036060Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.036395Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329719272198455:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.036435Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.533789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.567138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.602377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.639827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.683610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.724866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.765750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.818605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.920082Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329719272199329:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.920175Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.920359Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329719272199334:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.920412Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329719272199335:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.920570Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.925314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:24.935283Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329719272199338:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:25.015168Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329723567166687:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:25.290008Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329702092327439:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:25.291283Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11305, MsgBus: 29811 2026-01-04T02:36:28.236645Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329736744734648:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:28.242925Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:28.287983Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:28.359773Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:28.378122Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:28.378205Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:28.407018Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:28.480321Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:28.509000Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:28.509023Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:28.509031Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:28.509109Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:28.985428Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:28.992089Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-01-04T02:36:28.998633Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:29.063399Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:29.233676Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:29.277537Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:29.310023Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:31.901958Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329749629638333:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:31.902061Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:31.902421Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329749629638343:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:31.902476Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:32.025244Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.055698Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.091034Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.126036Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.177217Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.228264Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.277449Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.349922Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.458409Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329753924606517:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:32.458502Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:32.458854Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329753924606522:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:32.458901Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329753924606523:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:32.458954Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:32.463648Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:32.478413Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329753924606526:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:32.581004Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329753924606577:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:33.241822Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329736744734648:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:33.241901Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Update+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9291, MsgBus: 4682 2026-01-04T02:36:28.432552Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329733409522727:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:28.432784Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:28.777917Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:28.807761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:28.807874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:28.888593Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:28.925367Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:28.926903Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329733409522504:2081] 1767494188393498 != 1767494188393501 2026-01-04T02:36:29.034335Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:29.130307Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:29.130334Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:29.130341Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:29.130432Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:29.396335Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-04T02:36:29.617169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:29.624618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:36:29.704772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:29.869950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:30.036046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:30.118428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.322649Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329750589393571:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:32.322781Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:32.323721Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329750589393581:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:32.323789Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:32.740588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.779417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.816969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.878140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.907091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.943275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.984466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:33.035896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:33.124802Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329754884361745:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:33.124927Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:33.126104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329754884361750:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:33.126167Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329754884361751:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:33.126291Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:33.131261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:33.149370Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329754884361754:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:36:33.217334Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329754884361805:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:33.431726Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329733409522727:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:33.431790Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> Channels20::CaIntegrationAgg [GOOD] >> Channels20::CaIntegrationIc >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup-QueryService [GOOD] >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService >> TxUsage::ReadRuleGeneration [GOOD] >> KqpQueryPerf::UpdateOn-QueryService+UseSink [GOOD] >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] >> KqpQueryPerf::UpdateOn+QueryService+UseSink >> KqpQueryPerf::KvRead+QueryService >> TxUsage::Write_And_Read_Small_Messages_1 [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink >> KqpQueryPerf::RangeLimitRead+QueryService >> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink >> TxUsage::Write_And_Read_Small_Messages_2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4036, MsgBus: 24077 2026-01-04T02:36:30.500548Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329743168178667:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:30.501320Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:30.813627Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:30.813718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:30.862959Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:30.901339Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:30.908043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:31.174299Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:31.221018Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-01-04T02:36:31.221050Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:31.221058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:31.221170Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:31.533957Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:31.970866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:31.983617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:32.069617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.260095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.444045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.560954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.554913Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329760348049662:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:34.555075Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:34.562233Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329760348049672:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:34.562338Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:34.952449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.988739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:35.030523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:35.062637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:35.121436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:35.158340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:35.234088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:35.285564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:35.378600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329764643017844:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:35.378666Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:35.378746Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329764643017849:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:35.379097Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329764643017851:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:35.379138Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:35.382796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:35.396401Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329764643017852:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:35.461303Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329764643017904:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:35.504238Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329743168178667:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:35.504330Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestHiveBalancerWithSpareNodes [GOOD] Test command err: 2026-01-04T02:35:05.403417Z node 3 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:35:05.439783Z node 3 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:35:05.440147Z node 3 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 3 PDiskId# 1 Path# "SectorMap:2:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:35:05.441151Z node 3 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:35:05.441536Z node 3 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:35:05.442733Z node 3 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [3:73:2076] ControllerId# 72057594037932033 2026-01-04T02:35:05.442800Z node 3 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:35:05.442923Z node 3 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-04T02:35:05.443079Z node 3 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:35:05.454537Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:35:05.454607Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:35:05.457474Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:80:2080] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.457747Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:81:2081] targetNodeId# 1 
Marker# DSP01 2026-01-04T02:35:05.458505Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:82:2082] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.459302Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:83:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.459500Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:84:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.459676Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:85:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.459846Z node 3 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [3:72:2075] Create Queue# [3:86:2086] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.459882Z node 3 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:35:05.459986Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [3:73:2076] 2026-01-04T02:35:05.460044Z node 3 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [3:73:2076] 2026-01-04T02:35:05.460099Z node 3 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-04T02:35:05.460160Z node 3 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-04T02:35:05.460906Z node 3 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-04T02:35:05.461061Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:35:05.465437Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:35:05.465643Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:35:05.466060Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:35:05.466401Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-04T02:35:05.467810Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-04T02:35:05.467999Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:35:05.469023Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:97:2078] ControllerId# 72057594037932033 2026-01-04T02:35:05.469064Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} 
SendRegisterNode 2026-01-04T02:35:05.469138Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-04T02:35:05.469274Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:35:05.483503Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:35:05.483570Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:35:05.485610Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:105:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.485832Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:106:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.485988Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:107:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.486141Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:108:2086] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.486284Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:109:2087] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.486426Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:110:2088] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.486570Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:96:2077] Create Queue# [1:111:2089] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.486615Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:35:05.486711Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:97:2078] 2026-01-04T02:35:05.486744Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:97:2078] 2026-01-04T02:35:05.486807Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-04T02:35:05.486860Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-04T02:35:05.487796Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-04T02:35:05.487879Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:35:05.491400Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } PDisks { NodeID: 3 PDiskID: 1 Path: "SectorMap:2:3200" PDiskGuid: 3 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:35:05.491570Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:35:05.491917Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write 
new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:35:05.492166Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:35:05.493207Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:122:2077] ControllerId# 72057594037932033 2026-01-04T02:35:05.493262Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:35:05.493333Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-04T02:35:05.493494Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:35:05.506135Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:35:05.506207Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:35:05.508530Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:129:2081] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.508733Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:130:2082] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.508881Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:131:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.509073Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:132:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.509226Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:133:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.509435Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:134:2086] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.509606Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:121:2076] Create Queue# [2:135:2087] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:05.509658Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:35:05.509742Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] :: ... 
BUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037892 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1974:2266] 2026-01-04T02:36:37.186558Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037892] forward result remote node 67 [62:2103:2493] 2026-01-04T02:36:37.186706Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037892] remote node connected [62:2103:2493] 2026-01-04T02:36:37.186754Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037892]::SendEvent [62:2103:2493] 2026-01-04T02:36:37.187181Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037892] Accept Connect Originator# [62:2103:2493] 2026-01-04T02:36:37.187715Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037892] connected with status OK role: Leader [62:2103:2493] 2026-01-04T02:36:37.187764Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037892] send queued [62:2103:2493] 2026-01-04T02:36:37.188894Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037893] ::Bootstrap [62:2107:2495] 2026-01-04T02:36:37.188938Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037893] lookup [62:2107:2495] 2026-01-04T02:36:37.189006Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037893 entry.State: StNormal leader: [67:1312:2100] followers: 0 ev: {EvForward TabletID: 72075186224037893 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:36:37.189049Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037893 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1312:2100] 2026-01-04T02:36:37.189162Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037893] forward result remote node 67 [62:2107:2495] 2026-01-04T02:36:37.189267Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037893] remote node connected [62:2107:2495] 2026-01-04T02:36:37.189364Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037893]::SendEvent [62:2107:2495] 2026-01-04T02:36:37.189624Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037893] Accept Connect Originator# [62:2107:2495] 2026-01-04T02:36:37.190090Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037893] connected with status OK role: Leader [62:2107:2495] 2026-01-04T02:36:37.190145Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037893] send queued [62:2107:2495] 2026-01-04T02:36:37.191152Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037894] ::Bootstrap [62:2110:2497] 2026-01-04T02:36:37.191196Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037894] lookup [62:2110:2497] 2026-01-04T02:36:37.191276Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037894 entry.State: StNormal leader: [66:1318:2142] (known problem) followers: 0 ev: {EvForward TabletID: 72075186224037894 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:36:37.191314Z node 62 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:36:37.191545Z node 62 
:STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037894 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:36:37.191682Z node 62 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 0} 2026-01-04T02:36:37.191739Z node 62 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 1} 2026-01-04T02:36:37.191778Z node 62 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037894 Cookie: 2} 2026-01-04T02:36:37.191837Z node 62 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [67:1976:2267] CurrentLeaderTablet: [67:1981:2270] CurrentGeneration: 3 CurrentStep: 0} 2026-01-04T02:36:37.191979Z node 62 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037894 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [67:1976:2267] CurrentLeaderTablet: [67:1981:2270] CurrentGeneration: 3 CurrentStep: 0} 2026-01-04T02:36:37.192052Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037894 leader: [67:1976:2267] followers: 0 2026-01-04T02:36:37.192103Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037894 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1976:2267] 2026-01-04T02:36:37.192213Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037894] forward result remote node 67 [62:2110:2497] 2026-01-04T02:36:37.192364Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037894] remote node connected [62:2110:2497] 2026-01-04T02:36:37.192410Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037894]::SendEvent [62:2110:2497] 2026-01-04T02:36:37.192786Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037894] Accept Connect Originator# [62:2110:2497] 2026-01-04T02:36:37.193346Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037894] connected with status OK role: Leader [62:2110:2497] 2026-01-04T02:36:37.193393Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037894] send queued [62:2110:2497] 2026-01-04T02:36:37.194368Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037895] ::Bootstrap [62:2114:2499] 2026-01-04T02:36:37.194411Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037895] lookup [62:2114:2499] 2026-01-04T02:36:37.194470Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037895 entry.State: StNormal leader: [67:1822:2195] followers: 0 ev: {EvForward TabletID: 72075186224037895 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:36:37.194511Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037895 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1822:2195] 2026-01-04T02:36:37.194610Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037895] forward result remote 
node 67 [62:2114:2499] 2026-01-04T02:36:37.194709Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037895] remote node connected [62:2114:2499] 2026-01-04T02:36:37.194755Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037895]::SendEvent [62:2114:2499] 2026-01-04T02:36:37.195010Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037895] Accept Connect Originator# [62:2114:2499] 2026-01-04T02:36:37.195380Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037895] connected with status OK role: Leader [62:2114:2499] 2026-01-04T02:36:37.195449Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037895] send queued [62:2114:2499] 2026-01-04T02:36:37.196487Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037896] ::Bootstrap [62:2117:2501] 2026-01-04T02:36:37.196528Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037896] lookup [62:2117:2501] 2026-01-04T02:36:37.196588Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037896 entry.State: StNormal leader: [67:1825:2197] followers: 0 ev: {EvForward TabletID: 72075186224037896 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:36:37.196628Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037896 followers: 0 countLeader 1 allowFollowers 0 winner: [67:1825:2197] 2026-01-04T02:36:37.196721Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037896] forward result remote node 67 [62:2117:2501] 2026-01-04T02:36:37.196829Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037896] remote node connected [62:2117:2501] 2026-01-04T02:36:37.196873Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037896]::SendEvent [62:2117:2501] 2026-01-04T02:36:37.197132Z node 67 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037896] Accept Connect Originator# [62:2117:2501] 2026-01-04T02:36:37.197521Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037896] connected with status OK role: Leader [62:2117:2501] 2026-01-04T02:36:37.197568Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037896] send queued [62:2117:2501] 2026-01-04T02:36:37.198663Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [62:2119:2502] 2026-01-04T02:36:37.198739Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [62:2119:2502] 2026-01-04T02:36:37.198860Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StNormal leader: [62:615:2179] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:36:37.198947Z node 62 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 62 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [62:615:2179] 2026-01-04T02:36:37.199107Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [62:2119:2502] 2026-01-04T02:36:37.199305Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:420: TClient[72057594037927937] received pending shutdown [62:2119:2502] 2026-01-04T02:36:37.199410Z node 
62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037927937] forward result local node, try to connect [62:2119:2502] 2026-01-04T02:36:37.199497Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [62:2119:2502] 2026-01-04T02:36:37.199658Z node 62 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [62:2119:2502] 2026-01-04T02:36:37.199989Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [62:2119:2502] 2026-01-04T02:36:37.200089Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [62:2119:2502] 2026-01-04T02:36:37.200144Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [62:2119:2502] 2026-01-04T02:36:37.200242Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [62:2119:2502] 2026-01-04T02:36:37.200302Z node 62 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [62:2119:2502] 2026-01-04T02:36:37.200399Z node 62 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [62:587:2174] EventType# 268697616 >> THiveTest::TestLockTabletExecutionRebootTimeout [GOOD] >> THiveTest::TestLockTabletExecutionDelete >> KqpWorkload::STOCK >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> KqpQueryPerf::Insert-QueryService-UseSink [GOOD] >> KqpQueryPerf::Insert-QueryService+UseSink >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::ReadRuleGeneration [GOOD] Test command err: 2026-01-04T02:32:53.835731Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328814206369470:2167];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:53.835791Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:53.936210Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:53.936491Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:54.336180Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:54.342841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:54.342954Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:54.434646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:54.505687Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded 2026-01-04T02:32:54.509990Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328814206369332:2081] 1767493973753624 != 1767493973753627 2026-01-04T02:32:54.674010Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:54.786690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001ce4/r3tmp/yandexVQFeRV.tmp 2026-01-04T02:32:54.789929Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001ce4/r3tmp/yandexVQFeRV.tmp 2026-01-04T02:32:54.791329Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001ce4/r3tmp/yandexVQFeRV.tmp 2026-01-04T02:32:54.791458Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:54.861029Z INFO: TTestServer started on Port 5178 GrpcPort 24122 2026-01-04T02:32:54.865999Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:24122 2026-01-04T02:32:55.215315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:55.222407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:32:55.299012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:55.468836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:32:57.673976Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328831386239545:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:57.674034Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328831386239561:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:57.674079Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:57.676782Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328831386239575:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:57.676861Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:57.681497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:32:57.696090Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328831386239576:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T02:32:57.757671Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328831386239641:2647] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:58.082385Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328831386239649:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:32:58.085116Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YjMyN2M3OTItMzY3MzZlYzEtNTM0MjEwNWQtNzE1MTcxZjQ=, ActorId: [1:7591328831386239542:2329], ActorState: ExecuteState, LegacyTraceId: 01ke3dkhj5e9bfehx37ak1bnbx, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:32:58.088106Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:32:58.091951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:58.137716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:58.285256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. 
Subcribe to ClusterTracker from [1:7591328835681207232:2825] 2026-01-04T02:32:58.835890Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328814206369470:2167];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:58.836011Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-04T02:33:04.706642Z :CreateTopicWithStreamingConsumer INFO: TTopicSdkTestSetup started 2026-01-04T02:33:04.728314Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-04T02:33:04.758168Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7591328861451011248:2935] connected; active server actors: 1 2026-01-04T02:33:04.758630Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][topic_name] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-04T02:33:04.759259Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][topic_name] BALANCER INIT DONE for topic_name: (0, 72075186224037892) 2026-01-04T02:33:04.760043Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][topic_name] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-04T02:33:04.766859Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:33:04.770049Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073 ... 
037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:37.746142Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:37.746167Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:37.746207Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:37.746233Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:37.846584Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:37.846635Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:37.846660Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:37.846699Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:37.846723Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:37.947658Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:37.947709Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:37.947734Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:37.947772Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:37.947795Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:38.010471Z :INFO: [/Root] [/Root] [d132f1f6-7b2a3aec-196e0abd-12a8790a] Closing read session. Close timeout: 0.000000s 2026-01-04T02:36:38.010568Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:test-topic:0:1:3:4 2026-01-04T02:36:38.010639Z :INFO: [/Root] [/Root] [d132f1f6-7b2a3aec-196e0abd-12a8790a] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2003 BytesRead: 9 MessagesRead: 1 BytesReadCompressed: 9 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:36:38.010795Z :NOTICE: [/Root] [/Root] [d132f1f6-7b2a3aec-196e0abd-12a8790a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-04T02:36:38.010868Z :DEBUG: [/Root] [/Root] [d132f1f6-7b2a3aec-196e0abd-12a8790a] [] Abort session to cluster 2026-01-04T02:36:38.011555Z :DEBUG: [/Root] 0x00007DB01BAB6990 TDirectReadSessionManager ServerSessionId=consumer-1_14_3_3168023247987819463_v1 Close 2026-01-04T02:36:38.012126Z :DEBUG: [/Root] 0x00007DB01BAB6990 TDirectReadSessionManager ServerSessionId=consumer-1_14_3_3168023247987819463_v1 Close 2026-01-04T02:36:38.012307Z :NOTICE: [/Root] [/Root] [d132f1f6-7b2a3aec-196e0abd-12a8790a] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:36:38.018034Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|a96fa30a-fff18fc-a728ed98-28b4b531_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2026-01-04T02:36:38.018109Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|a96fa30a-fff18fc-a728ed98-28b4b531_0] PartitionId [0] Generation [1] Write session will now close 2026-01-04T02:36:38.018191Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|a96fa30a-fff18fc-a728ed98-28b4b531_0] PartitionId [0] Generation [1] Write session: aborting 2026-01-04T02:36:38.018876Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|a96fa30a-fff18fc-a728ed98-28b4b531_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-01-04T02:36:38.018941Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|a96fa30a-fff18fc-a728ed98-28b4b531_0] PartitionId [0] Generation [1] Write session: destroy 2026-01-04T02:36:38.037406Z node 14 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 3 consumer consumer-1 session consumer-1_14_3_3168023247987819463_v1 grpc read done: success# 0, data# { } 2026-01-04T02:36:38.037452Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 3 consumer consumer-1 session consumer-1_14_3_3168023247987819463_v1 grpc read failed 2026-01-04T02:36:38.037490Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 3 consumer consumer-1 session consumer-1_14_3_3168023247987819463_v1 grpc closed 2026-01-04T02:36:38.037532Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 3 consumer consumer-1 session consumer-1_14_3_3168023247987819463_v1 is DEAD 2026-01-04T02:36:38.038258Z node 14 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:84: Direct read proxy [14:7591329770534699478:2534]: session cookie 4 consumer consumer-1 session consumer-1_14_3_3168023247987819463_v1 grpc read done: success# 0, data# { } 2026-01-04T02:36:38.038320Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [14:7591329770534699478:2534]: session cookie 4 consumer consumer-1 session consumer-1_14_3_3168023247987819463_v1grpc read failed 2026-01-04T02:36:38.038394Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:66: Direct read proxy [14:7591329770534699478:2534]: session cookie 4 consumer consumer-1 session consumer-1_14_3_3168023247987819463_v1 grpc closed 2026-01-04T02:36:38.038447Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [14:7591329770534699478:2534]: session cookie 4 consumer consumer-1 session consumer-1_14_3_3168023247987819463_v1 proxy is DEAD 2026-01-04T02:36:38.039333Z node 14 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][test-topic] pipe [14:7591329770534699468:2529] disconnected. 
2026-01-04T02:36:38.039389Z node 14 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][test-topic] pipe [14:7591329770534699468:2529] disconnected; active server actors: 1 2026-01-04T02:36:38.039421Z node 14 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][test-topic] pipe [14:7591329770534699468:2529] client consumer-1 disconnected session consumer-1_14_3_3168023247987819463_v1 2026-01-04T02:36:38.039559Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session consumer-1_14_3_3168023247987819463_v1 2026-01-04T02:36:38.039601Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [14:7591329770534699471:2532] destroyed 2026-01-04T02:36:38.039663Z node 14 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: consumer-1_14_3_3168023247987819463_v1 2026-01-04T02:36:38.041977Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 4 sessionId: test-message_group_id|a96fa30a-fff18fc-a728ed98-28b4b531_0 grpc read done: success: 0 data: 2026-01-04T02:36:38.042011Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 4 sessionId: test-message_group_id|a96fa30a-fff18fc-a728ed98-28b4b531_0 grpc read failed 2026-01-04T02:36:38.042443Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:824: session v1 closed cookie: 4 sessionId: test-message_group_id|a96fa30a-fff18fc-a728ed98-28b4b531_0 2026-01-04T02:36:38.042466Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 4 sessionId: test-message_group_id|a96fa30a-fff18fc-a728ed98-28b4b531_0 is DEAD 2026-01-04T02:36:38.047222Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:38.047267Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:38.047301Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:38.047349Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:38.047375Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:38.051480Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:36:38.051922Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [14:7591329757649797420:2484] destroyed 2026-01-04T02:36:38.051984Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-04T02:36:38.052027Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:38.052050Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:38.052073Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:38.052103Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:38.052124Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:38.147550Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:38.147597Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:38.147623Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:38.147662Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:38.147685Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:38.191911Z node 14 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037892][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:36:38.249232Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:38.249281Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:38.249308Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:38.249346Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:38.249370Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Query [GOOD] >> KqpQueryPerf::IdxLookupJoinThreeWay+QueryService [GOOD] >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService >> THiveTest::TestLockTabletExecutionDelete [GOOD] >> THiveTest::TestLockTabletExecutionDeleteReboot |88.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::MultiDeleteFromTable-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9624, MsgBus: 14226 2026-01-04T02:36:32.232848Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329753677542129:2178];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:32.233253Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:32.571550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:32.571645Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:32.652128Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:32.663500Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:32.687562Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329753677541970:2081] 1767494192129750 != 1767494192129753 2026-01-04T02:36:32.715481Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:32.866217Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:32.866240Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:32.866246Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:32.868452Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:32.899557Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:33.130347Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:33.454571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:33.462617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:33.525190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:33.674871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:33.850977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:33.931378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:35.998939Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329766562445738:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:35.999081Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:36.006063Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329766562445748:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:36.006171Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:36.385854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.438161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.469145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.537714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.576642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.621768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.668890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.718281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.818075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329770857413914:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:36.818164Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:36.819043Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329770857413919:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:36.819094Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329770857413920:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:36.819129Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:36.823602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:36.835751Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329770857413923:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:36.925630Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329770857413974:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:37.214449Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329753677542129:2178];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:37.215826Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::DeleteOn-QueryService+UseSink >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] >> RetryPolicy::TWriteSession_SeqNoShift >> KqpQueryPerf::Replace+QueryService-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexInsert+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9884, MsgBus: 6799 2026-01-04T02:36:20.573274Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329702920763134:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:20.573683Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:20.919831Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:20.943938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:20.944068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:20.961799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:21.057941Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329702920763019:2081] 1767494180558407 != 1767494180558410 2026-01-04T02:36:21.063356Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:21.139032Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:21.146428Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:21.146457Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:21.146464Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:21.146549Z 
node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:21.570960Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:21.606264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:21.615145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:21.662171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.788210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:21.957496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:22.034360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.043754Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329720100634082:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.043889Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.044529Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329720100634092:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.044618Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.425079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.458267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.489788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.534508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.566891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.618616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.652681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.695695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.770163Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329720100634966:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.770233Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.770326Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329720100634971:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.770450Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329720100634973:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.770528Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.773710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:24.787771Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329720100634975:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:24.876976Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329720100635026:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:25.565927Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329702920763134:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:25.566001Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:26.600297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... :36:30.358183Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:30.361961Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329745255161962:2081] 1767494190151532 != 1767494190151535 2026-01-04T02:36:30.389994Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:30.390076Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:30.421719Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:30.499817Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:30.534480Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:30.534509Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:30.534516Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:30.534598Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:31.052161Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:31.100561Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:31.167615Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:31.249001Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:31.445511Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:31.526343Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.153047Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329762435033008:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:34.153142Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:34.153514Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329762435033018:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:34.153569Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:34.231413Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.267593Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.306550Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.347216Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.384779Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.432274Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.482440Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.544185Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.671202Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329762435033890:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:34.671275Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:34.671612Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329762435033896:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:34.671658Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329762435033895:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:34.671681Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:34.675402Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:34.688013Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329762435033899:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:34.744429Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329762435033950:3759] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:35.153778Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329745255161991:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:35.153862Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:36.610207Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.661180Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.704969Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService-UseSink >> KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete+QueryService-UseSink [GOOD] >> KqpQueryPerf::Delete+QueryService+UseSink >> KqpQueryPerf::IndexReplace-QueryService-UseSink >> KqpQueryPerf::IndexUpdateOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 63283, MsgBus: 13813 2026-01-04T02:36:21.170109Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329703475619067:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:21.190786Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:21.469940Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:21.471242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:21.471361Z 
node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:21.478412Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:21.589924Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329703475619021:2081] 1767494181108840 != 1767494181108843 2026-01-04T02:36:21.593316Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:21.686336Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:21.686364Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:21.686371Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:21.686439Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:21.745385Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:22.194125Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:22.210647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:22.219771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:22.289338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:22.454500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:22.656903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:22.729214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.767758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329716360522785:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.767869Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.768184Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329716360522795:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:24.768257Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.123894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.158386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.192936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.228816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.264295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.303540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.353664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.413399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:25.515205Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329720655490964:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.515265Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.515470Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329720655490969:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.515506Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329720655490970:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.515535Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:25.518936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:25.532087Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329720655490973:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:25.614186Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329720655491024:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:26.154337Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329703475619067:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:26.154525Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:27.596249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... :36:32.139007Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:32.139106Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:32.141734Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329748596494222:2081] 1767494191834011 != 1767494191834014 2026-01-04T02:36:32.165292Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:32.190991Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:32.258800Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:32.258821Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:32.258828Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:32.258907Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:32.275668Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:32.664066Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:32.681799Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.763949Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.911916Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:32.960288Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:33.094849Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:35.529957Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329765776365276:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:35.530075Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:35.530430Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329765776365286:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:35.530485Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:35.601664Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:35.643861Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:35.687171Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:35.728993Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:35.772694Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:35.867928Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:35.907355Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:35.974755Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.096828Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329770071333455:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:36.096950Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:36.097426Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329770071333460:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:36.097472Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329770071333461:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:36.097727Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:36.101475Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:36.115322Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329770071333464:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:36.186373Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329770071333515:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:36.847563Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329748596494282:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:36.847670Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:37.895938Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.940724Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.972033Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2563, MsgBus: 15372 2026-01-04T02:36:34.283308Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329763242334258:2233];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:34.283351Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:34.606110Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:34.612144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:34.612257Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:34.705271Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329763242334054:2081] 1767494194255731 != 1767494194255734 2026-01-04T02:36:34.720141Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were 
not loaded 2026-01-04T02:36:34.724925Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:34.894761Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:34.938383Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:34.938404Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:34.938410Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:34.938473Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:35.294088Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:35.585929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:35.597157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:35.647097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:35.807446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.006248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.112127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.254426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329780422205118:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:38.254533Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:38.254860Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329780422205128:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:38.254917Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:38.662224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.701415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.738745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.813647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.854993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.894679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.938362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.990376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.073202Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329784717173291:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.073275Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.073586Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329784717173297:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.073846Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329784717173296:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.073898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.077043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:39.086865Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329784717173300:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:39.188895Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329784717173351:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:39.285889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329763242334258:2233];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:39.285942Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> THiveTest::TestLockTabletExecutionDeleteReboot [GOOD] >> THiveTest::TestLockTabletExecutionRebootReconnect >> TxUsage::Sinks_Oltp_WriteToTopics_2_Query [GOOD] |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert+QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 13030, MsgBus: 19868 2026-01-04T02:36:34.967888Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329762169189173:2255];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:34.968016Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:35.346421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:35.346569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:35.361140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:35.492853Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:35.492989Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329762169188946:2081] 1767494194937278 != 1767494194937281 2026-01-04T02:36:35.533077Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:35.586787Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:35.586812Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:35.586825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:35.586932Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:35.673956Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, 
path: Root/.metadata/script_executions 2026-01-04T02:36:35.954761Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:36.058694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:36.156913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.341614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.562206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.653720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.755180Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329779349060016:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:38.755345Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:38.756872Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329779349060026:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:38.756971Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.100606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.147525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.180766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.221457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.271528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.326211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.375805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.426984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.542202Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329783644028196:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.542295Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.542981Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329783644028201:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.543035Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329783644028202:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.543180Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.547483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:39.560956Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329783644028205:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:39.635340Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329783644028256:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:39.957519Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329762169189173:2255];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:39.958770Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService+UseSink >> TxUsage::Sinks_Oltp_WriteToTopics_3_Table >> KqpQueryPerf::AggregateToScalar+QueryService >> TStorageBalanceTest::TestScenario2 [GOOD] >> TStorageBalanceTest::TestScenario3 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_18_RestartAfterCommit_Query [GOOD] Test command err: 2026-01-04T02:32:52.764204Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328807858123708:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:52.764472Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:52.822730Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:53.073944Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:53.170617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:53.170746Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:53.173222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:53.204535Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:53.228033Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328807858123604:2081] 1767493972733392 != 1767493972733395 2026-01-04T02:32:53.314388Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:53.338529Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001ce8/r3tmp/yandex6Qj5Da.tmp 2026-01-04T02:32:53.338560Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
/home/runner/.ya/build/build_root/c5zo/001ce8/r3tmp/yandex6Qj5Da.tmp 2026-01-04T02:32:53.338737Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001ce8/r3tmp/yandex6Qj5Da.tmp 2026-01-04T02:32:53.338910Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:53.375176Z INFO: TTestServer started on Port 21590 GrpcPort 9286 PQClient connected to localhost:9286 2026-01-04T02:32:53.727222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:53.759695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:32:53.771078Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:53.944163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:54.139049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:32:56.559366Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328825037993835:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.559499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328825037993829:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.559652Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.561052Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328825037993844:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.561173Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.565426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:32:56.578572Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328825037993843:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T02:32:56.836962Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328825037993910:2647] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:56.884688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:56.990191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:57.157120Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328825037993920:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:32:57.160166Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=OWU3NzhhMjQtYzQxZTIwZWItZGJiNmIyYmItZjE5NmMxOGE=, ActorId: [1:7591328825037993827:2330], ActorState: ExecuteState, LegacyTraceId: 01ke3dkgfdcgx2htd9717mj353, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:32:57.163168Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:32:57.176509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7591328829332961495:2827] 2026-01-04T02:32:57.758974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328807858123708:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:57.759063Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-04T02:33:02.709541Z :WriteToTopic_Demo_12_Table INFO: TTopicSdkTestSetup started 2026-01-04T02:33:02.726476Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-04T02:33:02.754463Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7591328850807798172:2918] connected; active server actors: 1 2026-01-04T02:33:02.754709Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-01-04T02:33:02.758164Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-04T02:33:02.758342Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-04T02:33:02.770679Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:33:02.771866Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:33:02.772083Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-04T02:33:02.772135Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: ... [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:21:22 2026-01-04T02:36:34.558715Z :INFO: [/Root] [/Root] [dffbb047-2c6d10e0-264a8382-5fd371b0] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2005 BytesRead: 16000000 MessagesRead: 22 BytesReadCompressed: 16000000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:36:34.558827Z :NOTICE: [/Root] [/Root] [dffbb047-2c6d10e0-264a8382-5fd371b0] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-04T02:36:34.558881Z :DEBUG: [/Root] [/Root] [dffbb047-2c6d10e0-264a8382-5fd371b0] [] Abort session to cluster 2026-01-04T02:36:34.559414Z :DEBUG: [/Root] 0x00007DB7DAB63190 TDirectReadSessionManager ServerSessionId=test-consumer_14_1_2647300881689451785_v1 Close 2026-01-04T02:36:34.559820Z :DEBUG: [/Root] 0x00007DB7DAB63190 TDirectReadSessionManager ServerSessionId=test-consumer_14_1_2647300881689451785_v1 Close 2026-01-04T02:36:34.559942Z :NOTICE: [/Root] [/Root] [dffbb047-2c6d10e0-264a8382-5fd371b0] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:36:34.568690Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|c502fb36-fd2fb928-d270f4c8-7dffec79_0] PartitionId [0] Generation [2] Write session: close. Timeout 0.000000s 2026-01-04T02:36:34.568745Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|c502fb36-fd2fb928-d270f4c8-7dffec79_0] PartitionId [0] Generation [2] Write session will now close 2026-01-04T02:36:34.568799Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|c502fb36-fd2fb928-d270f4c8-7dffec79_0] PartitionId [0] Generation [2] Write session: aborting 2026-01-04T02:36:34.569297Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|c502fb36-fd2fb928-d270f4c8-7dffec79_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2026-01-04T02:36:34.569342Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|c502fb36-fd2fb928-d270f4c8-7dffec79_0] PartitionId [0] Generation [2] Write session: destroy 2026-01-04T02:36:36.659606Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:36.659645Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:36.659661Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:36.659683Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:36.659696Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:36.660084Z node 14 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037894][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:36:36.660194Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:36.660206Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:36.660216Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:36.660232Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:36.660242Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:36.660324Z node 14 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_14_1_2647300881689451785_v1 grpc read done: success# 0, data# { } 2026-01-04T02:36:36.660338Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_14_1_2647300881689451785_v1 grpc read failed 2026-01-04T02:36:36.660363Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_14_1_2647300881689451785_v1 grpc closed 2026-01-04T02:36:36.660394Z node 14 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_14_1_2647300881689451785_v1 is DEAD 2026-01-04T02:36:36.661001Z node 14 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:84: 
Direct read proxy [14:7591329753878366995:2545]: session cookie 2 consumer test-consumer session test-consumer_14_1_2647300881689451785_v1 grpc read done: success# 0, data# { } 2026-01-04T02:36:36.661019Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [14:7591329753878366995:2545]: session cookie 2 consumer test-consumer session test-consumer_14_1_2647300881689451785_v1grpc read failed 2026-01-04T02:36:36.661044Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:66: Direct read proxy [14:7591329753878366995:2545]: session cookie 2 consumer test-consumer session test-consumer_14_1_2647300881689451785_v1 grpc closed 2026-01-04T02:36:36.661063Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [14:7591329753878366995:2545]: session cookie 2 consumer test-consumer session test-consumer_14_1_2647300881689451785_v1 proxy is DEAD 2026-01-04T02:36:36.661345Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|c502fb36-fd2fb928-d270f4c8-7dffec79_0 grpc read done: success: 0 data: 2026-01-04T02:36:36.661358Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|c502fb36-fd2fb928-d270f4c8-7dffec79_0 grpc read failed 2026-01-04T02:36:36.661385Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 5 sessionId: test-message_group_id|c502fb36-fd2fb928-d270f4c8-7dffec79_0 grpc closed 2026-01-04T02:36:36.661398Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|c502fb36-fd2fb928-d270f4c8-7dffec79_0 is DEAD 2026-01-04T02:36:36.664746Z node 14 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [14:7591329753878366985:2540] disconnected. 2026-01-04T02:36:36.664774Z node 14 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [14:7591329753878366985:2540] disconnected; active server actors: 1 2026-01-04T02:36:36.664792Z node 14 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [14:7591329753878366985:2540] client test-consumer disconnected session test-consumer_14_1_2647300881689451785_v1 2026-01-04T02:36:36.664913Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037894] Destroy direct read session test-consumer_14_1_2647300881689451785_v1 2026-01-04T02:36:36.664947Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [14:7591329753878366988:2543] destroyed 2026-01-04T02:36:36.666728Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:36:36.667460Z node 14 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_14_1_2647300881689451785_v1 2026-01-04T02:36:36.668462Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [14:7591329749583399655:2525] destroyed 2026-01-04T02:36:36.668509Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-04T02:36:36.668537Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:36.668554Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:36.668569Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:36.668587Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:36.668602Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:36.758390Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:36.758434Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:36.758454Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:36.758478Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:36.758495Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:36.760850Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:36.760887Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:36.760902Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:36.760924Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:36.760940Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:36.861682Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:36.861723Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:36.861739Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:36.861763Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:36.861780Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:36.861870Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:36.861883Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:36.861894Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-01-04T02:36:36.861908Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:36.861924Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |88.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Query >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] >> THiveTest::TestLockTabletExecutionRebootReconnect [GOOD] >> THiveTest::TestLockTabletExecutionBadUnlock >> KqpQueryPerf::UpdateOn+QueryService+UseSink [GOOD] >> KqpQueryPerf::Replace-QueryService-UseSink >> DqUnboxedValueToNativeArrowConversion::Struct [GOOD] >> DqUnboxedValueToNativeArrowConversion::Tuple >> DqUnboxedValueToNativeArrowConversion::Tuple [GOOD] >> DqUnboxedValueToNativeArrowConversion::OptionalListOfOptional [GOOD] >> DqUnboxedValueToNativeArrowConversion::VariantOverStruct [GOOD] >> DqUnboxedValueToNativeArrowConversion::OptionalVariantOverStruct [GOOD] >> DqUnboxedValueToNativeArrowConversion::OptionalVariantOverTupleWithOptionals [GOOD] >> KqpQueryPerf::KvRead+QueryService [GOOD] >> KqpQueryPerf::KvRead-QueryService ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_21_RestartAfterCommit_Query [GOOD] Test command err: 2026-01-04T02:33:08.980992Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328875623025681:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:08.981096Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:09.008104Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:33:09.143091Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:09.143200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:09.163600Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:09.185880Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:09.202235Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:09.203195Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328875623025654:2081] 1767493988979757 != 1767493988979760 2026-01-04T02:33:09.274850Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001965/r3tmp/yandexSMTtby.tmp 2026-01-04T02:33:09.274875Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001965/r3tmp/yandexSMTtby.tmp 2026-01-04T02:33:09.275086Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001965/r3tmp/yandexSMTtby.tmp 2026-01-04T02:33:09.275196Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:09.303891Z INFO: TTestServer started on Port 1348 GrpcPort 26401 PQClient connected to localhost:26401 2026-01-04T02:33:09.464416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:09.485264Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:09.532203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:09.988089Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:11.273131Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328888507928585:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:11.273198Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328888507928577:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:11.273420Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:11.273842Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328888507928594:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:11.273923Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:11.276395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:11.283666Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328888507928592:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-04T02:33:11.452011Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328888507928658:2642] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:11.472359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:11.494385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:11.553290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:11.577409Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328888507928666:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:33:11.577855Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ODc4OTcyNDItZWNlYzRmNjItNjUwNmYwZTMtMzE4Y2FjZWM=, ActorId: [1:7591328888507928575:2329], ActorState: ExecuteState, LegacyTraceId: 01ke3dkyv78xk9ftmdfhe3hvab, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:33:11.580013Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7591328888507928941:2819] 2026-01-04T02:33:13.981188Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328875623025681:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:13.981375Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-04T02:33:17.661195Z :WriteToTopic_Demo_19_RestartNo_Table INFO: TTopicSdkTestSetup started 2026-01-04T02:33:17.672762Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-04T02:33:17.686225Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7591328914277732954:2926] connected; active server actors: 1 2026-01-04T02:33:17.686794Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-01-04T02:33:17.691237Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-04T02:33:17.691349Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-04T02:33:17.693857Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:33:17.694649Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:33:17.694884Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-04T02:33:17.694926Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:33:17.694961Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72075186224037892] doesn't have tx writes info 2026-01-04T02:33:17.695167Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037893][test-topic] TEvClientConnected TabletId 72 ... lose with zero timeout " } 2026-01-04T02:36:41.468319Z :DEBUG: [/Root] [/Root] [aebffba7-204a9022-a31128e9-84b17377] [] Abort session to cluster 2026-01-04T02:36:41.469305Z :DEBUG: [/Root] 0x00007D7D57C5D190 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_4478961974332316948_v1 Close 2026-01-04T02:36:41.469720Z :DEBUG: [/Root] 0x00007D7D57C5D190 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_4478961974332316948_v1 Close 2026-01-04T02:36:41.469881Z :NOTICE: [/Root] [/Root] [aebffba7-204a9022-a31128e9-84b17377] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:36:41.468739Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:41.468779Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.468795Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:41.468817Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.468832Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:41.477253Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_13_1_4478961974332316948_v1 grpc read done: success# 0, data# { } 2026-01-04T02:36:41.477285Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_13_1_4478961974332316948_v1 grpc read failed 2026-01-04T02:36:41.477317Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_13_1_4478961974332316948_v1 grpc closed 2026-01-04T02:36:41.477350Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_13_1_4478961974332316948_v1 is DEAD 2026-01-04T02:36:41.478845Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|fd5facfe-fdfad372-541ff8ed-229d24c2_0] PartitionId [0] Generation [2] Write session: close. Timeout 0.000000s 2026-01-04T02:36:41.478898Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|fd5facfe-fdfad372-541ff8ed-229d24c2_0] PartitionId [0] Generation [2] Write session will now close 2026-01-04T02:36:41.477914Z node 13 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:84: Direct read proxy [13:7591329783321893190:2532]: session cookie 2 consumer test-consumer session test-consumer_13_1_4478961974332316948_v1 grpc read done: success# 0, data# { } 2026-01-04T02:36:41.477965Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [13:7591329783321893190:2532]: session cookie 2 consumer test-consumer session test-consumer_13_1_4478961974332316948_v1grpc read failed 2026-01-04T02:36:41.478943Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|fd5facfe-fdfad372-541ff8ed-229d24c2_0] PartitionId [0] Generation [2] Write session: aborting 2026-01-04T02:36:41.478016Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:66: Direct read proxy [13:7591329783321893190:2532]: session cookie 2 consumer test-consumer session test-consumer_13_1_4478961974332316948_v1 grpc closed 2026-01-04T02:36:41.478037Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [13:7591329783321893190:2532]: session cookie 2 consumer test-consumer session test-consumer_13_1_4478961974332316948_v1 proxy is DEAD 2026-01-04T02:36:41.478597Z node 13 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [13:7591329783321893182:2527] disconnected. 
2026-01-04T02:36:41.478625Z node 13 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [13:7591329783321893182:2527] disconnected; active server actors: 1 2026-01-04T02:36:41.478649Z node 13 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [13:7591329783321893182:2527] client test-consumer disconnected session test-consumer_13_1_4478961974332316948_v1 2026-01-04T02:36:41.478745Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037894] Destroy direct read session test-consumer_13_1_4478961974332316948_v1 2026-01-04T02:36:41.478789Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [13:7591329783321893185:2530] destroyed 2026-01-04T02:36:41.478827Z node 13 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_13_1_4478961974332316948_v1 2026-01-04T02:36:41.479326Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|fd5facfe-fdfad372-541ff8ed-229d24c2_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2026-01-04T02:36:41.479365Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|fd5facfe-fdfad372-541ff8ed-229d24c2_0] PartitionId [0] Generation [2] Write session: destroy 2026-01-04T02:36:41.485622Z node 13 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|fd5facfe-fdfad372-541ff8ed-229d24c2_0 grpc read done: success: 0 data: 2026-01-04T02:36:41.485648Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|fd5facfe-fdfad372-541ff8ed-229d24c2_0 grpc read failed 2026-01-04T02:36:41.485673Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 5 sessionId: test-message_group_id|fd5facfe-fdfad372-541ff8ed-229d24c2_0 grpc closed 2026-01-04T02:36:41.485684Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|fd5facfe-fdfad372-541ff8ed-229d24c2_0 is DEAD 2026-01-04T02:36:41.486550Z node 13 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:36:41.487927Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [13:7591329774731958517:2504] destroyed 2026-01-04T02:36:41.487984Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-04T02:36:41.488013Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:41.488029Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.488048Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:41.488066Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.488095Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:41.509911Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:41.509967Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.509984Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:41.510008Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.510024Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:41.570968Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:41.571010Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.571026Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:41.571049Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.571065Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:41.610013Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:41.610063Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.610081Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:41.610105Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.610122Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:41.676862Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:41.676898Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.676915Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-01-04T02:36:41.676938Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.676953Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:41.712309Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:41.712357Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.712388Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:41.712428Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.712459Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:41.777999Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:41.778039Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.778054Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:41.778076Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.778090Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist >> KqpQueryPerf::IndexUpsert-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexUpsert-QueryService+UseSink >> DqOutputChannelTests::SingleRead [GOOD] >> DqOutputChannelTests::PopAll [GOOD] >> DqOutputChannelWithStorageTests::Spill [GOOD] >> DqOutputChannelWithStorageTests::Overflow [GOOD] >> DqOutputWideChannelTests::Overflow [GOOD] >> DqOutputWideChannelTests::BigRow |88.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqUnboxedValueToNativeArrowConversion::OptionalVariantOverTupleWithOptionals [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexLookupJoin-EnableStreamLookup+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 3702, MsgBus: 7858 2026-01-04T02:36:29.687369Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329739988849691:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:29.688037Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:29.957992Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:29.966934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:29.967067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:29.972971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:30.088952Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329739988849579:2081] 1767494189641458 != 1767494189641461 2026-01-04T02:36:30.096193Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:30.168058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:30.168083Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:30.168094Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:30.168181Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:30.220265Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:30.710228Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:30.768949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:30.786016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:30.905351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:31.276115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:31.532292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:31.634381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:33.656143Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329757168720632:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:33.656260Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:33.656609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329757168720642:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:33.656661Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:33.994123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.035318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.078700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.151457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.187691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.248510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.289692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.367312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.451741Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329761463688815:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:34.451824Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:34.452278Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329761463688819:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:34.452328Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:34.452341Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329761463688821:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:34.456820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:34.467763Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329761463688824:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:34.532086Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329761463688875:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:34.690020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329739988849691:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:34.690081Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 18158, MsgBus: 24539 2026-01-04T02:36:37.998964Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329774305167392:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:37.999050Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:38.019271Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:38.091478Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:38.132870Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:38.132953Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:38.139520Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:38.176100Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:38.176122Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:38.176130Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:38.176198Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:38.231791Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:38.603537Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:38.608300Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-01-04T02:36:38.612892Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.679468Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.849428Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.968727Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.087119Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:41.665969Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329791485038404:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:41.666066Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:41.666513Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329791485038414:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:41.666583Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:41.734472Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:41.771786Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:41.811776Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:41.850023Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:41.889413Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:41.971369Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.019359Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.072300Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.170028Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329795780006588:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:42.170155Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:42.170559Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329795780006594:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:42.170606Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:42.170651Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329795780006593:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:42.174259Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:42.187072Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329795780006597:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:42.279741Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329795780006648:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:42.995530Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329774305167392:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:42.995607Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] >> THiveTest::TestLockTabletExecutionBadUnlock [GOOD] >> THiveTest::TestLockTabletExecutionGoodUnlock >> DqOutputWideChannelTests::BigRow [GOOD] >> DqOutputWideChannelTests::ChunkSizeLimit [GOOD] |88.9%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 16283, MsgBus: 18577 2026-01-04T02:36:38.529074Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329777146404537:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:38.530028Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:38.569731Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:36:38.915085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:38.915204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:38.960434Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:38.986609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:39.016767Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:39.102441Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:39.102466Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:39.102473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:39.102564Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:39.250237Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:39.549855Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:39.643198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:39.666226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:39.739864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.933297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.143051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.225991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.318643Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329794326275572:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:42.318762Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:42.325945Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329794326275582:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:42.326031Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:42.690910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.732691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.770421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.814318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.881902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.933576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.980697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.050841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.138931Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329798621243749:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.139048Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.139418Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329798621243754:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.139463Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329798621243755:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.139736Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.144250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:43.155852Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329798621243758:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:43.218432Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329798621243809:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:43.530878Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329777146404537:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:43.530946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TestArrowBlockSplitter::SplitLargeBlock [GOOD] >> TestArrowBlockSplitter::SplitWithScalars [GOOD] >> TestArrowBlockSplitter::PassSmallBlock [GOOD] >> TestArrowBlockSplitter::CheckLargeRows [GOOD] >> TestArrowBlockSplitter::CheckLargeScalarRows [GOOD] >> HashShuffle::BackPressureWithSpillingMulti [GOOD] >> KqpQueryPerf::Insert-QueryService+UseSink [GOOD] |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqOutputWideChannelTests::ChunkSizeLimit [GOOD] |88.9%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest |88.9%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> HashShuffle::BackPressureWithSpillingMulti [GOOD] |88.9%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> TxUsage::WriteToTopic_Demo_41_Query [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::RangeLimitRead+QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 1170, MsgBus: 18196 2026-01-04T02:36:39.547183Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329780608553978:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:39.547547Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:39.857975Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:39.862050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:39.862170Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:39.941985Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:39.948925Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329780608553933:2081] 1767494199530202 != 1767494199530205 2026-01-04T02:36:39.979856Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:40.161433Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed 
to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:40.166478Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:40.166500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:40.166507Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:40.166602Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:40.580762Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:40.825818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:40.848062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:40.920006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:41.158138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:41.338300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:41.430420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.568899Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329797788424988:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.569005Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.569492Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329797788424998:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.569588Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.948292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.984002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.028454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.069903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.152547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.189008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.222852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.270605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.361464Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329802083393162:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:44.361559Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:44.361856Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329802083393168:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:44.361887Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:44.361949Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329802083393167:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:44.365299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:44.375895Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329802083393171:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:44.462722Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329802083393222:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:44.549632Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329780608553978:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:44.549741Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn-QueryService+UseSink [GOOD] >> THiveTest::TestLockTabletExecutionGoodUnlock [GOOD] >> THiveTest::TestLockTabletExecutionLocalGone >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink [GOOD] >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] >> Channels20::LocalChannelBackPressure >> KqpQueryPerf::IndexDeleteOn-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Insert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 8960, MsgBus: 27453 2026-01-04T02:36:32.995470Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329752004263440:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:32.995514Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:33.283206Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:33.294406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:33.294516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:33.365190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:33.368143Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:33.541046Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:33.541085Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:33.541095Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:33.541206Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:33.581258Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:34.009188Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:34.108476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:34.116488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:34.171990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.323617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.507032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.596116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.822882Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329769184134456:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:36.823035Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:36.826136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329769184134466:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:36.826232Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:37.182108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.218921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.257614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.291005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.353513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.388201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.428410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.503026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.586323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329773479102638:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:37.586422Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:37.586957Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329773479102643:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:37.587020Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329773479102644:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:37.587031Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:37.592200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:37.606437Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329773479102647:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:37.692546Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329773479102698:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:37.996174Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329752004263440:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:37.996235Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21208, MsgBus: 13303 2026-01-04T02:36:41.151533Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329792040212634:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:41.151590Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:41.176990Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:36:41.296869Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:41.296962Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:41.300361Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:41.300760Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:41.306012Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329792040212607:2081] 1767494201150295 != 1767494201150298 2026-01-04T02:36:41.320175Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:41.525503Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:41.525526Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:41.525537Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:41.525618Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:41.560571Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:42.104382Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:42.109051Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:42.122701Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.173775Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:42.186522Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.345470Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.447244Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.923083Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329804925116363:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:44.923169Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:44.923683Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329804925116373:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:44.923744Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.011517Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.050332Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.094314Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.135406Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.176899Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.228604Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.274289Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.356415Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.456331Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329809220084539:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.456420Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.456823Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329809220084544:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.456863Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329809220084545:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.456986Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.460967Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:45.478794Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329809220084548:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:45.555004Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329809220084599:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:46.154058Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329792040212634:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:46.154129Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |88.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> DqUnboxedValueDoNotFitToArrow::DictUtf8ToInterval >> DqOutputWideChannelTests::SingleRead [GOOD] >> DqOutputWideChannelTests::PartialRead [GOOD] >> DqOutputWideChannelTests::PopAll [GOOD] >> DqOutputWideChannelWithStorageTests::Spill [GOOD] >> DqOutputWideChannelWithStorageTests::Overflow [GOOD] >> DqUnboxedValueDoNotFitToArrow::DictOptionalToTuple >> KqpQueryPerf::Delete+QueryService+UseSink [GOOD] >> DqUnboxedValueToNativeArrowConversion::VariantOverTupleWithOptionals [GOOD] >> HashShuffle::BackPressureInMemory [GOOD] >> HashShuffle::BackPressureInMemoryMulti [GOOD] >> HashShuffle::BackPressureInMemoryLoad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::DeleteOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 62803, MsgBus: 14200 2026-01-04T02:36:34.758235Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329760904402686:2229];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:34.758300Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:34.776796Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:36:35.051762Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:35.071897Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:35.072038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:35.111394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:35.152621Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329760904402486:2081] 1767494194724061 != 1767494194724064 2026-01-04T02:36:35.174087Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:35.353186Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:35.383176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:35.383202Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:35.383210Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:35.383309Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:35.749939Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:35.985637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:36.010537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:36:36.086204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.275299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.461857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.541460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.390900Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329778084273553:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:38.390995Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:38.391279Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329778084273563:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:38.391335Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:38.706798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.741642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.781255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.822107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.860220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.923352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.964569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.017741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.106106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329782379241729:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.106222Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.107103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329782379241734:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.107173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329782379241735:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.107203Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.113776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:39.127263Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329782379241738:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:36:39.231780Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329782379241789:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:39.746214Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329760904402686:2229];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:39.747565Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8565, MsgBus: 24044 2026-01-04T02:36:42.077874Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:36:42.078178Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:42.141377Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:42.179341Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:42.179418Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:42.184146Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:42.259946Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:42.264312Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:42.264359Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:42.264367Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:42.264440Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:42.760404Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:42.766250Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:42.775090Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.847453Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.007435Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.056310Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:43.085608Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.372129Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329807052155277:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.372219Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.372612Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329807052155287:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.372648Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.458660Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.493133Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.532156Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.575771Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.617221Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.658551Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.708201Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.778965Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.902300Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329807052156158:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.902411Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.902650Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329807052156163:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.902698Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329807052156164:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.902797Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.908097Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:45.924583Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329807052156167:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:46.013768Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329811347123514:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> DqUnboxedValueDoNotFitToArrow::DictOptionalToTuple [GOOD] >> ConvertUnboxedValueToArrowAndBack::Tuple >> ConvertUnboxedValueToArrowAndBack::Tuple [GOOD] >> ConvertUnboxedValueToArrowAndBack::VariantOverStruct [GOOD] >> ConvertUnboxedValueToArrowAndBack::VariantOverTupleWithOptionals [GOOD] >> DqOutputChannelTests::PartialRead [GOOD] >> DqOutputChannelTests::Overflow [GOOD] >> DqOutputChannelTests::BigRow >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11822, MsgBus: 29478 2026-01-04T02:36:28.580198Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329735645303796:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:28.580673Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:28.631948Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:36:28.919939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:28.920115Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:28.926094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:29.027168Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:29.037413Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:29.229722Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:29.229754Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:29.229762Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:29.229876Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:29.229971Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:29.594433Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:29.710099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:29.769544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:29.926209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:30.102289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:30.183437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:32.548080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329752825174744:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:32.548205Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:32.548592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329752825174754:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:32.548673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:32.961707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:33.003335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:33.038385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:33.071884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:33.116743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:33.160740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:33.202023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:33.261956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:33.345525Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329757120142919:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:33.345663Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:33.346077Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329757120142925:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:33.346088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329757120142924:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:33.346154Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:33.351777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:33.364333Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329757120142928:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:33.434383Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329757120142979:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:33.579262Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329735645303796:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:33.579334Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:35.414265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemesha ... ification cookie mismatch for subscription [2:7591329778710535451:2081] 1767494198961717 != 1767494198961720 2026-01-04T02:36:39.195783Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:39.195872Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:39.210715Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:39.342417Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:39.342441Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:39.342448Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:39.342524Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:39.407740Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:39.966902Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:39.979821Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:39.985972Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:39.988820Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.079137Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.265613Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.353138Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.823042Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329795890406520:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:42.823148Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:42.823603Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329795890406530:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:42.823656Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:42.934778Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.983575Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.027058Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.068879Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.109353Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.157364Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.199290Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.278824Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.393664Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329800185374700:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.393768Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.394118Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329800185374705:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.394189Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329800185374706:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.394333Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.397776Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:43.410772Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329800185374709:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:43.500439Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329800185374760:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:43.965920Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329778710535475:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:43.966009Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:45.425918Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.484469Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.550483Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> THiveTest::TestLockTabletExecutionLocalGone [GOOD] >> THiveTest::TestLocalRegistrationInSharedHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IdxLookupJoinThreeWay-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 31596, MsgBus: 20116 2026-01-04T02:36:33.519197Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329754809293831:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:33.519246Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:33.817092Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:33.817220Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:33.882801Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:33.886203Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:33.978662Z node 1 :IMPORT 
WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:34.054459Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:34.054498Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:34.054514Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:34.054596Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:34.124380Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:34.530412Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:34.631045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:34.706772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.850637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:35.017417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:35.108324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.347857Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329771989164829:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:37.348010Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:37.348501Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329771989164839:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:37.348546Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:37.687568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.735345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.769608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.831619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.861849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.891849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.927260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.012396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.103317Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329776284133010:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:38.103399Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:38.103706Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329776284133015:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:38.103748Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329776284133016:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:38.103784Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:38.107716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:38.119274Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329776284133019:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:38.179567Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329776284133070:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:38.521156Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329754809293831:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:38.521229Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 11218, MsgBus: 25110 2026-01-04T02:36:41.737312Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:41.737382Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:36:41.816083Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:41.817459Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329792355690690:2081] 1767494201633654 != 1767494201633657 2026-01-04T02:36:41.857687Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:41.857765Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:41.861677Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:41.941570Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:41.941592Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:41.941599Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:41.941675Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:41.978467Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:42.398688Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:42.420307Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.502264Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.673683Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.720143Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:42.764204Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.214706Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329809535561741:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.219206Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.219603Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329809535561750:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.219666Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.363765Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.423727Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.468616Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.516944Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.547318Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.596140Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.643731Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.693492Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.797888Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329809535562619:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.798013Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.798378Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329809535562625:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.798425Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329809535562624:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.798475Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.805031Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:45.823052Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329809535562628:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:36:45.887464Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329809535562679:3763] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> ConvertUnboxedValueToArrowAndBack::Struct [GOOD] >> ConvertUnboxedValueToArrowAndBack::ListOfJsons [GOOD] >> ConvertUnboxedValueToArrowAndBack::OptionalListOfOptional [GOOD] >> ConvertUnboxedValueToArrowAndBack::OptionalVariantOverStruct [GOOD] >> ConvertUnboxedValueToArrowAndBack::OptionalVariantOverTupleWithOptionals [GOOD] >> ConvertUnboxedValueToArrowAndBack::OptionalOfOptional [GOOD] >> ConvertUnboxedValueToArrowAndBack::LargeVariant |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqUnboxedValueDoNotFitToArrow::DictOptionalToTuple [GOOD] |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> DqOutputChannelTests::BigRow [GOOD] >> DqOutputChannelTests::ChunkSizeLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_23_RestartAfterCommit_Query [GOOD] Test command err: 2026-01-04T02:33:00.838638Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328841101372178:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:00.838718Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:00.908471Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:33:01.184484Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:01.203075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:01.203184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:01.232436Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:01.235367Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:01.236980Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328841101372115:2081] 1767493980816668 != 1767493980816671 2026-01-04T02:33:01.292528Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001ce1/r3tmp/yandexp7KhvT.tmp 2026-01-04T02:33:01.292559Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001ce1/r3tmp/yandexp7KhvT.tmp 2026-01-04T02:33:01.292771Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: 
/home/runner/.ya/build/build_root/c5zo/001ce1/r3tmp/yandexp7KhvT.tmp 2026-01-04T02:33:01.292891Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:01.325534Z INFO: TTestServer started on Port 2829 GrpcPort 63038 2026-01-04T02:33:01.392771Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:63038 2026-01-04T02:33:01.531191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:01.537842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:33:01.633514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:01.784333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2026-01-04T02:33:01.848307Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:04.216482Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328858281242342:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:04.216636Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328858281242355:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:04.216735Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:04.219790Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328858281242358:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:04.224505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:04.229910Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:04.243354Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328858281242357:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T02:33:04.344135Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328858281242423:2645] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:04.569301Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328858281242431:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:33:04.569600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:04.569863Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NTZjNTIzZWEtMmFlN2MzZGUtNGY4OGQzZmQtMjA0ODM3NmM=, ActorId: [1:7591328858281242340:2330], ActorState: ExecuteState, LegacyTraceId: 01ke3dkqy5daz013q79vz4cp35, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:33:04.572547Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:33:04.615714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:04.719565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. 
Subcribe to ClusterTracker from [1:7591328862576210003:2822] 2026-01-04T02:33:05.838597Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328841101372178:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:05.838657Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-04T02:33:10.824597Z :WriteToTopic_Demo_21_RestartNo_Table INFO: TTopicSdkTestSetup started 2026-01-04T02:33:10.836963Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-04T02:33:10.851597Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7591328884051046721:2928] connected; active server actors: 1 2026-01-04T02:33:10.852194Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-04T02:33:10.855967Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-04T02:33:10.856127Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-04T02:33:10.858657Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:33:10.859410Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:33:10.859584Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-04T02:33:10.859622Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:9 ... 
nsumer test-consumer session test-consumer_13_1_16462416300741745538_v1 grpc read done: success# 0, data# { } 2026-01-04T02:36:41.132216Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_13_1_16462416300741745538_v1 grpc read failed 2026-01-04T02:36:41.132257Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [13:7591329783304717177:2531]: session cookie 2 consumer test-consumer session test-consumer_13_1_16462416300741745538_v1grpc read failed 2026-01-04T02:36:41.132259Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:1678: session cookie 1 consumer test-consumer session test-consumer_13_1_16462416300741745538_v1 closed 2026-01-04T02:36:41.132299Z node 13 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:363: Direct read proxy [13:7591329783304717177:2531]: session cookie 2 consumer test-consumer session test-consumer_13_1_16462416300741745538_v1 Close session with reason: reads done signal, closing everything 2026-01-04T02:36:41.132313Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:381: session cookie 2 consumer test-consumer session test-consumer_13_1_16462416300741745538_v1 closed 2026-01-04T02:36:41.132333Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_13_1_16462416300741745538_v1 is DEAD 2026-01-04T02:36:41.132473Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [13:7591329783304717177:2531]: session cookie 2 consumer test-consumer session test-consumer_13_1_16462416300741745538_v1 proxy is DEAD 2026-01-04T02:36:41.133076Z node 13 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [13:7591329783304717168:2526] disconnected. 2026-01-04T02:36:41.133103Z node 13 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [13:7591329783304717168:2526] disconnected; active server actors: 1 2026-01-04T02:36:41.133125Z node 13 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [13:7591329783304717168:2526] client test-consumer disconnected session test-consumer_13_1_16462416300741745538_v1 2026-01-04T02:36:41.133228Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037894] Destroy direct read session test-consumer_13_1_16462416300741745538_v1 2026-01-04T02:36:41.133260Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [13:7591329783304717171:2529] destroyed 2026-01-04T02:36:41.133297Z node 13 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_13_1_16462416300741745538_v1 2026-01-04T02:36:41.138381Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|fcf4ad07-2e6784d9-c2e9392-cfec4986_0] PartitionId [0] Generation [2] Write session: close. 
Timeout 0.000000s 2026-01-04T02:36:41.138445Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|fcf4ad07-2e6784d9-c2e9392-cfec4986_0] PartitionId [0] Generation [2] Write session will now close 2026-01-04T02:36:41.138495Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|fcf4ad07-2e6784d9-c2e9392-cfec4986_0] PartitionId [0] Generation [2] Write session: aborting 2026-01-04T02:36:41.138994Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|fcf4ad07-2e6784d9-c2e9392-cfec4986_0] PartitionId [0] Generation [2] Write session: gracefully shut down, all writes complete 2026-01-04T02:36:41.139041Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|fcf4ad07-2e6784d9-c2e9392-cfec4986_0] PartitionId [0] Generation [2] Write session: destroy 2026-01-04T02:36:41.146294Z node 13 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|fcf4ad07-2e6784d9-c2e9392-cfec4986_0 grpc read done: success: 0 data: 2026-01-04T02:36:41.146326Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|fcf4ad07-2e6784d9-c2e9392-cfec4986_0 grpc read failed 2026-01-04T02:36:41.146366Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 5 sessionId: test-message_group_id|fcf4ad07-2e6784d9-c2e9392-cfec4986_0 grpc closed 2026-01-04T02:36:41.146383Z node 13 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|fcf4ad07-2e6784d9-c2e9392-cfec4986_0 is DEAD 2026-01-04T02:36:41.147503Z node 13 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:36:41.148788Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [13:7591329774714782503:2503] destroyed 2026-01-04T02:36:41.148835Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-04T02:36:41.148863Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:41.148880Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.148893Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:41.148911Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.148925Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:41.158137Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:41.158169Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.158182Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:41.158218Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.158232Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:41.176097Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:41.176128Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.176140Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:41.176170Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.176193Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:41.261922Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:41.261970Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.261984Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:41.262003Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.262024Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:41.277961Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:41.278008Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.278022Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-01-04T02:36:41.278049Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.278066Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:41.361911Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:41.361956Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.361970Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:41.361992Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.362007Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:41.378345Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:41.378386Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.378401Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:41.378423Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.378439Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:41.466139Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:41.466188Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.466202Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:41.466225Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.466268Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:41.481528Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:41.481563Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.481576Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:41.481598Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:41.481633Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |89.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> 
TGRpcStreamingTest::ClientDisconnects >> KqpQueryPerf::Replace+QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::WriteToTopic_Demo_41_Query [GOOD] Test command err: 2026-01-04T02:32:57.162052Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328830704142639:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:57.162192Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:57.223760Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:57.224001Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:57.484416Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:57.484507Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:57.486022Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:57.529483Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:57.566486Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328830704142529:2081] 1767493977125298 != 1767493977125301 2026-01-04T02:32:57.566581Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:57.618533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001ce2/r3tmp/yandexRVoiMJ.tmp 2026-01-04T02:32:57.618575Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001ce2/r3tmp/yandexRVoiMJ.tmp 2026-01-04T02:32:57.618762Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001ce2/r3tmp/yandexRVoiMJ.tmp 2026-01-04T02:32:57.618882Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:57.652455Z INFO: TTestServer started on Port 16315 GrpcPort 27581 2026-01-04T02:32:57.736493Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:27581 2026-01-04T02:32:57.852501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:32:57.876093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:32:57.940183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:58.186583Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:00.544972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328843589045448:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:00.545131Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:00.545724Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328843589045461:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:00.545758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328843589045462:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:00.545835Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:00.556678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:00.574149Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328843589045465:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T02:33:00.772888Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328843589045529:2641] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:00.810778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:00.909729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:01.016617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:01.030543Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328843589045537:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:33:01.033431Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MjU0NjNmZmMtYzdjMjhkNy05N2M1ZWFlMS1jZGRlNmFiYQ==, ActorId: [1:7591328843589045446:2330], ActorState: ExecuteState, LegacyTraceId: 01ke3dkmbjb4s5jpdgvrhpp85q, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:33:01.036606Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7591328847884013121:2822] 2026-01-04T02:33:02.161430Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328830704142639:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:02.161517Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-04T02:33:06.330025Z :WriteToTopic_Demo_23_RestartNo_Table INFO: TTopicSdkTestSetup started 2026-01-04T02:33:06.347511Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-04T02:33:06.362418Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7591328869358849799:2913] connected; active server actors: 1 2026-01-04T02:33:06.362787Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-01-04T02:33:06.366910Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-04T02:33:06.367045Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-04T02:33:06.369744Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:33:06.370407Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:33:06.370561Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-04T02:33:06 ... :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:48.616051Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:48.616072Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:48.616089Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Try persist 2026-01-04T02:36:48.616111Z node 13 :PERSQUEUE DEBUG: partition.cpp:2712: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Key deleted: D0000100000_00000000000000000070_00000_0000000001_00001? 2026-01-04T02:36:48.616135Z node 13 :PERSQUEUE DEBUG: partition.cpp:2712: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Key deleted: D0000100000_00000000000000000071_00000_0000000001_00001? 2026-01-04T02:36:48.616151Z node 13 :PERSQUEUE DEBUG: partition.cpp:2712: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Key deleted: D0000100000_00000000000000000072_00000_0000000001_00001? 2026-01-04T02:36:48.616168Z node 13 :PERSQUEUE DEBUG: partition.cpp:2712: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Key deleted: D0000100000_00000000000000000073_00000_0000000001_00001? 2026-01-04T02:36:48.616186Z node 13 :PERSQUEUE DEBUG: partition.cpp:2712: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Key deleted: D0000100000_00000000000000000074_00000_0000000001_00001? 2026-01-04T02:36:48.616201Z node 13 :PERSQUEUE DEBUG: partition.cpp:2712: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Key deleted: D0000100000_00000000000000000075_00000_0000000002_00002? 2026-01-04T02:36:48.616218Z node 13 :PERSQUEUE DEBUG: partition.cpp:2712: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Key deleted: D0000100000_00000000000000000077_00000_0000000001_00001? 2026-01-04T02:36:48.616235Z node 13 :PERSQUEUE DEBUG: partition.cpp:2712: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Key deleted: D0000100000_00000000000000000078_00000_0000000001_00001? 
2026-01-04T02:36:48.616362Z node 13 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] No data for blobs compaction 2026-01-04T02:36:48.616459Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic_A' partition: 0 messageNo: 159 requestId: cookie: 80 2026-01-04T02:36:48.616563Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:428: Erasing blob in L1. Partition 100000 offset 63 size 7001528 cause it's been evicted from L2. Actual L1 size: 18 2026-01-04T02:36:48.616591Z node 13 :PERSQUEUE DEBUG: read.h:371: [72075186224037894][PQCacheProxy]Have to remove new data from cache. Topic topic_A, cookie 0 2026-01-04T02:36:48.616621Z node 13 :PERSQUEUE DEBUG: read.h:275: [72075186224037894][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:36:48.616660Z node 13 :PERSQUEUE DEBUG: read.h:350: [72075186224037894][PQCacheProxy]CacheProxy. Delete blobs from D0000100000_00000000000000000070_00000_0000000001_00001?(+) to D0000100000_00000000000000000070_00000_0000000001_00001?(+) 2026-01-04T02:36:48.616674Z node 13 :PERSQUEUE DEBUG: read.h:350: [72075186224037894][PQCacheProxy]CacheProxy. Delete blobs from D0000100000_00000000000000000071_00000_0000000001_00001?(+) to D0000100000_00000000000000000071_00000_0000000001_00001?(+) 2026-01-04T02:36:48.616685Z node 13 :PERSQUEUE DEBUG: read.h:350: [72075186224037894][PQCacheProxy]CacheProxy. Delete blobs from D0000100000_00000000000000000072_00000_0000000001_00001?(+) to D0000100000_00000000000000000072_00000_0000000001_00001?(+) 2026-01-04T02:36:48.616698Z node 13 :PERSQUEUE DEBUG: read.h:350: [72075186224037894][PQCacheProxy]CacheProxy. Delete blobs from D0000100000_00000000000000000073_00000_0000000001_00001?(+) to D0000100000_00000000000000000073_00000_0000000001_00001?(+) 2026-01-04T02:36:48.616711Z node 13 :PERSQUEUE DEBUG: read.h:350: [72075186224037894][PQCacheProxy]CacheProxy. Delete blobs from D0000100000_00000000000000000074_00000_0000000001_00001?(+) to D0000100000_00000000000000000074_00000_0000000001_00001?(+) 2026-01-04T02:36:48.616724Z node 13 :PERSQUEUE DEBUG: read.h:350: [72075186224037894][PQCacheProxy]CacheProxy. Delete blobs from D0000100000_00000000000000000075_00000_0000000002_00002?(+) to D0000100000_00000000000000000075_00000_0000000002_00002?(+) 2026-01-04T02:36:48.616737Z node 13 :PERSQUEUE DEBUG: read.h:350: [72075186224037894][PQCacheProxy]CacheProxy. Delete blobs from D0000100000_00000000000000000077_00000_0000000001_00001?(+) to D0000100000_00000000000000000077_00000_0000000001_00001?(+) 2026-01-04T02:36:48.616750Z node 13 :PERSQUEUE DEBUG: read.h:350: [72075186224037894][PQCacheProxy]CacheProxy. Delete blobs from D0000100000_00000000000000000078_00000_0000000001_00001?(+) to D0000100000_00000000000000000078_00000_0000000001_00001?(+) 2026-01-04T02:36:48.617615Z node 13 :PERSQUEUE DEBUG: partition.cpp:4395: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Handle TEvPQ::TEvDeletePartition 2026-01-04T02:36:48.617644Z node 13 :PERSQUEUE DEBUG: partition.cpp:2305: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:36:48.618100Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 71 count 1 actorID [13:7591329803330634562:2465] 2026-01-04T02:36:48.618136Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. 
Partition 100000 offset 72 count 1 actorID [13:7591329803330634562:2465] 2026-01-04T02:36:48.618161Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 73 count 1 actorID [13:7591329803330634562:2465] 2026-01-04T02:36:48.618186Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 74 count 1 actorID [13:7591329803330634562:2465] 2026-01-04T02:36:48.618212Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 75 count 2 actorID [13:7591329803330634562:2465] 2026-01-04T02:36:48.618243Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 77 count 1 actorID [13:7591329803330634562:2465] 2026-01-04T02:36:48.618271Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 78 count 1 actorID [13:7591329803330634562:2465] 2026-01-04T02:36:48.618312Z node 13 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:36:48.618346Z node 13 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:36:48.618594Z node 13 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72075186224037894][Partition][{0, {13, 281474976715674}, 100000}][StateIdle] No data for blobs compaction 2026-01-04T02:36:48.619321Z node 13 :PERSQUEUE DEBUG: read.h:275: [72075186224037894][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:36:48.619348Z node 13 :PERSQUEUE DEBUG: read.h:350: [72075186224037894][PQCacheProxy]CacheProxy. Delete blobs from D0000100000(+) to D0000100001(-) 2026-01-04T02:36:48.620117Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 71 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-04T02:36:48.620149Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 72 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-04T02:36:48.620170Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 73 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-04T02:36:48.620190Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 74 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-04T02:36:48.620210Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 75 partno 0 count 2 parts 2 suffix '63' size 2000457 2026-01-04T02:36:48.620228Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 77 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-04T02:36:48.620247Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 78 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-04T02:36:48.620806Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. 
Partition 100000 offset 70 count 8 actorID [13:7591329803330634562:2465] 2026-01-04T02:36:48.620837Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 78 count 1 actorID [13:7591329803330634562:2465] 2026-01-04T02:36:48.620851Z node 13 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 79 count 1 actorID [13:7591329803330634562:2465] 2026-01-04T02:36:48.621111Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 70 partno 0 count 8 parts 8 suffix '0' size 8001742 2026-01-04T02:36:48.621152Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 78 partno 0 count 1 parts 1 suffix '124' size 1000243 2026-01-04T02:36:48.621181Z node 13 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72075186224037894' partition 100000 offset 79 partno 0 count 1 parts 1 suffix '63' size 1000243 2026-01-04T02:36:48.693923Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:48.693975Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:48.693993Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:48.694017Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:48.694034Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:48.694093Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:48.694106Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:48.694116Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:48.694131Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:48.694142Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |89.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Delete+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 62495, MsgBus: 31671 2026-01-04T02:36:35.098436Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329764078275155:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:35.098484Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:35.449930Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, 
path: Root/.metadata/script_executions 2026-01-04T02:36:35.454456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:35.454565Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:35.486228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:35.561099Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:35.581142Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329764078275116:2081] 1767494195092878 != 1767494195092881 2026-01-04T02:36:35.650469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:35.650499Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:35.650515Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:35.650590Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:35.719426Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:36.158191Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:36.291917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:36.350565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.589721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.762075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.858638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.765419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329776963178880:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:38.765537Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:38.766059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329776963178890:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:38.766158Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.137642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.175981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.237618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.277043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.304425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.352208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.394947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.443024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:39.541741Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329781258147066:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.541869Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.542446Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329781258147071:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.542518Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329781258147072:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.542883Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:39.547390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:39.563225Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329781258147075:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:39.639983Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329781258147126:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:40.102049Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329764078275155:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:40.102114Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27368, MsgBus: 1218 2026-01-04T02:36:42.940496Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329795588166020:2103];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:42.940952Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:42.984961Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:43.092159Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:43.108998Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:43.109070Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:43.115653Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:43.276364Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:43.276389Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:43.276398Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:43.276469Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:43.282601Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:43.779105Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:43.785713Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-01-04T02:36:43.804433Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.882046Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.989621Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:44.040121Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.176369Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:46.605993Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329812768036995:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:46.606167Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:46.607152Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329812768037005:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:46.607250Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:46.675559Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:46.748802Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:46.791540Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:46.829083Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:46.879064Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:46.916067Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:46.961184Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.024691Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.128218Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329817063005178:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.128302Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.128573Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329817063005183:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.128603Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329817063005184:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.128627Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.132317Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:47.144559Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-04T02:36:47.144731Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329817063005187:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:47.226716Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329817063005238:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:47.940085Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329795588166020:2103];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:47.940157Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService+UseSink [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqOutputChannelTests::ChunkSizeLimit [GOOD] |89.0%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::UpdateOn-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 1877, MsgBus: 15768 2026-01-04T02:36:42.814507Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329796323272346:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:42.814812Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:43.112605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:43.112672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:43.177204Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:43.192959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:43.248554Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:43.253934Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329796323272237:2081] 1767494202808620 != 1767494202808623 2026-01-04T02:36:43.345894Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:43.469373Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:43.469396Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-04T02:36:43.469419Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:43.469487Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:43.818076Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:43.996937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:44.016721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:44.134430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.356187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.561732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.642733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:46.801187Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329813503143306:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:46.801315Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:46.801733Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329813503143316:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:46.801774Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.085861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.124225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.165011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.224906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.256522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.294968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.333773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.381354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.467355Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329817798111486:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.467447Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.467687Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329817798111491:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.467703Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329817798111492:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.467732Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.472280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:47.487313Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329817798111495:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:47.580415Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329817798111546:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:47.813211Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329796323272346:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:47.813274Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::AggregateToScalar+QueryService [GOOD] >> KqpQueryPerf::AggregateToScalar-QueryService |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 15318, MsgBus: 28615 2026-01-04T02:36:44.495376Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329802532134915:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:44.495440Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:44.832429Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:44.841283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:44.841404Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:44.895128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:44.970872Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:45.069949Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:45.069972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:45.070003Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:45.070176Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:45.078993Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:45.510150Z node 1 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:45.601031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:45.610970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:36:45.666737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.812738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.984932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:46.084104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.106826Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329819712005928:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.106948Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.107379Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329819712005938:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.107435Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.465966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.513184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.548059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.582061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.612172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.641152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.674901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.725693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.839901Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329819712006818:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.840047Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.840172Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329819712006823:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.840420Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329819712006825:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.840497Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.843715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:48.855307Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329819712006826:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:36:48.926723Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329819712006878:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:49.495625Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329802532134915:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:49.495693Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace-QueryService-UseSink [GOOD] >> KqpQueryPerf::IndexReplace-QueryService+UseSink >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Upsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 61450, MsgBus: 61418 2026-01-04T02:36:44.302846Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329802889081132:2253];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:44.302901Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:44.351100Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:36:44.774956Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:44.781361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:44.781561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:44.784518Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:44.810905Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:44.833944Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329802889080916:2081] 1767494204266695 != 1767494204266698 2026-01-04T02:36:44.956363Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:44.956386Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:44.956392Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:44.956471Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:45.036193Z 
node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:45.292502Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:45.480896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:45.556726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.753396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.946446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:46.037440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.161779Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329820068951971:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.161909Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.162292Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329820068951981:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.162370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.534006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.579509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.612121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.643704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.674992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.706678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.747020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.794453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.882680Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329820068952852:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.882746Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.882853Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329820068952857:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.883142Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329820068952859:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.883202Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.887078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:48.902636Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329820068952860:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:49.004465Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329820068952912:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:49.288912Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329802889081132:2253];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:49.288969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TGRpcStreamingTest::WritesDoneFromClient >> KqpQueryPerf::KvRead-QueryService [GOOD] >> TTxAllocatorClientTest::Boot >> Channels20::CaIntegrationIc [GOOD] >> TTxAllocatorClientTest::ZeroRange >> TGRpcStreamingTest::ClientNeverWrites >> TGRpcStreamingTest::ReadFinish >> TGRpcStreamingTest::SimpleEcho >> DqUnboxedValueDoNotFitToArrow::DictUtf8ToInterval [GOOD] >> DqUnboxedValueDoNotFitToArrow::OptionalOfOptional [GOOD] >> DqUnboxedValueDoNotFitToArrow::LargeVariant >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] >> TTxAllocatorClientTest::Boot [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestLocalRegistrationInSharedHive [GOOD] Test command err: 2026-01-04T02:35:01.119220Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:35:01.149177Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:35:01.149552Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:35:01.150542Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:35:01.150973Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-04T02:35:01.152212Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-04T02:35:01.152289Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# 
true GroupInfoGeneration# 1 2026-01-04T02:35:01.153344Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2075] ControllerId# 72057594037932033 2026-01-04T02:35:01.153379Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:35:01.153504Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-04T02:35:01.153705Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:35:01.168192Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:35:01.168260Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:35:01.170856Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.171054Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.171195Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.171365Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.171533Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:40:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.171710Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:41:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.173259Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:42:2086] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:01.173308Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:35:01.173567Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:28:2075] 2026-01-04T02:35:01.173606Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:28:2075] 2026-01-04T02:35:01.173662Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-04T02:35:01.173796Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-04T02:35:01.174724Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-04T02:35:01.175264Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:01.206204Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-04T02:35:01.206300Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:35:01.220090Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:35:01.220158Z node 1 :BS_NODE DEBUG: 
{NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-04T02:35:01.220697Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:35:01.221378Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:35:01.221602Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-04T02:35:01.221782Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:52:2092] 2026-01-04T02:35:01.222066Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:52:2092] 2026-01-04T02:35:01.222149Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-04T02:35:01.222215Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-04T02:35:01.222254Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-04T02:35:01.222317Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:01.222461Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:01.222506Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:32:2063] 2026-01-04T02:35:01.222524Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [1:32:2063] 2026-01-04T02:35:01.226096Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:52:2092] 2026-01-04T02:35:01.226160Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:01.226301Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:01.226395Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2026-01-04T02:35:01.226420Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2026-01-04T02:35:01.226469Z node 1 :BS_NODE DEBUG: {NWDC13@distconf_binding.cpp:26} ApplyNewNodeList NewNodeList# [[::1:12001/1:DC=1/M=1/R=1/U=1/]] 2026-01-04T02:35:01.232390Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:509} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2026-01-04T02:35:01.234084Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 
2026-01-04T02:35:01.234161Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2026-01-04T02:35:01.234387Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:01.234495Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72057594037932033 leader: [0:0:0] followers: 0 2026-01-04T02:35:01.234600Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037932033] forward result error, check reconnect [1:28:2075] 2026-01-04T02:35:01.234652Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037932033] schedule retry [1:28:2075] 2026-01-04T02:35:01.241340Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:35:01.241640Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:35:01.241878Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-04T02:35:01.242049Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2026-01-04T02:35:01.242138Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2026-01-04T02:35:01.242179Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2026-01-04T02:35:01.242232Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2026-01-04T02:35:01.242268Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:35:01.242379Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-04T02:35:01.244093Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2026-01-04T02:35:01.244132Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2026-01-04T02:35:01.244152Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2026-01-04T02:35:01.244299Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest: ... 
72057594046678944 leader: [44:334:2203] followers: 0 2026-01-04T02:36:52.445398Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 45 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594046678944 followers: 0 countLeader 1 allowFollowers 0 winner: [44:334:2203] 2026-01-04T02:36:52.445510Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72057594046678944] forward result remote node 44 [45:557:2161] 2026-01-04T02:36:52.445667Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72057594046678944] remote node connected [45:557:2161] 2026-01-04T02:36:52.445731Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594046678944]::SendEvent [45:557:2161] 2026-01-04T02:36:52.446123Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594046678944] Accept Connect Originator# [45:557:2161] 2026-01-04T02:36:52.446440Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594046678944] connected with status OK role: Leader [45:557:2161] 2026-01-04T02:36:52.446503Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594046678944] send queued [45:557:2161] 2026-01-04T02:36:52.446606Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046678944] send [45:557:2161] 2026-01-04T02:36:52.446634Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046678944] push event to server [45:557:2161] 2026-01-04T02:36:52.446680Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594046678944]::SendEvent [45:557:2161] 2026-01-04T02:36:52.446807Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72057594046678944] Push Sender# [45:556:2161] EventType# 271122945 2026-01-04T02:36:52.446968Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme 2026-01-04T02:36:52.447063Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:36:52.447353Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T02:36:52.447440Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594046678944:2:12} Tx{16, NKikimr::NSchemeShard::TSchemeShard::TTxDescribeScheme} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:36:52.448747Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [45:563:2162] 2026-01-04T02:36:52.448783Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [45:563:2162] 2026-01-04T02:36:52.448813Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [45:564:2163] 2026-01-04T02:36:52.448829Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [45:564:2163] 2026-01-04T02:36:52.448958Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [45:563:2162] 2026-01-04T02:36:52.449011Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72075186224037888] queue send [45:564:2163] 2026-01-04T02:36:52.449197Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 
entry.State: StNormal leader: [44:333:2202] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:36:52.449244Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 45 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72057594037927937 followers: 0 countLeader 1 allowFollowers 0 winner: [44:333:2202] 2026-01-04T02:36:52.449503Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:36:52.449625Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72057594037927937] forward result remote node 44 [45:563:2162] 2026-01-04T02:36:52.450106Z node 45 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:36:52.450162Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72057594037927937] remote node connected [45:563:2162] 2026-01-04T02:36:52.450202Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [45:563:2162] 2026-01-04T02:36:52.450849Z node 44 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-04T02:36:52.450910Z node 44 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-04T02:36:52.451018Z node 44 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-04T02:36:52.451235Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037927937] Accept Connect Originator# [45:563:2162] 2026-01-04T02:36:52.451570Z node 45 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [44:475:2306] CurrentLeaderTablet: [44:491:2317] CurrentGeneration: 1 CurrentStep: 0} 2026-01-04T02:36:52.451689Z node 45 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 0 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0 CurrentLeader: [44:475:2306] CurrentLeaderTablet: [44:491:2317] CurrentGeneration: 1 CurrentStep: 0} 2026-01-04T02:36:52.451771Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [44:475:2306] followers: 0 2026-01-04T02:36:52.451850Z node 45 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 45 selfDC 2 leaderDC 1 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 72075186224037888 followers: 0 countLeader 1 allowFollowers 0 winner: [44:475:2306] 2026-01-04T02:36:52.451934Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:193: TClient[72075186224037888] forward result remote node 44 [45:564:2163] 2026-01-04T02:36:52.452491Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:227: TClient[72075186224037888] remote node connected [45:564:2163] 2026-01-04T02:36:52.452539Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037888]::SendEvent [45:564:2163] 2026-01-04T02:36:52.452741Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected 
with status OK role: Leader [45:563:2162] 2026-01-04T02:36:52.452779Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [45:563:2162] 2026-01-04T02:36:52.452819Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [45:563:2162] 2026-01-04T02:36:52.452950Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [45:563:2162] 2026-01-04T02:36:52.453330Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72075186224037888] Accept Connect Originator# [45:564:2163] 2026-01-04T02:36:52.453410Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72057594037927937] Push Sender# [45:560:2162] EventType# 268959744 2026-01-04T02:36:52.453678Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2026-01-04T02:36:52.453783Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:36:52.454045Z node 44 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(45, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:52.454174Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{14, redo 204b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-01-04T02:36:52.454280Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{24, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:36:52.454729Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{25, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-01-04T02:36:52.454916Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{25, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:36:52.455042Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{25, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{15, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T02:36:52.455168Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:9} Tx{25, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:36:52.455425Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72075186224037888] connected with status OK role: Leader [45:564:2163] 2026-01-04T02:36:52.455469Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037888] send queued [45:564:2163] 2026-01-04T02:36:52.455499Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037888] push event to server [45:564:2163] 2026-01-04T02:36:52.455559Z node 45 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72075186224037888]::SendEvent [45:564:2163] 2026-01-04T02:36:52.455700Z node 44 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72075186224037888] Push Sender# [45:561:2163] EventType# 268959744 2026-01-04T02:36:52.455868Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2026-01-04T02:36:52.455923Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:36:52.456099Z node 44 :HIVE WARN: node_info.cpp:25: 
HIVE#72075186224037888 Node(45, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:52.456198Z node 44 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(45, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:52.456273Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{6, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-01-04T02:36:52.456324Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:6} Tx{6, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:36:52.456525Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-01-04T02:36:52.456577Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:36:52.456667Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T02:36:52.456713Z node 44 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:7} Tx{7, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::Boot [GOOD] Test command err: 2026-01-04T02:36:53.698576Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-04T02:36:53.699142Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-04T02:36:53.699937Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-04T02:36:53.701867Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:36:53.702431Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-04T02:36:53.713945Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:36:53.714083Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:36:53.714208Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-04T02:36:53.714335Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:36:53.714405Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:36:53.714502Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-04T02:36:53.714632Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 >> TGRpcStreamingTest::ClientDisconnects [GOOD] |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpdateOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 23309, MsgBus: 18298 2026-01-04T02:36:33.059006Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329755192446053:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:33.059041Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:33.407930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:33.408077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:33.427154Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:33.478583Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:33.478630Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:33.479931Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329755192446013:2081] 1767494193035001 != 1767494193035004 
2026-01-04T02:36:33.558341Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:33.558370Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:33.558380Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:33.558444Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:33.641905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:34.090447Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:34.108983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:34.115926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:34.209716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.368936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.537115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:34.612100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:36.730728Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329768077349773:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:36.730868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:36.736437Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329768077349783:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:36.736537Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:37.024178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.065128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.095516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.124038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.175819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.218283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.261339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.312911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.400698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329772372317954:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:37.400771Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:37.400853Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329772372317959:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:37.400919Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329772372317961:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:37.400971Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:37.405063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:37.418649Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329772372317963:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:37.484968Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329772372318014:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:38.059499Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329755192446053:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:38.059562Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:39.282023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... 2026-01-04T02:36:43.435067Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:43.450069Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:43.450134Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:43.481886Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:43.616127Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:43.633654Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:43.633680Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:43.633691Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:43.633773Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:44.169771Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:44.180022Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:44.195363Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.237274Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:44.277062Z node 2 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.458069Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.557956Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.134126Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329816215173967:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.134211Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.134479Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329816215173977:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.134537Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.219116Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.262295Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.301582Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.341606Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.424932Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.482055Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.528702Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.596563Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.681794Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329816215174853:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.681914Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.682218Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329816215174858:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.682270Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329816215174859:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.682361Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.685683Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:47.696560Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329816215174862:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:47.757412Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329816215174913:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:48.269297Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329799035302981:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:48.269395Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:49.437509Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:49.485639Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:49.529068Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::KvRead-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 15707, MsgBus: 10602 2026-01-04T02:36:38.669009Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329778139079346:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:38.669066Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:38.989964Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:39.096419Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:39.096532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:39.158532Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:39.164561Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329778139079236:2081] 1767494198622373 != 1767494198622376 
2026-01-04T02:36:39.172609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:39.196261Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:39.302432Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:39.302456Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:39.302466Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:39.302553Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:39.670903Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:39.846025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:39.869638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:39.929904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.104854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.293384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.373086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.352072Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329795318950293:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:42.352195Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:42.354387Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329795318950303:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:42.354468Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:42.665099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.741961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.788554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.830718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.894846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.940871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:42.986731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.043552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.140537Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329799613918469:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.140629Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.140930Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329799613918475:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.140983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329799613918474:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.141034Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.145344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:43.158865Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329799613918478:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:43.239976Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329799613918529:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:43.641928Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329778139079346:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:43.642008Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29345, MsgBus: 27585 2026-01-04T02:36:46.502963Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329813099456330:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:46.503074Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:46.524089Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:36:46.637977Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:46.640967Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:46.645338Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329813099456289:2081] 1767494206493007 != 1767494206493010 2026-01-04T02:36:46.655543Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:46.655636Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:46.683314Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:46.769446Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:46.769468Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:46.769477Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:46.769555Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:46.899497Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:47.267276Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:47.274562Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:36:47.299944Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.384741Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.526859Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:47.577823Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.650114Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.094946Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329830279327339:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.095041Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.102114Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329830279327349:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.102195Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.168290Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.207648Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.248599Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.283183Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.323917Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.363473Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.397001Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.470686Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.548896Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329830279328222:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.548983Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.549057Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329830279328227:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.549122Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329830279328229:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.549188Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.552504Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:50.561820Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329830279328231:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:36:50.647547Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329830279328282:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:51.504954Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329813099456330:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:51.505030Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::InitiatingRequest [GOOD] Test command err: 2026-01-04T02:36:54.582721Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-04T02:36:54.583218Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-04T02:36:54.583905Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-04T02:36:54.585295Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:36:54.585676Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-04T02:36:54.597902Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:36:54.598024Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:36:54.598137Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-04T02:36:54.598260Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:36:54.598331Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:36:54.598449Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-04T02:36:54.598585Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! 
Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-04T02:36:54.599312Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-01-04T02:36:54.599826Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:36:54.599895Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:36:54.600026Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2026-01-04T02:36:54.600073Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 5000 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::Replace-QueryService-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 2905, MsgBus: 13890 2026-01-04T02:36:46.355289Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329812221643129:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:46.356213Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:46.380825Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:36:46.662903Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:46.679463Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:46.679568Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:46.681918Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:46.693157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:46.842385Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:46.842408Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:46.842418Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:46.842482Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:46.889757Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:47.361790Z node 1 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:47.391806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:47.458815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.605075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.758009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.841980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:49.887533Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329825106546844:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:49.887632Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:49.887908Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329825106546854:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:49.887936Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.213094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.250331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.284262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.320005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.353754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.428060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.462055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.506031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.589469Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329829401515033:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.589546Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.589579Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329829401515038:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.589828Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329829401515040:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.589887Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.593508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:50.606375Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329829401515042:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:50.664231Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329829401515093:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:51.353957Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329812221643129:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:51.354019Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> KqpRboYql::Select >> KqpRboPg::Bench_Select ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientDisconnects [GOOD] Test command err: 2026-01-04T02:36:51.370399Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329832266204469:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:51.370448Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:51.674777Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:51.763402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:51.763845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:51.765921Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:51.832118Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:51.845847Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:51.937707Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d2161411080] stream done notification Name# Session ok# true peer# ipv6:[::1]:49200 2026-01-04T02:36:51.937987Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d2161411080] stream accepted Name# Session ok# true peer# ipv6:[::1]:49200 2026-01-04T02:36:51.938668Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d2161411080] facade attach Name# Session actor# [1:7591329832266205205:2467] peer# ipv6:[::1]:49200 2026-01-04T02:36:51.938706Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:230: Received TEvNotifiedWhenDone 2026-01-04T02:36:51.939275Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7d2161411080] stream finished Name# Session ok# false peer# unknown grpc status# (1) message# Request abandoned 2026-01-04T02:36:51.939324Z 
node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d2161411080] deregistering request Name# Session peer# unknown (finish done) |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> Channels20::CaIntegrationIc [GOOD] Test command err: Trying to start YDB, gRPC: 12823, MsgBus: 10242 2026-01-04T02:35:38.863753Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329522439550133:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:38.863928Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:38.944463Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329520302374987:2177];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:38.944541Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:39.206809Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:39.210507Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:39.268927Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:39.269060Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:39.270735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:39.270806Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:39.276302Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:35:39.276475Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:39.277217Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:39.380626Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:39.397257Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:39.401016Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:39.505478Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:39.505517Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:39.505526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:39.505733Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:39.883330Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:39.972572Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:40.025111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:40.097643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:40.366200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:40.599680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:40.734959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:42.948102Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329539619421733:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:42.948240Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:42.948545Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329539619421743:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:42.948605Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:43.241624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:43.307091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:43.387657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:43.466130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:43.594205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:43.678753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:43.771297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:43.854544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:43.863969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329522439550133:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:43.864080Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:35:43.943849Z node 2 :METADATA_PROVIDER ERROR: 
log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329520302374987:2177];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:43.943933Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:35:44.008677Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329548209357534:2430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:44.008750Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:44.008974Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329548209357539:2433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:44.008981Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [Workloa ... 2:36:39.218692Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:39.394050Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:39.398094Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:39.441560Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:39.498995Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:39.560450Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:39.561605Z node 13 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:39.561940Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:40.092164Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:40.143813Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.343769Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.730388Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.854620Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.481907Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7591329777610779528:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:43.482011Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:43.518292Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7591329779834008108:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:43.518372Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:43.561916Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7591329779114358449:2226];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:43.562011Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:44.287183Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591329803380585507:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:44.287306Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:44.295050Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591329803380585517:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:44.295191Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:44.389148Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.504283Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.644135Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.782788Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.888198Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.005457Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.278206Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.436484Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.593202Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591329807675553843:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.593293Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.593382Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591329807675553848:2407], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.594033Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591329807675553851:2409], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.594121Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.598499Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:45.626473Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7591329807675553850:2408], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:45.723674Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7591329807675553926:4630] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:48.615050Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) |89.0%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> Channels20::LocalChannelBackPressure [GOOD] >> Channels20::LocalChannelAsyncRead >> KqpRboYql::ConstantFolding+ColumnStore >> KqpQueryPerf::IndexUpsert-QueryService+UseSink [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> KqpRboPg::Aggregation+ColumnStore >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |89.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TxUsage::Write_And_Read_Small_Messages_2 [GOOD] >> TGRpcStreamingTest::ReadFinish [GOOD] >> TGRpcStreamingTest::ClientNeverWrites [GOOD] |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTablets [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |89.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexUpsert-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 24517, MsgBus: 25615 2026-01-04T02:36:36.665903Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329768955238470:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:36.677734Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:37.062954Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:37.063106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:37.111459Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:37.129827Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:37.134367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:37.256353Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:37.256402Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:37.256413Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:37.256509Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:37.390587Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:37.672345Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:37.680521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:37.691941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:37.756661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:37.891200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:38.052457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-04T02:36:38.120980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.050495Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329786135109482:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:40.050613Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:40.051106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329786135109492:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:40.051147Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:40.410555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.451902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.486116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.524192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.559121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.608605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.659050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.724026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:40.815312Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329786135110368:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:40.815428Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:40.815936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329786135110373:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:40.815997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329786135110374:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:40.816299Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:40.821304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:40.837274Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329786135110377:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:40.911814Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329786135110428:3780] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:41.666605Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329768955238470:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:41.666689Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:42.960397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... :36:47.010174Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:36:47.093655Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:47.098419Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329814148328925:2081] 1767494206921480 != 1767494206921483 2026-01-04T02:36:47.133042Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:47.133131Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:47.143497Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:47.206846Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:47.206868Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:47.206875Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:47.206980Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:47.231903Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:47.612838Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:47.619456Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at 
schemeshard: 72057594046644480 2026-01-04T02:36:47.627310Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.705126Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.856446Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.932579Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.058855Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:50.466412Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329831328199975:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.466508Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.466774Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329831328199985:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.466818Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.539966Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.577897Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.619221Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.694646Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.732909Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.773925Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.847989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.900224Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.987026Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329831328200860:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.987156Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.987486Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329831328200865:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.987533Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329831328200866:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.987585Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:50.992353Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:51.009013Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329831328200869:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:36:51.067307Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329835623168216:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:52.682093Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:52.726310Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:52.775815Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::WritesDoneFromClient [GOOD] Test command err: 2026-01-04T02:36:53.402582Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329844990411472:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:53.403367Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:53.455193Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:36:53.746000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:53.746115Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:53.759068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:53.847249Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:53.858230Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:53.860125Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329844990411440:2081] 1767494213398801 != 1767494213398804 2026-01-04T02:36:53.986955Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7dd55bb7f480] stream accepted Name# Session ok# true peer# 
ipv6:[::1]:40992 2026-01-04T02:36:53.987252Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7dd55bb7f480] facade attach Name# Session actor# [1:7591329844990412091:2366] peer# ipv6:[::1]:40992 2026-01-04T02:36:53.987276Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7dd55bb7f480] facade read Name# Session peer# ipv6:[::1]:40992 2026-01-04T02:36:53.987479Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7dd55bb7f480] read finished Name# Session ok# false data# peer# ipv6:[::1]:40992 2026-01-04T02:36:53.987573Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:302: Received TEvReadFinished, success = 0 2026-01-04T02:36:53.987635Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7dd55bb7f480] facade finish Name# Session peer# ipv6:[::1]:40992 grpc status# (9) message# Everything is A-OK 2026-01-04T02:36:53.988541Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7dd55bb7f480] stream done notification Name# Session ok# true peer# ipv6:[::1]:40992 2026-01-04T02:36:53.988596Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7dd55bb7f480] stream finished Name# Session ok# true peer# ipv6:[::1]:40992 grpc status# (9) message# Everything is A-OK 2026-01-04T02:36:53.988624Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7dd55bb7f480] deregistering request Name# Session peer# ipv6:[::1]:40992 (finish done) 2026-01-04T02:36:53.994156Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:312: Received TEvNotifiedWhenDone 2026-01-04T02:36:54.071140Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ReadFinish [GOOD] Test command err: 2026-01-04T02:36:53.820085Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329843338408930:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:53.820620Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:53.863610Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:36:54.097030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:54.112541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:54.112651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:54.158802Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:54.210216Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not 
loaded 2026-01-04T02:36:54.218958Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329843338408903:2081] 1767494213814697 != 1767494213814700 2026-01-04T02:36:54.302113Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:54.369192Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d4aa43a0880] stream accepted Name# Session ok# true peer# ipv6:[::1]:42694 2026-01-04T02:36:54.370003Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d4aa43a0880] facade attach Name# Session actor# [1:7591329847633376968:2467] peer# ipv6:[::1]:42694 2026-01-04T02:36:54.370047Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7d4aa43a0880] facade read Name# Session peer# ipv6:[::1]:42694 2026-01-04T02:36:54.370104Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7d4aa43a0880] facade finish Name# Session peer# ipv6:[::1]:42694 grpc status# (0) message# 2026-01-04T02:36:54.370372Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7d4aa43a0880] read finished Name# Session ok# false data# peer# ipv6:[::1]:42694 2026-01-04T02:36:54.370436Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d4aa43a0880] stream done notification Name# Session ok# true peer# ipv6:[::1]:42694 2026-01-04T02:36:54.370474Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7d4aa43a0880] stream finished Name# Session ok# true peer# ipv6:[::1]:42694 grpc status# (0) message# 2026-01-04T02:36:54.370516Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d4aa43a0880] deregistering request Name# Session peer# ipv6:[::1]:42694 (finish done) 2026-01-04T02:36:54.370657Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:265: Received TEvReadFinished, success = 0 |89.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup_reboots/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::ClientNeverWrites [GOOD] Test command err: 2026-01-04T02:36:53.763129Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329844349698384:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:53.772593Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:54.029963Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:54.073207Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:54.073334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:54.137919Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329844349698338:2081] 1767494213738698 != 1767494213738701 
2026-01-04T02:36:54.152361Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:54.159145Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:54.197624Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:54.290050Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7d76cd00e680] stream accepted Name# Session ok# true peer# ipv6:[::1]:52786 2026-01-04T02:36:54.290412Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7d76cd00e680] facade attach Name# Session actor# [1:7591329848644666404:2465] peer# ipv6:[::1]:52786 2026-01-04T02:36:54.290451Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7d76cd00e680] facade read Name# Session peer# ipv6:[::1]:52786 2026-01-04T02:36:54.290506Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:404: [0x7d76cd00e680] facade write Name# Session data# peer# ipv6:[::1]:52786 2026-01-04T02:36:54.290807Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7d76cd00e680] facade finish Name# Session peer# ipv6:[::1]:52786 grpc status# (0) message# 2026-01-04T02:36:54.291044Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7d76cd00e680] write finished Name# Session ok# true peer# ipv6:[::1]:52786 2026-01-04T02:36:54.291104Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:187: Received TEvWriteFinished, success = 1 2026-01-04T02:36:54.291280Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7d76cd00e680] read finished Name# Session ok# false data# peer# ipv6:[::1]:52786 2026-01-04T02:36:54.291321Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7d76cd00e680] stream done notification Name# Session ok# true peer# ipv6:[::1]:52786 2026-01-04T02:36:54.291446Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7d76cd00e680] stream finished Name# Session ok# true peer# ipv6:[::1]:52786 grpc status# (0) message# 2026-01-04T02:36:54.291492Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7d76cd00e680] deregistering request Name# Session peer# ipv6:[::1]:52786 (finish done) 2026-01-04T02:36:54.295936Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:181: Received TEvReadFinished, success = 0 2026-01-04T02:36:54.295998Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:194: Received TEvNotifiedWhenDone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_streaming/ut/unittest >> TGRpcStreamingTest::SimpleEcho [GOOD] Test command err: 2026-01-04T02:36:53.919099Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329843997521138:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:53.919184Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:54.210454Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:54.210631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:54.215984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-04T02:36:54.295817Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:54.311832Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:54.313198Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329843997521116:2081] 1767494213918296 != 1767494213918299 2026-01-04T02:36:54.386071Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:230: [0x7de12846f880] stream accepted Name# Session ok# true peer# ipv6:[::1]:36466 2026-01-04T02:36:54.389149Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:304: [0x7de12846f880] facade attach Name# Session actor# [1:7591329848292489064:2367] peer# ipv6:[::1]:36466 2026-01-04T02:36:54.389176Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:328: [0x7de12846f880] facade read Name# Session peer# ipv6:[::1]:36466 2026-01-04T02:36:54.389355Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:356: [0x7de12846f880] read finished Name# Session ok# true data# peer# ipv6:[::1]:36466 2026-01-04T02:36:54.389653Z node 1 :GRPC_SERVER DEBUG: grpc_streaming_ut.cpp:142: Received TEvReadFinished, success = 1 2026-01-04T02:36:54.389678Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:404: [0x7de12846f880] facade write Name# Session data# peer# ipv6:[::1]:36466 2026-01-04T02:36:54.390098Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:514: [0x7de12846f880] facade finish Name# Session peer# ipv6:[::1]:36466 grpc status# (0) message# 2026-01-04T02:36:54.390099Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:459: [0x7de12846f880] write finished Name# Session ok# true peer# ipv6:[::1]:36466 2026-01-04T02:36:54.390420Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:271: [0x7de12846f880] stream done notification Name# Session ok# true peer# ipv6:[::1]:36466 2026-01-04T02:36:54.390470Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:550: [0x7de12846f880] stream finished Name# Session ok# true peer# ipv6:[::1]:36466 grpc status# (0) message# 2026-01-04T02:36:54.390538Z node 1 :GRPC_SERVER DEBUG: grpc_streaming.h:583: [0x7de12846f880] deregistering request Name# Session peer# ipv6:[::1]:36466 (finish done) |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_streaming/ut/unittest >> TSchemeShardSysViewTest::AsyncCreateSameSysView |89.1%| [TA] $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [TA] {RESULT} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_streaming/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysViewsUpdateTest::DeleteObsoleteSysViews >> TSchemeShardSysViewTest::DropSysView >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] >> TSchemeShardSysViewTest::CreateExistingSysView >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] >> TSchemeShardSysViewTest::CreateSysView >> KqpRboYql::Select [GOOD] |89.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpRboYql::TPCH_YDB_PERF >> TSchemeShardSysViewTest::ReadOnlyMode >> TSchemeShardSysViewTest::AsyncCreateDifferentSysViews >> TSchemeShardSysViewTest::AsyncCreateSameSysView [GOOD] >> TSchemeShardSysViewTest::AsyncDropSameSysView >> TSchemeShardSysViewsUpdateTest::RestoreAbsentSysViews >> KqpRboPg::Bench_Select [GOOD] >> KqpRboPg::Bench_Filter >> TSchemeShardSysViewsUpdateTest::CreateDirWithDomainSysViews |89.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysViewTest::EmptyName >> TTxDataShardMiniKQL::CrossShard_5_AllToAll |89.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup_reboots/test-results/unittest/{meta.json ... results_accumulator.log} >> TTxDataShardMiniKQL::Write >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx >> TTxDataShardMiniKQL::ReadSpecialColumns >> TSchemeShardSysViewsUpdateTest::DeleteObsoleteSysViews [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::AggregateToScalar-QueryService [GOOD] Test command err: Trying to start YDB, gRPC: 20899, MsgBus: 13943 2026-01-04T02:36:45.011701Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329809710284257:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:45.011764Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:45.261950Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:45.361026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:45.361133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:45.382751Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:45.385204Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329805415316851:2081] 1767494205006914 != 1767494205006917 2026-01-04T02:36:45.403243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:45.438075Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:45.606122Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:45.606153Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:45.606159Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:45.606249Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:46.018255Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:46.203155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:46.313028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:46.549626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:46.730184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:46.839084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:48.867619Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329822595187913:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.867733Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.868058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329822595187923:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:48.868086Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:49.220665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:49.255080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:49.285689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:49.317390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:49.368107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:49.404110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:49.442471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:49.485821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:49.578597Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329826890156093:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:49.578680Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:49.578790Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329826890156099:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:49.578800Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329826890156098:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:49.578814Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:49.582786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:49.595961Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329826890156102:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:49.701254Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329826890156153:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:50.013970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329809710284257:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:50.014033Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15229, MsgBus: 23704 2026-01-04T02:36:52.717449Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329837198750180:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:52.717506Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:52.742299Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:52.839186Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:52.841246Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329837198750153:2081] 1767494212716629 != 1767494212716632 2026-01-04T02:36:52.875548Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:52.875633Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:52.881765Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:52.938400Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:52.938431Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:52.938439Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:52.938506Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:52.947700Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:53.319862Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:36:53.327013Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:36:53.340466Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.406374Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.540233Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.633407Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.785474Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:55.847835Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329850083653917:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:55.847929Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:55.848309Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329850083653927:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:55.848378Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:55.939034Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:55.974334Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:56.047029Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:56.086224Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:56.121819Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:56.165756Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:56.201414Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:56.258451Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:56.363477Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329854378622094:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:56.363552Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329854378622099:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:56.363557Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:56.363723Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329854378622101:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:56.363794Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:56.367114Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:56.378184Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329854378622102:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:36:56.433910Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329854378622154:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:57.722010Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329837198750180:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:57.722665Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::Write_And_Read_Small_Messages_2 [GOOD] Test command err: 2026-01-04T02:33:08.297660Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328875546299338:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:08.297786Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:08.338387Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:33:08.483985Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:08.488814Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:08.488889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:08.545066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:08.546452Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:08.549953Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328875546299312:2081] 1767493988296868 != 1767493988296871 2026-01-04T02:33:08.610067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001966/r3tmp/yandexv4yRqx.tmp 2026-01-04T02:33:08.610124Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001966/r3tmp/yandexv4yRqx.tmp 2026-01-04T02:33:08.610349Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001966/r3tmp/yandexv4yRqx.tmp 2026-01-04T02:33:08.610468Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:08.635144Z INFO: TTestServer started on Port 2879 GrpcPort 8681 2026-01-04T02:33:08.671759Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:8681 2026-01-04T02:33:08.808952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:08.869470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:09.303945Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:10.630996Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328884136234938:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:10.631103Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:10.631267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328884136234951:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:10.631331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328884136234952:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:10.631567Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:10.635242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:10.646151Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328884136234956:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T02:33:10.821890Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328884136235020:2642] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:10.844529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:10.871109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:10.941021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:10.956702Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328884136235028:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:33:10.959047Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YmQ4NGIzNTUtNmJjY2Q1MWUtOTFlZjMyZDMtYTA3ODQyOTI=, ActorId: [1:7591328884136234924:2329], ActorState: ExecuteState, LegacyTraceId: 01ke3dky756rxqeqczjhfaz2xg, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:33:10.961308Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7591328888431202599:2819] 2026-01-04T02:33:13.298054Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328875546299338:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:13.298122Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-04T02:33:17.195235Z :WriteToTopic_Demo_41_Table INFO: TTopicSdkTestSetup started 2026-01-04T02:33:17.206263Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-04T02:33:17.219807Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7591328914201006610:2926] connected; active server actors: 1 2026-01-04T02:33:17.220187Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-01-04T02:33:17.222316Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-04T02:33:17.222431Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-04T02:33:17.225040Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:33:17.225692Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:33:17.225873Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-04T02:33:17.225918Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:33:17.225963Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72075186224037892] doesn't have tx writes info 2026-01-04T02:33:17.226072Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037893][test-topic] TEvClientConnected TabletId 72075186224037 ... 0][StateIdle] Process user action and tx events 2026-01-04T02:36:56.725973Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:56.725987Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:56.726010Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:56.726025Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:56.743832Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:56.743868Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:56.743889Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:56.743930Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:56.743958Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:56.824430Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:56.824466Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:56.824482Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:56.824504Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:56.824529Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: 
[72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:56.843928Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:56.843984Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:56.843998Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:56.844021Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:56.844034Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:56.924868Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:56.924943Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:56.924974Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:56.925009Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:56.925050Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:56.946037Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:56.946089Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:56.946105Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:56.946130Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:56.946156Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:57.025175Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:57.025216Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:57.025230Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:57.025251Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:57.025267Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:57.050011Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:57.050047Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:57.050061Z node 13 :PERSQUEUE DEBUG: 
partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:57.050082Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:57.050095Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:36:57.098917Z :INFO: [/Root] [/Root] [843d8153-ad4962ec-68241aa9-f1af9ce3] Closing read session. Close timeout: 0.000000s 2026-01-04T02:36:57.098979Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:topic_A:0:1:319:320 2026-01-04T02:36:57.099036Z :INFO: [/Root] [/Root] [843d8153-ad4962ec-68241aa9-f1af9ce3] Counters: { Errors: 0 CurrentSessionLifetimeMs: 2040 BytesRead: 20480000 MessagesRead: 320 BytesReadCompressed: 20480000 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:36:57.099130Z :NOTICE: [/Root] [/Root] [843d8153-ad4962ec-68241aa9-f1af9ce3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-04T02:36:57.099180Z :DEBUG: [/Root] [/Root] [843d8153-ad4962ec-68241aa9-f1af9ce3] [] Abort session to cluster 2026-01-04T02:36:57.099652Z :DEBUG: [/Root] 0x00007D4FF8B9F190 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_1736279527059484842_v1 Close 2026-01-04T02:36:57.100034Z :DEBUG: [/Root] 0x00007D4FF8B9F190 TDirectReadSessionManager ServerSessionId=test-consumer_13_1_1736279527059484842_v1 Close 2026-01-04T02:36:57.100150Z :NOTICE: [/Root] [/Root] [843d8153-ad4962ec-68241aa9-f1af9ce3] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:36:57.102753Z node 13 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer test-consumer session test-consumer_13_1_1736279527059484842_v1 grpc read done: success# 0, data# { } 2026-01-04T02:36:57.102785Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer test-consumer session test-consumer_13_1_1736279527059484842_v1 grpc read failed 2026-01-04T02:36:57.102814Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer test-consumer session test-consumer_13_1_1736279527059484842_v1 grpc closed 2026-01-04T02:36:57.102850Z node 13 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer test-consumer session test-consumer_13_1_1736279527059484842_v1 is DEAD 2026-01-04T02:36:57.103437Z node 13 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:84: Direct read proxy [13:7591329850830804868:2519]: session cookie 2 consumer test-consumer session test-consumer_13_1_1736279527059484842_v1 grpc read done: success# 0, data# { } 2026-01-04T02:36:57.103459Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [13:7591329850830804868:2519]: session cookie 2 consumer test-consumer session test-consumer_13_1_1736279527059484842_v1grpc read failed 2026-01-04T02:36:57.103484Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:66: Direct read proxy [13:7591329850830804868:2519]: session cookie 2 consumer test-consumer session test-consumer_13_1_1736279527059484842_v1 grpc closed 2026-01-04T02:36:57.103502Z node 13 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [13:7591329850830804868:2519]: session cookie 2 consumer test-consumer session test-consumer_13_1_1736279527059484842_v1 proxy is DEAD 2026-01-04T02:36:57.104380Z node 13 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037895][topic_A] pipe [13:7591329850830804859:2514] disconnected. 
2026-01-04T02:36:57.104411Z node 13 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037895][topic_A] pipe [13:7591329850830804859:2514] disconnected; active server actors: 1 2026-01-04T02:36:57.104430Z node 13 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037895][topic_A] pipe [13:7591329850830804859:2514] client test-consumer disconnected session test-consumer_13_1_1736279527059484842_v1 2026-01-04T02:36:57.104523Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037894] Destroy direct read session test-consumer_13_1_1736279527059484842_v1 2026-01-04T02:36:57.104554Z node 13 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [13:7591329850830804862:2517] destroyed 2026-01-04T02:36:57.104604Z node 13 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: test-consumer_13_1_1736279527059484842_v1 2026-01-04T02:36:57.125920Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:57.125961Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:57.125976Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:57.125999Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:57.126014Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:36:57.149924Z node 13 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:36:57.149966Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:57.149986Z node 13 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:36:57.150010Z node 13 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:36:57.150034Z node 13 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexDeleteOn-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4316, MsgBus: 21235 2026-01-04T02:36:39.843067Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329783613595833:2252];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:39.843112Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:40.150121Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:40.182253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown 
-> Disconnected 2026-01-04T02:36:40.182382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:40.291650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:40.293990Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329783613595610:2081] 1767494199816544 != 1767494199816547 2026-01-04T02:36:40.311800Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:40.334616Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:40.538355Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:40.538376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:40.538383Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:40.538446Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:40.834896Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:41.065768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:41.087183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:41.154799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:41.305465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:41.471686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:41.563134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.523827Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329800793466670:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.523934Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.524285Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329800793466680:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.524329Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.884586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.921732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:43.957115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.001360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.049104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.100953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.138325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.193400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.288947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329805088434847:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:44.289046Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:44.289615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329805088434853:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:44.289656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329805088434852:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:44.289791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:44.294290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:44.306951Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329805088434856:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:44.406832Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329805088434907:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:44.841427Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329783613595833:2252];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:44.841501Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:46.429895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... _script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:49.945954Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:49.957565Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:49.957673Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:49.981055Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:50.076145Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:50.076182Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:50.076190Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:50.076323Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:50.145465Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:50.544244Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:50.553727Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.606414Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.754007Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:50.761845Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.841122Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.499042Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329842127314555:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:53.499160Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:53.499497Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329842127314565:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:53.499570Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:53.598598Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.635642Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.670034Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.706426Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.752021Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.795125Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.830386Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.880302Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.997234Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329842127315434:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:53.997360Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329842127315439:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:53.997389Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:53.997576Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329842127315442:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:53.997639Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:54.000860Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:54.014502Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329842127315441:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:54.110294Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329846422282790:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:54.754934Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329824947443745:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:54.755020Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:55.937692Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:55.988900Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:56.038663Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TSchemeShardSysViewTest::DropSysView [GOOD] >> TSchemeShardSysViewTest::CreateExistingSysView [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TSchemeShardSysViewTest::CreateSysView [GOOD] >> TSchemeShardSysViewTest::AsyncDropSameSysView [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewsUpdateTest::DeleteObsoleteSysViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:36:59.705513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:36:59.705602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:36:59.705641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:36:59.705673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:36:59.705712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:36:59.705759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:36:59.705867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:36:59.705938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:36:59.706776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:36:59.707079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:36:59.800170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:36:59.800231Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:59.813709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:36:59.817709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:36:59.817951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:36:59.826870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:36:59.827123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:36:59.827776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:36:59.828058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:36:59.830851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:59.831035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:36:59.832280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:36:59.832347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:59.832457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:36:59.832505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to 
make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:36:59.832543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:36:59.832717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:36:59.981919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.982886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.983018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.983080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.983182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.983250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.983312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.983420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.983517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.983625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.983693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.983758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.983837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.983961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:36:59.984065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... views_update.cpp:213: SysViewsRosterUpdate# [1:817:2793] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, drop sys view '/MyRoot/.sys/new_ds_pdisks' 2026-01-04T02:37:00.894148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 43 PathOwnerId: 72057594046678944, cookie: 281474976720658 2026-01-04T02:37:00.894268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 43 PathOwnerId: 72057594046678944, cookie: 281474976720658 2026-01-04T02:37:00.894304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720658 2026-01-04T02:37:00.894333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720658, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 43 2026-01-04T02:37:00.894368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 39 2026-01-04T02:37:00.894733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720658 2026-01-04T02:37:00.894808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 281474976720658 2026-01-04T02:37:00.894833Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720658 2026-01-04T02:37:00.894859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720658, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-04T02:37:00.894920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:37:00.895012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720658, subscribers: 1 2026-01-04T02:37:00.895047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:817:2793] 
2026-01-04T02:37:00.897739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720658 2026-01-04T02:37:00.898862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720658 2026-01-04T02:37:00.898969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:817:2793] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, drop sys view '/MyRoot/.sys/new_sys_view' 2026-01-04T02:37:00.899003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:817:2793] at schemeshard: 72057594046678944 Send TEvRosterUpdateFinished Leader for TabletID 72057594046678944 is [1:759:2746] sender: [1:859:2058] recipient: [1:15:2062] 2026-01-04T02:37:00.963219Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:37:00.963473Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/partition_stats" took 282us result status StatusSuccess 2026-01-04T02:37:00.963813Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/partition_stats" PathDescription { Self { Name: "partition_stats" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710685 CreateStep: 5000021 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:37:00.964558Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, 
record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:37:00.964807Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 214us result status StatusPathDoesNotExist 2026-01-04T02:37:00.964959Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSysView, state: EPathStateNotExist), drop stepId: 5000042, drop txId: 281474976720658" Path: "/MyRoot/.sys/new_sys_view" PathId: 38 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:37:00.965504Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_ds_pdisks" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:37:00.965678Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_ds_pdisks" took 179us result status StatusPathDoesNotExist 2026-01-04T02:37:00.965841Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_ds_pdisks\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeSysView, state: EPathStateNotExist), drop stepId: 5000041, drop txId: 281474976720657" Path: "/MyRoot/.sys/new_ds_pdisks" PathId: 39 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:37:00.966389Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:37:00.966573Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_partition_stats" took 186us result status StatusSuccess 2026-01-04T02:37:00.966909Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: 
Status: StatusSuccess Path: "/MyRoot/.sys/new_partition_stats" PathDescription { Self { Name: "new_partition_stats" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::DropSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:36:59.764256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:36:59.764376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:36:59.764421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:36:59.764471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:36:59.764529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:36:59.764560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:36:59.764623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:36:59.764714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:36:59.765645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:36:59.766069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:36:59.858953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:36:59.859014Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:59.873941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:36:59.874155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:36:59.874360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:36:59.883698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:36:59.883965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:36:59.884735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:36:59.885020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:36:59.888316Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:59.888546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:36:59.889820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:36:59.889889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:59.890016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:36:59.890076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:36:59.890208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:36:59.890408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.031386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.032517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.032669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.032803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.032907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.032983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.033080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.033158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.033252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.033360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.033439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.033556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.033628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.033740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.033876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
n for pathId [OwnerId: 72057594046678944, LocalPathId: 8] was 0 2026-01-04T02:37:00.902253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 9] was 0 2026-01-04T02:37:00.902284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 10] was 0 2026-01-04T02:37:00.902304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 0 2026-01-04T02:37:00.902331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 0 2026-01-04T02:37:00.902352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 0 2026-01-04T02:37:00.902373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 0 2026-01-04T02:37:00.902422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2026-01-04T02:37:00.902462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2026-01-04T02:37:00.902497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2026-01-04T02:37:00.902520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2026-01-04T02:37:00.902550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2026-01-04T02:37:00.902573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2026-01-04T02:37:00.902612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2026-01-04T02:37:00.902640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-01-04T02:37:00.902662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-01-04T02:37:00.902683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-01-04T02:37:00.902704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-01-04T02:37:00.902724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-01-04T02:37:00.902754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 
2026-01-04T02:37:00.902782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-01-04T02:37:00.902802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-01-04T02:37:00.902824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-01-04T02:37:00.902844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-01-04T02:37:00.902874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-04T02:37:00.902915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-04T02:37:00.902944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-04T02:37:00.902979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-04T02:37:00.903002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-04T02:37:00.903038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-04T02:37:00.903205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.903313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.903382Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.903494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.903589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.903710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.904026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.904176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.904552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.904630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 
2026-01-04T02:37:00.904854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.905021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.905080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.905175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.905354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.905444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.910129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.910391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.910472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.910532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.910665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.910734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.910776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.916650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:37:00.920665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:37:00.920741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:00.920899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:37:00.920943Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:37:00.921034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:37:00.922689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:738:2725] sender: [1:800:2058] recipient: [1:15:2062] 2026-01-04T02:37:00.994870Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:37:00.995157Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 321us result status StatusPathDoesNotExist 2026-01-04T02:37:00.995367Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.sys\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/.sys/new_sys_view" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSysViewTest::ReadOnlyMode [GOOD] >> TSchemeShardSysViewTest::AsyncCreateDifferentSysViews [GOOD] >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::CreateExistingSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:36:59.963572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:36:59.963665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:36:59.963715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:36:59.963758Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:36:59.963797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:36:59.963843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:36:59.963917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:36:59.964047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:36:59.964839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:36:59.965121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:37:00.062442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:37:00.062514Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:00.077353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:37:00.081533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:37:00.081750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:37:00.088612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:37:00.088851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:37:00.089513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:37:00.089770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:37:00.092386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:00.092565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:37:00.093745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:37:00.093839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:00.093960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:37:00.094025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:37:00.094082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:37:00.094273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.258185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:37:00.259117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.259251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.259316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.259403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.259466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.259526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.259621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.259690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.259772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.259851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.259931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.260023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.260109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.260195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
53255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-04T02:37:01.054048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:37:01.054158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:37:01.054211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:37:01.054261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 39 2026-01-04T02:37:01.054304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 37 2026-01-04T02:37:01.054993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:37:01.055073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:37:01.055114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:37:01.055151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-04T02:37:01.055194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:37:01.055268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:37:01.058515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:37:01.058689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:37:01.058950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:37:01.058996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:37:01.059391Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:37:01.059485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:37:01.059541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:701:2691] TestWaitNotification: OK eventTxId 101 2026-01-04T02:37:01.059998Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:37:01.060279Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 262us result status StatusSuccess 2026-01-04T02:37:01.060690Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-01-04T02:37:01.063779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView CreateSysView { Name: "new_sys_view" Type: ENodes } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:37:01.063908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_sysview.cpp:117: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/new_sys_view, opId: 102:0 2026-01-04T02:37:01.063983Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_sysview.cpp:123: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/new_sys_view, opId: 102:0, sysViewDescription: Name: "new_sys_view" Type: ENodes 2026-01-04T02:37:01.064099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.sys/new_sys_view', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSysView, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-04T02:37:01.065778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSysView, state: EPathStateNoChanges)" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-04T02:37:01.066072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/.sys/new_sys_view', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSysView, state: EPathStateNoChanges), operation: CREATE SYSTEM VIEW, path: /MyRoot/.sys/new_sys_view TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:37:01.066398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:37:01.066443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:37:01.066820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:37:01.066947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:37:01.066986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:709:2699] TestWaitNotification: OK eventTxId 102 2026-01-04T02:37:01.067449Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:37:01.067657Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 209us result status StatusSuccess 2026-01-04T02:37:01.068007Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTxDataShardMiniKQL::MemoryUsageImmediateSmallTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx >> TSchemeShardSysViewsUpdateTest::CreateDirWithDomainSysViews [GOOD] >> TTxDataShardMiniKQL::ReadSpecialColumns [GOOD] >> TTxDataShardMiniKQL::SelectRange >> TTxDataShardMiniKQL::Write [GOOD] >> TTxDataShardMiniKQL::TableStats |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::EmptyName [GOOD] >> TSchemeShardSysViewsUpdateTest::RestoreAbsentSysViews [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::CreateSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:37:00.235703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:37:00.235783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:37:00.235823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:37:00.235867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:37:00.235916Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:37:00.235961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:37:00.236029Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:37:00.236099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:37:00.236871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:37:00.237137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:37:00.345142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:37:00.345200Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:00.361261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:37:00.361464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:37:00.361658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:37:00.378929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:37:00.379195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:37:00.379881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:37:00.380173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:37:00.384333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:00.384554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:37:00.385782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:37:00.385868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:00.385983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:37:00.386035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:37:00.386134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:37:00.386320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.522932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.523963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.524094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.524203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.524293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.524363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.524447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.524516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.524597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.524696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.524765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.524854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.524912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.524999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.525109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
-01-04T02:37:01.321410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2026-01-04T02:37:01.321433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2026-01-04T02:37:01.321475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2026-01-04T02:37:01.321504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2026-01-04T02:37:01.321529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2026-01-04T02:37:01.321558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2026-01-04T02:37:01.321584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2026-01-04T02:37:01.321607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-01-04T02:37:01.321639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-01-04T02:37:01.321670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-01-04T02:37:01.321705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-01-04T02:37:01.321735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-01-04T02:37:01.321761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2026-01-04T02:37:01.321785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-01-04T02:37:01.321862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-01-04T02:37:01.321903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-01-04T02:37:01.321929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-01-04T02:37:01.321956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-04T02:37:01.321981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-04T02:37:01.322020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-04T02:37:01.322047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-04T02:37:01.322069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-04T02:37:01.322093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-04T02:37:01.322121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 0 2026-01-04T02:37:01.322297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.322399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.322496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.322624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.322702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.322859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.323209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.323345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.323750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.323823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.324074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.324244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.324296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.324386Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.324589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.324692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at 
schemeshard: 72057594046678944 2026-01-04T02:37:01.325120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.325457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.325545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.325600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.325748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.325823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.325901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.338435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:37:01.347933Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:37:01.348037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:01.348247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:37:01.348308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:37:01.348357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:37:01.348721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:716:2703] sender: [1:776:2058] recipient: [1:15:2062] 2026-01-04T02:37:01.418888Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:37:01.419196Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 320us result status StatusSuccess 2026-01-04T02:37:01.419617Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::AsyncDropSameSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:36:59.307565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:36:59.307665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:36:59.307700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:36:59.307734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:36:59.307804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:36:59.307843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:36:59.307905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:36:59.307987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-04T02:36:59.308790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:36:59.309081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:36:59.398254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:36:59.398322Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:59.411835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:36:59.414861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:36:59.415069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:36:59.419686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:36:59.419869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:36:59.420458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:36:59.420734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:36:59.423550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:59.423795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:36:59.425067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:36:59.425135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:36:59.425242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:36:59.425299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:36:59.425349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:36:59.425531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:36:59.565578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.566712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.566853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.566945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.567032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.567119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.567186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.567248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.567330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.567435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.567495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.567559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.567677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.567762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:36:59.567847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
nStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:37:01.569353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 8589936750 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:37:01.569412Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_sysview.cpp:43: [72057594046678944] TDropSysView TPropose, opId: 102:0 HandleReply TEvOperationPlan, step: 5000039 2026-01-04T02:37:01.569548Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-04T02:37:01.569728Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2026-01-04T02:37:01.569799Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:37:01.571785Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:37:01.571844Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-04T02:37:01.572207Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:37:01.572389Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:01.572436Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 2 2026-01-04T02:37:01.572489Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-04T02:37:01.572792Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.572849Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:37:01.572958Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:37:01.573002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:37:01.573047Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:37:01.573098Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:37:01.573141Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready 
parts: 1/1, is published: false 2026-01-04T02:37:01.573185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:37:01.573225Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:37:01.573260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:37:01.573336Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:37:01.573386Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-04T02:37:01.573425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 40 2026-01-04T02:37:01.573460Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-04T02:37:01.574211Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 40 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:37:01.574337Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 40 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:37:01.574389Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:37:01.574433Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 40 2026-01-04T02:37:01.574478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 37 2026-01-04T02:37:01.575271Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:37:01.575360Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:37:01.575407Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:37:01.575442Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-04T02:37:01.575473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:37:01.575562Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:37:01.575923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:37:01.575992Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:37:01.576069Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2026-01-04T02:37:01.579406Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:37:01.579710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:37:01.580560Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 102 2026-01-04T02:37:01.580883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:37:01.580948Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 TestWaitNotification wait txId: 103 2026-01-04T02:37:01.581053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:37:01.581080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:37:01.581579Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:37:01.581695Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:37:01.581742Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:37:01.581783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:734:2723] 2026-01-04T02:37:01.581866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:37:01.581893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:734:2723] TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 2026-01-04T02:37:01.582443Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:37:01.582652Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 243us result status StatusPathDoesNotExist 2026-01-04T02:37:01.582857Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/.sys/new_sys_view\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/.sys\' (id: [OwnerId: 72057594046678944, LocalPathId: 2])" Path: "/MyRoot/.sys/new_sys_view" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/.sys" LastExistedPrefixPathId: 2 LastExistedPrefixDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewsUpdateTest::CreateDirWithDomainSysViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:37:00.902909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:37:00.902995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:37:00.903059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:37:00.903100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:37:00.903141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:37:00.903194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:37:00.903265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:37:00.903349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:37:00.904264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:37:00.904611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:37:01.019290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:37:01.019344Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:01.031981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:37:01.035913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:37:01.036133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:37:01.041353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:37:01.041573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:37:01.042233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:37:01.042471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:37:01.044824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:01.045000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:37:01.046136Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:37:01.046209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:01.046314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:37:01.046356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:37:01.046399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:37:01.046531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.191742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:37:01.192708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.192838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.192898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.192989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.193052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.193109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.193205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.193282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.193377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.193460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.193522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.193600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.193682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.193774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ished: true CreateTxId: 281474976710675 CreateStep: 5000031 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_partitions_one_hour" PathId: 36 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710691 CreateStep: 5000018 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_partitions_one_minute" PathId: 9 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710664 CreateStep: 5000007 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_cpu_time_one_hour" PathId: 14 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710669 CreateStep: 5000006 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_cpu_time_one_minute" PathId: 28 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710683 CreateStep: 5000025 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_duration_one_hour" PathId: 37 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710692 CreateStep: 5000016 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_duration_one_minute" PathId: 25 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710680 CreateStep: 5000030 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_read_bytes_one_hour" PathId: 15 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710670 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_read_bytes_one_minute" PathId: 16 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710671 CreateStep: 5000002 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_request_units_one_hour" PathId: 10 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710665 CreateStep: 5000005 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } Children { Name: "top_queries_by_request_units_one_minute" PathId: 29 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710684 CreateStep: 5000023 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 
0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:37:01.869578Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:675:2665] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:37:01.871353Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2026-01-04T02:37:01.871959Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:37:01.872129Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/partition_stats" took 186us result status StatusSuccess 2026-01-04T02:37:01.872410Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/partition_stats" PathDescription { Self { Name: "partition_stats" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710685 CreateStep: 5000021 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:37:01.872801Z node 1 :SCHEMESHARD_DESCRIBE 
DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/ds_pdisks" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:37:01.872978Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/ds_pdisks" took 179us result status StatusSuccess 2026-01-04T02:37:01.873278Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/ds_pdisks" PathDescription { Self { Name: "ds_pdisks" PathId: 7 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710662 CreateStep: 5000011 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "ds_pdisks" Type: EPDisks SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 7 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:37:01.873716Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/query_metrics_one_minute" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:37:01.873950Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/query_metrics_one_minute" took 207us result status StatusSuccess 2026-01-04T02:37:01.874247Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/query_metrics_one_minute" PathDescription { Self { Name: "query_metrics_one_minute" PathId: 34 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710689 CreateStep: 5000022 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "query_metrics_one_minute" Type: EQueryMetricsOneMinute SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 34 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:37:00.380353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:37:00.380467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:37:00.380522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:37:00.380574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:37:00.380621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:37:00.380657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:37:00.380735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:37:00.380826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:37:00.381844Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:37:00.382144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:37:00.484128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:37:00.484199Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:00.498852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:37:00.499052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:37:00.499266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:37:00.508685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:37:00.508962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:37:00.509899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:37:00.510200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:37:00.513874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:00.514113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:37:00.515390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:37:00.515458Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:00.515608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:37:00.515669Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:37:00.515786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:37:00.516015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.664650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.665344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.665449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.665529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.665619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.665682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.665755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.666289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.666430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.666552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.666652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.666739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.666800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.666855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.666927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
el: 0 TxId: 102 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:37:01.786315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2026-01-04T02:37:01.786483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000038 2026-01-04T02:37:01.787298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:37:01.787429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:37:01.787492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_sysview.cpp:45: [72057594046678944] TCreateSysView::TPropose, opId: 102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038 2026-01-04T02:37:01.787632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-04T02:37:01.787805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2026-01-04T02:37:01.787890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:37:01.790715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:37:01.790778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-04T02:37:01.791145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:37:01.791287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:01.791336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:820:2778], at schemeshard: 72057594046678944, txId: 102, path id: 2 2026-01-04T02:37:01.791391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:820:2778], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-04T02:37:01.791782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 
2026-01-04T02:37:01.791843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:37:01.791968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:37:01.792028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:37:01.792074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:37:01.792123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:37:01.792171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-04T02:37:01.792213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:37:01.792262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:37:01.792303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:37:01.792376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:37:01.792444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-04T02:37:01.792484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 39 2026-01-04T02:37:01.792516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-04T02:37:01.793296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:37:01.793453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 2 Version: 39 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:37:01.793498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:37:01.793539Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 39 2026-01-04T02:37:01.793591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 37 2026-01-04T02:37:01.794670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, 
cookie: 102 2026-01-04T02:37:01.794762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:37:01.794797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:37:01.794832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-04T02:37:01.794875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:37:01.794974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:37:01.798463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:37:01.799706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:37:01.800033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:37:01.800097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:37:01.800609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:37:01.800730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:37:01.800773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:850:2806] TestWaitNotification: OK eventTxId 102 2026-01-04T02:37:01.801462Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:37:01.801748Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 313us result status StatusSuccess 2026-01-04T02:37:01.802213Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000038 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpQueryPerf::IndexReplace-QueryService+UseSink [GOOD] |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::EmptyName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:37:01.126045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:37:01.126131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:37:01.126170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:37:01.126204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:37:01.126239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:37:01.126320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:37:01.126383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:37:01.126452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 
10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:37:01.127258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:37:01.127520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:37:01.218788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:37:01.218837Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:01.238480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:37:01.242902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:37:01.243134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:37:01.255446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:37:01.255668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:37:01.256412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:37:01.256660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:37:01.261310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:01.261501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:37:01.262719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:37:01.262789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:01.262902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:37:01.262952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:37:01.262994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:37:01.263159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.414827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.415852Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.416048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.416132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.416237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.416349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.416426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.416498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.416574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.416671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.416761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.416843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.416936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.417039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.417159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
678944 2026-01-04T02:37:02.086850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:37:02.086906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:37:02.088841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:37:02.088896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:37:02.088937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:37:02.091412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:37:02.091470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:37:02.091533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:37:02.091577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:37:02.091698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:37:02.093439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:37:02.093577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-04T02:37:02.093977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:37:02.094090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:37:02.094130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:37:02.094418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:37:02.094473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:37:02.094694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:37:02.094796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:37:02.097044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:37:02.097095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:37:02.097290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:02.097363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:208:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-04T02:37:02.097722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:37:02.097773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-04T02:37:02.097886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:37:02.097925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:37:02.097965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:37:02.097994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:37:02.098046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:37:02.098101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:37:02.098151Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:37:02.098196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:37:02.098281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:37:02.098322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-04T02:37:02.098356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-04T02:37:02.098936Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:37:02.099041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:37:02.099096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:37:02.099146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:37:02.099199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:37:02.099286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:37:02.102775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:37:02.103368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-04T02:37:02.104037Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:669:2659] Bootstrap 2026-01-04T02:37:02.105160Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:669:2659] Become StateWork (SchemeCache [1:674:2664]) 2026-01-04T02:37:02.108058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView CreateSysView { Name: "" Type: EPartitionStats } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:37:02.108241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_sysview.cpp:117: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/, opId: 101:0 2026-01-04T02:37:02.108310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_sysview.cpp:123: [72057594046678944] TCreateSysView Propose, path: /MyRoot/.sys/, opId: 101:0, sysViewDescription: Name: "" Type: EPartitionStats 2026-01-04T02:37:02.108433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Check failed: path: '/MyRoot/.sys/', error: path part shouldn't be empty, at schemeshard: 72057594046678944 2026-01-04T02:37:02.109287Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:669:2659] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:37:02.112809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Check failed: path: \'/MyRoot/.sys/\', error: path part shouldn\'t be empty" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:37:02.113053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Check failed: path: '/MyRoot/.sys/', error: path part shouldn't be empty, operation: CREATE SYSTEM VIEW, path: /MyRoot/.sys/ 2026-01-04T02:37:02.113545Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:37:02.113751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:37:02.113796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:37:02.114246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:37:02.114361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:37:02.114396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:684:2674] TestWaitNotification: OK eventTxId 101 >> TTxDataShardMiniKQL::MemoryUsageImmediateMediumTx [GOOD] >> TTxDataShardMiniKQL::MemoryUsageMultiShard >> TTxDataShardMiniKQL::SelectRange [GOOD] >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey >> Channels20::LocalChannelAsyncRead [GOOD] >> Channels20::IcChannelTrivial |89.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewsUpdateTest::RestoreAbsentSysViews [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:37:00.946402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:37:00.946512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:37:00.946550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:37:00.946586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:37:00.946621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:37:00.946672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:37:00.946747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2026-01-04T02:37:00.946823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:37:00.947627Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:37:00.947965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:37:01.038545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:37:01.038615Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:01.051376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:37:01.055712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:37:01.055918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:37:01.060859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:37:01.061097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:37:01.061764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:37:01.062014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:37:01.064866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:01.065044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:37:01.066233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:37:01.066307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:01.066408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:37:01.066459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:37:01.066498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:37:01.066640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:37:01.211536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir 
MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.212510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.212636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.212703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.212808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.212878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.212956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.213039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.213132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.213251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.213324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.213382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.213456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.213544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:01.213642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ard DescribePath, at schemeshard: 72057594046678944, txId: 281474976720657, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:37:02.097341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:02.097387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:765:2741], at schemeshard: 72057594046678944, txId: 281474976720657, path id: 2 2026-01-04T02:37:02.097435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:765:2741], at schemeshard: 72057594046678944, txId: 281474976720657, path id: 38 2026-01-04T02:37:02.097544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976720657:0, at schemeshard: 72057594046678944 2026-01-04T02:37:02.097639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976720657:0 ProgressState 2026-01-04T02:37:02.097751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976720657:0 progress is 1/1 2026-01-04T02:37:02.097829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2026-01-04T02:37:02.097881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976720657:0 progress is 1/1 2026-01-04T02:37:02.097916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2026-01-04T02:37:02.097954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976720657, ready parts: 1/1, is published: false 2026-01-04T02:37:02.097998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976720657 ready parts: 1/1 2026-01-04T02:37:02.098039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976720657:0 2026-01-04T02:37:02.098073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976720657:0 2026-01-04T02:37:02.098172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:37:02.098226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976720657, publications: 2, subscribers: 1 2026-01-04T02:37:02.098277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976720657, [OwnerId: 72057594046678944, LocalPathId: 2], 40 2026-01-04T02:37:02.098325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976720657, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-04T02:37:02.101681Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 40 PathOwnerId: 72057594046678944, cookie: 281474976720657 2026-01-04T02:37:02.101865Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 40 PathOwnerId: 72057594046678944, cookie: 281474976720657 2026-01-04T02:37:02.101920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 281474976720657 2026-01-04T02:37:02.101970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720657, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 40 2026-01-04T02:37:02.102017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 36 2026-01-04T02:37:02.102964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 281474976720657 2026-01-04T02:37:02.103072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 281474976720657 2026-01-04T02:37:02.103108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 281474976720657 2026-01-04T02:37:02.103142Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 281474976720657, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-04T02:37:02.103176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:37:02.103258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 281474976720657, subscribers: 1 2026-01-04T02:37:02.103298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:769:2745] 2026-01-04T02:37:02.106562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720657 2026-01-04T02:37:02.108076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976720657 2026-01-04T02:37:02.108296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:769:2745] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/.sys/ds_pdisks' 2026-01-04T02:37:02.108358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:769:2745] at schemeshard: 72057594046678944 Send TEvRosterUpdateFinished Leader for TabletID 72057594046678944 is [1:713:2700] sender: [1:797:2058] recipient: [1:15:2062] 
2026-01-04T02:37:02.176379Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/partition_stats" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:37:02.176659Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/partition_stats" took 324us result status StatusSuccess 2026-01-04T02:37:02.177054Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/partition_stats" PathDescription { Self { Name: "partition_stats" PathId: 30 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976710685 CreateStep: 5000021 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "partition_stats" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 30 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:37:02.177839Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/ds_pdisks" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:37:02.178066Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/ds_pdisks" took 264us result status StatusSuccess 2026-01-04T02:37:02.178412Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/ds_pdisks" PathDescription { Self { Name: "ds_pdisks" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 281474976720657 CreateStep: 5000039 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "ds_pdisks" Type: EPDisks SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sysview/unittest >> TSchemeShardSysViewTest::AsyncCreateDirWithSysView [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:37:00.574613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:37:00.574729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:37:00.574770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:37:00.575026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:37:00.575084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:37:00.575112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:37:00.575182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:37:00.575265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2026-01-04T02:37:00.576186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:37:00.576617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:37:00.672581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:37:00.672644Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:00.687721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:37:00.687966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:37:00.688202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:37:00.696529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:37:00.696749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:37:00.697523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:37:00.697797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:37:00.701052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:00.701263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:37:00.702468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:37:00.702541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:00.702682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:37:00.702733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:37:00.702849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:37:00.703067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:37:00.862210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.863241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.863398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.863494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.863580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.863661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.863729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.863799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.863906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.864043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.864140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.864217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.864300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.864409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:00.864505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
tion.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:37:02.447278Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:37:02.447346Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:37:02.447372Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:37:02.447402Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:37:02.447428Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:37:02.447456Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-04T02:37:02.447487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:37:02.447514Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:37:02.447538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:37:02.447587Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2026-01-04T02:37:02.447616Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-04T02:37:02.447642Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 2], 4 2026-01-04T02:37:02.447667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2026-01-04T02:37:02.448475Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:37:02.448548Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:37:02.448578Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:37:02.448608Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 4 2026-01-04T02:37:02.448639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-04T02:37:02.449396Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:37:02.449469Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:37:02.449498Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:37:02.449525Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2026-01-04T02:37:02.449553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2026-01-04T02:37:02.449607Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:37:02.451754Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:37:02.452054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2026-01-04T02:37:02.452276Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:37:02.452318Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-04T02:37:02.452395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:37:02.452416Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:37:02.452810Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:37:02.452895Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:37:02.452937Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:37:02.452971Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:326:2315] 2026-01-04T02:37:02.453094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:37:02.453115Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:326:2315] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2026-01-04T02:37:02.453447Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:37:02.453613Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys" took 202us result status StatusSuccess 2026-01-04T02:37:02.454031Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys" PathDescription { Self { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 } ChildrenExist: true } Children { Name: "new_sys_view" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:37:02.454439Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/.sys/new_sys_view" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:37:02.454588Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/.sys/new_sys_view" took 170us result status StatusSuccess 2026-01-04T02:37:02.454879Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/.sys/new_sys_view" PathDescription { Self { Name: "new_sys_view" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSysView CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 
ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 SysViewVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } SysViewDescription { Name: "new_sys_view" Type: EPartitionStats SourceObject { OwnerId: 72057594046678944 LocalId: 1 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TTxDataShardMiniKQL::WriteKeyTooLarge >> TxUsage::Sinks_Olap_WriteToTopicAndTable_3_Query [GOOD] |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sysview/unittest >> TTxDataShardMiniKQL::TableStats [GOOD] >> TTxDataShardMiniKQL::TableStatsHistograms >> TTxDataShardMiniKQL::ReadConstant >> YdbIndexTable::MultiShardTableOneUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] >> KqpBatchUpdate::ManyPartitions_2 [GOOD] |89.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... results_accumulator.log} |89.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... results_accumulator.log} |89.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_sysview/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpQueryPerf::IndexReplace-QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 30086, MsgBus: 10898 2026-01-04T02:36:43.035857Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329801011360824:2195];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:43.035901Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:43.106295Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:36:43.421881Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:43.432693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:43.432790Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:43.486966Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:43.570434Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:43.572403Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329796716393362:2081] 1767494203000495 != 1767494203000498 2026-01-04T02:36:43.615547Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:43.744785Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:43.744805Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:43.744818Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:43.744921Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:44.058074Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:44.275487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:44.285403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:44.374718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.551898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.754890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.834225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:46.843714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329813896264424:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:46.843838Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:46.849832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329813896264434:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:46.849961Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.163721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.198676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.242854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.287341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.334188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.374257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.411910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.466494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:47.563666Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329818191232601:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.563773Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.564130Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329818191232606:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.564173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329818191232607:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.564301Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:47.568813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:47.584518Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329818191232610:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:47.648179Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329818191232661:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:48.036648Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329801011360824:2195];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:48.036742Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existenc ... 2026-01-04T02:36:53.279237Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:53.309071Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:53.309160Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:53.312109Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:53.365757Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:53.365776Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:53.365782Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:53.365875Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:53.381919Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:53.859171Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:53.866603Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:36:53.880169Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.934286Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:54.119531Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:54.179620Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:54.214187Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:56.557248Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329856718168019:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:56.557335Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:56.557686Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329856718168029:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:56.557763Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:56.638166Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:56.696196Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:56.731601Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:56.770348Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:56.810221Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:56.855134Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:56.894803Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:56.954313Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:57.094168Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329861013136195:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:57.094294Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:57.094789Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329861013136200:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:57.094835Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329861013136201:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:57.094873Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:57.099495Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:57.113777Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329861013136204:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:36:57.213028Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329861013136257:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:58.168776Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329843833264402:2156];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:58.168878Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:58.956260Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:59.005173Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:59.058040Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> YdbProxy::DropTable >> YdbProxy::CreateTable >> YdbProxy::CopyTable >> TTxDataShardMiniKQL::WriteKeyTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteValueTooLarge >> YdbProxy::ListDirectory ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::SelectRangeWithNotFullKey [GOOD] Test command err: 2026-01-04T02:37:01.452671Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:37:01.457711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:37:01.457758Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:01.467983Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:37:01.468314Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:37:01.468601Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:37:01.518833Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:37:01.527887Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:37:01.528003Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:37:01.529717Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:37:01.529796Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:37:01.529871Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:37:01.530290Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:37:01.531719Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:37:01.531816Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:37:01.601748Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:37:01.630563Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:37:01.630752Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:37:01.630840Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:37:01.630894Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:37:01.630957Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:37:01.630989Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:01.631180Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:01.631220Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:01.631504Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:37:01.631622Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:37:01.631707Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:37:01.631733Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:37:01.631757Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:37:01.631785Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:37:01.631817Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:37:01.631858Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:37:01.631903Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:37:01.632038Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: 
StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:01.632090Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:01.632130Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:37:01.637569Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nx\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\016\n\010__tablet\030\004 9\032\023\n\r__updateEpoch\030\004 :\032\020\n\n__updateNo\030\004 ;(\"J\014/Root/table1\222\002\013\th\020\000\000\000\000\000\000\020\r" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:37:01.637636Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:37:01.637698Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:01.637875Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:37:01.637913Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:37:01.637959Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:37:01.638011Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:37:01.638048Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:37:01.638074Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:37:01.638097Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:37:01.638357Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:37:01.638386Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:37:01.638411Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:37:01.638450Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:37:01.638508Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:37:01.638567Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:37:01.638602Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:37:01.638625Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 
2026-01-04T02:37:01.638643Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:37:01.651240Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:37:01.651300Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:37:01.651325Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:37:01.651350Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:37:01.651412Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:37:01.651809Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:01.651849Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:01.651878Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:37:01.652013Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:37:01.652051Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:37:01.652146Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:37:01.652181Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:37:01.652207Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:37:01.652231Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:37:01.655499Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:37:01.655599Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:01.655857Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:01.655908Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:01.655981Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:37:01.656024Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:37:01.656086Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:37:01.656131Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:37:01 ... ine.cpp:1937: Add [0:7] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-04T02:37:03.832981Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit BuildAndWaitDependencies 2026-01-04T02:37:03.833028Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-04T02:37:03.833092Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 9437184 2026-01-04T02:37:03.833137Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is Executed 2026-01-04T02:37:03.833166Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-04T02:37:03.833194Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 9437184 to execution unit BlockFailPoint 2026-01-04T02:37:03.833222Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit BlockFailPoint 2026-01-04T02:37:03.833252Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is Executed 2026-01-04T02:37:03.833279Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit BlockFailPoint 2026-01-04T02:37:03.833305Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 9437184 to execution unit ExecuteDataTx 2026-01-04T02:37:03.833330Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit ExecuteDataTx 2026-01-04T02:37:03.834177Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:7] at tablet 9437184 with status COMPLETE 2026-01-04T02:37:03.834266Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:7] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 3, SelectRangeBytes: 46, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-04T02:37:03.834342Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is Executed 2026-01-04T02:37:03.834377Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit ExecuteDataTx 2026-01-04T02:37:03.834409Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 9437184 to execution unit FinishPropose 2026-01-04T02:37:03.834441Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit FinishPropose 2026-01-04T02:37:03.834490Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 7 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-04T02:37:03.834562Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is DelayComplete 2026-01-04T02:37:03.834596Z node 3 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit FinishPropose 2026-01-04T02:37:03.834641Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 9437184 to execution unit CompletedOperations 2026-01-04T02:37:03.834687Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 9437184 on unit CompletedOperations 2026-01-04T02:37:03.834736Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 9437184 is Executed 2026-01-04T02:37:03.834763Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 9437184 executing on unit CompletedOperations 2026-01-04T02:37:03.834790Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 9437184 has finished 2026-01-04T02:37:03.834864Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:37:03.834903Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:7] at 9437184 on unit FinishPropose 2026-01-04T02:37:03.834955Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:03.839197Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [3:103:2137], Recipient [3:237:2229]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 12884904025 } 2026-01-04T02:37:03.839269Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2026-01-04T02:37:03.839640Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:307:2288], Recipient [3:237:2229]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:03.839692Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:03.839744Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:306:2287], serverId# [3:307:2288], sessionId# [0:0:0] 2026-01-04T02:37:03.839963Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:237:2229]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: "\032\342\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\002\203\004\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?6\003?4e\005\001\013?:\003?8m\005\001\003?<\002\003?>\000\003?@\000\003?B\000\006\004?F\003\203\014\000\003\203\014\000\003\003?H\000\377\007\002\000\005?\032\005?\026?r\000\005?\030\003\005? 
\005?\034?r\000\006\000?\036\003?x\005?&\006\ 2026-01-04T02:37:03.839993Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:37:03.840088Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:03.840839Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit CheckDataTx 2026-01-04T02:37:03.840924Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2026-01-04T02:37:03.840959Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit CheckDataTx 2026-01-04T02:37:03.840989Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-04T02:37:03.841018Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit BuildAndWaitDependencies 2026-01-04T02:37:03.841054Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-04T02:37:03.841114Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:8] at 9437184 2026-01-04T02:37:03.841154Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2026-01-04T02:37:03.841179Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-04T02:37:03.841208Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 9437184 to execution unit BlockFailPoint 2026-01-04T02:37:03.841228Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit BlockFailPoint 2026-01-04T02:37:03.841249Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2026-01-04T02:37:03.841274Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit BlockFailPoint 2026-01-04T02:37:03.841294Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 9437184 to execution unit ExecuteDataTx 2026-01-04T02:37:03.841311Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit ExecuteDataTx 2026-01-04T02:37:03.841842Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:8] at tablet 9437184 with status COMPLETE 2026-01-04T02:37:03.841913Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:8] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 2, SelectRangeBytes: 31, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-04T02:37:03.841971Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2026-01-04T02:37:03.841994Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit ExecuteDataTx 2026-01-04T02:37:03.842014Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 
9437184 to execution unit FinishPropose 2026-01-04T02:37:03.842037Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit FinishPropose 2026-01-04T02:37:03.842070Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 8 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-04T02:37:03.842166Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is DelayComplete 2026-01-04T02:37:03.842205Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit FinishPropose 2026-01-04T02:37:03.842249Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:8] at 9437184 to execution unit CompletedOperations 2026-01-04T02:37:03.842289Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:8] at 9437184 on unit CompletedOperations 2026-01-04T02:37:03.842341Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:8] at 9437184 is Executed 2026-01-04T02:37:03.842366Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:8] at 9437184 executing on unit CompletedOperations 2026-01-04T02:37:03.842395Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:8] at 9437184 has finished 2026-01-04T02:37:03.842462Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:37:03.842497Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:8] at 9437184 on unit FinishPropose 2026-01-04T02:37:03.842539Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> YdbProxy::CreateTopic >> TTxDataShardMiniKQL::ReadConstant [GOOD] >> TTxDataShardMiniKQL::ReadAfterWrite >> YdbProxy::DescribePath >> KqpRboYql::ConstantFolding+ColumnStore [GOOD] >> KqpRboYql::ConstantFolding-ColumnStore >> YdbProxy::ReadTopic >> YdbProxy::MakeDirectory >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] >> YdbProxy::RemoveDirectory >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Table >> LocalPartitionReader::Simple >> TTxDataShardMiniKQL::ReadAfterWrite [GOOD] >> TTxDataShardMiniKQL::ReadNonExisting >> PartitionEndWatcher::EmptyPartition [GOOD] >> PartitionEndWatcher::AfterCommit [GOOD] >> YdbProxy::AlterTable >> LocalPartitionReader::Simple [GOOD] >> KqpRboPg::Bench_Filter [GOOD] >> KqpRboPg::Bench_CrossFilter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_2 [GOOD] Test command err: Trying to start YDB, gRPC: 7895, MsgBus: 22348 2026-01-04T02:33:38.154086Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329007168619398:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:38.154152Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:38.391617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:38.391731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:38.393914Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:38.400614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:38.513387Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:38.594132Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:38.627111Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:38.627141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:38.627149Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:38.627240Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:39.180708Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:39.209398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:39.221800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:33:39.287328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:39.412403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:39.564881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:39.633918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.395483Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329020053523124:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.395651Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.396112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329020053523134:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.396205Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:41.866187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:41.954793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.019105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.057279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.096020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.143616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.215661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.265076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:42.365292Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329024348491306:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.365392Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.365868Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329024348491311:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.365920Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329024348491312:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.365949Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:42.370459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:42.381390Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329024348491315:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:33:42.472592Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329024348491368:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:43.157921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329007168619398:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:43.157996Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:33:44.229606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:68 ... 6Z node 12 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7591329823315391191:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:48.240902Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:48.294386Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:48.407344Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:48.409507Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:7591329823315391163:2081] 1767494208239593 != 1767494208239596 2026-01-04T02:36:48.454687Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:48.454835Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:48.471824Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:48.525281Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:48.582668Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:48.582700Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:48.582715Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:48.582838Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:49.259715Z node 12 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:49.406460Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:49.425910Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:49.509651Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:49.755722Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:49.879859Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.241968Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7591329823315391191:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:53.242082Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:54.391726Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591329849085196838:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:54.391863Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:54.392442Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591329849085196847:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:54.392514Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:54.529385Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:54.587551Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:54.633533Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:54.704453Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:54.751852Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:54.810889Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:54.896879Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:55.037973Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:55.174567Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591329853380165047:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:55.174724Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:55.175204Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591329853380165052:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:55.175276Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591329853380165053:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:55.175769Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:55.181559Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:55.197820Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7591329853380165056:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:55.266806Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7591329853380165111:3782] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:58.481075Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> LocalPartitionReader::Retries >> TTxDataShardMiniKQL::WriteValueTooLarge [GOOD] >> TTxDataShardMiniKQL::WriteLargeExternalBlob |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Simple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageMultiShard [GOOD] Test command err: 2026-01-04T02:37:01.398449Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:37:01.400282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:37:01.400339Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:01.410486Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:37:01.410834Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:37:01.411150Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:37:01.456123Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:37:01.466876Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:37:01.466994Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:37:01.468814Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:37:01.468892Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:37:01.468969Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:37:01.469362Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:37:01.470956Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:37:01.471059Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:37:01.544440Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:37:01.575602Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:37:01.575829Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:37:01.575995Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:37:01.576044Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:37:01.576109Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:37:01.576153Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:01.576376Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:01.576428Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:01.576775Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:37:01.576908Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:37:01.576987Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:37:01.577026Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:37:01.577081Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:37:01.577117Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:37:01.577149Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:37:01.577201Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:37:01.577244Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:37:01.577338Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:01.577389Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:01.577430Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:37:01.584323Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:37:01.584414Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event 
TEvDataShard::TEvProposeTransaction 2026-01-04T02:37:01.584512Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:01.584672Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:37:01.584717Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:37:01.584771Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:37:01.584850Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:37:01.584907Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:37:01.584947Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:37:01.584979Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:37:01.585393Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:37:01.585440Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:37:01.585496Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:37:01.585534Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:37:01.585590Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:37:01.585650Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:37:01.585699Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:37:01.585746Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:37:01.585775Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:37:01.598919Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:37:01.598994Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:37:01.599029Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:37:01.599089Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:37:01.599181Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:37:01.599780Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 
2026-01-04T02:37:01.599843Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:01.599898Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:37:01.600077Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2026-01-04T02:37:01.600110Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:37:01.600254Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2026-01-04T02:37:01.600304Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [2:1] at 9437184 is Executed 2026-01-04T02:37:01.600353Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:37:01.600391Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [2:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:37:01.604751Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:37:01.604839Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:01.605122Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:01.605168Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:01.605243Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:37:01.605304Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:37:01.605339Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:37:01.605380Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2:1] in PlanQueue unit at 9437184 2026-01-04T02:37:01.605416Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [2:1] at 9437184 on unit PlanQueue 2026-01-04T02:37:01. ... 
4T02:37:05.086122Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2026-01-04T02:37:05.086247Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 97002198b}, Memory{0 dyn 0} 2026-01-04T02:37:05.086464Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 10778022b}, Memory{0 dyn 10778022} 2026-01-04T02:37:05.086604Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:37:05.086651Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [6:5] at 9437184 on unit ExecuteDataTx 2026-01-04T02:37:05.087935Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 5 at 9437184 restored its data 2026-01-04T02:37:05.088825Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [6:5] at 9437184 exceeded memory limit 10778022 and requests 86224176 more for the next try 2026-01-04T02:37:05.088965Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 5 released its data 2026-01-04T02:37:05.089007Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437184 is Restart 2026-01-04T02:37:05.089041Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:37:05.089070Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:37:05.089134Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:52: TPlanQueueUnit at 9437184 out-of-order limits exceeded 2026-01-04T02:37:05.089168Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:37:05.089238Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 4 -> retry Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T02:37:05.089289Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} pin 0 (0 b) load 0 (0 b) 2026-01-04T02:37:05.089346Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release tx data 2026-01-04T02:37:05.089391Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} released on update Res{3 10778022b}, Memory{0 dyn 0} 2026-01-04T02:37:05.089446Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} update Res{3 97002198b} type transaction 2026-01-04T02:37:05.089579Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:370:2315]) (release resources {0, 97002198}) 2026-01-04T02:37:05.089643Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_transaction from 18.820903 to 1.882090 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437185 (3 by [3:370:2315])) 2026-01-04T02:37:05.089765Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:441: Update task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 
9437184 (3 by [3:260:2230]) (priority=5 type=transaction resources={0, 97002198} resubmit=1) 2026-01-04T02:37:05.090026Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:260:2230]) to queue queue_transaction 2026-01-04T02:37:05.090094Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 97002198} for task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:260:2230]) from queue queue_transaction 2026-01-04T02:37:05.090133Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:260:2230]) to queue queue_transaction 2026-01-04T02:37:05.090183Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_transaction from 0.000000 to 15.244932 (insert task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:260:2230])) 2026-01-04T02:37:05.090316Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} acquired dyn mem Res{3 97002198b}, Memory{0 dyn 97002198} 2026-01-04T02:37:05.090380Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:37:05.090407Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [6:5] at 9437184 on unit ExecuteDataTx 2026-01-04T02:37:05.091261Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 5 at 9437184 restored its data 2026-01-04T02:37:05.361674Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [6:5] at tablet 9437184 with status COMPLETE 2026-01-04T02:37:05.361761Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [6:5] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 2, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 22, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-04T02:37:05.361845Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:37:05.361879Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [6:5] at 9437184 executing on unit ExecuteDataTx 2026-01-04T02:37:05.361911Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [6:5] at 9437184 to execution unit CompleteOperation 2026-01-04T02:37:05.361942Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [6:5] at 9437184 on unit CompleteOperation 2026-01-04T02:37:05.362143Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437184 is DelayComplete 2026-01-04T02:37:05.362172Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [6:5] at 9437184 executing on unit CompleteOperation 2026-01-04T02:37:05.362198Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [6:5] at 9437184 to execution unit CompletedOperations 2026-01-04T02:37:05.362222Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [6:5] at 9437184 on unit CompletedOperations 2026-01-04T02:37:05.362280Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [6:5] at 9437184 is 
Executed 2026-01-04T02:37:05.362305Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [6:5] at 9437184 executing on unit CompletedOperations 2026-01-04T02:37:05.362328Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [6:5] at 9437184 has finished 2026-01-04T02:37:05.362355Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:37:05.362377Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:37:05.362401Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:37:05.362425Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:37:05.362518Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} hope 5 -> done Change{16, redo 636b alter 0b annex 0, ~{ 1001, 1, 3, 4, 12, 7, 8, 5 } -{ }, 0 gb} 2026-01-04T02:37:05.362575Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:9} Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} release Res{3 97002198b}, Memory{0 dyn 0} 2026-01-04T02:37:05.362762Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:260:2230]) (release resources {0, 97002198}) 2026-01-04T02:37:05.362809Z node 3 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_transaction from 15.244932 to 0.000000 (remove task Tx{19, NKikimr::NDataShard::TDataShard::TTxProgressTransaction} at tablet 9437184 (3 by [3:260:2230])) 2026-01-04T02:37:05.375632Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437185:3:10} commited cookie 1 for step 9 2026-01-04T02:37:05.375709Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-04T02:37:05.375757Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:5] at 9437185 on unit CompleteOperation 2026-01-04T02:37:05.375823Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 5] from 9437185 at tablet 9437185 send result to client [3:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-04T02:37:05.375949Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437185 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2026-01-04T02:37:05.376001Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-01-04T02:37:05.376263Z node 3 :TABLET_EXECUTOR DEBUG: Leader{9437184:3:10} commited cookie 1 for step 9 2026-01-04T02:37:05.376297Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:37:05.376344Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:5] at 9437184 on unit CompleteOperation 2026-01-04T02:37:05.376384Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 5] from 9437184 at tablet 9437184 send result to client [3:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-04T02:37:05.376425Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 
SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2026-01-04T02:37:05.376452Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:05.376653Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [3:348:2315], Recipient [3:456:2398]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437185 SetTabletConsumer# 9437185 Flags# 0 Seqno# 2} 2026-01-04T02:37:05.376704Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:37:05.376754Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437186 source 9437186 dest 9437185 consumer 9437185 txId 5 2026-01-04T02:37:05.376826Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [3:238:2230], Recipient [3:456:2398]: {TEvReadSet step# 6 txid# 5 TabletSource# 9437186 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2026-01-04T02:37:05.376847Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:37:05.376863Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437186 source 9437186 dest 9437184 consumer 9437184 txId 5 |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> LocalPartitionReader::Retries [GOOD] >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Retries [GOOD] |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::ReadNonExisting [GOOD] Test command err: 2026-01-04T02:37:04.298987Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:37:04.302939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:37:04.303002Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:04.315932Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:37:04.316411Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:37:04.316775Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:37:04.365422Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:37:04.374453Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:37:04.374563Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:37:04.376578Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: 
QueueSize: 0, at tablet: 9437184 2026-01-04T02:37:04.376679Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:37:04.376759Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:37:04.377214Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:37:04.379326Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:37:04.379443Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:37:04.459020Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:37:04.495902Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:37:04.496162Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:37:04.496289Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:37:04.496352Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:37:04.496435Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:37:04.496503Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:04.496754Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:04.496804Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:04.497135Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:37:04.497255Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:37:04.497332Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:37:04.497376Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:37:04.497423Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:37:04.497466Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:37:04.497510Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:37:04.497565Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:37:04.497616Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:37:04.497721Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:04.497771Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:04.498067Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:37:04.506215Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:37:04.506318Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:37:04.506424Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:04.506644Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:37:04.506707Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:37:04.506766Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:37:04.506849Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:37:04.506924Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:37:04.506984Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:37:04.507027Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:37:04.507377Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:37:04.507416Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:37:04.507461Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:37:04.507496Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:37:04.507565Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:37:04.507608Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:37:04.507655Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:37:04.507695Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:37:04.507725Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:37:04.521965Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:37:04.522052Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:37:04.522089Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:37:04.522130Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:37:04.522232Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:37:04.522833Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:04.522893Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:04.522934Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:37:04.523084Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:37:04.523126Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:37:04.523288Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:37:04.523336Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:37:04.523373Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:37:04.523420Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:37:04.527203Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:37:04.527301Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:04.527610Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:04.527658Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:04.527725Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:37:04.527773Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:37:04.527814Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:37:04.527870Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:37:04.527921Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
26-01-04T02:37:06.705170Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:742: TxInitSchemaDefaults.Execute 2026-01-04T02:37:06.705287Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:754: TxInitSchemaDefaults.Complete 2026-01-04T02:37:06.705465Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [3:238:2230], Recipient [3:238:2230]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:06.705519Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:06.705798Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:37:06.705923Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:37:06.706055Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5937: Got TEvDataShard::TEvSchemaChanged for unknown txId 1 message# Source { RawX1: 238 RawX2: 12884904118 } Origin: 9437184 State: 2 TxId: 1 Step: 0 Generation: 3 2026-01-04T02:37:06.706134Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [3:26:2073], Recipient [3:238:2230]: {TEvRegisterTabletResult TabletId# 9437184 Entry# 0} 2026-01-04T02:37:06.706169Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-01-04T02:37:06.706215Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 9437184 time 0 2026-01-04T02:37:06.706254Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:06.706338Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [3:26:2073], Recipient [3:238:2230]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 0 ReadStep# 0 } 2026-01-04T02:37:06.706377Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2026-01-04T02:37:06.706415Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 9437184 coordinator 72057594046316545 last step 0 next step 0 2026-01-04T02:37:06.706478Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:37:06.706518Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:37:06.706555Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:37:06.706597Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:37:06.706634Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:37:06.706669Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:37:06.706714Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:37:06.706821Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877760, Sender [3:284:2267], Recipient [3:238:2230]: 
NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 4200 Status: OK ServerId: [3:288:2271] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-04T02:37:06.706852Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3186: StateWork, processing event TEvTabletPipe::TEvClientConnected 2026-01-04T02:37:06.706925Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269552132, Sender [3:126:2151], Recipient [3:238:2230]: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 1 2026-01-04T02:37:06.706958Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3157: StateWork, processing event TEvDataShard::TEvSchemaChangedResult 2026-01-04T02:37:06.706995Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 1 datashard 9437184 state Ready 2026-01-04T02:37:06.707067Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 9437184 Got TEvSchemaChangedResult from SS at 9437184 2026-01-04T02:37:06.735596Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877763, Sender [3:284:2267], Recipient [3:238:2230]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 4200 ClientId: [3:284:2267] ServerId: [3:288:2271] } 2026-01-04T02:37:06.735662Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-04T02:37:06.777510Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [3:103:2137], Recipient [3:238:2230]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 12884904025 } 2026-01-04T02:37:06.777593Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2026-01-04T02:37:06.777894Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:294:2275], Recipient [3:238:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:06.777942Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:06.778007Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:292:2274], serverId# [3:294:2275], sessionId# [0:0:0] 2026-01-04T02:37:06.778262Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:238:2230]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: "\032\365\001\037\004\0021\nvalue\005\205\n\205\002\207\205\002\207\203\001H\006\002\205\004\205\002?\006\002\205\000\034MyReads MyWrites\205\004\205\002?\006\002\206\202\024Reply\024Write?\014\205\002\206\203\010\002 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\010)\211\n?\006\203\005\004\200\205\002\203\004\006\213\002\203\004\203\004$SelectRow\000\003?\036 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000?\004\005?\"\003? 
p\001\013?&\003?$T\001\003?(\000\037\002\000\005?\016\005?\n?8\000\005?\014\003\005?\024\005?\020?8\000\006\000?\022\003?>\005?\032\006\000?\030\001\037/ \0018\001" TxId: 2 ExecLevel: 0 Flags: 0 2026-01-04T02:37:06.778308Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:37:06.778414Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:06.779156Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2026-01-04T02:37:06.779266Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-04T02:37:06.779312Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2026-01-04T02:37:06.779352Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-04T02:37:06.779390Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2026-01-04T02:37:06.779434Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-04T02:37:06.779498Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 9437184 2026-01-04T02:37:06.779539Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-04T02:37:06.779567Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-04T02:37:06.779593Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit BlockFailPoint 2026-01-04T02:37:06.779618Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit BlockFailPoint 2026-01-04T02:37:06.779647Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-04T02:37:06.779670Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit BlockFailPoint 2026-01-04T02:37:06.779691Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2026-01-04T02:37:06.779718Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-01-04T02:37:06.780195Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2026-01-04T02:37:06.780278Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 1, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-04T02:37:06.780342Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-04T02:37:06.780373Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 
2026-01-04T02:37:06.780399Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit FinishPropose 2026-01-04T02:37:06.780428Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit FinishPropose 2026-01-04T02:37:06.780470Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-04T02:37:06.780569Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is DelayComplete 2026-01-04T02:37:06.780607Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2026-01-04T02:37:06.780645Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit CompletedOperations 2026-01-04T02:37:06.780683Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2026-01-04T02:37:06.780735Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-04T02:37:06.780761Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2026-01-04T02:37:06.780789Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 9437184 has finished 2026-01-04T02:37:06.780855Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:37:06.780894Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 9437184 on unit FinishPropose 2026-01-04T02:37:06.780939Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] >> LocalPartitionReader::Booting [GOOD] >> KqpBatchDelete::ManyPartitions_3 [GOOD] >> TMiniKQLEngineFlatTest::TestPureProgram >> YdbProxy::RemoveDirectory [GOOD] >> YdbProxy::StaticCreds |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_local_partition_reader/unittest >> LocalPartitionReader::Booting [GOOD] >> YdbProxy::ListDirectory [GOOD] >> YdbProxy::DropTopic >> TMiniKQLEngineFlatTest::TestPureProgram [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists >> YdbProxy::CreateTopic [GOOD] >> YdbProxy::DescribeConsumer >> YdbProxy::DescribePath [GOOD] >> YdbProxy::DescribeTable |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_local_partition_reader/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeFullExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive >> YdbProxy::MakeDirectory [GOOD] >> YdbProxy::OAuthToken >> TMiniKQLEngineFlatTest::TestSelectRangeFromExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo >> 
TMiniKQLProtoTestYdb::TestExportVoidTypeYdb >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromIncTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] >> TMiniKQLProtoTestYdb::TestExportVoidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb >> YdbProxy::CreateTable [GOOD] >> YdbProxy::CreateCdcStream >> TMiniKQLEngineFlatTest::TestSelectRangeBothIncFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo >> YdbProxy::CopyTable [GOOD] >> YdbProxy::CopyTables >> TMiniKQLProtoTestYdb::TestExportTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructTypeYdb >> KqpRboYql::ConstantFolding-ColumnStore [GOOD] >> KqpRboYql::BasicJoins >> TMiniKQLEngineFlatTest::TestSelectRangeBothExcFromExcTo [GOOD] >> TMiniKQLEngineFlatTest::TestMapsPushdown >> TMiniKQLProtoTestYdb::TestExportStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteLargeExternalBlob [GOOD] Test command err: 2026-01-04T02:37:04.038614Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:37:04.040427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:37:04.040468Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:04.048221Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:37:04.048502Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:37:04.048722Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:37:04.088812Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:37:04.096398Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:37:04.096492Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:37:04.098255Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:37:04.098336Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:37:04.098394Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:37:04.098781Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:37:04.100022Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:37:04.100094Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:37:04.163773Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:37:04.206037Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 
2026-01-04T02:37:04.206242Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:37:04.206371Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:37:04.206426Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:37:04.206487Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:37:04.206539Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:04.206758Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:04.206802Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:04.207094Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:37:04.207216Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:37:04.207283Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:37:04.207336Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:37:04.207374Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:37:04.207407Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:37:04.207438Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:37:04.207495Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:37:04.207545Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:37:04.207641Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:04.207691Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:04.207728Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:37:04.214881Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nY\n\006table2\032\n\n\004key1\030\002 \"\032\013\n\004key2\030\200$ #\032\014\n\005value\030\200$ 8(\"(#:\010Z\006\010\000\030\000(\000J\014/Root/table2\222\002\013\th\020\000\000\000\000\000\000\020\016" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:37:04.214981Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 
2026-01-04T02:37:04.215071Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:04.215288Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:37:04.215342Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:37:04.215413Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:37:04.215504Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:37:04.215542Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:37:04.215579Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:37:04.215613Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:37:04.215910Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:37:04.215965Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:37:04.216006Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:37:04.216034Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:37:04.216087Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:37:04.216141Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:37:04.216181Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:37:04.216213Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:37:04.216241Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:37:04.230625Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:37:04.230708Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:37:04.230771Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:37:04.230812Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:37:04.230909Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:37:04.231471Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:04.231528Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:04.231575Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:37:04.231727Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:37:04.231770Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:37:04.231908Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:37:04.231968Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:37:04.232007Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:37:04.232044Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:37:04.240874Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:37:04.240956Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:04.241234Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:04.241314Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:04.241408Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:37:04.241457Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:37:04.241495Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:37:04.241535Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:37:04.241571Z node 1 :TX_DATASHARD TRACE: dat ... 
[0:2] at 9437184 on unit FinishPropose 2026-01-04T02:37:07.747803Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:37:07.747830Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2026-01-04T02:37:07.747869Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit CompletedOperations 2026-01-04T02:37:07.747903Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2026-01-04T02:37:07.747972Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-04T02:37:07.747998Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2026-01-04T02:37:07.748033Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 9437184 has finished 2026-01-04T02:37:07.786852Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:37:07.786931Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 9437184 on unit FinishPropose 2026-01-04T02:37:07.786987Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 5 ms, status: COMPLETE 2026-01-04T02:37:07.787077Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:08.352107Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [3:103:2137], Recipient [3:238:2230]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 12884904025 } 2026-01-04T02:37:08.352231Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2026-01-04T02:37:08.352706Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:303:2283], Recipient [3:238:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:08.352757Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:08.352834Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:302:2282], serverId# [3:303:2283], sessionId# [0:0:0] 2026-01-04T02:37:08.563741Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:238:2230]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: "\032\332\201\200\010\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\004\203\004\203\001H\205\002\203\001H\01056$UpdateRow\000\003?\016 
h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000\013?\024\003?\020\251\003\003?\022\006bar\003\005?\030\003?\026\007\000\000\000\001xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx 2026-01-04T02:37:08.566805Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:37:08.567030Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:08.633266Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit CheckDataTx 2026-01-04T02:37:08.633417Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Executed 2026-01-04T02:37:08.633492Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit CheckDataTx 2026-01-04T02:37:08.633567Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-04T02:37:08.633616Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit BuildAndWaitDependencies 2026-01-04T02:37:08.633676Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-04T02:37:08.633763Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 9437184 2026-01-04T02:37:08.633834Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Executed 2026-01-04T02:37:08.633861Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-04T02:37:08.633885Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 9437184 to execution unit BlockFailPoint 2026-01-04T02:37:08.633910Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit BlockFailPoint 2026-01-04T02:37:08.633938Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Executed 2026-01-04T02:37:08.633960Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit BlockFailPoint 2026-01-04T02:37:08.633981Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 9437184 to execution unit ExecuteDataTx 2026-01-04T02:37:08.634006Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2026-01-04T02:37:08.634072Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-04T02:37:08.634151Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:126: Operation [0:3] at 9437184 requested 
46269686 more memory 2026-01-04T02:37:08.634214Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Restart 2026-01-04T02:37:08.634363Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:08.634423Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2026-01-04T02:37:08.634498Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-04T02:37:08.663632Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:3] at 9437184 exceeded memory limit 50463990 and requests 403711920 more for the next try 2026-01-04T02:37:08.667873Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 3 released its data 2026-01-04T02:37:08.668010Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Restart 2026-01-04T02:37:08.668388Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:08.668431Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit ExecuteDataTx 2026-01-04T02:37:08.731081Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 3 at 9437184 restored its data 2026-01-04T02:37:08.731189Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-04T02:37:08.832379Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:3] at tablet 9437184 with status COMPLETE 2026-01-04T02:37:08.832503Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:3] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 16777223, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-04T02:37:08.832603Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:37:08.832646Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit ExecuteDataTx 2026-01-04T02:37:08.832702Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 9437184 to execution unit FinishPropose 2026-01-04T02:37:08.832752Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit FinishPropose 2026-01-04T02:37:08.832806Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is DelayComplete 2026-01-04T02:37:08.832845Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit FinishPropose 2026-01-04T02:37:08.832891Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 9437184 to execution unit CompletedOperations 2026-01-04T02:37:08.832930Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 9437184 on unit 
CompletedOperations 2026-01-04T02:37:08.832990Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 9437184 is Executed 2026-01-04T02:37:08.833018Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 9437184 executing on unit CompletedOperations 2026-01-04T02:37:08.833054Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 9437184 has finished 2026-01-04T02:37:08.919039Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:37:08.919121Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:3] at 9437184 on unit FinishPropose 2026-01-04T02:37:08.919181Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 3 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2026-01-04T02:37:08.919295Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:08.992707Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-04T02:37:08.992787Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2026-01-04T02:37:09.000331Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:236:2229], Recipient [3:238:2230]: NKikimr::TEvTablet::TEvFollowerGcApplied |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest |89.2%| [TA] $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} >> TMiniKQLEngineFlatTest::TestMapsPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown >> TMiniKQLProtoTestYdb::TestExportVariantTupleTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey |89.2%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... results_accumulator.log} |89.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/backup/impl/ut_local_partition_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} >> YdbProxy::AlterTable [GOOD] >> YdbProxy::DropTable [GOOD] >> YdbProxy::DescribeTopic >> TMiniKQLEngineFlatTest::TestNoOrderedTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown >> TMiniKQLProtoTestYdb::TestExportVariantStructTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVoidYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStringYdb >> TMiniKQLProgramBuilderTest::TestUpdateRowStaticKey [GOOD] >> TMiniKQLProtoTestYdb::TestExportDataTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb >> TMiniKQLEngineFlatTest::TestNoAggregatedPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestNoPartialSortPushdown [GOOD] >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination >> TMiniKQLProtoTestYdb::TestExportStringYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidYdb >> TMiniKQLProtoTestYdb::TestExportDecimalTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] >> TMiniKQLProtoTestYdb::TestExportUuidYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportTupleYdb >> TMiniKQLProtoTestYdb::TestExportDictTypeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportBoolYdb >> TMiniKQLProtoTestYdb::TestExportTupleYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportStructYdb >> TMiniKQLProtoTestYdb::TestExportBoolYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDoubleYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalYdb >> TMiniKQLProtoTestYdb::TestExportStructYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb >> TMiniKQLProtoTestYdb::TestExportDecimalNegativeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb >> TMiniKQLProtoTestYdb::TestExportDecimalHugeYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportEmptyOptionalOptionalYdb [GOOD] >> TMiniKQLProtoTestYdb::TestExportDictYdb >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists >> TMiniKQLProtoTestYdb::TestExportDictYdb [GOOD] >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistWithoutColumns [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_3 [GOOD] Test command err: Trying to start YDB, gRPC: 23176, MsgBus: 22478 2026-01-04T02:33:48.940965Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329050337376323:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:48.942164Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:49.337886Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:49.355959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:49.356103Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:49.414382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:49.466557Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:49.469119Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329050337376220:2081] 1767494028909215 != 1767494028909218 2026-01-04T02:33:49.598074Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:49.611221Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:49.611245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:49.611250Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:49.611330Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:49.940266Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:50.475781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:50.596329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:50.821262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:51.134130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:51.243774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:53.941913Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329050337376323:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:53.941988Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:33:54.691795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329076107181867:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:54.691922Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:54.692333Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329076107181876:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:54.692395Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.819781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.899810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.951666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.996173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:56.061144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:56.132263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:56.184886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:56.237564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:56.367600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329084697117364:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:56.367661Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:56.367916Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329084697117369:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:56.367950Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329084697117370:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:56.368245Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:56.371328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:56.382829Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329084697117373:2494], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:33:56.466578Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329084697117424:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:58.493594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... 515]; 2026-01-04T02:36:53.527103Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:53.573392Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:53.721042Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:53.725980Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:7591329842426143273:2081] 1767494213525589 != 1767494213525592 2026-01-04T02:36:53.742702Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:53.742845Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:53.783054Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:53.797911Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:53.862789Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:53.862817Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:53.862830Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:53.862944Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:54.537171Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:54.879045Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:54.896795Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:55.002378Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:55.243996Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:55.423181Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:58.529982Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7591329842426143304:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:58.530089Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:59.524885Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591329868195948933:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:59.525033Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:59.525596Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591329868195948942:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:59.525686Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:59.685908Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:59.752689Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:59.817361Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:59.890559Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:59.954200Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:00.012982Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:00.071988Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:00.168925Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:00.316328Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591329872490917121:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:00.316483Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:00.316884Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591329872490917126:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:00.316925Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591329872490917127:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:00.316961Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:00.322890Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:37:00.338196Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-04T02:37:00.338890Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7591329872490917130:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:37:00.438257Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7591329872490917184:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:37:03.461898Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestMultiRSPerDestination [GOOD] Test command err: PrepareShardPrograms (491): too many shard readsets (2 > 1), src tables: [200:301:0], dst tables: [200:301:0] Type { Kind: Struct } >> TMiniKQLEngineFlatTest::TestSelectRowWithoutColumnsExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayload [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistSetPayloadNullValue [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload >> TMiniKQLEngineFlatTest::TestSelectRowPayloadNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestUpdateRowNotExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload >> TMiniKQLEngineFlatTest::TestSelectRangeToInclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowManyShards |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestExportVariantYdb [GOOD] |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestUpdateRowExistChangePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowExistErasePayload [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards >> TMiniKQLEngineFlatTest::TestSelectRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions >> TMiniKQLEngineFlatTest::TestUpdateRowManyShards [GOOD] >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitions [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems >> TMiniKQLEngineFlatTest::TestUpdateRowNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk >> Channels20::IcChannelTrivial [GOOD] >> ConvertUnboxedValueToArrowAndBack::DictUtf8ToInterval >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTable [GOOD] Test command err: 2026-01-04T02:37:06.151720Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329896816971045:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:06.167410Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:06.207859Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:37:06.700516Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:06.741922Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329896816970998:2081] 1767494226095652 != 1767494226095655 2026-01-04T02:37:06.745856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:06.745974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:06.750476Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:06.779754Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:06.873908Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:07.158636Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:07.158662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:07.158672Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:07.158792Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:07.169247Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:07.483684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:09.479424Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329909701873777:2505] txid# 281474976710658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2026-01-04T02:37:09.494041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:09.628310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called 
at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:37:09.667165Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329909701873893:2585] txid# 281474976710661, issues: { message: "Can\'t drop unknown column: \'extra\'" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexPkOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 14523, MsgBus: 16918 2026-01-04T02:35:15.336250Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329422183181804:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:15.336307Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:15.645011Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:15.649707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:15.650382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:15.697662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:15.778831Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:15.863276Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:15.900542Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:15.900562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:15.900580Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:15.900657Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:16.360556Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:16.433554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:16.448098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:35:16.552282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:16.756262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:16.930715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:17.008193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:18.609055Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329435068085546:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:18.609222Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:18.609557Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329435068085556:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:18.609597Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:18.970856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.005374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.038080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.071541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.114339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.147745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.179766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.243561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.319870Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329439363053727:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.319947Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.319966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329439363053732:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.320295Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329439363053734:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.320339Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.323742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:19.335042Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329439363053735:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:35:19.395518Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329439363053789:3778] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:35:20.337314Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329422183181804:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:20.337390Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:35:20.979468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... KqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:36:09.468471Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:09.469917Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:09.485147Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:09.485228Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:09.514072Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:09.582386Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:09.582415Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:09.582424Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:09.582519Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:09.738394Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:10.227394Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:10.235323Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 
2026-01-04T02:36:10.250803Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:10.328833Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:10.331288Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:10.529340Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:10.625574Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.659312Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329670153144534:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.659406Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.659866Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329670153144544:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.659949Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:13.736321Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.778965Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.823809Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.861186Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.901445Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:13.971389Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:14.046347Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:14.104671Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:14.203757Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329674448112714:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:14.203852Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:14.204228Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329674448112719:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:14.204272Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329674448112720:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:14.204374Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:14.208509Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:14.226225Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329674448112723:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:36:14.304941Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329674448112774:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:14.323449Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329652973273525:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:14.324366Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:16.584021Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:24.446081Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:36:24.446110Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProtoTestYdb::TestCellsFromTuple [GOOD] |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest >> TMiniKQLEngineFlatTest::TestTopSortPushdownPk [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortPushdown >> TMiniKQLEngineFlatTest::TestSelectRangeWithPartitionsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSomePushDown [GOOD] >> TMiniKQLEngineFlatTest::TestTakePushdown >> TMiniKQLEngineFlatTest::TestTopSortPushdown [GOOD] >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey >> TMiniKQLEngineFlatTest::TestTakePushdown [GOOD] >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestEraseRowDynamicKey [GOOD] >> TMiniKQLProgramBuilderTest::TestAcquireLocks [GOOD] >> TMiniKQLProgramBuilderTest::TestDiagnostics >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] >> TMiniKQLEngineFlatHostTest::ShardId [GOOD] >> TMiniKQLEngineFlatHostTest::Basic [GOOD] >> TMiniKQLEngineFlatTest::TestAbort >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] >> KqpRboPg::Bench_CrossFilter [GOOD] >> KqpRboPg::Bench_JoinFilter >> TMiniKQLEngineFlatTest::TestAbort [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail1 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 >> TMiniKQLEngineFlatTest::TestCASBoth2Fail2 [GOOD] >> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 >> TMiniKQLEngineFlatTest::TestCASBoth2Fail12 [GOOD] >> TMiniKQLEngineFlatTest::TestBug998 [GOOD] >> TMiniKQLEngineFlatTest::TestAcquireLocks >> TMiniKQLEngineFlatTest::TestAcquireLocks [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers >> TMiniKQLEngineFlatTest::NoMapPushdownMultipleConsumers [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownNonPureLambda [GOOD] >> 
TMiniKQLEngineFlatTest::NoOrderedMapPushdown |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestTopSortNonImmediatePushdown [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLProgramBuilderTest::TestDiagnostics [GOOD] |89.2%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::NoOrderedMapPushdown [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExists [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey >> TMiniKQLEngineFlatTest::NoMapPushdownWriteToTable [GOOD] >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeFullWithoutColumnsNotExistsNullKey [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItems [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull >> YdbProxy::StaticCreds [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByItemsFromNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeFullExistsTruncatedByBytes [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNullNull >> TMiniKQLEngineFlatTest::TestSelectRangeNullNull [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive >> YdbProxy::OAuthToken [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeToExclusive [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoShards >> TMiniKQLEngineFlatTest::TestSelectRangeNoShards [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitions [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems1 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 >> YdbProxy::DropTopic [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems2 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::NoMapPushdownArgClosure [GOOD] |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> YdbProxy::DescribeConsumer [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeReverseWithPartitionsTruncatedByItems3 [GOOD] >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] >> YdbProxy::DescribeTopic [GOOD] >> YdbProxy::CopyTables [GOOD] >> YdbProxy::AlterTopic >> YdbProxy::DescribeTable [GOOD] >> YdbProxy::CreateCdcStream [GOOD] |89.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneDisconnectedRingGroup >> TPopulatorTest::Boot >> TPopulatorTest::RemoveDir >> TPopulatorQuorumTest::OneWriteOnlyRingGroup >> TPopulatorQuorumTest::OneRingGroup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::StaticCreds [GOOD] Test command err: 2026-01-04T02:37:05.703909Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329895659821008:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:05.704068Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:06.217933Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:06.247972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:06.248074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:06.259000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:06.419096Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:06.422032Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329895659820981:2081] 1767494225701108 != 1767494225701111 2026-01-04T02:37:06.493936Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:06.713012Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:07.091998Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:07.108361Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:07.108382Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:07.108394Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:07.108474Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:07.516659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:07.611006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:37:07.695700Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329904249756463:2524] txid# 281474976710660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 
2026-01-04T02:37:09.754016Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329910783287103:2187];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:09.754466Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:09.764309Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:37:09.847808Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:09.850154Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329910783286946:2081] 1767494229736754 != 1767494229736757 2026-01-04T02:37:09.875036Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:09.875118Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:09.889363Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:09.896532Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:10.134457Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:10.134480Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:10.134487Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:10.134557Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:10.499543Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:10.511981Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:37:10.590784Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:37:10.753939Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:10.914616Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 |89.3%| [TM] {BAZEL_UPLOAD} 
ydb/core/tx/replication/ydb_proxy/ut/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/engine/ut/unittest >> TMiniKQLEngineFlatTest::TestSelectRangeNoColumns [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::OAuthToken [GOOD] Test command err: 2026-01-04T02:37:05.665183Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329892309996393:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:05.677634Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:06.141945Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:06.282294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:06.282374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:06.290972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:06.381877Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:06.413626Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:06.493104Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.108836s 2026-01-04T02:37:06.493178Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.108929s 2026-01-04T02:37:06.680473Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:06.793887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:07.108473Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:07.108508Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:07.108515Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:07.108634Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:07.476920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:07.568253Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:37:09.884630Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329911766671013:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:09.886640Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:09.906266Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:09.997935Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:10.015548Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:10.015624Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:10.045788Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:10.192363Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:10.283624Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:10.283655Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:10.283662Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:10.283750Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:10.593385Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:10.600217Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:37:10.638497Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 >> YdbIndexTable::MultiShardTableOneIndexDataColumn [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap >> ConvertUnboxedValueToArrowAndBack::DictUtf8ToInterval [GOOD] >> ConvertUnboxedValueToArrowAndBack::DoubleOptionalVariantOverStruct [GOOD] >> ConvertUnboxedValueToArrowAndBack::DoubleOptionalVariantOverTupleWithOptionals [GOOD] >> ConvertUnboxedValueToArrowAndBack::DictOptionalToTuple >> TPopulatorQuorumTest::OneWriteOnlyRingGroup [GOOD] >> TPopulatorQuorumTest::OneDisconnectedRingGroup [GOOD] |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest |89.3%| 
[TM] {BAZEL_UPLOAD} ydb/core/engine/ut/unittest >> TPopulatorQuorumTest::OneRingGroup [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DropTopic [GOOD] Test command err: 2026-01-04T02:37:05.665343Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329893013966310:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:05.677210Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:06.267975Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:06.277093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:06.277173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:06.281402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:06.412708Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:06.413987Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329893013966203:2081] 1767494225634954 != 1767494225634957 2026-01-04T02:37:06.446006Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:06.679558Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:06.845969Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:07.107919Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:07.107955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:07.107962Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:07.108041Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:07.450380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:07.469132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:37:09.745973Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329911428302443:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:09.749528Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:09.784609Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:09.872124Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:09.878050Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329911428302389:2081] 1767494229722613 != 1767494229722616 2026-01-04T02:37:09.888713Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:09.888787Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:09.915390Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:10.004844Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:10.216767Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:10.216790Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:10.216797Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:10.216866Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:10.581226Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:10.588195Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:37:10.747385Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-04T02:37:10.756708Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:10.819876Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 
Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-04T02:37:10.819908Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-04T02:37:10.821120Z node 2 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2026-01-04T02:37:10.821163Z node 2 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2026-01-04T02:37:10.840600Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329915723270676:2589] txid# 281474976715660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeConsumer [GOOD] Test command err: 2026-01-04T02:37:05.665198Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329893736325125:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:05.676304Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:06.349636Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:06.420756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:06.420865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:06.425893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:06.549969Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329893736325018:2081] 1767494225636365 != 1767494225636368 2026-01-04T02:37:06.582976Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:06.655993Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:06.679304Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:07.110176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:07.110204Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:07.110220Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:07.110339Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:07.447701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:07.550183Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329902326260457:2493] txid# 281474976715658, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } 2026-01-04T02:37:09.769535Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329910417051708:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:09.769906Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:09.803771Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:09.945204Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:09.945323Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:09.958475Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:09.966703Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:09.968309Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:10.347315Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:10.347340Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:10.347496Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:10.347598Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:10.390534Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:10.695835Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:10.702286Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:37:10.785979Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TPopulatorQuorumTest::TwoRingGroups >> 
ConvertUnboxedValueToArrowAndBack::DictOptionalToTuple [GOOD] |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTopic [GOOD] Test command err: 2026-01-04T02:37:05.664026Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329895438274867:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:05.676317Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:06.225903Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:06.246812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:06.246906Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:06.259535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:06.398413Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:06.495674Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.159969s 2026-01-04T02:37:06.495752Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.160064s 2026-01-04T02:37:06.501690Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:06.678051Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:06.863415Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:07.107886Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:07.107906Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:07.107914Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:07.107997Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:07.587339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:37:09.439243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:09.582672Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-04T02:37:09.589554Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329912618144976:2599] txid# 281474976715660, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2026-01-04T02:37:10.449782Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329916419114042:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:10.454264Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:10.494099Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:10.560141Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:10.599004Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:10.599090Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:10.604466Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:10.761963Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:10.818411Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:10.818433Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:10.818439Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:10.818508Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:11.079654Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) >> TPopulatorTest::Boot [GOOD] |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneWriteOnlyRingGroup [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0], [1:3298559222387:0], 
[1:4398070850163:0], [1:5497582477939:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:15:2062], service: [1:5497582477939:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:13:2060], service: [1:3298559222387:0] actor: [1:5:2052], service: [1:1099535966835:0] actor: [1:14:2061], service: [1:4398070850163:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2026-01-04T02:37:14.520872Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:28:2075] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:19:2066] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2026-01-04T02:37:14.530755Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:28:2075] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:26:2073], cookie# 12345, event size# 36, preserialized size# 0 2026-01-04T02:37:14.530845Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:28:2075] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... waiting for updates from replica populators 2026-01-04T02:37:14.533829Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:29:2076] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2026-01-04T02:37:14.533894Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:29:2076] Successful handshake: replica# [1:4:2051] 2026-01-04T02:37:14.533947Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:29:2076] Start full sync: replica# [1:4:2051] 2026-01-04T02:37:14.534034Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:30:2077] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2026-01-04T02:37:14.534062Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:30:2077] Successful handshake: replica# [1:5:2052] 2026-01-04T02:37:14.534089Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:30:2077] Start full sync: replica# [1:5:2052] 2026-01-04T02:37:14.534167Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:31:2078] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2026-01-04T02:37:14.534193Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:31:2078] Successful handshake: replica# [1:6:2053] 2026-01-04T02:37:14.534215Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:31:2078] Start full sync: replica# [1:6:2053] 2026-01-04T02:37:14.534252Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:32:2079] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:13:2060] 2026-01-04T02:37:14.534276Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:32:2079] Successful handshake: replica# [1:13:2060] 2026-01-04T02:37:14.534314Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:32:2079] Start full sync: replica# [1:13:2060] 2026-01-04T02:37:14.534358Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:33:2080] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:14:2061] 2026-01-04T02:37:14.534390Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:33:2080] Successful handshake: replica# [1:14:2061] 2026-01-04T02:37:14.534414Z node 1 
:SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:33:2080] Start full sync: replica# [1:14:2061] 2026-01-04T02:37:14.534453Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:34:2081] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:15:2062] 2026-01-04T02:37:14.534473Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:34:2081] Successful handshake: replica# [1:15:2062] 2026-01-04T02:37:14.534488Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:34:2081] Start full sync: replica# [1:15:2062] 2026-01-04T02:37:14.534537Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-01-04T02:37:14.534608Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:29:2076] 2026-01-04T02:37:14.534668Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-04T02:37:14.535410Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-04T02:37:14.535535Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-01-04T02:37:14.535601Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2026-01-04T02:37:14.535673Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-04T02:37:14.535724Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:30:2077] 2026-01-04T02:37:14.535778Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-04T02:37:14.535831Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-01-04T02:37:14.535878Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-04T02:37:14.535961Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:31:2078] 2026-01-04T02:37:14.536011Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2026-01-04T02:37:14.536060Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2026-01-04T02:37:14.536088Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-04T02:37:14.536122Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-04T02:37:14.536167Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2026-01-04T02:37:14.536205Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:32:2079] 2026-01-04T02:37:14.536256Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2026-01-04T02:37:14.536294Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-04T02:37:14.536328Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-04T02:37:14.536447Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:13:2060], cookie# 0 2026-01-04T02:37:14.536491Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:33:2080] 2026-01-04T02:37:14.536531Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2026-01-04T02:37:14.536559Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-04T02:37:14.536607Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle 
NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-04T02:37:14.536682Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:34:2081] 2026-01-04T02:37:14.536740Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:14:2061], cookie# 0 2026-01-04T02:37:14.536833Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-04T02:37:14.536922Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-01-04T02:37:14.536978Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-04T02:37:14.537055Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:15:2062], cookie# 0 2026-01-04T02:37:14.537117Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 0 2026-01-04T02:37:14.537155Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:29:2076], cookie# 0 2026-01-04T02:37:14.537200Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:29:2076] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-04T02:37:14.537293Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-01-04T02:37:14.537343Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-04T02:37:14.537472Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-01-04T02:37:14.537530Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:30:2077] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] 2026-01-04T02:37:14.537579Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-04T02:37:14.537630Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 0 2026-01-04T02:37:14.537658Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:30:2077], cookie# 0 2026-01-04T02:37:14.537687Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:31:2078] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-04T02:37:14.537762Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2026-01-04T02:37:14.537814Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-04T02:37:14.537872Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:31:2078], cookie# 0 2026-01-04T02:37:14.537904Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:31:2078], cookie# 0 2026-01-04T02:37:14.537943Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:32:2079] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:13:2060] ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-04T02:37:14.538027Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2026-01-04T02:37:14.538071Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-04T02:37:14.538120Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:32:2079], cookie# 0 2026-01-04T02:37:14.538142Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:32:2079], cookie# 0 2026-01-04T02:37:14.538184Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:33:2080] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:14:2061] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-04T02:37:14.538256Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2026-01-04T02:37:14.538294Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-04T02:37:14.538344Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:33:2080], cookie# 0 2026-01-04T02:37:14.538382Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:33:2080], cookie# 0 2026-01-04T02:37:14.538428Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:34:2081] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:15:2062] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-04T02:37:14.538498Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:34:2081], cookie# 0 2026-01-04T02:37:14.538532Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:34:2081], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:29:2076], replica: [1:24339059:0] populator: [1:33:2080], replica: [1:4398070850163:0] populator: [1:30:2077], replica: [1:1099535966835:0] populator: [1:34:2081], replica: [1:5497582477939:0] populator: [1:31:2078], replica: [1:2199047594611:0] populator: [1:32:2079], replica: [1:3298559222387:0] 2026-01-04T02:37:14.538750Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 12345 2026-01-04T02:37:14.550003Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 12345 2026-01-04T02:37:14.550066Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:28:2075] Ack update: ack to# [1:26:2073], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 >> TPopulatorQuorumTest::TwoRingGroups [GOOD] >> TPopulatorTest::RemoveDir [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneDisconnectedRingGroup [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:5:2052], service: [1:1099535966835:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2026-01-04T02:37:14.520860Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:28:2075] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:19:2066] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2026-01-04T02:37:14.530757Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:28:2075] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:26:2073], cookie# 12345, event size# 36, preserialized size# 0 2026-01-04T02:37:14.530844Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:28:2075] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... 
waiting for updates from replica populators 2026-01-04T02:37:14.533718Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:29:2076] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2026-01-04T02:37:14.533776Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:29:2076] Successful handshake: replica# [1:4:2051] 2026-01-04T02:37:14.533855Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:29:2076] Start full sync: replica# [1:4:2051] 2026-01-04T02:37:14.533931Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:30:2077] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2026-01-04T02:37:14.533958Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:30:2077] Successful handshake: replica# [1:5:2052] 2026-01-04T02:37:14.533982Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:30:2077] Start full sync: replica# [1:5:2052] 2026-01-04T02:37:14.534021Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:31:2078] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2026-01-04T02:37:14.534047Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:31:2078] Successful handshake: replica# [1:6:2053] 2026-01-04T02:37:14.534071Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:31:2078] Start full sync: replica# [1:6:2053] 2026-01-04T02:37:14.534145Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-01-04T02:37:14.534238Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:29:2076] 2026-01-04T02:37:14.534329Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-04T02:37:14.535423Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-04T02:37:14.535524Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-01-04T02:37:14.535593Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2026-01-04T02:37:14.535663Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-04T02:37:14.535758Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:30:2077] 
2026-01-04T02:37:14.535810Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-04T02:37:14.535918Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-01-04T02:37:14.535984Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-04T02:37:14.536036Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:31:2078] 2026-01-04T02:37:14.536090Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2026-01-04T02:37:14.536147Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-01-04T02:37:14.536188Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-04T02:37:14.536227Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-04T02:37:14.536296Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:29:2076] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] 2026-01-04T02:37:14.536354Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2026-01-04T02:37:14.536414Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 0 2026-01-04T02:37:14.536453Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:29:2076], cookie# 0 ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-04T02:37:14.536533Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-01-04T02:37:14.536571Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-04T02:37:14.536644Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-01-04T02:37:14.536672Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:30:2077] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] 2026-01-04T02:37:14.536706Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-04T02:37:14.536745Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 0 2026-01-04T02:37:14.536769Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:30:2077], cookie# 0 2026-01-04T02:37:14.536869Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:31:2078] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-04T02:37:14.536941Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:31:2078], cookie# 0 2026-01-04T02:37:14.536966Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:31:2078], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:29:2076], replica: [1:24339059:0] populator: [1:30:2077], replica: [1:1099535966835:0] populator: [1:31:2078], replica: [1:2199047594611:0] 2026-01-04T02:37:14.537132Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 12345 2026-01-04T02:37:14.547534Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 12345 2026-01-04T02:37:14.547601Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:28:2075] Ack update: ack to# [1:26:2073], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest |89.3%| [TA] $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::CreateCdcStream [GOOD] Test command err: 2026-01-04T02:37:05.664318Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329892534043229:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:05.675967Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:06.193939Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:06.213343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:06.213455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:06.216549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:06.381344Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:06.383573Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:06.385120Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329892534043187:2081] 1767494225633758 != 1767494225633761 2026-01-04T02:37:06.493425Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.134360s 2026-01-04T02:37:06.493512Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.134465s 2026-01-04T02:37:06.679006Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected 
event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:06.934944Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:07.107858Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:07.107880Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:07.107887Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:07.108054Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:07.561745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:09.168356Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329909713913267:2504] txid# 281474976710658, issues: { message: "Column key has wrong key type Float" severity: 1 } 2026-01-04T02:37:09.183641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:09.359658Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329909713913352:2563] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeTable, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:37:10.137906Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:37:10.139223Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329915082103417:2266];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:10.139292Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:10.273896Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:10.299384Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:10.300532Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329915082103167:2081] 1767494230092477 != 1767494230092480 2026-01-04T02:37:10.312597Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:37:10.312671Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:10.316321Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:10.571781Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:10.578630Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:10.578662Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:10.578672Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:10.578756Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:10.911418Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:11.112383Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:13.346143Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:13.452304Z node 2 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037888:1][2:7591329927967006120:2333] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:40:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-01-04T02:37:13.476472Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329927967006171:2641] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/table/updates\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeCdcStream, state: EPathStateNoChanges)" severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::OneRingGroup [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:5:2052], service: [1:1099535966835:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2026-01-04T02:37:14.537954Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:19:2066] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:10:2057] ... 
waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2026-01-04T02:37:14.545309Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:19:2066] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:17:2064], cookie# 12345, event size# 36, preserialized size# 0 2026-01-04T02:37:14.545413Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:19:2066] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... waiting for updates from replica populators 2026-01-04T02:37:14.547435Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:20:2067] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2026-01-04T02:37:14.547507Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:20:2067] Successful handshake: replica# [1:4:2051] 2026-01-04T02:37:14.547561Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:20:2067] Start full sync: replica# [1:4:2051] 2026-01-04T02:37:14.547646Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:21:2068] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2026-01-04T02:37:14.547676Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:21:2068] Successful handshake: replica# [1:5:2052] 2026-01-04T02:37:14.547704Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:21:2068] Start full sync: replica# [1:5:2052] 2026-01-04T02:37:14.547752Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:22:2069] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2026-01-04T02:37:14.547776Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:22:2069] Successful handshake: replica# [1:6:2053] 2026-01-04T02:37:14.547798Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:22:2069] Start full sync: replica# [1:6:2053] 2026-01-04T02:37:14.547878Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:20:2067] 2026-01-04T02:37:14.547988Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:20:2067] 2026-01-04T02:37:14.548078Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:20:2067] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:19:2066] 2026-01-04T02:37:14.548272Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:20:2067] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:19:2066], cookie# 0 2026-01-04T02:37:14.548373Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:21:2068] 2026-01-04T02:37:14.548439Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:20:2067] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 
100] Version: 0 }: sender# [1:4:2051], cookie# 0 2026-01-04T02:37:14.548518Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:21:2068] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:19:2066] 2026-01-04T02:37:14.548634Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:21:2068] 2026-01-04T02:37:14.548691Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:21:2068] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:19:2066], cookie# 0 2026-01-04T02:37:14.548758Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:22:2069] 2026-01-04T02:37:14.548819Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:22:2069] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:19:2066] 2026-01-04T02:37:14.548889Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:22:2069] 2026-01-04T02:37:14.548951Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:21:2068] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2026-01-04T02:37:14.549034Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:20:2067] 2026-01-04T02:37:14.549086Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:20:2067] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:19:2066] 2026-01-04T02:37:14.549136Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:22:2069] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:19:2066], cookie# 0 2026-01-04T02:37:14.549229Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:20:2067] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] 2026-01-04T02:37:14.549296Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:22:2069] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2026-01-04T02:37:14.549370Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:20:2067], cookie# 0 2026-01-04T02:37:14.549408Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:19:2066] Ack for unknown update (already acked?): sender# [1:20:2067], cookie# 0 ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-04T02:37:14.549508Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:21:2068] 2026-01-04T02:37:14.549561Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:21:2068] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:19:2066] 2026-01-04T02:37:14.549613Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:19:2066] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:22:2069] 2026-01-04T02:37:14.549673Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:21:2068] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] 2026-01-04T02:37:14.549718Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:22:2069] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:19:2066] 2026-01-04T02:37:14.549778Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:21:2068], cookie# 0 2026-01-04T02:37:14.549834Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:19:2066] Ack for unknown update (already acked?): sender# [1:21:2068], cookie# 0 2026-01-04T02:37:14.549919Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:22:2069] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-04T02:37:14.550001Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:22:2069], cookie# 0 2026-01-04T02:37:14.550026Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:19:2066] Ack for unknown update (already acked?): sender# [1:22:2069], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:21:2068], replica: [1:1099535966835:0] populator: [1:22:2069], replica: [1:2199047594611:0] populator: [1:20:2067], replica: [1:24339059:0] 2026-01-04T02:37:14.550186Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:20:2067], cookie# 12345 2026-01-04T02:37:14.560612Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:19:2066] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:21:2068], cookie# 12345 2026-01-04T02:37:14.560714Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:19:2066] Ack update: ack to# [1:17:2064], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::DescribeTable [GOOD] Test command err: 2026-01-04T02:37:05.665298Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329892491171656:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:05.676708Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:06.293989Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:06.314052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:06.314164Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:06.334538Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:06.444389Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:06.596248Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:06.676668Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:07.107671Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:07.107699Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:07.107712Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:07.107810Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:07.464164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:07.477972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:37:09.799384Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329912580971133:2208];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:09.799554Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:09.814664Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:37:09.932791Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:09.942000Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329912580970963:2081] 1767494229779733 != 1767494229779736 2026-01-04T02:37:09.958777Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:09.973798Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:09.973890Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:09.978271Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:10.115510Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:10.242419Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:10.242440Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:10.242447Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:10.242518Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:10.600281Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:10.811433Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:13.257680Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest |89.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest |89.3%| [TA] {RESULT} $(B)/ydb/core/engine/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::Boot [GOOD] Test command err: 2026-01-04T02:37:14.710055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:37:14.710150Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: 
Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 
5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... 
waiting for SysViewsRoster update finished (done) |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorQuorumTest::TwoRingGroups [GOOD] Test command err: replicas: [1:24339059:0], [1:1099535966835:0], [1:2199047594611:0], [1:3298559222387:0], [1:4398070850163:0], [1:5497582477939:0] replicaActorToServiceMap: actor: [1:6:2053], service: [1:2199047594611:0] actor: [1:15:2062], service: [1:5497582477939:0] actor: [1:4:2051], service: [1:24339059:0] actor: [1:13:2060], service: [1:3298559222387:0] actor: [1:5:2052], service: [1:1099535966835:0] actor: [1:14:2061], service: [1:4398070850163:0] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult 2026-01-04T02:37:15.156653Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:807: [1:28:2075] Handle NKikimr::TEvStateStorage::TEvListSchemeBoardResult: sender# [1:19:2066] ... waiting for NKikimr::TEvStateStorage::TEvListSchemeBoardResult (done) 2026-01-04T02:37:15.164062Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:28:2075] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/TestPath" PathId: 100 PathOwnerId: 72057594046678944 }: sender# [1:26:2073], cookie# 12345, event size# 36, preserialized size# 0 2026-01-04T02:37:15.164152Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:28:2075] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], cookie# 12345, is deletion# false, version: 0 ... waiting for updates from replica populators 2026-01-04T02:37:15.166512Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:29:2076] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:4:2051] 2026-01-04T02:37:15.166580Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:29:2076] Successful handshake: replica# [1:4:2051] 2026-01-04T02:37:15.166638Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:29:2076] Start full sync: replica# [1:4:2051] 2026-01-04T02:37:15.166730Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:30:2077] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:5:2052] 2026-01-04T02:37:15.166757Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:30:2077] Successful handshake: replica# [1:5:2052] 2026-01-04T02:37:15.166781Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:30:2077] Start full sync: replica# [1:5:2052] 2026-01-04T02:37:15.166830Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:31:2078] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:6:2053] 2026-01-04T02:37:15.166879Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:31:2078] Successful handshake: replica# [1:6:2053] 2026-01-04T02:37:15.166905Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:31:2078] Start full sync: replica# [1:6:2053] 2026-01-04T02:37:15.166948Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:32:2079] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:13:2060] 2026-01-04T02:37:15.166975Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:32:2079] Successful handshake: replica# [1:13:2060] 2026-01-04T02:37:15.167010Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:32:2079] Start full sync: replica# [1:13:2060] 
2026-01-04T02:37:15.167051Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:33:2080] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:14:2061] 2026-01-04T02:37:15.167082Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:33:2080] Successful handshake: replica# [1:14:2061] 2026-01-04T02:37:15.167105Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:33:2080] Start full sync: replica# [1:14:2061] 2026-01-04T02:37:15.167142Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:243: [1:34:2081] Handle NKikimrSchemeBoard.TEvHandshake { Owner: 72057594046678944 Generation: 0 }: sender# [1:15:2062] 2026-01-04T02:37:15.167175Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:255: [1:34:2081] Successful handshake: replica# [1:15:2062] 2026-01-04T02:37:15.167201Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:259: [1:34:2081] Start full sync: replica# [1:15:2062] 2026-01-04T02:37:15.167276Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-01-04T02:37:15.167368Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:29:2076] 2026-01-04T02:37:15.167440Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-04T02:37:15.167587Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-04T02:37:15.167739Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-01-04T02:37:15.167812Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:29:2076] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:4:2051], cookie# 0 2026-01-04T02:37:15.167900Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-04T02:37:15.168004Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:30:2077] 2026-01-04T02:37:15.168095Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-04T02:37:15.168184Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-01-04T02:37:15.168252Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: 
[1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-04T02:37:15.168346Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:31:2078] 2026-01-04T02:37:15.168427Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:30:2077] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:5:2052], cookie# 0 2026-01-04T02:37:15.168497Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2026-01-04T02:37:15.168547Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-04T02:37:15.168611Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-04T02:37:15.168696Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:31:2078] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:6:2053], cookie# 0 2026-01-04T02:37:15.168762Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:32:2079] 2026-01-04T02:37:15.168815Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2026-01-04T02:37:15.168881Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-04T02:37:15.168937Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-04T02:37:15.169015Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:32:2079] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:13:2060], cookie# 0 2026-01-04T02:37:15.169091Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:33:2080] 2026-01-04T02:37:15.169157Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: Replica: [1:5497582477939:0] }: sender# 
[1:34:2081] 2026-01-04T02:37:15.169216Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-04T02:37:15.169283Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/TestPath PathId: [OwnerId: 72057594046678944, LocalPathId: 100] PathVersion: 0 } }: sender# [1:28:2075] 2026-01-04T02:37:15.169428Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:638: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestUpdate { PathId: [OwnerId: 72057594046678944, LocalPathId: 100] }: sender# [1:34:2081] 2026-01-04T02:37:15.169495Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:33:2080] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:14:2061], cookie# 0 2026-01-04T02:37:15.169563Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 1 }: sender# [1:28:2075], cookie# 0 2026-01-04T02:37:15.169661Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:24339059:0] }: sender# [1:29:2076] 2026-01-04T02:37:15.169711Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:29:2076] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-04T02:37:15.169772Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:34:2081] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:15:2062], cookie# 0 2026-01-04T02:37:15.169860Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 0 2026-01-04T02:37:15.169921Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:29:2076], cookie# 0 2026-01-04T02:37:15.169978Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:29:2076] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:4:2051] ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-04T02:37:15.170100Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:1099535966835:0] }: sender# [1:30:2077] 2026-01-04T02:37:15.170168Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:30:2077] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-04T02:37:15.170275Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:2199047594611:0] }: sender# [1:31:2078] 2026-01-04T02:37:15.170325Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:30:2077] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:5:2052] 2026-01-04T02:37:15.170379Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:31:2078] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-04T02:37:15.170444Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 0 2026-01-04T02:37:15.170478Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:30:2077], cookie# 0 2026-01-04T02:37:15.170512Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:31:2078] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:6:2053] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-04T02:37:15.170587Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:3298559222387:0] }: sender# [1:32:2079] 2026-01-04T02:37:15.170635Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:32:2079] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-04T02:37:15.170689Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:31:2078], cookie# 0 2026-01-04T02:37:15.170715Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:31:2078], cookie# 0 2026-01-04T02:37:15.170775Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:32:2079] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:13:2060] ... 
blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-04T02:37:15.170855Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:4398070850163:0] }: sender# [1:33:2080] 2026-01-04T02:37:15.170904Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:33:2080] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-04T02:37:15.170965Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:32:2079], cookie# 0 2026-01-04T02:37:15.170992Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:32:2079], cookie# 0 2026-01-04T02:37:15.171042Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:33:2080] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:14:2061] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-04T02:37:15.171110Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:28:2075] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 101] Replica: [1:5497582477939:0] }: sender# [1:34:2081] 2026-01-04T02:37:15.171160Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:34:2081] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:28:2075] 2026-01-04T02:37:15.171211Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:33:2080], cookie# 0 2026-01-04T02:37:15.171247Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:33:2080], cookie# 0 2026-01-04T02:37:15.171307Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:34:2081] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 1 }: sender# [1:15:2062] ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 2026-01-04T02:37:15.171371Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:34:2081], cookie# 0 2026-01-04T02:37:15.171399Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:28:2075] Ack for unknown update (already acked?): sender# [1:34:2081], cookie# 0 ... blocking NKikimr::NSchemeBoard::NSchemeshardEvents::TEvUpdateAck from SCHEME_BOARD_REPLICA_POPULATOR_ACTOR to SCHEME_BOARD_POPULATOR_ACTOR cookie 12345 ... 
waiting for updates from replica populators (done) populatorToReplicaMap: populator: [1:29:2076], replica: [1:24339059:0] populator: [1:33:2080], replica: [1:4398070850163:0] populator: [1:30:2077], replica: [1:1099535966835:0] populator: [1:34:2081], replica: [1:5497582477939:0] populator: [1:31:2078], replica: [1:2199047594611:0] populator: [1:32:2079], replica: [1:3298559222387:0] 2026-01-04T02:37:15.171614Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:29:2076], cookie# 12345 2026-01-04T02:37:15.171689Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:30:2077], cookie# 12345 2026-01-04T02:37:15.171731Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:32:2079], cookie# 12345 2026-01-04T02:37:15.183082Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:28:2075] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 1 PathId: [OwnerId: 72057594046678944, LocalPathId: 100] Version: 0 }: sender# [1:33:2080], cookie# 12345 2026-01-04T02:37:15.183146Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:28:2075] Ack update: ack to# [1:26:2073], cookie# 12345, pathId# [OwnerId: 72057594046678944, LocalPathId: 100], version# 0 >> TPopulatorTestWithResets::UpdateAck |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::RemoveDir [GOOD] Test command err: 2026-01-04T02:37:14.703587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:37:14.703673Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... ner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2026-01-04T02:37:15.175027Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 8 }: sender# [1:97:2125], cookie# 101 2026-01-04T02:37:15.175082Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 8 }: sender# [1:98:2126], cookie# 101 2026-01-04T02:37:15.175108Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 8 2026-01-04T02:37:15.175171Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:12:2059], cookie# 101 2026-01-04T02:37:15.175246Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:15:2062], cookie# 101 2026-01-04T02:37:15.175300Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:18:2065], cookie# 101 2026-01-04T02:37:15.175653Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 8 }: sender# [1:99:2127], cookie# 101 2026-01-04T02:37:15.175763Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:97:2125], cookie# 101 2026-01-04T02:37:15.176129Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] 
Version: 3 }: sender# [1:98:2126], cookie# 101 2026-01-04T02:37:15.176164Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 3 2026-01-04T02:37:15.176383Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:99:2127], cookie# 101 2026-01-04T02:37:15.176417Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:96:2124] Ack for unknown update (already acked?): sender# [1:99:2127], cookie# 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 2026-01-04T02:37:15.178173Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:96:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 101, event size# 447, preserialized size# 65 2026-01-04T02:37:15.178237Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:96:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 101, is deletion# false, version: 9 2026-01-04T02:37:15.178358Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2026-01-04T02:37:15.178399Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2026-01-04T02:37:15.178431Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { 
Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 FAKE_COORDINATOR: Erasing txId 101 2026-01-04T02:37:15.178668Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:96:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/Root/DirB\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNotExist), drop stepId: 5000038, drop txId: 101" Path: "/Root/DirB" PathId: 38 LastExistedPrefixPath: "/Root" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 101, event size# 307, preserialized size# 0 2026-01-04T02:37:15.178722Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:96:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], cookie# 101, is deletion# true, version: 0 2026-01-04T02:37:15.178812Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:12:2059], cookie# 101 2026-01-04T02:37:15.178865Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:15:2062], cookie# 101 2026-01-04T02:37:15.178900Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:18:2065], cookie# 101 2026-01-04T02:37:15.178980Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:97:2125], cookie# 101 2026-01-04T02:37:15.179016Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2026-01-04T02:37:15.179050Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2026-01-04T02:37:15.179098Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 101 2026-01-04T02:37:15.179156Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:98:2126], cookie# 101 2026-01-04T02:37:15.179186Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 9 2026-01-04T02:37:15.179553Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: 
populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:18:2065], cookie# 101 2026-01-04T02:37:15.179713Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 9 }: sender# [1:99:2127], cookie# 101 2026-01-04T02:37:15.179748Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:12:2059], cookie# 101 2026-01-04T02:37:15.179789Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:15:2062], cookie# 101 2026-01-04T02:37:15.179876Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:99:2127], cookie# 101 2026-01-04T02:37:15.179956Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:97:2125], cookie# 101 2026-01-04T02:37:15.179995Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 101, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 18446744073709551615 2026-01-04T02:37:15.180280Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 18446744073709551615 }: sender# [1:98:2126], cookie# 101 2026-01-04T02:37:15.180310Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:96:2124] Ack for unknown update (already acked?): sender# [1:98:2126], cookie# 101 TestModificationResult got TxId: 101, wait until txId: 101 |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::MakeDir >> TPopulatorTestWithResets::UpdateAck [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> ConvertUnboxedValueToArrowAndBack::DictOptionalToTuple [GOOD] Test command err: Trying to start YDB, gRPC: 1132, MsgBus: 12969 2026-01-04T02:36:49.506854Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329825151890941:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:49.506907Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:49.750669Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution 
tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:49.794589Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:49.794680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:49.875988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:49.913921Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329825151890723:2081] 1767494209491025 != 1767494209491028 2026-01-04T02:36:49.917681Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:50.001355Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:50.026535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:50.026561Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:50.026569Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:50.026659Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:50.508160Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:50.510317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:50.589369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.786205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:50.969478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:51.047007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.123612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329842331761780:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:53.123716Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:53.124198Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329842331761790:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:53.124248Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:53.433902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.476592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.512827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.547530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.585170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.622740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.661098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.713468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:53.797938Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329842331762665:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:53.798045Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:53.798357Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329842331762670:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:53.798400Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329842331762671:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:53.798705Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:53.801718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:53.813399Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329842331762674:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:36:53.911662Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329842331762725:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:54.507369Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329825151890941:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:54.507465Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17432, MsgBus: 25175 2026-01-04T02:36:56.537766Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329857109674265:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:56.538499Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_sn ... onnected -> Connecting 2026-01-04T02:37:03.765914Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:03.766326Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:03.766398Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:03.780729Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:03.784348Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-04T02:37:03.786073Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:03.894454Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:03.894479Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:03.894486Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:03.894571Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:03.902967Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:04.313706Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:04.400188Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:04.431983Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:04.522054Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:04.536109Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:04.576147Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:04.701409Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:04.804838Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:07.193985Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329904276178579:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:07.194097Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:07.194530Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329904276178589:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:07.194582Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:07.295956Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:07.397861Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:07.477060Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:07.563176Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:07.640551Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:07.723423Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:07.803013Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:07.889434Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:08.044243Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329908571147080:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:08.044319Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:08.044566Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329908571147085:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:08.044604Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329908571147086:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:08.044644Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:08.048566Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:37:08.075970Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591329908571147089:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:37:08.135382Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591329908571147162:4925] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:37:08.513561Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591329887096306982:2090];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:08.513627Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:37:08.564005Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591329886327173782:2158];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:08.564085Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> IncrementalRestoreScan::ChangeSenderSimple |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest >> IncrementalRestoreScan::Empty |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> TPopulatorTest::MakeDir [GOOD] |89.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTestWithResets::UpdateAck [GOOD] Test command err: 2026-01-04T02:37:16.328859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:37:16.328945Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... : populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 33] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2026-01-04T02:37:16.755052Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/resource_pools PathId: [OwnerId: 72057594046678944, LocalPathId: 33] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-04T02:37:16.755138Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 33] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-04T02:37:16.755204Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/resource_pools PathId: [OwnerId: 72057594046678944, LocalPathId: 33] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-04T02:37:16.755295Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 35] Replica: [1:24339059:0] }: sender# [1:97:2125] 2026-01-04T02:37:16.755341Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/ds_storage_stats PathId: [OwnerId: 72057594046678944, LocalPathId: 35] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-04T02:37:16.755416Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 34] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2026-01-04T02:37:16.755460Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/query_metrics_one_minute PathId: [OwnerId: 72057594046678944, LocalPathId: 34] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-04T02:37:16.755547Z node 1 :SCHEME_BOARD_POPULATOR 
DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 34] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-04T02:37:16.755593Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/query_metrics_one_minute PathId: [OwnerId: 72057594046678944, LocalPathId: 34] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-04T02:37:16.755664Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 36] Replica: [1:24339059:0] }: sender# [1:97:2125] 2026-01-04T02:37:16.755712Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_partitions_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 36] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-04T02:37:16.755801Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 35] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2026-01-04T02:37:16.755846Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/ds_storage_stats PathId: [OwnerId: 72057594046678944, LocalPathId: 35] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-04T02:37:16.755901Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 35] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-04T02:37:16.755947Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/ds_storage_stats PathId: [OwnerId: 72057594046678944, LocalPathId: 35] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-04T02:37:16.756039Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 37] Replica: [1:24339059:0] }: sender# [1:97:2125] 2026-01-04T02:37:16.756077Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_queries_by_duration_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 37] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-04T02:37:16.756148Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 36] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2026-01-04T02:37:16.756189Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: 
/Root/.sys/top_partitions_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 36] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-04T02:37:16.756218Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 36] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-04T02:37:16.756253Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_partitions_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 36] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-04T02:37:16.756318Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Replica: [1:24339059:0] }: sender# [1:97:2125] 2026-01-04T02:37:16.756347Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 38] PathVersion: 3 } }: sender# [1:96:2124] 2026-01-04T02:37:16.756401Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 37] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2026-01-04T02:37:16.756432Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_queries_by_duration_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 37] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-04T02:37:16.756515Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 37] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-04T02:37:16.756543Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/.sys/top_queries_by_duration_one_hour PathId: [OwnerId: 72057594046678944, LocalPathId: 37] PathVersion: 2 } }: sender# [1:96:2124] 2026-01-04T02:37:16.756627Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Replica: [1:24339059:0] }: sender# [1:97:2125] 2026-01-04T02:37:16.756675Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2026-01-04T02:37:16.756718Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:97:2125] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:96:2124] 2026-01-04T02:37:16.756751Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { 
Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 38] PathVersion: 3 } }: sender# [1:96:2124] 2026-01-04T02:37:16.756801Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-04T02:37:16.756837Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:97:2125] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:12:2059] 2026-01-04T02:37:16.756872Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: false DeletedPathBegin: 0 DeletedPathEnd: 0 { Path: /Root/DirC PathId: [OwnerId: 72057594046678944, LocalPathId: 38] PathVersion: 3 } }: sender# [1:96:2124] 2026-01-04T02:37:16.756954Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Replica: [1:1099535966835:0] }: sender# [1:98:2126] 2026-01-04T02:37:16.756990Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:543: [1:96:2124] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvRequestDescribe { PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Replica: [1:2199047594611:0] }: sender# [1:99:2127] 2026-01-04T02:37:16.757024Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:98:2126] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:96:2124] 2026-01-04T02:37:16.757060Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:275: [1:99:2127] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvDescribeResult { Commit: true DeletedPathBegin: 0 DeletedPathEnd: 0 }: sender# [1:96:2124] 2026-01-04T02:37:16.757112Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:98:2126] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:15:2062] 2026-01-04T02:37:16.757135Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:309: [1:99:2127] Handle NKikimrSchemeBoard.TEvCommitGeneration { Owner: 72057594046678944 Generation: 2 }: sender# [1:18:2065] TestWaitNotification: OK eventTxId 100 |89.3%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/backup/impl/ut_table_writer/unittest >> KqpRboPg::Bench_JoinFilter [GOOD] |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/backup/impl/ut_table_writer/unittest >> YdbProxy::AlterTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_populator/unittest >> TPopulatorTest::MakeDir [GOOD] Test command err: 2026-01-04T02:37:16.986275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:37:16.986333Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... ck to# [1:71:2111], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], version# 6 2026-01-04T02:37:17.369780Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:12:2059], cookie# 100 2026-01-04T02:37:17.369838Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:15:2062], cookie# 100 2026-01-04T02:37:17.369889Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:18:2065], cookie# 100 2026-01-04T02:37:17.370292Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 6 }: sender# [1:99:2127], cookie# 100 2026-01-04T02:37:17.370379Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:97:2125], cookie# 100 2026-01-04T02:37:17.370701Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:98:2126], cookie# 100 2026-01-04T02:37:17.370782Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 2 2026-01-04T02:37:17.370844Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 2 }: sender# [1:99:2127], cookie# 100 2026-01-04T02:37:17.370871Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:96:2124] Ack for unknown update (already acked?): sender# [1:99:2127], cookie# 100 FAKE_COORDINATOR: Add transaction: 100 
at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000037 FAKE_COORDINATOR: Erasing txId 100 2026-01-04T02:37:17.372813Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:96:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root" PathDescription { Self { Name: "Root" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/Root" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 100, event size# 555, preserialized size# 119 2026-01-04T02:37:17.372874Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:96:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 1], cookie# 100, is deletion# false, version: 7 2026-01-04T02:37:17.373012Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2026-01-04T02:37:17.373073Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2026-01-04T02:37:17.373112Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2026-01-04T02:37:17.373718Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:676: [1:96:2124] Handle TEvSchemeShard::TEvDescribeSchemeResult { Status: StatusSuccess Path: "/Root/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944 }: sender# [1:71:2111], cookie# 100, event size# 314, preserialized size# 2 2026-01-04T02:37:17.373777Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:96:2124] Update description: owner# 72057594046678944, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], cookie# 100, is deletion# false, version: 3 2026-01-04T02:37:17.373906Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:12:2059], cookie# 100 2026-01-04T02:37:17.373957Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:15:2062], cookie# 100 2026-01-04T02:37:17.373993Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:18:2065], cookie# 100 2026-01-04T02:37:17.374053Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:97:2125], cookie# 100 2026-01-04T02:37:17.374104Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2026-01-04T02:37:17.374143Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2026-01-04T02:37:17.374180Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:283: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdate { Owner: 72057594046678944 Generation: 2 }: sender# [1:96:2124], cookie# 100 2026-01-04T02:37:17.374313Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:98:2126], cookie# 100 2026-01-04T02:37:17.374345Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 100, pathId# [OwnerId: 
72057594046678944, LocalPathId: 1], version# 7 2026-01-04T02:37:17.374400Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:97:2125] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:12:2059], cookie# 100 2026-01-04T02:37:17.374450Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:98:2126] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:15:2062], cookie# 100 2026-01-04T02:37:17.374501Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:291: [1:99:2127] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:18:2065], cookie# 100 2026-01-04T02:37:17.374776Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 1] Version: 7 }: sender# [1:99:2127], cookie# 100 2026-01-04T02:37:17.374857Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:97:2125], cookie# 100 2026-01-04T02:37:17.375039Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:98:2126], cookie# 100 2026-01-04T02:37:17.375067Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:96:2124] Ack update: ack to# [1:71:2111], cookie# 100, pathId# [OwnerId: 72057594046678944, LocalPathId: 38], version# 3 2026-01-04T02:37:17.375274Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:758: [1:96:2124] Handle NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 38] Version: 3 }: sender# [1:99:2127], cookie# 100 2026-01-04T02:37:17.375304Z node 1 :SCHEME_BOARD_POPULATOR DEBUG: populator.cpp:764: [1:96:2124] Ack for unknown update (already acked?): sender# [1:99:2127], cookie# 100 TestModificationResult got TxId: 100, wait until txId: 100 >> TContinuousBackupTests::Basic >> TContinuousBackupTests::TakeIncrementalBackup |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_populator/unittest |89.4%| [TA] $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TA] {RESULT} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/backup/impl/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn >> TColumnShardTestSchema::HotTiers |89.4%| [TA] $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.4%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_populator/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::AlterTopic [GOOD] Test command err: 2026-01-04T02:37:05.677394Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329895144060610:2227];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:05.677569Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:06.402271Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:06.446484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:06.446601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:06.456996Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:06.473221Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:06.643468Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:06.682257Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:07.114067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:07.114096Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:07.114104Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:07.114212Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:07.498155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:07.506951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:37:09.323277Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329912323930498:2505] txid# 281474976715658, issues: { message: "Path does not exist" issue_code: 200200 severity: 1 } 2026-01-04T02:37:09.337940Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:10.192989Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329917672240798:2174];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:10.196862Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:10.237949Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:10.342072Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:10.357052Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:10.357151Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:10.360955Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:10.519955Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:10.626697Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:10.626729Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:10.626742Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:10.626847Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:10.978767Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:11.192895Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:13.247562Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:13.322709Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:14.096511Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591329934391631677:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:14.096610Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:14.115163Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:14.193722Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:14.227671Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:14.227767Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:14.230297Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:14.271711Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:14.431918Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:14.431959Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:14.431968Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:14.432055Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:14.752880Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:14.760235Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:37:14.888161Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:37:14.912818Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591329934391632615:2577] txid# 281474976710660, issues: { message: "Invalid retention period: specified: 31536000s, min: 1s, max: 2678400s" severity: 1 } >> TColumnShardTestSchema::RebootColdTiers |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> 
KqpRboPg::Bench_JoinFilter [GOOD] Test command err: Trying to start YDB, gRPC: 1057, MsgBus: 13738 2026-01-04T02:36:55.937022Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329850460706280:2172];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:55.937198Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:56.185899Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:56.216761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:56.216886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:56.243721Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:56.341571Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329850460706134:2081] 1767494215928718 != 1767494215928721 2026-01-04T02:36:56.344758Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:56.357677Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:56.414427Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:56.414453Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:56.414460Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:56.414573Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:56.849187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:56.862216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:36:56.938437Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:59.136048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329867640576203:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:59.136071Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329867640576214:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:59.136176Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:59.136515Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329867640576218:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:59.136596Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:59.140166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:59.155206Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329867640576217:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:36:59.276530Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329867640576270:2532] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 20396, MsgBus: 2077 2026-01-04T02:37:00.751400Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329870952864933:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:00.751448Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:00.779016Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:00.860787Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:00.889017Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:00.889104Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:00.904473Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:01.001945Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:01.025029Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:01.025044Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:01.025049Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:01.025128Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:01.466826Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:01.757963Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:04.106637Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329888132734948:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:04.106732Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:04.107238Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329888132734957:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:04.107297Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:04.331594Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:04.595436Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329888132735055:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:04.595521Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:04.595988Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329888132735060:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:04.596030Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329888132735061:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:04.596118Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:04.600386Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:37:04.616804Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ... ave access permissions } 2026-01-04T02:37:09.982479Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:09.982802Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329912498847107:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:09.982856Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:10.024489Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:10.092323Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:10.182669Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329916793814567:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:10.182753Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:10.183117Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329916793814572:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:10.183176Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329916793814573:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:10.183289Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:10.187198Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:37:10.200017Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591329916793814576:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-04T02:37:10.285880Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591329916793814627:2642] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 65281, MsgBus: 8237 2026-01-04T02:37:12.292771Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591329925083970590:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:12.292824Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:12.322282Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:12.398359Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:12.398433Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:12.422350Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:12.431719Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:12.524609Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:12.524649Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:12.524659Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:12.524744Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:12.591902Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:12.915975Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:13.297858Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:15.539901Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329937968873291:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:15.540036Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:15.540404Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329937968873301:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:15.540469Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:15.564784Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:15.603998Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:15.720736Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329937968873467:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:15.720816Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:15.720887Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329937968873472:2346], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:15.721171Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591329937968873474:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:15.721253Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:15.724695Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:37:15.733716Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591329937968873476:2348], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-04T02:37:15.793357Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591329937968873527:2639] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:37:17.293003Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591329925083970590:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:17.293117Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |89.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneColdTier >> TColumnShardTestSchema::CreateTable+Reboots+GenerateInternalPathId |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportWithLostAnswer >> MoveTable::WithData+Reboot >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn >> TColumnShardTestSchema::EnableColdTiersAfterTtl >> TColumnShardTestSchema::RebootHotTiersTtl >> TContinuousBackupTests::Basic [GOOD] >> TColumnShardTestSchema::RebootExportAfterFail >> IncrementalRestoreScan::Empty [GOOD] >> TColumnShardTestSchema::Drop-Reboots+GenerateInternalPathId >> MoveTable::WithData+Reboot [GOOD] >> TColumnShardTestSchema::ColdCompactionSmoke ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::Basic [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:37:18.824130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:37:18.829995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:37:18.830115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:37:18.830170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:37:18.830216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:37:18.830289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2026-01-04T02:37:18.835526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:37:18.835654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:37:18.836648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:37:18.838115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:37:18.959435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:37:18.959514Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:18.982184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:37:18.982691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:37:18.982845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:37:18.993432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:37:18.994184Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:37:18.995289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:37:18.997577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:37:19.016069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:19.016276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:37:19.021265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:37:19.021388Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:19.021477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:37:19.021547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:37:19.021593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:37:19.024772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:37:19.197522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.198582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.198735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.198807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.198899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.198965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.199031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.199121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.199202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.199299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.199384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.199469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.199570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.199643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.199737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
RD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000041 OrderId: 104 ExecLatency: 0 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 955 } } CommitVersion { Step: 5000041 TxId: 104 } 2026-01-04T02:37:20.598462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2026-01-04T02:37:20.598667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000041 OrderId: 104 ExecLatency: 0 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 955 } } CommitVersion { Step: 5000041 TxId: 104 } 2026-01-04T02:37:20.598793Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000041 OrderId: 104 ExecLatency: 0 ProposeLatency: 7 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 955 } } CommitVersion { Step: 5000041 TxId: 104 } debug: NTableState::TProposedWaitParts operationId# 104:0 FAKE_COORDINATOR: Erasing txId 104 2026-01-04T02:37:20.600985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 714 RawX2: 4294969996 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-01-04T02:37:20.601060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2026-01-04T02:37:20.601268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 714 RawX2: 4294969996 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-01-04T02:37:20.601364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:37:20.601498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 714 RawX2: 4294969996 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-01-04T02:37:20.601595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:37:20.601695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 
2026-01-04T02:37:20.601755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:37:20.601826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2026-01-04T02:37:20.606879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:37:20.607775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:37:20.607956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:37:20.607993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-04T02:37:20.608115Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-01-04T02:37:20.608143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-04T02:37:20.608203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 3/3 2026-01-04T02:37:20.608229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-04T02:37:20.608278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 3/3, is published: true 2026-01-04T02:37:20.608340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:743:2721] message: TxId: 104 2026-01-04T02:37:20.608407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 3/3 2026-01-04T02:37:20.608446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-04T02:37:20.608486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-04T02:37:20.608636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:37:20.608674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:1 2026-01-04T02:37:20.608696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:1 2026-01-04T02:37:20.608719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:37:20.608743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:2 2026-01-04T02:37:20.608763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:2 2026-01-04T02:37:20.608801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target 
path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:37:20.609337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:37:20.609385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-04T02:37:20.609457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:37:20.609504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-04T02:37:20.609538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:37:20.612011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:37:20.612067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1094:3007] 2026-01-04T02:37:20.612301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2026-01-04T02:37:20.612761Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:37:20.612958Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl" took 253us result status StatusPathDoesNotExist 2026-01-04T02:37:20.613102Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/Table/0_continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:37:20.613485Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 
2026-01-04T02:37:20.613644Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl/streamImpl" took 156us result status StatusPathDoesNotExist 2026-01-04T02:37:20.613743Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TColumnShardTestSchema::CreateTable+Reboots+GenerateInternalPathId [GOOD] >> TColumnShardTestSchema::OneColdTier >> MoveTable::EmptyTable >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithData+Reboot [GOOD] Test command err: 2026-01-04T02:37:20.300860Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:20.334892Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:20.335152Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:20.342539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:20.342813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:20.343048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:20.343188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:20.343302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:20.343402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:20.343527Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:20.343633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:20.343778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:20.343892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:20.344006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:20.344128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:20.344249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:20.374989Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:20.375141Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:20.375210Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:20.375398Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:20.375577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:20.375684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:20.375732Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:20.375827Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:20.375884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:20.375925Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:20.375990Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:20.376182Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:20.376250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:20.376293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:20.376320Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:20.376408Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:20.376455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:20.376508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:20.376560Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:20.376610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:20.376670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:20.376718Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:20.376765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:20.376800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:20.376829Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:20.377016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:20.377082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:20.377115Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:20.377310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:20.377359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:20.377389Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:20.377435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:20.377471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:20.377497Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:20.377554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:20.377614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:37:20.377643Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:37:20.377818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:37:20.377863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
=0;num_rows=100;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-04T02:37:21.511140Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:385:2385];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:378:2379];bytes=6350;rows=100;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-04T02:37:21.511418Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:385:2385];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:21.511598Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:385:2385];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:21.511801Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:385:2385];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:21.512113Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:385:2385];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:37:21.512321Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
log.cpp:841: SelfId=[1:385:2385];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:21.512535Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:385:2385];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:21.512846Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:385:2385] finished for tablet 9437184 2026-01-04T02:37:21.513487Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:385:2385];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:378:2379];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap"],"t":0.005},{"events":["f_processing","f_task_result"],"t":0.006},{"events":["f_ack","l_task_result"],"t":0.021},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.025}],"full":{"a":1703401,"name":"_full_task","f":1703401,"d_finished":0,"c":0,"l":1728763,"d":25362},"events":[{"name":"bootstrap","f":1703716,"d_finished":5440,"c":1,"l":1709156,"d":5440},{"a":1727920,"name":"ack","f":1724693,"d_finished":3016,"c":1,"l":1727709,"d":3859},{"a":1727905,"name":"processing","f":1709460,"d_finished":8052,"c":3,"l":1727714,"d":8910},{"name":"ProduceResults","f":1708112,"d_finished":4838,"c":6,"l":1728407,"d":4838},{"a":1728414,"name":"Finish","f":1728414,"d_finished":0,"c":0,"l":1728763,"d":349},{"name":"task_result","f":1709518,"d_finished":4923,"c":2,"l":1724533,"d":4923}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:21.513598Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:385:2385];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:378:2379];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:37:21.514179Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:385:2385];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:378:2379];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.004},{"events":["l_bootstrap"],"t":0.005},{"events":["f_processing","f_task_result"],"t":0.006},{"events":["f_ack","l_task_result"],"t":0.021},{"events":["l_ProduceResults","f_Finish"],"t":0.025},{"events":["l_ack","l_processing","l_Finish"],"t":0.026}],"full":{"a":1703401,"name":"_full_task","f":1703401,"d_finished":0,"c":0,"l":1729491,"d":26090},"events":[{"name":"bootstrap","f":1703716,"d_finished":5440,"c":1,"l":1709156,"d":5440},{"a":1727920,"name":"ack","f":1724693,"d_finished":3016,"c":1,"l":1727709,"d":4587},{"a":1727905,"name":"processing","f":1709460,"d_finished":8052,"c":3,"l":1727714,"d":9638},{"name":"ProduceResults","f":1708112,"d_finished":4838,"c":6,"l":1728407,"d":4838},{"a":1728414,"name":"Finish","f":1728414,"d_finished":0,"c":0,"l":1729491,"d":1077},{"name":"task_result","f":1709518,"d_finished":4923,"c":2,"l":1724533,"d":4923}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:21.514280Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:385:2385];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:37:21.412393Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-04T02:37:21.514329Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:385:2385];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:37:21.514536Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:385:2385];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2026-01-04T02:37:21.515435Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2026-01-04T02:37:21.516026Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 12 scanId: 0 version: {1767494241274:12} readable: {1767494241274:max} at tablet 9437184 2026-01-04T02:37:21.516191Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 12 scanId: 0 at tablet 9437184 2026-01-04T02:37:21.516253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:333:2342];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={1767494241274:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:37:21.516367Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:333:2342];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={1767494241274:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::Empty [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:37:20.144266Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:37:20.244320Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:37:20.244719Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:37:20.244763Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:37:20.505363Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:20.505511Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:20.566465Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494237358866 != 1767494237358869 2026-01-04T02:37:20.582691Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:20.635103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:20.738052Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:37:21.033475Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:182: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:833:2727] Exhausted 2026-01-04T02:37:21.033637Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:131: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:833:2727] Handle TEvIncrementalRestoreScan::TEvFinished NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvFinished 2026-01-04T02:37:21.033693Z node 1 :CHANGE_EXCHANGE DEBUG: incr_restore_scan.cpp:195: [TIncrementalRestoreScan][1337][OwnerId: 1, LocalPathId: 2][OwnerId: 3, LocalPathId: 4][1:833:2727] Finish Done |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable+Reboots+GenerateInternalPathId [GOOD] Test command err: 2026-01-04T02:37:20.077604Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:20.101822Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:20.102061Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:20.109673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:20.110710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 
2026-01-04T02:37:20.110948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:20.111092Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:20.111200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:20.111312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:20.111436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:20.111546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:20.111671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:20.111786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:20.111888Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:20.112011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:20.112119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:20.133531Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:20.133682Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:20.133754Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:20.133979Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:20.134159Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:20.134252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:20.134298Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:20.134413Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:20.134496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:20.134542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:20.134571Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:20.134734Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:20.134803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:20.134867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:20.134904Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:20.135010Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:20.135093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:20.135140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:20.135183Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:20.135244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:20.135286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:20.135313Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:20.135352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:20.135385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:20.135425Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:20.135655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:20.135736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:20.135834Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:20.135983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:20.136052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:20.136085Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:20.136135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:20.136175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:20.136203Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:20.136242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:20.136274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:37:20.136303Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:37:20.136437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:37:20.136488Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 19:TX_KIND_SCHEMA;min=1767494241163;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;fline=schema.h:38;event=sync_schema; 2026-01-04T02:37:21.594157Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767494241163;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136751308686528;op_tx=119:TX_KIND_SCHEMA;min=1767494241163;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767494241163;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136957470241344;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-04T02:37:21.594295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767494241163;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136751308686528;op_tx=119:TX_KIND_SCHEMA;min=1767494241163;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767494241163;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136957470241344;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-04T02:37:21.594362Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767494241163;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=136751308686528;op_tx=119:TX_KIND_SCHEMA;min=1767494241163;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767494241163;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=136957470241344;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=119; 2026-01-04T02:37:21.594716Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-04T02:37:21.594896Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767494241163 at tablet 9437184, mediator 0 2026-01-04T02:37:21.594965Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] execute at tablet 9437184 2026-01-04T02:37:21.595230Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-04T02:37:21.595270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-04T02:37:21.595363Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000202, ss: 20} ttl settings: { Version: 1 } at tablet 9437184 2026-01-04T02:37:21.595436Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000202; 2026-01-04T02:37:21.595506Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine.h:158;event=RegisterTable;path_id=1000000202; 2026-01-04T02:37:21.595769Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-04T02:37:21.596490Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000202; 2026-01-04T02:37:21.610627Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] complete at tablet 9437184 CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-04T02:37:21.611805Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=136751308689440;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=1767494241166;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-04T02:37:21.624072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1767494241166;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=136751308689440;op_tx=120:TX_KIND_SCHEMA;min=1767494241166;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-04T02:37:21.624131Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1767494241166;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=136751308689440;op_tx=120:TX_KIND_SCHEMA;min=1767494241166;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type 
NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-04T02:37:21.625267Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=136751308691232;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=1767494241168;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-04T02:37:21.637294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1767494241168;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=136751308691232;op_tx=121:TX_KIND_SCHEMA;min=1767494241168;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-04T02:37:21.637375Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1767494241168;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=136751308691232;op_tx=121:TX_KIND_SCHEMA;min=1767494241168;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 23 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: 
"resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-04T02:37:21.638584Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=122;this=136751308693024;method=TTxController::StartProposeOnExecute;tx_info=122:TX_KIND_SCHEMA;min=1767494241169;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-04T02:37:21.653058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1767494241169;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=136751308693024;op_tx=122:TX_KIND_SCHEMA;min=1767494241169;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-04T02:37:21.653126Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1767494241169;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=136751308693024;op_tx=122:TX_KIND_SCHEMA;min=1767494241169;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=122; |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> IncrementalRestoreScan::ChangeSenderSimple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:37:20.068232Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:37:20.192856Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:37:20.193355Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:37:20.193414Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:37:20.478560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:20.478658Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:20.542030Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494237246012 != 1767494237246015 2026-01-04T02:37:20.557603Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:20.605529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:20.706676Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:37:21.137784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-01-04T02:37:21.138050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:37:21.138281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-04T02:37:21.138327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-04T02:37:21.138530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:37:21.138635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:21.139406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-04T02:37:21.139541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //Root 2026-01-04T02:37:21.139751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:37:21.139814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-04T02:37:21.139874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:37:21.139913Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:37:21.140506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:37:21.140555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:37:21.140598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:37:21.141087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:37:21.141126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:37:21.141168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-01-04T02:37:21.141222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:37:21.144901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:37:21.145390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:37:21.145537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:37:21.146109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2026-01-04T02:37:21.146164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/1, is published: true 2026-01-04T02:37:21.146219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 1, at schemeshard: 72057594046644480 2026-01-04T02:37:21.157035Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:21.254319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: 
TTxOperationPlanStep Execute, stepId: 1500, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:37:21.254513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 AckTo { RawX1: 0 RawX2: 0 } } Step: 1500 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-04T02:37:21.254583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2026-01-04T02:37:21.254860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:37:21.254922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046644480 2026-01-04T02:37:21.255086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-04T02:37:21.255155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-04T02:37:21.256181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T02:37:21.256233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 1, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:37:21.256440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-04T02:37:21.256483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:559:2493], at schemeshard: 72057594046644480, txId: 1, path id: 1 2026-01-04T02:37:21.256623Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:37:21.256668Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046644480] TDone opId# 1:0 ProgressState 2026-01-04T02:37:21.256768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:37:21.256808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:37:21.256848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:37:21.256888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:37:21.256931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:37:21.256977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:37:21.257016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:37:21.257051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:37:21.257109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2026-01-04T02:37:21.257145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 1 2026-01-04T02:37:21.257180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046644480, LocalPathId: 1], 6 2026-01-04T02:37:21.259845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshar ... ration id: 281474976715658:0 2026-01-04T02:37:21.817222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976715658:0 2026-01-04T02:37:21.817332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-04T02:37:21.818067Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:68:2115] Handle TEvNavigate describe path /Root/IncrBackupTable 2026-01-04T02:37:21.818178Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:1012:2859] HANDLE EvNavigateScheme /Root/IncrBackupTable 2026-01-04T02:37:21.820148Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:1012:2859] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:37:21.820253Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:1012:2859] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/IncrBackupTable" Options { ShowPrivateTable: true } 2026-01-04T02:37:21.821511Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:1012:2859] Handle TEvDescribeSchemeResult Forward to# [1:829:2724] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/IncrBackupTable" PathDescription { Self { Name: "IncrBackupTable" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715658 CreateStep: 2500 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 
134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 
1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-04T02:37:21.822646Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:1022:2863], serverId# [1:1023:2864], sessionId# [0:0:0] 2026-01-04T02:37:21.825567Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:65: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] HandleUserTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/IncrBackupTable TableId: [72057594046644480:39:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:37:21.825872Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:131: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] HandleTargetTable TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:37:21.826188Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.h:227: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] HandleKeys TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-04T02:37:21.826380Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:139: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] BodySize: 18 }] } 2026-01-04T02:37:21.826532Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:144: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 
72057594046644480, LocalPathId: 38] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 39] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2026-01-04T02:37:21.826719Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:391: actor# [1:68:2115] Handle TEvGetProxyServicesRequest 2026-01-04T02:37:21.826784Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:40: [TableChangeSenderShard][0:0][72075186224037888][1:1028:2865] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-04T02:37:21.827143Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1029:2869], serverId# [1:1030:2870], sessionId# [0:0:0] 2026-01-04T02:37:21.869667Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:78: [TableChangeSenderShard][0:0][72075186224037888][1:1028:2865] Handshake NKikimrChangeExchange.TEvStatus Status: STATUS_OK LastRecordOrder: 0 2026-01-04T02:37:21.869826Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:154: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:37:21.870004Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_table_base.cpp:123: [TableChangeSenderShard][0:0][72075186224037888][1:1028:2865] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 38] Kind: IncrementalRestore Source: InitialScan Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 39] SchemaVersion: 0 LockId: 0 LockOffset: 0 }] } 2026-01-04T02:37:21.870086Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:154: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:37:21.870233Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender_incr_restore.cpp:176: [IncrRestoreChangeSenderMain][[OwnerId: 72057594046644480, LocalPathId: 38]][1:1024:2865] Handle NKikimr::NDataShard::TEvIncrementalRestoreScan::TEvNoMoreData |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_restore_scan/unittest >> MoveTable::EmptyTable [GOOD] |89.4%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_incremental_restore_scan/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::EmptyTable [GOOD] Test command err: 2026-01-04T02:37:22.467397Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:22.497327Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:22.497609Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:22.504330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:22.504506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:22.504661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:22.504750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:22.504816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:22.504877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:22.504937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:22.505020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:22.505122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:22.505260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:22.505370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:22.505488Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:22.505610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:22.537365Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:22.537519Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:22.537568Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:22.537734Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:22.537921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:22.538021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:22.538083Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:22.538214Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:22.538289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:22.538334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:22.538370Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:22.538562Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:22.538638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:22.538684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:22.538714Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:22.538812Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:22.538873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:22.538916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:22.538949Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:22.538998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:22.539037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:22.539087Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:22.539167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:22.539229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:22.539264Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:22.539474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:22.539539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:22.539574Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:22.539714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:22.539765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:22.539797Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:22.539843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:22.539881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:22.539910Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:22.540004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:22.540051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:37:22.540082Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:37:22.540226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:37:22.540295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... d_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=29; 2026-01-04T02:37:22.847905Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=22; 2026-01-04T02:37:22.848018Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=26; 2026-01-04T02:37:22.848063Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=2599; 2026-01-04T02:37:22.848195Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:37:22.848256Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:37:22.848334Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:37:22.848635Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:37:22.848696Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2026-01-04T02:37:22.848787Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.191000s; 2026-01-04T02:37:22.849184Z node 
1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:37:22.849286Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:37:22.849339Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:37:22.849394Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2026-01-04T02:37:22.849514Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.097000s; 2026-01-04T02:37:22.849560Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2026-01-04T02:37:23.152194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=136366229477696;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1767494243434;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:37:23.152279Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=136366229477696;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1767494243434;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2026-01-04T02:37:23.167783Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767494243434;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;this=136366229477696;op_tx=10:TX_KIND_SCHEMA;min=1767494243434;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767494243434;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;int_this=136572390588864;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-04T02:37:23.167893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767494243434;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;this=136366229477696;op_tx=10:TX_KIND_SCHEMA;min=1767494243434;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767494243434;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;int_this=136572390588864;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:156:2179]; 2026-01-04T02:37:23.167956Z node 1 :TX_COLUMNSHARD DEBUG: 
log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767494243434;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;this=136366229477696;op_tx=10:TX_KIND_SCHEMA;min=1767494243434;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767494243434;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;int_this=136572390588864;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2026-01-04T02:37:23.168316Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-04T02:37:23.168440Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767494243434 at tablet 9437184, mediator 0 2026-01-04T02:37:23.168491Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2026-01-04T02:37:23.168737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:37:23.168838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:37:23.168883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:37:23.168976Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2026-01-04T02:37:23.178129Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1767494243434;tx_id=10;;switch_optimizer=0;switch_accessors=0; 2026-01-04T02:37:23.178243Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:37:23.178367Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2026-01-04T02:37:23.178451Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:158;event=RegisterTable;path_id=1000000185; 2026-01-04T02:37:23.178712Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-04T02:37:23.194654Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000185; 2026-01-04T02:37:23.218570Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 2026-01-04T02:37:23.219544Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136366229527648;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1767494243438;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=schema.cpp:134;propose_execute=move_table;src=1;dst=2; 2026-01-04T02:37:23.219609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136366229527648;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1767494243438;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2026-01-04T02:37:23.234745Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1767494243438;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136366229527648;op_tx=11:TX_KIND_SCHEMA;min=1767494243438;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_op_tx=11:TX_KIND_SCHEMA;min=1767494243438;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_this=136572390657024;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-04T02:37:23.234881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1767494243438;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136366229527648;op_tx=11:TX_KIND_SCHEMA;min=1767494243438;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_op_tx=11:TX_KIND_SCHEMA;min=1767494243438;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_this=136572390657024;method=TTxController::FinishProposeOnComplete;tx_id=11;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2026-01-04T02:37:23.234962Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1767494243438;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136366229527648;op_tx=11:TX_KIND_SCHEMA;min=1767494243438;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_op_tx=11:TX_KIND_SCHEMA;min=1767494243438;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;int_this=136572390657024;method=TTxController::FinishProposeOnComplete;tx_id=11;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=11; 2026-01-04T02:37:23.235373Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-04T02:37:23.235550Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767494243438 at tablet 9437184, mediator 0 2026-01-04T02:37:23.235616Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] execute at tablet 9437184 2026-01-04T02:37:23.236000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=11;event=move_table_progress;old_path_id=1;new_path_id=2;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=2;result=not_found; 2026-01-04T02:37:23.248806Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] complete at tablet 9437184 |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn >> TTxDataShardMiniKQL::CrossShard_5_AllToAll [GOOD] >> TTxDataShardMiniKQL::CrossShard_6_Local >> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn >> 
MoveTable::RenameAbsentTable_Negative ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TContinuousBackupTests::TakeIncrementalBackup [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:37:18.825959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:37:18.829968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:37:18.830067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:37:18.830115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:37:18.830159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:37:18.830197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:37:18.835258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:37:18.835368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:37:18.836296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:37:18.837822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:37:18.956983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:37:18.957490Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:18.981204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:37:18.981413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:37:18.982648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:37:18.994744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:37:18.995035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:37:18.995889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at 
schemeshard: 72057594046678944 2026-01-04T02:37:18.997614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:37:19.013992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:19.014280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:37:19.022034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:37:19.022124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:37:19.022308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:37:19.022378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:37:19.022428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:37:19.024678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:37:19.213798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.214821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.214956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.215025Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.215104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.215167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.215232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.215294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.215350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.215444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.215535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.215601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.215659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.215731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:37:19.215823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... l shard schema changes has been received, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2026-01-04T02:37:21.665308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976710757:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:37:21.665365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710757:0 129 -> 240 2026-01-04T02:37:21.667696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2026-01-04T02:37:21.669238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2026-01-04T02:37:21.669656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710757:0, at schemeshard: 72057594046678944 2026-01-04T02:37:21.669732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710757:0 ProgressState 2026-01-04T02:37:21.669886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710757:0 progress is 3/3 2026-01-04T02:37:21.669928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-01-04T02:37:21.669973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710757:0 progress is 3/3 2026-01-04T02:37:21.670009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-01-04T02:37:21.670066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710757, ready parts: 3/3, is published: true 2026-01-04T02:37:21.670145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:1379:3199] message: TxId: 281474976710757 2026-01-04T02:37:21.670203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710757 ready parts: 3/3 2026-01-04T02:37:21.670254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:0 2026-01-04T02:37:21.670291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710757:0 2026-01-04T02:37:21.670427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:37:21.670494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:1 2026-01-04T02:37:21.670519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710757:1 2026-01-04T02:37:21.670572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:37:21.670608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710757:2 2026-01-04T02:37:21.670629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710757:2 2026-01-04T02:37:21.670670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:37:21.670990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:37:21.671042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-04T02:37:21.671120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:37:21.671153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-04T02:37:21.671178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:37:21.673376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:37:21.673525Z node 1 :CONTINUOUS_BACKUP ERROR: schemeshard_backup_incremental__progress.cpp:189: TIncrementalBackup::TTxProgress: Incremental backup with id# 0 not found 2026-01-04T02:37:23.739405Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:37:23.739700Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl" took 310us result status StatusPathDoesNotExist 2026-01-04T02:37:23.739912Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/Table/0_continuousBackupImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:37:23.740507Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:37:23.740688Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/0_continuousBackupImpl/streamImpl" took 183us result status StatusPathDoesNotExist 2026-01-04T02:37:23.740812Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/0_continuousBackupImpl/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/Table/0_continuousBackupImpl/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/Table" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:37:23.741279Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/IncrBackupImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:37:23.741479Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/IncrBackupImpl" took 208us result status StatusSuccess 2026-01-04T02:37:23.742174Z node 1 :SCHEMESHARD_DESCRIBE 
DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/IncrBackupImpl" PathDescription { Self { Name: "IncrBackupImpl" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "IncrBackupImpl" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "__ydb_incrBackupImpl_changeMetadata" Type: "String" TypeId: 4097 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IncrementalBackupConfig { Mode: RESTORE_MODE_INCREMENTAL_BACKUP Consistency: CONSISTENCY_WEAK } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__incremental_backup" Value: "{}" } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_continuous_backup/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn >> TColumnShardTestSchema::Drop-Reboots+GenerateInternalPathId [GOOD] |89.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... results_accumulator.log} |89.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_continuous_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestSchema::DropWriteRace >> KqpRboPg::Aggregation+ColumnStore [GOOD] >> KqpRboPg::Aggregation-ColumnStore ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop-Reboots+GenerateInternalPathId [GOOD] Test command err: 2026-01-04T02:37:21.997829Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:22.033261Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:22.033538Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:22.041350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:22.041623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:22.041914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:22.042055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:22.042166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:22.042271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:22.042393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:22.042517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:22.042648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:22.042752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:22.042888Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:22.043003Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:22.043131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:22.073002Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:22.073169Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:22.073225Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:22.073436Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:22.073597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:22.073696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:22.073755Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:22.073899Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:22.073988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:22.074052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:22.074113Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:22.074323Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:22.074415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:22.074462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:22.074505Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:22.074605Z 
node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:22.074664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:22.074743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:22.074780Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:22.074837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:22.074887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:22.074926Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:22.074975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:22.075022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:22.075061Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:22.075326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:22.075396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:22.075432Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:22.075588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:22.075644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:22.075677Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:22.075732Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:22.075774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:22.075812Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:22.075888Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:22.075970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:37:22.076005Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:37:22.076177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:37:22.076232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 7184;request_tx=104:TX_KIND_SCHEMA;min=1767494243090;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;this=136219223837728;op_tx=104:TX_KIND_SCHEMA;min=1767494243090;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_op_tx=104:TX_KIND_SCHEMA;min=1767494243090;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_this=136425363978496;method=TTxController::FinishProposeOnComplete;tx_id=104;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=104; 2026-01-04T02:37:24.789204Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-04T02:37:24.789439Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767494243090 at tablet 9437184, mediator 0 2026-01-04T02:37:24.789527Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[12] execute at tablet 9437184 2026-01-04T02:37:24.789869Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: DropTable for pathId: {internal: 1000000185, ss: 1} at tablet 9437184 2026-01-04T02:37:24.802803Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[12] complete at tablet 9437184 2026-01-04T02:37:24.803467Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767494243090:max} readable: {1767494243090:max} at tablet 9437184 2026-01-04T02:37:24.803642Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-04T02:37:24.807958Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494243090:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 
2026-01-04T02:37:24.808072Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494243090:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:37:24.808830Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494243090:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-04T02:37:24.810797Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494243090:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-04T02:37:24.893464Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494243090:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:561:2573];trace_detailed=; 2026-01-04T02:37:24.894967Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-04T02:37:24.895233Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-04T02:37:24.895629Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:24.895810Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:24.896152Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:37:24.896348Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:24.896509Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:24.896791Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:561:2573] finished for tablet 9437184 2026-01-04T02:37:24.897284Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:555:2567];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.002},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.003}],"full":{"a":3390828,"name":"_full_task","f":3390828,"d_finished":0,"c":0,"l":3394303,"d":3475},"events":[{"name":"bootstrap","f":3391203,"d_finished":2087,"c":1,"l":3393290,"d":2087},{"a":3393551,"name":"ack","f":3393551,"d_finished":0,"c":0,"l":3394303,"d":752},{"a":3393516,"name":"processing","f":3393516,"d_finished":0,"c":0,"l":3394303,"d":787},{"name":"ProduceResults","f":3392890,"d_finished":757,"c":2,"l":3394002,"d":757},{"a":3394008,"name":"Finish","f":3394008,"d_finished":0,"c":0,"l":3394303,"d":295}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:24.897418Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:555:2567];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:37:24.897916Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:555:2567];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.002},{"events":["l_ProduceResults","f_Finish"],"t":0.003},{"events":["l_ack","l_processing","l_Finish"],"t":0.004}],"full":{"a":3390828,"name":"_full_task","f":3390828,"d_finished":0,"c":0,"l":3394932,"d":4104},"events":[{"name":"bootstrap","f":3391203,"d_finished":2087,"c":1,"l":3393290,"d":2087},{"a":3393551,"name":"ack","f":3393551,"d_finished":0,"c":0,"l":3394932,"d":1381},{"a":3393516,"name":"processing","f":3393516,"d_finished":0,"c":0,"l":3394932,"d":1416},{"name":"ProduceResults","f":3392890,"d_finished":757,"c":2,"l":3394002,"d":757},{"a":3394008,"name":"Finish","f":3394008,"d_finished":0,"c":0,"l":3394932,"d":924}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:24.898016Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:37:24.810758Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-04T02:37:24.898061Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:37:24.898247Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::RenameAbsentTable_Negative [GOOD] >> TColumnShardTestSchema::RebootHotTiers >> TColumnShardTestSchema::DropWriteRace [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::RenameAbsentTable_Negative [GOOD] Test command err: 2026-01-04T02:37:25.141058Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:25.172968Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:25.173199Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:25.180638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:25.180897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:25.181136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:25.181243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:25.181313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:25.181399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:25.181507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:25.181618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:25.181841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:25.181999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:25.182108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:25.182214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:25.182323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:25.211051Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:25.211224Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:25.211282Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:25.211524Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:25.211719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:25.211819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:25.211894Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:25.212024Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:25.212096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:25.212142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:25.212176Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:25.212331Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:25.212389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:25.212427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:25.212450Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:25.212549Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:25.212594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:25.212622Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:25.212659Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:25.212724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:25.212764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:25.212796Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:25.212853Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:25.212900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:25.212927Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:25.213106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:25.213161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:25.213193Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:25.213419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:25.213472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:25.213509Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:25.213563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:25.213597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:25.213631Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:25.213698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:25.213740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:37:25.213773Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:37:25.213906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:37:25.213964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ata.cpp:29;EXECUTE:db_locksLoadingTime=9; 2026-01-04T02:37:25.505737Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-04T02:37:25.506181Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=395; 2026-01-04T02:37:25.506250Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-04T02:37:25.506379Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=82; 2026-01-04T02:37:25.506432Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-04T02:37:25.506520Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=35; 2026-01-04T02:37:25.506617Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=30; 2026-01-04T02:37:25.506704Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=25; 2026-01-04T02:37:25.506742Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=3754; 2026-01-04T02:37:25.506919Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:37:25.506994Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:37:25.507080Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:37:25.507393Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:37:25.507458Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2026-01-04T02:37:25.507560Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.048000s; 2026-01-04T02:37:25.507865Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:37:25.508011Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:37:25.508065Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:37:25.508129Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2026-01-04T02:37:25.508273Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.020000s; 2026-01-04T02:37:25.508326Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2026-01-04T02:37:25.807898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=136871262476608;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1767494246114;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:37:25.808031Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=136871262476608;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1767494246114;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2026-01-04T02:37:25.820473Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767494246114;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;this=136871262476608;op_tx=10:TX_KIND_SCHEMA;min=1767494246114;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767494246114;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;int_this=137077423587776;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-04T02:37:25.820621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767494246114;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;this=136871262476608;op_tx=10:TX_KIND_SCHEMA;min=1767494246114;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767494246114;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;int_this=137077423587776;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:156:2179]; 2026-01-04T02:37:25.820713Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767494246114;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;this=136871262476608;op_tx=10:TX_KIND_SCHEMA;min=1767494246114;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767494246114;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;int_this=137077423587776;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2026-01-04T02:37:25.821058Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-04T02:37:25.821206Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767494246114 at tablet 9437184, mediator 0 2026-01-04T02:37:25.821265Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2026-01-04T02:37:25.821537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:37:25.821614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:37:25.821658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:37:25.821746Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2026-01-04T02:37:25.829777Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1767494246114;tx_id=10;;switch_optimizer=0;switch_accessors=0; 2026-01-04T02:37:25.829906Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:37:25.830045Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2026-01-04T02:37:25.830107Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:158;event=RegisterTable;path_id=1000000185; 2026-01-04T02:37:25.830291Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-04T02:37:25.845908Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000185; 2026-01-04T02:37:25.870147Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 2026-01-04T02:37:25.871197Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136871262526560;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1767494246117;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=schema.cpp:134;propose_execute=move_table;src=111;dst=2; 2026-01-04T02:37:25.871277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136871262526560;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1767494246117;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=111;result=not_found; 2026-01-04T02:37:25.871377Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136871262526560;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1767494246117;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=tx_controller.cpp:350;error=problem on start;message=No such table; 2026-01-04T02:37:25.886792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1767494246117;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136871262526560;op_tx=11:TX_KIND_SCHEMA;min=1767494246117;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2026-01-04T02:37:25.886875Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1767494246117;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136871262526560;op_tx=11:TX_KIND_SCHEMA;min=1767494246117;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=propose_tx.cpp:23;message=No such table;tablet_id=9437184;tx_id=11; |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersAfterTtl ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::DropWriteRace [GOOD] Test command err: 2026-01-04T02:37:25.880378Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:25.913376Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:25.913631Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:25.921243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:25.921512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:25.921765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:25.921922Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:25.922024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:25.922120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:25.922235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:25.922363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:25.922476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:25.922574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:25.922681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:25.922821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:25.922949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:25.952239Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:25.952396Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:25.952446Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:25.952632Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:25.952807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:25.952905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:25.952955Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:25.953053Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:25.953113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:25.953153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:25.953191Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:25.953419Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:25.953495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:25.953554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:25.953588Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:25.953681Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:25.953735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:25.953777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:25.953821Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:25.953891Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:25.953930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:25.953963Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-04T02:37:25.954005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:25.954044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:25.954080Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:25.954297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:25.954354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:25.954393Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:25.954534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:25.954577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:25.954607Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:25.954650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:25.954689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:25.954716Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:25.954787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:25.954826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:37:25.954861Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:37:25.954984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-01-04T02:37:25.955021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... n;tablet_id=9437184;tx_id=101;this=136852746721824;method=TTxController::StartProposeOnExecute;tx_info=101:TX_KIND_SCHEMA;min=1767494246850;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2026-01-04T02:37:26.536063Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=1767494246850;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;this=136852746721824;op_tx=101:TX_KIND_SCHEMA;min=1767494246850;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=1767494246850;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_this=137058907832448;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-04T02:37:26.536162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=1767494246850;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;this=136852746721824;op_tx=101:TX_KIND_SCHEMA;min=1767494246850;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=1767494246850;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_this=137058907832448;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2026-01-04T02:37:26.536225Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=101:TX_KIND_SCHEMA;min=1767494246850;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;this=136852746721824;op_tx=101:TX_KIND_SCHEMA;min=1767494246850;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_op_tx=101:TX_KIND_SCHEMA;min=1767494246850;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_this=137058907832448;method=TTxController::FinishProposeOnComplete;tx_id=101;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=101; 2026-01-04T02:37:26.536573Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-04T02:37:26.536728Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767494246850 at tablet 9437184, mediator 0 2026-01-04T02:37:26.536785Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2026-01-04T02:37:26.537066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:37:26.537190Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:37:26.537247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:37:26.537343Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2026-01-04T02:37:26.548069Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1767494246850;tx_id=101;;switch_optimizer=0;switch_accessors=0; 2026-01-04T02:37:26.548168Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:37:26.548268Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2026-01-04T02:37:26.548352Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine.h:158;event=RegisterTable;path_id=1000000185; 2026-01-04T02:37:26.548537Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-04T02:37:26.562163Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=101;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000185; 2026-01-04T02:37:26.588034Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6120;columns=10; 2026-01-04T02:37:26.595151Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:227;event=register_operation;operation_id=1;last=1; 2026-01-04T02:37:26.595230Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:27;writing_size=6120;operation_id=4e9d0522-e91611f0-935550dc-3f9d0a51;in_flight=1;size_in_flight=6120; 2026-01-04T02:37:26.611118Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=1;path_id={internal: 1000000185, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=8392;count=1;actions=__DEFAULT,;waiting=1;; 2026-01-04T02:37:26.614936Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6120;event=data_write_finished;writing_id=4e9d0522-e91611f0-935550dc-3f9d0a51; 2026-01-04T02:37:26.615275Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=78;data_size=59;sum=78;count=1; 2026-01-04T02:37:26.615381Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=174;data_size=171;sum=174;count=2;size_of_meta=112; 2026-01-04T02:37:26.615457Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=254;data_size=251;sum=254;count=1;size_of_portion=192; 2026-01-04T02:37:26.616183Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2026-01-04T02:37:26.616357Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=2;operation_id=1; 2026-01-04T02:37:26.632620Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2026-01-04T02:37:26.632965Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:37:26.666823Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=1767494246857;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;this=136852747073280;op_tx=103:TX_KIND_SCHEMA;min=1767494246857;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=1767494246857;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_this=137058907944448;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-04T02:37:26.666930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=1767494246857;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;this=136852747073280;op_tx=103:TX_KIND_SCHEMA;min=1767494246857;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=1767494246857;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_this=137058907944448;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2026-01-04T02:37:26.667006Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=103:TX_KIND_SCHEMA;min=1767494246857;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;this=136852747073280;op_tx=103:TX_KIND_SCHEMA;min=1767494246857;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_op_tx=103:TX_KIND_SCHEMA;min=1767494246857;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_this=137058907944448;method=TTxController::FinishProposeOnComplete;tx_id=103;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=103; 2026-01-04T02:37:26.667403Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-04T02:37:26.667573Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767494246857 at tablet 9437184, mediator 0 2026-01-04T02:37:26.667647Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[6] execute at tablet 9437184 2026-01-04T02:37:26.667987Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: DropTable for pathId: {internal: 1000000185, ss: 1} at tablet 9437184 2026-01-04T02:37:26.687612Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[6] complete at tablet 9437184 2026-01-04T02:37:26.688010Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767494246858 at tablet 9437184, mediator 0 2026-01-04T02:37:26.688108Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[8] execute at tablet 9437184 2026-01-04T02:37:26.688442Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=102;fline=abstract.h:88;progress_tx_id=102;lock_id=1;broken=0; 2026-01-04T02:37:26.702677Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[8] complete at tablet 9437184 2026-01-04T02:37:26.702807Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:101;progress_tx_id=102;lock_id=1;broken=0; 2026-01-04T02:37:26.703026Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=102;commit_lock_id=1;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=2;operation_id=1; 2026-01-04T02:37:26.703084Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=102;commit_lock_id=1;fline=manager.cpp:197;event=remove_operation;operation_id=1; >> TxUsage::Sinks_Oltp_WriteToTopics_3_Table [GOOD] |89.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetAfterFail >> TxUsage::Sinks_Oltp_WriteToTopics_3_Query >> YdbIndexTable::OnlineBuild [GOOD] >> YdbIndexTable::OnlineBuildWithDataColumn >> TColumnShardTestSchema::Drop-Reboots-GenerateInternalPathId >> MoveTable::RenameToItself_Negative >> KqpRboYql::TPCH_YDB_PERF [GOOD] >> KqpRboYql::TPCH_YQL >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] >> TTxDataShardMiniKQL::CrossShard_6_Local [GOOD] >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx >> MoveTable::RenameToItself_Negative [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::TableStatsHistograms [GOOD] Test command err: 2026-01-04T02:37:01.484921Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:37:01.486738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:37:01.486794Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:01.497211Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:37:01.497582Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:37:01.497894Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:37:01.546217Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:37:01.555730Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:37:01.555822Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:37:01.557456Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:37:01.557576Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:37:01.557634Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:37:01.557994Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:37:01.559360Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:37:01.559468Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:37:01.629216Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:37:01.664646Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:37:01.664830Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:37:01.664921Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:37:01.664972Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:37:01.665034Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:37:01.665077Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:01.665296Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:01.665341Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:01.665640Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:37:01.665743Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:37:01.665828Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:37:01.665871Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:37:01.665917Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:37:01.665950Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:37:01.665991Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:37:01.666040Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 
2026-01-04T02:37:01.666081Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:37:01.666162Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:01.666210Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:01.666250Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:37:01.673134Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:37:01.673212Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:37:01.673303Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:01.673508Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:37:01.673558Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:37:01.673604Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:37:01.673668Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:37:01.673714Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:37:01.673761Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:37:01.673795Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:37:01.674162Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:37:01.674204Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:37:01.674239Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:37:01.674270Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:37:01.674311Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:37:01.674353Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:37:01.674386Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:37:01.674419Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:37:01.674451Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:37:01.686484Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:37:01.686552Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:37:01.686580Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:37:01.686616Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:37:01.686686Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:37:01.687211Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:01.687259Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:01.687304Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:37:01.687421Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:37:01.687452Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:37:01.687577Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:37:01.687638Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:37:01.687679Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:37:01.687727Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:37:01.691119Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:37:01.691174Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:01.691395Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:01.691429Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:01.691503Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:37:01.691538Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 
active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:37:01.691578Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:37:01.691632Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:37:01.691672Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... sion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-04T02:37:29.690337Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:1002] at tablet 9437184 with status COMPLETE 2026-01-04T02:37:29.690393Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:1002] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-04T02:37:29.690485Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1002] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:37:29.690518Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1002] at 9437184 executing on unit ExecuteDataTx 2026-01-04T02:37:29.690552Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1002] at 9437184 to execution unit FinishPropose 2026-01-04T02:37:29.690583Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1002] at 9437184 on unit FinishPropose 2026-01-04T02:37:29.690620Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1002] at 9437184 is DelayComplete 2026-01-04T02:37:29.690648Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1002] at 9437184 executing on unit FinishPropose 2026-01-04T02:37:29.690680Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1002] at 9437184 to execution unit CompletedOperations 2026-01-04T02:37:29.690711Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1002] at 9437184 on unit CompletedOperations 2026-01-04T02:37:29.690755Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1002] at 9437184 is Executed 2026-01-04T02:37:29.690779Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1002] at 9437184 executing on unit CompletedOperations 2026-01-04T02:37:29.690808Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1002] at 9437184 has finished 2026-01-04T02:37:29.708177Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:37:29.708254Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1002] at 9437184 on unit FinishPropose 2026-01-04T02:37:29.708304Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1002 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2026-01-04T02:37:29.708397Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 .2026-01-04T02:37:29.714458Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, 
received event# 269551617, Sender [3:103:2137], Recipient [3:238:2230]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 12884904025 } 2026-01-04T02:37:29.714529Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2026-01-04T02:37:29.715586Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:4551:6468], Recipient [3:238:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:29.715639Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:29.715684Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:4550:6467], serverId# [3:4551:6468], sessionId# [0:0:0] 2026-01-04T02:37:29.716244Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:238:2230]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: "\032\265\002\037\000\005\205\n\205\000\205\004?\000\205\002\202\0041\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\001H\01056$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020\235\017\001\005?\026\003?\024\322ImInShard111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111111\001\007\002\000\003\005?\010?\014\006\002?\006?$\000\003?\014?\014\037/ \0018\000" TxId: 1003 ExecLevel: 0 Flags: 0 2026-01-04T02:37:29.716287Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:37:29.716398Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:29.717003Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit CheckDataTx 2026-01-04T02:37:29.717070Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is Executed 2026-01-04T02:37:29.717104Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit CheckDataTx 2026-01-04T02:37:29.717139Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-04T02:37:29.717172Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit BuildAndWaitDependencies 2026-01-04T02:37:29.717218Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-04T02:37:29.717280Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:1003] at 9437184 2026-01-04T02:37:29.717318Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is Executed 2026-01-04T02:37:29.717344Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-04T02:37:29.717368Z node 3 :TX_DATASHARD 
TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit BlockFailPoint 2026-01-04T02:37:29.717395Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit BlockFailPoint 2026-01-04T02:37:29.717422Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is Executed 2026-01-04T02:37:29.717450Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit BlockFailPoint 2026-01-04T02:37:29.717473Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit ExecuteDataTx 2026-01-04T02:37:29.717500Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit ExecuteDataTx 2026-01-04T02:37:29.717546Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-04T02:37:29.717930Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:1003] at tablet 9437184 with status COMPLETE 2026-01-04T02:37:29.717990Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:1003] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 109, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-04T02:37:29.718087Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:37:29.718122Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit ExecuteDataTx 2026-01-04T02:37:29.718156Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit FinishPropose 2026-01-04T02:37:29.718187Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit FinishPropose 2026-01-04T02:37:29.718226Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is DelayComplete 2026-01-04T02:37:29.718255Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit FinishPropose 2026-01-04T02:37:29.718286Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1003] at 9437184 to execution unit CompletedOperations 2026-01-04T02:37:29.718317Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1003] at 9437184 on unit CompletedOperations 2026-01-04T02:37:29.718362Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1003] at 9437184 is Executed 2026-01-04T02:37:29.718387Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1003] at 9437184 executing on unit CompletedOperations 2026-01-04T02:37:29.718413Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1003] at 9437184 has finished 2026-01-04T02:37:29.734279Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:37:29.734351Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1003] at 
9437184 on unit FinishPropose 2026-01-04T02:37:29.734399Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1003 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 3 ms, status: COMPLETE 2026-01-04T02:37:29.734487Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:29.762318Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 9437184, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-04T02:37:29.762386Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 9437184, table# 1001, finished edge# 0, front# 0 2026-01-04T02:37:29.767571Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:236:2229], Recipient [3:238:2230]: NKikimr::TEvTablet::TEvFollowerGcApplied .2026-01-04T02:37:29.771181Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:4565:6481], Recipient [3:238:2230]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:29.771265Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:29.771342Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:4564:6480], serverId# [3:4565:6481], sessionId# [0:0:0] 2026-01-04T02:37:29.771876Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553160, Sender [3:4563:6479], Recipient [3:238:2230]: NKikimrTxDataShard.TEvGetTableStats TableId: 13 { InMemSize: 0 LastAccessTime: 1719 LastUpdateTime: 1719 KeyAccessSample { } } |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::RenameToItself_Negative [GOOD] Test command err: 2026-01-04T02:37:30.135046Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:30.163797Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:30.164102Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:30.172273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:30.172459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:30.172655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:30.172774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:30.172844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:30.172913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:30.172976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:30.173047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:30.173152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:30.173219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:30.173283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:30.173357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:30.173434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:30.198797Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:30.198957Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:30.199014Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:30.199164Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:30.199327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:30.199435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:30.199496Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:30.199622Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:30.199721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:30.199775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:30.199814Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:30.200063Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:30.200151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:30.200197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:30.200258Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:30.200366Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:30.200427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:30.200499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:30.200538Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:30.200594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:30.200636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:30.200692Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:30.200745Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:30.200794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:30.200828Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:30.201051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:30.201107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:30.201141Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:30.201312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:30.201380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:30.201415Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:30.201474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:30.201517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:30.201549Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:30.201621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:30.201669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:37:30.201703Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:37:30.201899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:37:30.201948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... mon_data.cpp:29;EXECUTE:storages_managerLoadingTime=287; 2026-01-04T02:37:30.486048Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=4; 2026-01-04T02:37:30.486083Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=6; 2026-01-04T02:37:30.486115Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2026-01-04T02:37:30.486186Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=46; 2026-01-04T02:37:30.486218Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2026-01-04T02:37:30.486293Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=51; 2026-01-04T02:37:30.486335Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-04T02:37:30.486420Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=29; 2026-01-04T02:37:30.486491Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=32; 2026-01-04T02:37:30.486542Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=16; 2026-01-04T02:37:30.486573Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=2510; 2026-01-04T02:37:30.486688Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:37:30.486741Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:37:30.486805Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:37:30.487050Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:37:30.487090Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2026-01-04T02:37:30.487157Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.084000s; 2026-01-04T02:37:30.487366Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:37:30.487485Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:37:30.487556Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:37:30.487602Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:454;problem=Background activities cannot be started: no index at tablet; 2026-01-04T02:37:30.487692Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.086000s; 2026-01-04T02:37:30.487731Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2026-01-04T02:37:30.779423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=136388621818176;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1767494251112;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:37:30.779524Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=10;this=136388621818176;method=TTxController::StartProposeOnExecute;tx_info=10:TX_KIND_SCHEMA;min=1767494251112;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;fline=schema.h:38;event=sync_schema; 2026-01-04T02:37:30.792088Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767494251112;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;this=136388621818176;op_tx=10:TX_KIND_SCHEMA;min=1767494251112;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767494251112;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;int_this=136594782929344;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-04T02:37:30.792222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767494251112;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;this=136388621818176;op_tx=10:TX_KIND_SCHEMA;min=1767494251112;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767494251112;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;int_this=136594782929344;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:156:2179]; 2026-01-04T02:37:30.792290Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767494251112;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;this=136388621818176;op_tx=10:TX_KIND_SCHEMA;min=1767494251112;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767494251112;max=18446744073709551615;plan=0;src=[1:156:2179];cookie=00:0;;int_this=136594782929344;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2026-01-04T02:37:30.792629Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-04T02:37:30.792802Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767494251112 at tablet 9437184, mediator 0 2026-01-04T02:37:30.792877Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2026-01-04T02:37:30.793164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:37:30.793281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:37:30.793333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:37:30.793434Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2026-01-04T02:37:30.804051Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1767494251112;tx_id=10;;switch_optimizer=0;switch_accessors=0; 2026-01-04T02:37:30.804168Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:37:30.804289Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2026-01-04T02:37:30.804362Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:158;event=RegisterTable;path_id=1000000185; 2026-01-04T02:37:30.804636Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-04T02:37:30.822201Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000185; 2026-01-04T02:37:30.846271Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 2026-01-04T02:37:30.847281Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136388621868128;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1767494251116;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=schema.cpp:134;propose_execute=move_table;src=1;dst=1; 2026-01-04T02:37:30.847348Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=11;this=136388621868128;method=TTxController::StartProposeOnExecute;tx_info=11:TX_KIND_SCHEMA;min=1767494251116;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=tx_controller.cpp:350;error=problem on start;message=Rename to existing table; 2026-01-04T02:37:30.859960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1767494251116;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136388621868128;op_tx=11:TX_KIND_SCHEMA;min=1767494251116;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2026-01-04T02:37:30.860096Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=11:TX_KIND_SCHEMA;min=1767494251116;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;this=136388621868128;op_tx=11:TX_KIND_SCHEMA;min=1767494251116;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:1;;fline=propose_tx.cpp:23;message=Rename to existing table;tablet_id=9437184;tx_id=11; |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] >> TColumnShardTestSchema::Drop-Reboots-GenerateInternalPathId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::MemoryUsageImmediateHugeTx [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:118:2057] recipient: [1:112:2143] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:118:2057] recipient: [1:112:2143] Leader for TabletID 9437184 is [1:133:2156] sender: [1:136:2057] recipient: [1:112:2143] 2026-01-04T02:37:01.359997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:37:01.360071Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:01.362745Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:37:01.374310Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:37:01.374686Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:37:01.375091Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:37:01.423597Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 
268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:37:01.434728Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:37:01.435849Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:37:01.437585Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:37:01.437682Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:37:01.437992Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:37:01.438417Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:37:01.438521Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:37:01.438605Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:203:2156] in generation 2 Leader for TabletID 9437184 is [1:133:2156] sender: [1:214:2057] recipient: [1:14:2061] 2026-01-04T02:37:01.523983Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:37:01.556533Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:37:01.556781Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:37:01.556901Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:221:2217] 2026-01-04T02:37:01.556976Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:37:01.557019Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:37:01.557083Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:01.557353Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:01.557416Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:01.557695Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:37:01.557793Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:37:01.557961Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:37:01.558003Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:37:01.558042Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:37:01.558077Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:37:01.558133Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:37:01.558173Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to 
execute at 9437184 TxInFly 0 2026-01-04T02:37:01.558230Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:37:01.558350Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:215:2214], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:01.558403Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:01.558444Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:213:2213], serverId# [1:215:2214], sessionId# [0:0:0] 2026-01-04T02:37:01.565477Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:37:01.565578Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:37:01.565678Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:01.565933Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:37:01.566024Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:37:01.566090Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:37:01.566141Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:37:01.566182Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:37:01.566222Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:37:01.566262Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:37:01.566601Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:37:01.566641Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:37:01.566676Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:37:01.566714Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:37:01.566793Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:37:01.566824Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:37:01.566860Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 
2026-01-04T02:37:01.566893Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:37:01.566925Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:37:01.580830Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:37:01.580949Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:37:01.580993Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:37:01.581081Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:37:01.581194Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:37:01.581854Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:227:2223], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:01.581924Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:01.581975Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:226:2222], serverId# [1:227:2223], sessionId# [0:0:0] 2026-01-04T02:37:01.582136Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2026-01-04T02:37:01.582180Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:37:01.582336Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2026-01-04T02:37:01.582392Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [2:1] at 9437184 is Executed 2026-01-04T02:37:01.582433Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:37:01.582470Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [2:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:37:01.591035Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:37:01.591143Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:01.591434Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:01.591485Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:37:01.591562Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:37:01.591609Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 
active planned 0 immediate 0 planned 1 2026-01-04T02:37:01.591651Z node 1 :TX_DATASHARD TRACE: datashard_pipelin ... rd_impl.h:3153: StateWork, received event# 269877761, Sender [19:299:2280], Recipient [19:240:2232]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:31.781743Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:31.781825Z node 19 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [19:298:2279], serverId# [19:299:2280], sessionId# [0:0:0] 2026-01-04T02:37:31.782091Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [19:104:2137], Recipient [19:240:2232]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 104 RawX2: 81604380761 } TxBody: "\032\324\002\037\002\006Arg\005\205\n\205\000\205\004?\000\205\002\202\0047\034MyReads MyWrites\205\004?\000\206\202\024Reply\024Write?\000?\000 AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\000\005?\004?\014\005?\002)\211\006\202\203\005\004\213\002\203\004\205\002\203\004\01057$UpdateRow\000\003?\016 h\020\000\000\000\000\000\000\r\000\000\000\000\000\000\000\013?\022\003?\020T\001\005?\026)\211\n?\024\206\203\004?\024? ?\024\203\004\020Fold\000)\211\002?\"\206? \034Collect\000)\211\006?(? \203\004\203\0024ListFromRange\000\003? \000\003?,\003\022z\003?.\004\007\010\000\n\003?\024\000)\251\000? \002\000\004)\251\000?\024\002\000\002)\211\006?$\203\005@? ?\024\030Invoke\000\003?F\006Add?@?D\001\006\002\014\000\007\016\000\003\005?\010?\014\006\002?\006?R\000\003?\014?\014\037/ \0018\000" TxId: 2 ExecLevel: 0 Flags: 0 2026-01-04T02:37:31.782140Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:37:31.782258Z node 19 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:31.783392Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CheckDataTx 2026-01-04T02:37:31.783491Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-04T02:37:31.783542Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CheckDataTx 2026-01-04T02:37:31.783593Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-04T02:37:31.783640Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit BuildAndWaitDependencies 2026-01-04T02:37:31.783689Z node 19 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-04T02:37:31.783776Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 9437184 2026-01-04T02:37:31.783838Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-04T02:37:31.783872Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-04T02:37:31.783901Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit BlockFailPoint 
2026-01-04T02:37:31.783947Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit BlockFailPoint 2026-01-04T02:37:31.783979Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-04T02:37:31.784008Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit BlockFailPoint 2026-01-04T02:37:31.784036Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit ExecuteDataTx 2026-01-04T02:37:31.784066Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-01-04T02:37:31.784122Z node 19 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-04T02:37:31.784184Z node 19 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:126: Operation [0:2] at 9437184 requested 132390 more memory 2026-01-04T02:37:31.784233Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2026-01-04T02:37:31.784549Z node 19 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:31.784609Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-01-04T02:37:31.784666Z node 19 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-04T02:37:31.785912Z node 19 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:2] at 9437184 exceeded memory limit 132518 and requests 1060144 more for the next try 2026-01-04T02:37:31.786095Z node 19 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 2 released its data 2026-01-04T02:37:31.786169Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2026-01-04T02:37:31.786416Z node 19 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:31.786459Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-01-04T02:37:31.787381Z node 19 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 2 at 9437184 restored its data 2026-01-04T02:37:31.787456Z node 19 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-04T02:37:31.788120Z node 19 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:2] at 9437184 exceeded memory limit 1192662 and requests 9541296 more for the next try 2026-01-04T02:37:31.788247Z node 19 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 2 released its data 2026-01-04T02:37:31.788293Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2026-01-04T02:37:31.788485Z node 19 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:31.788524Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit 
ExecuteDataTx 2026-01-04T02:37:31.789154Z node 19 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 2 at 9437184 restored its data 2026-01-04T02:37:31.789209Z node 19 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-04T02:37:31.789756Z node 19 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:2] at 9437184 exceeded memory limit 10733958 and requests 85871664 more for the next try 2026-01-04T02:37:31.789895Z node 19 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 2 released its data 2026-01-04T02:37:31.789943Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Restart 2026-01-04T02:37:31.790116Z node 19 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:31.790152Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit ExecuteDataTx 2026-01-04T02:37:31.790735Z node 19 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 2 at 9437184 restored its data 2026-01-04T02:37:31.790785Z node 19 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v2/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-04T02:37:32.116596Z node 19 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:2] at tablet 9437184 with status COMPLETE 2026-01-04T02:37:32.116749Z node 19 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:2] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 8, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-04T02:37:32.116880Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:37:32.116939Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit ExecuteDataTx 2026-01-04T02:37:32.117000Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit FinishPropose 2026-01-04T02:37:32.117061Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit FinishPropose 2026-01-04T02:37:32.117204Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:37:32.117252Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit FinishPropose 2026-01-04T02:37:32.117314Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 9437184 to execution unit CompletedOperations 2026-01-04T02:37:32.117366Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 9437184 on unit CompletedOperations 2026-01-04T02:37:32.117440Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 9437184 is Executed 2026-01-04T02:37:32.117484Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 9437184 executing on unit CompletedOperations 2026-01-04T02:37:32.117534Z node 19 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1943: Execution plan for [0:2] at 9437184 has finished 2026-01-04T02:37:32.130744Z node 19 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:37:32.130844Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 9437184 on unit FinishPropose 2026-01-04T02:37:32.130908Z node 19 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 2 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2026-01-04T02:37:32.131023Z node 19 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:32.132299Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [19:304:2285], Recipient [19:240:2232]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:32.132380Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:32.132440Z node 19 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [19:303:2284], serverId# [19:304:2285], sessionId# [0:0:0] 2026-01-04T02:37:32.132561Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268830214, Sender [19:302:2283], Recipient [19:240:2232]: NKikimrTabletBase.TEvGetCounters |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> KqpRboPg::Aggregation-ColumnStore [GOOD] >> KqpRboPg::AliasesRenames ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop-Reboots-GenerateInternalPathId [GOOD] Test command err: 2026-01-04T02:37:29.372819Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:29.414342Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:29.414623Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:29.422727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:29.423008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:29.423255Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:29.423409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:29.423515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:29.423666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:29.423789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:29.423905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:29.424074Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:29.424191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:29.424327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:29.424455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:29.424582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:29.454453Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:29.454637Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:29.454704Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:29.454903Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:29.455109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:29.455224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:29.455280Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:29.455378Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:29.455479Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:29.455541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:29.455608Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:29.455851Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:29.455960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:29.456016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:29.456053Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:29.456163Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:29.456264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:29.456319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:29.456351Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:29.456424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:29.456485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:29.456527Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:29.456575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:29.456620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:29.456655Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:29.456905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:29.456984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:29.457036Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:29.457215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:29.457270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:29.457306Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:29.457359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:29.457410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:29.457442Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:29.457521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:29.457575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:37:29.457609Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:37:29.457757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:37:29.457855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
og.cpp:841: tablet_id=9437184;request_tx=104:TX_KIND_SCHEMA;min=1767494250463;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;this=136386331200544;op_tx=104:TX_KIND_SCHEMA;min=1767494250463;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_op_tx=104:TX_KIND_SCHEMA;min=1767494250463;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:2;;int_this=136592471341312;method=TTxController::FinishProposeOnComplete;tx_id=104;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=104; 2026-01-04T02:37:32.448675Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-04T02:37:32.448852Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767494250463 at tablet 9437184, mediator 0 2026-01-04T02:37:32.448907Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[12] execute at tablet 9437184 2026-01-04T02:37:32.449158Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: DropTable for pathId: {internal: 1000000185, ss: 1} at tablet 9437184 2026-01-04T02:37:32.461615Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[12] complete at tablet 9437184 2026-01-04T02:37:32.462192Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767494250463:max} readable: {1767494250463:max} at tablet 9437184 2026-01-04T02:37:32.462323Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-04T02:37:32.465616Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494250463:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:37:32.465736Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494250463:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:37:32.466402Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494250463:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-04T02:37:32.468196Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494250463:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-04T02:37:32.524515Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494250463:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:561:2573];trace_detailed=; 2026-01-04T02:37:32.525489Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-04T02:37:32.525698Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-04T02:37:32.525992Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:32.526117Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:32.526322Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:37:32.526451Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:32.526577Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:32.526754Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:561:2573] finished for tablet 9437184 2026-01-04T02:37:32.527047Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:555:2567];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":3631874,"name":"_full_task","f":3631874,"d_finished":0,"c":0,"l":3634211,"d":2337},"events":[{"name":"bootstrap","f":3632136,"d_finished":1422,"c":1,"l":3633558,"d":1422},{"a":3633709,"name":"ack","f":3633709,"d_finished":0,"c":0,"l":3634211,"d":502},{"a":3633686,"name":"processing","f":3633686,"d_finished":0,"c":0,"l":3634211,"d":525},{"name":"ProduceResults","f":3633275,"d_finished":539,"c":2,"l":3634027,"d":539},{"a":3634034,"name":"Finish","f":3634034,"d_finished":0,"c":0,"l":3634211,"d":177}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:32.527117Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:555:2567];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:37:32.527427Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:555:2567];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.001},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.002}],"full":{"a":3631874,"name":"_full_task","f":3631874,"d_finished":0,"c":0,"l":3634596,"d":2722},"events":[{"name":"bootstrap","f":3632136,"d_finished":1422,"c":1,"l":3633558,"d":1422},{"a":3633709,"name":"ack","f":3633709,"d_finished":0,"c":0,"l":3634596,"d":887},{"a":3633686,"name":"processing","f":3633686,"d_finished":0,"c":0,"l":3634596,"d":910},{"name":"ProduceResults","f":3633275,"d_finished":539,"c":2,"l":3634027,"d":539},{"a":3634034,"name":"Finish","f":3634034,"d_finished":0,"c":0,"l":3634596,"d":562}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:32.527496Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:37:32.468168Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-04T02:37:32.527530Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:37:32.527681Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:561:2573];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop+Reboots+GenerateInternalPathId >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction >> ConvertUnboxedValueToArrowAndBack::LargeVariant [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> ConvertUnboxedValueToArrowAndBack::LargeVariant [GOOD] |89.5%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest >> MoveTable::WithData-Reboot >> TColumnShardTestSchema::RebootHotTiersAfterTtl >> DqUnboxedValueDoNotFitToArrow::LargeVariant [GOOD] >> DqUnboxedValueToNativeArrowConversion::ListOfJsons [GOOD] >> DqUnboxedValueToNativeArrowConversion::DoubleOptionalVariantOverStruct >> 
DqUnboxedValueToNativeArrowConversion::DoubleOptionalVariantOverStruct [GOOD] >> DqUnboxedValueToNativeArrowConversion::DoubleOptionalVariantOverTupleWithOptionals [GOOD] >> MoveTable::WithData-Reboot [GOOD] >> TColumnShardTestSchema::Drop+Reboots-GenerateInternalPathId >> KqpRboYql::BasicJoins [GOOD] >> TColumnShardTestSchema::Drop+Reboots+GenerateInternalPathId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithData-Reboot [GOOD] Test command err: 2026-01-04T02:37:35.854704Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:35.881066Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:35.881317Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:35.888496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:35.888757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:35.888980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:35.889080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:35.889167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:35.889231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:35.889293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:35.889384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:35.889485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:35.889571Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:35.889643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:35.889760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:35.889876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:35.914779Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:35.914936Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:35.914980Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:35.915162Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:35.915369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:35.915484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:35.915528Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:35.915605Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:35.915654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:35.915687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:35.915708Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:35.915854Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:35.915919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:35.915973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
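The TTxUpdateSchema entries around this point register twelve normalizers in a fixed order (Granules, Chunks, TablesCleaner, CleanGranuleId, ...) and then run them sequentially, each stage logging normalizer_init, a "0 chunks found" result and normalizer_finished before the next one is switched in. A minimal sketch of that kind of sequential normalizer chain, with invented names (Normalizer, RunChain) and not taken from the YDB code:

```cpp
#include <functional>
#include <iostream>
#include <string>
#include <vector>

// Hedged sketch of a sequential "normalizer" chain as suggested by the
// TTxUpdateSchema trace: each stage is initialized, run, and reported
// finished before the next stage starts. Names are invented.
struct Normalizer {
    std::string Name;
    std::function<size_t()> Run;  // returns how many chunks it had to fix
};

void RunChain(const std::vector<Normalizer>& chain) {
    for (size_t i = 0; i < chain.size(); ++i) {
        const auto& n = chain[i];
        std::cout << "normalizer_init: " << n.Name << " (seq_id=" << i + 1 << ")\n";
        size_t fixed = n.Run();
        std::cout << n.Name << ": " << fixed << " chunks found\n";
        std::cout << "normalizer_finished: " << n.Name << "\n";
    }
}

int main() {
    // The order mirrors the registration order in the log above.
    std::vector<Normalizer> chain = {
        {"Granules",       [] { return size_t(0); }},
        {"Chunks",         [] { return size_t(0); }},
        {"TablesCleaner",  [] { return size_t(0); }},
        {"CleanGranuleId", [] { return size_t(0); }},
    };
    RunChain(chain);
}
```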
2026-01-04T02:37:35.916000Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:35.916073Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:35.916110Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:35.916169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:35.916194Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:35.916232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:35.916259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:35.916316Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:35.916363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:35.916400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:35.916423Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:35.916581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:35.916620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:35.916642Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:35.916750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:35.916801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:35.916831Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:35.916870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:35.916903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:35.916939Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:35.916996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:35.917036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:37:35.917070Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:37:35.917247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:37:35.917315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
umns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-04T02:37:36.764121Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:329:2341];bytes=6350;rows=100;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-04T02:37:36.764368Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:36.764587Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:36.764762Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:36.765020Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:37:36.765213Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:36.765435Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:36.765775Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:335:2347] finished for tablet 9437184 2026-01-04T02:37:36.770257Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:329:2341];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.013},{"events":["f_ack"],"t":0.014},{"events":["l_ProduceResults","f_Finish"],"t":0.016},{"events":["l_ack","l_processing","l_Finish"],"t":0.019}],"full":{"a":1375692,"name":"_full_task","f":1375692,"d_finished":0,"c":0,"l":1394812,"d":19120},"events":[{"name":"bootstrap","f":1376019,"d_finished":2917,"c":1,"l":1378936,"d":2917},{"a":1391689,"name":"ack","f":1389776,"d_finished":1734,"c":1,"l":1391510,"d":4857},{"a":1391674,"name":"processing","f":1379295,"d_finished":5646,"c":3,"l":1391513,"d":8784},{"name":"ProduceResults","f":1378035,"d_finished":3093,"c":6,"l":1392163,"d":3093},{"a":1392169,"name":"Finish","f":1392169,"d_finished":0,"c":0,"l":1394812,"d":2643},{"name":"task_result","f":1379320,"d_finished":3839,"c":2,"l":1389561,"d":3839}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:36.771494Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:329:2341];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:37:36.773394Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:329:2341];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.013},{"events":["f_ack"],"t":0.014},{"events":["l_ProduceResults","f_Finish"],"t":0.016},{"events":["l_ack","l_processing","l_Finish"],"t":0.022}],"full":{"a":1375692,"name":"_full_task","f":1375692,"d_finished":0,"c":0,"l":1398419,"d":22727},"events":[{"name":"bootstrap","f":1376019,"d_finished":2917,"c":1,"l":1378936,"d":2917},{"a":1391689,"name":"ack","f":1389776,"d_finished":1734,"c":1,"l":1391510,"d":8464},{"a":1391674,"name":"processing","f":1379295,"d_finished":5646,"c":3,"l":1391513,"d":12391},{"name":"ProduceResults","f":1378035,"d_finished":3093,"c":6,"l":1392163,"d":3093},{"a":1392169,"name":"Finish","f":1392169,"d_finished":0,"c":0,"l":1398419,"d":6250},{"name":"task_result","f":1379320,"d_finished":3839,"c":2,"l":1389561,"d":3839}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:36.773688Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:37:36.672208Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-04T02:37:36.773898Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:37:36.774599Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2026-01-04T02:37:36.777240Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2026-01-04T02:37:36.778110Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 12 scanId: 0 version: {1767494256833:12} readable: {1767494256833:max} at tablet 9437184 2026-01-04T02:37:36.778597Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 12 scanId: 0 at tablet 9437184 2026-01-04T02:37:36.778709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={1767494256833:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:37:36.778960Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={1767494256833:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> DqUnboxedValueToNativeArrowConversion::DoubleOptionalVariantOverTupleWithOptionals [GOOD] |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |89.5%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop+Reboots+GenerateInternalPathId [GOOD] Test command err: 2026-01-04T02:37:33.984348Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:34.013432Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:34.013655Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:34.020808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:34.021065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:34.021310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:34.021422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:34.021513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:34.021616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:34.021727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:34.021850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:34.021991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:34.022091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:34.022222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:34.022334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:34.022463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:34.046348Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:34.046471Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:34.046509Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:34.046655Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:34.046770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:34.046848Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:34.046899Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:34.046980Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:34.047021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:34.047069Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:34.047106Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:34.047259Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:34.047311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:34.047337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:34.047356Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:34.047415Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:34.047477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:34.047515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:34.047542Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:34.047577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:34.047603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:34.047627Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:34.047669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:34.047710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:34.047732Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:34.047905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:34.047967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:34.048001Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:34.048191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:34.048240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:34.048270Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:34.048314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:34.048353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:34.048373Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:34.048418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:34.048466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:37:34.048499Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:37:34.048636Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:37:34.048677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
nd=cleanup;skip_reason=no_changes; 2026-01-04T02:37:37.038282Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-04T02:37:37.038337Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:37:37.038760Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.147000s; 2026-01-04T02:37:37.038812Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2026-01-04T02:37:37.131465Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767494255133:max} readable: {1767494255133:max} at tablet 9437184 2026-01-04T02:37:37.131646Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-04T02:37:37.136013Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494255133:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:37:37.136139Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494255133:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:37:37.136883Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494255133:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-04T02:37:37.138762Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494255133:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-04T02:37:37.192565Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494255133:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:663:2651];trace_detailed=; 2026-01-04T02:37:37.193885Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-04T02:37:37.194253Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-04T02:37:37.194618Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:37.194771Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:37.195126Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:37:37.195319Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:37.195488Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:37.195690Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:663:2651] finished for tablet 9437184 2026-01-04T02:37:37.196175Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:656:2645];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.002},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.003}],"full":{"a":3645335,"name":"_full_task","f":3645335,"d_finished":0,"c":0,"l":3648611,"d":3276},"events":[{"name":"bootstrap","f":3645725,"d_finished":1940,"c":1,"l":3647665,"d":1940},{"a":3647958,"name":"ack","f":3647958,"d_finished":0,"c":0,"l":3648611,"d":653},{"a":3647935,"name":"processing","f":3647935,"d_finished":0,"c":0,"l":3648611,"d":676},{"name":"ProduceResults","f":3647325,"d_finished":695,"c":2,"l":3648389,"d":695},{"a":3648395,"name":"Finish","f":3648395,"d_finished":0,"c":0,"l":3648611,"d":216}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:37.196275Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:656:2645];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:37:37.196684Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:656:2645];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_ack","f_processing"],"t":0.002},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.003}],"full":{"a":3645335,"name":"_full_task","f":3645335,"d_finished":0,"c":0,"l":3649193,"d":3858},"events":[{"name":"bootstrap","f":3645725,"d_finished":1940,"c":1,"l":3647665,"d":1940},{"a":3647958,"name":"ack","f":3647958,"d_finished":0,"c":0,"l":3649193,"d":1235},{"a":3647935,"name":"processing","f":3647935,"d_finished":0,"c":0,"l":3649193,"d":1258},{"name":"ProduceResults","f":3647325,"d_finished":695,"c":2,"l":3648389,"d":695},{"a":3648395,"name":"Finish","f":3648395,"d_finished":0,"c":0,"l":3649193,"d":798}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:37.196760Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:37:37.138725Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-04T02:37:37.196811Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:37:37.196938Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Table [GOOD] |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpRboPg::AliasesRenames [GOOD] >> KqpRboPg::Bench_10Joins ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboYql::BasicJoins [GOOD] Test command err: Trying to start YDB, gRPC: 4542, MsgBus: 7630 2026-01-04T02:36:56.697377Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329855658276390:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:56.698441Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-01-04T02:36:56.997975Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:57.011701Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:57.011850Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:57.112484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:57.163302Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:57.173972Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329855658276288:2081] 1767494216675785 != 1767494216675788 2026-01-04T02:36:57.275473Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:57.323554Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:57.323588Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:57.323596Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:57.323685Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:57.677517Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:57.874328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:57.886461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:37:00.232666Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329872838146359:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:00.232802Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:00.233764Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329872838146369:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:00.233851Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:00.537003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:37:01.137318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7591329872838146856:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:01.137653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7591329872838146856:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:01.137905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7591329872838146856:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:01.138031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7591329872838146856:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:01.138146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7591329872838146856:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:01.138257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7591329872838146856:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:01.138350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7591329872838146856:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:01.138492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7591329872838146856:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:01.138606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7591329872838146856:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:01.138748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7591329872838146856:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:01.138855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7591329872838146856:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 
2026-01-04T02:37:01.138969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7591329872838146856:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:01.139095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;self_id=[1:7591329872838146856:2330];tablet_id=72075186224037942;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:01.167414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7591329872838146900:2336];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:01.167541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7591329872838146900:2336];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:01.167741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7591329872838146900:2336];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:01.167833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7591329872838146900:2336];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:01.167919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7591329872838146900:2336];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:01.168025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7591329872838146900:2336];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:01.168119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7591329872838146900:2336];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:01.168204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7591329872838146900:2336];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:01.168285Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7591329872838146900:2336];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:01.168534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7591329872838146900:2336];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:01.168671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037948;self_id=[1:7591329872838146900:2336];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:01.168788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7591329872838146900:2336];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:01.168882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037948;self_id=[1:7591329872838146900:2336];tablet_id=72075186224037948;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:01.198131Z node 1 ... ocal=40;result=not_found; 2026-01-04T02:37:22.048962Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.049044Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.049064Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038033;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.053760Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038024;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.053867Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038024;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.053893Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038024;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.058828Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038017;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.058905Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038017;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.058924Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038017;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.061102Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.061194Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.061219Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.068538Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038026;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.068606Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038026;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.068624Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038026;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.068957Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.069035Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.069061Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038057;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.076514Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.076593Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.076612Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.079056Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.079136Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.079157Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.084042Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.084107Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.084126Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=40;result=not_found; 2026-01-04T02:37:22.212637Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329966813589629:3348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:22.212720Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329966813589634:3351], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:22.212726Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:22.213065Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329966813589636:3352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:22.213155Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:22.216917Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:37:22.232795Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2026-01-04T02:37:22.233112Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591329966813589637:3353], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-04T02:37:22.292200Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591329966813589689:6644] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:37:25.325351Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:37:25.325388Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:35.696422Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7591330022648174545:11403], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
:3:135: Fatal: ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:609: Condition violated: `(isLeftSidePredicate && !isRightSidePredicate) || (isRightSidePredicate && !isLeftSidePredicate)', code: 1 2026-01-04T02:37:35.696904Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=MWM1ZmYzZTMtZTJjMmNiZWEtODBmYmQ0MDctNGI1NTlmZWI=, ActorId: [3:7591330022648174542:11401], ActorState: ExecuteState, LegacyTraceId: 01ke3dw0zk2eapj46p3xhkxyw9, ReplyQueryCompileError, remove tx status# INTERNAL_ERROR issues# { message: "Execution" issue_code: 1060 issues { position { row: 3 column: 135 } message: "ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:609: Condition violated: `(isLeftSidePredicate && !isRightSidePredicate) || (isRightSidePredicate && !isLeftSidePredicate)\'" end_position { row: 3 column: 135 } issue_code: 1 } } tx_id# trace_id# 2026-01-04T02:37:35.810314Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7591330022648174562:11412], status: INTERNAL_ERROR, issues:
: Fatal: Execution, code: 1060
:3:143: Fatal: ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:548: OR in join predicate is not supported, code: 1 2026-01-04T02:37:35.810724Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=MTUxNjEzZTQtMTJjMTZhZTgtZTY5MWRmNy0zNGE1MWRiZQ==, ActorId: [3:7591330022648174559:11410], ActorState: ExecuteState, LegacyTraceId: 01ke3dw136f459vft9g89f0s21, ReplyQueryCompileError, remove tx status# INTERNAL_ERROR issues# { message: "Execution" issue_code: 1060 issues { position { row: 3 column: 143 } message: "ydb/core/kqp/opt/rbo/kqp_rewrite_select.cpp:548: OR in join predicate is not supported" end_position { row: 3 column: 143 } issue_code: 1 } } tx_id# trace_id# |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Query >> MoveTable::WithCommitInProgress-Reboot >> TColumnShardTestSchema::Drop+Reboots-GenerateInternalPathId [GOOD] >> MoveTable::WithCommitInProgress+Reboot >> MoveTable::WithCommitInProgress-Reboot [GOOD] >> TColumnShardTestSchema::CreateTable+Reboots-GenerateInternalPathId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::Drop+Reboots-GenerateInternalPathId [GOOD] Test command err: 2026-01-04T02:37:37.566876Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:37.591794Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:37.592098Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:37.598229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:37.598427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:37.598604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:37.598711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:37.598774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:37.598838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:37.598907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:37.598976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:37.599070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:37.599135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:37.599233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:37.599302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:37.599378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:37.620800Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:37.620917Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:37.620971Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:37.621148Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:37.621266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:37.621331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:37.621367Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:37.621441Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:37.621491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:37.621518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:37.621565Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:37.621729Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:37.621788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:37.621851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:37.621883Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:37.621981Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:37.622037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:37.622071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:37.622091Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:37.622145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:37.622189Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:37.622216Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:37.622249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:37.622275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:37.622297Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:37.622460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:37.622523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:37.622560Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:37.622710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:37.622756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:37.622790Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:37.622829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:37.622886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:37.622915Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:37.622958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:37.622991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:37:37.623012Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:37:37.623114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:37:37.623158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
d_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:37:40.724950Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-04T02:37:40.725006Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:37:40.725441Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.169000s; 2026-01-04T02:37:40.725496Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 2026-01-04T02:37:40.816889Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767494258719:max} readable: {1767494258719:max} at tablet 9437184 2026-01-04T02:37:40.817034Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-04T02:37:40.822996Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494258719:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:37:40.823135Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494258719:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:37:40.823855Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494258719:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-04T02:37:40.825260Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494258719:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-04T02:37:40.879955Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:609:2606];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494258719:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:663:2651];trace_detailed=; 2026-01-04T02:37:40.881360Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-04T02:37:40.881698Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-04T02:37:40.882112Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:40.882276Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:40.882656Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:37:40.882849Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:40.883013Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:40.883221Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:663:2651] finished for tablet 9437184 2026-01-04T02:37:40.883703Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:656:2645];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.002},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.003}],"full":{"a":3770328,"name":"_full_task","f":3770328,"d_finished":0,"c":0,"l":3773762,"d":3434},"events":[{"name":"bootstrap","f":3770737,"d_finished":2058,"c":1,"l":3772795,"d":2058},{"a":3773102,"name":"ack","f":3773102,"d_finished":0,"c":0,"l":3773762,"d":660},{"a":3773083,"name":"processing","f":3773083,"d_finished":0,"c":0,"l":3773762,"d":679},{"name":"ProduceResults","f":3772411,"d_finished":714,"c":2,"l":3773528,"d":714},{"a":3773534,"name":"Finish","f":3773534,"d_finished":0,"c":0,"l":3773762,"d":228}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:40.883827Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:656:2645];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:37:40.884283Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:656:2645];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","f_ProduceResults"],"t":0.002},{"events":["l_ProduceResults","f_Finish"],"t":0.003},{"events":["l_ack","l_processing","l_Finish"],"t":0.004}],"full":{"a":3770328,"name":"_full_task","f":3770328,"d_finished":0,"c":0,"l":3774363,"d":4035},"events":[{"name":"bootstrap","f":3770737,"d_finished":2058,"c":1,"l":3772795,"d":2058},{"a":3773102,"name":"ack","f":3773102,"d_finished":0,"c":0,"l":3774363,"d":1261},{"a":3773083,"name":"processing","f":3773083,"d_finished":0,"c":0,"l":3774363,"d":1280},{"name":"ProduceResults","f":3772411,"d_finished":714,"c":2,"l":3773528,"d":714},{"a":3773534,"name":"Finish","f":3773534,"d_finished":0,"c":0,"l":3774363,"d":829}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::not_sorted;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:40.884381Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:37:40.825229Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-04T02:37:40.884421Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:37:40.884551Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:663:2651];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithCommitInProgress-Reboot [GOOD] Test command err: 2026-01-04T02:37:40.624401Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:40.656428Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:40.656688Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:40.663718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:40.663961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:40.664174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:40.664317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:40.664413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:40.664505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:40.664599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:40.664695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:40.664834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:40.664934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:40.665026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:40.665138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:40.665245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:40.689479Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:40.689598Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:40.689631Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:40.689745Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:40.689876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:40.689964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:40.689999Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:40.690070Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:40.690120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:40.690145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:40.690168Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:40.690290Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:40.690335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:40.690359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:40.690389Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:40.690478Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:40.690515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:40.690561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:40.690584Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:40.690614Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:40.690651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:40.690686Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:40.690717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:40.690740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:40.690761Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:40.690881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:40.690923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:40.690939Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:40.691030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:40.691063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:40.691081Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:40.691108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:40.691132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:40.691151Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:40.691193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:40.691222Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:37:40.691241Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:37:40.691341Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:37:40.691372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... m_rows=100;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-04T02:37:41.692167Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:343:2355];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:337:2349];bytes=6350;rows=100;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-04T02:37:41.692388Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:343:2355];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:41.692575Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:343:2355];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:41.692759Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:343:2355];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:41.692993Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:343:2355];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:37:41.693205Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:343:2355];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:41.693400Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:343:2355];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:41.693827Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:343:2355] finished for tablet 9437184 2026-01-04T02:37:41.694401Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:343:2355];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:337:2349];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.013},{"events":["l_ProduceResults","f_Finish"],"t":0.015},{"events":["l_ack","l_processing","l_Finish"],"t":0.016}],"full":{"a":1491309,"name":"_full_task","f":1491309,"d_finished":0,"c":0,"l":1507561,"d":16252},"events":[{"name":"bootstrap","f":1491576,"d_finished":2923,"c":1,"l":1494499,"d":2923},{"a":1506646,"name":"ack","f":1504881,"d_finished":1584,"c":1,"l":1506465,"d":2499},{"a":1506620,"name":"processing","f":1494892,"d_finished":5646,"c":3,"l":1506469,"d":6587},{"name":"ProduceResults","f":1493697,"d_finished":2872,"c":6,"l":1507117,"d":2872},{"a":1507124,"name":"Finish","f":1507124,"d_finished":0,"c":0,"l":1507561,"d":437},{"name":"task_result","f":1494910,"d_finished":3998,"c":2,"l":1504719,"d":3998}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:41.694497Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:343:2355];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:337:2349];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:37:41.695086Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:343:2355];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:337:2349];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.002},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.003},{"events":["f_ack","l_task_result"],"t":0.013},{"events":["l_ProduceResults","f_Finish"],"t":0.015},{"events":["l_ack","l_processing","l_Finish"],"t":0.016}],"full":{"a":1491309,"name":"_full_task","f":1491309,"d_finished":0,"c":0,"l":1508223,"d":16914},"events":[{"name":"bootstrap","f":1491576,"d_finished":2923,"c":1,"l":1494499,"d":2923},{"a":1506646,"name":"ack","f":1504881,"d_finished":1584,"c":1,"l":1506465,"d":3161},{"a":1506620,"name":"processing","f":1494892,"d_finished":5646,"c":3,"l":1506469,"d":7249},{"name":"ProduceResults","f":1493697,"d_finished":2872,"c":6,"l":1507117,"d":2872},{"a":1507124,"name":"Finish","f":1507124,"d_finished":0,"c":0,"l":1508223,"d":1099},{"name":"task_result","f":1494910,"d_finished":3998,"c":2,"l":1504719,"d":3998}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:41.695179Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:343:2355];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:37:41.615207Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-04T02:37:41.695229Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:343:2355];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:37:41.695408Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:343:2355];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2026-01-04T02:37:41.696428Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2026-01-04T02:37:41.696742Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 12 scanId: 0 version: {18446744073709551615:12} readable: {18446744073709551615:max} at tablet 9437184 2026-01-04T02:37:41.696887Z node 1 :TX_COLUMNSHARD 
DEBUG: ctor_logger.h:56: TTxScan prepare txId: 12 scanId: 0 at tablet 9437184 2026-01-04T02:37:41.696945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={18446744073709551615:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:37:41.697054Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={18446744073709551615:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithCommitInProgress+Reboot [GOOD] >> TColumnShardTestSchema::CreateTable+Reboots-GenerateInternalPathId [GOOD] >> TColumnShardTestSchema::ForgetWithLostAnswer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithCommitInProgress+Reboot [GOOD] Test command err: 2026-01-04T02:37:41.490133Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:41.515381Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:41.515682Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:41.522281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:41.522504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:41.522754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:41.522913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:41.522995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:41.523061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:41.523128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:41.523195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:41.523297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:41.523365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:41.523445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:41.523519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:41.523603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:41.547537Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:41.547667Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:41.547722Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:41.547880Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:41.548048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:41.548188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:41.548250Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:41.548359Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:41.548447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:41.548501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:41.548540Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:41.548691Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:41.548748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:41.548776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:41.548797Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:41.548889Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:41.548931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:41.548979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:41.549005Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:41.549072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:41.549117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:41.549171Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:41.549207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:41.549235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:41.549256Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:41.549418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:41.549450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:41.549469Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:41.549581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:41.549616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:41.549636Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:41.549670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:41.549702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:41.549722Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:41.549764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:41.549796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:37:41.549851Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:37:41.549978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:37:41.550009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ta_format;batch_size=0;num_rows=100;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-04T02:37:43.196471Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:535:2499];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:528:2493];bytes=6350;rows=100;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-04T02:37:43.196694Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:535:2499];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:43.196937Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:535:2499];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:43.197120Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:535:2499];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:43.197379Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:535:2499];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:37:43.197567Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:535:2499];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:43.197734Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:535:2499];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:43.198054Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:535:2499] finished for tablet 9437184 2026-01-04T02:37:43.198665Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:535:2499];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:528:2493];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":2119723,"name":"_full_task","f":2119723,"d_finished":0,"c":0,"l":2133123,"d":13400},"events":[{"name":"bootstrap","f":2120049,"d_finished":2004,"c":1,"l":2122053,"d":2004},{"a":2132330,"name":"ack","f":2130561,"d_finished":1579,"c":1,"l":2132140,"d":2372},{"a":2132303,"name":"processing","f":2122292,"d_finished":4827,"c":3,"l":2132144,"d":5647},{"name":"ProduceResults","f":2121612,"d_finished":2636,"c":6,"l":2132748,"d":2636},{"a":2132756,"name":"Finish","f":2132756,"d_finished":0,"c":0,"l":2133123,"d":367},{"name":"task_result","f":2122304,"d_finished":3201,"c":2,"l":2130443,"d":3201}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:43.198773Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:535:2499];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:528:2493];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:37:43.199333Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:535:2499];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:528:2493];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.013},{"events":["l_ack","l_processing","l_Finish"],"t":0.014}],"full":{"a":2119723,"name":"_full_task","f":2119723,"d_finished":0,"c":0,"l":2133813,"d":14090},"events":[{"name":"bootstrap","f":2120049,"d_finished":2004,"c":1,"l":2122053,"d":2004},{"a":2132330,"name":"ack","f":2130561,"d_finished":1579,"c":1,"l":2132140,"d":3062},{"a":2132303,"name":"processing","f":2122292,"d_finished":4827,"c":3,"l":2132144,"d":6337},{"name":"ProduceResults","f":2121612,"d_finished":2636,"c":6,"l":2132748,"d":2636},{"a":2132756,"name":"Finish","f":2132756,"d_finished":0,"c":0,"l":2133813,"d":1057},{"name":"task_result","f":2122304,"d_finished":3201,"c":2,"l":2130443,"d":3201}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:43.199448Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:535:2499];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:37:43.134341Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-04T02:37:43.199494Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:535:2499];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:37:43.199709Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:535:2499];TabletId=9437184;ScanId=0;TxId=12;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2026-01-04T02:37:43.200444Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2026-01-04T02:37:43.200823Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 12 scanId: 0 version: {18446744073709551615:12} readable: {18446744073709551615:max} at tablet 9437184 2026-01-04T02:37:43.200956Z node 1 :TX_COLUMNSHARD 
DEBUG: ctor_logger.h:56: TTxScan prepare txId: 12 scanId: 0 at tablet 9437184 2026-01-04T02:37:43.201005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:483:2456];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={18446744073709551615:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:37:43.201106Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:483:2456];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=12;scan_id=0;gen=0;table=;snapshot={18446744073709551615:12};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpWorkload::KV [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable+Reboots-GenerateInternalPathId [GOOD] Test command err: 2026-01-04T02:37:42.271911Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:42.297209Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:42.297390Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:42.303176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:42.303350Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:42.303533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:42.303640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:42.303703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:42.303770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:42.303838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:42.303909Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:42.304016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:42.304108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:42.304175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:42.304246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:42.304334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:42.328625Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:42.328739Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:42.328787Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:42.328914Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:42.329040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:42.329136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:42.329181Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:42.329290Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:42.329347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:42.329387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:42.329428Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:42.329546Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:42.329600Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:42.329631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:42.329664Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:42.329734Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:42.329783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:42.329829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:42.329852Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:42.329894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:42.329923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:42.329947Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:42.329980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:42.330025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:42.330056Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:42.330247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:42.330295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:42.330377Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:42.330473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:42.330500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:42.330534Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:42.330570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:42.330595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:42.330624Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:42.330652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:42.330677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:37:42.330696Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:37:42.330799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:37:42.330829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
:StartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=1767494263353;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;fline=schema.h:38;event=sync_schema; 2026-01-04T02:37:43.641357Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767494263353;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137219670885568;op_tx=119:TX_KIND_SCHEMA;min=1767494263353;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767494263353;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137425832451584;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-04T02:37:43.641446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767494263353;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137219670885568;op_tx=119:TX_KIND_SCHEMA;min=1767494263353;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767494263353;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137425832451584;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-04T02:37:43.641525Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767494263353;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137219670885568;op_tx=119:TX_KIND_SCHEMA;min=1767494263353;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767494263353;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137425832451584;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=119; 2026-01-04T02:37:43.641770Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-04T02:37:43.641961Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767494263353 at tablet 9437184, mediator 0 2026-01-04T02:37:43.642005Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] execute at tablet 9437184 2026-01-04T02:37:43.642304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-04T02:37:43.642342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-04T02:37:43.642404Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 20, ss: 20} ttl settings: { Version: 1 } at tablet 9437184 2026-01-04T02:37:43.642461Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:367;method=RegisterTable;path_id=20; 2026-01-04T02:37:43.642520Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine.h:158;event=RegisterTable;path_id=20; 2026-01-04T02:37:43.642768Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-04T02:37:43.643388Z node 1 
:TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=20; 2026-01-04T02:37:43.655377Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[4] complete at tablet 9437184 CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-04T02:37:43.656969Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=137219670888480;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=1767494263356;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-04T02:37:43.669084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1767494263356;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=137219670888480;op_tx=120:TX_KIND_SCHEMA;min=1767494263356;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-04T02:37:43.669158Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1767494263356;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=137219670888480;op_tx=120:TX_KIND_SCHEMA;min=1767494263356;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 
4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-04T02:37:43.670369Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=137219670890272;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=1767494263358;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-04T02:37:43.682374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1767494263358;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=137219670890272;op_tx=121:TX_KIND_SCHEMA;min=1767494263358;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-04T02:37:43.682452Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1767494263358;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=137219670890272;op_tx=121:TX_KIND_SCHEMA;min=1767494263358;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 23 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-04T02:37:43.683679Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: 
tablet_id=9437184;self_id=[1:378:2387];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=122;this=137219670892064;method=TTxController::StartProposeOnExecute;tx_info=122:TX_KIND_SCHEMA;min=1767494263359;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-04T02:37:43.695481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1767494263359;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=137219670892064;op_tx=122:TX_KIND_SCHEMA;min=1767494263359;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-04T02:37:43.695559Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1767494263359;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=137219670892064;op_tx=122:TX_KIND_SCHEMA;min=1767494263359;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=122; |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::KV [GOOD] Test command err: Trying to start YDB, gRPC: 5356, MsgBus: 20556 2026-01-04T02:36:07.664998Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329647405047417:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:07.665128Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:07.709494Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:36:07.978659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:07.978781Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:08.008197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:08.031934Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:08.063498Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:08.203955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:08.203994Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:08.204007Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:08.204134Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:08.240965Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:08.645869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:08.670581Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:11.025115Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329664584917370:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:11.025249Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:11.030021Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329664584917380:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:11.030105Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:11.298563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:11.820162Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329664584918940:2452], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:11.820251Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:11.821965Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329664584918943:2454], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:11.822018Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:11.822267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329664584918947:2456], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:11.826259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:11.837162Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329664584918949:2457], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:36:11.973155Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329664584919000:3552] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:12.665008Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329647405047417:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:12.665095Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:22.957486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:36:22.957519Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded took: 0.122528s took: 0.122765s took: 0.124384s took: 0.124803s took: 0.126313s took: 0.126692s took: 0.127690s took: 0.128303s took: 0.128398s took: 0.195152s took: 0.114244s took: 0.114686s took: 0.114163s took: 0.117718s took: 0.117807s took: 0.119046s took: 0.119394s took: 0.119908s took: 0.119698s took: 0.119869s took: 0.172481s took: 0.172399s took: 0.173439s took: 0.173639s took: 0.173744s took: 0.176068s took: 0.175996s took: 0.176534s took: 0.180526s took: 0.180426s took: 0.025407s took: 0.026601s took: 0.027399s took: 0.028139s took: 0.032264s took: 0.037068s took: 0.039276s took: 0.039409s took: 0.037464s took: 0.037934s took: 0.152052s took: 0.156244s took: 0.152341s took: 0.178435s took: 0.178586s took: 0.174448s took: 0.178691s took: 0.178870s took: 0.178881s took: 0.174969s 2026-01-04T02:37:43.581061Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found 2026-01-04T02:37:43.581102Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2026-01-04T02:37:43.581116Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037920 not found 2026-01-04T02:37:43.581135Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037895 not found 2026-01-04T02:37:43.582850Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037918 not found 2026-01-04T02:37:43.587943Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-04T02:37:43.587988Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2026-01-04T02:37:43.594203Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2026-01-04T02:37:43.594243Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2026-01-04T02:37:43.594261Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle 
TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-04T02:37:43.594280Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037906 not found 2026-01-04T02:37:43.594295Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037919 not found 2026-01-04T02:37:43.594313Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2026-01-04T02:37:43.594329Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037922 not found 2026-01-04T02:37:43.594348Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2026-01-04T02:37:43.594365Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037926 not found 2026-01-04T02:37:43.594382Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037905 not found 2026-01-04T02:37:43.594400Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found 2026-01-04T02:37:43.594417Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2026-01-04T02:37:43.594438Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2026-01-04T02:37:43.594463Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2026-01-04T02:37:43.594485Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2026-01-04T02:37:43.608360Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037900 not found 2026-01-04T02:37:43.608401Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2026-01-04T02:37:43.608417Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2026-01-04T02:37:43.608432Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found 2026-01-04T02:37:43.608447Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037907 not found 2026-01-04T02:37:43.608463Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-04T02:37:43.608478Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2026-01-04T02:37:43.608493Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037925 not found 
2026-01-04T02:37:43.608532Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037898 not found 2026-01-04T02:37:43.608548Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037904 not found 2026-01-04T02:37:43.608563Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037894 not found 2026-01-04T02:37:43.608578Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037915 not found 2026-01-04T02:37:43.608593Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037908 not found 2026-01-04T02:37:43.608613Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037911 not found 2026-01-04T02:37:43.608641Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037912 not found 2026-01-04T02:37:43.608658Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037910 not found 2026-01-04T02:37:43.608672Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2026-01-04T02:37:43.608692Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037916 not found |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportWithLostAnswer >> YdbIndexTable::MultiShardTableUniqAndNonUniqIndex [GOOD] >> YdbIndexTable::MultiShardTableTwoIndexes >> TColumnShardTestSchema::CreateTable-Reboots-GenerateInternalPathId >> KqpWorkload::STOCK [GOOD] |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |89.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable-Reboots-GenerateInternalPathId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable-Reboots-GenerateInternalPathId [GOOD] Test command err: 2026-01-04T02:37:46.826106Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:46.853359Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:46.853589Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:46.860345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:46.860549Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:46.860711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:46.860801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:46.860868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:46.860929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:46.861007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:46.861072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:46.861128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:46.861221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:46.861286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:46.861354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:46.861423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:46.883039Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:46.883188Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:46.883237Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:46.883382Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:46.883535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:46.883601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:46.883639Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:46.883711Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:46.883766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:46.883809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:46.883839Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:46.884033Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:46.884113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:46.884156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:46.884200Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:46.884302Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:46.884355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:46.884394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:46.884423Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:46.884486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 
2026-01-04T02:37:46.884531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:46.884561Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:46.884609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:46.884648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:46.884672Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:46.884871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:46.884909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:46.884973Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:46.885057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:46.885083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:46.885108Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:46.885145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:46.885174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:46.885195Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:46.885239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:46.885263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:37:46.885281Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:37:46.885364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:37:46.885392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... tartProposeOnExecute;tx_info=119:TX_KIND_SCHEMA;min=1767494267855;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;fline=schema.h:38;event=sync_schema; 2026-01-04T02:37:47.993248Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767494267855;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137155548309344;op_tx=119:TX_KIND_SCHEMA;min=1767494267855;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767494267855;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137361709603072;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-04T02:37:47.993307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767494267855;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137155548309344;op_tx=119:TX_KIND_SCHEMA;min=1767494267855;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767494267855;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137361709603072;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-04T02:37:47.993348Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767494267855;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137155548309344;op_tx=119:TX_KIND_SCHEMA;min=1767494267855;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767494267855;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137361709603072;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=119; 2026-01-04T02:37:47.993570Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-04T02:37:47.993645Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767494267855 at tablet 9437184, mediator 0 2026-01-04T02:37:47.993680Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[36] execute at tablet 9437184 2026-01-04T02:37:47.993924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-04T02:37:47.993959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-04T02:37:47.994009Z node 1 :TX_COLUMNSHARD INFO: 
ctor_logger.h:56: EnsureTable for pathId: {internal: 20, ss: 20} ttl settings: { Version: 1 } at tablet 9437184 2026-01-04T02:37:47.994066Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:367;method=RegisterTable;path_id=20; 2026-01-04T02:37:47.994101Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine.h:158;event=RegisterTable;path_id=20; 2026-01-04T02:37:47.994273Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-04T02:37:47.994803Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=20; 2026-01-04T02:37:48.006685Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[36] complete at tablet 9437184 CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-04T02:37:48.007839Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=137155548312256;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=1767494267858;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-04T02:37:48.019732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1767494267858;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=137155548312256;op_tx=120:TX_KIND_SCHEMA;min=1767494267858;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-04T02:37:48.019799Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: 
tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1767494267858;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=137155548312256;op_tx=120:TX_KIND_SCHEMA;min=1767494267858;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-04T02:37:48.020804Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=137155548314048;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=1767494267859;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-04T02:37:48.032932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1767494267859;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=137155548314048;op_tx=121:TX_KIND_SCHEMA;min=1767494267859;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-04T02:37:48.033005Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1767494267859;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=137155548314048;op_tx=121:TX_KIND_SCHEMA;min=1767494267859;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 23 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" 
TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-04T02:37:48.034048Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=122;this=137155548315840;method=TTxController::StartProposeOnExecute;tx_info=122:TX_KIND_SCHEMA;min=1767494267861;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-04T02:37:48.045946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1767494267861;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=137155548315840;op_tx=122:TX_KIND_SCHEMA;min=1767494267861;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-04T02:37:48.046008Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1767494267861;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=137155548315840;op_tx=122:TX_KIND_SCHEMA;min=1767494267861;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=122; |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/perf/unittest >> KqpWorkload::STOCK [GOOD] Test command err: Trying to start YDB, gRPC: 15210, MsgBus: 63448 2026-01-04T02:36:40.378743Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329787869695280:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:40.379005Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:40.711173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:40.711280Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:40.738139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:40.784900Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not 
loaded 2026-01-04T02:36:40.790214Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:40.790319Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329787869695153:2081] 1767494200353618 != 1767494200353621 2026-01-04T02:36:40.922321Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:40.922342Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:40.922347Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:40.922413Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:41.023880Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:41.395394Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:41.450616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:41.462481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:36:43.795882Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329800754597930:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.796020Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.796714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329800754597940:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:43.796787Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:44.062153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:44.209581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.004286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:36:45.385654Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329787869695280:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:45.389082Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:36:45.485913Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329809344536451:2627], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.485989Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.486268Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329809344536457:2631], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.486309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329809344536456:2630], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.486387Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:45.491765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:45.504552Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329809344536460:2632], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-04T02:36:45.652799Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329809344536511:5077] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:36:55.696932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:36:55.696962Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded took: 0.549924s took: 0.553267s took: 0.558817s took: 0.562528s took: 0.566542s took: 0.568719s took: 0.573038s took: 0.578123s took: 0.578282s took: 0.580662s took: 6.437298s 2026-01-04T02:37:38.793731Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976711089; took: 6.476663s 2026-01-04T02:37:38.811539Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [1:7591330036977814032:5028], Table: `/Root/stock` ([72057594046644480:38:1]), SessionActorId: [1:7591330011208008596:5028]Got LOCKS BROKEN for table `/Root/stock`. ShardID=72075186224037888, Sink=[1:7591330036977814032:5028].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-04T02:37:38.812153Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7591330036977812978:5028], SessionActorId: [1:7591330011208008596:5028], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/stock`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7591330011208008596:5028]. 2026-01-04T02:37:38.812409Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=Zjk2NzAyZmQtNmNmM2I4YzktMTMwYTcyYTYtNzBhNmQ3NTk=, ActorId: [1:7591330011208008596:5028], ActorState: ExecuteState, LegacyTraceId: 01ke3dvy9x0nk0ad7cadt79xdv, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7591330036977813961:5028] from: [1:7591330036977812978:5028] trace_id# 2026-01-04T02:37:38.812498Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7591330036977813961:5028] TxId: 281474976711089. Ctx: { TraceId: 01ke3dvy9x0nk0ad7cadt79xdv, Database: /Root, SessionId: ydb://session/3?node_id=1&id=Zjk2NzAyZmQtNmNmM2I4YzktMTMwYTcyYTYtNzBhNmQ3NTk=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/stock`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-04T02:37:38.812714Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976711089; 2026-01-04T02:37:38.812888Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=Zjk2NzAyZmQtNmNmM2I4YzktMTMwYTcyYTYtNzBhNmQ3NTk=, ActorId: [1:7591330011208008596:5028], ActorState: ExecuteState, LegacyTraceId: 01ke3dvy9x0nk0ad7cadt79xdv, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/stock`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-04T02:37:38.812911Z node 1 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [1767494258839 : 281474976711089] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } took: 6.494405s ... abletStatus from node 1, TabletId: 72075186224037897 not found 2026-01-04T02:37:46.269181Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2026-01-04T02:37:46.269196Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037909 not found 2026-01-04T02:37:46.269219Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037921 not found 2026-01-04T02:37:46.269235Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037897 not found 2026-01-04T02:37:46.269285Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037901 not found 2026-01-04T02:37:46.269309Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2026-01-04T02:37:46.269324Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-04T02:37:46.269338Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2026-01-04T02:37:46.269353Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2026-01-04T02:37:46.269367Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2026-01-04T02:37:46.269381Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found 2026-01-04T02:37:46.269396Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found 2026-01-04T02:37:46.269412Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2026-01-04T02:37:46.269426Z node 1 :HIVE 
WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2026-01-04T02:37:46.280348Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found 2026-01-04T02:37:46.280406Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2026-01-04T02:37:46.280439Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037903 not found 2026-01-04T02:37:46.280462Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037902 not found 2026-01-04T02:37:46.280478Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037923 not found 2026-01-04T02:37:46.280493Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037893 not found 2026-01-04T02:37:46.280510Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2026-01-04T02:37:46.280528Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037928 not found 2026-01-04T02:37:46.280543Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037913 not found 2026-01-04T02:37:46.280558Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037896 not found 2026-01-04T02:37:46.280598Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037914 not found 2026-01-04T02:37:46.280620Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037917 not found 2026-01-04T02:37:46.284508Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037927 not found 2026-01-04T02:37:46.284544Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-04T02:37:46.284565Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037924 not found 2026-01-04T02:37:46.405517Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037965 not found 2026-01-04T02:37:46.405567Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037959 not found 2026-01-04T02:37:46.405590Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037954 not found 2026-01-04T02:37:46.407373Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037941 not found 2026-01-04T02:37:46.407419Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, 
TabletId: 72075186224037953 not found 2026-01-04T02:37:46.407436Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037932 not found 2026-01-04T02:37:46.407454Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037939 not found 2026-01-04T02:37:46.407509Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037948 not found 2026-01-04T02:37:46.407536Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037966 not found 2026-01-04T02:37:46.423499Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037963 not found 2026-01-04T02:37:46.423535Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037968 not found 2026-01-04T02:37:46.423551Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037962 not found 2026-01-04T02:37:46.423567Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037952 not found 2026-01-04T02:37:46.423581Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037946 not found 2026-01-04T02:37:46.423597Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037969 not found 2026-01-04T02:37:46.423613Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037937 not found 2026-01-04T02:37:46.423628Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037936 not found 2026-01-04T02:37:46.423643Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037950 not found 2026-01-04T02:37:46.423698Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037967 not found 2026-01-04T02:37:46.423767Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037961 not found 2026-01-04T02:37:46.423786Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037956 not found 2026-01-04T02:37:46.423809Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037935 not found 2026-01-04T02:37:46.423834Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037942 not found 2026-01-04T02:37:46.423852Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037957 not found 2026-01-04T02:37:46.423865Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037933 not found 2026-01-04T02:37:46.423880Z node 1 :HIVE WARN: 
hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037943 not found 2026-01-04T02:37:46.423893Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037949 not found 2026-01-04T02:37:46.423910Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037947 not found 2026-01-04T02:37:46.423936Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037930 not found 2026-01-04T02:37:46.423949Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037931 not found 2026-01-04T02:37:46.423962Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037958 not found 2026-01-04T02:37:46.423979Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037964 not found 2026-01-04T02:37:46.424008Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037938 not found 2026-01-04T02:37:46.424031Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037944 not found 2026-01-04T02:37:46.424049Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037955 not found 2026-01-04T02:37:46.424098Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037934 not found 2026-01-04T02:37:46.424126Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037951 not found 2026-01-04T02:37:46.481693Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037945 not found 2026-01-04T02:37:46.481751Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037940 not found 2026-01-04T02:37:46.481796Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037960 not found |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/perf/unittest >> TColumnShardTestSchema::CreateTable-Reboots+GenerateInternalPathId |89.5%| [TA] $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... results_accumulator.log} |89.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/perf/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TColumnShardTestSchema::ExportAfterFail >> TColumnShardTestSchema::ColdTiers >> TColumnShardTestSchema::RebootForgetWithLostAnswer >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn [GOOD] >> KqpRboYql::TPCH_YQL [GOOD] >> TColumnShardTestSchema::CreateTable-Reboots+GenerateInternalPathId [GOOD] >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::CreateTable-Reboots+GenerateInternalPathId [GOOD] Test command err: 2026-01-04T02:37:49.914471Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:49.935430Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:49.935669Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:49.941563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:49.941727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:49.941902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:49.941993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:49.942063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:49.942131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:49.942196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:49.942260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:49.942318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:49.942408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:49.942475Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:49.942543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:49.942613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:49.962549Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:49.962658Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:49.962697Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:49.962838Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:49.962972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:49.963054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:49.963094Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:49.963190Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:49.963235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:49.963267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:49.963286Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:49.963406Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:49.963466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:49.963501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:49.963527Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:49.963590Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:49.963624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:49.963656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:49.963673Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:49.963715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:49.963744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:49.963766Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:49.963819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:49.963845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:49.963868Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:49.964040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:49.964076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:49.964150Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:49.964265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:49.964314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:49.964344Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:49.964379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:49.964403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:49.964423Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:49.964467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:49.964506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:37:49.964540Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:37:49.964623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:37:49.964650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
:TX_KIND_SCHEMA;min=1767494270947;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;fline=schema.h:38;event=sync_schema; 2026-01-04T02:37:51.082277Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767494270947;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137274239772512;op_tx=119:TX_KIND_SCHEMA;min=1767494270947;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767494270947;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137480401055680;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-04T02:37:51.082334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767494270947;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137274239772512;op_tx=119:TX_KIND_SCHEMA;min=1767494270947;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767494270947;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137480401055680;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-04T02:37:51.082372Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=119:TX_KIND_SCHEMA;min=1767494270947;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;this=137274239772512;op_tx=119:TX_KIND_SCHEMA;min=1767494270947;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_op_tx=119:TX_KIND_SCHEMA;min=1767494270947;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=019:0;;int_this=137480401055680;method=TTxController::FinishProposeOnComplete;tx_id=119;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=119; 2026-01-04T02:37:51.082673Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-04T02:37:51.082820Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767494270947 at tablet 9437184, mediator 0 2026-01-04T02:37:51.082892Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[36] execute at tablet 9437184 2026-01-04T02:37:51.083147Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-04T02:37:51.083177Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=20;result=not_found; 2026-01-04T02:37:51.083227Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000202, ss: 20} ttl settings: { Version: 1 } at tablet 9437184 2026-01-04T02:37:51.083301Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000202; 2026-01-04T02:37:51.083345Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine.h:158;event=RegisterTable;path_id=1000000202; 2026-01-04T02:37:51.083510Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-04T02:37:51.083982Z node 1 :TX_COLUMNSHARD 
DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=119;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000202; 2026-01-04T02:37:51.095585Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[36] complete at tablet 9437184 CreateTable: { SeqNo { Generation: 20 } EnsureTables { Tables { PathId: 21 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4609 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-04T02:37:51.096871Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=120;this=137274239775424;method=TTxController::StartProposeOnExecute;tx_info=120:TX_KIND_SCHEMA;min=1767494270950;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-04T02:37:51.108920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1767494270950;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=137274239775424;op_tx=120:TX_KIND_SCHEMA;min=1767494270950;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-04T02:37:51.109004Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=120:TX_KIND_SCHEMA;min=1767494270950;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;this=137274239775424;op_tx=120:TX_KIND_SCHEMA;min=1767494270950;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=020:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=120; CreateTable: { SeqNo { Generation: 21 } EnsureTables { Tables { PathId: 22 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4610 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } 
Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-04T02:37:51.110213Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=121;this=137274239777216;method=TTxController::StartProposeOnExecute;tx_info=121:TX_KIND_SCHEMA;min=1767494270952;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-04T02:37:51.122267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1767494270952;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=137274239777216;op_tx=121:TX_KIND_SCHEMA;min=1767494270952;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-04T02:37:51.122327Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=121:TX_KIND_SCHEMA;min=1767494270952;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;this=137274239777216;op_tx=121:TX_KIND_SCHEMA;min=1767494270952;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=021:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=121; CreateTable: { SeqNo { Generation: 22 } EnsureTables { Tables { PathId: 23 SchemaPreset { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "k0" TypeId: 4612 } Columns { Id: 2 Name: "resource_type" TypeId: 4608 } Columns { Id: 3 Name: "resource_id" TypeId: 4608 DataAccessorConstructor { ClassName: "SPARSED" } } Columns { Id: 4 Name: "uid" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 5 Name: "level" TypeId: 1 } Columns { Id: 6 Name: "message" TypeId: 4608 StorageId: "__MEMORY" } Columns { Id: 7 Name: "json_payload" TypeId: 4610 } Columns { Id: 8 Name: "ingested_at" TypeId: 50 } Columns { Id: 9 Name: "saved_at" TypeId: 50 } Columns { Id: 10 Name: "request_id" TypeId: 4608 } KeyColumnNames: "k0" KeyColumnNames: "resource_type" KeyColumnNames: "resource_id" KeyColumnNames: "uid" Indexes { Id: 1004 Name: "MAX::INDEX::level" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 5 } } Indexes { Id: 1007 Name: "MAX::INDEX::ingested_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 8 } } Indexes { Id: 1008 Name: "MAX::INDEX::saved_at" StorageId: "__LOCAL_METADATA" InheritPortionStorage: false ClassName: "MAX" MaxIndex { ColumnId: 9 } } } } TtlSettings { Version: 1 } } } } 2026-01-04T02:37:51.123629Z node 1 :TX_COLUMNSHARD_TX ERROR: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];ev=NKikimr::TEvColumnShard::TEvProposeTransaction;tablet_id=9437184;tx_id=122;this=137274239779008;method=TTxController::StartProposeOnExecute;tx_info=122:TX_KIND_SCHEMA;min=1767494270953;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=tx_controller.cpp:350;error=problem on start;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription; 2026-01-04T02:37:51.135780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1767494270953;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=137274239779008;op_tx=122:TX_KIND_SCHEMA;min=1767494270953;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:157:2180]; 2026-01-04T02:37:51.135853Z node 1 :TX_COLUMNSHARD ERROR: log.cpp:841: tablet_id=9437184;request_tx=122:TX_KIND_SCHEMA;min=1767494270953;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;this=137274239779008;op_tx=122:TX_KIND_SCHEMA;min=1767494270953;max=18446744073709551615;plan=0;src=[1:157:2180];cookie=022:0;;fline=propose_tx.cpp:23;message=Invalid schema: Column errors: key column k0 has unsupported type NKikimrSchemeOp.TOlapColumnDescription;tablet_id=9437184;tx_id=122; |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot-Internal+FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-04T02:37:24.523511Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:37:24.529613Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:37:24.530118Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:24.556342Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:24.556611Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:24.563387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:24.563616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:24.563819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:24.563957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:24.564044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:24.564133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:24.564193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:24.564276Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:24.564375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:24.564442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:24.564544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:24.564669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:24.564793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:24.585497Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:37:24.587849Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:24.587976Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:24.588032Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:24.588179Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:24.588310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:24.588367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:24.588398Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:24.588472Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:24.588535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:24.588568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:24.588587Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:24.588788Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:24.588852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:24.588888Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:24.588911Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:24.588970Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:24.589005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:24.589031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:24.589047Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:24.589086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:24.589114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:24.589132Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-04T02:37:24.589176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:24.589205Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:24.589222Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:24.589386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:24.589436Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:24.589486Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:24.589621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:24.589671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:24.589702Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:24.589761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:24.589822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:24.589853Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:24.589900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:24.589938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
s;result=1;count=1000;finished=1; 2026-01-04T02:37:51.276291Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:37:51.276345Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:37:51.276614Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:37:51.276823Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:51.276868Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:37:51.277081Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-04T02:37:51.277150Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=timestamp; 2026-01-04T02:37:51.277543Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:484:2488];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2026-01-04T02:37:51.277747Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:51.277938Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:51.278128Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:51.278345Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:37:51.278528Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:51.278697Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:51.279016Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:485:2489] finished for tablet 9437184 2026-01-04T02:37:51.279581Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:484:2488];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.008}],"full":{"a":27267469,"name":"_full_task","f":27267469,"d_finished":0,"c":0,"l":27276216,"d":8747},"events":[{"name":"bootstrap","f":27267743,"d_finished":1395,"c":1,"l":27269138,"d":1395},{"a":27275452,"name":"ack","f":27273715,"d_finished":1582,"c":1,"l":27275297,"d":2346},{"a":27275429,"name":"processing","f":27269285,"d_finished":3660,"c":3,"l":27275301,"d":4447},{"name":"ProduceResults","f":27268615,"d_finished":2646,"c":6,"l":27275867,"d":2646},{"a":27275873,"name":"Finish","f":27275873,"d_finished":0,"c":0,"l":27276216,"d":343},{"name":"task_result","f":27269307,"d_finished":2009,"c":2,"l":27273500,"d":2009}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:51.279672Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:484:2488];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:37:51.280220Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:484:2488];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ProduceResults","f_Finish"],"t":0.008},{"events":["l_ack","l_processing","l_Finish"],"t":0.009}],"full":{"a":27267469,"name":"_full_task","f":27267469,"d_finished":0,"c":0,"l":27276850,"d":9381},"events":[{"name":"bootstrap","f":27267743,"d_finished":1395,"c":1,"l":27269138,"d":1395},{"a":27275452,"name":"ack","f":27273715,"d_finished":1582,"c":1,"l":27275297,"d":2980},{"a":27275429,"name":"processing","f":27269285,"d_finished":3660,"c":3,"l":27275301,"d":5081},{"name":"ProduceResults","f":27268615,"d_finished":2646,"c":6,"l":27275867,"d":2646},{"a":27275873,"name":"Finish","f":27275873,"d_finished":0,"c":0,"l":27276850,"d":977},{"name":"task_result","f":27269307,"d_finished":2009,"c":2,"l":27273500,"d":2009}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:51.280317Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:37:51.268513Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59184;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59184;selected_rows=0; 2026-01-04T02:37:51.280371Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:37:51.280539Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:485:2489];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot-Internal-FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-04T02:37:19.464866Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:37:19.476927Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:37:19.480543Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:19.510330Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:19.510568Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:19.523361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:19.523633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:19.523916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:19.524054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:19.524167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:19.524281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:19.524381Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:19.524525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:19.524608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:19.524703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:19.524806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:19.524887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:19.524963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:19.550333Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:37:19.556215Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:19.556374Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:19.556453Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:19.556660Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:19.557943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:19.558045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:19.558093Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:19.558222Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:19.558286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:19.558328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:19.558358Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:19.558585Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:19.558660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:19.558741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:19.558775Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:19.558862Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:19.558918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:19.558963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:19.558995Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:19.559051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:19.559091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:19.559117Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:19.559161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:19.559195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:19.559221Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:19.559457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:19.559512Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:19.559545Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:19.559662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:19.559725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:19.559756Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:19.559817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:19.559869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:19.559896Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:19.559958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:19.559995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... =1; 2026-01-04T02:37:50.926130Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:37:50.926191Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:37:50.926401Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:37:50.926631Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=saved_at: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:50.926692Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:37:50.926901Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-04T02:37:50.926971Z node 5 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=saved_at; 2026-01-04T02:37:50.927288Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2622];bytes=16000;rows=2000;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2026-01-04T02:37:50.927519Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:50.927686Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:50.927876Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:50.928076Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:37:50.928215Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:50.928336Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:50.928638Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:618:2623] finished for tablet 9437184 2026-01-04T02:37:50.929235Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:617:2622];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.014}],"full":{"a":32008862,"name":"_full_task","f":32008862,"d_finished":0,"c":0,"l":32023411,"d":14549},"events":[{"name":"bootstrap","f":32009192,"d_finished":1628,"c":1,"l":32010820,"d":1628},{"a":32022758,"name":"ack","f":32019324,"d_finished":3153,"c":2,"l":32022628,"d":3806},{"a":32022744,"name":"processing","f":32011039,"d_finished":6936,"c":5,"l":32022632,"d":7603},{"name":"ProduceResults","f":32010206,"d_finished":4396,"c":9,"l":32023059,"d":4396},{"a":32023064,"name":"Finish","f":32023064,"d_finished":0,"c":0,"l":32023411,"d":347},{"name":"task_result","f":32011065,"d_finished":3659,"c":3,"l":32019088,"d":3659}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:50.929318Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2622];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:37:50.929932Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:617:2622];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.014},{"events":["l_ack","l_processing","l_Finish"],"t":0.015}],"full":{"a":32008862,"name":"_full_task","f":32008862,"d_finished":0,"c":0,"l":32024074,"d":15212},"events":[{"name":"bootstrap","f":32009192,"d_finished":1628,"c":1,"l":32010820,"d":1628},{"a":32022758,"name":"ack","f":32019324,"d_finished":3153,"c":2,"l":32022628,"d":4469},{"a":32022744,"name":"processing","f":32011039,"d_finished":6936,"c":5,"l":32022632,"d":8266},{"name":"ProduceResults","f":32010206,"d_finished":4396,"c":9,"l":32023059,"d":4396},{"a":32023064,"name":"Finish","f":32023064,"d_finished":0,"c":0,"l":32024074,"d":1010},{"name":"task_result","f":32011065,"d_finished":3659,"c":3,"l":32019088,"d":3659}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:50.930058Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:37:50.911704Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=59748;inserted_portions_bytes=61952;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=121700;selected_rows=0; 2026-01-04T02:37:50.930110Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:37:50.930302Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |89.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpRboPg::Bench_10Joins [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboYql::TPCH_YQL [GOOD] Test command err: Trying to start YDB, gRPC: 12406, MsgBus: 23180 2026-01-04T02:36:55.554267Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329849600936931:2070];send_to=[0:7307199536658146131:7762515]; 
2026-01-04T02:36:55.554366Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:55.807879Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:55.823140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:55.823280Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:55.882779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:55.885881Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:55.889182Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329849600936893:2081] 1767494215552523 != 1767494215552526 2026-01-04T02:36:56.016732Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:36:56.057244Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:56.057269Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:56.057294Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:56.057445Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:56.505549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:56.561851Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:58.728188Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329862485839681:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:58.728423Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329862485839672:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:58.728526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:58.730760Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329862485839688:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:58.730872Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:36:58.733136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:36:58.749672Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329862485839686:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:36:58.865986Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329862485839740:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 1338, MsgBus: 30052 2026-01-04T02:37:00.079587Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591329873717171562:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:00.079718Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:00.122954Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:37:00.231181Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:00.231258Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:00.264090Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329873717171523:2081] 1767494220075854 != 1767494220075857 2026-01-04T02:37:00.273332Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:00.277917Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:00.362781Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:00.390440Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:00.390466Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:00.390473Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:00.390556Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:00.526095Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:00.900685Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:00.911189Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 
2026-01-04T02:37:01.120271Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:03.453946Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329886602074293:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:03.454052Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:03.457929Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329886602074303:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:03.458015Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:03.594890Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:37:03.816562Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[2:7591329886602074553:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:03.816565Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[2:7591329886602074572:2348];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:03.816890Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[2:7591329886602074572:2348];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:03.817078Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[2:7591329886602074572:2348];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:03.817193Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[2:7591329886602074572:2348];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:03.817262Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[2:7591329886602074553:2337];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37: ... 
X_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.315665Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.315690Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.315712Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.315756Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.315774Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.327959Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.327959Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.328024Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.328031Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.328051Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.328052Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.340150Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.340230Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 
2026-01-04T02:37:40.340255Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.340748Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.340815Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.340834Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.351221Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.351302Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.351326Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.353940Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.354012Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.354033Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.361432Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.361505Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.361527Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038006;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.363677Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.363727Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.363746Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.371174Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.371252Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.371276Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.372048Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.372105Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.372123Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710665;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=45;result=not_found; 2026-01-04T02:37:40.415935Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330044358424622:3016], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:40.416024Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:40.416163Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330044358424627:3019], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:40.416502Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330044358424629:3020], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:40.416609Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:40.420422Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710666:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:37:40.435119Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591330044358424630:3021], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710666 completed, doublechecking } 2026-01-04T02:37:40.505764Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591330044358424686:5413] txid# 281474976710667, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 49], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:37:45.385591Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:37:45.385635Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD] >> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_minikql/unittest >> TTxDataShardMiniKQL::WriteAndReadMany [GOOD] Test command err: 2026-01-04T02:34:43.149764Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:34:43.151514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:43.151567Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:43.161275Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:34:43.161609Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:34:43.161925Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:34:43.207946Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:34:43.215804Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:34:43.215900Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:34:43.217614Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:34:43.217698Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:34:43.217747Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:34:43.218174Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:34:43.219560Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:34:43.219681Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:34:43.290729Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:34:43.327860Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:34:43.328091Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:34:43.328216Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change 
sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:34:43.328274Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:34:43.328333Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:34:43.328374Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:34:43.328592Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:34:43.328634Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:34:43.328949Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:34:43.329057Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:34:43.329122Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:34:43.329157Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:34:43.329190Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:34:43.329223Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:34:43.329273Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:34:43.329335Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:34:43.329375Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:34:43.329457Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:34:43.329504Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:34:43.329543Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:34:43.338082Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:34:43.338180Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:34:43.338276Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:34:43.338486Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:34:43.338534Z 
node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:34:43.338580Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:34:43.338639Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:34:43.338699Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:34:43.338743Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:34:43.338778Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:34:43.339121Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:34:43.339161Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:34:43.339198Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:34:43.339239Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:34:43.339291Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:34:43.339353Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:34:43.339385Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:34:43.339420Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:34:43.339456Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:34:43.354591Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:34:43.354675Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:34:43.354710Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:34:43.354751Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:34:43.354858Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:34:43.355446Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:34:43.355498Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:34:43.355540Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:34:43.355734Z node 1 
:TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:34:43.355772Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:34:43.355954Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:34:43.356004Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:34:43.356065Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:34:43.356119Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:34:43.367051Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:34:43.367172Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:34:43.367484Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:34:43.367530Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:34:43.367604Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:34:43.367647Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:34:43.367681Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:34:43.367743Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:34:43.367782Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
execution plan for [0:10] at 9437184 executing on unit ExecuteDataTx 2026-01-04T02:37:45.128627Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:10] at 9437184 to execution unit FinishPropose 2026-01-04T02:37:45.128663Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:10] at 9437184 on unit FinishPropose 2026-01-04T02:37:45.128708Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 10 at tablet 9437184 send to client, exec latency: 8 ms, propose latency: 8 ms, status: COMPLETE 2026-01-04T02:37:45.128773Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:10] at 9437184 is DelayComplete 2026-01-04T02:37:45.128801Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:10] at 9437184 executing on unit FinishPropose 2026-01-04T02:37:45.128831Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:10] at 9437184 to execution unit CompletedOperations 2026-01-04T02:37:45.128862Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:10] at 9437184 on unit CompletedOperations 2026-01-04T02:37:45.128909Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:10] at 9437184 is Executed 2026-01-04T02:37:45.128933Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:10] at 9437184 executing on unit CompletedOperations 2026-01-04T02:37:45.128960Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:10] at 9437184 has finished 2026-01-04T02:37:45.133528Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:37:45.133601Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:10] at 9437184 on unit FinishPropose 2026-01-04T02:37:45.133654Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:37:47.690951Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [3:103:2137], Recipient [3:237:2229]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 12884904025 } 2026-01-04T02:37:47.691001Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState 2026-01-04T02:37:47.691265Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:494:2468], Recipient [3:237:2229]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:47.691291Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:37:47.691316Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [3:493:2467], serverId# [3:494:2468], sessionId# [0:0:0] 2026-01-04T02:37:47.691467Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [3:103:2137], Recipient [3:237:2229]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 103 RawX2: 12884904025 } TxBody: "\032\354\002\037\010\0021\010key1\010key2\nvalue\005\205\n\205\002\205\004\206\205\006\207\203\004\207\203\001H\207\203\001H\006\n\016\203\014\020List$Truncated\002\205\004\205\002?\022\002\205\000\034MyReads MyWrites\205\004\205\002?\022\002\206\202\024Reply\024Write?\030\205\002\206\203\010\002 
AllReads\030MyKeys\014Run4ShardsForRead4ShardsToWrite\005?\024)\211\026?\022\203\005\004\200\205\006\203\004\203\004\203\004\006\n\016\213\004\203\004\207\203\001H\213\002\203\004\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?* h\020\000\000\000\000\000\000\016\000\000\000\000\000\000\000?\014\005?2\003?,D\003?.F\003?0p\007\013?:\003?4\000\'?8\003\013?>\003?<\003j\030\001\003?@\000\003?B\000\003?D\007\240%&\003?F\000\006\004?J\003\203\014\000\003\203\014\000\003\003?L\000\377\007\002\000\005?\032\005?\026?x\000\005?\030\003\005? \005?\034?x\000\006 2026-01-04T02:37:47.691492Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:37:47.691544Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:47.692115Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit CheckDataTx 2026-01-04T02:37:47.706581Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-01-04T02:37:47.706656Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit CheckDataTx 2026-01-04T02:37:47.706685Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-04T02:37:47.706712Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit BuildAndWaitDependencies 2026-01-04T02:37:47.706760Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 9437184 CompleteEdge# v1000001/1 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v1000001/18446744073709551615 ImmediateWriteEdgeReplied# v1000001/18446744073709551615 2026-01-04T02:37:47.706812Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:11] at 9437184 2026-01-04T02:37:47.706839Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-01-04T02:37:47.706857Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-04T02:37:47.706873Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit BlockFailPoint 2026-01-04T02:37:47.706891Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit BlockFailPoint 2026-01-04T02:37:47.706921Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-01-04T02:37:47.706943Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit BlockFailPoint 2026-01-04T02:37:47.706961Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit ExecuteDataTx 2026-01-04T02:37:47.706976Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2026-01-04T02:37:47.723913Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:11] at 9437184 exceeded memory limit 4194304 and requests 33554432 more for the next try 2026-01-04T02:37:47.724196Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 11 released its data 2026-01-04T02:37:47.724236Z node 3 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Restart 2026-01-04T02:37:47.745231Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:47.745282Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2026-01-04T02:37:47.745963Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 11 at 9437184 restored its data 2026-01-04T02:37:47.905346Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:195: Tablet 9437184 is not ready for [0:11] execution 2026-01-04T02:37:47.906961Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 11 released its data 2026-01-04T02:37:47.907033Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Restart 2026-01-04T02:37:48.122945Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:48.123006Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2026-01-04T02:37:48.123624Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 11 at 9437184 restored its data 2026-01-04T02:37:48.309357Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:181: Operation [0:11] at 9437184 exceeded memory limit 37748736 and requests 301989888 more for the next try 2026-01-04T02:37:48.310804Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 11 released its data 2026-01-04T02:37:48.310872Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Restart 2026-01-04T02:37:48.317832Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:37:48.317887Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit ExecuteDataTx 2026-01-04T02:37:48.318512Z node 3 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 11 at 9437184 restored its data 2026-01-04T02:37:49.267532Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [0:11] at tablet 9437184 with status COMPLETE 2026-01-04T02:37:49.267642Z node 3 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [0:11] at 9437184: {NSelectRow: 0, NSelectRange: 1, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 129871, SelectRangeBytes: 40000268, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-04T02:37:49.267709Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-01-04T02:37:49.267748Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit ExecuteDataTx 2026-01-04T02:37:49.267782Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit FinishPropose 2026-01-04T02:37:49.267816Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit FinishPropose 2026-01-04T02:37:49.267860Z node 3 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 11 at tablet 9437184 send to client, exec latency: 6 ms, propose latency: 6 ms, status: COMPLETE 2026-01-04T02:37:49.267942Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: 
Execution status for [0:11] at 9437184 is DelayComplete 2026-01-04T02:37:49.267970Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit FinishPropose 2026-01-04T02:37:49.267998Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 9437184 to execution unit CompletedOperations 2026-01-04T02:37:49.268027Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 9437184 on unit CompletedOperations 2026-01-04T02:37:49.268069Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 9437184 is Executed 2026-01-04T02:37:49.268092Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 9437184 executing on unit CompletedOperations 2026-01-04T02:37:49.268117Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:11] at 9437184 has finished 2026-01-04T02:37:49.273087Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:37:49.273157Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:11] at 9437184 on unit FinishPropose 2026-01-04T02:37:49.273207Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_minikql/unittest >> MoveTable::WithUncomittedData |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/rbo/unittest >> KqpRboPg::Bench_10Joins [GOOD] Test command err: Trying to start YDB, gRPC: 19707, MsgBus: 62257 2026-01-04T02:36:56.851465Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329855022609365:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:36:56.851526Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:36:57.171274Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:36:57.185351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:36:57.185474Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:36:57.280718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:36:57.334821Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:36:57.337910Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329855022609152:2081] 1767494216817713 != 1767494216817716 2026-01-04T02:36:57.394412Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-04T02:36:57.410115Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:36:57.410133Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:36:57.410151Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:36:57.410222Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:36:57.817960Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:36:57.940014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:36:57.950323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:37:00.174830Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329872202479223:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:00.174946Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:00.175309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329872202479233:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:00.177524Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:00.463928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:37:01.030139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7591329872202479737:2329];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:01.030382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7591329872202479737:2329];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:01.030532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7591329872202479737:2329];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:01.030625Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7591329872202479737:2329];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:01.030714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7591329872202479737:2329];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:01.030774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7591329872202479737:2329];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:01.030854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7591329872202479737:2329];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:01.030943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7591329872202479737:2329];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:01.031013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7591329872202479737:2329];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:01.031082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7591329872202479737:2329];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:01.031133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7591329872202479737:2329];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 
2026-01-04T02:37:01.031248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7591329872202479737:2329];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:01.031308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037944;self_id=[1:7591329872202479737:2329];tablet_id=72075186224037944;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:01.042643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7591329872202479738:2330];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:01.042729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7591329872202479738:2330];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:01.042924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7591329872202479738:2330];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:01.043017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7591329872202479738:2330];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:01.043137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7591329872202479738:2330];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:01.043245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7591329872202479738:2330];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:01.043352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7591329872202479738:2330];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:01.043464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7591329872202479738:2330];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:01.043557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7591329872202479738:2330];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:01.043642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7591329872202479738:2330];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:01.043725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037943;self_id=[1:7591329872202479738:2330];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:01.043813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7591329872202479738:2330];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:01.043904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;self_id=[1:7591329872202479738:2330];tablet_id=72075186224037943;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:01.066961Z node ... iptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:39.257248Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:39.538831Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Trying to start YDB, gRPC: 24657, MsgBus: 6904 2026-01-04T02:37:42.761855Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591330054451282673:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:42.761937Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:42.777986Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:42.935433Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591330054451282635:2081] 1767494262760714 != 1767494262760717 2026-01-04T02:37:42.951475Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:42.954191Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:42.954297Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:42.964168Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:43.001254Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:43.001290Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:43.001302Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:43.001409Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-04T02:37:43.028050Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:43.455802Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:43.532417Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591330035625166485:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:43.532538Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:37:43.768398Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:46.624603Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591330071631152701:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:46.624716Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:46.625057Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591330071631152710:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:46.625126Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:46.684776Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:46.720883Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:46.756173Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:46.789873Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:46.821513Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:46.852721Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:46.886351Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:46.988152Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:47.022247Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:47.056568Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:47.102892Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591330075926120732:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:47.102962Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591330075926120737:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:47.102990Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:47.103170Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591330075926120740:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:47.103230Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:47.107306Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715668:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:37:47.120745Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591330075926120739:2402], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715668 completed, doublechecking } 2026-01-04T02:37:47.183868Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591330075926120792:3009] txid# 281474976715669, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 51], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:37:47.762032Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591330054451282673:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:47.762118Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal-FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-04T02:37:19.467850Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:37:19.476901Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:37:19.480546Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:19.509517Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:19.510560Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:19.523355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:19.523638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:19.523905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:19.524046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:19.524160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:19.524230Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:19.524314Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:19.524468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:19.524590Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:19.524693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:19.524808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:19.524926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:19.525023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:19.548864Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:37:19.556220Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:19.556401Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:19.556471Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:19.556677Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:19.558029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:19.558142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:19.558194Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:19.558319Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:19.558415Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:19.558461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:19.558508Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:19.558740Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:19.558823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:19.558876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:19.558912Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:19.559019Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:19.559079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:19.559121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:19.559150Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:19.559211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:19.559251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:19.559286Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:19.559334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:19.559379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:19.559420Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:19.559667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:19.559730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:19.559777Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:19.559945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:19.559999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:19.560033Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:19.560099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:19.560158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:19.560191Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:19.560247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:19.560299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
nished=1; 2026-01-04T02:37:53.318804Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:37:53.318846Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:37:53.319184Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:37:53.319452Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=saved_at: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:53.319514Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:37:53.319739Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-04T02:37:53.319823Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=saved_at; 2026-01-04T02:37:53.320208Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:758:2738];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2026-01-04T02:37:53.320459Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:53.320638Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:53.320852Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:53.321141Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:37:53.321384Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:53.321629Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:53.322010Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:759:2739] finished for tablet 9437184 2026-01-04T02:37:53.322685Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:758:2738];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":34701651,"name":"_full_task","f":34701651,"d_finished":0,"c":0,"l":34712363,"d":10712},"events":[{"name":"bootstrap","f":34701923,"d_finished":1670,"c":1,"l":34703593,"d":1670},{"a":34711372,"name":"ack","f":34709414,"d_finished":1767,"c":1,"l":34711181,"d":2758},{"a":34711346,"name":"processing","f":34703838,"d_finished":4298,"c":3,"l":34711188,"d":5315},{"name":"ProduceResults","f":34702984,"d_finished":3179,"c":6,"l":34711927,"d":3179},{"a":34711933,"name":"Finish","f":34711933,"d_finished":0,"c":0,"l":34712363,"d":430},{"name":"task_result","f":34703863,"d_finished":2435,"c":2,"l":34709136,"d":2435}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:53.322792Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:758:2738];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:37:53.323432Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:758:2738];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":34701651,"name":"_full_task","f":34701651,"d_finished":0,"c":0,"l":34713118,"d":11467},"events":[{"name":"bootstrap","f":34701923,"d_finished":1670,"c":1,"l":34703593,"d":1670},{"a":34711372,"name":"ack","f":34709414,"d_finished":1767,"c":1,"l":34711181,"d":3513},{"a":34711346,"name":"processing","f":34703838,"d_finished":4298,"c":3,"l":34711188,"d":6070},{"name":"ProduceResults","f":34702984,"d_finished":3179,"c":6,"l":34711927,"d":3179},{"a":34711933,"name":"Finish","f":34711933,"d_finished":0,"c":0,"l":34713118,"d":1185},{"name":"task_result","f":34703863,"d_finished":2435,"c":2,"l":34709136,"d":2435}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:53.323535Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:37:53.309225Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59288;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59288;selected_rows=0; 2026-01-04T02:37:53.323601Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:37:53.323807Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:759:2739];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/rbo/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |89.6%| [TA] $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} |89.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... results_accumulator.log} |89.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_minikql/test-results/unittest/{meta.json ... 
results_accumulator.log} >> MoveTable::WithUncomittedData [GOOD] |89.6%| [TA] $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... results_accumulator.log} |89.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... results_accumulator.log} |89.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/rbo/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::HotTiersTtl |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> MoveTable::WithUncomittedData [GOOD] Test command err: 2026-01-04T02:37:54.482369Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:54.502960Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:54.503179Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:54.508546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:54.508711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:54.508904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:54.509010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:54.509096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:54.509158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:54.509217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:54.509302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:54.509392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:54.509472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:54.509541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:54.509609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:54.509682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:54.529838Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:54.529945Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:54.529983Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:54.530137Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:54.530285Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:54.530376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:54.530412Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:54.530488Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:54.530534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:54.530559Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:54.530580Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:54.530706Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:54.530749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:54.530773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:54.530795Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:54.530852Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:54.530885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:54.530912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:54.530928Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:54.530969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:54.531005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:54.531033Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:54.531059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:54.531107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:54.531142Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:54.531302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:54.531337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:54.531363Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:54.531454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:54.531481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:54.531499Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:54.531527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:54.531557Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:54.531574Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:54.531615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:54.531641Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:37:54.531669Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:37:54.531763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:37:54.531792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
,"id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10}]},"o":"1,2,3,4,5,6,7,8,9,10","t":"FetchOriginalData"},"w":20,"id":21},"8":{"p":{"i":"4","p":{"address":{"name":"uid","id":4}},"o":"4","t":"AssembleOriginalData"},"w":25,"id":8},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":25,"id":2},"18":{"p":{"i":"9","p":{"address":{"name":"saved_at","id":9}},"o":"9","t":"AssembleOriginalData"},"w":25,"id":18},"0":{"p":{"i":"1,2,3,4,5,6,7,8,9,10","t":"Projection"},"w":250,"id":0},"4":{"p":{"i":"2","p":{"address":{"name":"resource_type","id":2}},"o":"2","t":"AssembleOriginalData"},"w":25,"id":4},"20":{"p":{"i":"10","p":{"address":{"name":"request_id","id":10}},"o":"10","t":"AssembleOriginalData"},"w":25,"id":20},"16":{"p":{"i":"8","p":{"address":{"name":"ingested_at","id":8}},"o":"8","t":"AssembleOriginalData"},"w":25,"id":16},"14":{"p":{"i":"7","p":{"address":{"name":"json_payload","id":7}},"o":"7","t":"AssembleOriginalData"},"w":25,"id":14},"10":{"p":{"i":"5","p":{"address":{"name":"level","id":5}},"o":"5","t":"AssembleOriginalData"},"w":25,"id":10},"6":{"p":{"i":"3","p":{"address":{"name":"resource_id","id":3}},"o":"3","t":"AssembleOriginalData"},"w":25,"id":6},"22":{"p":{"p":{"data":[{"name":"timestamp","id":1},{"name":"resource_type","id":2},{"name":"resource_id","id":3},{"name":"uid","id":4},{"name":"level","id":5},{"name":"message","id":6},{"name":"json_payload","id":7},{"name":"ingested_at","id":8},{"name":"saved_at","id":9},{"name":"request_id","id":10}]},"o":"0","t":"ReserveMemory"},"w":0,"id":22},"12":{"p":{"i":"6","p":{"address":{"name":"message","id":6}},"o":"6","t":"AssembleOriginalData"},"w":25,"id":12}}}; 2026-01-04T02:37:55.290455Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=11;scan_id=0;gen=0;table=;snapshot={1767494275466:11};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-04T02:37:55.350730Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=11;scan_id=0;gen=0;table=;snapshot={1767494275466:11};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:328:2340];trace_detailed=; 2026-01-04T02:37:55.352381Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:92;ff_first=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);; 2026-01-04T02:37:55.352666Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; 2026-01-04T02:37:55.353236Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:55.353458Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:55.353762Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:328:2340];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:37:55.353998Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:328:2340];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:55.354212Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:328:2340];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 
2026-01-04T02:37:55.354489Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:328:2340] finished for tablet 9437184 2026-01-04T02:37:55.355032Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:328:2340];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:322:2334];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.003}],"full":{"a":1300361,"name":"_full_task","f":1300361,"d_finished":0,"c":0,"l":1304275,"d":3914},"events":[{"name":"bootstrap","f":1300714,"d_finished":2522,"c":1,"l":1303236,"d":2522},{"a":1303451,"name":"ack","f":1303451,"d_finished":0,"c":0,"l":1304275,"d":824},{"a":1303425,"name":"processing","f":1303425,"d_finished":0,"c":0,"l":1304275,"d":850},{"name":"ProduceResults","f":1302718,"d_finished":943,"c":2,"l":1303979,"d":943},{"a":1303983,"name":"Finish","f":1303983,"d_finished":0,"c":0,"l":1304275,"d":292}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:55.355103Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:328:2340];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:322:2334];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:37:55.355479Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:328:2340];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:322:2334];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.003},{"events":["l_ack","l_processing","l_Finish"],"t":0.004}],"full":{"a":1300361,"name":"_full_task","f":1300361,"d_finished":0,"c":0,"l":1304881,"d":4520},"events":[{"name":"bootstrap","f":1300714,"d_finished":2522,"c":1,"l":1303236,"d":2522},{"a":1303451,"name":"ack","f":1303451,"d_finished":0,"c":0,"l":1304881,"d":1430},{"a":1303425,"name":"processing","f":1303425,"d_finished":0,"c":0,"l":1304881,"d":1456},{"name":"ProduceResults","f":1302718,"d_finished":943,"c":2,"l":1303979,"d":943},{"a":1303983,"name":"Finish","f":1303983,"d_finished":0,"c":0,"l":1304881,"d":898}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:55.355586Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:328:2340];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:37:55.290428Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-04T02:37:55.355630Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:328:2340];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:37:55.355798Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:328:2340];TabletId=9437184;ScanId=0;TxId=11;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=timestamp;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn [GOOD] >> TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl >> TColumnShardTestSchema::RebootForgetAfterFail ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal+FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 
2026-01-04T02:37:20.523596Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:37:20.528524Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:37:20.529043Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:20.560774Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:20.561049Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:20.568504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:20.568758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:20.569008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:20.569153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:20.569258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:20.569357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:20.569456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:20.569630Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:20.569702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:20.569768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:20.569879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:20.569959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:20.570045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:20.593700Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:37:20.597188Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:20.597337Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:20.597434Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:20.597623Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:20.597823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:20.597906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:20.597949Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:20.598074Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:20.598142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:20.598184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:20.598241Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:20.598448Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:20.598507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:20.598551Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:20.598578Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:20.598673Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:20.598732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:20.598776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:20.598804Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:20.598857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:20.598895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:20.598921Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:20.598962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:20.598998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:20.599028Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:20.599245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:20.599332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:20.599366Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:20.599516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:20.599560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:20.599588Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:20.599651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:20.599702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:20.599729Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:20.599777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:20.599815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... UG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:37:56.281187Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:37:56.281508Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:37:56.281766Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:56.281856Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:37:56.282094Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-04T02:37:56.282193Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=timestamp; 2026-01-04T02:37:56.282508Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:512:2515];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2026-01-04T02:37:56.282731Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:56.282896Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:56.283090Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:56.283334Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:37:56.283551Z 
node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:56.283765Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:56.284154Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:513:2516] finished for tablet 9437184 2026-01-04T02:37:56.284843Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:512:2515];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.006},{"events":["f_ack"],"t":0.007},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.009}],"full":{"a":36259889,"name":"_full_task","f":36259889,"d_finished":0,"c":0,"l":36269717,"d":9828},"events":[{"name":"bootstrap","f":36260203,"d_finished":1578,"c":1,"l":36261781,"d":1578},{"a":36268770,"name":"ack","f":36266947,"d_finished":1662,"c":1,"l":36268609,"d":2609},{"a":36268750,"name":"processing","f":36261945,"d_finished":4069,"c":3,"l":36268613,"d":5036},{"name":"ProduceResults","f":36261208,"d_finished":2947,"c":6,"l":36269276,"d":2947},{"a":36269282,"name":"Finish","f":36269282,"d_finished":0,"c":0,"l":36269717,"d":435},{"name":"task_result","f":36261976,"d_finished":2319,"c":2,"l":36266682,"d":2319}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:56.284963Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:512:2515];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 
2026-01-04T02:37:56.285599Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:512:2515];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.006},{"events":["f_ack"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.009},{"events":["l_ack","l_processing","l_Finish"],"t":0.01}],"full":{"a":36259889,"name":"_full_task","f":36259889,"d_finished":0,"c":0,"l":36270497,"d":10608},"events":[{"name":"bootstrap","f":36260203,"d_finished":1578,"c":1,"l":36261781,"d":1578},{"a":36268770,"name":"ack","f":36266947,"d_finished":1662,"c":1,"l":36268609,"d":3389},{"a":36268750,"name":"processing","f":36261945,"d_finished":4069,"c":3,"l":36268613,"d":5816},{"name":"ProduceResults","f":36261208,"d_finished":2947,"c":6,"l":36269276,"d":2947},{"a":36269282,"name":"Finish","f":36269282,"d_finished":0,"c":0,"l":36270497,"d":1215},{"name":"task_result","f":36261976,"d_finished":2319,"c":2,"l":36266682,"d":2319}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:56.285713Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:37:56.272478Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59184;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59184;selected_rows=0; 2026-01-04T02:37:56.285784Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:37:56.285987Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:513:2516];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/sharding/ut/unittest |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/sharding/ut/unittest |89.6%| [TA] $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.6%| [TA] {RESULT} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/sharding/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal-FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-04T02:37:25.342504Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:37:25.347000Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:37:25.347520Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:25.381502Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:25.381799Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:25.389777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:25.390068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:25.390336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:25.390464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:25.390598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:25.390713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:25.390821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:25.390952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:25.391064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2026-01-04T02:37:25.391174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:25.391319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:25.391431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:25.391566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:25.418906Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:37:25.421891Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:25.422061Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:25.422124Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:25.422344Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:25.422537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:25.422632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:25.422678Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:25.422790Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:25.422867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:25.422914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:25.422952Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:25.423182Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:25.423261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:25.423336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:25.423371Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:25.423467Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:25.423523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:25.423577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:25.423608Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:25.423669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:25.423707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:25.423736Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:25.423776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:25.423814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:25.423846Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:25.424088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:25.424143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:25.424177Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:25.424325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:25.424374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:25.424408Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:25.424469Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:25.424519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:25.424548Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:25.424596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:25.424631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
esult=0;count=0;finished=1; 2026-01-04T02:37:57.557635Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:37:57.557677Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:37:57.557852Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:37:57.558032Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=saved_at: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:57.558079Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:37:57.558236Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-04T02:37:57.558297Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=saved_at; 2026-01-04T02:37:57.558535Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:720:2700];bytes=16000;rows=2000;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2026-01-04T02:37:57.558713Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:57.558852Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:57.559007Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:57.559162Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:37:57.559242Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:57.559309Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:57.559582Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:721:2701] finished for tablet 9437184 2026-01-04T02:37:57.560070Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:720:2700];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":32711471,"name":"_full_task","f":32711471,"d_finished":0,"c":0,"l":32722573,"d":11102},"events":[{"name":"bootstrap","f":32711691,"d_finished":1231,"c":1,"l":32712922,"d":1231},{"a":32722076,"name":"ack","f":32719369,"d_finished":2462,"c":2,"l":32721963,"d":2959},{"a":32722064,"name":"processing","f":32713088,"d_finished":5306,"c":5,"l":32721967,"d":5815},{"name":"ProduceResults","f":32712462,"d_finished":3358,"c":9,"l":32722249,"d":3358},{"a":32722252,"name":"Finish","f":32722252,"d_finished":0,"c":0,"l":32722573,"d":321},{"name":"task_result","f":32713110,"d_finished":2749,"c":3,"l":32719204,"d":2749}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:57.560172Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:720:2700];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:37:57.560624Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:720:2700];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":32711471,"name":"_full_task","f":32711471,"d_finished":0,"c":0,"l":32723143,"d":11672},"events":[{"name":"bootstrap","f":32711691,"d_finished":1231,"c":1,"l":32712922,"d":1231},{"a":32722076,"name":"ack","f":32719369,"d_finished":2462,"c":2,"l":32721963,"d":3529},{"a":32722064,"name":"processing","f":32713088,"d_finished":5306,"c":5,"l":32721967,"d":6385},{"name":"ProduceResults","f":32712462,"d_finished":3358,"c":9,"l":32722249,"d":3358},{"a":32722252,"name":"Finish","f":32722252,"d_finished":0,"c":0,"l":32723143,"d":891},{"name":"task_result","f":32713110,"d_finished":2749,"c":3,"l":32719204,"d":2749}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:37:57.560715Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:37:57.546747Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=59748;inserted_portions_bytes=61952;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=121700;selected_rows=0; 2026-01-04T02:37:57.560764Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:37:57.560923Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:721:2701];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout >> TestMalformedRequest::ContentLengthCorrect >> TestMalformedRequest::ContentLengthLower >> TestMalformedRequest::CompressedDeflateContentLengthCorrect >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> 
TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2026-01-04T02:38:01.076132Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:38:01.086659Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:38:01.088059Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-04T02:38:01.088195Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:38:01.088260Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-04T02:38:01.089859Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:259:2253], now have 1 active actors on pipe 2026-01-04T02:38:01.090002Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:38:01.111386Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:38:01.111608Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:38:01.119417Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:38:01.119633Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:38:01.120063Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:38:01.123100Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:267:2224] 2026-01-04T02:38:01.126090Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:38:01.126155Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 
2026-01-04T02:38:01.126196Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:267:2224] 2026-01-04T02:38:01.127229Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:38:01.127326Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:38:01.128232Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. Count 0 2026-01-04T02:38:01.128286Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:38:01.129289Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:01.129333Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:38:01.129379Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:01.129414Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-04T02:38:01.130722Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:38:01.133794Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:38:01.134418Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:270:2258], now have 1 active actors on pipe 2026-01-04T02:38:01.174461Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:38:01.177684Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:38:01.177952Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-01-04T02:38:01.178001Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:38:01.178048Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928138] doesn't have tx writes info 2026-01-04T02:38:01.178729Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [2:398:2354], now have 1 active actors on pipe 2026-01-04T02:38:01.178818Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:38:01.180760Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-04T02:38:01.180853Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:38:01.181500Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928138] Config applied version 2 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-04T02:38:01.181626Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:38:01.181948Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:38:01.182158Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928138][Partition][1][StateInit] bootstrapping 1 [2:406:2325] 2026-01-04T02:38:01.183665Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:1:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:38:01.183707Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:1:Initializer] Initializing completed. 2026-01-04T02:38:01.183737Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928138][Partition][1][StateInit] init complete for topic 'rt3.dc1--topic2' partition 1 generation 2 [2:406:2325] 2026-01-04T02:38:01.183785Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928138][Partition][1][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:38:01.183829Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928138][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:38:01.183865Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928138][Partition][1][StateIdle] Process pending events. 
Count 0 2026-01-04T02:38:01.183890Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928138][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:38:01.183928Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928138][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:01.183951Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928138][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:38:01.183977Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928138][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:01.183999Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928138][Partition][1][StateIdle] Try persist 2026-01-04T02:38:01.184060Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:38:01.184218Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2026-01-04T02:38:01.184624Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [2:409:2359], now have 1 active actors on pipe 2026-01-04T02:38:01.199215Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:38:01.201979Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:38:01.202229Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-04T02:38:01.202271Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:38:01.202313Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-04T02:38:01.202982Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:458:2395], now have 1 active actors on pipe 2026-01-04T02:38:01.203029Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:38:01.204784Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-04T02:38:01.204886Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:38:01.205459Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 3 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-04T02:38:01.205564Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 
2026-01-04T02:38:01.205786Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:38:01.205980Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:466:2366] 2026-01-04T02:38:01.207511Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:38:01.207564Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-04T02:38:01.207605Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:466:2366] 2026-01-04T02:38:01.207644Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:38:01.207689Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:38:01.207718Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-04T02:38:01.207761Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-04T02:38:01.207790Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:01.207813Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T02:38:01.207842Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:01.207867Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-04T02:38:01.207938Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-01-04T02:38:01.208089Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-04T02:38:01.208418Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:469:2400], now have 1 active actors on pipe REQUEST MetaRequest { CmdGetReadSessionsInfo { ClientId: "client_id" Topic: "rt3.dc1--topic1" Topic: "rt3.dc1--topic2" } } Ticket: "client_id@builtin" 2026-01-04T02:38:01.214209Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:476:2403], now have 1 active actors on pipe 2026-01-04T02:38:01.215524Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [2:479:2404], now have 1 active actors on pipe 2026-01-04T02:38:01.215753Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:480:2404], now have 1 active actors on pipe 2026-01-04T02:38:01.217215Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [2:476:2403] destroyed 2026-01-04T02:38:01.219596Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [2:479:2404] destroyed 2026-01-04T02:38:01.219718Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928139] server disconnected, pipe [2:480:2404] destroyed RESULT Status: 1 ErrorCode: OK MetaResponse { CmdGetReadSessionsInfoResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 0 ErrorCode: INITIALIZING ErrorReason: "tablet for partition is not running" } PartitionResult { Partition: 1 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } PartitionResult { Partition: 2 ClientOffset: 0 StartOffset: 0 EndOffset: 0 TimeLag: 0 TabletNode: "::1" ClientReadOffset: 0 ReadTimeLag: 0 TabletNodeId: 2 ErrorCode: OK } ErrorCode: OK } } } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494841.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494841.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493641.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2026-01-04T02:37:22.955513Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:22.977274Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:22.977468Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:22.983276Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:22.983513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:22.983746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:22.983874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:22.983991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:22.984119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:22.984226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:22.984334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:22.984455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:22.984578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:22.984678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:22.984778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:22.984894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:23.008686Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:23.008838Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:23.008894Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:23.009080Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:23.009268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:23.009362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:23.009404Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:23.009494Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:23.009554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:23.009593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:23.009621Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:23.009730Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:23.009826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:23.009874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:23.009902Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:23.009990Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:23.010048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:23.010080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:23.010102Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:23.010135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:23.010166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:23.010196Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:23.010245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:23.010285Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:23.010316Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:23.010485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:23.010533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:23.010561Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:23.010686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:23.010737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:23.010765Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:23.010823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:23.010862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:23.010889Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:23.010930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... E:granule/portions;fline=constructor_portion.cpp:44;memory_size=286;data_size=260;sum=5688;count=20;size_of_portion=192; 2026-01-04T02:38:01.632359Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=1963; 2026-01-04T02:38:01.632416Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=6; 2026-01-04T02:38:01.632862Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=409; 2026-01-04T02:38:01.632900Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=2623; 2026-01-04T02:38:01.632928Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=2711; 2026-01-04T02:38:01.632969Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=8; 2026-01-04T02:38:01.633058Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=22; 2026-01-04T02:38:01.633098Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=3390; 2026-01-04T02:38:01.633225Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=62; 2026-01-04T02:38:01.633308Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=47; 2026-01-04T02:38:01.633397Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=59; 2026-01-04T02:38:01.633467Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=41; 2026-01-04T02:38:01.634904Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1397; 2026-01-04T02:38:01.636142Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1177; 2026-01-04T02:38:01.636191Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2026-01-04T02:38:01.636246Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2026-01-04T02:38:01.636279Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2026-01-04T02:38:01.636333Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=31; 2026-01-04T02:38:01.636369Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-04T02:38:01.636468Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=44; 2026-01-04T02:38:01.636508Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2026-01-04T02:38:01.636551Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=22; 2026-01-04T02:38:01.636602Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=29; 2026-01-04T02:38:01.636742Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=121; 2026-01-04T02:38:01.636766Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=16888; 2026-01-04T02:38:01.636855Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:38:01.636966Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:38:01.637018Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:38:01.637087Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:38:01.646124Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-04T02:38:01.646316Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:38:01.646411Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:38:01.646483Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:38:01.646532Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:01.646591Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:01.646699Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:01.646879Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.010000s; 2026-01-04T02:38:01.648434Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:38:01.649131Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:38:01.649179Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:38:01.649269Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:38:01.649336Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:38:01.649378Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:01.649406Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:01.649469Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:01.649835Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.199000s; 2026-01-04T02:38:01.649884Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1279:3146];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 160000/9739224 |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] >> RetryPolicy::RetryWithBatching [GOOD] >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494844.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494844.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167494844.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494844.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494844.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147494844.000000s;Name=;Codec=}; WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=167494844.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494844.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147493644.000000s;Name=;Codec=}; 2026-01-04T02:37:24.734217Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:24.754392Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:24.754667Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:24.761625Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:24.761837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:24.762010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:24.762118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:24.762205Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:24.762292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:24.762375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:24.762440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:24.762524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:24.762629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:24.762695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:24.762766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:24.762870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:24.787832Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:24.787970Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:24.788024Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:24.788189Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:24.788324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:24.788414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:24.788451Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:24.788515Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:24.788594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:24.788626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:24.788648Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:24.788776Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:24.788833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:24.788864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:24.788886Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:24.788957Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:24.789005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:24.789041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:24.789063Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:24.789099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:24.789126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:24.789148Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:24.789182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:24.789228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:24.789253Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:24.789411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:24.789458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:24.789480Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:24.789582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:24.789624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:24.789649Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:24.789685Z node 1 :TX ... nLoadingTime=11; 2026-01-04T02:38:02.548232Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=90; 2026-01-04T02:38:02.548268Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=7521; 2026-01-04T02:38:02.548310Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=7662; 2026-01-04T02:38:02.548368Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=10; 2026-01-04T02:38:02.548460Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=47; 2026-01-04T02:38:02.548506Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=8275; 2026-01-04T02:38:02.548639Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=82; 2026-01-04T02:38:02.548747Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=59; 2026-01-04T02:38:02.548878Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=78; 2026-01-04T02:38:02.549002Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=69; 2026-01-04T02:38:02.553473Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4414; 2026-01-04T02:38:02.558020Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=4430; 2026-01-04T02:38:02.558115Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2026-01-04T02:38:02.558188Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2026-01-04T02:38:02.558235Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-04T02:38:02.558309Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=41; 2026-01-04T02:38:02.558346Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-04T02:38:02.558436Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=49; 2026-01-04T02:38:02.558502Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=16; 2026-01-04T02:38:02.558585Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=33; 2026-01-04T02:38:02.558664Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=45; 2026-01-04T02:38:02.558944Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=247; 2026-01-04T02:38:02.558986Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=27850; 2026-01-04T02:38:02.559114Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=24365192;raw_bytes=35131129;count=5;records=400000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:38:02.559229Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1794:3663];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:38:02.559283Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1794:3663];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:38:02.559343Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1794:3663];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:38:02.580357Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1794:3663];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-04T02:38:02.580501Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:38:02.580603Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2026-01-04T02:38:02.580679Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767493982458;tx_id=18446744073709551615;;current_snapshot_ts=1767494246050; 2026-01-04T02:38:02.580720Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:38:02.580762Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:02.580796Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:02.580874Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:02.581092Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.050000s; 2026-01-04T02:38:02.582733Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1794:3663];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:38:02.583009Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1794:3663];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:38:02.583058Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1794:3663];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:38:02.583159Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1794:3663];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2026-01-04T02:38:02.583223Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1794:3663];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767493982458;tx_id=18446744073709551615;;current_snapshot_ts=1767494246050; 2026-01-04T02:38:02.583268Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1794:3663];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:38:02.583310Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1794:3663];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:02.583346Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1794:3663];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:02.583441Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1794:3663];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:02.583846Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1794:3663];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.178000s; 2026-01-04T02:38:02.583902Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1794:3663];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 80000/4886744 0/0 |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition >> 
TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneIndexIndexOverlap [GOOD] Test command err: Trying to start YDB, gRPC: 6669, MsgBus: 8052 2026-01-04T02:35:15.444042Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329422709702232:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:15.444174Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:15.773547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:15.773650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:15.780681Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:15.914206Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:15.919639Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:16.013188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:16.013218Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:16.013242Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:16.013343Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:16.169492Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:16.450276Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:16.476541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:16.482528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:35:16.536139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:16.731134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:16.910890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:17.018309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:18.908003Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329435594605853:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:18.908141Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:18.908667Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329435594605863:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:18.908711Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.230926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.261141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.289530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.318795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.356126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.394563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.437783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.478144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.554073Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329439889574031:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.554139Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.554205Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329439889574036:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.554345Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329439889574038:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.554379Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.557427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:19.566369Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329439889574040:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:35:19.667560Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329439889574091:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:35:20.443307Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329422709702232:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:20.443382Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:35:21.289489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689 ... etect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:14.745079Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:14.847745Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:14.847882Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:14.849650Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:14.860386Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591329932501830003:2081] 1767494234701543 != 1767494234701546 2026-01-04T02:37:14.892013Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:14.995874Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:14.995929Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:14.995956Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:14.996116Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:15.000025Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:15.537270Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:37:15.553429Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:15.626157Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:15.752559Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:15.814512Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:15.896540Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:19.481951Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329953976668357:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:19.482052Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:19.484892Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329953976668367:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:19.484976Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:19.621523Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:19.662268Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:19.696897Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:19.705393Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591329932501830033:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:19.705468Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:37:19.732687Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:19.776984Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:19.818699Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:19.872838Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:19.938492Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:20.052719Z node 3 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329958271636538:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:20.052809Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:20.052879Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329958271636543:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:20.053166Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591329958271636545:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:20.053220Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:20.057527Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:37:20.070286Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591329958271636546:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:37:20.133455Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591329958271636598:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:37:22.323859Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:29.822102Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:37:29.822133Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2026-01-04T02:31:41.541700Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:41.541769Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:41.541879Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:31:41.542637Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-04T02:31:41.542685Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:41.542715Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:41.543878Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.009435s 2026-01-04T02:31:41.544584Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-04T02:31:41.544626Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:41.544651Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:41.544712Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007867s 2026-01-04T02:31:41.545271Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2026-01-04T02:31:41.545301Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:41.545327Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:31:41.545389Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007250s 2026-01-04T02:31:41.581502Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1767493901581468 2026-01-04T02:31:41.993896Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328502885503875:2267];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:41.993967Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:42.072119Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:31:42.127646Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591328508244203449:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:42.127797Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:31:42.160210Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:31:42.160617Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:31:42.369297Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:42.386917Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:42.542626Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:31:42.673071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:42.673185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:42.678551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:31:42.678616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:31:42.708236Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:31:42.708616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:42.716340Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:31:42.981547Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:31:42.983531Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:42.983647Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:43.117888Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:31:43.154494Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:31:43.521288Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/0026a5/r3tmp/yandexEjTs9e.tmp 2026-01-04T02:31:43.521319Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/0026a5/r3tmp/yandexEjTs9e.tmp 2026-01-04T02:31:43.521522Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/0026a5/r3tmp/yandexEjTs9e.tmp 2026-01-04T02:31:43.521643Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:31:43.674069Z INFO: TTestServer started on Port 8036 GrpcPort 27754 PQClient connected to localhost:27754 2026-01-04T02:31:44.187798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:31:44.410377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-04T02:31:46.989664Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328502885503875:2267];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:46.989783Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:47.121923Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591328508244203449:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:31:47.122035Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:31:47.787842Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328529719040146:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:47.787922Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328529719040155:2307], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:47.788006Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:47.789662Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591328529719040185:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:47.789720Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:31:47.806654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:31:47.846582Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591328529719040184:2308], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-04T02:31:47.969171Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591328529719040214:2141] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:31:48.223524Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 2146435072 Duration# 0.177144s 2026-01-04T02:31:48.223566Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.177220s 2026-01-04T02:31:48.253614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:31:48.260165Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328528655308893:2338], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you ... 2:38:01.735622Z node 17 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-04T02:38:01.735643Z node 17 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037892' partition 0 offset 0 partno 0 count 10 parts 0 suffix '63' 2026-01-04T02:38:01.735742Z node 17 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 10 count 10 size 1188 from pos 0 cbcount 10 2026-01-04T02:38:01.735806Z node 17 :PERSQUEUE DEBUG: partition_read.cpp:1026: Topic 'rt3.dc1--test-topic' partition 0 user user readTimeStamp done, result 1767494281675 queuesize 0 startOffset 0 2026-01-04T02:38:01.735823Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0] Write session got write response: sequence_numbers: 1 sequence_numbers: 2 sequence_numbers: 3 sequence_numbers: 4 sequence_numbers: 5 sequence_numbers: 6 sequence_numbers: 7 sequence_numbers: 8 sequence_numbers: 9 sequence_numbers: 10 offsets: 0 offsets: 1 offsets: 2 offsets: 3 offsets: 4 offsets: 5 offsets: 6 offsets: 7 offsets: 8 offsets: 9 already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false already_written: false write_statistics { persist_duration_ms: 4 queued_in_partition_duration_ms: 55 } 2026-01-04T02:38:01.735900Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0] Write session: acknoledged message 1 2026-01-04T02:38:01.735985Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0] Write session: acknoledged message 2 2026-01-04T02:38:01.736031Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0] Write session: acknoledged message 3 2026-01-04T02:38:01.736058Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0] Write session: acknoledged message 4 2026-01-04T02:38:01.736092Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0] Write session: acknoledged message 5 2026-01-04T02:38:01.736114Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0] Write session: acknoledged message 6 2026-01-04T02:38:01.736139Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0] Write session: acknoledged message 7 2026-01-04T02:38:01.736165Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0] Write session: acknoledged message 8 2026-01-04T02:38:01.736197Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0] Write session: acknoledged message 9 2026-01-04T02:38:01.736229Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0] Write session: acknoledged message 10 2026-01-04T02:38:01.736531Z :INFO: [/Root] MessageGroupId 
[test-message-group-id] SessionId [test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0] Write session: close. Timeout = 0 ms 2026-01-04T02:38:01.736596Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0] Write session will now close 2026-01-04T02:38:01.736649Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0] Write session: aborting 2026-01-04T02:38:01.737498Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0] Write session is aborting and will not restart 2026-01-04T02:38:01.737502Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0] Write session: gracefully shut down, all writes complete 2026-01-04T02:38:01.737572Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0] Write session: destroy 2026-01-04T02:38:01.737699Z node 17 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 7 sessionId: test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0 grpc read done: success: 0 data: 2026-01-04T02:38:01.737720Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 7 sessionId: test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0 grpc read failed 2026-01-04T02:38:01.737753Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:824: session v1 closed cookie: 7 sessionId: test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0 2026-01-04T02:38:01.737773Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 7 sessionId: test-message-group-id|e9072d92-51b6026a-fb7efdbe-ee1e987f_0 is DEAD 2026-01-04T02:38:01.738221Z node 17 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:38:01.738351Z node 17 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [17:7591330136902871141:2586] destroyed 2026-01-04T02:38:01.738392Z node 17 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-04T02:38:01.738425Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:38:01.738446Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:01.738464Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:38:01.738485Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:01.738501Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:38:01.831302Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:38:01.831345Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:01.831368Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:38:01.831394Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:01.831415Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:38:01.931665Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:38:01.931699Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:01.931721Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:38:01.931746Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:01.931764Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:38:02.032049Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:38:02.032106Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:02.032140Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:38:02.032175Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:02.032199Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:38:02.132393Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:38:02.132439Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:02.132486Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-01-04T02:38:02.132517Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:02.132550Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:38:02.232732Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:38:02.232777Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:02.232802Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:38:02.232828Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:02.232849Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:38:02.333075Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:38:02.333112Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:02.333143Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:38:02.333167Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:02.333189Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:38:02.433494Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:38:02.433547Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:02.433578Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:38:02.433610Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:02.433636Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist >> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn [GOOD] |89.6%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Query [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TestMalformedRequest::ContentLengthNone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnDuplicatedPartition [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have 
Topic field." ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple partition 2 in TopicRequest for topic \'rt3.dc1--topic2\'" ErrorCode: BAD_REQUEST } >> TestMalformedRequest::CompressedDeflateContentLengthCorrect [GOOD] |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnEmptyTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "empty topic in GetTopicMetadata request" ErrorCode: BAD_REQUEST } >> TestMalformedRequest::ContentLengthCorrect [GOOD] |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot+Internal+FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-04T02:37:25.158716Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:37:25.163133Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:37:25.163631Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:25.196354Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:25.196569Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:25.203364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:25.203562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:25.203729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:25.203812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:25.203875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:25.203962Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:25.204073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:25.204223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:25.204326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:25.204432Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:25.204561Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:25.204688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:25.204828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:25.228595Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:37:25.231000Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:25.231167Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:25.231230Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:25.231437Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:25.231628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:25.231710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:25.231775Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 
2026-01-04T02:37:25.231870Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:25.231951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:25.232007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:25.232035Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:25.232189Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:25.232244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:25.232289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:25.232330Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:25.232413Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:25.232462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:25.232522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:25.232559Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:25.232617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:25.232653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:25.232685Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:25.232732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:25.232764Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:25.232782Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:25.233019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:25.233090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:25.233123Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:25.233305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:25.233352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:25.233381Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:25.233439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:25.233499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:25.233537Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:25.233587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:25.233623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
:38:05.060602Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:38:05.060651Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:38:05.061022Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:38:05.061291Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:05.061359Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:38:05.061612Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-04T02:38:05.061731Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=timestamp; 2026-01-04T02:38:05.062097Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:616:2595];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2026-01-04T02:38:05.062368Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:05.062570Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:05.062805Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:05.063060Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:38:05.063296Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:05.063548Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:05.063932Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:617:2596] finished for tablet 9437184 2026-01-04T02:38:05.064636Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:616:2595];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.01}],"full":{"a":40377960,"name":"_full_task","f":40377960,"d_finished":0,"c":0,"l":40388798,"d":10838},"events":[{"name":"bootstrap","f":40378288,"d_finished":1688,"c":1,"l":40379976,"d":1688},{"a":40387801,"name":"ack","f":40385758,"d_finished":1871,"c":1,"l":40387629,"d":2868},{"a":40387779,"name":"processing","f":40380174,"d_finished":4458,"c":3,"l":40387633,"d":5477},{"name":"ProduceResults","f":40379375,"d_finished":3269,"c":6,"l":40388369,"d":3269},{"a":40388375,"name":"Finish","f":40388375,"d_finished":0,"c":0,"l":40388798,"d":423},{"name":"task_result","f":40380202,"d_finished":2501,"c":2,"l":40385452,"d":2501}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:05.064754Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:616:2595];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:38:05.065408Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:616:2595];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":40377960,"name":"_full_task","f":40377960,"d_finished":0,"c":0,"l":40389585,"d":11625},"events":[{"name":"bootstrap","f":40378288,"d_finished":1688,"c":1,"l":40379976,"d":1688},{"a":40387801,"name":"ack","f":40385758,"d_finished":1871,"c":1,"l":40387629,"d":3655},{"a":40387779,"name":"processing","f":40380174,"d_finished":4458,"c":3,"l":40387633,"d":6264},{"name":"ProduceResults","f":40379375,"d_finished":3269,"c":6,"l":40388369,"d":3269},{"a":40388375,"name":"Finish","f":40388375,"d_finished":0,"c":0,"l":40389585,"d":1210},{"name":"task_result","f":40380202,"d_finished":2501,"c":2,"l":40385452,"d":2501}],"id":"9437184::35"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:05.065539Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:38:05.051080Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59184;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59184;selected_rows=0; 2026-01-04T02:38:05.065596Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:38:05.065835Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:617:2596];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; >> TestMalformedRequest::ContentLengthLower [GOOD] |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] >> TestMalformedRequest::CompressedGzipContentLengthNone >> TestMalformedRequest::ContentLengthHigher >> JsonProtoConversion::ProtoMapToJson [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] >> TestMalformedRequest::CompressedDeflateContentLengthNone ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::ProtoMapToJson_ReceiveMessageResult [GOOD] Test command err: 
2026-01-04T02:38:00.483986Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330130594144698:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:00.484110Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:00.736850Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:00.736969Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:00.785998Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:00.800258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:00.872681Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:00.917351Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:00.917387Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:00.917395Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:00.917484Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:00.957240Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:01.105047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:01.262749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:38:01.268735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:38:01.344681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-04T02:38:01.462196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.493126Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:01.502765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.539817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.565793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.591749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.618935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.646127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.674231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.701114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:02.915101Z 
node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330139184080918:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:02.915101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330139184080906:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:02.915217Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:02.915461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330139184080921:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:02.915515Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:02.920483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:02.931151Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330139184080920:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:38:03.011086Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330143479048269:3100] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:03.579870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:03.602844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:03.627937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:03.657159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:03.684880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:03.711670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T0 ... 
er { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:04.927703Z node 1 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 62ms 2026-01-04T02:38:04.927727Z node 1 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:04.927763Z node 1 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-04T02:38:04.927817Z node 1 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 66ms 2026-01-04T02:38:04.928023Z node 1 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:04.928025Z node 1 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { 
Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:04.928045Z node 1 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-04T02:38:04.928090Z node 1 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 65ms 2026-01-04T02:38:04.928404Z node 1 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { 
Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:05.077713Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7591330147774016257:2439]: Pool not found 2026-01-04T02:38:05.078419Z node 1 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-04T02:38:05.287838Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7591330147774016251:2435]: Pool not found 2026-01-04T02:38:05.288088Z node 1 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-04T02:38:05.290546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330152068983663:2455], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:05.290614Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:05.290625Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7591330152068983664:2456], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:38:05.290841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330152068983667:2457], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:05.290881Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:05.483152Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330130594144698:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:05.483241Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:38:05.547522Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7591330152068983661:2454]: Pool not found 2026-01-04T02:38:05.547791Z node 1 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-04T02:38:05.852397Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:51434) incoming connection opened 2026-01-04T02:38:05.852538Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:51434) -> (POST /Root, 24 bytes) 2026-01-04T02:38:05.852849Z node 1 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [f864:49aa:3f7c:0:e064:49aa:3f7c:0] request [CreateQueue] url [/Root] database [/Root] requestId: 58838205-f42af504-a92eb626-fe918bb7 2026-01-04T02:38:05.855301Z node 1 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateQueue] requestId [58838205-f42af504-a92eb626-fe918bb7] reply with status: BAD_REQUEST message: Failed to decode POST body 2026-01-04T02:38:05.855503Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:51434) <- (400 AccessDeniedException, 73 bytes) 2026-01-04T02:38:05.855575Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:51434) Request: POST /Root HTTP/1.1 Host:example.amazonaws.com X-Amz-Target:AmazonSQS.CreateQueue X-Amz-Date:20150830T123600Z Authorization: Content-Type:application/json Content-Encoding: deflate Content-Length: 32 {"QueueName": "Example"} 2026-01-04T02:38:05.855621Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:51434) Response: HTTP/1.1 400 AccessDeniedException Connection: keep-alive x-amzn-requestid: 58838205-f42af504-a92eb626-fe918bb7 Content-Type: application/x-amz-json-1.1 Content-Length: 73 Http output full {"__type":"AccessDeniedException","message":"Failed to decode POST body"} 2026-01-04T02:38:05.855898Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:208: (#37,[::1]:51434) connection closed |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoMap [GOOD] |89.6%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedDeflateContentLengthLower >> TSequence::CreateSequence >> TSequence::CreateSequenceParallel ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::Sinks_Olap_WriteToTopicAndTable_4_Query [GOOD] Test command err: 2026-01-04T02:32:52.808190Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328807947379265:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:52.808267Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:32:52.862576Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:52.885930Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:32:53.207919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:53.208037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:53.215765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:53.341715Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:32:53.433502Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:53.465887Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001ce5/r3tmp/yandex9wUXyb.tmp 2026-01-04T02:32:53.465938Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001ce5/r3tmp/yandex9wUXyb.tmp 2026-01-04T02:32:53.466135Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001ce5/r3tmp/yandex9wUXyb.tmp 2026-01-04T02:32:53.466223Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:53.515593Z INFO: TTestServer started on Port 6079 GrpcPort 27305 2026-01-04T02:32:53.560217Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:32:53.817915Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:27305 2026-01-04T02:32:53.879667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:53.887434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:32:54.051097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:54.197491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-04T02:32:54.211282Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715661, at schemeshard: 72057594046644480 2026-01-04T02:32:56.418763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328825127249431:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.418862Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328825127249441:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.419059Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.419672Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328825127249448:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.419745Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:32:56.423066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:32:56.433977Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328825127249447:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-04T02:32:56.501764Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328825127249513:2650] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:32:56.751598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:56.792239Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328825127249521:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:32:56.795162Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MjM4Y2QzOGItZTAyY2JjMDgtOWExMjRkNTItNzExMzRlMjk=, ActorId: [1:7591328825127249417:2330], ActorState: ExecuteState, LegacyTraceId: 01ke3dkgb005fh5jm2bm5xtxf5, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:32:56.799533Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:32:56.805934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:56.912471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7591328829422217103:2827] 2026-01-04T02:32:57.807543Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328807947379265:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:32:57.807636Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. 
Ok 2026-01-04T02:33:02.342498Z :Sinks_Oltp_WriteToTopicAndTable_1_Table INFO: TTopicSdkTestSetup started 2026-01-04T02:33:02.373419Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-04T02:33:02.394754Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7591328850897053780:2919] connected; active server actors: 1 2026-01-04T02:33:02.395036Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-04T02:33:02.402255Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-04T02:33:02.402464Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-04T02:33:02.423309Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:33:02.424886Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_184467440737095516 ... EAD_PROXY INFO: direct_read_actor.cpp:87: Direct read proxy [14:7591330126796901776:3300]: session cookie 2 consumer test-consumer session test-consumer_14_1_958339611369626063_v1grpc read failed 2026-01-04T02:38:05.406812Z node 14 :PQ_READ_PROXY DEBUG: direct_read_actor.cpp:363: Direct read proxy [14:7591330126796901776:3300]: session cookie 2 consumer test-consumer session test-consumer_14_1_958339611369626063_v1 Close session with reason: reads done signal, closing everything 2026-01-04T02:38:05.406837Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:381: session cookie 2 consumer test-consumer session test-consumer_14_1_958339611369626063_v1 closed 2026-01-04T02:38:05.406863Z node 14 :PQ_READ_PROXY INFO: direct_read_actor.cpp:167: Direct read proxy [14:7591330126796901776:3300]: session cookie 2 consumer test-consumer session test-consumer_14_1_958339611369626063_v1 proxy is DEAD 2026-01-04T02:38:05.477839Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:38:05.477868Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:05.477888Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:38:05.477910Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:05.477923Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-04T02:38:05.480939Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|d2ba8e66-5ece9af-11218ff5-92abd305_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-01-04T02:38:05.480994Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d2ba8e66-5ece9af-11218ff5-92abd305_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2026-01-04T02:38:05.481040Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 
2026-01-04T02:38:05.481061Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d2ba8e66-5ece9af-11218ff5-92abd305_0] PartitionId [0] Generation [1] Write session is aborting and will not restart 2026-01-04T02:38:05.481124Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|d2ba8e66-5ece9af-11218ff5-92abd305_0] PartitionId [0] Generation [1] Write session: destroy 2026-01-04T02:38:05.481183Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 7 sessionId: test-message_group_id|d2ba8e66-5ece9af-11218ff5-92abd305_0 grpc read done: success: 0 data: 2026-01-04T02:38:05.481213Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 7 sessionId: test-message_group_id|d2ba8e66-5ece9af-11218ff5-92abd305_0 grpc read failed 2026-01-04T02:38:05.481253Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 7 sessionId: test-message_group_id|d2ba8e66-5ece9af-11218ff5-92abd305_0 grpc closed 2026-01-04T02:38:05.481269Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 7 sessionId: test-message_group_id|d2ba8e66-5ece9af-11218ff5-92abd305_0 is DEAD 2026-01-04T02:38:05.481873Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:38:05.481907Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:05.481924Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:38:05.481946Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:05.481959Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:38:05.482496Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|563cf7e5-e4d70ff3-af47e44a-48d9ff7b_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2026-01-04T02:38:05.482529Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|563cf7e5-e4d70ff3-af47e44a-48d9ff7b_0] PartitionId [0] Generation [1] Write session will now close 2026-01-04T02:38:05.482507Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:38:05.482571Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|563cf7e5-e4d70ff3-af47e44a-48d9ff7b_0] PartitionId [0] Generation [1] Write session: aborting 2026-01-04T02:38:05.482568Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037896 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:38:05.482754Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037896] server disconnected, pipe [14:7591330122501934269:3268] destroyed 2026-01-04T02:38:05.482798Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037896][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-04T02:38:05.482831Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:38:05.482855Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:05.482872Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:38:05.482892Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:05.482907Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-04T02:38:05.482947Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037896] server disconnected, pipe [14:7591330122501934272:3268] destroyed 2026-01-04T02:38:05.483167Z :INFO: [/Root] TraceId [] SessionId [test-message_group_id|563cf7e5-e4d70ff3-af47e44a-48d9ff7b_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-01-04T02:38:05.483202Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|563cf7e5-e4d70ff3-af47e44a-48d9ff7b_0] PartitionId [0] Generation [1] Write session: OnReadDone gRpcStatusCode: 1, Msg: CANCELLED, Details: , InternalError: 0 2026-01-04T02:38:05.483242Z :TRACE: [/Root] TRACE_EVENT Error status=CLIENT_CANCELLED 2026-01-04T02:38:05.483262Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|563cf7e5-e4d70ff3-af47e44a-48d9ff7b_0] PartitionId [0] Generation [1] Write session is aborting and will not restart 2026-01-04T02:38:05.483302Z :DEBUG: [/Root] TraceId [] SessionId [test-message_group_id|563cf7e5-e4d70ff3-af47e44a-48d9ff7b_0] PartitionId [0] Generation [1] Write session: destroy 2026-01-04T02:38:05.483348Z node 14 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 5 sessionId: test-message_group_id|563cf7e5-e4d70ff3-af47e44a-48d9ff7b_0 grpc read done: success: 0 data: 2026-01-04T02:38:05.483373Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 5 sessionId: test-message_group_id|563cf7e5-e4d70ff3-af47e44a-48d9ff7b_0 grpc read failed 2026-01-04T02:38:05.483780Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:824: session v1 closed cookie: 5 sessionId: test-message_group_id|563cf7e5-e4d70ff3-af47e44a-48d9ff7b_0 2026-01-04T02:38:05.483810Z node 14 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 5 sessionId: test-message_group_id|563cf7e5-e4d70ff3-af47e44a-48d9ff7b_0 is DEAD 2026-01-04T02:38:05.484177Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:38:05.484214Z node 14 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037894 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:38:05.484330Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [14:7591330122501934224:3260] destroyed 2026-01-04T02:38:05.484360Z node 14 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037894][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-04T02:38:05.484386Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:38:05.484398Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:05.484408Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:38:05.484423Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:05.484435Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:38:05.484464Z node 14 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [14:7591330122501934227:3260] destroyed 2026-01-04T02:38:05.496270Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:38:05.496312Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:05.496330Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:38:05.496356Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:05.496372Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:38:05.578249Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:38:05.578297Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:05.578316Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:38:05.578345Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:05.578364Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-04T02:38:05.582277Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:38:05.582320Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:05.582337Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:38:05.582364Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:38:05.582381Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist |89.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> KqpBatchDelete::SimplePartitions [GOOD] |89.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |89.7%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> TSequence::CreateSequence [GOOD] >> TSequence::CreateDropRecreate >> TSequence::CreateSequenceParallel [GOOD] >> TSequence::CreateSequenceSequential |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::SimplePartitions [GOOD] Test command err: Trying to start YDB, gRPC: 5844, MsgBus: 23881 2026-01-04T02:33:47.479099Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329044935028795:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:47.479247Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:47.799752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:47.799839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:47.823198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:47.863059Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:47.869926Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:47.869941Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329044935028582:2081] 1767494027418043 != 1767494027418046 2026-01-04T02:33:48.016617Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:48.016642Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:48.016662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:48.016743Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:48.123815Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:48.459659Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:48.691721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:48.763339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:48.960424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:49.150447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:49.228777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:51.356191Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329062114899631:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:51.356372Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:51.358074Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329062114899641:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:51.358155Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:51.786924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:51.832256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:51.875059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:51.923675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:52.019705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:52.107341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:52.165475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:52.232357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:52.334075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329066409867814:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:52.334172Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:52.334802Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329066409867819:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:52.334854Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329066409867820:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:52.334996Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:52.339600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:52.352355Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329066409867823:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:33:52.443700Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329066409867874:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:52.473051Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329044935028795:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:52.476877Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:34:02.787889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:34:02.787935Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 14021, MsgBus: 27787 2026-01-04T02:34:04.378691Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_e ... :7591330071117800280:3784] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 21839, MsgBus: 4082 2026-01-04T02:37:55.852348Z node 16 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[16:7591330108510604299:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:55.852461Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:55.872411Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:55.970713Z node 16 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [16:7591330108510604266:2081] 1767494275851348 != 1767494275851351 2026-01-04T02:37:55.971295Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:55.981598Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:55.981711Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:56.004114Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:56.030168Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:56.043630Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:56.043661Z node 16 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:56.043671Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:56.043778Z node 16 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:56.533460Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:56.545603Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:56.610960Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:56.772061Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:56.838963Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:56.859185Z node 16 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:00.296444Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7591330129985442616:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:00.296544Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:00.296839Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7591330129985442625:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:00.296899Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:00.388330Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:00.418687Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:00.452023Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:00.493192Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:00.527637Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:00.562006Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:00.597411Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:00.644635Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:00.720899Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7591330129985443495:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:00.721002Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:00.721001Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7591330129985443500:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:00.721211Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7591330129985443502:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:00.721269Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:00.725303Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:00.737412Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7591330129985443503:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:00.821040Z node 16 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [16:7591330129985443555:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:00.852765Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7591330108510604299:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:00.852844Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TestMalformedRequest::ContentLengthNone [GOOD] >> TColumnShardTestSchema::RebootOneColdTier [GOOD] >> TColumnShardTestSchema::OneColdTier [GOOD] >> AssignTxId::Basic >> TxUsage::Sinks_Oltp_WriteToTopics_3_Query [GOOD] >> TestMalformedRequest::CompressedGzipContentLengthNone [GOOD] >> TSequence::CreateDropRecreate [GOOD] >> TSequence::CreateSequenceInsideSequenceNotAllowed >> TSequence::CreateSequenceSequential [GOOD] >> TSequence::CreateSequenceInsideTableThenDropSequence >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::OneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494842.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=167494842.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494842.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147494842.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493642.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147493642.000000s;Name=;Codec=}; 2026-01-04T02:37:24.230316Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:24.261704Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:24.262040Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:24.268199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:24.268394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:24.268566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:24.268678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:24.268794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:24.268932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:24.269016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:24.269088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:24.269174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:24.269278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:24.269353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:24.269420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:24.269493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:24.288523Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:24.288629Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:24.288678Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:24.288837Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:24.288965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:24.289032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:24.289063Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:24.289127Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:24.289173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:24.289218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:24.289246Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:24.289448Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:24.289533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:24.289580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:24.289613Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:24.289728Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:24.289817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:24.289868Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:24.289901Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:24.289956Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:24.289997Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:24.290034Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:24.290084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:24.290129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:24.290158Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:24.290389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:24.290471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:24.290513Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:24.290676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:24.290734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:24.290768Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:24.290821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:24.290871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:24.290902Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:24.290949Z node 1 :TX_COLUM ... 
;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:38:10.882013Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:874:2832];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-04T02:38:10.882644Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 7 at tablet 9437184 2026-01-04T02:38:10.882868Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767494280374:max} readable: {1767494280374:max} at tablet 9437184 2026-01-04T02:38:10.882952Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-04T02:38:10.883084Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494280374:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:38:10.883135Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494280374:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:38:10.883507Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494280374:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-04T02:38:10.884690Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494280374:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-04T02:38:10.885349Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494280374:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:882:2840];trace_detailed=; 2026-01-04T02:38:10.885688Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-04T02:38:10.885873Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-04T02:38:10.886062Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:10.886197Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:10.886399Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:38:10.886515Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:10.886596Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:10.886754Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:882:2840] finished for tablet 9437184 2026-01-04T02:38:10.887167Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:881:2839];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":49066516,"name":"_full_task","f":49066516,"d_finished":0,"c":0,"l":49068026,"d":1510},"events":[{"name":"bootstrap","f":49066684,"d_finished":751,"c":1,"l":49067435,"d":751},{"a":49067595,"name":"ack","f":49067595,"d_finished":0,"c":0,"l":49068026,"d":431},{"a":49067584,"name":"processing","f":49067584,"d_finished":0,"c":0,"l":49068026,"d":442},{"name":"ProduceResults","f":49067184,"d_finished":440,"c":2,"l":49067821,"d":440},{"a":49067825,"name":"Finish","f":49067825,"d_finished":0,"c":0,"l":49068026,"d":201}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:10.887238Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:881:2839];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:38:10.887541Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:881:2839];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":49066516,"name":"_full_task","f":49066516,"d_finished":0,"c":0,"l":49068503,"d":1987},"events":[{"name":"bootstrap","f":49066684,"d_finished":751,"c":1,"l":49067435,"d":751},{"a":49067595,"name":"ack","f":49067595,"d_finished":0,"c":0,"l":49068503,"d":908},{"a":49067584,"name":"processing","f":49067584,"d_finished":0,"c":0,"l":49068503,"d":919},{"name":"ProduceResults","f":49067184,"d_finished":440,"c":2,"l":49067821,"d":440},{"a":49067825,"name":"Finish","f":49067825,"d_finished":0,"c":0,"l":49068503,"d":678}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:882:2840]->[1:881:2839] 2026-01-04T02:38:10.887615Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:38:10.884666Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-04T02:38:10.887646Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:38:10.887746Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootOneColdTier [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494839.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=167494839.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494839.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147494839.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493639.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147493639.000000s;Name=;Codec=}; 2026-01-04T02:37:21.376966Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:21.418541Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:21.420017Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:21.432050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:21.432371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:21.432688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:21.432843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:21.432977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:21.433108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:21.433219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:21.433333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:21.433472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:21.433621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:21.433736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:21.433872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:21.433998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:21.464409Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:21.464573Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:21.464635Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:21.464829Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:21.465020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:21.465108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:21.465154Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:21.465253Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:21.465314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:21.465359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:21.465394Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:21.465591Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:21.465673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:21.465721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:21.465770Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:21.465915Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:21.465986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:21.466035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:21.466069Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:21.466125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:21.466169Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:21.466204Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:21.466256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:21.466300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:21.466330Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:21.466555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:21.466614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:21.466654Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:21.466802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:21.466860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:21.466893Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:21.466949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:21.466992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:21.467024Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:21.467068Z node 1 :TX_COLUM ... 
ard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=9; 2026-01-04T02:38:10.646466Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=71; 2026-01-04T02:38:10.646500Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=2321; 2026-01-04T02:38:10.646543Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=2410; 2026-01-04T02:38:10.646595Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=10; 2026-01-04T02:38:10.646655Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=18; 2026-01-04T02:38:10.646693Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=2870; 2026-01-04T02:38:10.646782Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=48; 2026-01-04T02:38:10.646850Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=39; 2026-01-04T02:38:10.646939Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=51; 2026-01-04T02:38:10.647002Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=42; 2026-01-04T02:38:10.648117Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1075; 2026-01-04T02:38:10.649183Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1023; 2026-01-04T02:38:10.649222Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2026-01-04T02:38:10.649253Z node 1 :TX_COLUMNSHARD 
INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2026-01-04T02:38:10.649277Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2026-01-04T02:38:10.649323Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=27; 2026-01-04T02:38:10.649347Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=3; 2026-01-04T02:38:10.649393Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=28; 2026-01-04T02:38:10.649416Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=3; 2026-01-04T02:38:10.649458Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=24; 2026-01-04T02:38:10.649506Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=28; 2026-01-04T02:38:10.649680Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=151; 2026-01-04T02:38:10.649711Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=11992; 2026-01-04T02:38:10.649797Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:38:10.649897Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:38:10.649932Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:38:10.649974Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:38:10.656467Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-04T02:38:10.656591Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:38:10.656666Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-04T02:38:10.656733Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767493978903;tx_id=18446744073709551615;;current_snapshot_ts=1767494266790; 2026-01-04T02:38:10.656775Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:38:10.656817Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:10.656851Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:10.656911Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:10.657063Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.041000s; 2026-01-04T02:38:10.658251Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:38:10.658844Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:38:10.658881Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:38:10.658942Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-04T02:38:10.659012Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767493978903;tx_id=18446744073709551615;;current_snapshot_ts=1767494266790; 2026-01-04T02:38:10.659060Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:38:10.659098Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:10.659124Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:10.659184Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:10.659504Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.113000s; 2026-01-04T02:38:10.659571Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 >> TTxAllocatorClientTest::ZeroRange [GOOD] |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> TxUsage::Sinks_Oltp_WriteToTopics_4_Table |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494839.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=167494839.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494839.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147494839.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493639.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147493639.000000s;Name=;Codec=}; 2026-01-04T02:37:21.875698Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:21.910460Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:21.910749Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:21.918685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:21.918970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:21.919228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:21.919373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:21.919501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:21.919624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:21.919739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:21.919841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:21.920096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:21.920253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:21.920370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:21.920490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:21.920616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:21.948094Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:21.948239Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:21.948305Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:21.948475Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:21.948615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:21.948685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:21.948718Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:21.948816Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:21.948877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:21.948926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:21.948959Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:21.949172Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:21.949235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:21.949279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:21.949306Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:21.949382Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:21.949424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:21.949462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:21.949482Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:21.949517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:21.949544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:21.949573Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:21.949615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:21.949648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:21.949669Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:21.949968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:21.950032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:21.950067Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:21.950166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:21.950234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:21.950261Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:21.950296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:21.950326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:21.950348Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:21.950419Z node 1 :TX_COLUM ... d;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2026-01-04T02:38:10.859521Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=96; 2026-01-04T02:38:10.859561Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=3514; 2026-01-04T02:38:10.859599Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=3661; 2026-01-04T02:38:10.859651Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=9; 2026-01-04T02:38:10.859719Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=30; 2026-01-04T02:38:10.859754Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=4266; 2026-01-04T02:38:10.859877Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=75; 2026-01-04T02:38:10.859999Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=61; 2026-01-04T02:38:10.860116Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=77; 2026-01-04T02:38:10.860222Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=61; 2026-01-04T02:38:10.862624Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2340; 2026-01-04T02:38:10.864982Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2261; 2026-01-04T02:38:10.865089Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2026-01-04T02:38:10.865148Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2026-01-04T02:38:10.865197Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-04T02:38:10.865275Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=38; 2026-01-04T02:38:10.865319Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-04T02:38:10.865398Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=47; 2026-01-04T02:38:10.865437Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-04T02:38:10.865500Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2026-01-04T02:38:10.865609Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=52; 2026-01-04T02:38:10.865909Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=251; 2026-01-04T02:38:10.865964Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=20379; 2026-01-04T02:38:10.866103Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:38:10.866212Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:38:10.866270Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:38:10.866332Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:38:10.873747Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-04T02:38:10.873866Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:38:10.873936Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-04T02:38:10.874005Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767493979408;tx_id=18446744073709551615;;current_snapshot_ts=1767494267295; 2026-01-04T02:38:10.874042Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:38:10.874075Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:10.874102Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:10.874162Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:10.874330Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.148000s; 2026-01-04T02:38:10.875998Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:38:10.876092Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:38:10.876128Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:38:10.876196Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-04T02:38:10.876239Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767493979408;tx_id=18446744073709551615;;current_snapshot_ts=1767494267295; 2026-01-04T02:38:10.876266Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:38:10.876299Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:10.876323Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:10.876392Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:10.876781Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.038000s; 2026-01-04T02:38:10.876813Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSequence::CreateSequenceInsideSequenceNotAllowed [GOOD] >> TSequence::CreateSequenceInsideIndexTableNotAllowed >> TestMalformedRequest::CompressedDeflateContentLengthNone [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::ContentLengthNone [GOOD] Test command err: 2026-01-04T02:38:05.828755Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330150976619880:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:05.828822Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:06.025574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:06.025705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:06.052874Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:06.067206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:06.120530Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:06.125923Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330150976619853:2081] 1767494285827547 != 1767494285827550 2026-01-04T02:38:06.168276Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:06.168296Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:06.168300Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:06.168371Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:06.221090Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:06.326903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:06.458291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:38:06.463676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:38:06.522532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:06.636917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:06.676859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:06.719209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:06.749675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:06.780458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:06.811686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:06.834990Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:06.844201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:06.881941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:06.906377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.260928Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330163861523397:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:08.260929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330163861523392:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:08.261038Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:08.261300Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330163861523407:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:08.261383Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:08.264260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:08.272457Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330163861523406:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:38:08.375148Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330163861523459:3101] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:08.721588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.746461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.768190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.792383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.818830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.846690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeO ... 
ber { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:10.067419Z node 1 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 44ms 2026-01-04T02:38:10.067653Z node 1 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:10.067706Z node 1 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-04T02:38:10.067735Z node 1 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: 
Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:10.067752Z node 1 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-04T02:38:10.067808Z node 1 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 46ms 2026-01-04T02:38:10.067841Z node 1 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 47ms 2026-01-04T02:38:10.068117Z node 1 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:10.068489Z node 1 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional 
Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:10.179466Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7591330172451458748:2439]: Pool not found 2026-01-04T02:38:10.179654Z node 1 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-04T02:38:10.422183Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7591330172451458738:2436]: Pool not found 2026-01-04T02:38:10.422478Z node 1 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-04T02:38:10.425055Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330172451458852:2455], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:10.425081Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [1:7591330172451458853:2456], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:38:10.425144Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:10.425394Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330172451458856:2457], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:10.425477Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:10.622799Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [1:7591330172451458850:2454]: Pool not found 2026-01-04T02:38:10.623003Z node 1 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-04T02:38:10.829102Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330150976619880:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:10.829214Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:38:10.992273Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:47672) incoming connection opened 2026-01-04T02:38:10.992386Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:47672) -> (POST /Root) 2026-01-04T02:38:10.992705Z node 1 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [1807:a58f:3f7c:0:7:a58f:3f7c:0] request [CreateQueue] url [/Root] database [/Root] requestId: 5a92215c-968d1b0a-23767481-4d4323c3 2026-01-04T02:38:10.993845Z node 1 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateQueue] requestId [5a92215c-968d1b0a-23767481-4d4323c3] reply with status: BAD_REQUEST message: Empty body 2026-01-04T02:38:10.994091Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:47672) <- (400 InvalidArgumentException, 60 bytes) 2026-01-04T02:38:10.994177Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:47672) Request: POST /Root HTTP/1.1 Host:example.amazonaws.com X-Amz-Target:AmazonSQS.CreateQueue X-Amz-Date:20150830T123600Z Authorization: Content-Type:application/json 2026-01-04T02:38:10.994218Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:47672) Response: HTTP/1.1 400 InvalidArgumentException Connection: keep-alive x-amzn-requestid: 5a92215c-968d1b0a-23767481-4d4323c3 Content-Type: application/x-amz-json-1.1 Content-Length: 60 Http output full {"__type":"InvalidArgumentException","message":"Empty body"} 2026-01-04T02:38:10.994454Z node 1 :HTTP DEBUG: http_proxy_incoming.cpp:208: (#37,[::1]:47672) connection closed |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> TSequence::CreateSequenceInsideTableThenDropSequence [GOOD] >> TSequence::CreateSequenceInsideTableThenDropTable >> TestMalformedRequest::CompressedGzipContentLengthCorrect >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator_client/ut/unittest >> TTxAllocatorClientTest::ZeroRange [GOOD] Test command err: 2026-01-04T02:36:53.805356Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-04T02:36:53.805932Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-04T02:36:53.806787Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-04T02:36:53.808726Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:36:53.809348Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-04T02:36:53.823237Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:36:53.823374Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:36:53.823498Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-04T02:36:53.823642Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:36:53.823741Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:36:53.823845Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-04T02:36:53.823983Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-04T02:36:53.824816Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#5000 2026-01-04T02:36:53.825377Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:36:53.825473Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:36:53.825575Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 5000 2026-01-04T02:36:53.825629Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 0 to# 5000 |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator_client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedGzipContentLengthNone [GOOD] Test command err: 2026-01-04T02:38:00.484005Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330131379575231:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:00.484091Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:00.721247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:00.721331Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:00.739898Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:00.757396Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:00.760792Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:00.762756Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330131379575199:2081] 1767494280480474 != 1767494280480477 2026-01-04T02:38:00.881080Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:00.881110Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:00.881117Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:00.881211Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:01.014694Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:01.131448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:01.325854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:38:01.330654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:38:01.384823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.464031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.491266Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:01.502715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.539602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.565902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.592612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.618740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.646598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.670941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.693317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:03.207933Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330144264478740:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:03.207933Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330144264478748:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:03.208038Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:03.208267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330144264478755:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:03.208330Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:03.210921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:03.219867Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330144264478754:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-01-04T02:38:03.283269Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330144264478807:3103] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:03.579886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:03.603342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:03.630453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:03.657149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:03.683737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:03.711114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeO ... 
"Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:10.214296Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-04T02:38:10.214346Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 5ms 2026-01-04T02:38:10.214537Z node 2 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:10.215218Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct 
{ Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:10.215235Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 6ms 2026-01-04T02:38:10.215477Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:10.215496Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-04T02:38:10.215560Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 7ms 2026-01-04T02:38:10.215854Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data 
Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:10.320585Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591330175544907580:2433]: Pool not found 2026-01-04T02:38:10.320719Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-04T02:38:10.518224Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591330175544907597:2439]: Pool not found 2026-01-04T02:38:10.518393Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-04T02:38:10.520382Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7591330175544907697:2454], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:38:10.520382Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330175544907696:2453], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:10.520430Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:10.520614Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330175544907700:2455], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:10.520649Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:10.716252Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591330175544907694:2452]: Pool not found 2026-01-04T02:38:10.716552Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-04T02:38:11.189786Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:48108) incoming connection opened 2026-01-04T02:38:11.189966Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:48108) -> (POST /Root) 2026-01-04T02:38:11.190135Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [d87b:f08a:7c:0:c07b:f08a:7c:0] request [CreateQueue] url [/Root] database [/Root] requestId: 7cc1ad81-f2e1bf97-2ad00ffb-4633da51 2026-01-04T02:38:11.190679Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateQueue] requestId [7cc1ad81-f2e1bf97-2ad00ffb-4633da51] reply with status: BAD_REQUEST message: Empty body 2026-01-04T02:38:11.190915Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:48108) <- (400 InvalidArgumentException, 60 bytes) 2026-01-04T02:38:11.190999Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:48108) Request: POST /Root HTTP/1.1 Host:example.amazonaws.com X-Amz-Target:AmazonSQS.CreateQueue X-Amz-Date:20150830T123600Z Authorization: Content-Type:application/json Content-Encoding: gzip 2026-01-04T02:38:11.191049Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:48108) Response: HTTP/1.1 400 InvalidArgumentException Connection: keep-alive x-amzn-requestid: 7cc1ad81-f2e1bf97-2ad00ffb-4633da51 Content-Type: application/x-amz-json-1.1 Content-Length: 60 Http output full {"__type":"InvalidArgumentException","message":"Empty body"} 2026-01-04T02:38:11.191400Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:208: (#37,[::1]:48108) connection closed >> TestMalformedRequest::CompressedDeflateContentLengthLower [GOOD] |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest |89.7%| [TA] $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.7%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |89.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_allocator_client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestMalformedRequest::CompressedDeflateContentLengthHigher |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> TSequence::CreateSequenceInsideIndexTableNotAllowed [GOOD] >> TSequence::CopyTableWithSequence |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> TSequence::CreateSequenceInsideTableThenDropTable [GOOD] >> TSequence::CreateSequencesWithIndexedTable |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> TSubDomainTest::DatashardRunAtOtherNodeWhenOneNodeIsStopped [GOOD] Test command err: 2026-01-04T02:33:37.845881Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329000616664564:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:37.846083Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:37.887906Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:38.154602Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:33:38.186526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:38.186613Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:38.192901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:38.305457Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:38.335571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:33:38.537327Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591329000616664790:2126] Handle TEvNavigate describe path dc-1 2026-01-04T02:33:38.537375Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591329004911632568:2445] HANDLE EvNavigateScheme dc-1 2026-01-04T02:33:38.537530Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591329000616664803:2130], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: 
dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:33:38.537958Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591329004911632402:2320][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591329000616664803:2130], cookie# 1 2026-01-04T02:33:38.539505Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329004911632410:2320][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004911632407:2320], cookie# 1 2026-01-04T02:33:38.539562Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329004911632411:2320][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004911632408:2320], cookie# 1 2026-01-04T02:33:38.539591Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591329004911632412:2320][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004911632409:2320], cookie# 1 2026-01-04T02:33:38.539628Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329000616664465:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004911632410:2320], cookie# 1 2026-01-04T02:33:38.539657Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329000616664468:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004911632411:2320], cookie# 1 2026-01-04T02:33:38.539673Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591329000616664471:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591329004911632412:2320], cookie# 1 2026-01-04T02:33:38.539707Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329004911632410:2320][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329000616664465:2050], cookie# 1 2026-01-04T02:33:38.539724Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329004911632411:2320][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329000616664468:2053], cookie# 1 2026-01-04T02:33:38.539739Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591329004911632412:2320][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329000616664471:2056], cookie# 1 2026-01-04T02:33:38.539774Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329004911632402:2320][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329004911632407:2320], cookie# 1 2026-01-04T02:33:38.539802Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591329004911632402:2320][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:33:38.539824Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329004911632402:2320][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329004911632408:2320], cookie# 1 2026-01-04T02:33:38.539869Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: 
[main][1:7591329004911632402:2320][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:33:38.539952Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591329004911632402:2320][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591329004911632409:2320], cookie# 1 2026-01-04T02:33:38.539980Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591329004911632402:2320][/dc-1] Sync cookie mismatch: sender# [1:7591329004911632409:2320], cookie# 1, current cookie# 0 2026-01-04T02:33:38.540035Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591329000616664803:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:33:38.546272Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591329000616664803:2130], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591329004911632402:2320] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:33:38.546397Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591329000616664803:2130], cacheItem# { Subscriber: { Subscriber: [1:7591329004911632402:2320] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:33:38.549050Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591329004911632569:2446], recipient# [1:7591329004911632568:2445], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:33:38.549101Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591329004911632568:2445] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:33:38.596540Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591329004911632568:2445] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:33:38.600115Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591329004911632568:2445] Handle 
TEvDescribeSchemeResult Forward to# [1:7591329004911632567:2444] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:33:38.606638Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591329000616664790:2126] Handle TEvProposeTransaction 2026-01-04T02:33:38.606662Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591329000616664790:2126] TxId# 281474976710657 ProcessProposeTransaction 2026-01-04T02:33:38.606716Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591329000616664790:2126] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7591329004911632574:2450] 2026-01-04T02:33:38.698782Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591329004911632574:2450] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "/dc-1:test" Kind: ... 
e.cpp:1892: FillEntry for TNavigate: self# [7:7591329622524677090:2107], cacheItem# { Subscriber: { Subscriber: [7:7591329626819644806:2352] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:38:11.197351Z node 7 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [7:7591330176575459859:3188], recipient# [7:7591330176575459856:2598], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:38:11.219000Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7591329625918920783:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:38:11.219145Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7591329625918920783:2109], cacheItem# { Subscriber: { Subscriber: [6:7591329630213888432:2303] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:38:11.219254Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7591330179969703128:2683], recipient# [6:7591330179969703127:2698], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:38:11.239475Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7591329625918920783:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:38:11.239633Z node 6 
:TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7591329625918920783:2109], cacheItem# { Subscriber: { Subscriber: [6:7591329630213888432:2303] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:38:11.239765Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7591330179969703130:2684], recipient# [6:7591330179969703129:2699], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } IsActive: /dc-1/USER_0 -- 2 -- 2 2026-01-04T02:38:11.466388Z node 5 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [5:7591329620691139012:2051] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [7:7591329622524677102:2110] 2026-01-04T02:38:11.466435Z node 5 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [5:7591329620691139012:2051] Unsubscribe: subscriber# [7:7591329622524677102:2110], path# /dc-1/USER_0 2026-01-04T02:38:11.466491Z node 5 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [5:7591329620691139018:2057] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [7:7591329622524677104:2110] 2026-01-04T02:38:11.466503Z node 5 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [5:7591329620691139018:2057] Unsubscribe: subscriber# [7:7591329622524677104:2110], path# /dc-1/USER_0 2026-01-04T02:38:11.466504Z node 5 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1084: [5:7591329620691139015:2054] Handle NKikimrSchemeBoard.TEvUnsubscribe { Path: /dc-1/USER_0 }: sender# [7:7591329622524677103:2110] 2026-01-04T02:38:11.466522Z node 5 :SCHEME_BOARD_REPLICA INFO: replica.cpp:671: [5:7591329620691139015:2054] Unsubscribe: subscriber# [7:7591329622524677103:2110], path# /dc-1/USER_0 2026-01-04T02:38:11.466622Z node 5 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 7 2026-01-04T02:38:11.467659Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-04T02:38:12.174438Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7591329625918920783:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:38:12.174587Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7591329625918920783:2109], cacheItem# { Subscriber: { Subscriber: [6:7591329643098790338:2308] DomainOwnerId: 72057594046644480 Type: 2 
SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:38:12.174699Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7591330184264670429:2685], recipient# [6:7591330184264670428:2700], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:38:12.219951Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7591329625918920783:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:38:12.220100Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7591329625918920783:2109], cacheItem# { Subscriber: { Subscriber: [6:7591329630213888432:2303] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:38:12.220201Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7591330184264670431:2686], recipient# [6:7591330184264670430:2701], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:38:12.240445Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [6:7591329625918920783:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:38:12.240593Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [6:7591329625918920783:2109], cacheItem# { 
Subscriber: { Subscriber: [6:7591329630213888432:2303] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:38:12.240695Z node 6 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [6:7591330184264670433:2687], recipient# [6:7591330184264670432:2702], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_base_tenant/unittest >> AssignTxId::Basic [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate >> TConfigsDispatcherObservabilityTests::TestGetStateRequestResponse >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist >> TConfigsDispatcherObservabilityTests::TestSeedNodesInitialization |89.7%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> TConfigsDispatcherTests::TestRemoveSubscription >> TSequence::CreateSequencesWithIndexedTable [GOOD] >> TSequence::CreateTableWithDefaultFromSequence >> TSequence::CopyTableWithSequence [GOOD] >> TSequence::AlterSequence |89.7%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... results_accumulator.log} |89.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_base_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConfigsCacheTests::TestConfigurationSaveOnNotification >> TConfigsDispatcherTests::TestSubscriptionNotification >> TConfigsDispatcherObservabilityTests::TestSeedNodesInitialization [GOOD] >> TConfigsDispatcherObservabilityTests::TestUnknownConfigSource >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberAfterUpdate [GOOD] >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate >> TConfigsDispatcherObservabilityTests::TestGetStateRequestResponse [GOOD] >> TConfigsDispatcherObservabilityTests::TestGetStorageYamlRequestResponse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> AssignTxId::Basic [GOOD] Test command err: 2026-01-04T02:38:11.224943Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330177974977505:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:11.225730Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:11.481528Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:11.486839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:11.487029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:11.497752Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:11.521768Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:11.781724Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:11.781742Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:11.781747Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:11.781861Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:11.783067Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:12.020609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:12.231954Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:13.772004Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330186564912953:2323], DatabaseId: /Root, PoolId: 
default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:13.772122Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:13.772425Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330186564912963:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:13.772455Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:14.376848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateReplication, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_replication.cpp:491) 2026-01-04T02:38:14.424495Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:41: [controller 72075186224037888] OnActivateExecutor 2026-01-04T02:38:14.424565Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init_schema.cpp:17: [controller 72075186224037888][TxInitSchema] Execute 2026-01-04T02:38:14.434321Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init_schema.cpp:26: [controller 72075186224037888][TxInitSchema] Complete 2026-01-04T02:38:14.434390Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init.cpp:245: [controller 72075186224037888][TxInit] Execute 2026-01-04T02:38:14.441034Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_init.cpp:250: [controller 72075186224037888][TxInit] Complete 2026-01-04T02:38:14.441085Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:128: [controller 72075186224037888] SwitchToWork 2026-01-04T02:38:14.461562Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:171: [controller 72075186224037888] Handle NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 38 } OperationId { TxId: 281474976710658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:24233" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } Database: "/Root" 2026-01-04T02:38:14.461896Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_create_replication.cpp:22: [controller 72075186224037888][TxCreateReplication] Execute: NKikimrReplication.TEvCreateReplication PathId { OwnerId: 72057594046644480 LocalId: 38 } OperationId { TxId: 281474976710658 PartId: 0 } Config { SrcConnectionParams { Endpoint: "localhost:24233" Database: "/Root" OAuthToken { Token: "***" } EnableSsl: false } Specific { Targets { SrcPath: "/Root/table" DstPath: "/Root/replica" } } ConsistencySettings { Global { CommitIntervalMilliSeconds: 10000 } } } Database: "/Root" 2026-01-04T02:38:14.461989Z node 1 :REPLICATION_CONTROLLER NOTICE: tx_create_replication.cpp:43: [controller 72075186224037888][TxCreateReplication] Add replication: rid# 1, pathId# [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-04T02:38:14.467002Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_create_replication.cpp:58: [controller 72075186224037888][TxCreateReplication] Complete 2026-01-04T02:38:14.467048Z node 1 :REPLICATION_CONTROLLER INFO: tx_create_replication.cpp:68: [controller 72075186224037888][TxCreateReplication] Discover tenant nodes: tenant# /Root 2026-01-04T02:38:14.468947Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:335: [controller 72075186224037888] Handle NKikimr::TEvDiscovery::TEvDiscoveryData 2026-01-04T02:38:14.469559Z node 1 :REPLICATION_CONTROLLER DEBUG: controller.cpp:359: [controller 72075186224037888] Create session: nodeId# 1 2026-01-04T02:38:14.509088Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2026-01-04T02:38:14.509132Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/table, status# SCHEME_ERROR, issues# {
: Error: Path not found }, iteration# 0 2026-01-04T02:38:14.509317Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:201: [controller 72075186224037888] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2026-01-04T02:38:14.509421Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:24: [controller 72075186224037888][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/Root/table: SCHEME_ERROR ({
: Error: Path not found })] } 2026-01-04T02:38:14.509477Z node 1 :REPLICATION_CONTROLLER ERROR: tx_discovery_targets_result.cpp:79: [controller 72075186224037888][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /Root/table: SCHEME_ERROR ({
: Error: Path not found }) 2026-01-04T02:38:14.509918Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:92: [controller 72075186224037888][TxDiscoveryTargetsResult] Complete 2026-01-04T02:38:14.522245Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 1 TxId: 0 } 2026-01-04T02:38:14.522317Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 0 2026-01-04T02:38:14.522375Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 1, assigned# 0, allocated# 0, exhausted# 1 2026-01-04T02:38:14.522456Z node 1 :REPLICATION_CONTROLLER TRACE: tx_assign_tx_id.cpp:174: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-01-04T02:38:14.522486Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 0, allocated# 5 2026-01-04T02:38:14.522916Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2026-01-04T02:38:14.523352Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 0 } 2026-01-04T02:38:14.523416Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2026-01-04T02:38:14.523460Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2026-01-04T02:38:14.523755Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 9999 TxId: 18446744073709551615 } 2026-01-04T02:38:14.523811Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2026-01-04T02:38:14.523843Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 1, allocated# 4, exhausted# 0 2026-01-04T02:38:14.524094Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 10000 TxId: 0 } 2026-01-04T02:38:14.524127Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 1, allocated# 4 2026-01-04T02:38:14.524510Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 2, allocated# 3, exhausted# 0 2026-01-04T02:38:14.524774Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 5000 TxId: 0 } 2026-01-04T02:38:14.524804Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 2, allocated# 3 2026-01-04T02:38:14.524837Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: 
pending# 0, assigned# 2, allocated# 3, exhausted# 0 2026-01-04T02:38:14.525174Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 20000 TxId: 0 } Versions { Step: 30000 TxId: 0 } Versions { Step: 40000 TxId: 0 } 2026-01-04T02:38:14.525210Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 3, assigned# 2, allocated# 3 2026-01-04T02:38:14.525605Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 0, exhausted# 0 2026-01-04T02:38:14.525711Z node 1 :REPLICATION_CONTROLLER TRACE: tx_assign_tx_id.cpp:174: [controller 72075186224037888] Handle NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-01-04T02:38:14.525788Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 0, assigned# 5, allocated# 5 2026-01-04T02:38:14.525875Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 2026-01-04T02:38:14.525934Z node 1 :REPLICATION_CONTROLLER TRACE: controller.cpp:795: [controller 72075186224037888] Handle NKikimrReplication.TEvGetTxId Versions { Step: 50000 TxId: 0 } 2026-01-04T02:38:14.525960Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:76: [controller 72075186224037888][TxAssignTxId] Execute: pending# 1, assigned# 5, allocated# 5 2026-01-04T02:38:14.526249Z node 1 :REPLICATION_CONTROLLER DEBUG: tx_assign_tx_id.cpp:142: [controller 72075186224037888][TxAssignTxId] Complete: pending# 0, assigned# 5, allocated# 5, exhausted# 0 |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_assign_tx_id/unittest >> TConfigsDispatcherTests::TestRemoveSubscription [GOOD] >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification >> THeavyPerfTest::TTestLoadEverything [GOOD] >> ObjectDistribution::TestImbalanceCalcualtion [GOOD] >> ObjectDistribution::TestAllowedDomainsAndDown >> TConfigsDispatcherTests::TestYamlAndNonYamlCoexist [GOOD] >> TConfigsDispatcherTests::TestYamlConfigAndIcb >> ObjectDistribution::TestAllowedDomainsAndDown [GOOD] >> ObjectDistribution::TestAddSameNode [GOOD] >> ObjectDistribution::TestManyIrrelevantNodes >> TConfigsDispatcherTests::TestYamlEndToEnd >> TConfigsDispatcherTests::TestSubscriptionNotification [GOOD] >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess >> TConfigsDispatcherObservabilityTests::TestUnknownConfigSource [GOOD] >> TConfigsCacheTests::TestNoNotificationIfConfigIsCached [GOOD] >> TConfigsCacheTests::TestFullConfigurationRestore |89.7%| [TA] $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} |89.7%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... results_accumulator.log} |89.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/ut_assign_tx_id/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] >> TConfigsDispatcherObservabilityTests::TestGetStorageYamlRequestResponse [GOOD] >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] >> TConfigsDispatcherTests::TestYamlConfigAndIcb [GOOD] >> TConfigsCacheTests::TestOverwrittenConfigurationDoesntCauseNotification [GOOD] >> TConfigsDispatcherObservabilityTests::TestDynamicConfigInitialization >> TConfigsCacheTests::TestConfigurationSaveOnNotification [GOOD] >> TConfigsCacheTests::TestConfigurationChangeSensor >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherObservabilityTests::TestGetStorageYamlRequestResponse [GOOD] >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceAndIndex |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherObservabilityTests::TestUnknownConfigSource [GOOD] |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest |89.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestSubscriptionNotificationForNewSubscriberDuringUpdate [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestEmptyChangeCausesNoNotification [GOOD] |89.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestYamlConfigAndIcb [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestRemoveSubscriptionWhileUpdateInProcess [GOOD] >> TConfigsCacheTests::TestFullConfigurationRestore [GOOD] >> TConfigsDispatcherObservabilityTests::TestDynamicConfigInitialization [GOOD] >> TSchemeShardTest::ManyDirs [GOOD] >> TSchemeShardTest::ListNotCreatedDirCase >> TSequence::AlterSequence [GOOD] >> TSequence::AlterTableSetDefaultFromSequence |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TestMalformedRequest::CompressedGzipContentLengthCorrect [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherTests::TestYamlEndToEnd [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsCacheTests::TestFullConfigurationRestore [GOOD] Test command err: 2026-01-04T02:38:15.420937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:15.420992Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:15.633006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:16.553058Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:16.553125Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:16.615006Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsDispatcherObservabilityTests::TestDynamicConfigInitialization [GOOD] Test command err: 2026-01-04T02:38:15.421578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:15.421629Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:15.637504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TSentinelTests::Smoke >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TConfigsCacheTests::TestConfigurationChangeSensor [GOOD] Test command err: 2026-01-04T02:38:15.920359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:15.920424Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:16.116604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:17.102613Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:17.102671Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:17.161532Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_configs_dispatcher/unittest >> TSentinelBaseTests::GuardianDataCenterRatio [GOOD] >> TSentinelBaseTests::GuardianFaultyPDisks >> TSentinelTests::PDiskFaultyState >> 
TSchemeShardTest::ListNotCreatedDirCase [GOOD] >> TSchemeShardTest::ListNotCreatedIndexCase >> TSentinelTests::PDiskPileGuardHalfPile |89.8%| [TA] $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... results_accumulator.log} >> TSentinelTests::PDiskErrorState >> TSentinelBaseTests::PDiskErrorState [GOOD] >> TSentinelBaseTests::PDiskInactiveAfterStateChange [GOOD] >> TSentinelBaseTests::PDiskFaultyState [GOOD] |89.8%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceAndIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:38:09.123527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:38:09.123624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:09.123657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:38:09.123687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:38:09.124208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:38:09.124247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:38:09.124321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:09.124793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:38:09.125655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:38:09.126714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:38:09.208429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:09.208477Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:09.222247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:38:09.223245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:38:09.223653Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:38:09.233166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:38:09.234170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:38:09.235864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:09.237451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:38:09.243741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:09.243918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:38:09.247733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:09.247812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:09.247886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:38:09.247937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:09.248043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:38:09.248544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:38:09.367795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.368594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.368691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:38:09.368749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.368828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.368882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.368951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.369003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.369054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.369100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.369140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.369210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.369299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.369364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.369430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
y parts: 3/4 2026-01-04T02:38:18.029665Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:2 progress is 3/4 2026-01-04T02:38:18.029700Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 3/4 2026-01-04T02:38:18.029743Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 3/4, is published: true 2026-01-04T02:38:18.032004Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:38:18.032034Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 102:0 2026-01-04T02:38:18.032088Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:747:2723] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2026-01-04T02:38:18.032165Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:38:18.032195Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:38:18.032338Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:38:18.032361Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:38:18.032398Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:38:18.032417Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:38:18.032445Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:38:18.032489Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:38:18.032524Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:38:18.032542Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:38:18.032591Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [7:127:2152], Recipient [7:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T02:38:18.032614Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T02:38:18.032647Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:18.032676Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 102:0 
ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:38:18.032860Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:38:18.032937Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:38:18.032958Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 4/4 2026-01-04T02:38:18.032979Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2026-01-04T02:38:18.033005Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 4/4 2026-01-04T02:38:18.033026Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2026-01-04T02:38:18.033048Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 4/4, is published: true 2026-01-04T02:38:18.033093Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:815:2772] message: TxId: 102 2026-01-04T02:38:18.033133Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 4/4 2026-01-04T02:38:18.033173Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:38:18.033200Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:38:18.033285Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:38:18.033317Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:1 2026-01-04T02:38:18.033332Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:1 2026-01-04T02:38:18.033363Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:38:18.033387Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:2 2026-01-04T02:38:18.033409Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:2 2026-01-04T02:38:18.033437Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-04T02:38:18.033457Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:3 2026-01-04T02:38:18.033473Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:3 2026-01-04T02:38:18.033505Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-04T02:38:18.033893Z node 7 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 2146435084, Sender [7:127:2152], Recipient [7:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2026-01-04T02:38:18.033934Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5439: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2026-01-04T02:38:18.033990Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:38:18.034030Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-04T02:38:18.034100Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:38:18.036461Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:38:18.036493Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:38:18.036621Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:38:18.037608Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:38:18.037674Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:815:2772] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2026-01-04T02:38:18.037858Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:38:18.037903Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [7:922:2871] 2026-01-04T02:38:18.038098Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:924:2873], Recipient [7:127:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:38:18.038122Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:38:18.038141Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 2026-01-04T02:38:18.038221Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2026-01-04T02:38:18.040222Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [7:1000:2949], Recipient [7:127:2152]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-04T02:38:18.040274Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:38:18.040363Z node 7 :SCHEMESHARD_DESCRIBE 
DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:38:18.040547Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 187us result status StatusPathDoesNotExist 2026-01-04T02:38:18.040690Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table\', error: path has been deleted (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeTable, state: EPathStateNotExist), drop stepId: 5000039, drop txId: 102" Path: "/MyRoot/Table" PathId: 38 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |89.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/ut_configs_dispatcher/test-results/unittest/{meta.json ... results_accumulator.log} |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSentinelBaseTests::PDiskInitialStatus [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodeNotExpectedRestart [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodeExpectedRestart [GOOD] >> TSentinelBaseTests::GuardianFaultyPDisks [GOOD] >> TSentinelBaseTests::GuardianRackRatio >> TSentinelTests::PDiskRackGuardHalfRack >> TSentinelBaseTests::GuardianRackRatio [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedGzipContentLengthCorrect [GOOD] Test command err: 2026-01-04T02:38:07.406090Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330160239661769:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:07.406880Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:07.571956Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:07.591337Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:07.591421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:07.653557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:07.685279Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:07.687750Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330160239661740:2081] 1767494287404513 != 1767494287404516 
2026-01-04T02:38:07.731900Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:07.731945Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:07.731961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:07.732084Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:07.809813Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:07.877103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:08.012120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:38:08.017860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:38:08.098706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.200507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.237101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.272451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.296740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.320547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.345348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.370368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.400493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.420648Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:08.432265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:09.679018Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330168829597980:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:09.679021Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330168829597991:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:09.679104Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:09.679284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330168829597995:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:09.679347Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:09.682131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:09.692008Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330168829597994:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:38:09.792438Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330168829598047:3100] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:10.069450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:10.092039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:10.114504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:10.138491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:10.161369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:10.187070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeO ... 
de 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:16.760721Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-04T02:38:16.760830Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 16ms 2026-01-04T02:38:16.761176Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } 
} Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:16.761265Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:16.761291Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 15ms 2026-01-04T02:38:16.761513Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:16.761540Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-04T02:38:16.761588Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 16ms 2026-01-04T02:38:16.761853Z node 2 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 
} } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:38:16.886466Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591330197908991329:2438]: Pool not found 2026-01-04T02:38:16.886628Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-04T02:38:17.063556Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591330197908991317:2435]: Pool not found 2026-01-04T02:38:17.063662Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-04T02:38:17.066169Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330202203958728:2454], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:17.066229Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7591330202203958729:2455], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:38:17.066264Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:17.076614Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330202203958732:2456], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:17.076698Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:17.242284Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591330202203958726:2453]: Pool not found 2026-01-04T02:38:17.242490Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-04T02:38:17.738314Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:38948) incoming connection opened 2026-01-04T02:38:17.738391Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:38948) -> (POST /Root, 44 bytes) 2026-01-04T02:38:17.738498Z node 2 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [58c5:97dd:357c:0:40c5:97dd:357c:0] request [CreateQueue] url [/Root] database [/Root] requestId: aaa1eb5e-7f35b06e-8a3d48a1-6ae0b72f 2026-01-04T02:38:17.739040Z node 2 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateQueue] requestId [aaa1eb5e-7f35b06e-8a3d48a1-6ae0b72f] reply with status: BAD_REQUEST message: Can not parse request body from JSON 2026-01-04T02:38:17.739168Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:38948) <- (400 InvalidArgumentException, 86 bytes) 2026-01-04T02:38:17.739226Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:38948) Request: POST /Root HTTP/1.1 Host:example.amazonaws.com X-Amz-Target:AmazonSQS.CreateQueue X-Amz-Date:20150830T123600Z Authorization: Content-Type:application/json Content-Encoding: gzip Content-Length: 44 nhV ,M-MKMURPrH-IU2j 2026-01-04T02:38:17.739274Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:38948) Response: HTTP/1.1 400 InvalidArgumentException Connection: keep-alive x-amzn-requestid: aaa1eb5e-7f35b06e-8a3d48a1-6ae0b72f Content-Type: application/x-amz-json-1.1 Content-Length: 86 Http output full {"__type":"InvalidArgumentException","message":"Can not parse request body from JSON"} 2026-01-04T02:38:17.739532Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:208: (#37,[::1]:38948) connection closed 2026-01-04T02:38:17.871148Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330180729119761:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:17.871233Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelBaseTests::PDiskFaultyState [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelBaseTests::PDiskStateChangeNodeExpectedRestart [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> TSentinelBaseTests::PDiskStateChangeNormalFlow [GOOD] >> TSentinelBaseTests::PDiskStateChangeNodePermanentlyBad [GOOD] >> TSentinelTests::BSControllerUnresponsive >> TSentinelTests::PDiskPileGuardFullPile >> TSchemeShardTest::ListNotCreatedIndexCase [GOOD] >> TSchemeShardTest::FindSubDomainPathId >> TSequence::AlterTableSetDefaultFromSequence [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelBaseTests::GuardianRackRatio [GOOD] |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> 
KqpBatchDelete::SimpleOnePartition >> TSentinelTests::Smoke [GOOD] >> TSentinelTests::PDiskUnknownState ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_sequence/unittest >> TSequence::AlterTableSetDefaultFromSequence [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:38:09.123522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:38:09.123628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:09.123715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:38:09.123756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:38:09.124241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:38:09.124296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:38:09.124370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:09.124789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:38:09.125510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:38:09.126705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:38:09.207147Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:09.207198Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:09.222144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:38:09.223103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:38:09.223638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:38:09.233326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:38:09.234162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:38:09.235890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: 
TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:09.237462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:38:09.243737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:09.243920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:38:09.247732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:09.247800Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:09.247842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:38:09.247892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:09.247993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:38:09.248534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:38:09.405131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.406111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.406237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.406343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.406435Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.406575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.406669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.406756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.406864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.406981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.407086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.407176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.407278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.407388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:09.407500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... Latency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1125 } } CommitVersion { Step: 5000050 TxId: 114 } debug: NTableState::TProposedWaitParts operationId# 114:0 2026-01-04T02:38:19.447850Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:38:19.448842Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [7:1459:3395], Recipient [7:127:2152]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:38:19.448875Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:38:19.448895Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-04T02:38:19.449113Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269551620, Sender [7:1398:3342], Recipient [7:127:2152]: NKikimrTxDataShard.TEvSchemaChanged Source { RawX1: 1398 RawX2: 30064774414 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2026-01-04T02:38:19.449143Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5280: StateWork, processing event TEvDataShard::TEvSchemaChanged 2026-01-04T02:38:19.449222Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 1398 RawX2: 30064774414 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2026-01-04T02:38:19.449261Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 114, tablet: 72075186233409549, partId: 0 2026-01-04T02:38:19.449408Z node 7 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 114:0, at schemeshard: 72057594046678944, message: Source { RawX1: 1398 RawX2: 30064774414 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2026-01-04T02:38:19.449477Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:38:19.449575Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 114:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 1398 RawX2: 30064774414 } Origin: 72075186233409549 State: 2 TxId: 114 Step: 0 Generation: 2 2026-01-04T02:38:19.449650Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 114:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:19.449683Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 114:0, at schemeshard: 72057594046678944 2026-01-04T02:38:19.449717Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 114:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-01-04T02:38:19.449768Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 114:0 129 -> 240 2026-01-04T02:38:19.449932Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:38:19.450286Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:38:19.452718Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2026-01-04T02:38:19.452794Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:38:19.452978Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 114 2026-01-04T02:38:19.453008Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:38:19.453171Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2026-01-04T02:38:19.453192Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:38:19.453309Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 114:0, at schemeshard: 72057594046678944 2026-01-04T02:38:19.453354Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:38:19.453387Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: 
Activate send for 114:0 2026-01-04T02:38:19.453472Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:1398:3342] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 114 at schemeshard: 72057594046678944 2026-01-04T02:38:19.453596Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [7:127:2152], Recipient [7:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T02:38:19.453641Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T02:38:19.453702Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 114:0, at schemeshard: 72057594046678944 2026-01-04T02:38:19.453739Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 114:0 ProgressState 2026-01-04T02:38:19.453869Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:38:19.453898Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#114:0 progress is 1/1 2026-01-04T02:38:19.453938Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2026-01-04T02:38:19.453996Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#114:0 progress is 1/1 2026-01-04T02:38:19.454033Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2026-01-04T02:38:19.454081Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 114, ready parts: 1/1, is published: true 2026-01-04T02:38:19.454135Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [7:795:2762] message: TxId: 114 2026-01-04T02:38:19.454179Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 114 ready parts: 1/1 2026-01-04T02:38:19.454225Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 114:0 2026-01-04T02:38:19.454267Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 114:0 2026-01-04T02:38:19.454386Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 3 2026-01-04T02:38:19.455833Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:38:19.455916Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [7:795:2762] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 114 at schemeshard: 72057594046678944 2026-01-04T02:38:19.456074Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 114: got EvNotifyTxCompletionResult 2026-01-04T02:38:19.456122Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 114: satisfy waiter [7:1426:3362] 2026-01-04T02:38:19.456291Z node 7 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [7:1428:3364], Recipient [7:127:2152]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:38:19.456334Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:38:19.456355Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 114 TestModificationResults wait txId: 115 2026-01-04T02:38:19.457180Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [7:1468:3404], Recipient [7:127:2152]: {TEvModifySchemeTransaction txid# 115 TabletId# 72057594046678944} 2026-01-04T02:38:19.457227Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-04T02:38:19.459296Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "Table3" Columns { Name: "value" DefaultFromSequence: "/MyRoot/seq1" } } } TxId: 115 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:38:19.459490Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/Table3, pathId: , opId: 115:0, at schemeshard: 72057594046678944 2026-01-04T02:38:19.459850Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 115:1, propose status:StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, at schemeshard: 72057594046678944 2026-01-04T02:38:19.460028Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:38:19.461777Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 115, response: Status: StatusInvalidParameter Reason: "Column \'value\' is of type Bool but default expression is of type Int64" TxId: 115 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:38:19.461994Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 115, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Column 'value' is of type Bool but default expression is of type Int64, operation: ALTER TABLE, path: /MyRoot/Table3 2026-01-04T02:38:19.462057Z node 7 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 115, wait until txId: 115 |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_sequence/unittest >> KqpBatchUpdate::ColumnTable |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_3 >> TSchemeShardTest::FindSubDomainPathId [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActor >> KqpBatchDelete::TableNotExists |89.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} >> TColumnShardTestSchema::HotTiers [GOOD] |89.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... 
results_accumulator.log} |89.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_sequence/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpBatchDelete::Large_1 >> TSentinelTests::PDiskPileGuardHalfPile [GOOD] >> TSentinelTests::PDiskPileGuardWithoutBridgeMode >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494842.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494842.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167494842.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494842.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494842.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167494842.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493642.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147494842.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147494842.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493642.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147493642.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147493642.000000s;Name=;Codec=}; 2026-01-04T02:37:22.433136Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:22.470562Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:22.470815Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:22.478272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:22.478520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:22.478746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:22.478862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:22.478980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:22.479094Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:22.479194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:22.479311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:22.479431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:22.479537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:22.479646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:22.479749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:22.479857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:22.509477Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:22.509626Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:22.509681Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:22.509886Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:22.510068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:22.510158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:22.510201Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:22.510294Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:22.510352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:22.510397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:22.510427Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:22.510600Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:22.510674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:22.510720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:22.510754Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:22.510858Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:22.510917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:22.510961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:22.510994Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:22.511043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:22.511085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2026-01-04T02:37:22.511117Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:22.511160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:22.511200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:22.511229Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:22.511422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:22.511482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:22.511517Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:22.511648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:22.511696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:22.511 ... 
e::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:21.112775Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:21.112936Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767494290748:max} readable: {1767494290748:max} at tablet 9437184 2026-01-04T02:38:21.113039Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-04T02:38:21.113182Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494290748:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:38:21.113228Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494290748:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:38:21.113580Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494290748:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-04T02:38:21.114800Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494290748:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-04T02:38:21.115477Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494290748:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1455:3399];trace_detailed=; 2026-01-04T02:38:21.115779Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-04T02:38:21.115957Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-04T02:38:21.116147Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:21.116293Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:21.116545Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:38:21.116646Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:21.116735Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:21.116889Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1455:3399] finished for tablet 9437184 2026-01-04T02:38:21.117217Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1454:3398];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":62455267,"name":"_full_task","f":62455267,"d_finished":0,"c":0,"l":62456770,"d":1503},"events":[{"name":"bootstrap","f":62455436,"d_finished":722,"c":1,"l":62456158,"d":722},{"a":62456367,"name":"ack","f":62456367,"d_finished":0,"c":0,"l":62456770,"d":403},{"a":62456354,"name":"processing","f":62456354,"d_finished":0,"c":0,"l":62456770,"d":416},{"name":"ProduceResults","f":62455886,"d_finished":457,"c":2,"l":62456589,"d":457},{"a":62456593,"name":"Finish","f":62456593,"d_finished":0,"c":0,"l":62456770,"d":177}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:21.117274Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1454:3398];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:38:21.117586Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1454:3398];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":62455267,"name":"_full_task","f":62455267,"d_finished":0,"c":0,"l":62457150,"d":1883},"events":[{"name":"bootstrap","f":62455436,"d_finished":722,"c":1,"l":62456158,"d":722},{"a":62456367,"name":"ack","f":62456367,"d_finished":0,"c":0,"l":62457150,"d":783},{"a":62456354,"name":"processing","f":62456354,"d_finished":0,"c":0,"l":62457150,"d":796},{"name":"ProduceResults","f":62455886,"d_finished":457,"c":2,"l":62456589,"d":457},{"a":62456593,"name":"Finish","f":62456593,"d_finished":0,"c":0,"l":62457150,"d":557}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1455:3399]->[1:1454:3398] 2026-01-04T02:38:21.117657Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:38:21.114779Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-04T02:38:21.117708Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:38:21.117845Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 160000/9752224 80000/4886744 0/0 |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSentinelTests::PDiskUnknownState [GOOD] >> KqpBatchUpdate::DisableFlags+UseSink+UseBatchUpdates >> TSchemeShardTest::FindSubDomainPathIdActor [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActorAsync >> TSentinelTests::PDiskPileGuardFullPile [GOOD] >> TSentinelTests::PDiskPileGuardConfig >> KqpBatchUpdate::MultiStatement ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=167494843.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494843.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494843.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167494843.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494843.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494843.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167494843.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493643.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147494843.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147494843.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493643.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147493643.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147493643.000000s;Name=;Codec=}; 2026-01-04T02:37:24.311972Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:24.341164Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:24.341395Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:24.349769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:24.350057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:24.350299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:24.350420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:24.350521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:24.350657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:24.350784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:24.350905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:24.351043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:24.351160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:24.351264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:24.351373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:24.351489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:24.378896Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:24.379084Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:24.379161Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:24.379365Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:24.379583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:24.379676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:24.379746Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:24.379847Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:24.379919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:24.379982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:24.380019Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:24.380235Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:24.380316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:24.380363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:24.380396Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:24.380523Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:24.380592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:24.380641Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:24.380675Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:24.380728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:24.380770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:24.380800Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:24.380857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:24.381004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:24.381048Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:24.381258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:24.381312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:24.381363Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:24.381511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:24.381559Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:24.381600Z nod ... te::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:21.802448Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:21.802592Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767494292622:max} readable: {1767494292622:max} at tablet 9437184 2026-01-04T02:38:21.802674Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-04T02:38:21.802796Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494292622:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:38:21.802850Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494292622:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:38:21.803221Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494292622:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-04T02:38:21.804398Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494292622:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-04T02:38:21.805054Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494292622:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1449:3394];trace_detailed=; 2026-01-04T02:38:21.805376Z 
node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-04T02:38:21.805526Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-04T02:38:21.805693Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:21.805879Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:21.806148Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1449:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:38:21.806246Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1449:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:21.806338Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1449:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:21.806498Z node 1 
:TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1449:3394] finished for tablet 9437184 2026-01-04T02:38:21.806812Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1449:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1448:3393];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":61322421,"name":"_full_task","f":61322421,"d_finished":0,"c":0,"l":61323967,"d":1546},"events":[{"name":"bootstrap","f":61322595,"d_finished":734,"c":1,"l":61323329,"d":734},{"a":61323557,"name":"ack","f":61323557,"d_finished":0,"c":0,"l":61323967,"d":410},{"a":61323545,"name":"processing","f":61323545,"d_finished":0,"c":0,"l":61323967,"d":422},{"name":"ProduceResults","f":61323029,"d_finished":480,"c":2,"l":61323777,"d":480},{"a":61323781,"name":"Finish","f":61323781,"d_finished":0,"c":0,"l":61323967,"d":186}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:21.806865Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1449:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1448:3393];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:38:21.807199Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1449:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1448:3393];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":61322421,"name":"_full_task","f":61322421,"d_finished":0,"c":0,"l":61324329,"d":1908},"events":[{"name":"bootstrap","f":61322595,"d_finished":734,"c":1,"l":61323329,"d":734},{"a":61323557,"name":"ack","f":61323557,"d_finished":0,"c":0,"l":61324329,"d":772},{"a":61323545,"name":"processing","f":61323545,"d_finished":0,"c":0,"l":61324329,"d":784},{"name":"ProduceResults","f":61323029,"d_finished":480,"c":2,"l":61323777,"d":480},{"a":61323781,"name":"Finish","f":61323781,"d_finished":0,"c":0,"l":61324329,"d":548}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1449:3394]->[1:1448:3393] 
2026-01-04T02:38:21.807285Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1449:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:38:21.804376Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-04T02:38:21.807321Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1449:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:38:21.807421Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1449:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9752224 160000/9752224 160000/9752224 80000/4886744 0/0 |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskUnknownState [GOOD] Test command err: 2026-01-04T02:38:18.968385Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-04T02:38:18.968431Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-04T02:38:18.968514Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-04T02:38:18.968546Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-04T02:38:18.968597Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-04T02:38:18.968715Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-04T02:38:18.971030Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN 
Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-04T02:38:18.974726Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 
DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 
FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... 
74182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-04T02:38:21.009128Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 15 2026-01-04T02:38:21.009219Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-04T02:38:21.009310Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-04T02:38:21.009342Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-04T02:38:21.009583Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 9:36, status# ACTIVE, required status# INACTIVE, reason# PrevState# Normal State# Unknown StateCounter# 1 current# ACTIVE, dry run# 0 2026-01-04T02:38:21.009620Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-01-04T02:38:21.009793Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 2 2026-01-04T02:38:21.009842Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 9:36 2026-01-04T02:38:21.009894Z node 9 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-04T02:38:21.009914Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-04T02:38:21.009990Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-01-04T02:38:21.010017Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-01-04T02:38:21.010034Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2026-01-04T02:38:21.010052Z node 9 :CMS DEBUG: sentinel.cpp:786: 
[Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-01-04T02:38:21.010068Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-01-04T02:38:21.010086Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-01-04T02:38:21.010101Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2026-01-04T02:38:21.010116Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2026-01-04T02:38:21.010271Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Reserved16 } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-04T02:38:21.010312Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 16 2026-01-04T02:38:21.010664Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Reserved15 } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-04T02:38:21.010681Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 15 2026-01-04T02:38:21.010836Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-04T02:38:21.010983Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle 
TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-04T02:38:21.011051Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-04T02:38:21.011153Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-04T02:38:21.011265Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Reserved17 } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-04T02:38:21.011292Z node 9 :CMS CRIT: sentinel.cpp:721: [Sentinel] [StateUpdater] Unknown pdisk state: 17 2026-01-04T02:38:21.011367Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 
107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-04T02:38:21.011396Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-04T02:38:21.011637Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 14:57, status# ACTIVE, required status# INACTIVE, reason# PrevState# Normal State# Unknown StateCounter# 1 current# ACTIVE, dry run# 0 2026-01-04T02:38:21.011675Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-01-04T02:38:21.011828Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 3 2026-01-04T02:38:21.011878Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 14:57 |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn [GOOD] >> TColumnShardTestSchema::RebootColdTiers [GOOD] |89.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494840.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494840.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167494840.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494840.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494840.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167494840.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493640.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147494840.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147494840.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493640.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147493640.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147493640.000000s;Name=;Codec=}; 2026-01-04T02:37:21.113182Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:21.143877Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:21.144131Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:21.150962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:21.151194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:21.151416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:21.151532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:21.151637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:21.151762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:21.151870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:21.152002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:21.152119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:21.152235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:21.152331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:21.152424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:21.152530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:21.180030Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 
9437184 2026-01-04T02:37:21.180168Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:21.180238Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:21.180421Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:21.180591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:21.180672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:21.180714Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:21.180800Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:21.180855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:21.180897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:21.180927Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:21.181102Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:21.181175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:21.181219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:21.181252Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:21.181353Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:21.181412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:21.181454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:21.181485Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:21.181532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:21.181574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:21.181604Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:21.181653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:21.181689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:21.181717Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:21.181950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:21.182006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:21.182036Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:21.182167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:21.182214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:21.182244Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:21.182292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:21.182331Z node 1 :TX_COLUMNSHARD WARN: l ... 
ime=5; 2026-01-04T02:38:23.068801Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=50; 2026-01-04T02:38:23.068834Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=2041; 2026-01-04T02:38:23.068861Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=2115; 2026-01-04T02:38:23.068903Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=6; 2026-01-04T02:38:23.068963Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=23; 2026-01-04T02:38:23.068986Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=2510; 2026-01-04T02:38:23.069065Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=43; 2026-01-04T02:38:23.069146Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=48; 2026-01-04T02:38:23.069217Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=43; 2026-01-04T02:38:23.069290Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=51; 2026-01-04T02:38:23.070494Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1163; 2026-01-04T02:38:23.072452Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1894; 2026-01-04T02:38:23.072543Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2026-01-04T02:38:23.072601Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2026-01-04T02:38:23.072656Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=16; 2026-01-04T02:38:23.072728Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=38; 2026-01-04T02:38:23.072802Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2026-01-04T02:38:23.072898Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=57; 2026-01-04T02:38:23.072938Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-04T02:38:23.072997Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2026-01-04T02:38:23.073065Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=38; 2026-01-04T02:38:23.073334Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=228; 2026-01-04T02:38:23.073382Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=13509; 2026-01-04T02:38:23.073487Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:38:23.073586Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:38:23.073637Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:38:23.073690Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:38:23.086057Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-04T02:38:23.086166Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:38:23.086230Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 
2026-01-04T02:38:23.086284Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767493990756;tx_id=18446744073709551615;;current_snapshot_ts=1767494278643; 2026-01-04T02:38:23.086314Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:38:23.086345Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:23.086371Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:23.086422Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:23.086581Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.079000s; 2026-01-04T02:38:23.087524Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:38:23.087716Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:38:23.087760Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:38:23.087814Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-04T02:38:23.087856Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767493990756;tx_id=18446744073709551615;;current_snapshot_ts=1767494278643; 2026-01-04T02:38:23.087883Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:38:23.087927Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:23.087949Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:23.088016Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:23.088265Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.171000s; 2026-01-04T02:38:23.088288Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:1520:3340];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 80000/4873744 0/0 >> TSentinelTests::PDiskFaultyState [GOOD] >> TSentinelTests::PDiskFaultyGuard |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL-Reboot+Internal-FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-04T02:37:54.181537Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:37:54.184723Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:37:54.185058Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:54.216809Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:54.217107Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:54.224363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:54.224637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:54.224892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:54.225029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:54.225170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:54.225271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:54.225366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:54.225519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:54.225633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:54.225730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:54.225859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:54.225966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:54.226104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:54.251929Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:37:54.254736Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:54.254872Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:54.254926Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:54.255126Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:54.255324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:54.255410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:54.255451Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:54.255539Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:54.255597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:54.255635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:54.255689Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:54.255892Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:54.256014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:54.256075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:54.256104Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:54.256195Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:54.256248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:54.256289Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:54.256316Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:54.256367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:54.256405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2026-01-04T02:37:54.256432Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:54.256473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:54.256511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:54.256544Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:54.256792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:54.256849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:54.256878Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:54.257038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:54.257086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:54.257119Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:54.257194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:54.257247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:54.257276Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:54.257322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:54.257357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
=1; 2026-01-04T02:38:23.138164Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:38:23.138229Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:38:23.138455Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:38:23.138692Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=saved_at: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:23.138746Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:38:23.138964Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-04T02:38:23.139056Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=saved_at; 2026-01-04T02:38:23.139389Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2622];bytes=16000;rows=2000;faults=0;finished=0;fault=0;schema=saved_at: uint64; 2026-01-04T02:38:23.139605Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:23.139776Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:23.139993Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:23.140180Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:38:23.140298Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:23.140405Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:23.140735Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:618:2623] finished for tablet 9437184 2026-01-04T02:38:23.141403Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:617:2622];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.014}],"full":{"a":29396708,"name":"_full_task","f":29396708,"d_finished":0,"c":0,"l":29411390,"d":14682},"events":[{"name":"bootstrap","f":29397047,"d_finished":1605,"c":1,"l":29398652,"d":1605},{"a":29410732,"name":"ack","f":29407159,"d_finished":3282,"c":2,"l":29410601,"d":3940},{"a":29410717,"name":"processing","f":29398846,"d_finished":7117,"c":5,"l":29410607,"d":7790},{"name":"ProduceResults","f":29398068,"d_finished":4454,"c":9,"l":29410999,"d":4454},{"a":29411005,"name":"Finish","f":29411005,"d_finished":0,"c":0,"l":29411390,"d":385},{"name":"task_result","f":29398878,"d_finished":3706,"c":3,"l":29406925,"d":3706}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:23.141515Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:617:2622];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:38:23.142191Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:617:2622];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.014},{"events":["l_ack","l_processing","l_Finish"],"t":0.015}],"full":{"a":29396708,"name":"_full_task","f":29396708,"d_finished":0,"c":0,"l":29412155,"d":15447},"events":[{"name":"bootstrap","f":29397047,"d_finished":1605,"c":1,"l":29398652,"d":1605},{"a":29410732,"name":"ack","f":29407159,"d_finished":3282,"c":2,"l":29410601,"d":4705},{"a":29410717,"name":"processing","f":29398846,"d_finished":7117,"c":5,"l":29410607,"d":8555},{"name":"ProduceResults","f":29398068,"d_finished":4454,"c":9,"l":29410999,"d":4454},{"a":29411005,"name":"Finish","f":29411005,"d_finished":0,"c":0,"l":29412155,"d":1150},{"name":"task_result","f":29398878,"d_finished":3706,"c":3,"l":29406925,"d":3706}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:23.142294Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:38:23.123607Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=59748;inserted_portions_bytes=61952;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=121700;selected_rows=0; 2026-01-04T02:38:23.142385Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:38:23.142588Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:618:2623];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=9;column_names=saved_at;);;program_input=(column_ids=9;column_names=saved_at;);;; |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpBatchDelete::TableNotExists [GOOD] >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] >> KqpBatchUpdate::Returning >> KqpBatchDelete::UnknownColumn >> TSentinelTests::PDiskPileGuardWithoutBridgeMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::TableNotExists [GOOD] Test command err: Trying to start YDB, gRPC: 22348, MsgBus: 18473 2026-01-04T02:38:21.374350Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330219421351944:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:21.374426Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:21.571342Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:21.577047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:21.577128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:21.618645Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:21.653587Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:21.655433Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330219421351916:2081] 1767494301372701 != 1767494301372704 2026-01-04T02:38:21.697093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:21.697134Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:21.697146Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:21.697279Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:21.834438Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:22.050167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:22.380278Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:23.605710Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330228011287394:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:23.605719Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330228011287405:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:23.605839Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:23.606174Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330228011287409:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:23.606250Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:23.609039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:23.617945Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330228011287408:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:38:23.720127Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330228011287461:2534] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:23.914247Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330228011287478:2333], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:35: Error: At function: KiDeleteTable!
:2:35: Error: Cannot find table 'db.[/Root/TestBatchNotExists]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:38:23.914637Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NmZlMTQ1YTEtZjYwZGM1ODYtOTA3ZGIzYmItYTZlYWNjMzc=, ActorId: [1:7591330228011287366:2320], ActorState: ExecuteState, LegacyTraceId: 01ke3dxfvkaemf833gvtehaapx, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 35 } message: "At function: KiDeleteTable!" end_position { row: 2 column: 35 } severity: 1 issues { position { row: 2 column: 35 } message: "Cannot find table \'db.[/Root/TestBatchNotExists]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 35 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:38:23.976748Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330228011287496:2339], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:41: Error: At function: KiDeleteTable!
:3:41: Error: Cannot find table 'db.[/Root/TestBatchNotExists]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:38:23.977111Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NmZlMTQ1YTEtZjYwZGM1ODYtOTA3ZGIzYmItYTZlYWNjMzc=, ActorId: [1:7591330228011287366:2320], ActorState: ExecuteState, LegacyTraceId: 01ke3dxg5f2r17623dansrk990, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 41 } message: "At function: KiDeleteTable!" end_position { row: 3 column: 41 } severity: 1 issues { position { row: 3 column: 41 } message: "Cannot find table \'db.[/Root/TestBatchNotExists]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 41 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# |89.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskPileGuardWithoutBridgeMode [GOOD] Test command err: 2026-01-04T02:38:19.330687Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-04T02:38:19.330736Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-04T02:38:19.330783Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-04T02:38:19.330806Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-04T02:38:19.330841Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-04T02:38:19.330896Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-04T02:38:19.331483Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { 
Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-04T02:38:19.338379Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: 
ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 
VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... 
{ PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3360110 2026-01-04T02:38:23.333475Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3360110 2026-01-04T02:38:23.333564Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3360110 2026-01-04T02:38:23.333616Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-04T02:38:23.344182Z node 9 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-04T02:38:23.344229Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-04T02:38:23.344330Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-01-04T02:38:23.344394Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-01-04T02:38:23.344425Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2026-01-04T02:38:23.344446Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-01-04T02:38:23.344467Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-01-04T02:38:23.344506Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-01-04T02:38:23.344531Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2026-01-04T02:38:23.344553Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2026-01-04T02:38:23.345011Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle 
TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-04T02:38:23.345389Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-04T02:38:23.345506Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-04T02:38:23.345583Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-04T02:38:23.345672Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: 
"/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-04T02:38:23.345765Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-04T02:38:23.345885Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-04T02:38:23.346014Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3420110 2026-01-04T02:38:23.346068Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-04T02:38:23.346438Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 12:48, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:23.346485Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 12:50, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:23.346508Z node 9 :CMS NOTICE: 
sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 12:49, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:23.346532Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 12:51, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:23.346565Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 4 2026-01-04T02:38:23.346776Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 4 2026-01-04T02:38:23.346815Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 12:48 2026-01-04T02:38:23.346843Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 12:49 2026-01-04T02:38:23.346861Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 12:50 2026-01-04T02:38:23.346874Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 12:51 |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskPileGuardConfig [GOOD] >> KqpBatchUpdate::TableWithIndex >> KqpBatchDelete::DisableFlags+UseSink+UseBatchUpdates >> TSentinelTests::PDiskFaultyGuard [GOOD] >> TSentinelTests::PDiskFaultyGuardWithForced >> KqpBatchDelete::DeleteOn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskPileGuardConfig [GOOD] Test command err: 2026-01-04T02:38:20.104106Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-04T02:38:20.104165Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-04T02:38:20.104235Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-04T02:38:20.104269Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-04T02:38:20.104327Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-04T02:38:20.104422Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-04T02:38:20.105559Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: 
"pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-04T02:38:20.110214Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 
20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { 
VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... 
tateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3600110 2026-01-04T02:38:24.281757Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-04T02:38:24.292552Z node 9 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-04T02:38:24.292612Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-04T02:38:24.292724Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-01-04T02:38:24.292770Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-01-04T02:38:24.292814Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2026-01-04T02:38:24.292850Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-01-04T02:38:24.292881Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-01-04T02:38:24.292912Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-01-04T02:38:24.292941Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2026-01-04T02:38:24.292999Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2026-01-04T02:38:24.293290Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-04T02:38:24.294164Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-04T02:38:24.294388Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle 
TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-04T02:38:24.294528Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-04T02:38:24.294683Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-04T02:38:24.294841Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-04T02:38:24.294990Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: 
"/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-04T02:38:24.295137Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 3660110 2026-01-04T02:38:24.295208Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-04T02:38:24.295672Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 14:56, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:24.295736Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 14:57, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:24.295770Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 14:58, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:24.295803Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 14:59, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:24.295855Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 16:64, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:24.295919Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 16:65, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:24.295955Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 16:66, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:24.295998Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 16:67, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:24.296042Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 8 2026-01-04T02:38:24.296390Z node 9 :CMS DEBUG: sentinel.cpp:1343: 
[Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 4 2026-01-04T02:38:24.296452Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 14:56 2026-01-04T02:38:24.296487Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 14:57 2026-01-04T02:38:24.296510Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 14:58 2026-01-04T02:38:24.296534Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 14:59 2026-01-04T02:38:24.296557Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 16:64 2026-01-04T02:38:24.296580Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 16:65 2026-01-04T02:38:24.296622Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 16:66 2026-01-04T02:38:24.296648Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 16:67 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::FindSubDomainPathIdActorAsync [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:34:01.727859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:34:01.727977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:01.728027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:34:01.728083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:34:01.728129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:34:01.728160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:34:01.728244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:34:01.728359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:34:01.729307Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:34:01.729662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:34:01.941476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:34:01.941543Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:34:01.971009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:34:01.971318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:34:01.971519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:34:01.988348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:34:01.988638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:34:01.989441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:34:01.989745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:34:02.002691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:02.002967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:34:02.004421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:34:02.004508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:34:02.004740Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:34:02.004790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:34:02.004835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:34:02.005090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:34:02.221219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:02.226684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:02.226870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:02.226953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:02.227023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:02.227109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:02.227193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:02.227263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:02.227342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:02.227430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:02.227506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:02.227603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:02.227684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:02.227751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:34:02.227806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
11:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-04T02:38:23.748702Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 39 2026-01-04T02:38:23.749543Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:23.749625Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:38:23.749865Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:38:23.749945Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:23.750012Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:38:23.750065Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:23.750166Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-04T02:38:23.750236Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:23.750315Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:38:23.750382Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:38:23.750634Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:38:23.750719Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-04T02:38:23.750778Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-04T02:38:23.750828Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-04T02:38:23.752004Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:38:23.752108Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:38:23.752151Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:38:23.752205Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 
72057594046678944, LocalPathId: 38], version: 5 2026-01-04T02:38:23.752285Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:38:23.753172Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:38:23.753261Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:38:23.753294Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:38:23.753323Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-04T02:38:23.753352Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:38:23.753426Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:38:23.757544Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:38:23.757784Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:38:23.758064Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:38:23.758127Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:38:23.758575Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:38:23.758682Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:38:23.758742Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [17:878:2829] TestWaitNotification: OK eventTxId 102 2026-01-04T02:38:23.759328Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:38:23.759643Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA" took 377us result status StatusSuccess 2026-01-04T02:38:23.760348Z node 17 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA" PathDescription { Self { Name: "SubDomenA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:38:23.760968Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomenA/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:38:23.761228Z node 17 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomenA/Topic1" took 276us result status StatusSuccess 2026-01-04T02:38:23.761851Z node 17 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomenA/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 
TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 247 AccountSize: 247 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:38:24.202914Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__find_subdomain_path_id.cpp:20: FindTabletSubDomainPathId for tablet 72075186233409546 |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn [GOOD] |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest >> KqpBatchUpdate::ColumnTable [GOOD] >> TSentinelTests::PDiskFaultyGuardWithForced [GOOD] >> TStorageBalanceTest::TestScenario3 [GOOD] |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::TTL+Reboot-Internal+FirstPkColumn [GOOD] Test command err: Running TestTtl ttlColumnType=Timestamp 2026-01-04T02:37:56.931170Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:37:56.935479Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:37:56.935924Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:56.966775Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:56.967026Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:56.973973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:56.974222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:56.974440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:56.974569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:56.974680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:56.974778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:56.974886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:56.975015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:56.975131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:56.975236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:56.975346Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:56.975465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:56.975609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:56.998796Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:106:2139], Recipient [1:128:2159]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:37:57.001023Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:57.001124Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:57.001177Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:57.001312Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:57.001442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:57.001528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:57.001559Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:57.001617Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:57.001660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:57.001698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:57.001736Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:57.001926Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:57.001971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:57.002001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:57.002020Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:57.002073Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:57.002104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:57.002126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:57.002146Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:57.002194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:57.002233Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:57.002252Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:57.002277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:57.002301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:57.002323Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:57.002476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:57.002521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:57.002555Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:57.002672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:57.002712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:57.002739Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:57.002787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:57.002826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:57.002845Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:57.002874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:57.002895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Copy ... 
=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:38:26.316595Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:38:26.316936Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:38:26.317228Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1000;schema=timestamp: uint64;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:26.317322Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:38:26.317564Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=1000; 2026-01-04T02:38:26.317660Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1000;batch_columns=timestamp; 2026-01-04T02:38:26.318031Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:589:2568];bytes=8000;rows=1000;faults=0;finished=0;fault=0;schema=timestamp: uint64; 2026-01-04T02:38:26.318303Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:26.318482Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:26.318711Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:26.319004Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:38:26.319234Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:26.319458Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:26.319840Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [5:590:2569] finished for tablet 9437184 2026-01-04T02:38:26.320580Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[5:589:2568];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.007},{"events":["f_ack"],"t":0.008},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":29843159,"name":"_full_task","f":29843159,"d_finished":0,"c":0,"l":29854209,"d":11050},"events":[{"name":"bootstrap","f":29843525,"d_finished":1718,"c":1,"l":29845243,"d":1718},{"a":29853223,"name":"ack","f":29851160,"d_finished":1852,"c":1,"l":29853012,"d":2838},{"a":29853200,"name":"processing","f":29845456,"d_finished":4411,"c":3,"l":29853016,"d":5420},{"name":"ProduceResults","f":29844633,"d_finished":3196,"c":6,"l":29853752,"d":3196},{"a":29853759,"name":"Finish","f":29853759,"d_finished":0,"c":0,"l":29854209,"d":450},{"name":"task_result","f":29845479,"d_finished":2471,"c":2,"l":29850879,"d":2471}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:26.320689Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[5:589:2568];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:38:26.321340Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[5:589:2568];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.007},{"events":["f_ack"],"t":0.008},{"events":["l_ProduceResults","f_Finish"],"t":0.01},{"events":["l_ack","l_processing","l_Finish"],"t":0.011}],"full":{"a":29843159,"name":"_full_task","f":29843159,"d_finished":0,"c":0,"l":29855011,"d":11852},"events":[{"name":"bootstrap","f":29843525,"d_finished":1718,"c":1,"l":29845243,"d":1718},{"a":29853223,"name":"ack","f":29851160,"d_finished":1852,"c":1,"l":29853012,"d":3640},{"a":29853200,"name":"processing","f":29845456,"d_finished":4411,"c":3,"l":29853016,"d":6222},{"name":"ProduceResults","f":29844633,"d_finished":3196,"c":6,"l":29853752,"d":3196},{"a":29853759,"name":"Finish","f":29853759,"d_finished":0,"c":0,"l":29855011,"d":1252},{"name":"task_result","f":29845479,"d_finished":2471,"c":2,"l":29850879,"d":2471}],"id":"9437184::30"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:26.321471Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:38:26.306765Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=59184;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=59184;selected_rows=0; 2026-01-04T02:38:26.321532Z node 5 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:38:26.321738Z node 5 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[5:590:2569];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpBatchUpdate::TableNotExists >> KqpBatchUpdate::DisableFlags+UseSink+UseBatchUpdates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskFaultyGuardWithForced [GOOD] Test command err: 2026-01-04T02:38:19.275481Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-04T02:38:19.275528Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 
2026-01-04T02:38:19.275575Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-04T02:38:19.275600Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-04T02:38:19.275635Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-04T02:38:19.275699Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-04T02:38:19.276492Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-04T02:38:19.280301Z node 1 :CMS DEBUG: 
sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { 
NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } 
GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... oupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 133 VSlotId: 1000 } } Group { GroupId: 13 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 133 VSlotId: 1001 } } Group { GroupId: 14 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 133 VSlotId: 1002 } } Group { GroupId: 15 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 133 VSlotId: 1003 } } Group { GroupId: 16 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 134 VSlotId: 1000 } } Group { GroupId: 17 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 134 VSlotId: 1001 } } Group { GroupId: 18 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 134 VSlotId: 1002 } } Group { GroupId: 19 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 134 VSlotId: 1003 } } Group { GroupId: 20 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 135 VSlotId: 1000 } } Group { GroupId: 21 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 135 VSlotId: 1001 } } Group { GroupId: 22 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 135 VSlotId: 1002 } } Group { GroupId: 23 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 135 VSlotId: 1003 } } Group { GroupId: 24 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 136 VSlotId: 1000 } } Group { GroupId: 25 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 136 VSlotId: 1001 } } Group { GroupId: 26 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 136 VSlotId: 1002 } } Group { GroupId: 27 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 136 VSlotId: 1003 } } Group { GroupId: 28 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 137 VSlotId: 1000 } } Group { GroupId: 29 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 137 VSlotId: 1001 } } Group { GroupId: 30 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 137 VSlotId: 1002 } } Group { GroupId: 31 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 137 VSlotId: 1003 } } Group { GroupId: 32 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 138 VSlotId: 1000 } } Group { GroupId: 33 
GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 138 VSlotId: 1001 } } Group { GroupId: 34 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 138 VSlotId: 1002 } } Group { GroupId: 35 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 138 VSlotId: 1003 } } Group { GroupId: 36 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 139 VSlotId: 1000 } } Group { GroupId: 37 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 139 VSlotId: 1001 } } Group { GroupId: 38 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 139 VSlotId: 1002 } } Group { GroupId: 39 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 13 PDiskId: 139 VSlotId: 1003 } } Group { GroupId: 40 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 140 VSlotId: 1000 } } Group { GroupId: 41 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 140 VSlotId: 1001 } } Group { GroupId: 42 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 140 VSlotId: 1002 } } Group { GroupId: 43 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 140 VSlotId: 1003 } } Group { GroupId: 44 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 141 VSlotId: 1000 } } Group { GroupId: 45 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 141 VSlotId: 1001 } } Group { GroupId: 46 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 141 VSlotId: 1002 } } Group { GroupId: 47 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 141 VSlotId: 1003 } } Group { GroupId: 48 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 142 VSlotId: 1000 } } Group { GroupId: 49 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 142 VSlotId: 1001 } } Group { GroupId: 50 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 142 VSlotId: 1002 } } Group { GroupId: 51 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 142 VSlotId: 1003 } } Group { GroupId: 52 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 143 VSlotId: 1000 } } Group { GroupId: 53 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 143 VSlotId: 1001 } } Group { GroupId: 54 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 143 VSlotId: 1002 } } Group { GroupId: 55 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 143 VSlotId: 1003 } } Group { GroupId: 56 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 144 VSlotId: 1000 } } Group { GroupId: 57 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 144 VSlotId: 1001 } } Group { GroupId: 58 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 144 VSlotId: 1002 } } Group { GroupId: 59 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 144 VSlotId: 1003 } } Group { GroupId: 60 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 145 VSlotId: 1000 } } Group { GroupId: 61 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 145 VSlotId: 1001 } } Group { GroupId: 62 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 145 VSlotId: 1002 } } Group { GroupId: 63 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 145 VSlotId: 1003 } } Group { GroupId: 64 GroupGeneration: 1 
ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 146 VSlotId: 1000 } } Group { GroupId: 65 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 146 VSlotId: 1001 } } Group { GroupId: 66 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 146 VSlotId: 1002 } } Group { GroupId: 67 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 146 VSlotId: 1003 } } Group { GroupId: 68 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 147 VSlotId: 1000 } } Group { GroupId: 69 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 147 VSlotId: 1001 } } Group { GroupId: 70 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 147 VSlotId: 1002 } } Group { GroupId: 71 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 147 VSlotId: 1003 } } Group { GroupId: 72 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 148 VSlotId: 1000 } } Group { GroupId: 73 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 148 VSlotId: 1001 } } Group { GroupId: 74 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 148 VSlotId: 1002 } } Group { GroupId: 75 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 148 VSlotId: 1003 } } Group { GroupId: 76 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 149 VSlotId: 1000 } } Group { GroupId: 77 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 149 VSlotId: 1001 } } Group { GroupId: 78 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 149 VSlotId: 1002 } } Group { GroupId: 79 GroupGeneration: 1 ErasureSpecies: "none" VSlotId { NodeId: 14 PDiskId: 149 VSlotId: 1003 } } } } Success: true 2026-01-04T02:38:26.596104Z node 13 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 0.000000s 2026-01-04T02:38:26.596161Z node 13 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-04T02:38:26.596316Z node 13 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-01-04T02:38:26.596366Z node 13 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-01-04T02:38:26.596752Z node 13 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 130 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-130.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 131 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-131.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 132 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-132.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 133 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-133.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 134 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-134.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 135 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-135.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 136 CreateTime: 0 ChangeTime: 0 Path: 
"/13/pdisk-136.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 137 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-137.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 138 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-138.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 139 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-139.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-04T02:38:26.597239Z node 13 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 140 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-140.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 141 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-141.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 142 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-142.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 143 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-143.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 144 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-144.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 145 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-145.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 146 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-146.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 147 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-147.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 148 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-148.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 149 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-149.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120110 2026-01-04T02:38:26.597323Z node 13 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> KqpBatchUpdate::MultiStatement [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ColumnTable [GOOD] Test command err: Trying to start YDB, gRPC: 25596, MsgBus: 16164 2026-01-04T02:38:20.926119Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330216018491179:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:20.926217Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:21.044288Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:21.111910Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:21.111980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:21.161774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:21.206037Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330216018491152:2081] 1767494300924647 != 1767494300924650 2026-01-04T02:38:21.209975Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:21.243870Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:21.243891Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:21.243909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:21.243979Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:21.252714Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:21.610019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:21.932764Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:23.413434Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330228903393923:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:23.413447Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330228903393931:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:23.413576Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:23.413883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330228903393938:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:23.413976Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:23.417396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:23.425424Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330228903393937:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:38:23.538649Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330228903393990:2531] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:23.731815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:38:24.132187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591330228903394554:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:38:24.132497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591330228903394554:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:38:24.132778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591330228903394554:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:38:24.132918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591330228903394554:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:38:24.133039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591330228903394554:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:38:24.133151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591330228903394554:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:38:24.133282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591330228903394554:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:38:24.133406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591330228903394554:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:38:24.133525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591330228903394554:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:38:24.133652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037895;self_id=[1:7591330228903394554:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:38:24.133771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591330228903394554:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:38:24.133916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591330228903394554:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:38:24.134022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591330228903394554:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:38:24.140325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7591330228903394597:2343];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:38:24.140403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7591330228903394597:2343];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:38:24.140652Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7591330228903394597:2343];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:38:24.140789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7591330228903394597:2343];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:38:24.140902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7591330228903394597:2343];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:38:24.141058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7591330228903394597:2343];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:38:24.141226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7591330228903394597:2343];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:38:24.141345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;self_id=[1:7591330228903394597:2343];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:38:24.141498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037903;self_id=[1:7591330228903394597:2343];tablet_id=72075186224037903;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CL ... tate=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.651879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.656687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.656752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.656769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.659885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.659955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.659969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.665507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.665597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.665614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.667950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.668011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.668026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.674471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.674575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.674595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.676036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.676099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.676115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.683551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.683618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.683643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037946;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.683749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.683816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.683838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037942;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.689631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.689681Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.689692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.692484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.692570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.692605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.697701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.697756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.697765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037925;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.701937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.702020Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.702042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.704070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.704112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.704121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.710715Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.710800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.710819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:25.926403Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330216018491179:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:25.926493Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:38:26.281580Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZWMyZWQyMjAtODkyMGM5MmYtNzc5ZWYzOWMtNTM2ZDMxYmU=, ActorId: [1:7591330228903393904:2321], ActorState: ExecuteState, LegacyTraceId: 01ke3dxj6h2v0c4gcdavta2y3c, Create QueryResponse for error on request, msg: BATCH operations are not supported for column tables at the current time. status# PRECONDITION_FAILED issues# trace_id# |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_1 >> KqpBatchUpdate::Large_2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> TStorageBalanceTest::TestScenario3 [GOOD] Test command err: 2026-01-04T02:34:54.445063Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:34:54.473025Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:34:54.473377Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:34:54.474262Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:34:54.474611Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:34:54.475655Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:49:2075] ControllerId# 72057594037932033 2026-01-04T02:34:54.475693Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:34:54.475812Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 
2026-01-04T02:34:54.475928Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:34:54.487207Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:34:54.487306Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:34:54.489756Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:58:2079] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.489961Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:59:2080] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.490142Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:60:2081] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.490290Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:61:2082] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.490431Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:62:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.490571Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:63:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.490711Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:64:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.490737Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:34:54.490854Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:49:2075] 2026-01-04T02:34:54.490890Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:49:2075] 2026-01-04T02:34:54.490951Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-04T02:34:54.491036Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-04T02:34:54.491507Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:34:54.496838Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:34:54.497024Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:34:54.497409Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:34:54.497750Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 
2026-01-04T02:34:54.499024Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-04T02:34:54.499096Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:34:54.500081Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:73:2077] ControllerId# 72057594037932033 2026-01-04T02:34:54.500117Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:34:54.500193Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-04T02:34:54.500312Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:34:54.514307Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:34:54.514383Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:34:54.516171Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:81:2082] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.516327Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:82:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.516463Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:83:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.516598Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:84:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.516733Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:85:2086] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.516869Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:86:2087] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.517000Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:87:2088] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:54.517027Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:34:54.517116Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:73:2077] 2026-01-04T02:34:54.517147Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:73:2077] 2026-01-04T02:34:54.517185Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-04T02:34:54.517224Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-04T02:34:54.518223Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-04T02:34:54.518451Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:34:54.520176Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:73:2077] 2026-01-04T02:34:54.520262Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# 
false PendingEvents.size# 0 2026-01-04T02:34:54.520480Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-04T02:34:54.520622Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:34:54.521318Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:49:2075] 2026-01-04T02:34:54.521365Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:34:54.522015Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:34:54.522066Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-04T02:34:54.537746Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:34:54.540202Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:34:54.540374Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:99:2093] 2026-01-04T02:34:54.540411Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:99:2093] 2026-01-04T02:34:54.540560Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:34:54.540912Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:34:54.541078Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:34:54.541220Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:73:2077] 2026-01-04T02:34:54.541259Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 1 2026-01-04T02:34:54.541296Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-04T02:34:54.541463Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [2:102:2089] 2026-01-04T02:34:54.541510Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [2:102:2089] 2026-01-04T02:34:54.541571Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-04T02:34:54.54 ... 
543c] Result# TEvPutResult {Id# [72057594037927937:2:484:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2026-01-04T02:38:24.939548Z node 20 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [36fc78bae8f3543c] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:484:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-04T02:38:24.940545Z node 20 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:484:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-04T02:38:24.940716Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:485} commited cookie 1 for step 484 2026-01-04T02:38:24.943138Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:485} Tx{1468, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2026-01-04T02:38:24.943206Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:485} Tx{1468, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:38:24.943459Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:485} Tx{1468, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{980, redo 335b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2026-01-04T02:38:24.943516Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:485} Tx{1468, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:38:24.943645Z node 20 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037932033] send [20:1359:2259] 2026-01-04T02:38:24.943682Z node 20 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037932033] push event to server [20:1359:2259] 2026-01-04T02:38:24.943738Z node 20 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [20:1295:2223] EventType# 268637702 c[def1] *****----------------------------------------------------------------------------------------------- (0.05) *****----------------------------------------------------------------------------------------------- (0.052) *****----------------------------------------------------------------------------------------------- (0.052) ******---------------------------------------------------------------------------------------------- (0.056) ******---------------------------------------------------------------------------------------------- (0.056) *****----------------------------------------------------------------------------------------------- (0.054) *****----------------------------------------------------------------------------------------------- (0.052) ******---------------------------------------------------------------------------------------------- (0.058) ******---------------------------------------------------------------------------------------------- (0.064) *****----------------------------------------------------------------------------------------------- (0.052) *****----------------------------------------------------------------------------------------------- (0.054) 2026-01-04T02:38:25.045837Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:486} Tx{1469, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2026-01-04T02:38:25.045940Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:486} Tx{1469, NKikimr::NHive::TTxUpdateTabletGroups} 
took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:38:25.046080Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:272: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{136335615186656}: tablet 72075186224037945 wasn't changed 2026-01-04T02:38:25.046126Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{136335615186656}: tablet 72075186224037945 skipped channel 0 2026-01-04T02:38:25.046214Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{136335615186656}: tablet 72075186224037945 skipped channel 1 2026-01-04T02:38:25.046253Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{136335615186656}: tablet 72075186224037945 skipped channel 2 2026-01-04T02:38:25.046329Z node 20 :HIVE NOTICE: tx__update_tablet_groups.cpp:326: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{136335615186656}(72075186224037945)::Execute - TryToBoot was not successfull 2026-01-04T02:38:25.046406Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:486} Tx{1469, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{981, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2026-01-04T02:38:25.046458Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:486} Tx{1469, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:38:25.065638Z node 20 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [777bde6898dbfef1] bootstrap ActorId# [20:11681:4471] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:485:0:0:246:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-04T02:38:25.065854Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [777bde6898dbfef1] Id# [72057594037927937:2:485:0:0:246:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:38:25.065908Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [777bde6898dbfef1] restore Id# [72057594037927937:2:485:0:0:246:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-04T02:38:25.065967Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [777bde6898dbfef1] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:485:0:0:246:1] Marker# BPG33 2026-01-04T02:38:25.066006Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [777bde6898dbfef1] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:485:0:0:246:1] Marker# BPG32 2026-01-04T02:38:25.066137Z node 20 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [20:333:2090] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:485:0:0:246:1] FDS# 246 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:38:25.069585Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [777bde6898dbfef1] received {EvVPutResult Status# OK ID# [72057594037927937:2:485:0:0:246:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 502 } Cost# 81937 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 503 }}}} from# [0:1:0:0:0] Marker# BPP01 2026-01-04T02:38:25.069709Z node 20 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [777bde6898dbfef1] Result# TEvPutResult {Id# [72057594037927937:2:485:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 
0.998955} GroupId# 0 Marker# BPP12 2026-01-04T02:38:25.069765Z node 20 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [777bde6898dbfef1] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:485:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-04T02:38:25.070726Z node 20 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:485:0:0:246:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-04T02:38:25.070891Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:486} commited cookie 1 for step 485 2026-01-04T02:38:25.073328Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:486} Tx{1470, NKikimr::NHive::TTxReassignGroups} queued, type NKikimr::NHive::TTxReassignGroups 2026-01-04T02:38:25.073390Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:486} Tx{1470, NKikimr::NHive::TTxReassignGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:38:25.073635Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:486} Tx{1470, NKikimr::NHive::TTxReassignGroups} hope 1 -> done Change{982, redo 335b alter 0b annex 0, ~{ 1, 2 } -{ }, 0 gb} 2026-01-04T02:38:25.073689Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:486} Tx{1470, NKikimr::NHive::TTxReassignGroups} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:38:25.073828Z node 20 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037932033] send [20:1359:2259] 2026-01-04T02:38:25.073865Z node 20 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037932033] push event to server [20:1359:2259] 2026-01-04T02:38:25.073918Z node 20 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [20:1295:2223] EventType# 268637702 c[def1] *****----------------------------------------------------------------------------------------------- (0.05) *****----------------------------------------------------------------------------------------------- (0.052) *****----------------------------------------------------------------------------------------------- (0.052) ******---------------------------------------------------------------------------------------------- (0.056) ******---------------------------------------------------------------------------------------------- (0.056) *****----------------------------------------------------------------------------------------------- (0.054) *****----------------------------------------------------------------------------------------------- (0.052) ******---------------------------------------------------------------------------------------------- (0.058) ******---------------------------------------------------------------------------------------------- (0.064) *****----------------------------------------------------------------------------------------------- (0.052) *****----------------------------------------------------------------------------------------------- (0.054) 2026-01-04T02:38:25.176126Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:487} Tx{1471, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2026-01-04T02:38:25.176238Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:487} Tx{1471, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:38:25.176375Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:272: 
HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{136335616045696}: tablet 72075186224037968 wasn't changed 2026-01-04T02:38:25.176418Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{136335616045696}: tablet 72075186224037968 skipped channel 0 2026-01-04T02:38:25.176502Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{136335616045696}: tablet 72075186224037968 skipped channel 1 2026-01-04T02:38:25.176548Z node 20 :HIVE WARN: tx__update_tablet_groups.cpp:281: HIVE#72057594037927937 THive::TTxUpdateTabletGroups::Execute{136335616045696}: tablet 72075186224037968 skipped channel 2 2026-01-04T02:38:25.176624Z node 20 :HIVE NOTICE: tx__update_tablet_groups.cpp:326: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{136335616045696}(72075186224037968)::Execute - TryToBoot was not successfull 2026-01-04T02:38:25.176702Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:487} Tx{1471, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{983, redo 257b alter 0b annex 0, ~{ 2, 1 } -{ }, 0 gb} 2026-01-04T02:38:25.176756Z node 20 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:487} Tx{1471, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags+UseSink+UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 12263, MsgBus: 1231 2026-01-04T02:38:22.709461Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330224664391714:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:22.709527Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:22.916850Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:22.932306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:22.932440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:22.934000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:23.009194Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:23.010442Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330224664391687:2081] 1767494302708168 != 1767494302708171 2026-01-04T02:38:23.045998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:23.046024Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:23.046032Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:23.046114Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2026-01-04T02:38:23.092016Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:23.374768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:23.426321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:23.539714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:23.641249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:23.686697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:23.725968Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:25.054511Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330237549295448:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:25.054609Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:25.054898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330237549295458:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:25.054951Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:25.395586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.421657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.445544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.472824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.499801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.530698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.562049Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.603615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.682533Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330237549296323:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:25.682677Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:25.682699Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330237549296328:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:25.682936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330237549296330:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:25.682977Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:25.686081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:25.695961Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330237549296331:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:38:25.798493Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330237549296385:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::MultiStatement [GOOD] Test command err: Trying to start YDB, gRPC: 3172, MsgBus: 62771 2026-01-04T02:38:23.006316Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330230544491186:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:23.007205Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:23.201528Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:23.210095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:23.210156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:23.250239Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:23.277764Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:23.327318Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:23.327341Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:23.327348Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:23.327493Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:23.371945Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:23.658311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:23.714952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:23.834949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:23.972559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:24.013747Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:24.033647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.592172Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330239134427613:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:25.592323Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:25.592678Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330239134427623:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:25.592745Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:25.827078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.853554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.880017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.906877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.933785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.963417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.991497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:26.031682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:26.085384Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330243429395792:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:26.085461Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:26.085585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330243429395798:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:26.085632Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:26.085649Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330243429395797:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:26.088208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:26.095685Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330243429395801:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:26.166330Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330243429395852:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:27.625599Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330247724363459:2535], status: GENERIC_ERROR, issues:
:5:32: Error: BATCH can't be used with multiple writes or reads. 2026-01-04T02:38:27.625940Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MTE3ZGVhMWMtNThhYWI4Ni1kYzIzM2YzYS02NGRmMzNmZg==, ActorId: [1:7591330247724363450:2529], ActorState: ExecuteState, LegacyTraceId: 01ke3dxkps70dxkw6m6gfks5ec, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 5 column: 32 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 5 column: 32 } severity: 1 } tx_id# trace_id# 2026-01-04T02:38:27.640749Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330247724363465:2538], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2026-01-04T02:38:27.641082Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MTE3ZGVhMWMtNThhYWI4Ni1kYzIzM2YzYS02NGRmMzNmZg==, ActorId: [1:7591330247724363450:2529], ActorState: ExecuteState, LegacyTraceId: 01ke3dxksefzk0z3men62vtjqx, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 17 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 17 } severity: 1 } tx_id# trace_id# 2026-01-04T02:38:27.654689Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330247724363469:2540], status: GENERIC_ERROR, issues:
:4:17: Error: BATCH can't be used with multiple writes or reads. 2026-01-04T02:38:27.654932Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MTE3ZGVhMWMtNThhYWI4Ni1kYzIzM2YzYS02NGRmMzNmZg==, ActorId: [1:7591330247724363450:2529], ActorState: ExecuteState, LegacyTraceId: 01ke3dxksxawdqqyg6mep1ym5j, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 17 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 17 } severity: 1 } tx_id# trace_id# 2026-01-04T02:38:27.668060Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330247724363473:2542], status: GENERIC_ERROR, issues:
:4:29: Error: BATCH can't be used with multiple writes or reads. 2026-01-04T02:38:27.668304Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MTE3ZGVhMWMtNThhYWI4Ni1kYzIzM2YzYS02NGRmMzNmZg==, ActorId: [1:7591330247724363450:2529], ActorState: ExecuteState, LegacyTraceId: 01ke3dxktb5n6kr7myvwst7zt4, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 29 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 29 } severity: 1 } tx_id# trace_id# 2026-01-04T02:38:27.681905Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330247724363477:2544], status: GENERIC_ERROR, issues:
:4:29: Error: BATCH can't be used with multiple writes or reads. 2026-01-04T02:38:27.682217Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MTE3ZGVhMWMtNThhYWI4Ni1kYzIzM2YzYS02NGRmMzNmZg==, ActorId: [1:7591330247724363450:2529], ActorState: ExecuteState, LegacyTraceId: 01ke3dxktr5x8fm7jfvsa1w97c, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 4 column: 29 } message: "BATCH can\'t be used with multiple writes or reads." end_position { row: 4 column: 29 } severity: 1 } tx_id# trace_id# >> KqpBatchUpdate::NotIdempotent |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::HasTxControl >> KqpBatchUpdate::Returning [GOOD] >> KqpBatchDelete::UnknownColumn [GOOD] >> TxUsage::Sinks_Oltp_WriteToTopics_4_Table [GOOD] >> TColumnShardTestSchema::ExportAfterFail [GOOD] >> KqpBatchUpdate::ManyPartitions_3 >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] >> KqpBatchDelete::ColumnTable >> TxUsage::Sinks_Oltp_WriteToTopics_4_Query >> KqpBatchDelete::TableWithIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494870.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494870.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493670.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2026-01-04T02:37:51.473568Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:51.495036Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:51.495273Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:51.501380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:51.501577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:51.501763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:51.501870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:51.501945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:51.502045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:51.502144Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:51.502223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:51.502311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:51.502411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:51.502518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:51.502619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:51.502695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:51.523244Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:51.523386Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:51.523429Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:51.523583Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:51.523723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:51.523794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:51.523826Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:51.523891Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:51.523949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:51.523981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:51.524001Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:51.524120Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:51.524173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:51.524205Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:51.524225Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:51.524316Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:51.524366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:51.524395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:51.524417Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:51.524450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:51.524479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:51.524500Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-04T02:37:51.524532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:51.524558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:51.524579Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:51.524711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:51.524750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:51.524786Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:51.524929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:51.524972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:51.524993Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:51.525032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:51.525065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:51.525086Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:51.525116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... 
TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:38:30.270166Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:831:2800];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:80000;schema=timestamp: timestamp[us];);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:30.270203Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:831:2800];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:38:30.270316Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:831:2800];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=80000; 2026-01-04T02:38:30.270375Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:831:2800];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=80000;batch_columns=timestamp; 2026-01-04T02:38:30.270593Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:831:2800];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:830:2799];bytes=1280000;rows=160000;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; Got TEvKqpCompute::TEvScanData [1:831:2800]->[1:830:2799] 2026-01-04T02:38:30.270763Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:831:2800];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:30.270906Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:831:2800];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:30.271012Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:831:2800];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:30.271166Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:831:2800];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:38:30.271262Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:831:2800];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:30.271359Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:831:2800];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:30.271594Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:831:2800] finished for tablet 9437184 2026-01-04T02:38:30.271998Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:831:2800];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:830:2799];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.011}],"full":{"a":40374154,"name":"_full_task","f":40374154,"d_finished":0,"c":0,"l":40385969,"d":11815},"events":[{"name":"bootstrap","f":40374417,"d_finished":741,"c":1,"l":40375158,"d":741},{"a":40385478,"name":"ack","f":40383007,"d_finished":2251,"c":2,"l":40385371,"d":2742},{"a":40385465,"name":"processing","f":40375257,"d_finished":4661,"c":5,"l":40385375,"d":5165},{"name":"ProduceResults","f":40374907,"d_finished":2983,"c":9,"l":40385711,"d":2983},{"a":40385715,"name":"Finish","f":40385715,"d_finished":0,"c":0,"l":40385969,"d":254},{"name":"task_result","f":40375268,"d_finished":2313,"c":3,"l":40382807,"d":2313}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:30.272084Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:831:2800];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:830:2799];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:38:30.272452Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:831:2800];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:830:2799];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.008},{"events":["l_ProduceResults","f_Finish"],"t":0.011},{"events":["l_ack","l_processing","l_Finish"],"t":0.012}],"full":{"a":40374154,"name":"_full_task","f":40374154,"d_finished":0,"c":0,"l":40386449,"d":12295},"events":[{"name":"bootstrap","f":40374417,"d_finished":741,"c":1,"l":40375158,"d":741},{"a":40385478,"name":"ack","f":40383007,"d_finished":2251,"c":2,"l":40385371,"d":3222},{"a":40385465,"name":"processing","f":40375257,"d_finished":4661,"c":5,"l":40385375,"d":5645},{"name":"ProduceResults","f":40374907,"d_finished":2983,"c":9,"l":40385711,"d":2983},{"a":40385715,"name":"Finish","f":40385715,"d_finished":0,"c":0,"l":40386449,"d":734},{"name":"task_result","f":40375268,"d_finished":2313,"c":3,"l":40382807,"d":2313}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:831:2800]->[1:830:2799] 2026-01-04T02:38:30.272550Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:831:2800];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:38:30.258533Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=9739224;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=9739224;selected_rows=0; 2026-01-04T02:38:30.272583Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:831:2800];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:38:30.272684Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:831:2800];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 160000/9739224 |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::UnknownColumn [GOOD] Test command err: Trying to start YDB, gRPC: 18331, MsgBus: 17111 
2026-01-04T02:38:24.980695Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330232573039603:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:24.980799Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:25.202630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:25.202806Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:25.251907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:25.306799Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:25.308846Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:25.366485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:25.366505Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:25.366511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:25.366618Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:25.536312Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:25.695182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:25.744309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.852435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.987717Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:25.988983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:26.052200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.648278Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330245457943325:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:27.648380Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:27.648627Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330245457943335:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:27.648725Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:27.910538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.937603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.965093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.992944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:28.018458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:28.045262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:28.085540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:28.116809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:28.169231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330249752911501:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:28.169290Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330249752911506:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:28.169300Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:28.169470Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330249752911509:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:28.169501Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:28.172500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:28.182925Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330249752911508:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:28.270727Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330249752911561:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:29.520735Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330254047879167:2535], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:31: Error: At lambda, At function: Coalesce
:3:37: Error: At function: ==
:3:23: Error: At function: Member
:3:23: Error: Member not found: UnknownColumn 2026-01-04T02:38:29.521217Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NDVhNzcxNmQtNzJkMjU3N2EtNjYwMTg3Yi00Nzk1ZWE2OQ==, ActorId: [1:7591330254047879158:2529], ActorState: ExecuteState, LegacyTraceId: 01ke3dxnkdfx8t3n6k9690wshe, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 31 } message: "At lambda, At function: Coalesce" end_position { row: 2 column: 31 } severity: 1 issues { position { row: 3 column: 37 } message: "At function: ==" end_position { row: 3 column: 37 } severity: 1 issues { position { row: 3 column: 23 } message: "At function: Member" end_position { row: 3 column: 23 } severity: 1 issues { position { row: 3 column: 23 } message: "Member not found: UnknownColumn" end_position { row: 3 column: 23 } severity: 1 } } } } } tx_id# trace_id# >> KqpBatchUpdate::TableNotExists [GOOD] >> KqpBatchDelete::DisableFlags-UseSink+UseBatchUpdates >> KqpBatchUpdate::DisableFlags+UseSink-UseBatchUpdates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Returning [GOOD] Test command err: Trying to start YDB, gRPC: 8280, MsgBus: 63972 2026-01-04T02:38:24.912929Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330232722392238:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:24.913356Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:25.081639Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:25.085526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:25.085580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:25.108026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:25.141476Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:25.142779Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330232722392134:2081] 1767494304908658 != 1767494304908661 2026-01-04T02:38:25.219967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:25.219996Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:25.220013Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:25.220124Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:25.269155Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-04T02:38:25.583080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:25.637222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.775386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.884422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.918251Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:25.933747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.514646Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330245607295891:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:27.514761Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:27.515162Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330245607295901:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:27.515279Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:27.754307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.781941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.810712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.839548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.868793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.898376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.929497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.967007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:28.035156Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330249902264067:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:28.035242Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:28.035254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330249902264072:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:28.035405Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330249902264074:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:28.035454Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:28.038445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:28.047805Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330249902264075:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:28.133183Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330249902264127:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:29.379680Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330254197231733:2535], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH UPDATE is unsupported with RETURNING 2026-01-04T02:38:29.380070Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=OWIyNDExNjctM2U1YzQ5NmMtMzE4ZTY2OTQtYmIwNTQwYzM=, ActorId: [1:7591330254197231724:2529], ActorState: ExecuteState, LegacyTraceId: 01ke3dxnfd0q2hacra7zbgwjwp, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 2 column: 22 } message: "BATCH UPDATE is unsupported with RETURNING" end_position { row: 2 column: 22 } severity: 1 } tx_id# trace_id# |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags+UseSink+UseBatchUpdates [GOOD] >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=167494850.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494850.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494850.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167494850.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494850.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494850.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167494850.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493650.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147494850.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147494850.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493650.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147493650.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147493650.000000s;Name=;Codec=}; 2026-01-04T02:37:30.888739Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:30.918225Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:30.918466Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:30.926168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:30.926396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:30.926609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:30.926720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:30.926851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:30.926962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:30.927059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:30.927170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:30.927283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:30.927383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:30.927478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:30.927573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:30.927678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:30.955634Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:30.955779Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:30.955832Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:30.956019Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:30.956196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:30.956273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:30.956318Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:30.956410Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:30.956463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:30.956505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:30.956533Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:30.956690Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:30.956756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:30.956794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:30.956834Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:30.956928Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:30.956976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:30.957014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:30.957043Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:30.957087Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:30.957123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:30.957155Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:30.957199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:30.957258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:30.957285Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:30.957463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:30.957507Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:30.957535Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:30.957665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:30.957715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:3 ... 
ablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:30.542144Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:30.542353Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767494299209:max} readable: {1767494299209:max} at tablet 9437184 2026-01-04T02:38:30.542448Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-04T02:38:30.542584Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494299209:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:38:30.542646Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494299209:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:38:30.543008Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494299209:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-04T02:38:30.544152Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494299209:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-04T02:38:30.544867Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494299209:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1449:3394];trace_detailed=; 2026-01-04T02:38:30.545234Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-04T02:38:30.545397Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-04T02:38:30.545576Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:30.545722Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:30.546035Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1449:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:38:30.546146Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1449:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:30.546255Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1449:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:30.546421Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1449:3394] finished for tablet 9437184 2026-01-04T02:38:30.546768Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1449:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1448:3393];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":63345792,"name":"_full_task","f":63345792,"d_finished":0,"c":0,"l":63347452,"d":1660},"events":[{"name":"bootstrap","f":63346014,"d_finished":721,"c":1,"l":63346735,"d":721},{"a":63347004,"name":"ack","f":63347004,"d_finished":0,"c":0,"l":63347452,"d":448},{"a":63346990,"name":"processing","f":63346990,"d_finished":0,"c":0,"l":63347452,"d":462},{"name":"ProduceResults","f":63346480,"d_finished":467,"c":2,"l":63347258,"d":467},{"a":63347263,"name":"Finish","f":63347263,"d_finished":0,"c":0,"l":63347452,"d":189}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:30.546836Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1449:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1448:3393];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:38:30.547156Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1449:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1448:3393];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":63345792,"name":"_full_task","f":63345792,"d_finished":0,"c":0,"l":63347859,"d":2067},"events":[{"name":"bootstrap","f":63346014,"d_finished":721,"c":1,"l":63346735,"d":721},{"a":63347004,"name":"ack","f":63347004,"d_finished":0,"c":0,"l":63347859,"d":855},{"a":63346990,"name":"processing","f":63346990,"d_finished":0,"c":0,"l":63347859,"d":869},{"name":"ProduceResults","f":63346480,"d_finished":467,"c":2,"l":63347258,"d":467},{"a":63347263,"name":"Finish","f":63347263,"d_finished":0,"c":0,"l":63347859,"d":596}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1449:3394]->[1:1448:3393] 2026-01-04T02:38:30.547226Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1449:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:38:30.544132Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-04T02:38:30.547261Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1449:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:38:30.547354Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1449:3394];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9752224 160000/9752224 160000/9752224 80000/4886744 0/0 |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> KqpBatchDelete::DeleteOn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494864.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=167494864.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494864.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147494864.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493664.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147493664.000000s;Name=;Codec=}; 2026-01-04T02:37:46.013007Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:46.035774Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:46.035995Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:46.041375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:46.041542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:46.041712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:46.041793Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:46.041878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:46.041966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:46.042040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:46.042109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:46.042207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:46.042281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:46.042349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:46.042420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:46.042485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:46.059866Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:46.060002Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:46.060045Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:46.060169Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:46.060314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:46.060380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:46.060409Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:46.060480Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:46.060530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:46.060558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:46.060577Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:46.060704Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:46.060754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:46.060782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:46.060803Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:46.060874Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:46.060914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:46.060940Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:46.060960Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:46.061007Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:46.061045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:46.061067Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:46.061100Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:46.061124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:46.061142Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:46.061275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:46.061309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:46.061327Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:46.061422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:46.061459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:46.061481Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:46.061548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:46.061587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:46.061611Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:46.061640Z node 1 :TX_COLUM ... 
tor; 2026-01-04T02:38:31.535775Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:874:2832];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-04T02:38:31.536661Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 7 at tablet 9437184 2026-01-04T02:38:31.536920Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767494302162:max} readable: {1767494302162:max} at tablet 9437184 2026-01-04T02:38:31.537061Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-04T02:38:31.537231Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494302162:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:38:31.537324Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494302162:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:38:31.537844Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494302162:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-04T02:38:31.539407Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494302162:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-04T02:38:31.540324Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494302162:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:882:2840];trace_detailed=; 2026-01-04T02:38:31.540794Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-04T02:38:31.541005Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-04T02:38:31.541255Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:31.541446Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:31.541701Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:38:31.541849Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:31.541981Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:31.542202Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:882:2840] finished for tablet 9437184 2026-01-04T02:38:31.542718Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:881:2839];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":47646393,"name":"_full_task","f":47646393,"d_finished":0,"c":0,"l":47648435,"d":2042},"events":[{"name":"bootstrap","f":47646648,"d_finished":984,"c":1,"l":47647632,"d":984},{"a":47647826,"name":"ack","f":47647826,"d_finished":0,"c":0,"l":47648435,"d":609},{"a":47647809,"name":"processing","f":47647809,"d_finished":0,"c":0,"l":47648435,"d":626},{"name":"ProduceResults","f":47647287,"d_finished":623,"c":2,"l":47648156,"d":623},{"a":47648167,"name":"Finish","f":47648167,"d_finished":0,"c":0,"l":47648435,"d":268}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:31.542813Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:881:2839];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:38:31.543244Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:881:2839];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":47646393,"name":"_full_task","f":47646393,"d_finished":0,"c":0,"l":47649023,"d":2630},"events":[{"name":"bootstrap","f":47646648,"d_finished":984,"c":1,"l":47647632,"d":984},{"a":47647826,"name":"ack","f":47647826,"d_finished":0,"c":0,"l":47649023,"d":1197},{"a":47647809,"name":"processing","f":47647809,"d_finished":0,"c":0,"l":47649023,"d":1214},{"name":"ProduceResults","f":47647287,"d_finished":623,"c":2,"l":47648156,"d":623},{"a":47648167,"name":"Finish","f":47648167,"d_finished":0,"c":0,"l":47649023,"d":856}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:882:2840]->[1:881:2839] 2026-01-04T02:38:31.543329Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:38:31.539378Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-04T02:38:31.543380Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:38:31.543491Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableNotExists [GOOD] Test command err: Trying to start YDB, gRPC: 16046, MsgBus: 22983 2026-01-04T02:38:27.762026Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330245085561745:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:27.762099Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:27.984413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:27.984680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:28.018936Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:28.028373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:28.069849Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:28.071171Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330245085561716:2081] 1767494307760725 != 1767494307760728 2026-01-04T02:38:28.106788Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:28.106816Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:28.106825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize 
from file: (empty maybe) 2026-01-04T02:38:28.106940Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:28.188801Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:28.437109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:28.767053Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:30.428555Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330257970464499:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:30.428555Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330257970464491:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:30.428685Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:30.429117Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330257970464506:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:30.429216Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:30.432799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:30.442310Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330257970464505:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:38:30.577369Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330257970464558:2535] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:30.812294Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330257970464575:2333], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:34: Error: At function: KiUpdateTable!
:3:34: Error: Cannot find table 'db.[/Root/TestBatchNotExists]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:38:30.812911Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YmU1YjVlMDMtZGNkYzVmNTEtODJkYTk4OWItN2U0ODJlZWY=, ActorId: [1:7591330257970464465:2321], ActorState: ExecuteState, LegacyTraceId: 01ke3dxpgs4fz8wd0j5myz2kpa, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 34 } message: "At function: KiUpdateTable!" end_position { row: 3 column: 34 } severity: 1 issues { position { row: 3 column: 34 } message: "Cannot find table \'db.[/Root/TestBatchNotExists]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 34 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:38:30.854297Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330257970464595:2340], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:4:41: Error: At function: KiUpdateTable!
:4:41: Error: Cannot find table 'db.[/Root/TestBatchNotExists]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:38:30.854570Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YmU1YjVlMDMtZGNkYzVmNTEtODJkYTk4OWItN2U0ODJlZWY=, ActorId: [1:7591330257970464465:2321], ActorState: ExecuteState, LegacyTraceId: 01ke3dxpx2bmbmn9nxwppwhkq7, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 4 column: 41 } message: "At function: KiUpdateTable!" end_position { row: 4 column: 41 } severity: 1 issues { position { row: 4 column: 41 } message: "Cannot find table \'db.[/Root/TestBatchNotExists]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 4 column: 41 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags+UseSink+UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 2689, MsgBus: 7905 2026-01-04T02:38:26.222820Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330241838526723:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:26.222988Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:26.440816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:26.440953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:26.445875Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:26.496296Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:26.531775Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:26.533689Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330241838526700:2081] 1767494306221875 != 1767494306221878 2026-01-04T02:38:26.579253Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:26.579280Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:26.579291Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:26.579367Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:26.742384Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:26.902970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:26.960134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.079330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.220728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.256886Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:27.289084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:28.969779Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330250428463168:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:28.969957Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:28.970302Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330250428463178:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:28.970366Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.269986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.298116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.325986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.348764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.376368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.407489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.452361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.488211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.547423Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330254723431350:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.547505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.547526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330254723431355:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.547661Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330254723431357:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.547701Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.550964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:29.561690Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330254723431359:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:29.647610Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330254723431410:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:31.223065Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330241838526723:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:31.223133Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DeleteOn [GOOD] Test command err: Trying to start YDB, gRPC: 29476, MsgBus: 18480 2026-01-04T02:38:26.410699Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330243648981575:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:26.410782Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:26.609834Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:26.664776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:26.664931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:26.671847Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:26.682886Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:26.706452Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330243648981548:2081] 1767494306409260 != 1767494306409263 2026-01-04T02:38:26.750988Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:26.751011Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:26.751019Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:26.751139Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:26.763827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:27.116885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:27.165213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.277237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.408592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.457238Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:27.485115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.396435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330256533885309:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.396523Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.396783Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330256533885319:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.396845Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.629146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.656463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.683631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.711644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.740561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.769725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.803404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.846395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.915321Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330256533886193:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.915460Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.915749Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330256533886198:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.915783Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330256533886199:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.915843Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.919059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:29.928702Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330256533886202:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:38:30.000609Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330256533886253:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:31.274108Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330265123821156:2535], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH DELETE is unsupported with ON 2026-01-04T02:38:31.274412Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MzU4MTlkMDgtNzY2MjdmNmMtMjA5OGNkMGQtZTc1YjhmNmQ=, ActorId: [1:7591330265123821147:2529], ActorState: ExecuteState, LegacyTraceId: 01ke3dxqaq16sy2wpfy6p7v4a2, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 2 column: 22 } message: "BATCH DELETE is unsupported with ON" end_position { row: 2 column: 22 } severity: 1 } tx_id# trace_id# 2026-01-04T02:38:31.411152Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330243648981575:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:31.411261Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_1 >> KqpBatchUpdate::DisableFlags-UseSink+UseBatchUpdates >> KqpBatchDelete::Returning >> KqpBatchUpdate::NotIdempotent [GOOD] >> TSentinelTests::PDiskRackGuardHalfRack [GOOD] >> TSentinelTests::PDiskRackGuardFullRack >> KqpBatchUpdate::HasTxControl [GOOD] >> TColumnShardTestSchema::RebootHotTiers [GOOD] >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] >> KqpBatchUpdate::TableWithIndex [GOOD] >> TColumnShardTestSchema::HotTiersTtl [GOOD] >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::NotIdempotent [GOOD] Test command err: Trying to start YDB, gRPC: 26838, MsgBus: 15165 2026-01-04T02:38:29.619062Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330253647452298:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:29.619845Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:29.774569Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:29.793057Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:29.793159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:29.845794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:29.853029Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:29.868251Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330253647452275:2081] 1767494309618300 != 1767494309618303 2026-01-04T02:38:29.942236Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2026-01-04T02:38:29.942258Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:29.942265Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:29.942406Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:30.012979Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:30.261215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:30.301458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:30.398253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:30.540070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:30.606811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:30.695115Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:32.393443Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330266532356036:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:32.393582Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:32.393968Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330266532356046:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:32.394043Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:32.741720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.777030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.803767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.832502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.862358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.898937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.929860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.965993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:33.021606Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330270827324215:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.021680Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.021740Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330270827324220:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.021883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330270827324222:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.021924Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.024305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:33.032442Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330270827324224:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:33.118729Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330270827324275:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:34.339563Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330275122291882:2535], status: GENERIC_ERROR, issues:
: Error: Table intent determination, code: 1040
:3:43: Error: Batch update is only supported for idempotent updates. 2026-01-04T02:38:34.340430Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZTUzMmUwZmMtYjcyMzQ2OWEtYzQyNjE0NmItNjA2ZDE0NmQ=, ActorId: [1:7591330275122291873:2529], ActorState: ExecuteState, LegacyTraceId: 01ke3dxt9v1w505hzxfnwvd572, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Table intent determination" issue_code: 1040 severity: 1 issues { position { row: 3 column: 43 } message: "Batch update is only supported for idempotent updates." end_position { row: 3 column: 43 } severity: 1 } } tx_id# trace_id# 2026-01-04T02:38:34.363361Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330275122291886:2537], status: GENERIC_ERROR, issues:
: Error: Table intent determination, code: 1040
:3:43: Error: Batch update is only supported for idempotent updates. 2026-01-04T02:38:34.363683Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZTUzMmUwZmMtYjcyMzQ2OWEtYzQyNjE0NmItNjA2ZDE0NmQ=, ActorId: [1:7591330275122291873:2529], ActorState: ExecuteState, LegacyTraceId: 01ke3dxtb985gy6fgw0y2j5vbm, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Table intent determination" issue_code: 1040 severity: 1 issues { position { row: 3 column: 43 } message: "Batch update is only supported for idempotent updates." end_position { row: 3 column: 43 } severity: 1 } } tx_id# trace_id# 2026-01-04T02:38:34.381620Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330275122291892:2540], status: GENERIC_ERROR, issues:
: Error: Table intent determination, code: 1040
:3:51: Error: Batch update is only supported for idempotent updates. 2026-01-04T02:38:34.381894Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZTUzMmUwZmMtYjcyMzQ2OWEtYzQyNjE0NmItNjA2ZDE0NmQ=, ActorId: [1:7591330275122291873:2529], ActorState: ExecuteState, LegacyTraceId: 01ke3dxtc11c1npr9pfpx6z0bc, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Table intent determination" issue_code: 1040 severity: 1 issues { position { row: 3 column: 51 } message: "Batch update is only supported for idempotent updates." end_position { row: 3 column: 51 } severity: 1 } } tx_id# trace_id# 2026-01-04T02:38:34.619363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330253647452298:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:34.619433Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] >> TBackupTests::BackupUuidColumn[Zstd] >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494849.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494849.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167494849.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494849.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494849.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167494849.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493649.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147494849.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147494849.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493649.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147493649.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147493649.000000s;Name=;Codec=}; 2026-01-04T02:37:29.347435Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:29.372852Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:29.373071Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:29.379248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:29.379444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:29.379612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:29.379695Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:29.379777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:29.379890Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:29.379975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:29.380050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:29.380128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:29.380201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:29.380266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:29.380332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:29.380413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:29.402649Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:29.402817Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:29.402902Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:29.403090Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:29.403256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:29.403330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:29.403366Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:29.403432Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:29.403477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:29.403510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:29.403532Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:29.403676Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:29.403733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:29.403762Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:29.403793Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:29.403862Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:29.403902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:29.403953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:29.403975Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:29.404009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:29.404037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:29.404063Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:29.404106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:29.404137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:29.404166Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:29.404323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:29.404373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:29.404401Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:29.404508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:29.404544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:29.404 ... 
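The descriptor strings at the head of this test's output (WaitEmptyAfter=...;Tiers={{Column=timestamp;EvictAfter=...;Name=tier0;Codec=};}{{...;Name=tier1;Codec=zstd};};TTL={...}) encode the tier and TTL settings each step of the schema test applies. As a minimal reading aid for logs like this one — a hypothetical helper, not part of the test suite or of YDB itself — the following Python splits one such descriptor into its tier and TTL fields:

import re

TIER_RE = re.compile(r"\{\{(.*?)\};\}")    # each {{...};} block listed under Tiers=
TTL_RE = re.compile(r"TTL=\{(.*?)\}")      # the trailing TTL={...} block
WAIT_RE = re.compile(r"WaitEmptyAfter=(\d+)")

def parse_kv(body: str) -> dict:
    # Split "Column=timestamp;EvictAfter=...;Name=tier0;Codec=zstd" into a dict.
    out = {}
    for part in body.split(";"):
        if part:
            key, _, value = part.partition("=")
            out[key] = value
    return out

def parse_tiering_descriptor(line: str) -> dict:
    # Parse one "WaitEmptyAfter=...;Tiers=...;TTL=..." line from the test output.
    wait = WAIT_RE.search(line)
    ttl = TTL_RE.search(line)
    return {
        "wait_empty_after": int(wait.group(1)) if wait else None,
        "tiers": [parse_kv(m) for m in TIER_RE.findall(line)],
        "ttl": parse_kv(ttl.group(1)) if ttl else None,
    }

if __name__ == "__main__":
    sample = ("WaitEmptyAfter=0;"
              "Tiers={{Column=timestamp;EvictAfter=167494849.000000s;Name=tier0;Codec=};}"
              "{{Column=timestamp;EvictAfter=167494849.000000s;Name=tier1;Codec=zstd};};"
              "TTL={Column=timestamp;EvictAfter=167494849.000000s;Name=;Codec=};")
    print(parse_tiering_descriptor(sample))

On the first descriptor above this yields two tiers (tier0 with no codec, tier1 with zstd) plus a TTL bound to the timestamp column, matching the eviction settings the RebootHotTiers steps cycle through.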
; 2026-01-04T02:38:34.991430Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=105; 2026-01-04T02:38:34.991475Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=8738; 2026-01-04T02:38:34.991521Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8857; 2026-01-04T02:38:34.991586Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=12; 2026-01-04T02:38:34.991673Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=37; 2026-01-04T02:38:34.991709Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9463; 2026-01-04T02:38:34.991859Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=91; 2026-01-04T02:38:34.991993Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=70; 2026-01-04T02:38:34.992129Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=90; 2026-01-04T02:38:34.992241Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=71; 2026-01-04T02:38:34.997293Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=4987; 2026-01-04T02:38:35.001572Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=4164; 2026-01-04T02:38:35.001675Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2026-01-04T02:38:35.001735Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=13; 2026-01-04T02:38:35.001779Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-04T02:38:35.001875Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=42; 2026-01-04T02:38:35.001926Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-04T02:38:35.002013Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=54; 2026-01-04T02:38:35.002058Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=5; 2026-01-04T02:38:35.002125Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2026-01-04T02:38:35.002220Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=53; 2026-01-04T02:38:35.002566Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=307; 2026-01-04T02:38:35.002616Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=29322; 2026-01-04T02:38:35.002767Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=29251936;raw_bytes=43173354;count=6;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:38:35.002889Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2037:3856];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:38:35.002950Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2037:3856];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:38:35.003021Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2037:3856];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:38:35.021867Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2037:3856];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-04T02:38:35.021996Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:38:35.022065Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 
2026-01-04T02:38:35.022119Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767493999172;tx_id=18446744073709551615;;current_snapshot_ts=1767494250651; 2026-01-04T02:38:35.022152Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:38:35.022187Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:35.022217Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:35.022278Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:35.022447Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.137000s; 2026-01-04T02:38:35.024120Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2037:3856];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:38:35.024290Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2037:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:38:35.024359Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2037:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:38:35.024454Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2037:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2026-01-04T02:38:35.024518Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2037:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767493999172;tx_id=18446744073709551615;;current_snapshot_ts=1767494250651; 2026-01-04T02:38:35.024568Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2037:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:38:35.024608Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2037:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:35.024639Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2037:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:35.024707Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2037:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:35.024946Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2037:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.080000s; 2026-01-04T02:38:35.024984Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2037:3856];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 160000/9752224 80000/4886744 0/0 |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ExportWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494866.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=167494866.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494866.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147494866.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493666.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147493666.000000s;Name=;Codec=}; 2026-01-04T02:37:47.811833Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:47.841511Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:47.841752Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:47.848475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:47.848700Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:47.848925Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:47.849051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:47.849167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:47.849280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:47.849380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:47.849480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:47.849598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:47.849718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:47.849833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:47.849938Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:47.850044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:47.877410Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:47.877544Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:47.877594Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:47.877758Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:47.877943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:47.878028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:47.878068Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:47.878168Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:47.878242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:47.878291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:47.878321Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:47.878503Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:47.878569Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:47.878624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:47.878657Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:47.878746Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:47.878803Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:47.878843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:47.878871Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:47.878917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:47.878953Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:47.878982Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:47.879037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:47.879075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:47.879102Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:47.879279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:47.879326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:47.879358Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:47.879491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:47.879548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:47.879577Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:47.879624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:47.879659Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:47.879689Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:47.879731Z node 1 :TX_COLUM ... 
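The initialize_shard sections in this output report per-stage timings as key=value pairs, for example EXECUTE:granulesLoadingTime=8857 or PRECHARGE:storages_managerLoadingTime=4987. A rough way to pull those numbers out of a saved copy of this log and rank the slow stages — a sketch only, with the file name test_output.log being an assumption — is:

import re
from collections import defaultdict

# Matches entries such as "EXECUTE:granulesLoadingTime=8857" or
# "PRECHARGE:finishLoadingTime=12" emitted while the shard initializes.
LOADING_RE = re.compile(r"(PRECHARGE|EXECUTE):(\w+?)LoadingTime=(\d+)")

def collect_loading_times(log_text: str) -> dict:
    # Group the reported loading times by stage name and phase;
    # if a stage is reported more than once (e.g. after reboots),
    # only the last value seen is kept.
    stages = defaultdict(dict)
    for phase, stage, value in LOADING_RE.findall(log_text):
        stages[stage][phase] = int(value)
    return dict(stages)

if __name__ == "__main__":
    with open("test_output.log", encoding="utf-8") as f:  # assumed path to a saved log
        times = collect_loading_times(f.read())
    for stage, phases in sorted(times.items(), key=lambda kv: -kv[1].get("EXECUTE", 0)):
        print(f"{stage:30} {phases}")

The units of these counters are not stated in the log itself, so the script only compares relative magnitudes rather than converting them to wall-clock time.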
son=unexpected on destructor; 2026-01-04T02:38:35.369324Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:874:2832];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-04T02:38:35.370144Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 7 at tablet 9437184 2026-01-04T02:38:35.370393Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767494303961:max} readable: {1767494303961:max} at tablet 9437184 2026-01-04T02:38:35.370486Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-04T02:38:35.370650Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494303961:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:38:35.370708Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494303961:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:38:35.371033Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494303961:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-04T02:38:35.372252Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494303961:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-04T02:38:35.372928Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494303961:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:882:2840];trace_detailed=; 2026-01-04T02:38:35.373294Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-04T02:38:35.373450Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: 
log.cpp:841: TEST_STEP=3;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-04T02:38:35.373658Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:35.373789Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:35.374109Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:38:35.374257Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:35.374352Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:35.374539Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:882:2840] finished for tablet 9437184 2026-01-04T02:38:35.374873Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:881:2839];stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":49129826,"name":"_full_task","f":49129826,"d_finished":0,"c":0,"l":49131528,"d":1702},"events":[{"name":"bootstrap","f":49130016,"d_finished":806,"c":1,"l":49130822,"d":806},{"a":49131027,"name":"ack","f":49131027,"d_finished":0,"c":0,"l":49131528,"d":501},{"a":49131009,"name":"processing","f":49131009,"d_finished":0,"c":0,"l":49131528,"d":519},{"name":"ProduceResults","f":49130477,"d_finished":602,"c":2,"l":49131351,"d":602},{"a":49131356,"name":"Finish","f":49131356,"d_finished":0,"c":0,"l":49131528,"d":172}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:35.374948Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:881:2839];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:38:35.375270Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:881:2839];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","l_bootstrap","f_ProduceResults"],"t":0},{"events":["f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":49129826,"name":"_full_task","f":49129826,"d_finished":0,"c":0,"l":49131932,"d":2106},"events":[{"name":"bootstrap","f":49130016,"d_finished":806,"c":1,"l":49130822,"d":806},{"a":49131027,"name":"ack","f":49131027,"d_finished":0,"c":0,"l":49131932,"d":905},{"a":49131009,"name":"processing","f":49131009,"d_finished":0,"c":0,"l":49131932,"d":923},{"name":"ProduceResults","f":49130477,"d_finished":602,"c":2,"l":49131351,"d":602},{"a":49131356,"name":"Finish","f":49131356,"d_finished":0,"c":0,"l":49131932,"d":576}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:882:2840]->[1:881:2839] 2026-01-04T02:38:35.375350Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:38:35.372232Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-04T02:38:35.375388Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:38:35.375497Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:882:2840];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::HasTxControl [GOOD] Test command err: Trying to start YDB, gRPC: 2845, MsgBus: 20593 2026-01-04T02:38:30.056756Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330259942216996:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:30.056895Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:30.265210Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:30.278256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:30.278385Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:30.306439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:30.375671Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330259942216969:2081] 1767494310055404 != 1767494310055407 2026-01-04T02:38:30.382017Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:30.421668Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:30.421696Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:30.421702Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:30.421841Z node 1 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:30.564082Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:30.804762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:30.860436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:30.974516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:31.070952Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:31.107594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:31.167005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.807597Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330268532153432:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:32.807747Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:32.808107Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330268532153442:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:32.808179Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.051829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:33.074846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:33.099194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:33.124132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:33.148909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:33.175979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:33.220755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:33.254578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:33.314201Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330272827121610:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.314297Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.314406Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330272827121615:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.314439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330272827121616:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.314498Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.317981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:33.327204Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330272827121619:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:33.390090Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330272827121670:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:34.861955Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=N2Q5NTUxODQtODhkYTMxNDUtZjc4MWY0ZjItN2Y0YTNkN2Y=, ActorId: [1:7591330277122089267:2529], ActorState: ExecuteState, LegacyTraceId: 01ke3dxtpzbp2pbq64q5qkk3ez, Create QueryResponse for error on request, msg: BATCH operation can be executed only in the implicit transaction mode. status# PRECONDITION_FAILED issues# trace_id# 2026-01-04T02:38:35.056943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330259942216996:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:35.057051Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |89.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TSentinelTests::PDiskErrorState [GOOD] >> TSentinelTests::NodeStatusComputer [GOOD] >> TSentinelTests::InitialDeploymentGracePeriod |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest >> KqpBatchUpdate::DisableFlags+UseSink-UseBatchUpdates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::HotTiersTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494878.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494878.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167494878.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494878.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494878.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147494878.000000s;Name=;Codec=}; 
WaitEmptyAfter=1;Tiers={{Column=timestamp;EvictAfter=167494878.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494878.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147493678.000000s;Name=;Codec=}; 2026-01-04T02:37:58.627649Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:58.645534Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:58.645718Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:58.650578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:58.650726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:58.650870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:58.650937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:58.650997Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:58.651068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:58.651131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:58.651187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:58.651263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:58.651331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:58.651393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:58.651451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:58.651508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:58.667422Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:58.667535Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:58.667573Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:58.667689Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:58.667804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:58.667863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:58.667892Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:58.667962Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:58.668010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:58.668043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:58.668062Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:58.668173Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:58.668219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:58.668246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:58.668265Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:58.668332Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:58.668371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:58.668397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:58.668416Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:58.668446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:58.668470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:58.668494Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:58.668523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:58.668547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:58.668564Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:58.668672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:58.668701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:58.668719Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:58.668814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:58.668874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:58.668899Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:58.668929Z node 1 :TX ... 
PeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:35.741582Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:35.741767Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767494314958:max} readable: {1767494314958:max} at tablet 9437184 2026-01-04T02:38:35.741894Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-04T02:38:35.742059Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494314958:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:38:35.742111Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494314958:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:38:35.742479Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494314958:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-04T02:38:35.743763Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494314958:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-04T02:38:35.744490Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494314958:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1325:3294];trace_detailed=; 2026-01-04T02:38:35.744908Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-04T02:38:35.745097Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-04T02:38:35.745289Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:35.745435Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:35.745660Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1325:3294];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:38:35.745754Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1325:3294];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:35.745909Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1325:3294];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:35.746100Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1325:3294] finished for tablet 9437184 2026-01-04T02:38:35.746446Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1325:3294];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1324:3293];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":40012887,"name":"_full_task","f":40012887,"d_finished":0,"c":0,"l":40014600,"d":1713},"events":[{"name":"bootstrap","f":40013090,"d_finished":819,"c":1,"l":40013909,"d":819},{"a":40014092,"name":"ack","f":40014092,"d_finished":0,"c":0,"l":40014600,"d":508},{"a":40014080,"name":"processing","f":40014080,"d_finished":0,"c":0,"l":40014600,"d":520},{"name":"ProduceResults","f":40013651,"d_finished":506,"c":2,"l":40014377,"d":506},{"a":40014383,"name":"Finish","f":40014383,"d_finished":0,"c":0,"l":40014600,"d":217}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:35.746508Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1325:3294];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1324:3293];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:38:35.746921Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:1325:3294];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1324:3293];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":40012887,"name":"_full_task","f":40012887,"d_finished":0,"c":0,"l":40014994,"d":2107},"events":[{"name":"bootstrap","f":40013090,"d_finished":819,"c":1,"l":40013909,"d":819},{"a":40014092,"name":"ack","f":40014092,"d_finished":0,"c":0,"l":40014994,"d":902},{"a":40014080,"name":"processing","f":40014080,"d_finished":0,"c":0,"l":40014994,"d":914},{"name":"ProduceResults","f":40013651,"d_finished":506,"c":2,"l":40014377,"d":506},{"a":40014383,"name":"Finish","f":40014383,"d_finished":0,"c":0,"l":40014994,"d":611}],"id":"9437184::8"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1325:3294]->[1:1324:3293] 2026-01-04T02:38:35.747033Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1325:3294];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:38:35.743736Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-04T02:38:35.747077Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1325:3294];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:38:35.747206Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:1325:3294];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 80000/4886744 0/0 |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::EnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494856.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494856.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167494856.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494856.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494856.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167494856.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493656.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147494856.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147494856.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493656.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147493656.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147493656.000000s;Name=;Codec=}; 2026-01-04T02:37:36.904718Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:36.925769Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:36.926008Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:36.931894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:36.932084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:36.932242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:36.932317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:36.932399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:36.932487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:36.932551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:36.932639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:36.932760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:36.932875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:36.932943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:36.933008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:36.933085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:36.952638Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:36.952776Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:36.952840Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:36.952963Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:36.953087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:36.953159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:36.953194Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:36.953259Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:36.953302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:36.953331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:36.953350Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:36.953487Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:36.953553Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:36.953591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-01-04T02:37:36.953613Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:36.953685Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:36.953723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:36.953754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:36.953782Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:36.953845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:36.953876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:36.953897Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:36.953930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:36.953957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:36.953985Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:36.954114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:36.954160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:36.954188Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:36.954292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:36.954323Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:36.954345Z 
node 1 :TX_ ... umnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:35.914304Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:35.914536Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767494305227:max} readable: {1767494305227:max} at tablet 9437184 2026-01-04T02:38:35.914678Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-04T02:38:35.914870Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494305227:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:38:35.914933Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494305227:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:38:35.915511Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494305227:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-04T02:38:35.917270Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494305227:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-04T02:38:35.918314Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494305227:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:1455:3399];trace_detailed=; 2026-01-04T02:38:35.918785Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-04T02:38:35.919013Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-04T02:38:35.919266Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:35.919465Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:35.919914Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:38:35.920072Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:35.920199Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:35.920441Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1455:3399] finished for tablet 9437184 2026-01-04T02:38:35.920964Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1454:3398];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":62248883,"name":"_full_task","f":62248883,"d_finished":0,"c":0,"l":62251158,"d":2275},"events":[{"name":"bootstrap","f":62249155,"d_finished":998,"c":1,"l":62250153,"d":998},{"a":62250521,"name":"ack","f":62250521,"d_finished":0,"c":0,"l":62251158,"d":637},{"a":62250500,"name":"processing","f":62250500,"d_finished":0,"c":0,"l":62251158,"d":658},{"name":"ProduceResults","f":62249795,"d_finished":624,"c":2,"l":62250870,"d":624},{"a":62250875,"name":"Finish","f":62250875,"d_finished":0,"c":0,"l":62251158,"d":283}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:35.921053Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1454:3398];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:38:35.921511Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1454:3398];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":62248883,"name":"_full_task","f":62248883,"d_finished":0,"c":0,"l":62251764,"d":2881},"events":[{"name":"bootstrap","f":62249155,"d_finished":998,"c":1,"l":62250153,"d":998},{"a":62250521,"name":"ack","f":62250521,"d_finished":0,"c":0,"l":62251764,"d":1243},{"a":62250500,"name":"processing","f":62250500,"d_finished":0,"c":0,"l":62251764,"d":1264},{"name":"ProduceResults","f":62249795,"d_finished":624,"c":2,"l":62250870,"d":624},{"a":62250875,"name":"Finish","f":62250875,"d_finished":0,"c":0,"l":62251764,"d":889}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1455:3399]->[1:1454:3398] 2026-01-04T02:38:35.921611Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:38:35.917241Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-04T02:38:35.921657Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:38:35.921790Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:1455:3399];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 160000/9752224 80000/4886744 0/0 |89.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::TableWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 10327, MsgBus: 16897 2026-01-04T02:38:26.142941Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330241518541906:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:26.143302Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:26.315677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:26.315830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:26.366027Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:26.376076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:26.380517Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:26.381362Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330241518541802:2081] 1767494306139589 != 1767494306139592 2026-01-04T02:38:26.446493Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: 
(empty maybe) 2026-01-04T02:38:26.446523Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:26.446563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:26.446663Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:26.655307Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:26.807374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:26.848415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:26.976159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.108551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:27.150457Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:27.183936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:28.821656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330250108478265:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:28.821789Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:28.822082Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330250108478275:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:28.822161Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.072153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.096702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.122252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.151490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.178091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.207687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.239626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.302017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.368884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330254403446443:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.368950Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.369109Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330254403446448:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.369173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330254403446449:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.369222Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:29.371679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:29.379929Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330254403446452:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:29.464415Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330254403446503:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:30.899997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:30.933307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:30.966692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:31.142408Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330241518541906:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:31.142497Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:38:32.701358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags-UseSink+UseBatchUpdates [GOOD] >> KqpBatchDelete::ColumnTable [GOOD] >> TColumnShardTestSchema::ForgetAfterFail [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags+UseSink-UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 12059, MsgBus: 5677 2026-01-04T02:38:31.742420Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330263521864631:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:31.743488Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:31.930158Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:31.950418Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:31.950533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:31.955136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:32.025661Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330263521864603:2081] 1767494311740884 != 1767494311740887 2026-01-04T02:38:32.035352Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:32.078843Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:32.078863Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:32.078868Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:32.078949Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:32.110802Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:32.414355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:32.490393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.614907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.751335Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:32.762098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.820260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-04T02:38:34.426560Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330276406768358:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:34.426705Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:34.427222Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330276406768368:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:34.427290Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:34.713470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.744794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.772223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.800338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.830081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.861055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.918963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.960115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:35.027073Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330280701736536:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:35.027144Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:35.027201Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330280701736541:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:35.027262Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330280701736543:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:35.027305Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:35.030571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:35.040171Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330280701736545:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:35.098027Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330280701736596:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:36.389024Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330284996704202:2535], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029 2026-01-04T02:38:36.389425Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NDk0NWVkNy1lZjM5MzRiZS1lNWY2ODczZC1lN2JjNWNhZA==, ActorId: [1:7591330284996704193:2529], ActorState: ExecuteState, LegacyTraceId: 01ke3dxw97dvgc0wqkv7a54ge8, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } } tx_id# trace_id# |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] >> TSentinelTests::BSControllerUnresponsive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::DisableFlags-UseSink+UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 25883, MsgBus: 10282 2026-01-04T02:38:31.737124Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330263595554097:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:31.737564Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:31.952742Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:32.010117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:32.010262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:32.017192Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:32.020615Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330263595553992:2081] 1767494311733210 != 1767494311733213 2026-01-04T02:38:32.038108Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:32.108623Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:32.108666Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:32.108679Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:32.108769Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:32.211764Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:32.464706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:32.513939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.637730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.745295Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:32.790210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.848794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.492591Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330276480457750:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:34.492685Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:34.492925Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330276480457760:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:34.492973Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:34.801444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.840526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.870975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.897405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.926392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.965146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:35.013847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:35.052198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:35.120736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330280775425929:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:35.120831Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:35.121106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330280775425935:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:35.121175Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:35.121183Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330280775425934:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:35.124284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:35.134621Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330280775425938:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:35.225111Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330280775425989:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:36.736567Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330263595554097:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:36.736629Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:38:36.947214Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330285070393555:2527], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:3:34: Error: BATCH operations are not supported at the current time., code: 2029 2026-01-04T02:38:36.947707Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MjZmMDFiNjgtZDQyM2VjZi1lNDU0ZTliYy00OTA1YWJhOQ==, ActorId: [1:7591330285070393546:2521], ActorState: ExecuteState, LegacyTraceId: 01ke3dxwtw4wyzr8yzzp3qz4rp, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 3 column: 34 } message: "BATCH operations are not supported at the current time." end_position { row: 3 column: 34 } issue_code: 2029 severity: 1 } } tx_id# trace_id# |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ForgetAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494848.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494848.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493648.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2026-01-04T02:37:30.062733Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:30.100603Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:30.100853Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:30.108164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:30.108406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:30.108626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:30.108759Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:30.108865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:30.108995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:30.109114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:30.109219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:30.109347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:30.109480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:30.109585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:30.109687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:30.109791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:30.139305Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:30.139451Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:30.139515Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:30.139692Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:30.139876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:30.139984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:30.140038Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:30.140137Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:30.140205Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:30.140253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:30.140288Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:30.140464Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:30.140544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:30.140587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:30.140618Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:30.140729Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:30.140792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:30.140836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:30.140865Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:30.140914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:30.140951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:30.140981Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-04T02:37:30.141032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:30.141075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:30.141110Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:30.141304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:30.141352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:30.141382Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:30.141505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:30.141550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:30.141580Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:30.141629Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:30.141686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:30.141728Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:30.141784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... 
ataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:38:37.655500Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:2254;schema=timestamp: timestamp[us];);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:37.655561Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:38:37.655740Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=2254; 2026-01-04T02:38:37.655802Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=2254;batch_columns=timestamp; 2026-01-04T02:38:37.656090Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1234:3171];bytes=1280000;rows=160000;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; Got TEvKqpCompute::TEvScanData [1:1235:3172]->[1:1234:3171] 2026-01-04T02:38:37.656242Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:37.656362Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:37.656528Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:37.656695Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:38:37.656806Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:37.656903Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:37.657207Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1235:3172] finished for tablet 9437184 2026-01-04T02:38:37.657682Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1234:3171];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.034},{"events":["l_task_result"],"t":0.935},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.937}],"full":{"a":68612104,"name":"_full_task","f":68612104,"d_finished":0,"c":0,"l":69550064,"d":937960},"events":[{"name":"bootstrap","f":68612339,"d_finished":1161,"c":1,"l":68613500,"d":1161},{"a":69549471,"name":"ack","f":68646289,"d_finished":467576,"c":71,"l":69549373,"d":468169},{"a":69549457,"name":"processing","f":68613634,"d_finished":888600,"c":143,"l":69549381,"d":889207},{"name":"ProduceResults","f":68613066,"d_finished":774929,"c":216,"l":69549725,"d":774929},{"a":69549730,"name":"Finish","f":69549730,"d_finished":0,"c":0,"l":69550064,"d":334},{"name":"task_result","f":68613650,"d_finished":418568,"c":72,"l":69547872,"d":418568}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:37.657766Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1234:3171];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:38:37.658241Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1234:3171];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.034},{"events":["l_task_result"],"t":0.935},{"events":["l_ProduceResults","f_Finish"],"t":0.937},{"events":["l_ack","l_processing","l_Finish"],"t":0.938}],"full":{"a":68612104,"name":"_full_task","f":68612104,"d_finished":0,"c":0,"l":69550623,"d":938519},"events":[{"name":"bootstrap","f":68612339,"d_finished":1161,"c":1,"l":68613500,"d":1161},{"a":69549471,"name":"ack","f":68646289,"d_finished":467576,"c":71,"l":69549373,"d":468728},{"a":69549457,"name":"processing","f":68613634,"d_finished":888600,"c":143,"l":69549381,"d":889766},{"name":"ProduceResults","f":68613066,"d_finished":774929,"c":216,"l":69549725,"d":774929},{"a":69549730,"name":"Finish","f":69549730,"d_finished":0,"c":0,"l":69550623,"d":893},{"name":"task_result","f":68613650,"d_finished":418568,"c":72,"l":69547872,"d":418568}],"id":"9437184::7"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:1235:3172]->[1:1234:3171] 2026-01-04T02:38:37.658337Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:38:36.717996Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=10565848;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=10565848;selected_rows=0; 2026-01-04T02:38:37.658397Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:38:37.658531Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=3;SelfId=[1:1235:3172];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/10565848 160000/10565848 0/0 160000/10565848 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ColumnTable [GOOD] Test command err: Trying to start YDB, gRPC: 64041, MsgBus: 27711 
2026-01-04T02:38:31.031362Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330265142882236:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:31.031618Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:31.232661Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:31.239533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:31.239647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:31.296065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:31.319539Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330265142882132:2081] 1767494311027335 != 1767494311027338 2026-01-04T02:38:31.334797Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:31.373727Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:31.373776Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:31.373788Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:31.373899Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:31.470905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:31.712955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:32.036513Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:33.832656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330273732817610:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.832660Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330273732817621:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.832778Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.833012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330273732817625:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.833072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.836194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:33.844968Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330273732817624:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:38:33.967399Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330273732817677:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:34.211028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:38:34.731601Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;self_id=[1:7591330278027785589:2346];tablet_id=72075186224037939;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:38:34.731602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7591330278027785584:2341];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:38:34.731912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;self_id=[1:7591330278027785589:2346];tablet_id=72075186224037939;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:38:34.731914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7591330278027785584:2341];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:38:34.732256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;self_id=[1:7591330278027785589:2346];tablet_id=72075186224037939;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:38:34.732343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7591330278027785584:2341];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:38:34.732407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;self_id=[1:7591330278027785589:2346];tablet_id=72075186224037939;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:38:34.732490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7591330278027785584:2341];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:38:34.732505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;self_id=[1:7591330278027785589:2346];tablet_id=72075186224037939;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:38:34.732848Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037945;self_id=[1:7591330278027785584:2341];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:38:34.732932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;self_id=[1:7591330278027785589:2346];tablet_id=72075186224037939;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:38:34.733008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7591330278027785584:2341];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:38:34.733049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;self_id=[1:7591330278027785589:2346];tablet_id=72075186224037939;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:38:34.733141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7591330278027785584:2341];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:38:34.733163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;self_id=[1:7591330278027785589:2346];tablet_id=72075186224037939;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:38:34.733280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7591330278027785584:2341];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:38:34.733313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;self_id=[1:7591330278027785589:2346];tablet_id=72075186224037939;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:38:34.733738Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7591330278027785584:2341];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:38:34.733843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;self_id=[1:7591330278027785589:2346];tablet_id=72075186224037939;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:38:34.733922Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;self_id=[1:7591330278027785584:2341];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:38:34.733950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;self_id=[1:7591330278027785589:2346];tablet_id=72075186224037939;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:38:34.734034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037945;self_id=[1:7591330278027785584:2341];tablet_id=72075186224037945;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLAS ... pp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.307781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.307819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.307829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.314767Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.314863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.314895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.315075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.315128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.315145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.320181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.320226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.320236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.323187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.323253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.323270Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.325554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.325595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.325606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.331397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.331438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.331451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.331710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.331767Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.331780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.336887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.336927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.336936Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.342345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.342395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.342405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.347548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.347591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.347602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.352700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.352743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.352754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.358312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.358367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.358377Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.363187Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.363231Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.363240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.370996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.371062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.371076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037947;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710660;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=42;result=not_found; 2026-01-04T02:38:36.868211Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=N2M0ZTA5YTMtOTNmZWNhNzYtYmQwOTQ5YTUtZTUyOTE4Yw==, ActorId: [1:7591330273732817606:2321], ActorState: ExecuteState, LegacyTraceId: 01ke3dxwj3d69wk3n7w85makx4, Create QueryResponse for error on request, msg: BATCH operations are not supported for column tables at the current time. status# PRECONDITION_FAILED issues# trace_id# >> TBackupTests::ShouldSucceedOnLargeData[Zstd] |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] >> TBackupTests::ShouldSucceedOnLargeData[Raw] >> TBackupTests::BackupUuidColumn[Raw] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:38:36.787670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:38:36.787784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:36.787824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:38:36.787858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:38:36.787901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:38:36.787930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:38:36.787993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:36.788109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:38:36.788906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:38:36.789608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:38:36.897484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:36.897557Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:36.912154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:38:36.912639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:38:36.914205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:38:36.930634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:38:36.930878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:38:36.931543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:36.931779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:38:36.937993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:36.939528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:38:36.944624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:36.944705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:36.944758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:38:36.945222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:36.945296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-01-04T02:38:36.945884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:38:37.110725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.111792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.111936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.112006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.112097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.112182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.112291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.112376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.112458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.112539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.112614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.112726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.112836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.112911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.112972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... AAA amz-sdk-invocation-id: 171546D4-DF2F-45F6-A4FC-839CDE0D3D90 amz-sdk-request: attempt=1 content-length: 20 content-md5: 8NOHH1ycwPXC5K+v+37u8g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_01.csv.zst / / 20 2026-01-04T02:38:38.058584Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:881:2839] 2026-01-04T02:38:38.058750Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:882:2840], sender# [1:881:2839], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2026-01-04T02:38:38.058831Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:891:2847], result# PutObjectResult { ETag: f0d3871f5c9cc0f5c2e4afaffb7eeef2 } 2026-01-04T02:38:38.058866Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:891:2847], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-04T02:38:38.059135Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:890:2845], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:29343 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 51959154-5987-4AFE-A433-26440CB1EAF4 amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2026-01-04T02:38:38.064960Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:882:2840], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2026-01-04T02:38:38.065019Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:882:2840], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-04T02:38:38.065981Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:38:38.066072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:38:38.066099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:38:38.066130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:38:38.066197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason 
remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:38:38.066264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-04T02:38:38.066457Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:881:2839], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:38:38.074318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:38:38.097259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 726 RawX2: 4294970005 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-04T02:38:38.097308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:38:38.097437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 726 RawX2: 4294970005 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-04T02:38:38.097516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 726 RawX2: 4294970005 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-04T02:38:38.097569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:38.097680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:38.098149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 730 RawX2: 4294970006 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-04T02:38:38.098187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-04T02:38:38.098281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply 
execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 730 RawX2: 4294970006 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-04T02:38:38.098440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 730 RawX2: 4294970006 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-04T02:38:38.098482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:38.098526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:38.098571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:38:38.098610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-04T02:38:38.098715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-04T02:38:38.098802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:38.101241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:38.101666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:38.102039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:38.102080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:38:38.102175Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:38:38.102200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:38.102233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:38:38.102255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:38.102284Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:38:38.102337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:777:2744] message: TxId: 102 2026-01-04T02:38:38.102371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:38.102404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:38:38.102441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:38:38.102555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:38:38.104038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:38:38.104079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:862:2821] TestWaitNotification: OK eventTxId 102 |89.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:38:36.787657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:38:36.787743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:36.787801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:38:36.787834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:38:36.787868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:38:36.787927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:38:36.787977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:36.788049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-04T02:38:36.788853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:38:36.789605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:38:36.907153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:36.907226Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:36.922490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:38:36.923004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:38:36.923187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:38:36.931215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:38:36.931435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:38:36.932103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:36.932336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:38:36.938112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:36.939518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:38:36.944652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:36.944720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:36.944768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:38:36.945211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:36.945270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:38:36.946017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:38:37.077638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.078826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.078951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.079026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.079096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.079200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.079324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.079408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.079504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.079576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.079653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.079720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.079783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.079841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.079881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
7594046678944 2026-01-04T02:38:37.983647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2026-01-04T02:38:37.983774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2026-01-04T02:38:37.983961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:38:38.000115Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:799: [Export] [s3] Bootstrap: self# [1:823:2792], attempt# 0 2026-01-04T02:38:38.027112Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:453: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:823:2792], sender# [1:822:2790] FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-04T02:38:38.029448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:38.029562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:38:38.029945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:38.029995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-04T02:38:38.030881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:38.030952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:38:38.031903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:38:38.032027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:38:38.032089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:38:38.032135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:38:38.032192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:38:38.032309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:38:38.034661Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:30104 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E4E34D20-BB16-4E68-97BB-ABEF7C2C8613 amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 2026-01-04T02:38:38.046256Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:413: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:823:2792], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:30104 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 531B24E9-A02F-4454-85B0-7F140CCC1246 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2026-01-04T02:38:38.051840Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:823:2792], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2026-01-04T02:38:38.051990Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:822:2790] 2026-01-04T02:38:38.053215Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:823:2792], sender# [1:822:2790], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:30104 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F2285C27-17FB-4998-BCD4-E69DB01024D5 amz-sdk-request: attempt=1 content-length: 40 content-md5: LXbLDYru8NmFsYXNSXjnpQ== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 40 2026-01-04T02:38:38.056372Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:823:2792], result# PutObjectResult { ETag: 2d76cb0d8aeef0d985b185cd4978e7a5 } 2026-01-04T02:38:38.056435Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:823:2792], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-04T02:38:38.056574Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:822:2790], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-04T02:38:38.065900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 712 RawX2: 4294969994 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" 
BytesProcessed: 20 RowsProcessed: 1 } 2026-01-04T02:38:38.065965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:38:38.066123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 712 RawX2: 4294969994 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2026-01-04T02:38:38.066223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 712 RawX2: 4294969994 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2026-01-04T02:38:38.066322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:38.066396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:38.066441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:38:38.066485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-04T02:38:38.066630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:38.068828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:38.069130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:38.069191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:38:38.069301Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:38:38.069337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:38.069373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:38:38.069421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:38.069463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:38:38.069541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:741:2719] message: TxId: 102 2026-01-04T02:38:38.069600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:38.069641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:38:38.069674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:38:38.069799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:38:38.071698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:38:38.071750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:806:2776] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:38:36.787660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:38:36.787756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:36.787793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:38:36.787824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:38:36.787865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:38:36.787908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:38:36.787961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:36.788037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:38:36.788799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , 
AvailableExternalDataSources# 2026-01-04T02:38:36.789605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:38:36.898604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:36.898667Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:36.912137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:38:36.912574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:38:36.914017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:38:36.926989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:38:36.927782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:38:36.929006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:36.931670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:38:36.937875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:36.939541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:38:36.944622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:36.944701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:36.944763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:38:36.945209Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:36.945302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:38:36.945916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:38:37.107015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.108055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.108192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.108262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.108351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.108420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.108530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.108610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.108696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.108765Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.108839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.108945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.109042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.109141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.109203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
2057594046678944 2026-01-04T02:38:37.984306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2026-01-04T02:38:37.984438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2026-01-04T02:38:37.984607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:38:38.000116Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:799: [Export] [s3] Bootstrap: self# [1:823:2792], attempt# 0 2026-01-04T02:38:38.027132Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:453: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:823:2792], sender# [1:822:2790] FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-04T02:38:38.029461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:38.029544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:38:38.029945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:38.029992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-04T02:38:38.031052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:38.031124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:38:38.032192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:38:38.032327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:38:38.032401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:38:38.032460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:38:38.032512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:38:38.032605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:38:38.035159Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:9676 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 7B2E5995-B47E-461E-AC6A-2A36464A66FD amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 2026-01-04T02:38:38.046293Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:413: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:823:2792], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:9676 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 011352A6-AA43-4377-819B-34B326866D3A amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2026-01-04T02:38:38.052337Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:823:2792], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2026-01-04T02:38:38.052441Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:822:2790] 2026-01-04T02:38:38.052765Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:823:2792], sender# [1:822:2790], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:9676 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EF67BD69-DBA9-42FF-BAC5-A154B5A339C8 amz-sdk-request: attempt=1 content-length: 20 content-md5: 2qFn9G0TW8wfvJ9C+A5Jbw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 20 2026-01-04T02:38:38.055900Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:823:2792], result# PutObjectResult { ETag: daa167f46d135bcc1fbc9f42f80e496f } 2026-01-04T02:38:38.055974Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:823:2792], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-04T02:38:38.056124Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:822:2790], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-04T02:38:38.065866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 712 RawX2: 4294969994 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" 
BytesProcessed: 10 RowsProcessed: 1 } 2026-01-04T02:38:38.065922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:38:38.066064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 712 RawX2: 4294969994 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-04T02:38:38.066160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 712 RawX2: 4294969994 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-04T02:38:38.066237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:38.066283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:38.066314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:38:38.066356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-04T02:38:38.066540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:38.068809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:38.069144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:38.069191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:38:38.069303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:38:38.069351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:38.069395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:38:38.069432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:38.069467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:38:38.069540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:741:2719] message: TxId: 102 2026-01-04T02:38:38.069611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:38.069653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:38:38.069684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:38:38.069855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:38:38.071699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:38:38.071749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:806:2776] TestWaitNotification: OK eventTxId 102 |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest |90.0%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CopyPartUpload >> KqpBatchUpdate::DisableFlags-UseSink+UseBatchUpdates [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::BSControllerUnresponsive [GOOD] Test command err: 2026-01-04T02:38:20.075387Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-04T02:38:20.075443Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-04T02:38:20.075502Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-04T02:38:20.075530Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-04T02:38:20.075563Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-04T02:38:20.075660Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-04T02:38:20.076343Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: 
"node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-04T02:38:20.080347Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 
DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { 
VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... 
Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860110 2026-01-04T02:38:36.962219Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860110 2026-01-04T02:38:36.962310Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37860110 2026-01-04T02:38:36.962382Z node 1 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-04T02:38:36.962667Z node 1 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 2:10, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:36.962705Z node 1 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 8:34, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:36.962725Z node 1 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 6:26, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:36.962751Z node 1 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2026-01-04T02:38:36.973145Z node 1 :CMS DEBUG: sentinel.cpp:1403: [Sentinel] [Main] Retrying: attempt# 1 2026-01-04T02:38:36.973224Z node 1 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2026-01-04T02:38:36.983573Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-04T02:38:36.983619Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 
2026-01-04T02:38:36.983700Z node 1 :CMS DEBUG: sentinel.cpp:1403: [Sentinel] [Main] Retrying: attempt# 2 2026-01-04T02:38:36.983730Z node 1 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 3 2026-01-04T02:38:36.983886Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 1, wbId# [1:8388350642965737326:1634689637] 2026-01-04T02:38:36.983939Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2026-01-04T02:38:36.983966Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2026-01-04T02:38:36.983991Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2026-01-04T02:38:36.984030Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2026-01-04T02:38:36.984050Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2026-01-04T02:38:36.984082Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2026-01-04T02:38:36.984126Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2026-01-04T02:38:36.984323Z node 1 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { }, cookie# 123 2026-01-04T02:38:36.984353Z node 1 :CMS ERROR: sentinel.cpp:1385: [Sentinel] [Main] Unsuccesful response from BSC: error# 2026-01-04T02:38:36.984829Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 1, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-04T02:38:36.985207Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-8.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-04T02:38:36.985408Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] 
[StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-04T02:38:36.985657Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-04T02:38:36.985735Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-04T02:38:36.985950Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-04T02:38:36.986115Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: 
"/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-04T02:38:36.986359Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 37880110 2026-01-04T02:38:36.986436Z node 1 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest >> KqpBatchDelete::Returning [GOOD] >> TS3WrapperTests::CopyPartUpload [GOOD] >> TS3WrapperTests::GetUnknownObject |90.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CopyPartUpload [GOOD] Test command err: 2026-01-04T02:38:39.819551Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 7DDD5CF6-0111-4F7D-9D19-A0CAC78432C3, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:19107 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E2D7F0A4-FFFB-497C-8BDE-E379AD394B11 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2026-01-04T02:38:39.835172Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 7DDD5CF6-0111-4F7D-9D19-A0CAC78432C3, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-01-04T02:38:39.836114Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 433941F2-9D1C-4DA5-8778-06CC2CB82FB7, request# CreateMultipartUpload { Bucket: TEST Key: key1 } REQUEST: POST /TEST/key1?uploads HTTP/1.1 HEADERS: Host: localhost:19107 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F0CD046C-F57C-4F43-96E8-2F8EFC65FC51 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploads= 2026-01-04T02:38:39.840759Z 
node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 433941F2-9D1C-4DA5-8778-06CC2CB82FB7, response# CreateMultipartUploadResult { Bucket: Key: TEST/key1 UploadId: 1 } 2026-01-04T02:38:39.841073Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 772682DF-717D-4F96-8D49-44A741B562E5, request# UploadPartCopy { Bucket: TEST Key: key1 UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key1?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:19107 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 55A15A54-3F4C-443B-8146-634AE0F9962B amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-copy-source: /TEST/key x-amz-copy-source-range: bytes=1-2 S3_MOCK::HttpServeWrite: /TEST/key1 / partNumber=1&uploadId=1 / 0 2026-01-04T02:38:39.844518Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 772682DF-717D-4F96-8D49-44A741B562E5, response# UploadPartCopyResult { } 2026-01-04T02:38:39.844833Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# CAD680C1-C4CF-4DF5-88B1-BDD5026B34FD, request# CompleteMultipartUpload { Bucket: TEST Key: key1 UploadId: 1 MultipartUpload: { Parts: [afc7e8a98f75755e513d9d5ead888e1d] } } REQUEST: POST /TEST/key1?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:19107 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 6EAAD76C-DA5F-4328-BE2E-339FEB48F2DE amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key1 / uploadId=1 2026-01-04T02:38:39.851581Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# CAD680C1-C4CF-4DF5-88B1-BDD5026B34FD, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key1 ETag: afc7e8a98f75755e513d9d5ead888e1d } 2026-01-04T02:38:39.852365Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 989A2488-1A53-4651-9502-B91E5FD90DC4, request# GetObject { Bucket: TEST Key: key1 Range: bytes=0-1 } REQUEST: GET /TEST/key1 HTTP/1.1 HEADERS: Host: localhost:19107 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: D9C29DB7-31CB-42B6-A0E8-D459F8C1C918 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-1 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key1 / 2 2026-01-04T02:38:39.856001Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 989A2488-1A53-4651-9502-B91E5FD90DC4, response# GetObjectResult { } >> TSentinelTests::InitialDeploymentGracePeriod [GOOD] |90.0%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject [GOOD] |90.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |90.0%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |90.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Returning [GOOD] Test command err: Trying to start YDB, gRPC: 16740, MsgBus: 27413 2026-01-04T02:38:34.285782Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330276653723704:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:34.285870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:34.503811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:34.503922Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:34.531185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:34.577574Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:34.580515Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330276653723677:2081] 1767494314284501 != 1767494314284504 2026-01-04T02:38:34.590026Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:34.628600Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:34.628624Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:34.628631Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:34.628714Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:34.777741Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:35.056401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:35.103306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:35.229742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:35.329275Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:35.375493Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:35.437154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:37.205331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330289538627437:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.205427Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.205726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330289538627447:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.205791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.516466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:37.540832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:37.566796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:37.591383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:37.619381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:37.650367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:37.682573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:37.725530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:37.784755Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330289538628316:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.784851Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330289538628321:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.784854Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.785086Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330289538628323:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.785179Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.788632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:37.797656Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330289538628324:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:38:37.871393Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330289538628376:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:39.287274Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330276653723704:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:39.289202Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:38:39.360434Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330298128563284:2537], status: GENERIC_ERROR, issues:
:2:22: Error: BATCH DELETE is unsupported with RETURNING 2026-01-04T02:38:39.360766Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NDJlZmUwYjQtN2ViMzZjZGYtZTVmMzRiZWUtODVlNTVjYjY=, ActorId: [1:7591330298128563273:2530], ActorState: ExecuteState, LegacyTraceId: 01ke3dxz7g7z4envp2h8rsnyh0, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 2 column: 22 } message: "BATCH DELETE is unsupported with RETURNING" end_position { row: 2 column: 22 } severity: 1 } tx_id# trace_id# >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] |90.0%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::DisableFlags-UseSink+UseBatchUpdates [GOOD] Test command err: Trying to start YDB, gRPC: 20836, MsgBus: 18597 2026-01-04T02:38:33.969008Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330274286513403:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:33.969696Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:34.180039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:34.180148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:34.204618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:34.269733Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:34.269962Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:34.270510Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330274286513373:2081] 1767494313967245 != 1767494313967248 2026-01-04T02:38:34.332986Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:34.333010Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:34.333017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:34.333118Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:34.517856Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:34.692331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:34.739885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.869392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.974882Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:35.004019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:35.062096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:36.970488Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330287171417140:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:36.970692Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:36.971139Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330287171417150:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:36.971191Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.224186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:37.251569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:37.276948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:37.307508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:37.336631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:37.368369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:37.428631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:37.466510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:37.532139Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330291466385318:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.532245Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.532520Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330291466385324:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.532536Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330291466385323:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.532575Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.536119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:37.546700Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330291466385327:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:37.620394Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330291466385378:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:38.969323Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330274286513403:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:38.969380Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:38:39.150823Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330300056320241:2527], status: PRECONDITION_FAILED, issues:
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029
: Error: Execution, code: 1060
:4:41: Error: BATCH operations are not supported at the current time., code: 2029 2026-01-04T02:38:39.151404Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NjIxY2QyNWUtZDY0NDQ3YjQtODk5MzIyYzYtZjBmMTJkNGQ=, ActorId: [1:7591330300056320232:2521], ActorState: ExecuteState, LegacyTraceId: 01ke3dxyz51vvmwkjw174xtbra, ReplyQueryCompileError, remove tx status# PRECONDITION_FAILED issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 4 column: 41 } message: "BATCH operations are not supported at the current time." end_position { row: 4 column: 41 } issue_code: 2029 severity: 1 } } tx_id# trace_id# |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> TBackupTests::BackupUuidColumn[Raw] [GOOD] >> TS3WrapperTests::AbortMultipartUpload ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetUnknownObject [GOOD] Test command err: 2026-01-04T02:38:40.381898Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 8CCD811E-5CBF-456F-9D08-4E9B0FF576D4, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:25246 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: E9E3B4E6-DEBC-4866-AA31-9482E46C6224 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2026-01-04T02:38:40.386702Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 8CCD811E-5CBF-456F-9D08-4E9B0FF576D4, response# No response body. 
|90.0%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> KqpBatchDelete::TableWithIndex [GOOD] >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableOneUniqIndexDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 6126, MsgBus: 9399 2026-01-04T02:35:15.813223Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329422289988457:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:15.813382Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:16.287076Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:16.330248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:16.330407Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:16.338220Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:16.425365Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:16.429348Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329422289988233:2081] 1767494115755854 != 1767494115755857 2026-01-04T02:35:16.560053Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:16.638252Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:16.638275Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:16.638281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:16.638366Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:16.813933Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:17.162674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:17.171646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:35:17.211040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:17.338196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:17.489633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:17.574461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.455712Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329439469859303:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.455783Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.455955Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329439469859313:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.455996Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.703559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.728752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.754749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.784401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.817786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.851284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.882503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.931594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:20.025153Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329443764827481:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:20.025256Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:20.025610Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329443764827486:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:20.025670Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329443764827487:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:20.026041Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:20.030547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:20.043107Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329443764827490:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:35:20.111152Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329443764827541:3776] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:35:20.812165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329422289988457:2254];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:20.812277Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:35:21.599878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:04.348796Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:04.363213Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329889242458344:2081] 1767494224218846 != 1767494224218849 2026-01-04T02:37:04.395611Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:04.395725Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:04.409074Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:04.565894Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:04.574523Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:04.574549Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:04.574560Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:04.574659Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:05.172066Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:05.183253Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:37:05.199399Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:05.243309Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:05.280051Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:05.485360Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:05.558630Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:08.638347Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329906422329407:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:08.638470Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:08.642127Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329906422329417:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:08.642240Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:08.716145Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:08.752625Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:08.789790Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:08.835140Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:08.877161Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:08.927831Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:08.974383Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:09.032484Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:09.121686Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329910717297587:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:09.121756Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:09.122168Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329910717297592:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:09.122204Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591329910717297593:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:09.122273Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:09.125994Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:37:09.138056Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591329910717297596:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:37:09.219975Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329889242458380:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:09.220225Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:37:09.223379Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591329910717297647:3773] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:37:11.218173Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:19.314025Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:37:19.314055Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest |90.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] |90.0%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |90.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |90.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::InitialDeploymentGracePeriod [GOOD] Test command err: 2026-01-04T02:38:19.404098Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-04T02:38:19.404166Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-04T02:38:19.404208Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-04T02:38:19.404227Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-04T02:38:19.404256Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-04T02:38:19.404322Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-04T02:38:19.404889Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: 
"pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-04T02:38:19.408852Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: 
"/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { 
VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot 
{ VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... : 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2026-01-04T02:38:38.864947Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2026-01-04T02:38:38.865044Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2026-01-04T02:38:38.865156Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7740110 2026-01-04T02:38:38.865192Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-04T02:38:38.875765Z node 9 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-04T02:38:38.875826Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-04T02:38:38.875928Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 
2026-01-04T02:38:38.875969Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-01-04T02:38:38.876008Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2026-01-04T02:38:38.876035Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-01-04T02:38:38.876075Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-01-04T02:38:38.876102Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-01-04T02:38:38.876128Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, wbId# [15:8388350642965737326:1634689637] 2026-01-04T02:38:38.876154Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 16, wbId# [16:8388350642965737326:1634689637] 2026-01-04T02:38:38.876555Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 9, response# PDiskStateInfo { PDiskId: 36 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-36.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 37 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-37.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 38 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-38.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 39 CreateTime: 0 ChangeTime: 0 Path: "/9/pdisk-39.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-04T02:38:38.877083Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 12, response# PDiskStateInfo { PDiskId: 48 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-48.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 49 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-49.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 50 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-50.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 51 CreateTime: 0 ChangeTime: 0 Path: "/12/pdisk-51.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-04T02:38:38.877263Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 13, response# PDiskStateInfo { PDiskId: 52 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-52.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 53 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-53.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 54 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-54.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 55 CreateTime: 0 ChangeTime: 0 Path: "/13/pdisk-55.data" Guid: 1 AvailableSize: 107374182400 
TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-04T02:38:38.877390Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 14, response# PDiskStateInfo { PDiskId: 56 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-56.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 57 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-57.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 58 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-58.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 59 CreateTime: 0 ChangeTime: 0 Path: "/14/pdisk-59.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-04T02:38:38.877514Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 15, response# PDiskStateInfo { PDiskId: 60 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-60.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 61 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-61.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 62 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-62.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 63 CreateTime: 0 ChangeTime: 0 Path: "/15/pdisk-63.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-04T02:38:38.877626Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 16, response# PDiskStateInfo { PDiskId: 64 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-64.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 65 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-65.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 66 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-66.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 67 CreateTime: 0 ChangeTime: 0 Path: "/16/pdisk-67.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-04T02:38:38.877894Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 10, response# PDiskStateInfo { PDiskId: 40 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-40.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 41 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-41.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 42 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-42.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 43 CreateTime: 0 ChangeTime: 0 Path: "/10/pdisk-43.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-04T02:38:38.878009Z node 9 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 11, response# PDiskStateInfo { PDiskId: 44 CreateTime: 0 ChangeTime: 0 Path: 
"/11/pdisk-44.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 45 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-45.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 46 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-46.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 47 CreateTime: 0 ChangeTime: 0 Path: "/11/pdisk-47.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 7800110 2026-01-04T02:38:38.878045Z node 9 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-04T02:38:38.878312Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 9:37, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:38.878372Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 1 2026-01-04T02:38:38.878589Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Success: true, cookie# 6 2026-01-04T02:38:38.878637Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 9:37 |90.0%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |90.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |90.0%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest |90.0%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |90.0%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |90.0%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortMultipartUpload [GOOD] Test command err: 2026-01-04T02:38:41.345056Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 02909A98-05DA-478D-9AFB-9A16A3E2A00D, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:27086 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 99149948-3BAB-4E87-9C39-937D83B034D1 amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2026-01-04T02:38:41.350917Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 02909A98-05DA-478D-9AFB-9A16A3E2A00D, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2026-01-04T02:38:41.351277Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 22C273E1-BA8C-4C34-84EB-6124B054DBFF, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: 1 } REQUEST: DELETE /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:27086 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 2E2FFA1F-23E9-4DB9-8ECA-1C156FB3AC74 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=1 
2026-01-04T02:38:41.354058Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 22C273E1-BA8C-4C34-84EB-6124B054DBFF, response# AbortMultipartUploadResult { } 2026-01-04T02:38:41.354387Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# C471A4C2-3261-483F-8683-9D463CA5777F, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:27086 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 645BCCC2-DE29-4730-8891-C5200D41FDF4 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 2026-01-04T02:38:41.357438Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# C471A4C2-3261-483F-8683-9D463CA5777F, response# No response body. |90.0%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnSingleShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:38:39.511085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:38:39.511191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:39.511234Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:38:39.511277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:38:39.511331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:38:39.511373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:38:39.511446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:39.511541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:38:39.512456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:38:39.512765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:38:39.584048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:39.584107Z node 1 :IMPORT 
WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:39.595555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:38:39.595741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:38:39.595870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:38:39.603530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:38:39.603716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:38:39.604259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:39.604482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:38:39.607328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:39.607540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:38:39.608500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:39.608546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:39.608653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:38:39.608690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:39.608725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:38:39.608893Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:38:39.735579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.736454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.736562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.736633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.736690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.736742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.736813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.736900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.736963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.737019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.737076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.737137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.737209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.737265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.737317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
shard: 72057594046678944 2026-01-04T02:38:40.586916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2026-01-04T02:38:40.587005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2026-01-04T02:38:40.587122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:38:40.596577Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:799: [Export] [s3] Bootstrap: self# [1:822:2791], attempt# 0 2026-01-04T02:38:40.614012Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:453: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:822:2791], sender# [1:821:2790] 2026-01-04T02:38:40.617197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:40.617266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:38:40.617550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:40.617604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-04T02:38:40.618207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:40.618267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:4299 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B3BDCDC2-DF16-48B0-9F94-E08E59DA423F amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 2026-01-04T02:38:40.618829Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:38:40.618963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:38:40.619020Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:38:40.619156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, 
at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:38:40.619202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:38:40.619274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-04T02:38:40.620011Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:413: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:822:2791], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } 2026-01-04T02:38:40.623958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:4299 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 67E950A6-1888-4C76-AB41-602B76D19246 amz-sdk-request: attempt=1 content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2026-01-04T02:38:40.624613Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:822:2791], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2026-01-04T02:38:40.624687Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:821:2790] 2026-01-04T02:38:40.624779Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:822:2791], sender# [1:821:2790], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:4299 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: CAB55060-440C-409D-B336-7ACDC979A79C amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 11 2026-01-04T02:38:40.627845Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:822:2791], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2026-01-04T02:38:40.627913Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:822:2791], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-04T02:38:40.628054Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:821:2790], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-04T02:38:40.645276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" 
BytesProcessed: 10 RowsProcessed: 1 } 2026-01-04T02:38:40.645336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:38:40.645469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-04T02:38:40.645555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-04T02:38:40.645615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:40.645659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:40.645692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:38:40.645941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-04T02:38:40.646091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:40.647987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:40.648123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:40.648161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:38:40.648252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:38:40.648287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:40.648317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:38:40.648347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:40.648374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:38:40.648424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:743:2721] message: TxId: 102 2026-01-04T02:38:40.648462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:40.648491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:38:40.648519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:38:40.648642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:38:40.650234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:38:40.650276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:806:2776] TestWaitNotification: OK eventTxId 102 |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetWithLostAnswer [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494871.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=167494871.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494871.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147494871.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493671.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=147493671.000000s;Name=;Codec=}; 2026-01-04T02:37:52.410157Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:52.429360Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:52.429541Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:52.435195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:52.435374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:52.435530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:52.435633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:52.435727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:52.435798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:52.435876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:52.435963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:52.436058Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:52.436155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:52.436218Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:52.436278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:52.436345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:52.454704Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:52.454851Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:52.454920Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:52.455052Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:52.455238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:52.455342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:52.455372Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:52.455451Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:52.455494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:52.455521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:52.455539Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:52.455654Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:52.455713Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:52.455744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:52.455762Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:52.455824Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:52.455875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:52.455931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:52.455954Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:52.455987Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:52.456024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2026-01-04T02:37:52.456048Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:52.456076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:52.456112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:52.456133Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:52.456247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:52.456277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:52.456307Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:52.456403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:52.456441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:52.456460Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:52.456501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:52.456528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:52.456565Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:52.456599Z node 1 :TX_COLUM ... 
hard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=7; 2026-01-04T02:38:41.044860Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=63; 2026-01-04T02:38:41.044885Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=2361; 2026-01-04T02:38:41.044914Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=2440; 2026-01-04T02:38:41.044951Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=7; 2026-01-04T02:38:41.045001Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=21; 2026-01-04T02:38:41.045022Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=2856; 2026-01-04T02:38:41.045109Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=46; 2026-01-04T02:38:41.045193Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=48; 2026-01-04T02:38:41.045285Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=45; 2026-01-04T02:38:41.045375Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=49; 2026-01-04T02:38:41.047186Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1771; 2026-01-04T02:38:41.048584Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1340; 2026-01-04T02:38:41.048648Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=8; 2026-01-04T02:38:41.048691Z node 1 :TX_COLUMNSHARD 
INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2026-01-04T02:38:41.048716Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2026-01-04T02:38:41.048764Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=26; 2026-01-04T02:38:41.048802Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-04T02:38:41.048849Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=27; 2026-01-04T02:38:41.048878Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=8; 2026-01-04T02:38:41.048919Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=19; 2026-01-04T02:38:41.048969Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=28; 2026-01-04T02:38:41.049174Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=158; 2026-01-04T02:38:41.049202Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=12916; 2026-01-04T02:38:41.049306Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=9739224;raw_bytes=13544452;count=2;records=160000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:38:41.049385Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:38:41.049430Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:38:41.049474Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:38:41.056400Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-04T02:38:41.056545Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:38:41.056644Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-04T02:38:41.056693Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494009949;tx_id=18446744073709551615;;current_snapshot_ts=1767494297836; 2026-01-04T02:38:41.056731Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:38:41.056767Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:41.056798Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:41.056865Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:41.057030Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.162000s; 2026-01-04T02:38:41.058158Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:38:41.058739Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:38:41.058775Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:38:41.058851Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-04T02:38:41.058910Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494009949;tx_id=18446744073709551615;;current_snapshot_ts=1767494297836; 2026-01-04T02:38:41.058936Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:38:41.058970Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:41.058994Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:41.059072Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:41.059479Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.111000s; 2026-01-04T02:38:41.059521Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1332:3190];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/9739224 160000/9739224 80000/4873744 0/0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::BackupUuidColumn[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:38:39.885735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:38:39.885843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:39.885884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:38:39.885919Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:38:39.885958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:38:39.885988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:38:39.886042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:39.886145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 
600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:38:39.886997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:38:39.887284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:38:39.980400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:39.980451Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:39.994096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:38:39.994271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:38:39.994433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:38:40.002222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:38:40.002408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:38:40.003090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:40.003301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:38:40.006079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:40.006246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:38:40.007170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:40.007225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:40.007358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:38:40.007405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:40.007446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:38:40.007635Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:38:40.135947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:38:40.136808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.136922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.136987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.137028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.137116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.137226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.137306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.137382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.137432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.137486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.137588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.137683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.137764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.137847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
rd: 72057594046678944 2026-01-04T02:38:40.882474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2026-01-04T02:38:40.882567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2026-01-04T02:38:40.882697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:38:40.890811Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:799: [Export] [s3] Bootstrap: self# [1:822:2791], attempt# 0 2026-01-04T02:38:40.912014Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:453: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:822:2791], sender# [1:821:2790] 2026-01-04T02:38:40.914848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:40.914915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:38:40.915237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:40.915289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-04T02:38:40.915996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:40.916065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:38:40.916860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:38:40.917008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:38:40.917047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:38:40.917082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:38:40.917153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:38:40.917294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT 
/metadata.json HTTP/1.1 HEADERS: Host: localhost:12893 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 5B681750-7586-4D4D-ACCD-02016E412E6A amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 2026-01-04T02:38:40.918386Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:413: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:822:2791], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:12893 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 01DF5CE8-1EC2-414F-B982-56DD47170048 amz-sdk-request: attempt=1 content-length: 357 content-md5: IxJB3qM/y2xlsv8qcwTF7g== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2026-01-04T02:38:40.922636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:38:40.923090Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:822:2791], result# PutObjectResult { ETag: 231241dea33fcb6c65b2ff2a7304c5ee } 2026-01-04T02:38:40.923143Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:821:2790] 2026-01-04T02:38:40.923228Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:822:2791], sender# [1:821:2790], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:12893 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: EB6745E8-18CF-4C52-80D4-B8A67954730E amz-sdk-request: attempt=1 content-length: 39 content-md5: GLX1nc5/cKhlAfxBHlykQA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv / / 39 2026-01-04T02:38:40.926066Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:822:2791], result# PutObjectResult { ETag: 18b5f59dce7f70a86501fc411e5ca440 } 2026-01-04T02:38:40.926119Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:822:2791], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-04T02:38:40.926261Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:821:2790], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-04T02:38:40.944155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: 
"" BytesProcessed: 20 RowsProcessed: 1 } 2026-01-04T02:38:40.944212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:38:40.944395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2026-01-04T02:38:40.944519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 20 RowsProcessed: 1 } 2026-01-04T02:38:40.944596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:40.944647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:40.944687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:38:40.944737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-04T02:38:40.944938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:40.946833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:40.947026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:40.947081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:38:40.947183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:38:40.947239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:40.947293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:38:40.947329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:40.947361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation 
IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:38:40.947431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:743:2721] message: TxId: 102 2026-01-04T02:38:40.947477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:40.947512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:38:40.947544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:38:40.947665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:38:40.949757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:38:40.949822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:806:2776] TestWaitNotification: OK eventTxId 102 |90.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TS3WrapperTests::CompleteUnknownUpload >> TS3WrapperTests::AbortUnknownUpload ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::TableWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 61465, MsgBus: 16586 2026-01-04T02:38:31.163234Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330263969914838:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:31.163301Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:31.391077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:31.391150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:31.423521Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:31.458743Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:31.459154Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330263969914811:2081] 1767494311161359 != 1767494311161362 2026-01-04T02:38:31.463027Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:31.519794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:31.519831Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:31.519838Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-04T02:38:31.519974Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:31.694208Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:31.892935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:31.935208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.080302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.176542Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:32.229421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.284757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.317320Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330276854818576:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:34.317469Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:34.317827Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330276854818586:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:34.317880Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:34.599093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.627696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.655502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.687006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.717347Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.751238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.814565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.857319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.924451Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330276854819454:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:34.924536Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:34.924694Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330276854819459:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:34.924769Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330276854819461:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:34.924815Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:34.929216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:34.939353Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330276854819463:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:35.040173Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330281149786810:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:36.163204Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330263969914838:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:36.163266Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:38:36.420082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:36.453637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:36.492177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:38.221211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnMultiShardTable[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:38:39.896625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:38:39.896708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:39.896745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 
2026-01-04T02:38:39.896778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:38:39.896828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:38:39.896863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:38:39.896926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:39.897011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:38:39.897872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:38:39.898156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:38:39.991003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:39.991067Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:40.004258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:38:40.004444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:38:40.004596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:38:40.012462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:38:40.012666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:38:40.013414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:40.013650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:38:40.016434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:40.016615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:38:40.017738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:40.017796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:40.017956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:38:40.018007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 
72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:40.018053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:38:40.018259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:38:40.157424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.158508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.158632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.158707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.158788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.158869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.158964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.159071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.159159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.159242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.159320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.159416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.159525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.159614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.159680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:41.126147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:38:41.126434Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:889:2845], result# PutObjectResult { ETag: 8ec321cb31fe732aef669066d1d41519 } 2026-01-04T02:38:41.126478Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:889:2845], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-04T02:38:41.126676Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:880:2837] 2026-01-04T02:38:41.126750Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:881:2839], sender# [1:880:2837], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } 2026-01-04T02:38:41.126851Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:888:2844], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-04T02:38:41.135445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:38:41.135568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:38:41.135638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:38:41.135685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:38:41.135756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:38:41.135864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true REQUEST: PUT /data_00.csv HTTP/1.1 HEADERS: Host: localhost:28417 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: B06994D6-A2C7-4DDF-A006-BECABCA6170A amz-sdk-request: attempt=1 content-length: 11 content-md5: bj4KQf2rit2DOGLxvSlUww== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: 
/data_00.csv / / 11 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:38:41.136999Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:881:2839], result# PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 } 2026-01-04T02:38:41.137043Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:881:2839], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-04T02:38:41.137435Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:880:2837], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-04T02:38:41.147641Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:38:41.171126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 722 RawX2: 4294970001 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-04T02:38:41.171250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:38:41.171448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 722 RawX2: 4294970001 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-04T02:38:41.171550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 722 RawX2: 4294970001 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-04T02:38:41.171624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:41.171766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:41.172254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 725 RawX2: 4294970002 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-04T02:38:41.172283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: 
TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-04T02:38:41.172421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 725 RawX2: 4294970002 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-04T02:38:41.172512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 725 RawX2: 4294970002 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10 RowsProcessed: 1 } 2026-01-04T02:38:41.172561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:41.172609Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:41.172645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:38:41.172675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-04T02:38:41.172731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-04T02:38:41.172820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:41.175323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:41.175675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:41.176012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:38:41.176053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:38:41.176150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:38:41.176186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:41.176218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 
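The PUT /data_00.csv request above carries content-md5: bj4KQf2rit2DOGLxvSlUww== and is answered with PutObjectResult { ETag: 6e3e0a41fdab8add833862f1bd2954c3 }; those are the base64 and hex encodings of the same MD5 digest, which is how the S3 mock derives the ETag of a single-part upload. A minimal Python sketch of that relationship follows (the 11-byte payload is a placeholder, since the actual CSV body is not shown in the log):

import base64
import hashlib

# Placeholder 11-byte body; the real data_00.csv contents are not visible in this log.
body = b"a,1\nb,2\nc,3"

digest = hashlib.md5(body).digest()
content_md5 = base64.b64encode(digest).decode()  # value sent in the Content-MD5 request header
etag = digest.hex()                              # value the mock reports back as the ETag

# The header and the ETag are two encodings of one and the same digest.
assert base64.b64decode(content_md5).hex() == etag
print(content_md5, etag)
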
2026-01-04T02:38:41.176255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:41.176302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:38:41.176352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:775:2742] message: TxId: 102 2026-01-04T02:38:41.176387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:38:41.176441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:38:41.176466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:38:41.176596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:38:41.178189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:38:41.178233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:862:2821] TestWaitNotification: OK eventTxId 102 |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest >> TS3WrapperTests::CompleteUnknownUpload [GOOD] >> TS3WrapperTests::AbortUnknownUpload [GOOD] |90.1%| [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest |90.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetObject >> TS3WrapperTests::MultipartUpload >> TS3WrapperTests::HeadUnknownObject >> TS3WrapperTests::UploadUnknownPart ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::AbortUnknownUpload [GOOD] Test command err: 2026-01-04T02:38:42.531028Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# A42AD4A3-C323-44B0-BB06-B292E8AD788D, request# AbortMultipartUpload { Bucket: TEST Key: key UploadId: uploadId } REQUEST: DELETE /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:20420 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 46DF6680-72A6-4DC4-A422-9DB6466492F7 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 6 / /TEST/key / uploadId=uploadId 2026-01-04T02:38:42.535954Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# A42AD4A3-C323-44B0-BB06-B292E8AD788D, response# |90.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::CompleteUnknownUpload [GOOD] Test command err: 2026-01-04T02:38:42.569350Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 0069A00A-20FE-401A-AE9B-DCACE34EF0F2, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: uploadId MultipartUpload: { Parts: [ETag] } } REQUEST: POST /TEST/key?uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:8557 Accept: */* Connection: Upgrade, HTTP2-Settings 
Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1A55E903-0BA7-4770-965B-3C4249E74FB4 amz-sdk-request: attempt=1 content-length: 207 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=uploadId 2026-01-04T02:38:42.575058Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 0069A00A-20FE-401A-AE9B-DCACE34EF0F2, response# >> TS3WrapperTests::UploadUnknownPart [GOOD] >> TS3WrapperTests::PutObject >> TS3WrapperTests::GetObject [GOOD] >> TS3WrapperTests::MultipartUpload [GOOD] >> TS3WrapperTests::HeadUnknownObject [GOOD] |90.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::PutObject [GOOD] >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-false ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::MultipartUpload [GOOD] Test command err: 2026-01-04T02:38:43.060913Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 64121ECA-792C-46D6-AEA2-363F69A1D81B, request# CreateMultipartUpload { Bucket: TEST Key: key } REQUEST: POST /TEST/key?uploads HTTP/1.1 HEADERS: Host: localhost:7640 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 40541A7C-BF90-4A0E-BB05-ADB21D29E31A amz-sdk-request: attempt=1 content-length: 0 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 x-amz-storage-class: STANDARD S3_MOCK::HttpServeAction: 4 / /TEST/key / uploads= 2026-01-04T02:38:43.065958Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 64121ECA-792C-46D6-AEA2-363F69A1D81B, response# CreateMultipartUploadResult { Bucket: Key: TEST/key UploadId: 1 } 2026-01-04T02:38:43.066307Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# A4D0CC80-9D3E-4D8C-8E1A-C2755DADCD4E, request# UploadPart { Bucket: TEST Key: key UploadId: 1 PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:7640 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 948A765D-6873-47F9-9052-9FA4E2324D57 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=1 / 4 2026-01-04T02:38:43.068765Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# A4D0CC80-9D3E-4D8C-8E1A-C2755DADCD4E, response# UploadPartResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-01-04T02:38:43.069135Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 22C9E67D-6FA4-4EFC-890C-7577E15C90E7, request# CompleteMultipartUpload { Bucket: TEST Key: key UploadId: 1 MultipartUpload: { Parts: [841a2d689ad86bd1611447453c22c6fc] } } REQUEST: POST /TEST/key?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:7640 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 41D83B17-FAC7-4A68-9DB6-B42E79C72A75 amz-sdk-request: attempt=1 content-length: 235 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /TEST/key / uploadId=1 2026-01-04T02:38:43.071871Z node 1 
:S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 22C9E67D-6FA4-4EFC-890C-7577E15C90E7, response# CompleteMultipartUploadResult { Bucket: Key: TEST/key ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-01-04T02:38:43.072221Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 59A2B839-D092-4B8D-A1EE-0BE405AE33BE, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:7640 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 3311F328-9ED7-43FE-8FD8-069A25EFA9F6 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2026-01-04T02:38:43.074793Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 59A2B839-D092-4B8D-A1EE-0BE405AE33BE, response# GetObjectResult { } |90.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadObject ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::GetObject [GOOD] Test command err: 2026-01-04T02:38:43.072014Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 85544C54-CB86-4D50-8191-0B5BDFBFABB5, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:29882 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A18465B7-FA19-461E-BDFB-B0BC1C9F1AA1 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2026-01-04T02:38:43.076341Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 85544C54-CB86-4D50-8191-0B5BDFBFABB5, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-01-04T02:38:43.076656Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 8BA4B148-316F-4706-A45D-548E88B5990F, request# GetObject { Bucket: TEST Key: key Range: bytes=0-3 } REQUEST: GET /TEST/key HTTP/1.1 HEADERS: Host: localhost:29882 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BF47DC4F-D1A8-49F0-8A41-08ACC92609D4 amz-sdk-request: attempt=1 content-type: application/xml range: bytes=0-3 user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2026-01-04T02:38:43.079282Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 8BA4B148-316F-4706-A45D-548E88B5990F, response# GetObjectResult { } ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadUnknownObject [GOOD] Test command err: 2026-01-04T02:38:43.102010Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 4EEB3675-9933-47AE-B41B-549D2FC44EE2, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:27925 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: A5D3BFDB-7159-4BEA-9C18-4EEAF84D33AA amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 
2026-01-04T02:38:43.106203Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 4EEB3675-9933-47AE-B41B-549D2FC44EE2, response# No response body. |90.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TDatabaseResolverTests::PostgreSQL >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] >> TDatabaseResolverTests::DataStreams_Dedicated >> TDatabaseResolverTests::Greenplum_MasterNode >> TDatabaseResolverTests::PostgreSQL [GOOD] >> TDatabaseResolverTests::PostgreSQL_PermissionDenied >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError >> TDatabaseResolverTests::DataStreams_Dedicated [GOOD] >> TDatabaseResolverTests::ClickHouse_PermissionDenied ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::UploadUnknownPart [GOOD] Test command err: 2026-01-04T02:38:43.083829Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 7BED4BE0-F0A6-43AF-9F32-A21AA6B1A517, request# UploadPart { Bucket: TEST Key: key UploadId: uploadId PartNumber: 1 } REQUEST: PUT /TEST/key?partNumber=1&uploadId=uploadId HTTP/1.1 HEADERS: Host: localhost:61548 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 63CE46D0-D30E-4953-AF53-E00DCF6C22CB amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /TEST/key / partNumber=1&uploadId=uploadId / 4 2026-01-04T02:38:43.088716Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 7BED4BE0-F0A6-43AF-9F32-A21AA6B1A517, response# |90.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |90.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest >> TDatabaseResolverTests::Greenplum_MasterNode [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied >> TDatabaseResolverTests::ClickHouseNative >> TS3WrapperTests::HeadObject [GOOD] >> TDatabaseResolverTests::Ydb_Dedicated >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] >> TDatabaseResolverTests::Ydb_Serverless_Timeout >> TDatabaseResolverTests::ClickHouseNative [GOOD] >> TDatabaseResolverTests::ClickHouseHttp [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::PutObject [GOOD] Test command err: 2026-01-04T02:38:43.390934Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# 89C29A19-A6F1-49C0-9E47-735B43F843A2, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:17836 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 354390C4-0A03-4E91-9092-107D85272AD8 amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2026-01-04T02:38:43.396515Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# 89C29A19-A6F1-49C0-9E47-735B43F843A2, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } |90.1%| [TS] {BAZEL_UPLOAD} 
ydb/core/wrappers/ut/unittest >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] >> KqpBatchDelete::Large_1 [GOOD] >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] >> TDatabaseResolverTests::DataStreams_Serverless >> TDatabaseResolverTests::MySQL >> TDatabaseResolverTests::DataStreams_Serverless [GOOD] >> TDatabaseResolverTests::DataStreams_PermissionDenied >> TDatabaseResolverTests::MySQL [GOOD] >> TDatabaseResolverTests::MySQL_PermissionDenied ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ResolveTwoDataStreamsFirstError [GOOD] Test command err: 2026-01-04T02:38:43.965765Z node 1 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgb1 via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgb1': Status: 404 Response body: {"message":"Database not found"} |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/wrappers/ut/unittest >> TS3WrapperTests::HeadObject [GOOD] Test command err: 2026-01-04T02:38:43.830977Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# B055CEC2-67C4-4DBC-8A79-0ADF56423D07, request# PutObject { Bucket: TEST Key: key } REQUEST: PUT /TEST/key HTTP/1.1 HEADERS: Host: localhost:64407 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 09391460-4EBD-4142-8D0D-A6735E48A8DB amz-sdk-request: attempt=1 content-length: 4 content-md5: hBotaJrYa9FhFEdFPCLG/A== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /TEST/key / / 4 2026-01-04T02:38:43.839664Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# B055CEC2-67C4-4DBC-8A79-0ADF56423D07, response# PutObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc } 2026-01-04T02:38:43.840079Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:75: Request: uuid# ECE4C620-70F1-4B8B-9535-7CA82BE2041B, request# HeadObject { Bucket: TEST Key: key } REQUEST: HEAD /TEST/key HTTP/1.1 HEADERS: Host: localhost:64407 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: FD4F3ECD-6C06-4504-BC4E-170C243EB893 amz-sdk-request: attempt=1 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeRead: /TEST/key / 4 2026-01-04T02:38:43.843128Z node 1 :S3_WRAPPER NOTICE: s3_storage.h:63: Response: uuid# ECE4C620-70F1-4B8B-9535-7CA82BE2041B, response# HeadObjectResult { ETag: 841a2d689ad86bd1611447453c22c6fc ContentLength: 4 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::PostgreSQL_PermissionDenied [GOOD] Test command err: 2026-01-04T02:38:44.036931Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed PostgreSQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url 
'/managed-postgresql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-postgresql.viewer`. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouse_PermissionDenied [GOOD] Test command err: 2026-01-04T02:38:44.036967Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed ClickHouse database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-clickhouse/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. Please check that your service account has role `managed-clickhouse.viewer`. ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Greenplum_PermissionDenied [GOOD] Test command err: 2026-01-04T02:38:44.077683Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Greenplum database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-greenplum/v1/clusters/etn021us5r9rhld1vgbh/master-hosts': you have no permission to resolve database id into database endpoint. |90.1%| [TS] {BAZEL_UPLOAD} ydb/core/wrappers/ut/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::ClickHouseHttp [GOOD] |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless_Timeout [GOOD] Test command err: 2026-01-04T02:38:44.135959Z node 1 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed Ydb database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': Connection timeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootHotTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; 
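The TS3WrapperTests::MultipartUpload output earlier in this log walks the full CreateMultipartUpload, UploadPart, CompleteMultipartUpload, ranged GetObject sequence against the S3 mock. A rough boto3 equivalent of that sequence is sketched below; the endpoint, credentials and 4-byte body are placeholders (the mock listens on a different port each run), and the sketch is not part of the test harness:

import boto3  # assumed to be available in the environment

s3 = boto3.client(
    "s3",
    endpoint_url="http://localhost:7640",   # placeholder: the mock's port changes per run
    region_name="us-east-1",
    aws_access_key_id="fake",
    aws_secret_access_key="fake",
)

mpu = s3.create_multipart_upload(Bucket="TEST", Key="key")
part = s3.upload_part(
    Bucket="TEST", Key="key",
    UploadId=mpu["UploadId"], PartNumber=1,
    Body=b"abcd",                            # 4 bytes, matching the content-length seen in the test
)
s3.complete_multipart_upload(
    Bucket="TEST", Key="key",
    UploadId=mpu["UploadId"],
    MultipartUpload={"Parts": [{"PartNumber": 1, "ETag": part["ETag"]}]},
)
obj = s3.get_object(Bucket="TEST", Key="key", Range="bytes=0-3")
print(obj["Body"].read())
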
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=167494858.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494858.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494858.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167494858.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494858.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494858.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=167494858.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493658.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147494858.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147494858.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493658.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147493658.000000s;Name=tier1;Codec=zstd};};TTL={Column=timestamp;EvictAfter=147493658.000000s;Name=;Codec=}; 2026-01-04T02:37:38.897247Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:38.926905Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:38.927139Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:38.934024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:38.934250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:38.934460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:38.934568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:38.934670Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:38.934805Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:38.934912Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:38.935024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 
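Each WaitEmptyAfter/Tiers/TTL line at the top of this test output is one tiering spec that the RebootHotTiersAfterTtl scenario cycles through. A small sketch that parses one of those debug strings into structured records follows; the spec string is copied from a line above, and the parsing helper is illustrative, not part of the test harness:

import re

spec = ("WaitEmptyAfter=0;"
        "Tiers={{Column=timestamp;EvictAfter=147493658.000000s;Name=tier0;Codec=};}"
        "{{Column=timestamp;EvictAfter=147494858.000000s;Name=tier1;Codec=zstd};};"
        "TTL={Column=timestamp;EvictAfter=147494858.000000s;Name=;Codec=};")

def parse_block(body: str) -> dict:
    """Split 'k=v;k=v;...' into a dict; empty values stay empty strings."""
    fields = {}
    for item in body.split(";"):
        if "=" in item:
            key, value = item.split("=", 1)
            fields[key] = value
    return fields

tiers = [parse_block(m) for m in re.findall(r"\{\{(.*?)\};\}", spec)]
ttl = parse_block(re.search(r"TTL=\{(.*?)\}", spec).group(1))
print(tiers)  # [{'Column': 'timestamp', 'EvictAfter': '147493658.000000s', 'Name': 'tier0', 'Codec': ''}, ...]
print(ttl)    # {'Column': 'timestamp', 'EvictAfter': '147494858.000000s', 'Name': '', 'Codec': ''}
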
2026-01-04T02:37:38.935140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:38.935241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:38.935338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:38.935439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:38.935546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:38.963063Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:38.963217Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:38.963271Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:38.963438Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:38.963632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:38.963712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:38.963753Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:38.963844Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:38.963903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:38.963960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:38.963989Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:38.964158Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:38.964233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:38.964274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:38.964314Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:38.964407Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:38.964458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:38.964498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:38.964530Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:38.964576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:38.964615Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:38.964642Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:38.964687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:38.964725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:38.964751Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:38.964924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:38.964994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:38.965030Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:38.965161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:38.965209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:3 ... te_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=98; 2026-01-04T02:38:43.353931Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=6547; 2026-01-04T02:38:43.353960Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=6631; 2026-01-04T02:38:43.353998Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=7; 2026-01-04T02:38:43.354062Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=32; 2026-01-04T02:38:43.354095Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=7096; 2026-01-04T02:38:43.354211Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=73; 2026-01-04T02:38:43.354314Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=63; 2026-01-04T02:38:43.354445Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=87; 2026-01-04T02:38:43.354574Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=84; 2026-01-04T02:38:43.358078Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=3448; 2026-01-04T02:38:43.362312Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=4158; 2026-01-04T02:38:43.362391Z node 1 :TX_COLUMNSHARD 
INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2026-01-04T02:38:43.362438Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2026-01-04T02:38:43.362478Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-04T02:38:43.362559Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=47; 2026-01-04T02:38:43.362605Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-04T02:38:43.362704Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=67; 2026-01-04T02:38:43.362748Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-04T02:38:43.362825Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=44; 2026-01-04T02:38:43.362917Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=52; 2026-01-04T02:38:43.363286Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=330; 2026-01-04T02:38:43.363329Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=23508; 2026-01-04T02:38:43.363445Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=29251936;raw_bytes=43173354;count=6;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:38:43.363541Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2007:3827];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:38:43.363589Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2007:3827];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:38:43.363648Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2007:3827];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:38:43.384210Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2007:3827];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-04T02:38:43.384349Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:38:43.384446Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=3; 2026-01-04T02:38:43.384507Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494008722;tx_id=18446744073709551615;;current_snapshot_ts=1767494260201; 2026-01-04T02:38:43.384545Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:38:43.384589Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:43.384620Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:43.384695Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:43.384888Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.100000s; 2026-01-04T02:38:43.385003Z node 1 :TX_TIERING WARN: log.cpp:841: TEST_STEP=4;fline=fetcher.h:165;error=event_undelivered_to_scheme_cache;reason=ActorUnknown; 2026-01-04T02:38:43.386554Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2007:3827];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:38:43.386810Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2007:3827];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:38:43.386855Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2007:3827];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:38:43.386942Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2007:3827];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=3; 2026-01-04T02:38:43.387001Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2007:3827];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494008722;tx_id=18446744073709551615;;current_snapshot_ts=1767494260201; 2026-01-04T02:38:43.387061Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2007:3827];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:38:43.387117Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2007:3827];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:43.387153Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2007:3827];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:43.387238Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2007:3827];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:43.387615Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2007:3827];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.050000s; 2026-01-04T02:38:43.387670Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2007:3827];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9752224 160000/9752224 160000/9752224 80000/4886744 0/0 >> TDatabaseResolverTests::Ydb_Serverless [GOOD] |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Dedicated [GOOD] |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::DataStreams_PermissionDenied [GOOD] Test command err: 2026-01-04T02:38:44.437711Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed DataStreams database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'ydbc.ydb.cloud.yandex.net:8789', url '/ydbc/cloud-prod/database?databaseId=etn021us5r9rhld1vgbh': you have no permission to resolve database id into database endpoint. 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::MySQL_PermissionDenied [GOOD] Test command err: 2026-01-04T02:38:44.520853Z node 2 :FQ_DATABASE_RESOLVER ERROR: database_resolver.cpp:175: TraceId: traceId ResponseProcessor::Handle(HttpIncomingResponse): error=Error while trying to resolve managed MySQL database with id etn021us5r9rhld1vgbh via HTTP request to: endpoint 'mdb.api.cloud.yandex.net:443', url '/managed-mysql/v1/clusters/etn021us5r9rhld1vgbh/hosts': you have no permission to resolve database id into database endpoint. |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |90.1%| [TA] $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.1%| [TA] {RESULT} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/wrappers/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/actors/ut/unittest >> TDatabaseResolverTests::Ydb_Serverless [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_1 [GOOD] Test command err: Trying to start YDB, gRPC: 22664, MsgBus: 23254 2026-01-04T02:38:21.876276Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330220874326861:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:21.876334Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:22.042271Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:22.042377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:22.091745Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:22.104970Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:22.106042Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:22.107979Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330220874326832:2081] 1767494301874741 != 1767494301874744 2026-01-04T02:38:22.177184Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:22.177210Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:22.177223Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:22.177353Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:22.294008Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:22.497208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:22.545778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:22.638577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:22.781753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:22.843852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:22.955592Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:24.573693Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330233759230596:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:24.573836Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:24.574116Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330233759230606:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:24.574161Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:24.823319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:24.850132Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:24.876725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:24.906992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:24.935091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:24.966796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:24.998428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.043945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:25.104411Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330238054198770:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:25.104470Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:25.104475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330238054198775:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:25.104646Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330238054198777:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:25.104680Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:25.107514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:25.116317Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330238054198778:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:25.217233Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330238054198830:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:26.294059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:26.877121Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330220874326861:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:26.877668Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migratio ... B, gRPC: 23613, MsgBus: 19819 2026-01-04T02:38:37.281316Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591330287418921593:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:37.281379Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:37.293454Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:37.360276Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:37.361219Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591330287418921566:2081] 1767494317280497 != 1767494317280500 2026-01-04T02:38:37.374428Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:37.374502Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:37.401194Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:37.438489Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:37.438518Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:37.438526Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:37.438613Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:37.519815Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:37.819949Z node 3 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:37.827797Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:37.876444Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:38.009757Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:38.069910Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:38.287703Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:40.557429Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591330300303825317:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:40.557529Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:40.557764Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591330300303825327:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:40.557824Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:40.612511Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:40.641387Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:40.670116Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:40.698655Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:40.727687Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:40.756275Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:40.789797Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:40.835533Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:40.919597Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591330300303826194:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:40.919708Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:40.919751Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591330300303826199:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:40.919973Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591330300303826201:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:40.920055Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:40.923384Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:40.933537Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591330300303826203:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:41.029276Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591330304598793550:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:42.281795Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591330287418921593:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:42.281888Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:38:42.548314Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/actors/ut/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst >> ReadOnlyVDisk::TestGarbageCollect >> ReadOnlyVDisk::TestStorageLoad >> ReadOnlyVDisk::TestSync |90.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestReads |90.1%| [TA] $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.2%| [TA] {RESULT} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ReadOnlyVDisk::TestDiscover |90.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/actors/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-false [GOOD] |90.1%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestWrites >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood >> LdapAuthProviderTest::LdapServerIsUnavailable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::DeclareAndLs-EnableRealSystemViewPaths-false [GOOD] Test command err: 2026-01-04T02:38:43.670510Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330315594886268:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:43.670989Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:43.879037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:43.879133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:43.881445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:43.918361Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:38:43.958810Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:43.959899Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330315594886164:2081] 1767494323666564 != 1767494323666567 2026-01-04T02:38:44.141731Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591330315594886426:2105] Handle TEvNavigate describe path dc-1 2026-01-04T02:38:44.141846Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591330319889854004:2263] HANDLE EvNavigateScheme dc-1 2026-01-04T02:38:44.141999Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591330315594886444:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown 
Kind: KindUnknown DomainInfo }] } 2026-01-04T02:38:44.142162Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591330315594886617:2206][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591330315594886444:2113], cookie# 1 2026-01-04T02:38:44.144139Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591330315594886644:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591330315594886641:2206], cookie# 1 2026-01-04T02:38:44.144192Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591330315594886645:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591330315594886642:2206], cookie# 1 2026-01-04T02:38:44.144205Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591330315594886646:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591330315594886643:2206], cookie# 1 2026-01-04T02:38:44.144229Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591330315594886135:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591330315594886645:2206], cookie# 1 2026-01-04T02:38:44.144235Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591330315594886132:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591330315594886644:2206], cookie# 1 2026-01-04T02:38:44.144293Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591330315594886645:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591330315594886135:2052], cookie# 1 2026-01-04T02:38:44.144312Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591330315594886644:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591330315594886132:2049], cookie# 1 2026-01-04T02:38:44.144343Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591330315594886138:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591330315594886646:2206], cookie# 1 2026-01-04T02:38:44.144390Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591330315594886617:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591330315594886642:2206], cookie# 1 2026-01-04T02:38:44.144502Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591330315594886617:2206][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:38:44.144524Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591330315594886617:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591330315594886641:2206], cookie# 1 2026-01-04T02:38:44.144555Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591330315594886617:2206][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:38:44.144595Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591330315594886646:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591330315594886138:2055], cookie# 1 2026-01-04T02:38:44.144643Z node 1 
:SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591330315594886617:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591330315594886643:2206], cookie# 1 2026-01-04T02:38:44.144664Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591330315594886617:2206][/dc-1] Sync cookie mismatch: sender# [1:7591330315594886643:2206], cookie# 1, current cookie# 0 2026-01-04T02:38:44.144678Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591330315594886444:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:38:44.151986Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591330315594886444:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591330315594886617:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:38:44.152159Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591330315594886444:2113], cacheItem# { Subscriber: { Subscriber: [1:7591330315594886617:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:38:44.154790Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591330319889854005:2264], recipient# [1:7591330319889854004:2263], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:38:44.154909Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591330319889854004:2263] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:38:44.178459Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591330315594886444:2113], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/script_execution_leases TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/result_sets TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:38:44.178537Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7591330315594886444:2113], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480 2026-01-04T02:38:44.178610Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7591330315594886444:2113], path# /dc-1/.metadata/script_execution_leases, domainOwnerId# 72057594046644480 2026-01-04T02:38:44.178638Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7591330315594886444:2113], path# /dc-1/.metadata/result_sets, domainOwnerId# 72057594046644480 2026-01-04T02:38:44.178855Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7591330319889854006:2265][/dc-1/.metadata/script_executions] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:38:44.179657Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591330315594886138:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/script_executions DomainOwnerId: 72057594046644480 }: sender# [1:7591330319889854014:2265] 2026-01-04T02:38:44.179679Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591330315594886138:2055] Upsert description: path# /dc-1/.metadata/script_executions 2026-01-04T02:38:44.179759Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591330315594886138:2055] Subscribe: subscriber# [1:7591330319889854014:2265], path# /dc-1/.metadata/script_executions, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:38:44.179821Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7591330319889854014:2265][/dc-1/.metadata/script_executions] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/script_executions Version: 0 }: sender# [1:7591330315594886138:2055] 2026-01-04T02:38:44.179859Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591330319889854006:2265][/dc-1/.metadata/script_executions] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/script_executions Version: 0 }: sender# [1:7591330319889854011:2265] 2026-01-04T02 ... 
fy# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 } 2026-01-04T02:38:44.292669Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591330315594886444:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591330319889854073:2306] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1767494324330 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:38:44.292722Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591330315594886444:2113], cacheItem# { Subscriber: { Subscriber: [1:7591330319889854073:2306] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 9 TableKind: 0 Created: 1 CreateStep: 1767494324330 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:38:44.292814Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591330319889854080:2307], recipient# [1:7591330319889854072:2305], result# { ErrorCount: 1 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0 TableId: [72057594046644480:2:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: RedirectLookupError Kind: KindExtSubdomain DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:38:44.292846Z node 1 :TX_PROXY INFO: describe.cpp:354: Actor# [1:7591330319889854072:2305] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 2026-01-04T02:38:44.308248Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591330315594886426:2105] Handle TEvNavigate describe path /dc-1 2026-01-04T02:38:44.308310Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591330319889854082:2309] HANDLE EvNavigateScheme /dc-1 2026-01-04T02:38:44.308419Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591330315594886444:2113], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:38:44.308513Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591330315594886617:2206][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591330315594886444:2113], cookie# 4 2026-01-04T02:38:44.308568Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591330315594886644:2206][/dc-1] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591330315594886641:2206], cookie# 4 2026-01-04T02:38:44.308607Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591330315594886645:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591330315594886642:2206], cookie# 4 2026-01-04T02:38:44.308631Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591330315594886132:2049] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591330315594886644:2206], cookie# 4 2026-01-04T02:38:44.308676Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591330315594886646:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591330315594886643:2206], cookie# 4 2026-01-04T02:38:44.308701Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591330315594886135:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591330315594886645:2206], cookie# 4 2026-01-04T02:38:44.308723Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591330315594886138:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591330315594886646:2206], cookie# 4 2026-01-04T02:38:44.308725Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591330315594886644:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7591330315594886132:2049], cookie# 4 2026-01-04T02:38:44.308769Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591330315594886645:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7591330315594886135:2052], cookie# 4 2026-01-04T02:38:44.308791Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591330315594886646:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7591330315594886138:2055], cookie# 4 2026-01-04T02:38:44.308816Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591330315594886617:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7591330315594886641:2206], cookie# 4 2026-01-04T02:38:44.308835Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591330315594886617:2206][/dc-1] Sync is in progress: cookie# 4, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:38:44.308858Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591330315594886617:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7591330315594886642:2206], cookie# 4 2026-01-04T02:38:44.308881Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591330315594886617:2206][/dc-1] Sync is done in the ring group: cookie# 4, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:38:44.308927Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591330315594886617:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 5 Partial: 0 Cluster State: { } }: sender# [1:7591330315594886643:2206], cookie# 4 2026-01-04T02:38:44.308946Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591330315594886617:2206][/dc-1] Sync cookie mismatch: sender# 
[1:7591330315594886643:2206], cookie# 4, current cookie# 0 2026-01-04T02:38:44.309001Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591330315594886444:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:38:44.309097Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591330315594886444:2113], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591330315594886617:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767494324316 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:38:44.309187Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591330315594886444:2113], cacheItem# { Subscriber: { Subscriber: [1:7591330315594886617:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 4 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 1767494324316 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 4 IsSync: true Partial: 0 } 2026-01-04T02:38:44.309351Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591330319889854083:2310], recipient# [1:7591330319889854082:2309], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:38:44.309401Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591330319889854082:2309] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:38:44.309472Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591330319889854082:2309] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:38:44.310121Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591330319889854082:2309] Handle TEvDescribeSchemeResult Forward to# [1:7591330319889854081:2308] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 63 Record# Status: StatusSuccess Path: "/dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 1767494324316 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } StoragePools { Name: "/dc-1:test" Kind: "test" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 >> TSchemeShardTopicSplitMergeTest::SetBoundWithWrongPartition |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithoutLoginPlaceholders [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnames [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV4List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromIpV6List [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesLdapsScheme [GOOD] >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterNoEviction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494862.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494862.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167494862.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494862.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494862.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167494862.000000s;Name=;Codec=}; 
WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493662.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147494862.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147494862.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493662.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147493662.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147493662.000000s;Name=;Codec=}; 2026-01-04T02:37:42.856988Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:42.889430Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:42.889702Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:42.897342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:42.897606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:42.897871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:42.898014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:42.898149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:42.898281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:42.898393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:42.898525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:42.898664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:42.898792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:42.898910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:42.899019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:42.899133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:42.927891Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:42.928084Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:42.928172Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:42.928358Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:42.928543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:42.928627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:42.928680Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:42.928781Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:42.928847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:42.928892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:42.928923Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:42.929103Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:42.929185Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:42.929246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-01-04T02:37:42.929287Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:42.929391Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:42.929464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:42.929516Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:42.929551Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:42.929605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:42.929650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:42.929689Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:42.929737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:42.929784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:42.929837Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:42.930045Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:42.930101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:42.930133Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:42.930280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:42.930339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:42.930372Z 
node 1 :TX_ ... ; 2026-01-04T02:38:46.276493Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=100; 2026-01-04T02:38:46.276537Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=9064; 2026-01-04T02:38:46.276587Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=9190; 2026-01-04T02:38:46.276652Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=12; 2026-01-04T02:38:46.276739Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=40; 2026-01-04T02:38:46.276778Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9814; 2026-01-04T02:38:46.276934Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=97; 2026-01-04T02:38:46.277050Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=70; 2026-01-04T02:38:46.277183Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=87; 2026-01-04T02:38:46.277295Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=72; 2026-01-04T02:38:46.282951Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5585; 2026-01-04T02:38:46.287879Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=4811; 2026-01-04T02:38:46.288011Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2026-01-04T02:38:46.288066Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2026-01-04T02:38:46.288109Z node 1 :TX_COLUMNSHARD 
INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-04T02:38:46.288186Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=41; 2026-01-04T02:38:46.288231Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-04T02:38:46.288312Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=49; 2026-01-04T02:38:46.288385Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=9; 2026-01-04T02:38:46.288456Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=35; 2026-01-04T02:38:46.288564Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=61; 2026-01-04T02:38:46.288901Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=289; 2026-01-04T02:38:46.288943Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=31815; 2026-01-04T02:38:46.289075Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=29251936;raw_bytes=43173354;count=6;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:38:46.289187Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:38:46.289242Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:38:46.289307Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:38:46.310699Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-04T02:38:46.310867Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:38:46.310959Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2026-01-04T02:38:46.311029Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494012681;tx_id=18446744073709551615;;current_snapshot_ts=1767494264160; 2026-01-04T02:38:46.311102Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:38:46.311157Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:46.311199Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:46.311289Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:46.311522Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.099000s; 2026-01-04T02:38:46.312774Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:38:46.313733Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:38:46.313824Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:38:46.313935Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=4; 2026-01-04T02:38:46.314004Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494012681;tx_id=18446744073709551615;;current_snapshot_ts=1767494264160; 2026-01-04T02:38:46.314055Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=4;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:38:46.314106Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:46.314147Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:38:46.314240Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:38:46.314551Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.018000s; 2026-01-04T02:38:46.314608Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2031:3851];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 240000/14617704 160000/9752224 160000/9752224 80000/4886744 0/0 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> TSchemeShardTopicSplitMergeTest::Boot |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> TLdapUtilsUrisCreatorTest::CreateUrisFromHostnamesUnknownScheme [GOOD] |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] >> TSchemeShardTopicSplitMergeTest::SetOnDisabledSplitMerge >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::MultiShardTableTwoIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 3683, MsgBus: 3558 2026-01-04T02:35:15.451117Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329423848082225:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:15.451179Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:15.731371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:15.731477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:15.761077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:15.799484Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:15.917949Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-04T02:35:15.944469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:15.944489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:15.944498Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:15.944609Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:16.095623Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:16.420359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:16.426717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:35:16.452680Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:16.490343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:16.635696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:16.791133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:16.866107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:18.878549Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329436732985750:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:18.878693Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:18.879182Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329436732985760:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:18.879261Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.222733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.255359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.287057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.327906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.360138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.393536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.429252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.479706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.550586Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329441027953933:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.550677Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.550701Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329441027953938:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.550880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329441027953940:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.550960Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.554674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:19.566445Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329441027953941:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:35:19.657725Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329441027953993:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:35:20.451528Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329423848082225:2249];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:20.451634Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:35:21.228326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689 ... etect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:46.715265Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:46.787216Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:46.788904Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330069580211449:2081] 1767494266702279 != 1767494266702282 2026-01-04T02:37:46.828630Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:46.828712Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:46.831541Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:46.876419Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:46.876440Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:46.876449Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:46.876549Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:46.881251Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:47.269483Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:37:47.278036Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:47.333886Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:47.500982Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:47.560323Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:47.725354Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:50.075045Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330086760082502:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:50.075117Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:50.075305Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330086760082512:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:50.075361Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:50.142510Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:50.170802Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:50.234238Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:50.267414Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:50.299835Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:50.329838Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:50.360614Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:50.403958Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:50.476982Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330086760083378:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:50.477044Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330086760083383:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:50.477060Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:50.477243Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330086760083385:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:50.477305Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:50.480859Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:37:50.490565Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330086760083386:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:37:50.556921Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330086760083438:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:37:51.703674Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330069580211488:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:51.703760Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:37:52.068360Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.780844Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:38:01.780883Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS finished with status: SUCCESS |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest >> TSchemeShardTopicSplitMergeTest::SetBoundWithWrongPartition [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithSplittedPartition >> TSchemeShardTopicSplitMergeTest::Boot [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition >> TxUsage::Sinks_Oltp_WriteToTopics_4_Query [GOOD] >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad >> HttpRequest::AnalyzeServerless >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdCompactionSmoke [GOOD] Test command err: 2026-01-04T02:37:21.967334Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:21.999290Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:21.999544Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:22.007199Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:22.007461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 
2026-01-04T02:37:22.007693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:22.007829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:22.007944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:22.008044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:22.008138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:22.008266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:22.008390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:22.008492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:22.008597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:22.008727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:22.008859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:22.037531Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:22.037676Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:22.037729Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:22.037927Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:22.038066Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:22.038148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:22.038203Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:22.038332Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:22.038406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:22.038454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:22.038488Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:22.038684Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:22.038756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:22.038797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:22.038830Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:22.038915Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:22.038975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:22.039035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:22.039065Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:22.039111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:22.039155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:22.039185Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:22.039228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:22.039260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:22.039290Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:22.039473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:22.039523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:22.039580Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:22.039739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:22.039787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:22.039815Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:22.039859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:22.039894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:22.039922Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:22.040008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:37:22.040049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:37:22.040079Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:37:22.040196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:37:22.040234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... mn_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-04T02:38:45.711974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-04T02:38:45.712248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-04T02:38:45.712518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-04T02:38:45.712784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-04T02:38:45.713072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-04T02:38:45.713320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-04T02:38:45.713605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-04T02:38:45.713916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-04T02:38:45.714217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-04T02:38:45.714482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-04T02:38:45.714797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-04T02:38:45.715090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-04T02:38:45.715361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5216,5216;s_splitted=5296,5304;r_splitted=850,852; 2026-01-04T02:38:45.715642Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=7;size=480168;limit=10240;r_count=80000;fline=column_info.h:139;sizes=5208,5208;s_splitted=5312,5296;r_splitted=854,854; 2026-01-04T02:38:45.719317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};entity_id=5;size=320144;limit=10240;r_count=80000;fline=column_info.h:139;sizes=10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10004,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005,10005;s_splitted=10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10144,10208;r_splitted=2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2499,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2500,2516; 2026-01-04T02:38:45.780139Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=26;path_id={internal: 1000000185, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=6817016;count=1255;actions=__DEFAULT,;waiting=1;; 2026-01-04T02:38:46.727707Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6265200;event=data_write_finished;writing_id=7d31470e-e91611f0-b9cbff28-330f1647; 2026-01-04T02:38:46.729505Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=110;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=86;data_size=65;sum=4128;count=95; 2026-01-04T02:38:46.729577Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=110;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=182;data_size=177;sum=8736;count=96;size_of_meta=112; 2026-01-04T02:38:46.729632Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=110;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=262;data_size=257;sum=12576;count=48;size_of_portion=192; 2026-01-04T02:38:46.744591Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:28 Blob count: 1255 2026-01-04T02:38:46.756299Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=110;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=27;operation_id=26; 2026-01-04T02:38:47.831691Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:28 Blob count: 1255 2026-01-04T02:38:47.833347Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=110;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:38:47.934565Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767494247267 at tablet 9437184, mediator 0 2026-01-04T02:38:47.934660Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[112] execute at tablet 9437184 2026-01-04T02:38:47.935698Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=128;fline=abstract.h:88;progress_tx_id=128;lock_id=1;broken=0; 2026-01-04T02:38:47.960774Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[112] complete at tablet 9437184 2026-01-04T02:38:47.960895Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:101;progress_tx_id=128;lock_id=1;broken=0; 2026-01-04T02:38:47.961084Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=128;commit_lock_id=1;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=27;operation_id=26; 2026-01-04T02:38:47.961127Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=128;commit_lock_id=1;fline=manager.cpp:197;event=remove_operation;operation_id=26; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/hot' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/hot' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad >> TSchemeShardTopicSplitMergeTest::SetOnDisabledSplitMerge [GOOD] >> BasicStatistics::ServerlessGlobalIndex >> TSchemeShardTopicSplitMergeTest::CreateTopicWithOnePartition [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowAndSplitNewFromTopicWithOnePartition >> LdapAuthProviderTest::LdapServerIsUnavailable [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyHost ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGetWithMustRestoreFirst [GOOD] Test 
command err: RandomSeed# 6466993928080640662 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2026-01-04T02:38:47.443556Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2026-01-04T02:38:47.448696Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-01-04T02:38:47.453774Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-01-04T02:38:47.456514Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2026-01-04T02:38:47.464832Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2026-01-04T02:38:47.472257Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2026-01-04T02:38:47.477438Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2026-01-04T02:38:47.481721Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 
=== Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2026-01-04T02:38:49.538404Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] 2026-01-04T02:38:49.538548Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] 2026-01-04T02:38:49.538688Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] 2026-01-04T02:38:49.539544Z 1 00h05m30.160512s :BS_PROXY_PUT ERROR: [e2e4000afbc9e564] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 
2181038080 Marker# BPP12 2026-01-04T02:38:49.541439Z 1 00h05m30.160512s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.504 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.505 sample PartId# [1:1:11:0:0:32768:3] QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVPut{ TimestampMs# 0.505 sample PartId# [1:1:11:0:0:32768:2] QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVPut{ TimestampMs# 0.506 sample PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPut{ TimestampMs# 0.506 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.507 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 1.546 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.612 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVPutResult{ TimestampMs# 1.773 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.809 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:4:0] NodeId# 5 } TEvVPutResult{ TimestampMs# 1.839 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2026-01-04T02:38:49.543476Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] 2026-01-04T02:38:49.543898Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] 2026-01-04T02:38:49.545144Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } 
] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2026-01-04T02:38:49.547175Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] 2026-01-04T02:38:49.548038Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] 2026-01-04T02:38:49.548960Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderN ... OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 1 Situations# EUUUUU } { OrderNumber# 2 Situations# UEUUUU } { OrderNumber# 3 Situations# UUSUUU } { OrderNumber# 4 Situations# UUUSUU } { OrderNumber# 5 Situations# UUUUSU } { OrderNumber# 6 Situations# UUUUUS } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:17:0:0:32768:0] 2026-01-04T02:38:49.559560Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] 2026-01-04T02:38:49.559788Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] 2026-01-04T02:38:49.559857Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] TEvPutResult: TEvPutResult {Id# [1:1:17:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:17:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# UEUUUU } { OrderNumber# 2 Situations# UUEUUU } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUSU } { OrderNumber# 5 Situations# UUUUUS } { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:18:0:0:131072:0] 2026-01-04T02:38:49.562093Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in 
read-only Sender# [1:5324:704] 2026-01-04T02:38:49.562304Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] 2026-01-04T02:38:49.562417Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] TEvPutResult: TEvPutResult {Id# [1:1:18:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:18:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 7 Situations# SUUUUU } { OrderNumber# 0 Situations# UEUUUU } { OrderNumber# 1 Situations# UUEUUU } { OrderNumber# 2 Situations# UUUEUU } { OrderNumber# 3 Situations# UUUUSU } { OrderNumber# 4 Situations# UUUUUS } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:19:0:0:32768:0] 2026-01-04T02:38:49.564891Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] 2026-01-04T02:38:49.565117Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] 2026-01-04T02:38:49.565208Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] TEvPutResult: TEvPutResult {Id# [1:1:19:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:19:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 6 Situations# SUUUUU } { OrderNumber# 7 Situations# USUUUU } { OrderNumber# 0 Situations# UUEUUU } { OrderNumber# 1 Situations# UUUEUU } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } { OrderNumber# 4 Situations# UUSUUU } { OrderNumber# 5 Situations# UUUUSU } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:20:0:0:131072:0] 2026-01-04T02:38:49.567628Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] 2026-01-04T02:38:49.567748Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] 2026-01-04T02:38:49.567866Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] TEvPutResult: TEvPutResult {Id# [1:1:20:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:20:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# 
[ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvGet with key [1:1:11:0:0:32768:0] 2026-01-04T02:38:49.573478Z 1 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5325:705] 2026-01-04T02:38:49.573677Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5332:712] 2026-01-04T02:38:49.573745Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5339:719] 2026-01-04T02:38:49.574369Z 1 00h05m30.160512s :BS_PROXY_GET ERROR: [109a34b48a6e0adb] Response# TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} Marker# BPG29 2026-01-04T02:38:49.574747Z 1 00h05m30.160512s :BS_PROXY_GET ERROR: {BPG72@dsproxy_get.cpp:427} Query history GroupId# 2181038080 HandleClass# FastRead History# THistory { Entries# [ TEvVGet{ TimestampMs# 0.452 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVGet{ TimestampMs# 0.453 sample PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVGet{ TimestampMs# 0.453 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVGet{ TimestampMs# 0.454 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 6 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVGet{ TimestampMs# 0.454 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 6 VDiskId# [82000000:1:0:4:0] NodeId# 5 } TEvVGet{ TimestampMs# 0.454 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVGet{ TimestampMs# 0.454 sample PartId# [1:1:11:0:0:32768:2] QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVGet{ TimestampMs# 0.454 sample PartId# [1:1:11:0:0:32768:3] QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVGetResult{ TimestampMs# 2.811 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# OK } TEvVGetResult{ TimestampMs# 3.479 VDiskId# [82000000:1:0:3:0] NodeId# 4 Status# OK } TEvVGetResult{ TimestampMs# 3.529 VDiskId# [82000000:1:0:4:0] NodeId# 5 Status# OK } TEvVGetResult{ 
TimestampMs# 3.564 VDiskId# [82000000:1:0:5:0] NodeId# 6 Status# OK } TEvVGetResult{ TimestampMs# 3.593 VDiskId# [82000000:1:0:6:0] NodeId# 7 Status# OK } TEvVGetResult{ TimestampMs# 3.886 VDiskId# [82000000:1:0:7:0] NodeId# 8 Status# OK } TEvVPut{ TimestampMs# 3.914 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 3.914 sample PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPut{ TimestampMs# 3.914 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVGetResult{ TimestampMs# 4.56 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# OK } TEvVPut{ TimestampMs# 4.57 sample PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVGetResult{ TimestampMs# 4.613 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# OK } TEvVPut{ TimestampMs# 4.618 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 4.648 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } PutAcceleration{ TimestampMs# 4.652 } PutAcceleration{ TimestampMs# 4.676 } TEvVPutResult{ TimestampMs# 4.899 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPutResult{ TimestampMs# 5.115 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } 2026-01-04T02:38:49.574876Z 2 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5332:712] 2026-01-04T02:38:49.574948Z 3 00h05m30.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5339:719] TEvGetResult: TEvGetResult {Status# ERROR ResponseSz# 1 {[1:1:11:0:0:32768:0] ERROR Size# 0 RequestedSize# 32768} ErrorReason# "TStrategyBase saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# PUUUUU } { OrderNumber# 6 Situations# UPUUUU } { OrderNumber# 7 Situations# UUPUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# AAAPAA } { OrderNumber# 4 Situations# AAAAAA } ] "} |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithSplittedPartition [GOOD] >> ReadOnlyVDisk::TestDiscover [GOOD] >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SetOnDisabledSplitMerge [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 
72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:38:48.892980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:38:48.893069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:48.893125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:38:48.893169Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:38:48.893211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:38:48.893243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:38:48.893314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:48.893376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:38:48.894240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:38:48.894514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:38:48.981859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:48.981922Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:48.995825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:38:48.996036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:38:48.996168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:38:49.003124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:38:49.003304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:38:49.003790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:49.004003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:38:49.006454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:49.006612Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:38:49.007599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:49.007653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:49.007768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:38:49.007814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:49.007924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:38:49.008089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:38:49.148726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.149687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.149823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.149911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.149981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.150042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.150102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.150159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.150247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.150342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.150408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.150488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.150558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.150626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.150716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:49.965327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-04T02:38:49.965536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-04T02:38:49.965765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:38:49.965859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:38:49.968417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:38:49.968932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:49.968984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:38:49.969155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:38:49.969357Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:49.969397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-04T02:38:49.969440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 104, 
path id: 39 2026-01-04T02:38:49.970490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:38:49.970553Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-04T02:38:49.970773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:38:49.970812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:38:49.970857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:38:49.970914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:38:49.970953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-04T02:38:49.970996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:38:49.971057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-04T02:38:49.971091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-04T02:38:49.971243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:38:49.971298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-04T02:38:49.971340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-04T02:38:49.971394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-04T02:38:49.972385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:38:49.972483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:38:49.972534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:38:49.972575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-04T02:38:49.972630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:38:49.974134Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:38:49.974222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:38:49.974251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:38:49.974280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-04T02:38:49.974310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:38:49.974387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-04T02:38:49.977281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:38:49.978203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-04T02:38:49.978424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-04T02:38:49.978469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-04T02:38:49.978892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-04T02:38:49.978987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:38:49.979026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:998:2918] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } TestModificationResults wait txId: 106 2026-01-04T02:38:49.983527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" 
} CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: false } } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:38:49.983811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 106:0, at schemeshard: 72057594046678944 2026-01-04T02:38:49.983979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusInvalidParameter, reason: Split and merge operations disabled, at schemeshard: 72057594046678944 2026-01-04T02:38:49.986054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusInvalidParameter Reason: "Split and merge operations disabled" TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:38:49.986298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Split and merge operations disabled, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-04T02:38:49.986598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-04T02:38:49.986657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-04T02:38:49.987134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-04T02:38:49.987222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-04T02:38:49.987276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:1005:2925] TestWaitNotification: OK eventTxId 106 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TColumnShardTestSchema::ColdTiers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestDiscover [GOOD] Test command err: RandomSeed# 2464409812624615649 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 3 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult 
{Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-01-04T02:38:47.503879Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-01-04T02:38:47.777857Z 1 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] 2026-01-04T02:38:47.778928Z 2 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2026-01-04T02:38:48.031453Z 3 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] 2026-01-04T02:38:48.032511Z 1 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] 2026-01-04T02:38:48.033178Z 2 00h02m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] 2026-01-04T02:38:48.033460Z 1 00h02m30.110512s :BS_PROXY_PUT ERROR: [e429a471afaf4071] Result# TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 2026-01-04T02:38:48.035023Z 1 00h02m30.110512s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.468 sample PartId# [1:1:5:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVPut{ TimestampMs# 0.469 sample PartId# [1:1:5:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVPut{ TimestampMs# 0.469 sample PartId# [1:1:5:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.469 sample PartId# [1:1:5:0:0:32768:3] QueryCount# 1 VDiskId# 
[82000000:1:0:4:0] NodeId# 5 } TEvVPut{ TimestampMs# 0.469 sample PartId# [1:1:5:0:0:32768:2] QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVPut{ TimestampMs# 0.47 sample PartId# [1:1:5:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 1.609 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.667 sample PartId# [1:1:5:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 2.297 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 2.339 sample PartId# [1:1:5:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPutResult{ TimestampMs# 2.533 VDiskId# [82000000:1:0:7:0] NodeId# 8 Status# OK } TEvVPutResult{ TimestampMs# 2.549 VDiskId# [82000000:1:0:6:0] NodeId# 7 Status# OK } TEvVPutResult{ TimestampMs# 2.563 VDiskId# [82000000:1:0:5:0] NodeId# 6 Status# OK } TEvVPutResult{ TimestampMs# 2.579 VDiskId# [82000000:1:0:4:0] NodeId# 5 Status# OK } TEvVPutResult{ TimestampMs# 2.624 VDiskId# [82000000:1:0:3:0] NodeId# 4 Status# OK } TEvVPutResult{ TimestampMs# 2.785 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:5:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 2 Situations# EUUUUU } { OrderNumber# 3 Situations# UPUUUU } { OrderNumber# 4 Situations# UUPUUU } { OrderNumber# 5 Situations# UUUPUU } { OrderNumber# 6 Situations# UUUUPU } { OrderNumber# 7 Situations# UUUUUP } { OrderNumber# 0 Situations# EUUUUU } { OrderNumber# 1 Situations# EUUUUU } ] " ApproximateFreeSpaceShare# 0.999988} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult 
{Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 6 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking 
SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Putting VDisk #4 to normal === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Putting VDisk #5 to normal === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Putting VDisk #6 to normal === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::GrowFromTopicWithSplittedPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:38:47.763035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:38:47.763115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:47.763143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:38:47.763176Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:38:47.763210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:38:47.763233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:38:47.763285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:47.763332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:38:47.764092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:38:47.764396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:38:47.837463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:47.837514Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:47.851148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:38:47.851365Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:38:47.851530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:38:47.859767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:38:47.860029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:38:47.860820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:47.861130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:38:47.864017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:47.864216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:38:47.865309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:47.865368Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:47.865484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:38:47.865531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:47.865647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:38:47.865835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:38:47.983568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:47.984606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:47.984737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:47.984828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:47.984870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:47.984912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:47.984952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:47.984995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:47.985065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:47.985137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:47.985186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:47.985229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:47.985283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:47.985353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:47.985407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ionPlan, step: 300, at tablet: 72057594046678944 2026-01-04T02:38:50.387085Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 107:0 can't persist state: ShardsInProgress is not empty, remain: 1 2026-01-04T02:38:50.406252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 107, tablet: 72075186233409548, partId: 0 2026-01-04T02:38:50.406461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 107:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 107 Step: 300 2026-01-04T02:38:50.406555Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 107:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 107 Step: 300 2026-01-04T02:38:50.406620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 107:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.406669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 107:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-04T02:38:50.406850Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 107:0 128 -> 240 2026-01-04T02:38:50.407058Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:38:50.411508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.411952Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:50.412016Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:38:50.412349Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:50.412408Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 107, path id: 39 2026-01-04T02:38:50.413034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.413094Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 107:0 ProgressState 2026-01-04T02:38:50.413218Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-04T02:38:50.413262Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-04T02:38:50.413317Z 
node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-04T02:38:50.413358Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-04T02:38:50.413405Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: false 2026-01-04T02:38:50.413453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-04T02:38:50.413500Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-01-04T02:38:50.413541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:0 2026-01-04T02:38:50.413716Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:38:50.413764Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 107, publications: 1, subscribers: 1 2026-01-04T02:38:50.413827Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 107, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2026-01-04T02:38:50.414574Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 107 2026-01-04T02:38:50.414692Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 107 2026-01-04T02:38:50.414739Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 107 2026-01-04T02:38:50.414784Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-04T02:38:50.414850Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:38:50.414980Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 107, subscribers: 1 2026-01-04T02:38:50.415033Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:824:2790] 2026-01-04T02:38:50.419429Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-04T02:38:50.419561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-04T02:38:50.419612Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 
107: satisfy waiter [2:1100:3013] TestWaitNotification: OK eventTxId 107 2026-01-04T02:38:50.420279Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:38:50.420542Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 308us result status StatusSuccess 2026-01-04T02:38:50.421399Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 3 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } Status: Inactive ChildPartitionIds: 1 ChildPartitionIds: 2 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { ToBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\177" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } Status: Active } AlterVersion: 3 BalancerTabletID: 72075186233409549 NextPartitionId: 5 Allocate { Name: "Topic1" AlterVersion: 3 TotalGroupCount: 5 NextPartitionId: 5 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "\177" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "\177" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } } 
BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 5 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] >> ReadOnlyVDisk::TestWrites [GOOD] >> TSchemeShardTopicSplitMergeTest::GrowAndSplitNewFromTopicWithOnePartition [GOOD] >> TSentinelTests::PDiskRackGuardFullRack [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::CreateTopicWithManyPartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:38:48.165422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:38:48.165496Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:48.165542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:38:48.165590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:38:48.165624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:38:48.165648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:38:48.165705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:48.165759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:38:48.166560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:38:48.166813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:38:48.241516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:48.241568Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:48.253257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:38:48.253477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:38:48.253671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:38:48.261414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:38:48.261658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:38:48.262294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:48.262556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:38:48.265533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:48.265687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:38:48.266749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:48.266814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:48.267002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:38:48.267042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:48.267118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:38:48.267267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:38:48.397292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:38:48.398149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:48.398245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:48.398318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:48.398359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:48.398420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:48.398458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:48.398497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:48.398587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:48.398637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:48.398717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:48.398765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:48.398828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:48.398913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:48.399002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
lPathId: 27] was 0 2026-01-04T02:38:50.521797Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-01-04T02:38:50.521838Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-01-04T02:38:50.521860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-01-04T02:38:50.521896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-01-04T02:38:50.521920Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-04T02:38:50.521942Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-04T02:38:50.521963Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-04T02:38:50.521985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-04T02:38:50.522006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-04T02:38:50.522026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-04T02:38:50.522173Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.522277Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.522378Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-01-04T02:38:50.522423Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:38:50.522453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:38:50.522475Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-04T02:38:50.522494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:38:50.522585Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.522661Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.522842Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, 
read records: 14, at schemeshard: 72057594046678944 2026-01-04T02:38:50.523002Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:38:50.523305Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.523411Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.523739Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.523816Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.524148Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.524295Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.524343Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.524430Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.524627Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.524716Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.525193Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.525472Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.525548Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.525599Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.525746Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.525797Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.525861Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.531012Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:38:50.535524Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:50.535588Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:50.536178Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:38:50.536232Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:50.536273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:38:50.536928Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [2:1150:3053] sender: [2:1210:2058] recipient: [2:15:2062] 2026-01-04T02:38:50.616854Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:38:50.617077Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1/Topic1" took 252us result status StatusSuccess 2026-01-04T02:38:50.617603Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 3 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "\325UUUUUUUUUUUUUUT" } Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } Status: Active } AlterVersion: 3 BalancerTabletID: 72075186233409549 NextPartitionId: 3 Allocate { Name: "Topic1" AlterVersion: 3 TotalGroupCount: 3 NextPartitionId: 3 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { ToBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "*\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252" ToBound: "\325UUUUUUUUUUUUUUT" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 
72057594046678944 ShardId: 3 Status: Active KeyRange { FromBound: "\325UUUUUUUUUUUUUUT" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::ColdTiers [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494871.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494871.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167494871.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494871.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494871.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167494871.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493671.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147494871.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147494871.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493671.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147493671.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147493671.000000s;Name=;Codec=}; 2026-01-04T02:37:51.995564Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:52.013703Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:52.013893Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:52.018785Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:52.018947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:52.019091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:52.019166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:52.019226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:52.019309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:52.019387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:52.019456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:52.019542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:52.019602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:52.019679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:52.019738Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:52.019797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:52.037571Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:52.037711Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:52.037761Z node 1 
:TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:52.037940Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:52.038098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:52.038155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:52.038185Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:52.038248Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:52.038303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:52.038335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:52.038355Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:52.038475Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:52.038529Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:52.038556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:52.038576Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:52.038646Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:52.038684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:52.038711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:52.038729Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:52.038758Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:52.038782Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:52.038801Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:52.038843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:52.038870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:52.038888Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:52.038995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:52.039025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:52.039044Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:52.039131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:52.039161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:52.039179Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:52.039207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:52.039229Z node 1 :TX_COLUMNSHARD WARN: l ... 
hing=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-04T02:38:50.885042Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 9 at tablet 9437184 2026-01-04T02:38:50.885362Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 18446744073709551615 scanId: 0 version: {1767494320157:max} readable: {1767494320157:max} at tablet 9437184 2026-01-04T02:38:50.885509Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 18446744073709551615 scanId: 0 at tablet 9437184 2026-01-04T02:38:50.885729Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494320157:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:38:50.885837Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494320157:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Projection { Columns { Id: 1 } } } ; 2026-01-04T02:38:50.886391Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494320157:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":3}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"timestamp","id":1}]},"o":"1","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"p":{"data":[{"name":"timestamp","id":1}]},"o":"0","t":"ReserveMemory"},"w":0,"id":3},"2":{"p":{"i":"1","p":{"address":{"name":"timestamp","id":1}},"o":"1","t":"AssembleOriginalData"},"w":7,"id":2},"0":{"p":{"i":"1","t":"Projection"},"w":7,"id":0}}}; 2026-01-04T02:38:50.888307Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494320157:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=read_metadata.h:140;filter_limit_not_detected=no_ranges; 2026-01-04T02:38:50.889208Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=18446744073709551615;scan_id=0;gen=0;table=;snapshot={1767494320157:max};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:214;event=TTxScan started;actor_id=[1:961:2906];trace_detailed=; 2026-01-04T02:38:50.889619Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=context.cpp:92;ff_first=(column_ids=1;column_names=timestamp;);; 2026-01-04T02:38:50.889794Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;fline=context.cpp:107;columns_context_info=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; 2026-01-04T02:38:50.890074Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:50.890242Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:50.890497Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:961:2906];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:38:50.890608Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:961:2906];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:50.890724Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:961:2906];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:50.890876Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:961:2906] finished for tablet 9437184 2026-01-04T02:38:50.891251Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=4;SelfId=[1:961:2906];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:960:2905];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","l_ack","f_processing","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.001}],"full":{"a":60428061,"name":"_full_task","f":60428061,"d_finished":0,"c":0,"l":60429850,"d":1789},"events":[{"name":"bootstrap","f":60428305,"d_finished":897,"c":1,"l":60429202,"d":897},{"a":60429399,"name":"ack","f":60429399,"d_finished":0,"c":0,"l":60429850,"d":451},{"a":60429377,"name":"processing","f":60429377,"d_finished":0,"c":0,"l":60429850,"d":473},{"name":"ProduceResults","f":60428889,"d_finished":534,"c":2,"l":60429667,"d":534},{"a":60429671,"name":"Finish","f":60429671,"d_finished":0,"c":0,"l":60429850,"d":179}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:38:50.891325Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:961:2906];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:960:2905];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:38:50.891663Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:961:2906];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:960:2905];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_ack","f_processing","l_ProduceResults","f_Finish"],"t":0.001},{"events":["l_ack","l_processing","l_Finish"],"t":0.002}],"full":{"a":60428061,"name":"_full_task","f":60428061,"d_finished":0,"c":0,"l":60430311,"d":2250},"events":[{"name":"bootstrap","f":60428305,"d_finished":897,"c":1,"l":60429202,"d":897},{"a":60429399,"name":"ack","f":60429399,"d_finished":0,"c":0,"l":60430311,"d":912},{"a":60429377,"name":"processing","f":60429377,"d_finished":0,"c":0,"l":60430311,"d":934},{"name":"ProduceResults","f":60428889,"d_finished":534,"c":2,"l":60429667,"d":534},{"a":60429671,"name":"Finish","f":60429671,"d_finished":0,"c":0,"l":60430311,"d":640}],"id":"9437184::10"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); Got TEvKqpCompute::TEvScanData [1:961:2906]->[1:960:2905] 2026-01-04T02:38:50.891749Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=4;SelfId=[1:961:2906];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:38:50.888273Z;index_granules=0;index_portions=0;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=0;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=0;selected_rows=0; 2026-01-04T02:38:50.891794Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=4;SelfId=[1:961:2906];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:38:50.891930Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=4;SelfId=[1:961:2906];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9739224 160000/9739224 160000/9739224 80000/4873744 0/0 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> KqpBatchUpdate::SimplePartitions [GOOD] |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestWrites [GOOD] Test command err: RandomSeed# 15433486283030139923 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but writes go through === SEND TEvPut with key [1:1:1:0:0:32768:0] 2026-01-04T02:38:48.242040Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5325:704] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:2:0:0:131072:0] 2026-01-04T02:38:48.247370Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5325:704] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-01-04T02:38:48.252898Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) 
Unavailable in read-only Sender# [1:5325:704] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-01-04T02:38:48.256215Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5325:704] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:5:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:6:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:7:0:0:32768:0] 2026-01-04T02:38:48.265206Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5325:704] TEvPutResult: TEvPutResult {Id# [1:1:7:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:8:0:0:131072:0] 2026-01-04T02:38:48.267679Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5325:704] TEvPutResult: TEvPutResult {Id# [1:1:8:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:9:0:0:32768:0] 2026-01-04T02:38:48.270876Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5325:704] TEvPutResult: TEvPutResult {Id# [1:1:9:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:10:0:0:131072:0] 2026-01-04T02:38:48.273784Z 1 00h01m30.060512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5325:704] TEvPutResult: TEvPutResult {Id# [1:1:10:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 11 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key 
[1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Put 2 more VDisks to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Write 10 more blobs, expect errors === SEND TEvPut with key [1:1:11:0:0:32768:0] 2026-01-04T02:38:49.223331Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5325:704] 2026-01-04T02:38:49.223456Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5339:718] 2026-01-04T02:38:49.223566Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5332:711] 2026-01-04T02:38:49.224264Z 1 00h03m30.110512s :BS_PROXY_PUT ERROR: [d84605dfef085da8] Result# TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 2026-01-04T02:38:49.225826Z 1 00h03m30.110512s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.411 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPut{ TimestampMs# 0.412 sample PartId# [1:1:11:0:0:32768:3] QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVPut{ TimestampMs# 0.412 sample PartId# [1:1:11:0:0:32768:2] QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVPut{ TimestampMs# 0.412 sample PartId# [1:1:11:0:0:32768:5] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPut{ TimestampMs# 0.412 sample PartId# [1:1:11:0:0:32768:1] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.413 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPutResult{ TimestampMs# 1.32 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.36 sample PartId# [1:1:11:0:0:32768:4] QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVPutResult{ TimestampMs# 1.477 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.499 sample PartId# [1:1:11:0:0:32768:6] QueryCount# 1 VDiskId# [82000000:1:0:4:0] NodeId# 5 } TEvVPutResult{ 
TimestampMs# 1.522 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } TEvPutResult: TEvPutResult {Id# [1:1:11:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:11:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 5 Situations# SUUUUU } { OrderNumber# 6 Situations# USUUUU } { OrderNumber# 7 Situations# UUSUUU } { OrderNumber# 0 Situations# UUUEUU } { OrderNumber# 1 Situations# UUUUEU } { OrderNumber# 2 Situations# UUUUUE } { OrderNumber# 3 Situations# UUUSUU } { OrderNumber# 4 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:12:0:0:131072:0] 2026-01-04T02:38:49.227545Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5325:704] 2026-01-04T02:38:49.227678Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5332:711] 2026-01-04T02:38:49.228609Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5339:718] TEvPutResult: TEvPutResult {Id# [1:1:12:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:12:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 4 Situations# SUUUUU } { OrderNumber# 5 Situations# USUUUU } { OrderNumber# 6 Situations# UUSUUU } { OrderNumber# 7 Situations# UUUSUU } { OrderNumber# 0 Situations# UUUUEU } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUEU } { OrderNumber# 3 Situations# UUUUUS } ] " ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:13:0:0:32768:0] 2026-01-04T02:38:49.230287Z 1 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5325:704] 2026-01-04T02:38:49.230981Z 2 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5332:711] 2026-01-04T02:38:49.231605Z 3 00h03m30.110512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5339:718] TEvPutResult: TEvPutResult {Id# [1:1:13:0:0:32768:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:13:0:0:32768:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNu ... 
ey [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Write 10 blobs, expect some VDisks refuse parts but the writes still go through === SEND TEvPut with key [1:1:21:0:0:32768:0] 2026-01-04T02:38:51.282752Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5339:718] 2026-01-04T02:38:51.282938Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5332:711] TEvPutResult: TEvPutResult {Id# [1:1:21:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:22:0:0:131072:0] 2026-01-04T02:38:51.286664Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5332:711] 2026-01-04T02:38:51.288093Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only 
Sender# [1:5339:718] TEvPutResult: TEvPutResult {Id# [1:1:22:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:23:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:23:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:24:0:0:131072:0] 2026-01-04T02:38:51.292765Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5339:718] TEvPutResult: TEvPutResult {Id# [1:1:24:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:25:0:0:32768:0] 2026-01-04T02:38:51.295717Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5339:718] 2026-01-04T02:38:51.295823Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5332:711] TEvPutResult: TEvPutResult {Id# [1:1:25:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:26:0:0:131072:0] 2026-01-04T02:38:51.298720Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5339:718] 2026-01-04T02:38:51.298828Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5332:711] TEvPutResult: TEvPutResult {Id# [1:1:26:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:27:0:0:32768:0] 2026-01-04T02:38:51.301881Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5339:718] 2026-01-04T02:38:51.301982Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5332:711] TEvPutResult: TEvPutResult {Id# [1:1:27:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:28:0:0:131072:0] 2026-01-04T02:38:51.304868Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5332:711] 2026-01-04T02:38:51.305142Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5339:718] TEvPutResult: TEvPutResult {Id# [1:1:28:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:29:0:0:32768:0] 2026-01-04T02:38:51.308255Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5339:718] 2026-01-04T02:38:51.308389Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5332:711] TEvPutResult: TEvPutResult {Id# [1:1:29:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999963} SEND TEvPut with key [1:1:30:0:0:131072:0] 2026-01-04T02:38:51.311128Z 3 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5339:718] 2026-01-04T02:38:51.311285Z 2 00h08m00.161536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5332:711] TEvPutResult: TEvPutResult {Id# [1:1:30:0:0:131072:0] Status# OK StatusFlags# { } 
ApproximateFreeSpaceShare# 0.999963} === Read all 31 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:7:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:7:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:8:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:8:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:9:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:9:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:10:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:10:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:11:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:11:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:12:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:12:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:13:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:13:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:14:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:14:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:15:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:15:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:16:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:16:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:17:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:17:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:18:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:18:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:19:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:19:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:20:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:20:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:21:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:21:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:22:0:0:131072:0] 
TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:22:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:23:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:23:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:24:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:24:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:25:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:25:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:26:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:26:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:27:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:27:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:28:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:28:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:29:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:29:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:30:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:30:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::SplitTwoPartitions [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:38:50.076015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:38:50.076123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:50.076168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:38:50.076216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:38:50.076259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:38:50.076293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:38:50.076356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:50.076420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-04T02:38:50.077302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:38:50.077598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:38:50.165670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:50.165723Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:50.179998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:38:50.180212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:38:50.180398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:38:50.189521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:38:50.189824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:38:50.190560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:50.190893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:38:50.193770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:50.194016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:38:50.195280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:50.195347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:50.195463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:38:50.195514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:50.195635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:38:50.195835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:38:50.354131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:50.355136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:50.355272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:50.355365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:50.355442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:50.355554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:50.355625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:50.355689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:50.355781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:50.355923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:50.355994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:50.356070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:50.356150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:50.356236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:50.356332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ecute, operationId: 105:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-04T02:38:51.294916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409548 Status: COMPLETE TxId: 105 Step: 250 2026-01-04T02:38:51.294972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 105:0, shardIdx: 72057594046678944:3, shard: 72075186233409548, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-04T02:38:51.295015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 105:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-04T02:38:51.295252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 105:0 128 -> 240 2026-01-04T02:38:51.295432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:38:51.299523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:38:51.299864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:51.299944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:38:51.300238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:51.300292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 105, path id: 39 2026-01-04T02:38:51.300798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:38:51.300852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 105:0 ProgressState 2026-01-04T02:38:51.300948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-04T02:38:51.300981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:38:51.301027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 1/1 2026-01-04T02:38:51.301062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:38:51.301100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 1/1, is published: false 2026-01-04T02:38:51.301157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation 
IsReadyToDone TxId: 105 ready parts: 1/1 2026-01-04T02:38:51.301203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-04T02:38:51.301240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-04T02:38:51.301381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:38:51.301424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 1, subscribers: 1 2026-01-04T02:38:51.301457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 39], 3 2026-01-04T02:38:51.302159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:38:51.302261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:38:51.302298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:38:51.302333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-04T02:38:51.302376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:38:51.302455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 1 2026-01-04T02:38:51.302492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:814:2781] 2026-01-04T02:38:51.304792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:38:51.304873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:38:51.304908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:1021:2945] TestWaitNotification: OK eventTxId 105 2026-01-04T02:38:51.305526Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1/Topic1" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:38:51.305860Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path 
"/MyRoot/USER_1/Topic1" took 356us result status StatusSuccess 2026-01-04T02:38:51.306747Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1/Topic1" PathDescription { Self { Name: "Topic1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 104 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 2 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 39 TotalGroupCount: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 0 TabletId: 72075186233409548 KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Inactive ChildPartitionIds: 2 ChildPartitionIds: 3 } Partitions { PartitionId: 1 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Inactive ChildPartitionIds: 4 ChildPartitionIds: 5 } Partitions { PartitionId: 2 TabletId: 72075186233409548 KeyRange { ToBound: "?" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 3 TabletId: 72075186233409548 KeyRange { FromBound: "?" ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } Status: Active ParentPartitionIds: 0 } Partitions { PartitionId: 4 TabletId: 72075186233409548 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277" } Status: Active ParentPartitionIds: 1 } Partitions { PartitionId: 5 TabletId: 72075186233409548 KeyRange { FromBound: "\277" } Status: Active ParentPartitionIds: 1 } AlterVersion: 2 BalancerTabletID: 72075186233409549 NextPartitionId: 6 Allocate { Name: "Topic1" AlterVersion: 2 TotalGroupCount: 6 NextPartitionId: 6 PartitionPerTablet: 7 PQTabletConfig { PartitionConfig { LifetimeSeconds: 3600 } YdbDatabasePath: "/MyRoot" PartitionStrategy { PartitionStrategyType: CAN_SPLIT_AND_MERGE } } Partitions { PartitionId: 1 GroupId: 2 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 0 GroupId: 1 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Inactive KeyRange { ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 2 GroupId: 3 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { ToBound: "?" } } Partitions { PartitionId: 3 GroupId: 4 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 0 KeyRange { FromBound: "?" 
ToBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" } } Partitions { PartitionId: 5 GroupId: 6 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\277" } } Partitions { PartitionId: 4 GroupId: 5 TabletId: 72075186233409548 OwnerId: 72057594046678944 ShardId: 3 Status: Active ParentPartitionIds: 1 KeyRange { FromBound: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\376" ToBound: "\277" } } BalancerTabletID: 72075186233409549 BalancerOwnerId: 72057594046678944 BalancerShardId: 4 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 6 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> TSchemeShardTopicSplitMergeTest::GrowAndSplitNewFromTopicWithOnePartition [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:38:48.899703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:38:48.899835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:48.899881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:38:48.899937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:38:48.899979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:38:48.900012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:38:48.900071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:48.900143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:38:48.900933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:38:48.901157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:38:48.978838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:48.978902Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:48.994063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:38:48.994234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:38:48.994378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:38:49.002068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:38:49.002254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:38:49.002835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:49.003109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:38:49.006070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:49.006207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:38:49.007191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:49.007258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:49.007334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:38:49.007372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:49.007445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:38:49.007581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 
2026-01-04T02:38:49.117046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.117908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.118036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.118141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.118229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.118308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.118385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.118466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.118599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.118705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.118767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.118861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.118932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.119043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:49.119135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-04T02:38:51.350948Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-04T02:38:51.351119Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:38:51.351211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:38:51.355831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:38:51.356207Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:51.356252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:38:51.356393Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:38:51.356571Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:51.356607Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-04T02:38:51.356648Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 104, path id: 39 2026-01-04T02:38:51.357065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:38:51.357120Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-04T02:38:51.357231Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:38:51.357273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:38:51.357318Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:38:51.357354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:38:51.357395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-04T02:38:51.357437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:38:51.357479Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 
2026-01-04T02:38:51.357510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-04T02:38:51.357655Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 5 2026-01-04T02:38:51.357706Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-04T02:38:51.357747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-04T02:38:51.357780Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-04T02:38:51.358829Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:38:51.358916Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:38:51.358951Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:38:51.358993Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-04T02:38:51.359043Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:38:51.359489Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:38:51.359560Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:38:51.359589Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:38:51.359620Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-04T02:38:51.359651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:38:51.359715Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-04T02:38:51.362818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:38:51.363201Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-04T02:38:51.363420Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-04T02:38:51.363474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-04T02:38:51.363899Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-04T02:38:51.364003Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:38:51.364048Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [2:990:2919] TestWaitNotification: OK eventTxId 104 >>>>> Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ChildPartitionIds: 3 ChildPartitionIds: 4 } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } TestModificationResults wait txId: 105 2026-01-04T02:38:51.367762Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/USER_1" OperationType: ESchemeOpAlterPersQueueGroup AlterPersQueueGroup { Name: "Topic1" PQTabletConfig { PartitionConfig { } } Split { Partition: 1 SplitBoundary: "\177\377\377\377\377\377\377\377\377\377\377\377\377\377\377\377" ChildPartitionIds: 3 ChildPartitionIds: 4 } RootPartitionBoundaries { Partition: 0 KeyRange { ToBound: "UUUUUUUUUUUUUUUT" } CreatePartition: false } RootPartitionBoundaries { Partition: 1 KeyRange { FromBound: "UUUUUUUUUUUUUUUT" ToBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } RootPartitionBoundaries { Partition: 2 KeyRange { FromBound: "\252\252\252\252\252\252\252\252\252\252\252\252\252\252\252\251" } CreatePartition: true } } } TxId: 105 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:38:51.368050Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_pq.cpp:554: TAlterPQ Propose, path: /MyRoot/USER_1/Topic1, pathId: , opId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:38:51.368306Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 105:1, propose status:StatusInvalidParameter, reason: Splitting partition does not exists: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:51.370516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 105, response: Status: StatusInvalidParameter Reason: "Splitting partition does not exists: 1" TxId: 105 
SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:38:51.370781Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 105, database: /MyRoot/USER_1, subject: , status: StatusInvalidParameter, reason: Splitting partition does not exists: 1, operation: ALTER PERSISTENT QUEUE, path: /MyRoot/USER_1/Topic1 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-04T02:38:51.371120Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-04T02:38:51.371167Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-04T02:38:51.371524Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-04T02:38:51.371620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:38:51.371676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:997:2926] TestWaitNotification: OK eventTxId 105 |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_topic_set_boundaries/unittest >> BasicStatistics::TwoNodes >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-false [GOOD] >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true >> KqpBatchDelete::SimpleOnePartition [GOOD] |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> KqpBatchDelete::Large_3 [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts |90.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel/unittest >> TSentinelTests::PDiskRackGuardFullRack [GOOD] Test command err: 2026-01-04T02:38:20.219037Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-04T02:38:20.219088Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start ConfigUpdater 2026-01-04T02:38:20.219156Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-04T02:38:20.219187Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-04T02:38:20.219253Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-04T02:38:20.219343Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-04T02:38:20.220859Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: 
UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } Hosts { Name: "node-9" State: UNKNOWN Devices { Name: "pdisk-9-36" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-9-37" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-9-38" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-9-39" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 9 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-10" State: UNKNOWN Devices { Name: "pdisk-10-40" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-10-41" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-10-42" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-10-43" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 10 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-11" State: UNKNOWN Devices { Name: "pdisk-11-44" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-11-45" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-11-46" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-11-47" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 11 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-12" State: UNKNOWN Devices { Name: "pdisk-12-48" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-12-49" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-12-50" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-12-51" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 12 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-13" State: UNKNOWN Devices { Name: "pdisk-13-52" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-13-53" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-13-54" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-13-55" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 13 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-14" State: UNKNOWN Devices { Name: "pdisk-14-56" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-14-57" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-14-58" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-14-59" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 14 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-15" State: UNKNOWN Devices { Name: "pdisk-15-60" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-15-61" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-15-62" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-15-63" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 15 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-16" State: UNKNOWN Devices { Name: "pdisk-16-64" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-16-65" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-16-66" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-16-67" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 16 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-04T02:38:20.229675Z node 1 :CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: 
"/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 36 Path: "/9/pdisk-36.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 37 Path: "/9/pdisk-37.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 38 Path: "/9/pdisk-38.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 9 PDiskId: 39 Path: "/9/pdisk-39.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 40 Path: "/10/pdisk-40.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 41 Path: "/10/pdisk-41.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 42 Path: "/10/pdisk-42.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 10 PDiskId: 43 Path: "/10/pdisk-43.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 44 Path: "/11/pdisk-44.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 45 Path: "/11/pdisk-45.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 11 PDiskId: 46 Path: "/11/pdisk-46.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 
11 PDiskId: 47 Path: "/11/pdisk-47.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 48 Path: "/12/pdisk-48.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 49 Path: "/12/pdisk-49.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 50 Path: "/12/pdisk-50.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 12 PDiskId: 51 Path: "/12/pdisk-51.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 52 Path: "/13/pdisk-52.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 53 Path: "/13/pdisk-53.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 54 Path: "/13/pdisk-54.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 13 PDiskId: 55 Path: "/13/pdisk-55.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 56 Path: "/14/pdisk-56.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 57 Path: "/14/pdisk-57.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 58 Path: "/14/pdisk-58.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 14 PDiskId: 59 Path: "/14/pdisk-59.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 60 Path: "/15/pdisk-60.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 61 Path: "/15/pdisk-61.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 62 Path: "/15/pdisk-62.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 15 PDiskId: 63 Path: "/15/pdisk-63.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 16 PDiskId: 64 Path: "/16/pdisk-64.data" G ... 374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-04T02:38:48.415376Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 25, response# PDiskStateInfo { PDiskId: 100 CreateTime: 0 ChangeTime: 0 Path: "/25/pdisk-100.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 101 CreateTime: 0 ChangeTime: 0 Path: "/25/pdisk-101.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 102 CreateTime: 0 ChangeTime: 0 Path: "/25/pdisk-102.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 103 CreateTime: 0 ChangeTime: 0 Path: "/25/pdisk-103.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-04T02:38:48.415499Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 104 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-104.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 105 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-105.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 106 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-106.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 107 CreateTime: 0 ChangeTime: 0 Path: "/26/pdisk-107.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-04T02:38:48.415627Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 108 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-108.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 
State: Normal } PDiskStateInfo { PDiskId: 109 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-109.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 110 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-110.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 111 CreateTime: 0 ChangeTime: 0 Path: "/27/pdisk-111.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-04T02:38:48.415745Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 112 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-112.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 113 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-113.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 114 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-114.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 115 CreateTime: 0 ChangeTime: 0 Path: "/28/pdisk-115.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-04T02:38:48.415864Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 116 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-116.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 117 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-117.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 118 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-118.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 119 CreateTime: 0 ChangeTime: 0 Path: "/29/pdisk-119.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-04T02:38:48.416002Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 120 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-120.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 121 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-121.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 122 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-122.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 123 CreateTime: 0 ChangeTime: 0 Path: "/30/pdisk-123.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-04T02:38:48.416121Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 124 CreateTime: 0 ChangeTime: 0 Path: "/31/pdisk-124.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 125 CreateTime: 0 ChangeTime: 0 Path: "/31/pdisk-125.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 126 CreateTime: 0 ChangeTime: 0 Path: 
"/31/pdisk-126.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 127 CreateTime: 0 ChangeTime: 0 Path: "/31/pdisk-127.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-04T02:38:48.416236Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 128 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-128.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 129 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-129.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 130 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-130.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 131 CreateTime: 0 ChangeTime: 0 Path: "/32/pdisk-131.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-04T02:38:48.416358Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 18, response# PDiskStateInfo { PDiskId: 72 CreateTime: 0 ChangeTime: 0 Path: "/18/pdisk-72.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 73 CreateTime: 0 ChangeTime: 0 Path: "/18/pdisk-73.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 74 CreateTime: 0 ChangeTime: 0 Path: "/18/pdisk-74.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 75 CreateTime: 0 ChangeTime: 0 Path: "/18/pdisk-75.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-04T02:38:48.416478Z node 17 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 19, response# PDiskStateInfo { PDiskId: 76 CreateTime: 0 ChangeTime: 0 Path: "/19/pdisk-76.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 77 CreateTime: 0 ChangeTime: 0 Path: "/19/pdisk-77.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 78 CreateTime: 0 ChangeTime: 0 Path: "/19/pdisk-78.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 79 CreateTime: 0 ChangeTime: 0 Path: "/19/pdisk-79.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 36120110 2026-01-04T02:38:48.416544Z node 17 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-04T02:38:48.417358Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 30:123, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:48.417430Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 22:88, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:48.417468Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 22:89, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal 
State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:48.417503Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 22:90, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:48.417541Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 22:91, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:48.417575Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 30:120, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:48.417615Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 30:121, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:48.417653Z node 17 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 30:122, status# INACTIVE, required status# ACTIVE, reason# PrevState# Normal State# Normal StateCounter# 60 StateLimit# 60, dry run# 0 2026-01-04T02:38:48.417695Z node 17 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 8 2026-01-04T02:38:48.418157Z node 17 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Status { Success: true } Success: true, cookie# 20 2026-01-04T02:38:48.418200Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 22:88 2026-01-04T02:38:48.418231Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 22:89 2026-01-04T02:38:48.418255Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 22:90 2026-01-04T02:38:48.418281Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 22:91 2026-01-04T02:38:48.418306Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 30:120 2026-01-04T02:38:48.418334Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 30:121 2026-01-04T02:38:48.418360Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 30:122 2026-01-04T02:38:48.418383Z node 17 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 30:123 |90.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... results_accumulator.log} >> HttpRequest::Probe |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel/unittest |90.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_topic_set_boundaries/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood >> BasicStatistics::TwoServerlessTwoSharedDbs >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad >> KqpBatchUpdate::Large_2 [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> BasicStatistics::ServerlessTimeIntervals ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::SimplePartitions [GOOD] Test command err: Trying to start YDB, gRPC: 21130, MsgBus: 20071 2026-01-04T02:33:48.677819Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329049728049919:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:48.686742Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:48.724867Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:33:49.062029Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:49.176699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:49.176780Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:49.211652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:49.215717Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:33:49.387369Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:33:49.463979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:33:49.464021Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:33:49.464028Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:33:49.464132Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:49.699040Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-04T02:33:50.233690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:50.373236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:50.624916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:50.863310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:50.998338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:53.681933Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329049728049919:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:53.681999Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:33:54.296608Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329075497855517:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:54.296729Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:54.297185Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329075497855527:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:54.297281Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:54.660427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:54.704299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:54.752995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:54.823104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:54.870714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:54.937321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.033730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.128490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:55.274290Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329079792823699:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.274394Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.274912Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329079792823704:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.274963Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329079792823705:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.275084Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:55.279006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:55.298305Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329079792823708:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:33:55.369686Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329079792823759:3779] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:34:03.958629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:34:03.958666Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 21842, MsgBus: 21396 2026-01-04T02:34:11.149550Z node 2 :METADATA_PROVIDER W ... bleExistsActor;event=undelivered;self_id=[16:7591330276052137176:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:34.260758Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:34.271447Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:38:34.355604Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:34.357062Z node 16 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [16:7591330276052137152:2081] 1767494314260227 != 1767494314260230 2026-01-04T02:38:34.367850Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:34.367957Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:34.373186Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:34.373787Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:34.432726Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:34.432750Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:34.432757Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:34.432843Z node 16 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:34.587227Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:34.874499Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:34.971669Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:35.028921Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:35.190538Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:35.261901Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:35.266860Z node 16 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:39.262082Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7591330276052137176:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:39.262169Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:38:39.386080Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7591330297526975506:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:39.386188Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:39.386462Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7591330297526975517:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:39.386534Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:39.425263Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:39.454884Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:39.484556Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:39.514521Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:39.546811Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:39.584902Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:39.622453Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:39.676619Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:39.806587Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7591330297526976385:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:39.806671Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:39.806744Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7591330297526976390:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:39.806873Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7591330297526976392:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:39.806933Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:39.810391Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:39.821734Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7591330297526976394:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:39.888183Z node 16 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [16:7591330297526976445:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:49.354011Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:38:49.354037Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> LdapAuthProviderTest::LdapRequestWithEmptyHost [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn >> HttpRequest::ProbeBaseStats |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |90.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |90.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |90.2%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |90.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::SimpleOnePartition [GOOD] Test command err: Trying to start YDB, gRPC: 2374, MsgBus: 15215 2026-01-04T02:38:20.349636Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330215242042720:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:20.350025Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:20.517257Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:20.537854Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:20.537919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:20.566799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:20.592829Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:20.593739Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330215242042615:2081] 1767494300345451 != 1767494300345454 2026-01-04T02:38:20.634794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:20.634818Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:20.634827Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:20.634903Z node 1 :NET_CLASSIFIER ERROR: 
net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:20.757918Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:20.970753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:21.026259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:21.139052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:21.273396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:21.332451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:21.388364Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:22.730860Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330223831979075:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:22.730943Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:22.731169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330223831979085:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:22.731209Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:22.943683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:22.968826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:22.992683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:23.019358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:23.043083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:23.068936Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:23.098410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:23.138772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:23.207986Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330228126947247:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:23.208065Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:23.208100Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330228126947252:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:23.208240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330228126947254:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:23.208276Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:23.211581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:23.220879Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330228126947255:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:38:23.302844Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330228126947307:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:25.349372Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330215242042720:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:25.349445Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 1836, MsgBus: 15675 2026-01-04T02:38:27.300806Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330245295033412:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:27.300876Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_sna ... :841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591330275459198611:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:39.567166Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13655, MsgBus: 23192 2026-01-04T02:38:43.723954Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591330315207882012:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:43.724017Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:43.747499Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:43.797569Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:43.799112Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591330315207881978:2081] 1767494323722808 != 1767494323722811 2026-01-04T02:38:43.835339Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:43.835424Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:43.838115Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:43.866513Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:43.866533Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:43.866544Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:43.866614Z node 4 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:43.964366Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:44.208495Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:44.217136Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:44.261744Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:44.399444Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:44.461005Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:44.730235Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:47.423498Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330332387753031:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:47.423602Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:47.424056Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330332387753041:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:47.424117Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:47.498361Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:47.538742Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:47.574038Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:47.607614Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:47.645528Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:47.730797Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:47.763908Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:47.812441Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:47.890310Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330332387753916:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:47.890393Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:47.890452Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330332387753921:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:47.890598Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330332387753923:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:47.890690Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:47.894090Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:47.904750Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591330332387753925:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:47.993731Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591330332387753976:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:48.724209Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591330315207882012:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:48.724299Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> BasicStatistics::NotFullStatisticsColumnshard ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::Large_3 [GOOD] Test command err: Trying to start YDB, gRPC: 8318, MsgBus: 17428 2026-01-04T02:38:21.261893Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330220206321154:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:21.262840Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:21.417660Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:21.425623Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:21.425740Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:21.479936Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:21.517203Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:21.518438Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330220206321123:2081] 1767494301260160 != 1767494301260163 2026-01-04T02:38:21.563725Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:21.563748Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:21.563763Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:21.563857Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:21.705968Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:21.873905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:21.914659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:22.029056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:22.144848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:22.199332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:22.306094Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:23.646974Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330228796257582:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:23.647105Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:23.647388Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330228796257592:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:23.647441Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:23.913377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:23.941618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:23.969502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:23.994883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:24.020381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:24.050686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:24.081364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:24.120730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:24.193375Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330233091225756:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:24.193450Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:24.193550Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330233091225761:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:24.193631Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330233091225763:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:24.193703Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:24.196679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:24.205314Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330233091225765:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:24.267203Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330233091225816:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:25.718791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:26.261835Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330220206321154:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:26.261915Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migration ... 99536658146131:7762515]; 2026-01-04T02:38:35.915109Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:35.928951Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:35.963195Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330282483097380:2081] 1767494315914072 != 1767494315914075 2026-01-04T02:38:36.026090Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:36.031288Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:36.031390Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:36.042932Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:36.124974Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:36.124993Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:36.124998Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:36.125061Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:36.145267Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:36.542655Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:36.559658Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:36.612592Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:36.770577Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:36.830126Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:36.963060Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:39.042714Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330299662968430:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:39.042850Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:39.043169Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330299662968440:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:39.043253Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:39.101457Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:39.132680Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:39.162165Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:39.190818Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:39.219797Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:39.251938Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:39.304208Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:39.349550Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:39.429826Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330299662969309:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:39.429962Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:39.429971Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330299662969314:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:39.430140Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330299662969316:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:39.430230Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:39.433503Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:39.445770Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330299662969317:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:39.526944Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330299662969369:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:40.918726Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330282483097408:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:40.919248Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:38:40.982145Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:51.024142Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:38:51.024173Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded >> ReadOnlyVDisk::TestGarbageCollect [GOOD] |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> JsonChangeRecord::Heartbeat [GOOD] |90.3%| [TA] $(B)/ydb/core/cms/ut_sentinel/test-results/unittest/{meta.json ... results_accumulator.log} |90.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |90.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |90.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |90.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest >> JsonChangeRecord::Heartbeat [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::Large_2 [GOOD] Test command err: Trying to start YDB, gRPC: 23133, MsgBus: 13051 2026-01-04T02:38:28.760391Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330249440688667:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:28.760461Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:28.960222Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:28.964050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:28.964173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:29.009653Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-04T02:38:29.055363Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:29.058389Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330249440688640:2081] 1767494308758945 != 1767494308758948 2026-01-04T02:38:29.107210Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:29.107238Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:29.107245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:29.107398Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:29.195933Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:29.465167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:29.523390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.648404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.767230Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:29.771015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.822176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:31.688198Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330262325592396:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:31.688311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:31.688655Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330262325592406:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:31.688730Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:31.943014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:31.970861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.002620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.030976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.060971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.097519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.133272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.175459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:32.241856Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330266620560574:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:32.241957Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:32.242041Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330266620560579:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:32.242225Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330266620560581:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:32.242282Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:32.245708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:32.256874Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330266620560583:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:38:32.341652Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330266620560634:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:33.760413Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330249440688667:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:33.760471Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:38:33.879000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... B, gRPC: 27242, MsgBus: 12942 2026-01-04T02:38:41.738537Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330305873350661:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:41.738580Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:41.754591Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:41.818456Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:41.819718Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330305873350633:2081] 1767494321737679 != 1767494321737682 2026-01-04T02:38:41.858025Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:41.858109Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:41.863126Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:41.900095Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:41.900118Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:41.900127Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:41.900217Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:41.949128Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:42.302630Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:42.310440Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:42.365978Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:42.489061Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:42.561754Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:42.743987Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:44.970357Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330318758254392:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:44.970460Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:44.970767Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330318758254401:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:44.970825Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:45.050943Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:45.082652Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:45.108545Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:45.140312Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:45.173032Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:45.206622Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:45.241236Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:45.290262Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:45.362826Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330323053222567:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:45.362910Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:45.363124Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330323053222572:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:45.363125Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330323053222573:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:45.363175Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:45.366814Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:45.375882Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330323053222576:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:45.446267Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330323053222627:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:46.739124Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330305873350661:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:46.739203Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:38:46.978520Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestGarbageCollect [GOOD] Test command err: RandomSeed# 1174347614683485074 SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} SEND TEvPut with key [1:1:1:0:0:32768:0] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 2 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:1:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:2:0:0:131072:0] 2026-01-04T02:38:47.455366Z 1 00h01m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2026-01-04T02:38:47.459847Z 1 00h01m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] SEND TEvGet with key [1:1:2:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-01-04T02:38:48.347193Z 1 00h03m20.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] 2026-01-04T02:38:48.348045Z 2 00h03m20.160512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] TEvPutResult: TEvPutResult {Id# 
[1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} 2026-01-04T02:38:48.826207Z 1 00h04m20.161024s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] 2026-01-04T02:38:48.826398Z 2 00h04m20.161024s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-01-04T02:38:49.181937Z 1 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] 2026-01-04T02:38:49.182969Z 2 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] 2026-01-04T02:38:49.183949Z 3 00h05m00.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] 2026-01-04T02:38:49.184219Z 1 00h05m00.200000s :BS_PROXY_PUT ERROR: [9584648c48e11d76] Result# TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} GroupId# 2181038080 Marker# BPP12 2026-01-04T02:38:49.185906Z 1 00h05m00.200000s :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038080 HandleClass# TabletLog Tactic# Default History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.578 sample PartId# [1:1:4:0:0:131072:6] QueryCount# 1 VDiskId# [82000000:1:0:0:0] NodeId# 1 } TEvVPut{ TimestampMs# 0.579 sample PartId# [1:1:4:0:0:131072:5] QueryCount# 1 VDiskId# [82000000:1:0:7:0] NodeId# 8 } TEvVPut{ TimestampMs# 0.579 sample PartId# [1:1:4:0:0:131072:4] QueryCount# 1 VDiskId# [82000000:1:0:6:0] NodeId# 7 } TEvVPut{ TimestampMs# 0.58 sample PartId# [1:1:4:0:0:131072:3] QueryCount# 1 VDiskId# [82000000:1:0:5:0] NodeId# 6 } TEvVPut{ TimestampMs# 0.58 sample PartId# [1:1:4:0:0:131072:2] QueryCount# 1 VDiskId# [82000000:1:0:4:0] NodeId# 5 } TEvVPut{ TimestampMs# 0.58 sample PartId# [1:1:4:0:0:131072:1] QueryCount# 1 VDiskId# [82000000:1:0:3:0] NodeId# 4 } TEvVPutResult{ TimestampMs# 1.213 VDiskId# [82000000:1:0:0:0] NodeId# 1 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 1.268 sample PartId# [1:1:4:0:0:131072:6] QueryCount# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 } TEvVPutResult{ TimestampMs# 2.442 VDiskId# [82000000:1:0:1:0] NodeId# 2 Status# ERROR ErrorReason# "VDisk is in read-only mode" } TEvVPut{ TimestampMs# 2.483 sample PartId# [1:1:4:0:0:131072:6] 
QueryCount# 1 VDiskId# [82000000:1:0:2:0] NodeId# 3 } TEvVPutResult{ TimestampMs# 2.773 VDiskId# [82000000:1:0:7:0] NodeId# 8 Status# OK } TEvVPutResult{ TimestampMs# 2.79 VDiskId# [82000000:1:0:6:0] NodeId# 7 Status# OK } TEvVPutResult{ TimestampMs# 2.802 VDiskId# [82000000:1:0:5:0] NodeId# 6 Status# OK } TEvVPutResult{ TimestampMs# 2.817 VDiskId# [82000000:1:0:4:0] NodeId# 5 Status# OK } TEvVPutResult{ TimestampMs# 2.867 VDiskId# [82000000:1:0:3:0] NodeId# 4 Status# OK } TEvVPutResult{ TimestampMs# 3.007 VDiskId# [82000000:1:0:2:0] NodeId# 3 Status# ERROR ErrorReason# "VDisk is in read-only mode" } ] } TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# ERROR StatusFlags# { } ErrorReason# "TRestoreStrategy saw optimisticState# EBS_DISINTEGRATED GroupId# 2181038080 BlobId# [1:1:4:0:0:131072:0] Reported ErrorReasons# [ { OrderNumber# 0 VDiskId# [82000000:1:0:0:0] NodeId# 1 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 1 VDiskId# [82000000:1:0:1:0] NodeId# 2 ErrorReasons# [ "VDisk is in read-only mode", ] } { OrderNumber# 2 VDiskId# [82000000:1:0:2:0] NodeId# 3 ErrorReasons# [ "VDisk is in read-only mode", ] } ] Part situations# [ { OrderNumber# 3 Situations# PUUUUU } { OrderNumber# 4 Situations# UPUUUU } { OrderNumber# 5 Situations# UUPUUU } { OrderNumber# 6 Situations# UUUPUU } { OrderNumber# 7 Situations# UUUUPU } { OrderNumber# 0 Situations# UUUUUE } { OrderNumber# 1 Situations# UUUUUE } { OrderNumber# 2 Situations# UUUUUE } ] " ApproximateFreeSpaceShare# 0.999988} 2026-01-04T02:38:49.630559Z 1 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] 2026-01-04T02:38:49.630762Z 2 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] 2026-01-04T02:38:49.630821Z 3 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2026-01-04T02:38:50.393351Z 1 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] 2026-01-04T02:38:50.393572Z 2 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] 2026-01-04T02:38:50.393627Z 3 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] 2026-01-04T02:38:50.393674Z 4 00h07m40.260512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5345:725] === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2026-01-04T02:38:50.686177Z 1 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] 2026-01-04T02:38:50.686380Z 2 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] 2026-01-04T02:38:50.686439Z 3 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] 2026-01-04T02:38:50.686487Z 4 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: 
(2181038080) Unavailable in read-only Sender# [1:5345:725] 2026-01-04T02:38:50.686533Z 5 00h08m20.262048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5352:732] === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2026-01-04T02:38:50.933517Z 1 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] 2026-01-04T02:38:50.933719Z 2 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] 2026-01-04T02:38:50.933770Z 3 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] 2026-01-04T02:38:50.933836Z 4 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5345:725] 2026-01-04T02:38:50.933885Z 5 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5352:732] 2026-01-04T02:38:50.933931Z 6 00h09m00.310512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5359:739] === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2026-01-04T02:38:51.152381Z 1 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5324:704] 2026-01-04T02:38:51.152593Z 2 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] 2026-01-04T02:38:51.152656Z 3 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] 2026-01-04T02:38:51.152708Z 4 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5345:725] 2026-01-04T02:38:51.152757Z 5 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5352:732] 2026-01-04T02:38:51.152806Z 6 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5359:739] 2026-01-04T02:38:51.152854Z 7 00h09m40.312048s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5366:746] === Putting VDisk #0 to normal === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2026-01-04T02:38:51.400148Z 2 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:5331:711] 2026-01-04T02:38:51.400232Z 3 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] 2026-01-04T02:38:51.400278Z 4 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5345:725] 2026-01-04T02:38:51.400326Z 5 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5352:732] 2026-01-04T02:38:51.400370Z 6 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: 
(2181038080) Unavailable in read-only Sender# [1:5359:739] 2026-01-04T02:38:51.400411Z 7 00h10m20.360512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5366:746] === Putting VDisk #1 to normal === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] 2026-01-04T02:38:51.683511Z 3 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:5338:718] 2026-01-04T02:38:51.683606Z 4 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5345:725] 2026-01-04T02:38:51.683664Z 5 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5352:732] 2026-01-04T02:38:51.683716Z 6 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5359:739] 2026-01-04T02:38:51.683766Z 7 00h11m00.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5366:746] === Putting VDisk #2 to normal === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] 2026-01-04T02:38:51.976480Z 4 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:5345:725] 2026-01-04T02:38:51.976546Z 5 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5352:732] 2026-01-04T02:38:51.976580Z 6 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5359:739] 2026-01-04T02:38:51.976611Z 7 00h11m40.410512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5366:746] === Putting VDisk #3 to normal === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] 2026-01-04T02:38:52.229360Z 5 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:5352:732] 2026-01-04T02:38:52.229431Z 6 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5359:739] 2026-01-04T02:38:52.229466Z 7 00h12m20.450512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5366:746] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] 2026-01-04T02:38:53.050072Z 6 00h14m00.461536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:5359:739] 2026-01-04T02:38:53.050138Z 7 00h14m00.461536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5366:746] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] 2026-01-04T02:38:53.423278Z 7 00h14m40.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:5366:746] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} Setting 
VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvGet with key [1:1:3:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:1:0] NODATA Size# 0}} SEND TEvPut with key [1:1:4:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999976} SEND TEvGet with key [1:1:4:0:0:1:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:1:0] NODATA Size# 0}} |90.3%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_json_change_record/unittest |90.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |90.3%| [TA] {RESULT} $(B)/ydb/core/cms/ut_sentinel/test-results/unittest/{meta.json ... results_accumulator.log} |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |90.3%| [TS] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_json_change_record/unittest |90.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/ut_sentinel/test-results/unittest/{meta.json ... results_accumulator.log} |90.3%| [TA] $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} |90.3%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} |90.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/ut_json_change_record/test-results/unittest/{meta.json ... results_accumulator.log} >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute >> DataStreams::TestDeleteStream >> DataStreams::TestUpdateStorage >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad >> DataStreams::TestStreamStorageRetention >> ReadOnlyVDisk::TestReads [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts >> LdapAuthProviderTest::LdapRequestWithEmptyBaseDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn >> DataStreams::TestGetShardIterator ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestReads [GOOD] Test command err: RandomSeed# 10677662569298242466 === Trying to put and get a blob === SEND TEvPut with key [1:1:0:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #0 to read-only === Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} 
=== Putting VDisk #1 to read-only === Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #2 to read-only === Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #3 to read-only === Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #4 to read-only === Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #5 to read-only === Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Putting VDisk #6 to read-only === Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #0 === Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #1 === Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #2 === Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #3 === Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #4 === Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal 
VDisk #5 === Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} === Restoring to normal VDisk #6 === Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] === Read all 1 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> LdapAuthProviderTest_nonSecure::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoWithError >> LdapAuthProviderTest_LdapsScheme::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoWithError >> DataStreams::TestGetRecordsStreamWithSingleShard |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithDontExistGroupAttribute [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithRemovedUserCredentialsBad [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood >> DataStreams::TestDeleteStream [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlag >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood >> DataStreams::TestUpdateStorage [GOOD] >> DataStreams::TestStreamTimeRetention >> LdapAuthProviderTest::LdapRequestWithEmptyBindDn [GOOD] >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword >> DataStreams::TestStreamStorageRetention [GOOD] >> DataStreams::TestStreamPagination >> DataStreams::TestGetShardIterator [GOOD] >> DataStreams::TestGetRecordsWithoutPermission ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/idx_test/unittest >> YdbIndexTable::OnlineBuildWithDataColumn [GOOD] Test command err: Trying to start YDB, gRPC: 6474, MsgBus: 26286 2026-01-04T02:35:15.667352Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329421857283316:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:15.667416Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:35:15.735982Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:35:16.017570Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:35:16.022245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:35:16.022371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:35:16.028334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:35:16.158106Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:35:16.286982Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:35:16.287006Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:35:16.287017Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:35:16.287087Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:35:16.293271Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:35:16.701928Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:35:17.011211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:35:17.088094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:17.221642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:17.411467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:17.504026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.368827Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329439037154333:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.368942Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.369306Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329439037154343:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.369393Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:19.738596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.768965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.808906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.840125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.903214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.943148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:19.985494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:20.035485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:35:20.111499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329443332122512:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:20.111593Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:20.111867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329443332122517:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:20.111906Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329443332122518:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:20.112220Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:35:20.116703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:35:20.131703Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329443332122521:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:35:20.220386Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329443332122572:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:35:20.668774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329421857283316:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:35:20.668843Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:35:21.806445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshar ... 01-04T02:37:29.117473Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:29.213855Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:29.231095Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:29.244646Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591329998544935983:2081] 1767494249116969 != 1767494249116972 2026-01-04T02:37:29.273193Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:29.273327Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:29.282961Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:29.342635Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:29.342663Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:29.342673Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:29.342763Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:29.501913Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:29.749042Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:29.768205Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:29.828666Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:29.992300Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:30.062118Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:30.192950Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:32.395534Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330011429839743:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:32.395631Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:32.395851Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330011429839752:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:32.395911Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:32.453106Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:32.479783Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:32.509757Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:32.538712Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:32.567445Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:32.601592Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:32.641379Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:32.685697Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:32.754187Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330011429840621:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:32.754270Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:32.754286Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330011429840626:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:32.754522Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330011429840628:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:32.754587Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:32.757783Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:37:32.768641Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330011429840629:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:37:32.865182Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330011429840681:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:37:34.117928Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591329998544936010:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:34.117994Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:37:34.715253Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:44.192552Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:37:44.192587Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded finished with status: SUCCESS |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/idx_test/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest |90.3%| [TA] $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest |90.3%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... results_accumulator.log} |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets |90.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/idx_test/test-results/unittest/{meta.json ... 
results_accumulator.log} >> ReadOnlyVDisk::TestSync [GOOD] >> DataStreams::TestGetRecordsStreamWithSingleShard [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest |90.3%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestSync [GOOD] Test command err: RandomSeed# 10438539971231419794 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] SEND TEvPut with key [1:1:0:0:0:131072:0] 2026-01-04T02:38:47.774301Z 1 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:8829:948] 2026-01-04T02:38:47.774862Z 2 00h02m00.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:8836:955] TEvPutResult: TEvPutResult {Id# [1:1:0:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] SEND TEvPut with key [1:1:1:0:0:32768:0] 2026-01-04T02:38:49.766305Z 3 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:2:0]: (2181038080) Unavailable in read-only Sender# [1:8843:962] 2026-01-04T02:38:49.766450Z 2 00h06m00.210512s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:1:0]: (2181038080) Unavailable in read-only Sender# [1:8836:955] TEvPutResult: TEvPutResult {Id# [1:1:1:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 1 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:1:0] Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] SEND TEvPut with key [1:1:2:0:0:131072:0] TEvPutResult: TEvPutResult {Id# [1:1:2:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 2 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:2:0] Setting VDisk read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] SEND TEvPut with key [1:1:3:0:0:32768:0] 2026-01-04T02:38:53.714753Z 5 00h14m00.361536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:8857:976] 2026-01-04T02:38:53.714832Z 4 00h14m00.361536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:3:0]: (2181038080) Unavailable in read-only Sender# [1:8850:969] TEvPutResult: TEvPutResult {Id# [1:1:3:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk 
read-only to 0 for position 3 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:3:0] Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] SEND TEvPut with key [1:1:4:0:0:131072:0] 2026-01-04T02:38:55.696841Z 6 00h18m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:8864:983] 2026-01-04T02:38:55.696910Z 5 00h18m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:4:0]: (2181038080) Unavailable in read-only Sender# [1:8857:976] TEvPutResult: TEvPutResult {Id# [1:1:4:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] SEND TEvPut with key [1:1:5:0:0:32768:0] 2026-01-04T02:38:57.972281Z 7 00h22m00.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:8871:990] 2026-01-04T02:38:57.972375Z 6 00h22m00.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:5:0]: (2181038080) Unavailable in read-only Sender# [1:8864:983] TEvPutResult: TEvPutResult {Id# [1:1:5:0:0:32768:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] SEND TEvPut with key [1:1:6:0:0:131072:0] 2026-01-04T02:39:00.039817Z 7 00h26m00.561536s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:6:0]: (2181038080) Unavailable in read-only Sender# [1:8871:990] TEvPutResult: TEvPutResult {Id# [1:1:6:0:0:131072:0] Status# OK StatusFlags# { } ApproximateFreeSpaceShare# 0.999988} Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] Setting VDisk read-only to 0 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] === Read all 7 blob(s) === SEND TEvGet with key [1:1:0:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:0:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:1:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:1:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:2:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:2:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:3:0:0:32768:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:3:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:4:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:4:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} SEND TEvGet with key [1:1:5:0:0:32768:0] TEvGetResult: TEvGetResult 
{Status# OK ResponseSz# 1 {[1:1:5:0:0:32768:0] OK Size# 32768 RequestedSize# 32768}} SEND TEvGet with key [1:1:6:0:0:131072:0] TEvGetResult: TEvGetResult {Status# OK ResponseSz# 1 {[1:1:6:0:0:131072:0] OK Size# 131072 RequestedSize# 131072}} |90.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] >> FolderServiceTest::TFolderServiceTransitional >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithDefaultGroupAttributeGoodUseListOfHosts [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserLoginBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad |90.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest >> LdapAuthProviderTest::LdapRequestWithEmptyBindPassword [GOOD] >> LdapAuthProviderTest::LdapFetchGroupsWithDelayUpdateSecurityState >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic >> DataStreams::TestDeleteStreamWithEnforceFlag [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlagFalse >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_rtmr/unittest >> TRtmrTest::CreateWithoutTimeCastBuckets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:02.611790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:02.611959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:02.612017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:02.612060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:02.612108Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:02.612147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:02.612202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:02.612305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:02.613222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:02.614833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:02.716821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:02.716896Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:02.736533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:02.737143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:02.738453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:02.750352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:02.751837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:02.754725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:02.758007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:02.765846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:02.766870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:02.773586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:02.773696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:02.773784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:02.773862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:02.774008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:02.774975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:02.951857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:02.954141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:02.954332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:02.954412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:02.954510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:02.954587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:02.954654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:02.954734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:02.954817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:02.954906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:02.954994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:02.955068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:02.955150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:02.955226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:02.955351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... ordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 100 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:39:03.652260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2026-01-04T02:39:03.652403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000038 2026-01-04T02:39:03.652726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:03.652873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:03.652927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_rtmr.cpp:130: TCreateRTMR TPropose, operationId: 100:0 HandleReply TEvOperationPlan, at schemeshard: 72057594046678944 2026-01-04T02:39:03.653024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 100:0 128 -> 240 2026-01-04T02:39:03.653213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:39:03.653283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 FAKE_COORDINATOR: Erasing txId 100 2026-01-04T02:39:03.655203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:03.655246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:03.655434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:39:03.655540Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:03.655578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-04T02:39:03.655630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 100, path id: 38 2026-01-04T02:39:03.655955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-04T02:39:03.656008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-04T02:39:03.656113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-04T02:39:03.656149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:39:03.656188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-04T02:39:03.656223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:39:03.656263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-01-04T02:39:03.656305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:39:03.656343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-04T02:39:03.656392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-04T02:39:03.656459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:39:03.656499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2026-01-04T02:39:03.656536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-04T02:39:03.656585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-04T02:39:03.657278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:39:03.657381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:39:03.657420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-04T02:39:03.657464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-04T02:39:03.657557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount 
reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:39:03.658247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:39:03.658327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:39:03.658366Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-04T02:39:03.658395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-04T02:39:03.658432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:39:03.658504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2026-01-04T02:39:03.661890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-04T02:39:03.661997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2026-01-04T02:39:03.662250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-04T02:39:03.662317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2026-01-04T02:39:03.663976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-04T02:39:03.664075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-04T02:39:03.664116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:715:2705] TestWaitNotification: OK eventTxId 100 2026-01-04T02:39:03.664707Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/rtmr1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:39:03.664957Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/rtmr1" took 268us result status StatusSuccess 2026-01-04T02:39:03.665325Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/rtmr1" PathDescription { Self { Name: "rtmr1" PathId: 38 
SchemeshardId: 72057594046678944 PathType: EPathTypeRtmrVolume CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 RTMRVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } RtmrVolumeDescription { Name: "rtmr1" PathId: 38 PartitionsCount: 0 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest >> TExtSubDomainTest::CreateTableInsideAndAlterDomainAndTable-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: 2026-01-04T02:38:44.209093Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330320774256035:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:44.209216Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:44.411348Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:38:44.437517Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:44.437627Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:44.445647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:44.510288Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:44.602208Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:38:44.643751Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591330320774256270:2129] Handle TEvNavigate describe path dc-1 2026-01-04T02:38:44.643831Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# 
[1:7591330320774256749:2448] HANDLE EvNavigateScheme dc-1 2026-01-04T02:38:44.643974Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591330320774256285:2137], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:38:44.644138Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591330320774256547:2303][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591330320774256285:2137], cookie# 1 2026-01-04T02:38:44.646269Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591330320774256574:2303][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591330320774256571:2303], cookie# 1 2026-01-04T02:38:44.646329Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591330320774256575:2303][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591330320774256572:2303], cookie# 1 2026-01-04T02:38:44.646347Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591330320774256576:2303][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591330320774256573:2303], cookie# 1 2026-01-04T02:38:44.646405Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591330320774255945:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591330320774256576:2303], cookie# 1 2026-01-04T02:38:44.646469Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591330320774256576:2303][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591330320774255945:2056], cookie# 1 2026-01-04T02:38:44.646515Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591330320774256547:2303][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591330320774256573:2303], cookie# 1 2026-01-04T02:38:44.646567Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591330320774256547:2303][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:38:44.646610Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591330320774255939:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591330320774256574:2303], cookie# 1 2026-01-04T02:38:44.646664Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591330320774255942:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591330320774256575:2303], cookie# 1 2026-01-04T02:38:44.646696Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591330320774256574:2303][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591330320774255939:2050], cookie# 1 2026-01-04T02:38:44.646743Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591330320774256575:2303][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591330320774255942:2053], cookie# 1 2026-01-04T02:38:44.646783Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
subscriber.cpp:919: [main][1:7591330320774256547:2303][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591330320774256571:2303], cookie# 1 2026-01-04T02:38:44.646814Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591330320774256547:2303][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:38:44.646869Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591330320774256547:2303][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591330320774256572:2303], cookie# 1 2026-01-04T02:38:44.646884Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591330320774256547:2303][/dc-1] Sync cookie mismatch: sender# [1:7591330320774256572:2303], cookie# 1, current cookie# 0 2026-01-04T02:38:44.646946Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591330320774256285:2137], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:38:44.653778Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591330320774256285:2137], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591330320774256547:2303] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:38:44.653930Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591330320774256285:2137], cacheItem# { Subscriber: { Subscriber: [1:7591330320774256547:2303] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:38:44.656381Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591330320774256750:2449], recipient# [1:7591330320774256749:2448], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:38:44.656455Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591330320774256749:2448] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:38:44.684555Z node 1 :TX_PROXY DEBUG: 
describe.cpp:433: Actor# [1:7591330320774256749:2448] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:38:44.687215Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591330320774256749:2448] Handle TEvDescribeSchemeResult Forward to# [1:7591330320774256748:2447] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:38:44.688385Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591330320774256270:2129] Handle TEvProposeTransaction 2026-01-04T02:38:44.688412Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591330320774256270:2129] TxId# 281474976710657 ProcessProposeTransaction 2026-01-04T02:38:44.688516Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591330320774256270:2129] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7591330320774256755:2453] 2026-01-04T02:38:44.766973Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591330320774256755:2453] txid# 281474976710657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-01-04T02:38:44.767018Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# ... 
lf# [3:7591330382879266081:4082], recipient# [3:7591330382879266079:2336], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:38:59.072709Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591330382879266082:4083], recipient# [3:7591330382879266080:2337], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:38:59.793871Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591330357109459836:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:38:59.794022Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591330357109459836:2121], cacheItem# { Subscriber: { Subscriber: [3:7591330369994363188:3213] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:38:59.794169Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591330382879266099:4084], recipient# [3:7591330382879266098:2338], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:39:00.073436Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591330357109459836:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:39:00.073587Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591330357109459836:2121], cacheItem# { Subscriber: { Subscriber: [3:7591330361404428497:3197] DomainOwnerId: 72057594046644480 Type: 2 
SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:39:00.073695Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591330387174233400:4088], recipient# [3:7591330387174233399:2339], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:39:00.795288Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591330357109459836:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:39:00.795441Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591330357109459836:2121], cacheItem# { Subscriber: { Subscriber: [3:7591330369994363188:3213] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:39:00.795547Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591330387174233417:4089], recipient# [3:7591330387174233416:2340], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:39:01.074390Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591330357109459836:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:39:01.074572Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591330357109459836:2121], cacheItem# { Subscriber: { Subscriber: 
[3:7591330361404428497:3197] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:39:01.074713Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591330391469200718:4093], recipient# [3:7591330391469200717:2341], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:39:01.796260Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591330357109459836:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:39:01.796433Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591330357109459836:2121], cacheItem# { Subscriber: { Subscriber: [3:7591330369994363188:3213] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:39:01.796541Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591330391469200735:4094], recipient# [3:7591330391469200734:2342], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:39:02.075777Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591330357109459836:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:39:02.075923Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# 
[3:7591330357109459836:2121], cacheItem# { Subscriber: { Subscriber: [3:7591330361404428497:3197] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:39:02.076025Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591330395764168036:4098], recipient# [3:7591330395764168035:2343], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_rtmr/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_ext_tenant/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest >> DataStreams::TestGetRecordsWithoutPermission [GOOD] >> DataStreams::TestGetRecordsWithCount [GOOD] >> DataStreams::TestInvalidRetentionCombinations |90.4%| [TA] $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} |90.4%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} |90.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_rtmr/test-results/unittest/{meta.json ... results_accumulator.log} >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |90.4%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootEnableColdTiersAfterTtl [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=167494878.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494878.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494878.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167494878.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494878.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=167494878.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=167494878.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493678.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147494878.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147494878.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493678.000000s;Name=tier0;Codec=};}{{Column=timestamp;EvictAfter=147493678.000000s;Name=tier1;Codec=};};TTL={Column=timestamp;EvictAfter=147493678.000000s;Name=;Codec=}; 2026-01-04T02:37:59.009047Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:59.041251Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:59.041491Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:59.048449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:59.048680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:59.048895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:59.049016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:59.049135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:59.049259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:59.049363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:59.049484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:59.049610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:59.049722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:59.049843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:59.049946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:59.050056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:59.078658Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:59.078804Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:59.078869Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:59.079044Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:59.079213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:59.079296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:59.079360Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:59.079452Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:59.079517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:59.079566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:59.079597Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:59.079768Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:59.079854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:59.079900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:59.079964Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:59.080070Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:59.080130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:59.080176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:59.080206Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:59.080253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:59.080295Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:59.080326Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-04T02:37:59.080374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:59.080412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:59.080442Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:59.080621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:59.080699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:59.080739Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:59.080866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:59.080915Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:59.080944Z nod ... 
te_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=97; 2026-01-04T02:39:04.612800Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=8711; 2026-01-04T02:39:04.612842Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8819; 2026-01-04T02:39:04.612899Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=11; 2026-01-04T02:39:04.612972Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=35; 2026-01-04T02:39:04.613006Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9417; 2026-01-04T02:39:04.613146Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=93; 2026-01-04T02:39:04.613249Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=63; 2026-01-04T02:39:04.613363Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=76; 2026-01-04T02:39:04.613459Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=63; 2026-01-04T02:39:04.618889Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=5369; 2026-01-04T02:39:04.624067Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=5068; 2026-01-04T02:39:04.624162Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2026-01-04T02:39:04.624210Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2026-01-04T02:39:04.624251Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 
2026-01-04T02:39:04.624316Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=36; 2026-01-04T02:39:04.624354Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-04T02:39:04.624426Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=44; 2026-01-04T02:39:04.624465Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-04T02:39:04.624519Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=27; 2026-01-04T02:39:04.624591Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=43; 2026-01-04T02:39:04.624886Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=261; 2026-01-04T02:39:04.624928Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=30341; 2026-01-04T02:39:04.625046Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=29251936;raw_bytes=43173354;count=6;records=480000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:39:04.625145Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2013:3832];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:39:04.625191Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2013:3832];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:39:04.625253Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2013:3832];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:39:04.644607Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2013:3832];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-04T02:39:04.644763Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:39:04.644849Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=3; 2026-01-04T02:39:04.644912Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494028839;tx_id=18446744073709551615;;current_snapshot_ts=1767494280318; 2026-01-04T02:39:04.644955Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:39:04.645002Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:39:04.645040Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:39:04.645120Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:39:04.645330Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.044000s; 2026-01-04T02:39:04.645433Z node 1 :TX_TIERING WARN: log.cpp:841: TEST_STEP=4;fline=fetcher.h:165;error=event_undelivered_to_scheme_cache;reason=ActorUnknown; 2026-01-04T02:39:04.646969Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2013:3832];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:39:04.647102Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2013:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:39:04.647147Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2013:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:39:04.647233Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2013:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=3; 2026-01-04T02:39:04.647291Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2013:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494028839;tx_id=18446744073709551615;;current_snapshot_ts=1767494280318; 2026-01-04T02:39:04.647332Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2013:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=3;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:39:04.647374Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2013:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:39:04.647409Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=4;tablet_id=9437184;self_id=[1:2013:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:39:04.647489Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2013:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:39:04.648018Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2013:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.093000s; 2026-01-04T02:39:04.648067Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=4;tablet_id=9437184;self_id=[1:2013:3832];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier0' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/tier1' stopped at tablet 9437184 160000/9752224 160000/9752224 160000/9752224 80000/4886744 0/0 >> RemoteTopicReader::ReadTopic |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |90.4%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> DataStreams::TestStreamPagination [GOOD] >> DataStreams::TestShardPagination |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest |90.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_ext_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} >> HttpRequest::AnalyzeServerless [GOOD] >> FolderServiceTest::TFolderServiceTransitional [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] >> LdapAuthProviderTest::LdapFetchGroupsWithDelayUpdateSecurityState [GOOD] >> LdapAuthProviderTest::CanGetErrorIfAppropriateLoginProviderIsAbsent >> RemoteTopicReader::PassAwayOnCreatingReadSession >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ycloud/impl/ut/unittest >> FolderServiceTest::TFolderServiceTransitional [GOOD] Test command err: 2026-01-04T02:39:03.252389Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330402376951740:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:03.253895Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:03.441178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:03.441359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:03.515270Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:03.515603Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:03.516682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:03.518868Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330402376951708:2081] 1767494343249215 != 1767494343249218 2026-01-04T02:39:03.689919Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:03.717568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:03.722631Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cd16b49ed50] Connect to grpc://localhost:15840 2026-01-04T02:39:03.732918Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cd16b49ed50] Request ListFoldersRequest { id: "i_am_not_exists" } 2026-01-04T02:39:03.739643Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cd16b49ed50] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:15840: Failed to connect to remote host: Connection refused 2026-01-04T02:39:03.740760Z node 1 :GRPC_CLIENT DEBUG: 
grpc_service_client.h:122: [7cd16b49ed50] Request ListFoldersRequest { id: "i_am_not_exists" } 2026-01-04T02:39:03.741314Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cd16b49ed50] Status 14 failed to connect to all addresses; last error: UNKNOWN: ipv4:127.0.0.1:15840: Failed to connect to remote host: Connection refused 2026-01-04T02:39:04.258098Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:04.741838Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cd16b49ed50] Request ListFoldersRequest { id: "i_am_not_exists" } 2026-01-04T02:39:04.745102Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cd16b49ed50] Status 5 Not Found 2026-01-04T02:39:04.745495Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cd16b49ed50] Request ListFoldersRequest { id: "i_am_exists" } 2026-01-04T02:39:04.748509Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cd16b49ed50] Response ListFoldersResponse { result { cloud_id: "response_cloud_id" } } |90.4%| [TM] {BAZEL_UPLOAD} ydb/library/ycloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::AnalyzeServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:38:52.430827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:38:52.506973Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:38:52.512886Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:38:52.513127Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:38:52.513217Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:38:52.829369Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:52.913294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:52.913436Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:52.961689Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:53.034115Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:38:53.695012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:53.695820Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:53.695860Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:53.695901Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:53.696272Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:53.764410Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:54.322803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:38:57.041305Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:38:57.049671Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:38:57.053934Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:38:57.086650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:38:57.086766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:57.115066Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:38:57.116618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:57.314959Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:38:57.315722Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:38:57.316458Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:38:57.317267Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:38:57.317545Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:38:57.317764Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:38:57.317962Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:38:57.318204Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:38:57.318327Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:38:57.496487Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-04T02:38:57.508381Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:57.508522Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:57.522133Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:57.684557Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:57.775924Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:38:57.776026Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:38:57.810851Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:38:57.811157Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:38:57.811373Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:38:57.811438Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:38:57.811517Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:38:57.811588Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:38:57.811644Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:38:57.811699Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:38:57.812426Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:38:57.964785Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2223:2620] 2026-01-04T02:38:57.965287Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-04T02:38:57.967773Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Describe result: PathErrorUnknown 2026-01-04T02:38:57.967840Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Creating table 2026-01-04T02:38:57.967941Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:38:57.971779Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2627], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:38:57.975677Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2234:2625] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:38:57.979855Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:38:58.045465Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:38:58.063011Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:38:58.063131Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2302:2660], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:38:58.078362Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2328:2683] 2026-01-04T02:38:58.079759Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2328:2683], schemeshard id = 72075186224037897 2026-01-04T02:38:58.416126Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. 
Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:38:58.417984Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2594:2886], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:38:58.420929Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:58.423656Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:38:58.423740Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on create table tx: 2814749767 ... =0;src=[2:2950:3042];cookie=101:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.428354Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;self_id=[2:3637:3474];ev=NActors::IEventHandle;tablet_id=72075186224037912;tx_id=281474976715661;this=136649776523232;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4509;max=18446744073709551615;plan=0;src=[2:2950:3042];cookie=141:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.428667Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;self_id=[2:3647:3480];ev=NActors::IEventHandle;tablet_id=72075186224037911;tx_id=281474976715661;this=136649776522560;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4509;max=18446744073709551615;plan=0;src=[2:2950:3042];cookie=131:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.448207Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;self_id=[2:3648:3481];ev=NActors::IEventHandle;tablet_id=72075186224037910;tx_id=281474976715661;this=136649776517408;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4510;max=18446744073709551615;plan=0;src=[2:2950:3042];cookie=121:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.448666Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;self_id=[2:3649:3482];ev=NActors::IEventHandle;tablet_id=72075186224037909;tx_id=281474976715661;this=136649776459168;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4510;max=18446744073709551615;plan=0;src=[2:2950:3042];cookie=111:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.483188Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037907;self_id=[2:3658:3488];ev=NActors::IEventHandle;tablet_id=72075186224037907;tx_id=281474976715661;this=136649775202528;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4511;max=18446744073709551615;plan=0;src=[2:2950:3042];cookie=91:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.483758Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;self_id=[2:3660:3489];ev=NActors::IEventHandle;tablet_id=72075186224037913;tx_id=281474976715661;this=136649775201408;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4511;max=18446744073709551615;plan=0;src=[2:2950:3042];cookie=151:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.484262Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;self_id=[2:3662:3491];ev=NActors::IEventHandle;tablet_id=72075186224037914;tx_id=281474976715661;this=136649775200064;method=TTxController::StartProposeOnExecute;tx_info=281474976715661:TX_KIND_SCHEMA;min=4511;max=18446744073709551615;plan=0;src=[2:2950:3042];cookie=161:1;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.615188Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.615311Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.615359Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.637709Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.637815Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.637858Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.645302Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.645393Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.645430Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037908;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.651724Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.651799Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.651832Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037912;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.659932Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.660018Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.660046Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037907;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.666985Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.667073Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.667113Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.676128Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.676218Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.676257Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037910;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.683130Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.683218Z node 2 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.683258Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.690650Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.690718Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.690749Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.697780Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.697891Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; 2026-01-04T02:39:02.697931Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037911;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=32;result=not_found; waiting actualization: 0/0.000016s 2026-01-04T02:39:04.850304Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4449:4070] 2026-01-04T02:39:04.868780Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:4418:3708] , Record { OperationId: "\000\000\000\000\025\322An\361.Bp\335\364\002\221" Tables { PathId { OwnerId: 72057594046644480 LocalId: 38 } } Database: "" } 2026-01-04T02:39:04.868862Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: `An.Bp', DatabaseName: `', Types: 2026-01-04T02:39:04.868913Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId: `An.Bp', PathId: [OwnerId: 72057594046644480, LocalPathId: 38], ColumnTags: Answer: 'Analyze sent. OperationId: 00000005ej85qf2bj2e3ez80mh' |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |90.4%| [TA] $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.4%| [TA] {RESULT} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/ycloud/impl/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_schema/unittest >> TColumnShardTestSchema::RebootForgetAfterFail [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2100;fline=native.cpp:71;event=parsing;size=9934960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=6442960;columns=10; WaitEmptyAfter=0;Tiers=;TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=167494877.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147494877.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; WaitEmptyAfter=0;Tiers={{Column=timestamp;EvictAfter=147493677.000000s;Name=cold;Codec=};};TTL={Column=timestamp;EvictAfter=0.000000s;Name=;Codec=}; 2026-01-04T02:37:58.854209Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:37:58.884518Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:37:58.884770Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:37:58.896771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:37:58.897007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:37:58.897231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:37:58.897361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:37:58.897465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:37:58.897603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:37:58.897712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:37:58.897827Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:37:58.897961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:37:58.898085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:37:58.898182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:37:58.898282Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:37:58.898383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:37:58.925364Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:37:58.925511Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:37:58.925570Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:37:58.925747Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:58.925949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:37:58.926038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:37:58.926087Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:37:58.926208Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:37:58.926281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:37:58.926326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:37:58.926358Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:37:58.926524Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:37:58.926599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:37:58.926645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:37:58.926676Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:37:58.926801Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:37:58.926863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:37:58.926904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:37:58.926934Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:37:58.926981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:37:58.927017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:37:58.927045Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:37:58.927094Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:37:58.927133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:37:58.927159Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:37:58.927403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:37:58.927463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:37:58.927495Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:37:58.927627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:37:58.927673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:37:58.927701Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:37:58.927749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:37:58.927809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:37:58.927857Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:37:58.927923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... 
UTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=6; 2026-01-04T02:39:06.382283Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=86; 2026-01-04T02:39:06.382314Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=6369; 2026-01-04T02:39:06.382351Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=6477; 2026-01-04T02:39:06.382394Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=7; 2026-01-04T02:39:06.382520Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=78; 2026-01-04T02:39:06.382559Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=7028; 2026-01-04T02:39:06.382675Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=71; 2026-01-04T02:39:06.382758Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=39; 2026-01-04T02:39:06.382885Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=68; 2026-01-04T02:39:06.382975Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=55; 2026-01-04T02:39:06.386197Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=3173; 2026-01-04T02:39:06.388502Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2238; 2026-01-04T02:39:06.388564Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=9; 2026-01-04T02:39:06.388629Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=14; 2026-01-04T02:39:06.388673Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-04T02:39:06.388733Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=33; 2026-01-04T02:39:06.388779Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-04T02:39:06.388838Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=30; 2026-01-04T02:39:06.388868Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2026-01-04T02:39:06.388910Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=21; 2026-01-04T02:39:06.388959Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=31; 2026-01-04T02:39:06.389096Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=117; 2026-01-04T02:39:06.389126Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=21674; 2026-01-04T02:39:06.389230Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=21099992;raw_bytes=29608900;count=3;records=320000} evicted {blob_bytes=10565848;raw_bytes=16084450;count=1;records=160000} at tablet 9437184 2026-01-04T02:39:06.389332Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1697:3533];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:39:06.389368Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1697:3533];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:39:06.389419Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1697:3533];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:39:06.397100Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1697:3533];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=1; 2026-01-04T02:39:06.397266Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:39:06.397358Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-04T02:39:06.397421Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494035055;tx_id=18446744073709551615;;current_snapshot_ts=1767494280011; 2026-01-04T02:39:06.397471Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:39:06.397524Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:39:06.397565Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:39:06.397645Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:39:06.397898Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.176000s; 2026-01-04T02:39:06.399543Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1697:3533];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:39:06.399784Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1697:3533];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:39:06.399828Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1697:3533];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:39:06.399934Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1697:3533];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-04T02:39:06.399996Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1697:3533];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494035055;tx_id=18446744073709551615;;current_snapshot_ts=1767494280011; 2026-01-04T02:39:06.400044Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1697:3533];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=2;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:39:06.400098Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
TEST_STEP=3;tablet_id=9437184;self_id=[1:1697:3533];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:39:06.400139Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1697:3533];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:39:06.400250Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1697:3533];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:39:06.400658Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1697:3533];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.034000s; 2026-01-04T02:39:06.400697Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: TEST_STEP=3;tablet_id=9437184;self_id=[1:1697:3533];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/cold' stopped at tablet 9437184 160000/10565848 160000/10565848 0/0 160000/10565848 |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] >> OperationMapping::IndexBuildSuccess [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_schema/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest >> TxUsage::The_Transaction_Starts_On_One_Version_And_Ends_On_The_Other [GOOD] Test command err: 2026-01-04T02:33:00.947917Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591328840314537424:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:00.948013Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:33:00.999381Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:33:01.194027Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:33:01.201013Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:33:01.201156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:33:01.222967Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:33:01.310816Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not 
loaded 2026-01-04T02:33:01.313915Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591328840314537394:2081] 1767493980946335 != 1767493980946338 2026-01-04T02:33:01.376648Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/00196a/r3tmp/yandexxbw2cR.tmp 2026-01-04T02:33:01.376688Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/00196a/r3tmp/yandexxbw2cR.tmp 2026-01-04T02:33:01.376930Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/00196a/r3tmp/yandexxbw2cR.tmp 2026-01-04T02:33:01.377060Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:33:01.410046Z INFO: TTestServer started on Port 21610 GrpcPort 63659 2026-01-04T02:33:01.460892Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:63659 2026-01-04T02:33:01.618332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:33:01.635180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:33:01.754798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:01.958339Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:33:04.182356Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328857494407596:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:04.182578Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:04.183106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328857494407625:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:04.183146Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591328857494407626:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:04.183314Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:33:04.202216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:33:04.228723Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591328857494407629:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-04T02:33:04.543100Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591328857494407693:2640] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:33:04.581565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:04.649547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:33:04.748624Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591328857494407708:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:33:04.749216Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NTM0ZTU2MGMtZjkxMTM0YzAtN2FmNjkxMS1kOTA3NjYxMw==, ActorId: [1:7591328857494407594:2329], ActorState: ExecuteState, LegacyTraceId: 01ke3dkqwn7njwqmhcvbcf1c44, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:33:04.751774Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:33:04.782774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. Subcribe to ClusterTracker from [1:7591328861789375282:2820] 2026-01-04T02:33:05.948195Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591328840314537424:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:33:05.948273Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-04T02:33:11.047163Z :WriteToTopic_Demo_11_Table INFO: TTopicSdkTestSetup started 2026-01-04T02:33:11.058248Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-04T02:33:11.069542Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7591328887559179298:2927] connected; active server actors: 1 2026-01-04T02:33:11.069914Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. 
Added partitions [0] 2026-01-04T02:33:11.072831Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-04T02:33:11.072958Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-04T02:33:11.075738Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:33:11.076434Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:33:11.076625Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-04T02:33:11.076670Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:33:11.076705Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72075186224037892] ... PendingWrites: 0 2026-01-04T02:39:05.540748Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:39:05.576986Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:39:05.577024Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.577036Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:05.577055Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.577077Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist 2026-01-04T02:39:05.577426Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-01-04T02:39:05.577446Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.577453Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:05.577459Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:05.577467Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.577473Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.577477Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:05.577484Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-01-04T02:39:05.577492Z node 14 :PERSQUEUE 
DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.577502Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-04T02:39:05.582103Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-01-04T02:39:05.582142Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.582157Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:05.582179Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.582194Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-01-04T02:39:05.640995Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:05.641032Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.641044Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:05.641063Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.641076Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:39:05.677367Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:39:05.677408Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.677424Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:05.677447Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.677463Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist 2026-01-04T02:39:05.677750Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-01-04T02:39:05.677782Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.677794Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:05.677824Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.677835Z node 14 
:PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-01-04T02:39:05.677832Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:05.677853Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.677865Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:05.677893Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.677906Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-04T02:39:05.682452Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-01-04T02:39:05.682501Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.682519Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:05.682545Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.682562Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-01-04T02:39:05.741346Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:05.741390Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.741405Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:05.741429Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.741444Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:39:05.777617Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:39:05.777659Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.777673Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:05.777697Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.777711Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist 2026-01-04T02:39:05.778095Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] 
Process user action and tx events 2026-01-04T02:39:05.778113Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.778122Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:05.778130Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:05.778137Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.778147Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][{1, {1, 281474976715674}, 100000}][StateIdle] Try persist 2026-01-04T02:39:05.778150Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.778162Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:05.778179Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.778191Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-04T02:39:05.782751Z node 14 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx events 2026-01-04T02:39:05.782782Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.782795Z node 14 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:05.782815Z node 14 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:05.782830Z node 14 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][{0, {1, 281474976715674}, 100000}][StateIdle] Try persist |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildCanceled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithInvalidRobotUserPasswordBad [GOOD] Test command err: 2026-01-04T02:38:47.298186Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330331131682697:2216];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:47.298342Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:47.550684Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, 
path: Root/.metadata/script_executions 2026-01-04T02:38:47.578566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:47.578752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:47.629142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:47.631417Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:47.633743Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330331131682506:2081] 1767494327278671 != 1767494327278674 2026-01-04T02:38:47.740769Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:47.740795Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:47.740809Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:47.740946Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:47.826266Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:47.830495Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:47.836296Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:47.836331Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:47.837700Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:23723, port: 23723 2026-01-04T02:38:47.837776Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-04T02:38:47.855878Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:47.898351Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:47.942212Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:38:47.942852Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:38:47.942902Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:47.990234Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:48.038284Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:48.039605Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****A6Rg (4AFCFF3F) () has now valid token of ldapuser@ldap 2026-01-04T02:38:50.190568Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330345921553353:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:50.190668Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:50.219350Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:50.307944Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:50.347680Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:50.347787Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:50.361131Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:50.407797Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:50.407820Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:50.407831Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:50.407918Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:50.424391Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:50.542710Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:50.546133Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:50.546171Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:50.546843Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:22195, port: 22195 2026-01-04T02:38:50.546941Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-04T02:38:50.555784Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:50.598286Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: 
dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:50.642621Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****qcnA (ECC36B99) () has now valid token of ldapuser@ldap 2026-01-04T02:38:53.227891Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591330358575184188:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:53.227960Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:53.242033Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:53.320139Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:53.322234Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591330358575184161:2081] 1767494333227038 != 1767494333227041 2026-01-04T02:38:53.352066Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:53.352133Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:53.357366Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:53.390961Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:53.390984Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:53.390991Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:53.391061Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:53.433313Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:53.621443Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:53.625034Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:53.625064Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:53.625777Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://qqq:12474 ldap://localhost:12474 ldap://localhost:11111, port: 12474 2026-01-04T02:38:53.625880Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-04T02:38:53.639821Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:53.686379Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 
2026-01-04T02:38:53.730106Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:38:53.730715Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:38:53.730761Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:53.778170Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:53.822184Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:53.823292Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****XkGA (9777E58A) () has now valid token of ldapuser@ldap 2026-01-04T02:38:56.544218Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591330371583386185:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:56.544262Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:56.554653Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:56.611659Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:56.653681Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:56.653764Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:56.660794Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:56.710428Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:56.710453Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:56.710461Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:56.710528Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:56.844710Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:56.897948Z node 4 :TICKET_PARSER 
DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:56.901853Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:56.901887Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:56.902645Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:14251, port: 14251 2026-01-04T02:38:56.902751Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-04T02:38:56.910721Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:56.954379Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-01-04T02:38:56.998534Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****W4XA (424647BE) () has now valid token of ldapuser@ldap 2026-01-04T02:39:00.148218Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591330390116864395:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:00.148311Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:00.161346Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:00.230758Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:00.233079Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:00.233169Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:00.233382Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591330390116864368:2081] 1767494340147318 != 1767494340147321 2026-01-04T02:39:00.264490Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:00.307761Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:00.307793Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:00.307802Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:00.307904Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:00.393481Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:39:00.397530Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:00.397564Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:00.398395Z 
node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:27712, port: 27712 2026-01-04T02:39:00.398499Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-04T02:39:00.410683Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:00.459620Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:27712. Invalid credentials 2026-01-04T02:39:00.460329Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****sOAw (6B9E3F16) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldap://localhost:27712. Invalid credentials)' 2026-01-04T02:39:00.461666Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:03.494186Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7591330400302435135:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:03.494298Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:03.508417Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:03.584894Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:03.586983Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7591330400302435108:2081] 1767494343493177 != 1767494343493180 2026-01-04T02:39:03.628321Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:03.628413Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:03.633428Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:03.678431Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:03.678458Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:03.678466Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:03.678542Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:03.691163Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:03.810015Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 
1 2026-01-04T02:39:03.813792Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:03.813860Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:03.814628Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:20525, port: 20525 2026-01-04T02:39:03.814720Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-04T02:39:03.823751Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:03.866401Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:20525. Invalid credentials 2026-01-04T02:39:03.866825Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****kprg (86464322) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldap://localhost:20525. Invalid credentials)' |90.4%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildSuccess [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithoutDatabaseShouldSuccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_nonSecure::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2026-01-04T02:38:46.887315Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330329141043377:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:46.887390Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:47.099053Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:47.099184Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:47.140894Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:47.159317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:47.166437Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330329141043350:2081] 1767494326885757 != 1767494326885760 2026-01-04T02:38:47.179764Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:47.292373Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:47.292395Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:47.292404Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:47.292499Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:47.297595Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:47.405962Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:47.409918Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:47.409969Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:47.410742Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:11013, port: 11013 2026-01-04T02:38:47.411722Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:47.418427Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:47.462206Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:38:47.510893Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****bEww (7968CB6A) () has now valid token of ldapuser@ldap 2026-01-04T02:38:50.238030Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330345558423347:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:50.238309Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:50.250413Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:50.328149Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:50.332310Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330345558423104:2081] 1767494330209229 != 1767494330209232 2026-01-04T02:38:50.373618Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:50.373700Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:50.378694Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:50.464528Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-01-04T02:38:50.464551Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:50.464575Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:50.464632Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:50.517396Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:50.519000Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:50.523123Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:50.523144Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:50.523757Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:29393, port: 29393 2026-01-04T02:38:50.523876Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:50.527728Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:50.570065Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:38:50.570538Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:38:50.570597Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:50.618188Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:50.666171Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:50.666967Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****Z2Cg (DB2E1882) () has now valid token of ldapuser@ldap 2026-01-04T02:38:53.879355Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591330359829335812:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:53.879416Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:53.896584Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:38:53.969256Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:53.970812Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591330359829335787:2081] 1767494333878814 != 1767494333878817 2026-01-04T02:38:53.995560Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:53.995639Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:54.009521Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:54.011325Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:54.053111Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:54.053137Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:54.053144Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:54.053240Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:54.120100Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:54.123365Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:54.123405Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:54.124199Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:29959, port: 29959 2026-01-04T02:38:54.124271Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:54.127534Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:54.174379Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****sAqg (666E1A98) () has now valid token of ldapuser@ldap 2026-01-04T02:38:57.358725Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591330375202268822:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:57.358788Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:57.375483Z node 4 :KQP_PROXY WARN: kqp_finalize_script_s ... 
09 2026-01-04T02:38:57.459320Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:57.495374Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:57.495471Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:57.497510Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:57.565846Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:57.565872Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:57.565879Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:57.565976Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:57.654661Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:57.679736Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:57.683404Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:57.683440Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:57.684063Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://qqq:64299 ldap://localhost:64299 ldap://localhost:11111, port: 64299 2026-01-04T02:38:57.684156Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:57.691203Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:57.738082Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:38:57.738602Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:38:57.738658Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:57.786196Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:57.830156Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:57.830950Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****dJXA (E7F407CE) () has now valid token of ldapuser@ldap 2026-01-04T02:39:00.580480Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591330388982553030:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:00.580544Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:00.592565Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:00.676272Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:00.677554Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:00.677640Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:00.678041Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591330388982553003:2081] 1767494340579377 != 1767494340579380 2026-01-04T02:39:00.691249Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:00.758860Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:00.758892Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:00.758900Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:00.758977Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:00.875749Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:01.001191Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:39:01.005136Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:01.005170Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:01.005781Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:22191, port: 22191 2026-01-04T02:39:01.005887Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:01.009361Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-01-04T02:39:01.050136Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:39:01.050704Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:39:01.050760Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-04T02:39:01.098191Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-04T02:39:01.146199Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-04T02:39:01.147122Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****QNEw (27978333) () has now valid token of ldapuser@ldap 2026-01-04T02:39:03.955174Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7591330402153883467:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:03.955286Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:03.985148Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:04.017605Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7591330402153883437:2081] 1767494343954451 != 1767494343954454 2026-01-04T02:39:04.051168Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:04.087450Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:04.087537Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:04.092626Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:04.122082Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:04.122107Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:04.122114Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:04.122186Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:04.205779Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:39:04.210080Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, 
DomainLoginOnly 1 2026-01-04T02:39:04.210114Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:04.210859Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:4592, port: 4592 2026-01-04T02:39:04.210959Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:04.214484Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2026-01-04T02:39:04.214563Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:4592. Bad search filter 2026-01-04T02:39:04.214756Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****EXhQ (E0206C0B) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:4592. Bad search filter)' 2026-01-04T02:39:04.278780Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildCanceled [GOOD] |90.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldSuccess [GOOD] |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |90.4%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest >> DataStreams::TestStreamTimeRetention [GOOD] >> DataStreams::TestUnsupported >> LdapAuthProviderTest_nonSecure::LdapRefreshGroupsInfoWithError [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetDefaultFilter [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithOneLoginPlaceholder [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithSearchAttribute [GOOD] >> TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] >> OperationMapping::IndexBuildRejected [GOOD] >> DataStreams::TestDeleteStreamWithEnforceFlagFalse [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoWithError [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer >> DataStreams::TestInvalidRetentionCombinations [GOOD] |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> OperationMapping::IndexBuildRejected [GOOD] |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest >> RemoteTopicReader::PassAwayOnCreatingReadSession [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression >> SplitPathTests::WithDatabaseShouldFail [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> 
TLdapUtilsSearchFilterCreatorTest::GetFilterWithFewLoginPlaceholders [GOOD] Test command err: 2026-01-04T02:38:46.986567Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330328155980253:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:46.986636Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:47.249900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:47.252689Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:47.252889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:47.298414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:47.337487Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:47.416152Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:47.416179Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:47.416186Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:47.416273Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:47.490166Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:47.582158Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:47.586411Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:47.586444Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:47.587249Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:8152, port: 8152 2026-01-04T02:38:47.587997Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:47.596351Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:47.642115Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:38:47.642617Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:38:47.642684Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: 
dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:47.690150Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:47.738152Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:47.739253Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****6dFA (0EE9D3B0) () has now valid token of ldapuser@ldap 2026-01-04T02:38:47.992739Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:51.986393Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330328155980253:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:51.986515Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:38:52.991383Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****6dFA (0EE9D3B0) 2026-01-04T02:38:52.991526Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:8152, port: 8152 2026-01-04T02:38:52.991626Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:52.994473Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:52.994769Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:340: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:8152 return no entries 2026-01-04T02:38:52.995023Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****6dFA (0EE9D3B0) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:8152 return no entries)' 2026-01-04T02:38:56.993305Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****6dFA (0EE9D3B0) 2026-01-04T02:38:58.190022Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330381845662490:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:58.190085Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:58.201140Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:58.264417Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:58.265687Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330381845662463:2081] 1767494338189280 != 1767494338189283 2026-01-04T02:38:58.305390Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:58.305481Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:58.307390Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:58.344065Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:58.344090Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:58.344102Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:58.344212Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:58.443251Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:58.566364Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:58.569255Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:58.569287Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:58.569943Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:24274, port: 24274 2026-01-04T02:38:58.570007Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:58.573872Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:58.574205Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:24274. 
Server is busy 2026-01-04T02:38:58.574406Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****frAw (EB404BB6) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:24274. Server is busy)' 2026-01-04T02:38:58.574670Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:58.574686Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:58.575391Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:24274, port: 24274 2026-01-04T02:38:58.575452Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:58.577871Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:58.578239Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:24274. Server is busy 2026-01-04T02:38:58.578392Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****frAw (EB404BB6) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:24274. Server is busy)' 2026-01-04T02:38:59.195890Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:00.193704Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****frAw (EB404BB6) 2026-01-04T02:39:00.193976Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:00.193994Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:00.194804Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:24274, port: 24274 2026-01-04T02:39:00.194866Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:00.197475Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:39:00.197825Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:24274. Server is busy 2026-01-04T02:39:00.198055Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****frAw (EB404BB6) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:24274. 
Server is busy)' 2026-01-04T02:39:03.190468Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330381845662490:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:03.190546Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:39:04.196522Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****frAw (EB404BB6) 2026-01-04T02:39:04.196818Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:04.196839Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:04.197545Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:24274, port: 24274 2026-01-04T02:39:04.197613Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:04.200611Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:39:04.246165Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:39:04.246572Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:39:04.246608Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:04.290125Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:04.334149Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:04.334802Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****frAw (EB404BB6) () has now valid token of ldapuser@ldap |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest |90.5%| [TA] $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataStreams::TestShardPagination [GOOD] |90.5%| [TA] $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestInvalidRetentionCombinations [GOOD] Test command err: 2026-01-04T02:38:57.619251Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330373795752558:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:57.619321Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:57.803001Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:57.826134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:57.826225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:57.895570Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:57.938043Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:57.990650Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:57.990674Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:57.990687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:57.990815Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:58.061636Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:58.175733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:58.221771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:38:58.376606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:38:58.626513Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-04T02:39:01.514419Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591330391847179870:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:01.515990Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:01.539293Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:01.624825Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:01.642940Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:01.643027Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:01.661066Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:01.721651Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:01.721678Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:01.721687Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:01.721768Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:01.749448Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:01.903058Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:01.985951Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:02.140723Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:39:02.299420Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 
2026-01-04T02:39:02.319845Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [4:7591330396142148779:3188], for# user2@builtin, access# DescribeSchema 2026-01-04T02:39:02.326483Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [4:7591330396142148782:3189], for# user2@builtin, access# DescribeSchema 2026-01-04T02:39:02.333032Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:05.444510Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7591330410654561479:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:05.444592Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:05.457008Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:05.543993Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:05.557519Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:05.557605Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:05.574024Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:05.629052Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:05.629082Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:05.629091Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:05.629164Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:05.662166Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:05.807485Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:05.854367Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:06.039078Z node 7 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72)
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 10, code: 500080
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 144, storage 0, code: 500080
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 130048, code: 500080
: Error: write_speed per second in partition must have values from set {131072,524288,1048576}, got 1049600, code: 500080 |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/ut/unittest >> SplitPathTests::WithDatabaseShouldFail [GOOD] |90.5%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::PassAwayOnCreatingReadSession [GOOD] Test command err: 2026-01-04T02:39:06.986795Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330415509641416:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:06.986861Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:07.209226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:07.209340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:07.235310Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:07.236150Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:07.253691Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:07.397727Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:07.434078Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:07.434095Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:07.434099Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:07.434194Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:07.628926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:07.634067Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7591330419804609493:2487] Handshake: worker# [1:7591330419804609491:2485] 2026-01-04T02:39:07.634278Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7591330419804609493:2487] Create read session: session# [1:7591330419804609494:2488] >> LdapAuthProviderTest::CanGetErrorIfAppropriateLoginProviderIsAbsent [GOOD] |90.5%| [TM] {BAZEL_UPLOAD} 
ydb/core/tx/replication/service/ut_topic_reader/unittest >> Worker::Basic [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoGood [GOOD] >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2026-01-04T02:38:46.902243Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330328033562334:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:46.903211Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:46.931258Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:38:47.175934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:47.176079Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:47.178226Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:47.179510Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:47.180533Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330328033562281:2081] 1767494326890233 != 1767494326890236 2026-01-04T02:38:47.193842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:47.378668Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:47.378693Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:47.378700Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:47.378799Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:47.381267Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:47.492662Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:47.493490Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:47.493515Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:47.494868Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:29833, port: 29833 
2026-01-04T02:38:47.494938Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:47.558378Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:47.607288Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:38:47.655975Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****Vf8A (93EC36E9) () has now valid token of ldapuser@ldap 2026-01-04T02:38:50.039970Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330346871133313:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:50.040414Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:50.060828Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:50.132802Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:50.134291Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330346871133291:2081] 1767494330038825 != 1767494330038828 2026-01-04T02:38:50.172876Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:50.172958Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:50.179642Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:50.227289Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:50.227316Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:50.227325Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:50.227385Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:50.334238Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:50.361918Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:50.366099Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:50.366136Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:50.366882Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:9872, 
port: 9872 2026-01-04T02:38:50.366976Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:50.418345Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:50.462600Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****YBqA (7E86EB30) () has now valid token of ldapuser@ldap 2026-01-04T02:38:53.740990Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591330360001335933:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:53.741064Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:53.752405Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:53.818148Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:53.819523Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591330360001335906:2081] 1767494333740013 != 1767494333740016 2026-01-04T02:38:53.864959Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:53.865052Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:53.872851Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:53.901028Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:53.901047Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:53.901066Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:53.901117Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:53.963779Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:53.966460Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:53.966488Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:53.967199Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:25092, port: 25092 2026-01-04T02:38:53.967263Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:54.018347Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:54.066116Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: 
subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:38:54.067041Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:38:54.067116Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:54.114232Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:54.162198Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:54.164091Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****x9XA (6D7D40DC) () has now valid token of ldapuser@ldap 2026-01-04T02:38:54.165897Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:56.709274Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591330370054587895:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:56.709326Z node 4 :METADATA_PROV ... 
4T02:39:00.031137Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:00.032823Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591330382956089722:2081] 1767494339959853 != 1767494339959856 2026-01-04T02:39:00.080549Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:00.080639Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:00.083554Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:00.102477Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:00.102502Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:00.102509Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:00.102591Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:00.153968Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:39:00.156579Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:00.156613Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:00.157342Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://qqq:14800 ldaps://localhost:14800 ldaps://localhost:11111, port: 14800 2026-01-04T02:39:00.157404Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:00.214415Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:39:00.258203Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:39:00.258831Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:39:00.258882Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:00.302275Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:00.350192Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 
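The traversal logged above first issues a single search with the Active Directory matching-rule-in-chain OID (member:1.2.840.113556.1.4.1941:=...) and, when nested groups still need expanding, walks the group tree level by level with an OR of entryDn clauses built from the groups found on the previous pass. A minimal sketch of how those two filter strings could be assembled is shown below; the helper names and structure are assumptions for illustration and do not reflect the actual ldap_auth_provider.cpp internals.

    // Illustrative only: mirrors the two LDAP filter shapes visible in the log above.
    #include <iostream>
    #include <string>
    #include <vector>

    // AD "matching rule in chain" filter: asks the server to expand nested group
    // membership in a single query (the OID seen in the log).
    std::string InChainMemberFilter(const std::string& userDn) {
        return "(member:1.2.840.113556.1.4.1941:=" + userDn + ")";
    }

    // Fallback used for the explicit tree traversal: one OR filter per level,
    // listing the group DNs discovered on the previous level.
    std::string NextLevelFilter(const std::vector<std::string>& groupDns) {
        std::string filter = "(|";
        for (const auto& dn : groupDns) {
            filter += "(entryDn=" + dn + ")";
        }
        filter += ")";
        return filter;
    }

    int main() {
        std::cout << InChainMemberFilter("uid=ldapuser,dc=search,dc=yandex,dc=net") << "\n";
        std::cout << NextLevelFilter({
            "cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net",
            "cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net"}) << "\n";
    }
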
2026-01-04T02:39:00.351409Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****3jGw (78F361DC) () has now valid token of ldapuser@ldap 2026-01-04T02:39:00.352654Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:03.377966Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7591330402985531578:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:03.378095Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:03.390791Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:03.484818Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:03.512570Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:03.512668Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:03.534935Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:03.578615Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:03.578639Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:03.578655Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:03.578856Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:03.589896Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:03.686205Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:39:03.689373Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:03.689415Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:03.690215Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:6436, port: 6436 2026-01-04T02:39:03.690299Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:03.742364Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-01-04T02:39:03.790169Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:39:03.790809Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:39:03.790870Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-04T02:39:03.834220Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-04T02:39:03.882188Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-04T02:39:03.883406Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****N5fA (2757D040) () has now valid token of ldapuser@ldap 2026-01-04T02:39:06.765371Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7591330415871185854:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:06.765448Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:06.778173Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:06.870221Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:06.871292Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7591330415871185827:2081] 1767494346764361 != 1767494346764364 2026-01-04T02:39:06.900716Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:06.900813Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:06.913112Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:06.947967Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:06.947997Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:06.948005Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:06.948102Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:07.000176Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:39:07.003750Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, 
DomainLoginOnly 1 2026-01-04T02:39:07.003782Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:07.004548Z node 7 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:20317, port: 20317 2026-01-04T02:39:07.004631Z node 7 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:07.058385Z node 7 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2026-01-04T02:39:07.058506Z node 7 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:20317. Bad search filter 2026-01-04T02:39:07.059144Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****fiBw (9447AFDA) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldaps://localhost:20317. Bad search filter)' 2026-01-04T02:39:07.067770Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestShardPagination [GOOD] Test command err: 2026-01-04T02:38:57.166830Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330375051928323:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:57.166970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:57.371325Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:57.388809Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:57.388940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:57.424795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:57.463620Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:57.533155Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:57.533181Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:57.533193Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:57.533319Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:57.567218Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed 
to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:57.725087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:57.787133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:38:57.939798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72)
: Error: retention hours and storage megabytes must fit one of: { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, { hours : [0, 24], storage : [0, 0]}, { hours : [0, 168], storage : [51200, 1048576]}, provided values: hours 168, storage 40960, code: 500080 2026-01-04T02:38:58.175170Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:58.224215Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330379346897784:3546] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/stream_TestStreamStorageRetention\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypePersQueueGroup, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:01.063229Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591330390973019658:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:01.063289Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:01.077408Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:01.149219Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:01.167015Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:01.167076Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:01.193345Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:01.240227Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:01.240245Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:01.240250Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:01.240312Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:01.301269Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:01.403089Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:01.499189Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:01.679373Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:39:02.069014Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:06.202760Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7591330414782646700:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:06.202825Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:06.217129Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:06.296681Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:06.314603Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:06.314686Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:06.328826Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:06.379005Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:06.379033Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:06.379040Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:06.379122Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:06.513751Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:06.574650Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:06.697619Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:06.848711Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest::CanGetErrorIfAppropriateLoginProviderIsAbsent [GOOD] Test command err: 2026-01-04T02:38:47.391282Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330330662033131:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:47.391435Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:47.639613Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:47.650845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:47.650961Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:47.713753Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:47.760861Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330330662033000:2081] 1767494327381094 != 1767494327381097 2026-01-04T02:38:47.763352Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:47.804280Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:47.804308Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:47.804323Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:47.804445Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:47.869568Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:47.873612Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:47.873647Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:47.875213Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://unavailablehost:17643, port: 17643 2026-01-04T02:38:47.875287Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-04T02:38:47.879126Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:184: Could not start TLS. Can't contact LDAP server 2026-01-04T02:38:47.879703Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****-uVA (AAB96DB4) () has now retryable error message 'Could not login via LDAP (Could not start TLS. 
Can't contact LDAP server)' 2026-01-04T02:38:47.880043Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:47.880072Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:47.880956Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://unavailablehost:17643, port: 17643 2026-01-04T02:38:47.881050Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-04T02:38:47.884693Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:184: Could not start TLS. Can't contact LDAP server 2026-01-04T02:38:47.884819Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****-uVA (AAB96DB4) () has now retryable error message 'Could not login via LDAP (Could not start TLS. Can't contact LDAP server)' 2026-01-04T02:38:47.921992Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:50.489631Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330345015186732:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:50.489699Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:50.506055Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:50.578963Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:50.580031Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330345015186705:2081] 1767494330488313 != 1767494330488316 2026-01-04T02:38:50.630426Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:50.630527Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:50.639053Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:50.679344Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:50.679360Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:50.679369Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:50.679441Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:50.691930Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:50.784905Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: 
Updated state for /Root keys 1 2026-01-04T02:38:50.785999Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:50.786062Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:50.786787Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****oumQ (65643CB0) () has now permanent error message 'Could not login via LDAP (List of ldap server hosts is empty)' 2026-01-04T02:38:54.086260Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591330362319262258:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:54.086311Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:54.099134Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:54.146981Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:54.210156Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:54.210234Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:54.217613Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:54.236753Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:54.236777Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:54.236788Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:54.236865Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:54.315970Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:54.318775Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:54.318803Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:54.319440Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****WKgQ (C87DFF17) () has now permanent error message 'Could not login via LDAP (Parameter BaseDn is empty)' 2026-01-04T02:38:54.340777Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:57.483413Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591330374017740639:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:57.484153Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:57.496832Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:57.576789Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:57.579014Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591330374017740606:2081] 1767494337482468 != 1767494337482471 2026-01-04T02:38:57.612903Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:57.612977Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:57.615500Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:57.654588Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:57.654613Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:57.654622Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:57.654700Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:57.670688Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path ... 
T_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:57.902222Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:57.903043Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****H--A (8076A573) () has now permanent error message 'Could not login via LDAP (Parameter BindDn is empty)' 2026-01-04T02:39:00.683416Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591330387612504695:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:00.683463Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:00.698360Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:00.752170Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:00.753874Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591330387612504674:2081] 1767494340683085 != 1767494340683088 2026-01-04T02:39:00.796050Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:00.796126Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:00.805333Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:00.845820Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:00.845838Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:00.845844Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:00.845899Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:00.897122Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:39:00.900084Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:00.900115Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:00.900863Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****9jmA (1A074EBE) () has now permanent error message 'Could not login via LDAP (Parameter BindPassword is empty)' 2026-01-04T02:39:00.968839Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:03.880661Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7591330403013784707:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:03.880723Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:03.892014Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:03.952779Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:03.954892Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7591330403013784679:2081] 1767494343880215 != 1767494343880218 2026-01-04T02:39:03.960356Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:03.960435Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:03.982891Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:04.024954Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:04.024986Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:04.024994Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:04.025085Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:04.047913Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:04.209116Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:04.209148Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:04.209210Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:874: CanInitLoginToken, database /Root, login state is not available yet, deffer token (eyJh****Xy4A (9012EACE)) 2026-01-04T02:39:04.886056Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:05.208859Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:39:05.208893Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:1557: Handle deferred tokens for database: /Root 2026-01-04T02:39:05.209179Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:05.209210Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:05.210164Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:26322, port: 26322 
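The permanent errors recorded above ('List of ldap server hosts is empty', 'Parameter BaseDn is empty', 'Parameter BindDn is empty', 'Parameter BindPassword is empty') come from configuration checks that fail before any network call, which is why they are reported as permanent, unlike the retryable 'Could not start TLS. Can't contact LDAP server'. A minimal sketch of such a pre-flight check, assuming a plain settings struct rather than the real configuration type:

    #include <iostream>
    #include <optional>
    #include <string>
    #include <vector>

    struct LdapSettings {
        std::vector<std::string> Hosts;
        std::string BaseDn;
        std::string BindDn;
        std::string BindPassword;
    };

    // Returns the first configuration problem, or nullopt when the settings are
    // complete enough to attempt a bind. These failures are not retryable,
    // unlike network errors such as an unreachable LDAP server.
    std::optional<std::string> ValidateLdapSettings(const LdapSettings& s) {
        if (s.Hosts.empty())        return "List of ldap server hosts is empty";
        if (s.BaseDn.empty())       return "Parameter BaseDn is empty";
        if (s.BindDn.empty())       return "Parameter BindDn is empty";
        if (s.BindPassword.empty()) return "Parameter BindPassword is empty";
        return std::nullopt;
    }

    int main() {
        LdapSettings s{{"ldaps://localhost:26322"}, "dc=search,dc=yandex,dc=net", "", ""};
        if (auto err = ValidateLdapSettings(s)) {
            std::cout << "Could not login via LDAP (" << *err << ")\n";
        }
    }
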
2026-01-04T02:39:05.210282Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:05.266313Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:39:05.314197Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:39:05.314805Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:39:05.314872Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:05.358306Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:05.406231Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:05.407669Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****Xy4A (9012EACE) () has now valid token of ldapuser@ldap 2026-01-04T02:39:06.983706Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7591330415935653321:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:06.983764Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:06.996347Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:07.070267Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:07.071330Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7591330415935653294:2081] 1767494346982827 != 1767494346982830 2026-01-04T02:39:07.096190Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:07.096267Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:07.099411Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:07.131304Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:07.131326Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
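Tickets appear in this log only in masked form, e.g. eyJh****Xy4A (9012EACE): the first and last four characters around '****' plus a short identifier in parentheses, so raw JWTs are never written out. The sketch below reproduces that shape; the exact truncation rules and the hash used for the identifier are assumptions, not taken from the ticket parser source.

    #include <cstdint>
    #include <iomanip>
    #include <iostream>
    #include <sstream>
    #include <string>

    std::string MaskTicket(const std::string& ticket) {
        if (ticket.size() <= 8) {
            return "****";
        }
        // FNV-1a over the full ticket, used here only as a stable short identifier
        // (assumed; the real identifier algorithm may differ).
        std::uint32_t h = 2166136261u;
        for (unsigned char c : ticket) {
            h = (h ^ c) * 16777619u;
        }
        std::ostringstream out;
        out << ticket.substr(0, 4) << "****" << ticket.substr(ticket.size() - 4)
            << " (" << std::hex << std::uppercase
            << std::setw(8) << std::setfill('0') << h << ")";
        return out.str();
    }

    int main() {
        std::cout << MaskTicket("eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9.payload.signature") << "\n";
    }
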
2026-01-04T02:39:07.131346Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:07.131429Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:07.220743Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:07.247290Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:07.247323Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:07.247370Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:874: CanInitLoginToken, database /Root, login state is not available yet, deffer token (eyJh****JGzQ (310C3131)) 2026-01-04T02:39:08.036856Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:10.037749Z node 7 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****JGzQ (310C3131) () has now permanent error message 'Login state is not available' 2026-01-04T02:39:10.037820Z node 7 :TICKET_PARSER TRACE: ticket_parser_impl.h:2352: Finish waiting for login providers for 1 databases: /Root, |90.5%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsFromAdLdapServer [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_worker/unittest >> Worker::Basic [GOOD] Test command err: 2026-01-04T02:39:04.181109Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330404525310776:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:04.181349Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:04.413558Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:04.462476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:04.462612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:04.478467Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:04.547953Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:04.550206Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330404525310663:2081] 1767494344177670 != 1767494344177673 2026-01-04T02:39:04.654780Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:04.770104Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:04.770130Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:04.770137Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:04.770231Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:05.017553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:05.159018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:05.192330Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:05.273046Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7591330408820278983:2610] Handshake: worker# [1:7591330408820278982:2610] 2026-01-04T02:39:05.273143Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7591330408820278984:2610] Handshake: worker# [1:7591330408820278982:2610] 2026-01-04T02:39:05.273445Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7591330408820278984:2610] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:39:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:39:05.274152Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7591330408820278984:2610] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 39] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty 
maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-04T02:39:05.274227Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7591330408820278984:2610] Send handshake: worker# [1:7591330408820278982:2610] 2026-01-04T02:39:05.274287Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:161: [Worker][1:7591330408820278982:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2026-01-04T02:39:05.274299Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:173: [Worker][1:7591330408820278982:2610] Handshake with writer: sender# [1:7591330408820278984:2610] 2026-01-04T02:39:05.282325Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7591330408820278983:2610] Create read session: session# [1:7591330408820278990:2488] 2026-01-04T02:39:05.282401Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:161: [Worker][1:7591330408820278982:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2026-01-04T02:39:05.282416Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:165: [Worker][1:7591330408820278982:2610] Handshake with reader: sender# [1:7591330408820278983:2610] 2026-01-04T02:39:05.282458Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7591330408820278983:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-04T02:39:05.341010Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:85: [RemoteTopicReader][/Root/topic][0][1:7591330408820278983:2610] Handle NKikimr::NReplication::TEvYdbProxy::TEvStartTopicReadingSession { Result: { ReadSessionId: consumer_1_1_10018226407804781593_v1 } } 2026-01-04T02:39:05.346002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:06.698883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330413115246469:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:06.698898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330413115246454:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:06.698969Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330413115246468:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:06.699025Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:06.700189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330413115246475:2365], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:06.700287Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:06.704487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:06.747307Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330413115246476:2686] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:39:06.751306Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330413115246473:2363], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T02:39:06.751306Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330413115246474:2364], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T02:39:06.821160Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330413115246524:2718] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:06.823408Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330413115246530:2723] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:07.608128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:07.948444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called a ... pp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:09.861013Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7591330408820278983:2610] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2026-01-04T02:39:09.847000Z WriteTime: 2026-01-04T02:39:09.852000Z MessageGroupId: producer ProducerId: producer }] } } 2026-01-04T02:39:09.861547Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:230: [Worker][1:7591330408820278982:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2026-01-04T02:39:09.847000Z WriteTime: 2026-01-04T02:39:09.852000Z MessageGroupId: producer ProducerId: producer }] } 2026-01-04T02:39:09.861634Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7591330408820278984:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 0 SeqNo: 1 CreateTime: 2026-01-04T02:39:09.847000Z WriteTime: 2026-01-04T02:39:09.852000Z MessageGroupId: producer ProducerId: producer }] } 2026-01-04T02:39:09.862200Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7591330408820278984:2610] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 0 BodySize: 36 }] } 2026-01-04T02:39:09.862403Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7591330426000149085:2610] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 
2026-01-04T02:39:09.862459Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7591330408820278984:2610] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2026-01-04T02:39:09.862528Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7591330426000149085:2610] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 0 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2026-01-04T02:39:09.865223Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7591330426000149085:2610] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-04T02:39:09.865292Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7591330408820278984:2610] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2026-01-04T02:39:09.865347Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7591330408820278984:2610] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [0] } 2026-01-04T02:39:09.865429Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:189: [Worker][1:7591330408820278982:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-04T02:39:09.865492Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7591330408820278983:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-04T02:39:09.995042Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7591330408820278983:2610] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2026-01-04T02:39:09.989000Z WriteTime: 2026-01-04T02:39:09.990000Z MessageGroupId: producer ProducerId: producer }] } } 2026-01-04T02:39:09.995119Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:230: [Worker][1:7591330408820278982:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2026-01-04T02:39:09.989000Z WriteTime: 2026-01-04T02:39:09.990000Z MessageGroupId: producer ProducerId: producer }] } 2026-01-04T02:39:09.995164Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7591330408820278984:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 1 SeqNo: 2 CreateTime: 2026-01-04T02:39:09.989000Z WriteTime: 2026-01-04T02:39:09.990000Z MessageGroupId: producer ProducerId: producer }] } 2026-01-04T02:39:09.995277Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7591330408820278984:2610] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 36 }] } 2026-01-04T02:39:09.995381Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: 
[TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7591330426000149085:2610] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2026-01-04T02:39:09.996722Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7591330426000149085:2610] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-04T02:39:09.996780Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7591330408820278984:2610] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2026-01-04T02:39:09.996804Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7591330408820278984:2610] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2026-01-04T02:39:09.996833Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:189: [Worker][1:7591330408820278982:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-04T02:39:09.996862Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7591330408820278983:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-04T02:39:10.166269Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7591330408820278983:2610] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2026-01-04T02:39:10.161000Z WriteTime: 2026-01-04T02:39:10.162000Z MessageGroupId: producer ProducerId: producer }] } } 2026-01-04T02:39:10.166327Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:230: [Worker][1:7591330408820278982:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2026-01-04T02:39:10.161000Z WriteTime: 2026-01-04T02:39:10.162000Z MessageGroupId: producer ProducerId: producer }] } 2026-01-04T02:39:10.166377Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7591330408820278984:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: 0 Records [{ Codec: RAW Data: 36b Offset: 2 SeqNo: 3 CreateTime: 2026-01-04T02:39:10.161000Z WriteTime: 2026-01-04T02:39:10.162000Z MessageGroupId: producer ProducerId: producer }] } 2026-01-04T02:39:10.166477Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7591330408820278984:2610] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 36 }] } 2026-01-04T02:39:10.166555Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7591330426000149085:2610] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 36b }] } 2026-01-04T02:39:10.167724Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:39:1][72075186224037890][1:7591330426000149085:2610] Handle 
NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-04T02:39:10.167762Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7591330408820278984:2610] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037890 } 2026-01-04T02:39:10.167782Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 39][1:7591330408820278984:2610] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } 2026-01-04T02:39:10.167808Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:189: [Worker][1:7591330408820278982:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-04T02:39:10.167836Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7591330408820278983:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-04T02:39:10.311841Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:125: [RemoteTopicReader][/Root/topic][0][1:7591330408820278983:2610] Handle NKikimr::NReplication::TEvYdbProxy::TEvTopicReaderGone { Result: { status: UNAVAILABLE, issues: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } } 2026-01-04T02:39:10.311906Z node 1 :REPLICATION_SERVICE INFO: topic_reader.cpp:138: [RemoteTopicReader][/Root/topic][0][1:7591330408820278983:2610] Leave 2026-01-04T02:39:10.312027Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:267: [Worker][1:7591330408820278982:2610] Reader has gone: sender# [1:7591330408820278983:2610]: NKikimr::NReplication::NService::TEvWorker::TEvGone { Status: UNAVAILABLE ErrorDescription: {
: Error: PartitionSessionClosed { Partition session id: 1 Topic: "topic" Partition: 0 Reason: ConnectionLost } } } 2026-01-04T02:39:10.312094Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7591330430295116582:2610] Handshake: worker# [1:7591330408820278982:2610] 2026-01-04T02:39:10.316155Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7591330430295116582:2610] Create read session: session# [1:7591330430295116583:2488] 2026-01-04T02:39:10.316204Z node 1 :REPLICATION_SERVICE DEBUG: worker.cpp:161: [Worker][1:7591330408820278982:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvHandshake 2026-01-04T02:39:10.316218Z node 1 :REPLICATION_SERVICE INFO: worker.cpp:165: [Worker][1:7591330408820278982:2610] Handshake with reader: sender# [1:7591330430295116582:2610] 2026-01-04T02:39:10.316277Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7591330430295116582:2610] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest |90.5%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_worker/unittest |90.5%| [TA] $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.5%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.5%| [TA] {RESULT} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQTestSlow::MediumMsgCompactificationWithRebootsTest >> DataStreams::TestUnsupported [GOOD] >> TPQTestSlow::TestOnDiskStoredSourceIds >> RemoteTopicReader::ReadTopic [GOOD] >> SlowTopicAutopartitioning::CDC_Write |90.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/ut_schema/test-results/unittest/{meta.json ... results_accumulator.log} |90.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/topic/ut/with_direct_read_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TTxDataShardUploadRows::TestUploadRows >> KikimrIcGateway::TestLoadExternalTable >> TTxDataShardUploadRows::RetryUploadRowsToShard >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData >> ReadAttributesUtils::AttributesGatheringFilter [GOOD] >> ReadAttributesUtils::AttributesGatheringRecursive [GOOD] >> ReadAttributesUtils::ReplaceAttributesEmpty [GOOD] >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] >> KikimrIcGateway::TestSecretsExistingValidation-UseSchemaSecrets >> KikimrIcGateway::TestCreateExternalTable >> KikimrProvider::TestFillAuthPropertiesNone >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace >> KikimrProvider::TestFillAuthPropertiesNone [GOOD] >> KikimrProvider::TestFillAuthPropertiesServiceAccount [GOOD] >> KikimrProvider::TestFillAuthPropertiesMdbBasic [GOOD] >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] |90.5%| [TA] $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} |90.5%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/ut_worker/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData_MinWriteBatch [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:38:36.787665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:38:36.787781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:36.787822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:38:36.787859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:38:36.787925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:38:36.787981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:38:36.788053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:36.788139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:38:36.789019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:38:36.789628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:38:36.894855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:36.894925Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:36.912555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:38:36.913016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:38:36.913987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:38:36.937668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:38:36.937993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:38:36.938712Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:36.938952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:38:36.941681Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:36.941859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:38:36.944623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:36.944687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:36.944734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:38:36.945216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:36.945292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:38:36.945993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:38:37.114226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.115353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.115481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.115551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:38:37.115664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.115739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.115862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.115974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.116089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.116171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.116251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.116365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.116477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.116559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:37.116625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 9:11.734769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:483: TBackup TPropose, opId: 102:0 HandleReply TEvOperationPlan, stepId: 5000039, at schemeshard: 72057594046678944 2026-01-04T02:39:11.734877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2026-01-04T02:39:11.735041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:39:11.750065Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:799: [Export] [s3] Bootstrap: self# [1:3864:5826], attempt# 0 2026-01-04T02:39:11.774892Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:453: [Export] [s3] Handle TEvExportScan::TEvReady: self# [1:3864:5826], sender# [1:3863:5823] REQUEST: PUT /metadata.json HTTP/1.1 HEADERS: Host: localhost:4548 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: F1ECC2AD-61A3-428D-BE4B-84F50E856551 amz-sdk-request: attempt=1 content-length: 107 content-md5: dYmH+kf3J0a3vnT3Yvgqzw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /metadata.json / / 107 2026-01-04T02:39:11.782069Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:413: [Export] [s3] HandleMetadata TEvExternalStorage::TEvPutObjectResponse: self# [1:3864:5826], result# PutObjectResult { ETag: 758987fa47f72746b7be74f762f82acf } REQUEST: PUT /scheme.pb HTTP/1.1 HEADERS: Host: localhost:4548 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: BB146324-280A-4FB9-BC6F-CC7B6095AE6A amz-sdk-request: attempt=1 
content-length: 357 content-md5: csvC5nqNTZsSLy4ymlp0/Q== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /scheme.pb / / 357 2026-01-04T02:39:11.788457Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:318: [Export] [s3] HandleScheme TEvExternalStorage::TEvPutObjectResponse: self# [1:3864:5826], result# PutObjectResult { ETag: 72cbc2e67a8d4d9b122f2e329a5a74fd } 2026-01-04T02:39:11.789453Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3863:5823] 2026-01-04T02:39:11.790010Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3864:5826], sender# [1:3863:5823], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-04T02:39:11.791902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:11.791959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:39:11.792239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:11.792283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 REQUEST: PUT /data_00.csv.zst HTTP/1.1 HEADERS: Host: localhost:4548 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 1AEEE393-D963-4F12-B5C6-E44920274415 amz-sdk-request: attempt=1 content-length: 740 content-md5: P/a/uWmNWYxyRT1pAtAE7A== content-type: binary/octet-stream user-agent2026-01-04T02:39:11.792972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 : aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-storage-class: STANDARD S3_MOCK::HttpServeWrite: /data_00.csv.zst / / 740 2026-01-04T02:39:11.793061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:322: TBackup TProposedWaitParts, opId: 102:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:39:11.794184Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:513: [Export] [s3] HandleData TEvExternalStorage::TEvPutObjectResponse: self# [1:3864:5826], result# PutObjectResult { ETag: 3ff6bfb9698d598c72453d6902d004ec } 2026-01-04T02:39:11.794244Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:3864:5826], success# 1, error# , multipart# 0, uploadId# (empty maybe) 2026-01-04T02:39:11.794829Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3863:5823], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-04T02:39:11.804524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 
PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:39:11.804666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:39:11.804733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:39:11.804790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:39:11.804837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:39:11.804942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:39:11.811809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:39:11.838282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 712 RawX2: 4294969994 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-04T02:39:11.838358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:39:11.838556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 712 RawX2: 4294969994 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-04T02:39:11.838685Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 712 RawX2: 4294969994 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-04T02:39:11.838764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:11.838808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:11.838850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:39:11.838896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change 
state for txid 102:0 129 -> 240 2026-01-04T02:39:11.839058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:11.843169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:11.843784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:11.843847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:39:11.843973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:39:11.844014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:11.844058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:39:11.844094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:11.844138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:39:11.844208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:741:2719] message: TxId: 102 2026-01-04T02:39:11.844258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:11.844298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:39:11.844332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:39:11.844461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:39:11.848543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:39:11.848607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3847:5810] TestWaitNotification: OK eventTxId 102 |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::ReplaceAttributesFilter [GOOD] |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> ReadAttributesUtils::AttributesGatheringEmpry [GOOD] |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestUnsupported [GOOD] Test 
command err: 2026-01-04T02:38:56.884239Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330370494084644:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:56.884342Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:57.088160Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:57.113642Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:57.113760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:57.172801Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:57.217113Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:57.270941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:57.270967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:57.270975Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:57.271076Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:57.311571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:57.458802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:57.544796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:38:57.725255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:38:57.864911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:38:57.907913Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:00.790236Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591330387221921175:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:00.790318Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:00.819854Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:00.918119Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:00.934301Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:00.934361Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:00.952302Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:00.996125Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:00.996146Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:00.996152Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:00.996224Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:01.054794Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:01.166924Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:01.281554Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:01.455920Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:39:01.627215Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:39:01.665670Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) encryption_type: NONE records { sequence_number: "0" shard_id: "shard-000000" } records { sequence_number: "1" shard_id: "shard-000000" } records { sequence_number: "2" shard_id: "shard-000000" } records { sequence_number: "3" shard_id: "shard-000000" } records { sequence_number: "4" shard_id: "shard-000000" } records { sequence_number: "5" shard_id: "shard-000000" } records { sequence_number: "6" shard_id: "shard-000000" } records { sequence_number: "7" shard_id: "shard-000000" } records { sequence_number: "8" shard_id: "shard-000000" } records { sequence_number: "9" shard_id: "shard-000000" } records { sequence_number: "10" shard_id: "shard-000000" } records { sequence_number: "11" shard_id: "shard-000000" } records { sequence_number: "12" shard_id: "shard-000000" } records { sequence_number: "13" shard_id: "shard-000000" } records { sequence_number: "14" shard_id: "shard-000000" } records { sequence_number: "15" shard_id: "shard-000000" } records { sequence_number: "16" shard_id: "shard-000000" } records { sequence_number: "17" shard_id: "shard-000000" } records { sequence_number: "18" shard_id: "shard-000000" } records { sequence_number: "19" shard_id: "shard-000000" } records { sequence_number: "20" shard_id: "shard-000000" } records { sequence_number: "21" shard_id: "shard-000000" } records { sequence_number: "22" shard_id: "shard-000000" } records { sequence_number: "23" shard_id: "shard-000000" } records { sequence_number: "24" shard_id: "shard-000000" } records { sequence_number: "25" shard_id: "shard-000000" } records { sequence_number: "26" shard_id: "shard-000000" } records { sequence_number: "27" shard_id: "shard-000000" } records { sequence_number: "28" shard_id: "shard-000000" } records { sequence_number: "29" shard_id: "shard-000000" } 2026-01-04T02:39:01.801207Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; encryption_type: NONE records { sequence_number: "30" shard_id: "shard-000000" } records { sequence_number: "31" shard_id: "shard-000000" } records { sequence_number: "32" shard_id: "shard-000000" } records { sequence_number: "33" shard_id: "shard-000000" } records { sequence_number: "34" shard_id: "shard-000000" } records { sequence_number: "35" shard_id: "shard-000000" } records { sequence_number: "36" shard_id: "shard-000000" } records { sequence_number: "37" shard_id: "shard-000000" } records { sequence_number: "38" shard_id: "shard-000000" } records { sequence_number: "39" shard_id: "shard-000000" } records { sequence_number: "40" shard_id: "shard-000000" } records { sequence_number: "41" shard_id: "shard-000000" } records { sequence_number: "42" shard_id: "shard-000000" } records { sequence_number: "43" shard_id: "shard-000000" } records { sequence_number: "44" shard_id: "shard-000000" } records { sequence_number: "45" shard_id: "shard-000000" } records { sequence_number: 
"46" shard_id: "shard-000000" } records { sequence_number: "47" shard_id: "shard-000000" } records { sequence_number: "48" shard_id: "shard-000000" } records { sequence_number: "49" shard_id: "shard-000000" } records { sequence_number: "50" shard_id: "shard-000000" } records { sequence_number: "51" shard_id: "shard-000000" } records { sequence_number: "52" shard_id: "shard-000000" } records { sequence_number: "53" shard_id: "shard-000000" } records { sequence_number: "54" shard_id: "shard-000000" } records { sequence_number: "55" shard_id: "shard-000000" } records { sequence_number: "56" shard_id: "shard-000000" } records { sequence_number: "57" shard_id: "shard-000000" } records { sequence_number: "58" shard_id: "shard-000000" } records { sequence_number: "59" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "60" shard_id: "shard-000000" } records { sequence_number: "61" shard_id: "shard-000000" } records { sequence_number: "62" shard_id: "shard-000000" } records { sequence_number: "63" shard_id: "shard-000000" } records { sequence_number: "64" shard_id: "shard-000000" } records { sequence_number: "65" shard_id: "shard-000000" } records { sequence_number: "66" shard_id: "shard-000000" } records { sequence_number: "67" shard_id: "shard-000000" } records { sequence_number: "68" shard_id: "shard-000000" } records { sequence_number: "69" shard_id: "shard-000000" } records { sequence_number: "70" shard_id: "shard-000000" } records { sequence_number: "71" shard_id: "shard-000000" } records { sequence_number: "72" shard_id: "shard-000000" } records { sequence_number: "73" shard_id: "shard-000000" } records { sequence_number: "74" shard_id: "shard-000000" } records { sequence_number: "75" shard_id: "shard-000000" } records { sequence_number: "76" shard_id: "shard-000000" } records { sequence_number: "77" shard_id: "shard-000000" } records { sequence_number: "78" shard_id: "shard-000000" } records { sequence_number: "79" shard_id: "shard-000000" } records { sequence_number: "80" shard_id: "shard-000000" } records { sequence_number: "81" shard_id: "shard-000000" } records { sequence_number: "82" shard_id: "shard-000000" } records { sequence_number: "83" shard_id: "shard-000000" } records { sequence_number: "84" shard_id: "shard-000000" } records { sequence_number: "85" shard_id: "shard-000000" } records { sequence_number: "86" shard_id: "shard-000000" } records { sequence_number: "87" shard_id: "shard-000000" } records { sequence_number: "88" shard_id: "shard-000000" } records { sequence_number: "89" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "90" shard_id: "shard-000000" } records { sequence_number: "91" shard_id: "shard-000000" } records { sequence_number: "92" shard_id: "shard-000000" } records { sequence_number: "93" shard_id: "shard-000000" } records { sequence_number: "94" shard_id: "shard-000000" } records { sequence_number: "95" shard_id: "shard-000000" } records { sequence_number: "96" shard_id: "shard-000000" } records { sequence_number: "97" shard_id: "shard-000000" } records { sequence_number: "98" shard_id: "shard-000000" } records { sequence_number: "99" shard_id: "shard-000000" } records { sequence_number: "100" shard_id: "shard-000000" } records { sequence_number: "101" shard_id: "shard-000000" } records { sequence_number: "102" shard_id: "shard-000000" } records { sequence_number: "103" shard_id: "shard-000000" } records { sequence_number: "104" shard_id: "shard-000000" } records { sequence_number: "105" shard_id: 
"shard-000000" } records { sequence_number: "106" shard_id: "shard-000000" } records { sequence_number: "107" shard_id: "shard-000000" } records { sequence_number: "108" shard_id: "shard-000000" } records { sequence_number: "109" shard_id: "shard-000000" } records { sequence_number: "110" shard_id: "shard-000000" } records { sequence_number: "111" shard_id: "shard-000000" } records { sequence_number: "112" shard_id: "shard-000000" } records { sequence_number: "113" shard_id: "shard-000000" } records { sequence_number: "114" shard_id: "shard-000000" } records { sequence_number: "115" shard_id: "shard-000000" } records { sequence_number: "116" shard_id: "shard-000000" } records { sequence_number: "117" shard_id: "shard-000000" } records { sequence_number: "118" shard_id: "shard-000000" } records { sequence_number: "119" shard_id: "shard-000000" } encryption_type: NONE records { sequence_number: "120" shard_id: "shard-000000" } records { sequence_number: "121" shard_id: "shard-000000" } records { sequence_number: "122" shard_id: "shard-000000" } records { sequence_number: "123" shard_id: "shard-000000" } records { sequence_number: "124" shard_id: "shard-000000" } records { sequence_number: "125" shard_id: "shard-000000" } records { sequence_number: "126" shard_id: "shard-000000" } records { sequence_number: "127" shard_id: "shard-000000" } records { sequence_number: "128" shard_id: "shard-000000" } records { sequence_number: "129" shard_id: "shard-000000" } records { sequence_number: "130" shard_id: "shard-000000" } records { sequence_number: "131" shard_id: "shard-000000" } records { sequence_number: "132" shard_id: "shard-000000" } records { sequence_number: "133" shard_id: "shard-000000" } records { sequence_number: "134" shard_id: "shard-000000" } records { sequence_number: "135" shard_id: "shard-000000" } records { sequence_number: "136" shard_id: "shard-000000" } records { sequence_number: "137" shard_id: "shard-000000" } records { sequence_number: "138" shard_id: "shard-000000" } records { sequence_number: "139" shard_id: "shard-000000" } records { sequence_number: "140" shard_id: "shard-000000" } records { sequence_number: "141" shard_id: "shard-000000" } records { sequence_number: "142" shard_id: "shard-000000" } records { sequence_number: "143" shard_id: "shard-000000" } records { sequence_number: "144" shard_id: "shard-000000" } records { sequence_number: "145" shard_id: "shard-000000" } records { sequence_number: "146" shard_id: "shard-000000" } records { sequence_number: "147" shard_id: "shard-000000" } records { sequence_number: "148" shard_id: "shard-000000" } records { sequence_number: "149" shard_id: "shard-000000" } 2026-01-04T02:39:05.790161Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591330387221921175:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:05.790228Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:39:09.137051Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7591330428597155855:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:09.137131Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:09.155482Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:09.245141Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:09.274463Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:09.274561Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:09.301177Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:09.324647Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:09.360117Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:09.360148Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:09.360156Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:09.360231Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:09.612082Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:09.661478Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:09.822157Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) |90.5%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_topic_reader/unittest >> RemoteTopicReader::ReadTopic [GOOD] Test command err: 2026-01-04T02:39:06.015052Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330412012997483:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:06.016083Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:06.245456Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:06.304209Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:06.304308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:06.315856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:06.389777Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:06.390828Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330412012997451:2081] 1767494346013021 != 1767494346013024 2026-01-04T02:39:06.479141Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:06.600714Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:06.600745Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:06.600751Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:06.600825Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:06.870217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:07.021778Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:07.084694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:08.646060Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330420602933168:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:08.646061Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330420602933169:2354], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:08.646123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330420602933154:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:08.646395Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:08.646635Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330420602933177:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:08.646683Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:08.650823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:08.691149Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330420602933174:2356], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-04T02:39:08.696136Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330420602933176:2631] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:39:08.696638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:39:08.696809Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330420602933173:2355], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-04T02:39:08.789149Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330420602933224:2662] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:08.798586Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330420602933242:2670] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:09.539440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:09.949137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:10.287976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:39:10.575201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:39:11.116129Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330412012997483:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:11.116270Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:39:11.194629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:11.891316Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7591330433487835784:2957] Handshake: worker# [1:7591330412012998275:2487] 2026-01-04T02:39:11.896615Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7591330433487835784:2957] Create read session: session# [1:7591330433487835785:2486] 2026-01-04T02:39:11.896877Z node 1 
:REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7591330433487835784:2957] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-04T02:39:11.908156Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:85: [RemoteTopicReader][/Root/topic][0][1:7591330433487835784:2957] Handle NKikimr::NReplication::TEvYdbProxy::TEvStartTopicReadingSession { Result: { ReadSessionId: consumer_1_1_15693059550918974840_v1 } } 2026-01-04T02:39:11.913883Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7591330433487835784:2957] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 0 SeqNo: 1 CreateTime: 2026-01-04T02:39:11.788000Z WriteTime: 2026-01-04T02:39:11.791000Z MessageGroupId: producer ProducerId: producer }] } } 2026-01-04T02:39:11.914781Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7591330433487835784:2957] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-04T02:39:11.968599Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7591330433487835784:2957] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2026-01-04T02:39:11.962000Z WriteTime: 2026-01-04T02:39:11.963000Z MessageGroupId: producer ProducerId: producer }] } } 2026-01-04T02:39:12.065165Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:33: [RemoteTopicReader][/Root/topic][0][1:7591330437782803174:2989] Handshake: worker# [1:7591330412012998275:2487] 2026-01-04T02:39:12.071715Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:43: [RemoteTopicReader][/Root/topic][0][1:7591330437782803174:2989] Create read session: session# [1:7591330437782803175:2486] 2026-01-04T02:39:12.071964Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:54: [RemoteTopicReader][/Root/topic][0][1:7591330437782803174:2989] Handle NKikimr::NReplication::NService::TEvWorker::TEvPoll { SkipCommit: 0 } 2026-01-04T02:39:12.077298Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:85: [RemoteTopicReader][/Root/topic][0][1:7591330437782803174:2989] Handle NKikimr::NReplication::TEvYdbProxy::TEvStartTopicReadingSession { Result: { ReadSessionId: consumer_1_2_2895657818219108695_v1 } } 2026-01-04T02:39:12.079203Z node 1 :REPLICATION_SERVICE DEBUG: topic_reader.cpp:64: [RemoteTopicReader][/Root/topic][0][1:7591330437782803174:2989] Handle NKikimr::NReplication::TEvYdbProxy::TEvReadTopicResponse { Result: { PartitionId: 0 Messages [{ Codec: RAW Data: 9b Offset: 1 SeqNo: 2 CreateTime: 2026-01-04T02:39:11.962000Z WriteTime: 2026-01-04T02:39:11.963000Z MessageGroupId: producer ProducerId: producer }] } } |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_topic_reader/unittest |90.5%| [TA] $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} |90.5%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... results_accumulator.log} |90.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/ut_topic_reader/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsDisableRequestToAD [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] >> KikimrIcGateway::TestDropExternalTable >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists >> TExternalTableTest::ReadOnlyMode >> TExternalTableTest::ParallelCreateSameExternalTable >> KikimrIcGateway::TestCreateExternalTable [GOOD] >> KikimrIcGateway::TestCreateSameExternalTable >> YdbProxy::ReadTopic [GOOD] >> YdbProxy::ReadNonExistentTopic ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Zstd] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:38:39.433419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:38:39.433527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:39.433575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:38:39.433614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:38:39.433655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:38:39.433689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:38:39.433782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:39.433888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:38:39.434811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:38:39.435068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:38:39.509507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 
2026-01-04T02:38:39.509578Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:39.524414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:38:39.524630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:38:39.524786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:38:39.533075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:38:39.533266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:38:39.533921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:39.534197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:38:39.537191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:39.537374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:38:39.538581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:39.538643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:39.538786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:38:39.538839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:39.538884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:38:39.539102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:38:39.692014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.693100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.693215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.693287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.693378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.693466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.693559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.693647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.693734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.693824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.693901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.694004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.694106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.694186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:39.694247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:20190 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 627D394A-1111-4D91-971E-F5B1357594DD amz-sdk-request: attempt=1 content-length: 55 content-md5: B5SOCmjwb1RI3tHamcoRHA== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=100&uploadId=1 / 55 2026-01-04T02:39:14.793954Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:604: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3863:5825], result# UploadPartResult { ETag: 07948e0a68f06f5448ded1da99ca111c } 2026-01-04T02:39:14.794157Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3862:5824] 2026-01-04T02:39:14.794234Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3863:5825], sender# [1:3862:5824], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv.zst?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:20190 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: DBF1397F-5836-4A51-8853-308AC688C568 amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv.zst / partNumber=101&uploadId=1 / 0 2026-01-04T02:39:14.797024Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:604: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3863:5825], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2026-01-04T02:39:14.797075Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:3863:5825], success# 1, error# , multipart# 1, uploadId# 1 2026-01-04T02:39:14.802142Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:538: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3863:5825], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[f8f51a1e4a70db44fa91cc2ab9680824,9eba675fd7f187274786dff2f47292df,921325fb6b8811df3d06a44dbe1f8523,4eeb6b90e8e61075275bd8a42f56bd69,2840a487abe8cb9502b3d9c8a8e1c942,607d8f6e3b235a360d63796efd3a51c2,ed22e08df7fb8840f7cabc779cc86885,efeff2c7731061edd9a39059cc078045,4af01cb3455932f28e3bba713dcd57c9,dc94d36ecf3b36d183d75c84b9b2fac6,e2ce425dd2bb582abcc13d0d714c3554,b71e46686939d2cdf046520dd2774281,ab731a82a161e5e044b24e895a1713d6,1df51aaec89711e13a6f95c13113e36c,b6066b2ed343831b1b0ee0076179981e,332d34d77adc2b024a33d87e07d4233f,cf0093cc99590a0e8f9c199ed6deca07,8cc923ec76224e69263ac93b7bfabd30,690d66897e0780f2dfe3614e5a659a22,7502aae0ec253663b1cbfdc8ede92ab9,7d2c6f728ee0c12097dfe5441970b946,5fc7b9b675e0a125eea67cf05f82627f,fc8c5faa99cc7f4ce7ca320f8e7adb58,8e305c5aca758683ff25407a7bbd9220,181bce9c6393e22a0ac359a7b45d8187,639677548f0a8b776a6db92f44d96505,390ff8f57cfa4c04bfbed0d7a63c90e8,3dd76756e6558fd6c8c918210f7dc136,a3f5254fdad3ded54edef910e704c151,e9186373f80dbaa55dd04d07621de277,8898b965060a431b499261ec0cd3cee3,3ed51c736e64defe04980ce328b17aa4,bb0e45971888796588c12ea1c1bec162,e2b3defa84005d3892986ca6894b811f,656c7c809c8c8485f6e91892591cd284,779c6827126f255bde25ae242bf4c8ff,8883fc9b073e683558f1231c5f2142d0,19390a0e3340bcb6ccfe866a790f05cb,305182d3e9745fba3aad1973bb1bfc93,002819d72a6dc7954ecc1bcd2bd20254,325c6bc3cdd6fd83083cf0126c606218,b86932903843b9626e80bd9ccb5d0571,b5054116537a7c467bdb488c9d67dee7,fc3a45bd17a00b147e4f9c55bc2493da,1118e2f41e8839211163250796a65dce,b403ff17c2c269a79201a03ce439dc2a,88f2692ee439cfadef1cd21d58aac8d3,e5bef12f89b101af84d52299a5867d99,ed613335180c53f69d450ef8b176a4d5,150fd7dcdc86eb38c7f821ff4698d8bc,a0c18bf08acc6ebecac04a2520efee9b,e8463d7ce8f502d1575a433c1b30a9af,f123e0fc879e2fdc2c3e2f698fc4176d,d7ab79d73e4648e0a2bf8dec3a19c019,4e74b82f6a8ea7fad8790ee7dfcdb76e,f72bb1d8aa0f5c9265bae10a3784d8e8,924b317371d16363a37962b17a2ae4bb,7214b458c7e25c791e54bd430b835a6e,e79dba1b56122372af3fe7b06ea91bda,6aae345b94d78fc7c1ed0b8697cf5e62,fd3636ed699facb5f0c12f81741cabc5,2c4a198408c3eb9577fcd339ca62c539,59fbf761f9b7574b65fa6877b167bb8c,14f9f5cfdf3a6c33c577a54429b19cb6,c6d078b3be9cd7943e8145fd982baeef,198f55ae25539fbd54a4a6075beac2d1,939123b44e362c76a151a85af0247fb7,0147f8bd741be7780cbc900b6f4b0899,43453200aeaf201420737354cd73cfe4,de26d1339779fe0c538d01d5963fd423,5c903650e719f959dc9f37ea360c6319,23607b3f36e0a2abae7f1ed8e38596f3,0db9af920c6d1cf868e470bf7a349747,aed6ac19c60d08500582eea9dadcdfee,3f4e37ddd3e2e56a725323fad4d85cf6,942b269af420b4277d025cea489dcb25,89eddc25ba615b6cf09b9cd9a11a16bb,1d8e7f0613dc1919ee90133c468380bd,8bf1e4c1266d8437c1bd85e0fca6640a,e9eabcf5b61cf257f530b156dbd77a88,411f1661ae7650d2144e8c6f8a33b28f,6706ec5b8771e555779d5cbeca41aa75,b3a33ef21a8224ddc78a52e8d7ca8357,58749d344f42c192e572eda4ee66fb01,381aeb5ee3014e2c0fd9b85bd59ce005,9aed2297cd10dce10d68de3ff1830b42,be88e095fc3a13708b714db03b1f2744,5628e81ee17fb22fc828ed1b2169578b,a1cfb563fa4af884fe02ced05c26c881,fc602b8ee2e9746fb52823f8fd1f0f28,a1de256e94c7baa9b8ab905c892d1a14,6bff895b0b5f3552ad4bdc61b0d24148,fcba1d258a8651d831767b42e010e439,bef6e3d7088e671809fe584531f96971,f0b489242271d11200dbdbc78e4ce715,372d2d6877fff7c04433e492ad4dbd45,32191cf1972dcccd59c0b5a8b53d4f23,25928b7997b97ac58f18fbbe589573e8,472e53a27497661c6400410909405c4e,07948e0a68f06f5448ded1da99ca111c,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv.zst?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:20190 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
5A7E3C2B-399D-4CC4-88A9-CB464C47ABC5 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv.zst / uploadId=1 2026-01-04T02:39:14.812372Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:635: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3863:5825], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv.zst ETag: c902b621cdd1ee89b9f1c4e6c36e6e45 } 2026-01-04T02:39:14.812730Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3862:5824], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-04T02:39:14.823698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-04T02:39:14.823791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:39:14.824024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-04T02:39:14.824132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-04T02:39:14.824204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:14.824247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:14.824306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:39:14.824365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-04T02:39:14.824535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:14.828909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:14.829595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:14.829657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:39:14.829782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:39:14.829836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:14.829875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:39:14.829908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:14.829945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:39:14.830015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:743:2721] message: TxId: 102 2026-01-04T02:39:14.830063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:14.830102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:39:14.830139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:39:14.830261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:39:14.834033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:39:14.834092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3848:5811] TestWaitNotification: OK eventTxId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_backup/unittest >> TBackupTests::ShouldSucceedOnLargeData[Raw] [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:38:39.845090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:38:39.845173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:39.845216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:38:39.845254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:38:39.845291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:38:39.845322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:38:39.845407Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:38:39.845511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:38:39.846242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:38:39.846471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:38:39.940103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:38:39.940165Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:39.955046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:38:39.955235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:38:39.955416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:38:39.963668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:38:39.963878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:38:39.964619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:38:39.964848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:38:39.967699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:39.967870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:38:39.968983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:38:39.969039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:38:39.969186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:38:39.969241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to 
make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:38:39.969291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:38:39.969514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:38:40.121840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.122855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.122980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.123079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.123187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.123292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.123403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.123492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.123582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.123656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.123732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.123832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.123970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:38:40.124059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:38:40.124126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... [1:3862:5824], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 0 Checksum: } REQUEST: PUT /data_00.csv?partNumber=100&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:9268 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 00E3A869-E98F-4C09-9F3F-3C6E6CBB0DB0 amz-sdk-request: attempt=1 content-length: 130 content-md5: Wyd1w7MZYbbZucaVvuRDAw== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=100&uploadId=1 / 130 2026-01-04T02:39:14.787179Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:604: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3863:5825], result# UploadPartResult { ETag: 5b2775c3b31961b6d9b9c695bee44303 } 2026-01-04T02:39:14.787420Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:130: [Export] [scanner] Handle TEvExportScan::TEvFeed: self# [1:3862:5824] 2026-01-04T02:39:14.787525Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:471: [Export] [s3] Handle TEvExportScan::TEvBuffer: self# [1:3863:5825], sender# [1:3862:5824], msg# NKikimr::NDataShard::TEvExportScan::TEvBuffer { Last: 1 Checksum: } REQUEST: PUT /data_00.csv?partNumber=101&uploadId=1 HTTP/1.1 HEADERS: Host: localhost:9268 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: C4CDF29C-8C7F-40EC-B15C-09A9044721FA amz-sdk-request: attempt=1 content-length: 0 content-md5: 1B2M2Y8AsgTpgAmY7PhCfg== content-type: binary/octet-stream user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 S3_MOCK::HttpServeWrite: /data_00.csv / partNumber=101&uploadId=1 / 0 2026-01-04T02:39:14.790625Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:604: [Export] [s3] Handle TEvExternalStorage::TEvUploadPartResponse: self# [1:3863:5825], result# UploadPartResult { ETag: d41d8cd98f00b204e9800998ecf8427e } 2026-01-04T02:39:14.790683Z node 1 :DATASHARD_BACKUP INFO: export_s3_uploader.cpp:716: [Export] [s3] Finish: self# [1:3863:5825], success# 1, error# , multipart# 1, uploadId# 1 2026-01-04T02:39:14.796925Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:538: [Export] [s3] Handle TEvDataShard::TEvS3Upload: self# [1:3863:5825], upload# { Id: 1 Status: Complete Error: (empty maybe) Parts: 
[a59dd9a97cf3685e69093fb2d96653c6,bdbb215613239cb3a835fee1fe7e7ca3,cb38dbc776d5763f1926dfb22d508c87,3c430d66d07a0a4b1fa889f321fce197,43baf91083f286b60bf15e7786459cd9,90b5581bef612fa3bf9b38b336af405f,fd4869c26a12d22ee79256d778954d04,a9459bc28198b0b6bd67732c492fd740,697a3f8386ea1ff4e327de943224cb1a,614da0b4ec9464e69cd0c59909e80fbb,9b94eb3f67aa4c8a0bcbf546833ed966,fd45c3afacec641ad19e59d2b31aeba4,fd69678aecbc149601f58cf13c64d33e,90c09ab4923bc9f97f825d36e32bf362,c1586416a281a4cca2b2b4e333d9b079,f31908576272623f9f0a19bf774cde8e,6fe3b42388304d2af07c629aeb683581,7bc90eec21ca5bb3648e6a48e83c5730,8e1dda26de1af89bdffe2eefdcebea1d,14dc42d90caa1575bbfffa9dc8f21d66,92efb2368eecb32d4075c09294fde0b7,98efff5f7c7ecb42e7af65142ce05af9,6206c81807b3b9283b0173ee2c682100,616b431b91aedc9de4593321eb42ba96,9ae4762563ffdec596cc9ca4cb8913e1,946ebf2d95b4796ea2faee21f017be79,45834a9948bb4ab8b62d1894156d13ed,6ad3fe7286856927c1e00422bc8da697,ef89464d20eae46829e1bf557e4d04ce,f128e5de32097d205453080b01c94ac3,c13e650ee2cfcecfdf4f578a2e5b1c2d,fc26314711b25d20fc654cf59301b806,56f6f2c574fba86496a87a7dd5fab46c,c7951eace72cfe0f14f808173e07bc64,3d9ad3340e58b973eaf8d4f14ba3b0f9,fc41d6fdfb52389dda8b26d7a0a3a889,9974b6ae96ffd0b756acb67088e890f9,cde8a5604010abe8fccfa9492144036f,0364e048eaac35c26d48b0c5072b5255,aac5a84927124d6ae4931e2650c80d9f,eab068fe4ca35c2f3e35890bd727eb4f,bc3646bdbcbc7f97dcddf2202ea9421f,6d3f63d672eda4a4617c9e7589a68bfc,0401bade6c3031b5be872238520b993a,1c6405688f86423480173e3e316a20bd,52395f68e877cbb8d7115a247331b0a7,4b0673ac18058554d2c53bf9f99b34b2,87bc1b9e650b31e81a9ad2531e3ef9da,b29053c8cd093c8b92ad3954c42cb7be,faf1084f6b33b00e2e822d1d3c3f0083,eedec03ee8d7eda4654db7206ad0889e,be4469dd028d5519a67098055f25513f,a7afa9827ec27c565cff1ed505a06f4b,91fe8109d2ad934c4364d90c29aaba71,73b81ea00e11db12d66497d30eb48446,cce69ef69777afeab34eefa515abc7f4,4e4ac1a421353964356400b8be8e21da,32cd6083b12660bcd4062af08d89eb05,71957b9db37811c7680638b82dc6384b,a8787e692c423a2dfa07dd261e72790a,283838ab16206b27738ea6653110f833,88bf084fb3029f0d5c0705eece930d70,1ed2f9f7221f1718b81fdf2d846347dd,406706cfbc454922dcad50b9c534b8d1,dbb606c993d798974ed4f5c9ebf195ca,1a4a3868dc6fa26c6b019d237f9ea6f4,82660a3c6b576a1b3fea925f3c179a2e,d393db2749ae42e854e85eeec2ea3592,b42c92ad14ee0e5351fec7e5a045a91b,2c7af27f9dc77efbcbe71c2d7997d6e9,278aba62ab1d9e3ff16df2d82ac5f5c7,6b8380404a7e7ec95ad5f3941d5d404c,c9813b9fc1d6b5087e64849076edd0f8,160785e4dac02a91c43a497ee59eea06,db529a9ba22f60f404031cfe85e966e9,9b70af168e2d3769bd8bc4dffa3202ea,9ac39c3843b6621ace44acf430a59e06,4603ff564a46e93951f246ed18926071,66b85f35ee76a7f71f50e9aad56758de,1665c284ad04d6b893b69372bf8fc6b9,8c1c27ec88fb52f06de6e7516a392672,0a5f992db51277a05ec12f0d6459ef21,8debe3a6023155561cb0890fc05bd7fb,938ece258b7596f8eea7e82bc2b8f88c,767ca0dcf0b154fa3c818044bbfc58fd,914cc7165d994bb05824332ac120446f,ab0ece250f5959a510170ee07aa21b5d,8bf4b44d67f062026b0010a8a0b39cc0,e0aa13fa8246e68c18905d3abadfc44d,27b021b75b6a95f63ea27f7ec238c05f,673e661e4cfea1e431678dd9881c2a8c,f101b34943f1831ae8c0b46ffcb1c2d6,562b32a8142b29c1a88e507ab1981a6b,fdea4c6fc2befb44614992ca8bf34b21,b7c8ec6acc45b037978482996e910b75,aec72fbd2e171b798900b22897d00941,710ef5b5e8eba750b6acc9b32dff42a3,821c7e22ef9c22098171e7f837dcfcc8,aecc9f6d0e6f54e938a10d40fda96d7b,5b2775c3b31961b6d9b9c695bee44303,d41d8cd98f00b204e9800998ecf8427e] } REQUEST: POST /data_00.csv?uploadId=1 HTTP/1.1 HEADERS: Host: localhost:9268 Accept: */* Connection: Upgrade, HTTP2-Settings Upgrade: h2c HTTP2-Settings: AAMAAABkAAQAoAAAAAIAAAAA amz-sdk-invocation-id: 
AF5B31B3-8190-4292-9A47-6049D40C64E3 amz-sdk-request: attempt=1 content-length: 11529 content-type: application/xml user-agent: aws-sdk-cpp/1.11.37 Linux/5.15.0-157-generic x86_64 Clang/20.1.8 x-amz-api-version: 2006-03-01 S3_MOCK::HttpServeAction: 4 / /data_00.csv / uploadId=1 2026-01-04T02:39:14.811998Z node 1 :DATASHARD_BACKUP DEBUG: export_s3_uploader.cpp:635: [Export] [s3] Handle TEvExternalStorage::TEvCompleteMultipartUploadResponse: self# [1:3863:5825], result# CompleteMultipartUploadResult { Bucket: Key: data_00.csv ETag: 5d8c28efc812b445ddd02900ff3ee599 } 2026-01-04T02:39:14.812306Z node 1 :DATASHARD_BACKUP DEBUG: export_scan.cpp:144: [Export] [scanner] Handle TEvExportScan::TEvFinish: self# [1:3862:5824], msg# NKikimr::NDataShard::TEvExportScan::TEvFinish { Success: 1 Error: } 2026-01-04T02:39:14.823252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-04T02:39:14.823316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:39:14.823490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-04T02:39:14.823574Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:297: TBackup TProposedWaitParts, opId: 102:0 HandleReply TEvSchemaChanged at tablet# 72057594046678944 message# Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 OpResult { Success: true Explain: "" BytesProcessed: 10000 RowsProcessed: 1000 } 2026-01-04T02:39:14.823656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:14.823706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:14.823751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:39:14.823788Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-04T02:39:14.823961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_backup_restore_common.h:127: Unable to make a bill: kind# TBackup, opId# 102:0, reason# domain is not a serverless db, domain# /MyRoot, domainPathId# [OwnerId: 72057594046678944, LocalPathId: 1], IsDomainSchemeShard: 1, ParentDomainId: [OwnerId: 72057594046678944, LocalPathId: 1], ResourcesDomainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:14.827383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:14.827907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:14.827958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:39:14.828064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:39:14.828109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:14.828146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:39:14.828174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:14.828201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:39:14.828268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:743:2721] message: TxId: 102 2026-01-04T02:39:14.828311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:14.828340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:39:14.828366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:39:14.828460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:39:14.831785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:39:14.831838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:3848:5811] TestWaitNotification: OK eventTxId 102 |90.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_backup/unittest >> TPQTestSlow::MediumMsgCompactificationWithRebootsTest [GOOD] >> TExternalTableTest::DropExternalTable |90.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::MediumMsgCompactificationWithRebootsTest [GOOD] Test command err: 2026-01-04T02:39:13.235700Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-04T02:39:13.348624Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:13.349707Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:13.349854Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:13.349942Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-04T02:39:13.374722Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2026-01-04T02:39:13.374859Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:39:13.399847Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2026-01-04T02:39:13.400114Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:13.401698Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 1000 MaxSizeInPartition: 104857600 LifetimeSeconds: 10000 LowWatermark: 100 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } 
AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "__ydb_compaction_consumer" ReadFromTimestampsMs: 0 Generation: 1 Important: true } EnableCompactification: true 2026-01-04T02:39:13.401870Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:39:13.401949Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:39:13.402453Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:39:13.402815Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-04T02:39:13.406092Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:39:13.406168Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2026-01-04T02:39:13.406216Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-04T02:39:13.406285Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:39:13.408037Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:39:13.409548Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-04T02:39:13.409599Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:13.409643Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:13.409703Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:39:13.409746Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:13.409822Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:13.409923Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-01-04T02:39:13.409976Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-01-04T02:39:13.410017Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:39:13.410057Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:39:13.410099Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:13.410361Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:39:13.410403Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user __ydb_compaction_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2026-01-04T02:39:13.411122Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:39:13.411358Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:39:13.411615Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-04T02:39:13.413741Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:39:13.413789Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 
2026-01-04T02:39:13.413851Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2026-01-04T02:39:13.413892Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:39:13.415084Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:39:13.416268Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. Count 0 2026-01-04T02:39:13.416310Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:39:13.416358Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:13.416402Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:39:13.416431Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:13.416470Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:13.416524Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2026-01-04T02:39:13.416572Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2026-01-04T02:39:13.416604Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:39:13.416633Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-04T02:39:13.416667Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-04T02:39:13.416828Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:39:13.416881Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user __ydb_compaction_consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 1 2026-01-04T02:39:13.416963Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:39:13.417185Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-04T02:39:13.417378Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:13.417585Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:39:13.417694Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937 ... ot data from cache. Partition 0 offset 6 partno 0 count 1 parts_count 10 source 1 size 5243650 accessed 2 times before, last time 1970-01-01T00:00:00.000000Z 2026-01-04T02:39:16.588392Z node 3 :PERSQUEUE DEBUG: read.h:126: [72057594037927937][PQCacheProxy]Reading cookie 17. All 2 blobs are from cache. 2026-01-04T02:39:16.588426Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 2 blobs 2026-01-04T02:39:16.588514Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588545Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588563Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588581Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588608Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 4 totakecount 2 count 1 size 635356 from pos 0 cbcount 2 2026-01-04T02:39:16.588637Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588655Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588674Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588690Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588708Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588725Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588744Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588763Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588782Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588799Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 5 totakecount 2 count 1 size 123358 from pos 0 cbcount 1 2026-01-04T02:39:16.588841Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 
2026-01-04T02:39:16.588866Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588886Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588904Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588922Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588939Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588958Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588978Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.588994Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.589012Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 0 size 512005 from pos 0 cbcount 1 2026-01-04T02:39:16.589032Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 6 totakecount 1 count 1 size 123358 from pos 0 cbcount 1 2026-01-04T02:39:16.589101Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 4 partno 6 count 2 parts 14 suffix '0' 2026-01-04T02:39:16.589130Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72057594037927937' partition 0 offset 6 partno 0 count 1 parts 10 suffix '0' 2026-01-04T02:39:16.589177Z node 3 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2026-01-04T02:39:16.589198Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2026-01-04T02:39:16.589222Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 4:6 isTruncatedBlob 0 2026-01-04T02:39:16.594509Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 4:6 res.GetOffset() 4 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 1 2026-01-04T02:39:16.610585Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 4:6 res.GetOffset() 5 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-04T02:39:16.611657Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 4:6 isTruncatedBlob 0 hasNonZeroParts 1 isMiddlePartOfMessage 0 2026-01-04T02:39:16.612642Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:621: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 add CmdDeleteRange for key d0000000000_00000000000000000000_00000_0000000001_00016 2026-01-04T02:39:16.612709Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:621: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 add CmdDeleteRange for key d0000000000_00000000000000000001_00006_0000000002_00014 2026-01-04T02:39:16.612758Z node 3 :PERSQUEUE DEBUG: partition.cpp:4515: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquire RW Lock 2026-01-04T02:39:16.612934Z node 3 :PERSQUEUE DEBUG: partition.cpp:4523: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquired RW Lock, send compacter KV request 2026-01-04T02:39:16.612972Z node 3 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:39:16.613053Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 3 partNo 0 count 1 size 283 2026-01-04T02:39:16.613110Z node 3 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 4 partNo 6 count 2 size 271 2026-01-04T02:39:16.613135Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000000_00000_0000000001_00016(+) to d0000000000_00000000000000000000_00000_0000000001_00016(+) 2026-01-04T02:39:16.613155Z node 3 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000001_00006_0000000002_00014(+) to d0000000000_00000000000000000001_00006_0000000002_00014(+) 2026-01-04T02:39:16.616459Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. 
Partition 0 offset 0 count 1 actorID [3:138:2142] 2026-01-04T02:39:16.616524Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 0 offset 1 count 2 actorID [3:138:2142] 2026-01-04T02:39:16.616558Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. Partition 0 offset 3 count 1 size 3072032 actorID [3:138:2142] is actual 1 2026-01-04T02:39:16.616609Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 3 count 1 size 283 actorID [3:138:2142] 2026-01-04T02:39:16.616630Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. Partition 0 offset 4 count 2 size 7415109 actorID [3:138:2142] is actual 1 2026-01-04T02:39:16.616664Z node 3 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 4 count 2 size 271 actorID [3:138:2142] 2026-01-04T02:39:16.616733Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 1 parts 16 suffix '0' size 283 2026-01-04T02:39:16.616781Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 1 partno 6 count 2 parts 14 suffix '0' size 271 2026-01-04T02:39:16.616818Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 3 partno 0 count 1 parts 16 suffix '0' size 3072032 2026-01-04T02:39:16.622043Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 4 partno 6 count 2 parts 14 suffix '0' size 7415109 2026-01-04T02:39:16.629406Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 3 partno 0 count 1 parts 16 suffix '0' size 283 2026-01-04T02:39:16.629497Z node 3 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. 
Tablet '72057594037927937' partition 0 offset 4 partno 6 count 2 parts 14 suffix '0' size 271 2026-01-04T02:39:16.629774Z node 3 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:39:16.629833Z node 3 :PERSQUEUE DEBUG: partition.cpp:2161: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got compacter KV response, release RW lock 2026-01-04T02:39:16.629870Z node 3 :PERSQUEUE DEBUG: partition_compactification.cpp:125: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Process KV response Write 3 done Got compacter offset = -1 2026-01-04T02:39:16.634091Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [3:315:2302], now have 1 active actors on pipe 2026-01-04T02:39:16.634179Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-04T02:39:16.634209Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-04T02:39:16.634298Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 Got offset = 6 for user __ydb_compaction_consumer 2026-01-04T02:39:16.634492Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [3:317:2304], now have 1 active actors on pipe Got start offset = 3 >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] |90.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest |90.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} >> ReadOnlyVDisk::TestStorageLoad [GOOD] >> TTxDataShardUploadRows::TestUploadRows [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry >> TTxDataShardUploadRows::TestUploadRowsDropColumnRace [GOOD] >> TTxDataShardUploadRows::TestUploadRowsLocks >> TExternalTableTest::ReadOnlyMode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableShouldFailIfEntityOfAnotherTypeWithSameNameExists [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-04T02:39:16.460241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:16.460348Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:16.460388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:16.460420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:16.460461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:16.460509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:16.460568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:16.460641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:16.461505Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:16.463431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:16.610749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:39:16.610848Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:16.611770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:16.625432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:16.625997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:16.626175Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:16.638871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:16.639062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:16.639745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:16.640035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:16.642938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:16.643131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:16.650525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:16.650604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:16.650810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:16.650862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:16.650924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:16.651526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:16.816612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.817570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.817681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:39:16.817743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.817829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.817907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.817981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.818045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.818145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.818218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.818301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.818370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.818466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.818602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.818669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModif ... 
tate 2026-01-04T02:39:17.310477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:39:17.310505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:17.310541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:39:17.310568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:17.310603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-04T02:39:17.310654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:17.310699Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:39:17.310730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:39:17.310788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:39:17.310822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-04T02:39:17.310851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-04T02:39:17.310876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-04T02:39:17.311738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:39:17.311822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:39:17.311853Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:39:17.311914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-04T02:39:17.311954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:39:17.312954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:39:17.313029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:39:17.313069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:39:17.313111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-04T02:39:17.313145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:39:17.313206Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:39:17.315126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:39:17.315735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:39:17.315933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:39:17.315974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:39:17.316313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:39:17.316386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:39:17.316420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:742:2732] TestWaitNotification: OK eventTxId 102 2026-01-04T02:39:17.316812Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:39:17.317037Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 185us result status StatusSuccess 2026-01-04T02:39:17.317355Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" Location: "https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 103 2026-01-04T02:39:17.320449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:39:17.320757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 103:0, feature flag EnableReplaceIfExistsForExternalEntities 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "UniqueName" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2026-01-04T02:39:17.320848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_external_table.cpp:304: [72057594046678944] TAlterExternalTable Propose: opId# 103:0, path# /MyRoot/UniqueName, ReplaceIfExists: 1 2026-01-04T02:39:17.321102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, at schemeshard: 72057594046678944 2026-01-04T02:39:17.325601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusNameConflict Reason: "Check failed: path: \'/MyRoot/UniqueName\', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable" TxId: 103 SchemeshardId: 72057594046678944 PathId: 38 PathCreateTxId: 101, at schemeshard: 72057594046678944 2026-01-04T02:39:17.325854Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusNameConflict, reason: Check failed: path: '/MyRoot/UniqueName', error: unexpected path type (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeView, state: EPathStateNoChanges), expected types: EPathTypeExternalTable, operation: CREATE EXTERNAL TABLE, path: /MyRoot/UniqueName TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-04T02:39:17.326118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:39:17.326154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:39:17.326503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:39:17.326584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:39:17.326636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:750:2740] TestWaitNotification: OK eventTxId 103 >> KikimrIcGateway::TestDropExternalTable [GOOD] >> KikimrIcGateway::TestDropExternalDataSource >> LdapAuthProviderTest_StartTls::LdapFetchGroupsWithCustomGroupAttributeGood [GOOD] >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReplaceExternalTableIfNotExistsShouldFailIfFeatureFlagIsNotSet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-04T02:39:16.460240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:16.460360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:16.460409Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:16.460444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:16.460498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:16.460553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:16.460611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:16.460690Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:16.461544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:16.463451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:16.606617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:39:16.606724Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:16.607569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:16.620361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:16.620840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:16.621033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:16.633654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:16.633886Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:16.634698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:16.635043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:16.639982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:16.641226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:16.650533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:16.650607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:16.650759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:16.650813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:16.650860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:16.651545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:16.793026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.794655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.794753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.794797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.794844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.794880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.794972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.795031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.795101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.795163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.795204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.795255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.795312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.795414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.795461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModif ... 
impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:39:17.348120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:39:17.348162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-04T02:39:17.348214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-04T02:39:17.348250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-04T02:39:17.349456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:39:17.349588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:39:17.349633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:39:17.349715Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-04T02:39:17.349767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:39:17.351073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:39:17.351150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:39:17.351181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:39:17.351228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-04T02:39:17.351264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:39:17.351337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:39:17.352987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 
2026-01-04T02:39:17.354197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:39:17.354445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:39:17.354488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:39:17.354841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:39:17.354928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:39:17.354964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:719:2709] TestWaitNotification: OK eventTxId 101 2026-01-04T02:39:17.355404Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalDataSource" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:39:17.355609Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalDataSource" took 213us result status StatusSuccess 2026-01-04T02:39:17.356054Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalDataSource" PathDescription { Self { Name: "ExternalDataSource" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalDataSource CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalDataSourceVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalDataSourceDescription { Name: "ExternalDataSource" PathId { OwnerId: 72057594046678944 LocalId: 38 } Version: 1 SourceType: "ObjectStorage" Location: 
"https://s3.cloud.net/my_bucket" Installation: "" Auth { None { } } Properties { } References { } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 102 2026-01-04T02:39:17.359628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:39:17.359998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 102:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "ExternalTable" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } ReplaceIfExists: true } 2026-01-04T02:39:17.360088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 102:0, explain: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2026-01-04T02:39:17.360132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, at schemeshard: 72057594046678944 2026-01-04T02:39:17.362204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusPreconditionFailed Reason: "Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:17.362436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: Invalid TCreateExternalTable request: Unsupported: feature flag EnableReplaceIfExistsForExternalEntities is off, operation: CREATE EXTERNAL TABLE, path: /MyRoot/ExternalTable TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:39:17.362714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:39:17.362752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:39:17.363104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:39:17.363185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:39:17.363233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:727:2717] TestWaitNotification: OK eventTxId 102 2026-01-04T02:39:17.363668Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:39:17.363902Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 191us result status StatusPathDoesNotExist 2026-01-04T02:39:17.364055Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ExternalTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ExternalTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ParallelCreateSameExternalTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-04T02:39:16.460200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:16.460296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:16.460329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:16.460365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:16.460427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:16.460463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:16.460520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:16.460602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:16.461415Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:16.463422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:16.591330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:39:16.591410Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:16.592197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:16.608129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:16.608494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:16.609305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:16.621483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:16.622525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:16.625871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:16.630887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:16.640217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:16.641258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:16.650569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:16.650660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:16.650816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:16.650877Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:16.650929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:16.651573Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:16.820229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:39:16.820973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.821057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.821108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.821155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.821190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.821241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.821285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.821339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.821400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.821456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.821578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.821668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.821774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.821831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModif ... 
ingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:17.422222Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:39:17.422449Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 184us result status StatusSuccess 2026-01-04T02:39:17.422742Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 
MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 125 2026-01-04T02:39:17.422997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 125: send EvNotifyTxCompletion 2026-01-04T02:39:17.423034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 125 TestWaitNotification wait txId: 126 2026-01-04T02:39:17.423147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 126: send EvNotifyTxCompletion 2026-01-04T02:39:17.423171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 126 TestWaitNotification wait txId: 127 2026-01-04T02:39:17.423217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 127: send EvNotifyTxCompletion 2026-01-04T02:39:17.423266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 127 2026-01-04T02:39:17.423845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 125, at schemeshard: 72057594046678944 2026-01-04T02:39:17.423949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 125: got EvNotifyTxCompletionResult 2026-01-04T02:39:17.424000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 125: satisfy waiter [1:755:2745] 2026-01-04T02:39:17.424071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 126, at schemeshard: 72057594046678944 2026-01-04T02:39:17.424171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 126: got EvNotifyTxCompletionResult 2026-01-04T02:39:17.424219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 126: satisfy waiter [1:755:2745] 2026-01-04T02:39:17.424292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 127, at schemeshard: 72057594046678944 2026-01-04T02:39:17.424470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 127: got EvNotifyTxCompletionResult 2026-01-04T02:39:17.424496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 127: satisfy waiter [1:755:2745] TestWaitNotification: OK eventTxId 125 TestWaitNotification: OK eventTxId 126 TestWaitNotification: OK eventTxId 127 2026-01-04T02:39:17.424996Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/NilNoviSubLuna" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:39:17.425170Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: 
Tablet 72057594046678944 describe path "/MyRoot/NilNoviSubLuna" took 169us result status StatusSuccess 2026-01-04T02:39:17.425526Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/NilNoviSubLuna" PathDescription { Self { Name: "NilNoviSubLuna" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 125 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "NilNoviSubLuna" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 128 2026-01-04T02:39:17.429817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } } TxId: 128 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:39:17.430202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_external_table.cpp:428: [72057594046678944] CreateNewExternalTable, opId 128:0, feature flag EnableReplaceIfExistsForExternalEntities 0, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExternalTable FailOnExist: false CreateExternalTable { Name: "NilNoviSubLuna" SourceType: "General" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Uint64" } Columns { Name: "value" Type: "Uint64" } } 2026-01-04T02:39:17.430306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_external_table.cpp:301: [72057594046678944] TCreateExternalTable Propose: opId# 128:0, path# /MyRoot/NilNoviSubLuna 2026-01-04T02:39:17.430433Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 128:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-04T02:39:17.433008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 128, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/NilNoviSubLuna\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges)" TxId: 128 SchemeshardId: 72057594046678944 PathId: 39 PathCreateTxId: 125, at schemeshard: 72057594046678944 2026-01-04T02:39:17.433235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 128, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/NilNoviSubLuna', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 39], type: EPathTypeExternalTable, state: EPathStateNoChanges), operation: CREATE EXTERNAL TABLE, path: /MyRoot/NilNoviSubLuna TestModificationResult got TxId: 128, wait until txId: 128 >> TExternalTableTest::DropExternalTable [GOOD] >> TExternalTableTest::Decimal |90.5%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest >> ReadOnlyVDisk::TestStorageLoad [GOOD] Test command err: RandomSeed# 15967971603616850481 Setting VDisk read-only to 1 for position 0 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:0:0] 2026-01-04T02:38:52.028768Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:52.032001Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:52.034918Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:52.039165Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:52.039295Z 1 00h02m38.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:52.057617Z 1 00h02m38.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:52.178906Z 1 00h02m38.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:52.362240Z 1 00h02m38.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:52.500461Z 1 00h02m38.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:52.659821Z 1 00h02m38.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# 
[1:5327:706] 2026-01-04T02:38:52.667217Z 1 00h02m38.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:52.757298Z 1 00h02m39.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:52.759159Z 1 00h02m39.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:52.769831Z 1 00h02m39.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:52.838516Z 1 00h02m39.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:52.847105Z 1 00h02m39.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:52.864545Z 1 00h02m39.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:52.984591Z 1 00h02m39.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:53.010336Z 1 00h02m40.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:53.033047Z 1 00h02m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:53.033696Z 1 00h02m40.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:53.305849Z 1 00h02m40.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:53.316803Z 1 00h02m40.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:53.325544Z 1 00h02m40.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:53.335677Z 1 00h02m40.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:53.348984Z 1 00h02m40.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:53.460060Z 1 00h02m40.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:53.473465Z 1 00h02m40.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:53.593196Z 1 00h02m40.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:53.601935Z 1 00h02m41.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:53.664293Z 1 00h02m41.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:53.664804Z 1 00h02m41.100000s :BS_SKELETON ERROR: PDiskId# 
1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:53.905009Z 1 00h02m41.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:53.916324Z 1 00h02m41.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:54.141208Z 1 00h02m41.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:54.194892Z 1 00h02m41.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:54.226051Z 1 00h02m41.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:54.239531Z 1 00h02m42.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:54.282517Z 1 00h02m42.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:54.283143Z 1 00h02m42.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:54.381081Z 1 00h02m42.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:54.537841Z 1 00h02m42.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:54.561116Z 1 00h02m42.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:54.687016Z 1 00h02m42.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:54.702251Z 1 00h02m42.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:54.713918Z 1 00h02m43.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:54.729885Z 1 00h02m43.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:54.730351Z 1 00h02m43.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:54.875120Z 1 00h02m43.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:55.013400Z 1 00h02m43.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:55.029985Z 1 00h02m43.400000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:55.040994Z 1 00h02m43.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:55.085354Z 1 00h02m43.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 
2026-01-04T02:38:55.096179Z 1 00h02m43.700000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:55.210928Z 1 00h02m43.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:55.259011Z 1 00h02m43.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:55.270818Z 1 00h02m44.000000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:55.284875Z 1 00h02m44.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:55.354769Z 1 00h02m44.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:55.365974Z 1 00h02m44.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:55.572499Z 1 00h02m44.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:55.588299Z 1 00h02m44.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:55.687648Z 1 00h02m44.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:55.837231Z 1 00h02m44.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:55.941234Z 1 00h02m45.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:55.957248Z 1 00h02m45.200000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:55.971814Z 1 00h02m45.300000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:56.033340Z 1 00h02m45.500000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:56.289917Z 1 00h02m45.600000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:56.320950Z 1 00h02m45.800000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:56.337926Z 1 00h02m45.900000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [1:5327:706] 2026-01-04T02:38:56.391237Z 1 00h02m46.100000s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:0:0]: (2181038080) Unavailable in read-only Sender# [ ... 
k read-only to 0 for position 4 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:4:0] Setting VDisk read-only to 0 for position 5 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:5:0] Setting VDisk read-only to 0 for position 6 Invoking SetVDiskReadOnly for vdisk [82000000:1:0:6:0] 2026-01-04T02:39:07.687039Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:07.690553Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:07.694834Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:07.699947Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:07.700122Z 8 00h20m54.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:07.734949Z 8 00h20m54.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:07.751957Z 8 00h20m54.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:07.977836Z 8 00h20m54.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:07.987480Z 8 00h20m54.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:08.010095Z 8 00h20m55.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:08.020798Z 8 00h20m55.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:08.099544Z 8 00h20m55.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:08.100407Z 8 00h20m55.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:08.110731Z 8 00h20m55.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:08.130190Z 8 00h20m55.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:08.339039Z 8 00h20m55.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:08.369160Z 8 00h20m56.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:08.478893Z 8 00h20m56.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:08.668638Z 8 00h20m56.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:08.684906Z 8 00h20m56.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in 
read-only Sender# [1:5376:755] 2026-01-04T02:39:08.685861Z 8 00h20m56.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:08.726548Z 8 00h20m56.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:08.913004Z 8 00h20m56.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:08.924672Z 8 00h20m56.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.020359Z 8 00h20m56.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.030581Z 8 00h20m57.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.128099Z 8 00h20m57.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.140784Z 8 00h20m57.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.192376Z 8 00h20m57.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.243769Z 8 00h20m57.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.245234Z 8 00h20m57.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.356706Z 8 00h20m57.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.363585Z 8 00h20m57.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.632429Z 8 00h20m57.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.645033Z 8 00h20m58.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.667659Z 8 00h20m58.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.676704Z 8 00h20m58.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.745905Z 8 00h20m58.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.746834Z 8 00h20m58.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.771278Z 8 00h20m58.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.784357Z 8 00h20m58.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.914331Z 8 00h20m58.912560s 
:BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.924829Z 8 00h20m59.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.948654Z 8 00h20m59.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:09.960424Z 8 00h20m59.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:10.115213Z 8 00h20m59.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:10.116267Z 8 00h20m59.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:10.261664Z 8 00h20m59.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:10.272341Z 8 00h20m59.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:10.406920Z 8 00h20m59.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:10.421793Z 8 00h21m00.012560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:10.529346Z 8 00h21m00.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:10.616852Z 8 00h21m00.312560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:10.758170Z 8 00h21m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:10.759228Z 8 00h21m00.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:10.775055Z 8 00h21m00.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:10.789731Z 8 00h21m00.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:10.803833Z 8 00h21m00.712560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:10.904727Z 8 00h21m00.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:10.913383Z 8 00h21m00.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:11.088876Z 8 00h21m01.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:11.111619Z 8 00h21m01.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:11.253916Z 8 00h21m01.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in 
read-only Sender# [1:5376:755] 2026-01-04T02:39:11.269687Z 8 00h21m01.512560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:11.284296Z 8 00h21m01.612560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:11.318568Z 8 00h21m01.812560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:11.418395Z 8 00h21m01.912560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:11.660522Z 8 00h21m02.112560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:11.674187Z 8 00h21m02.212560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:11.705918Z 8 00h21m02.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] 2026-01-04T02:39:11.707655Z 8 00h21m02.412560s :BS_SKELETON ERROR: PDiskId# 1000 VDISK[82000000:_:0:7:0]: (2181038080) Unavailable in read-only Sender# [1:5376:755] |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest >> TableCreator::CreateTables |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest >> HashShuffle::BackPressureInMemoryLoad [GOOD] >> HashShuffle::BackPressureWithSpilling [GOOD] >> HashShuffle::BackPressureWithSpillingLoad >> TTxDataShardUploadRows::TestUploadShadowRowsShadowData [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::ReadOnlyMode [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-04T02:39:16.460228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:16.460328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:16.460370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:16.460403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:16.460441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:16.460467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 
2026-01-04T02:39:16.460514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:16.460581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:16.461430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:16.463473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:16.611489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:39:16.611557Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:16.612374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:16.624768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:16.625239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:16.625421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:16.637251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:16.637430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:16.638102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:16.638392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:16.640952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:16.641199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:16.650519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:16.650604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:16.650799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:16.650869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:16.650925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:16.651554Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:16.818099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.819057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.819172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.819257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.819337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.819392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.819471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.819566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.819652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.819722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.819788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.819900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.819998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.820124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:16.820191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:216:2217] at schemeshard: 72057594046678944 Send TEvModif ... 
schemeshard: 72057594046678944 2026-01-04T02:39:17.735381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 129 ready parts: 1/1 2026-01-04T02:39:17.735520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 129 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:39:17.736416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:39:17.736524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 13 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:39:17.736571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2026-01-04T02:39:17.736627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 13 2026-01-04T02:39:17.736697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2026-01-04T02:39:17.737686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:39:17.737762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 41 Version: 2 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:39:17.737791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 129 2026-01-04T02:39:17.737837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 2 2026-01-04T02:39:17.737892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-04T02:39:17.737992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 0/1, is published: true 2026-01-04T02:39:17.740690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 129:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:129 msg type: 269090816 2026-01-04T02:39:17.740878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 129, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:39:17.741636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 FAKE_COORDINATOR: Add transaction: 129 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 129 at step: 5000041 2026-01-04T02:39:17.743249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:17.743375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 129 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:17.743465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 129:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000041, at schemeshard: 72057594046678944 2026-01-04T02:39:17.743647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 129:0 128 -> 240 2026-01-04T02:39:17.743823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-04T02:39:17.743908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-04T02:39:17.744324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 FAKE_COORDINATOR: Erasing txId 129 2026-01-04T02:39:17.746135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:17.746175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:17.746346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 129, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-04T02:39:17.746445Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:17.746541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:895:2853], at schemeshard: 72057594046678944, txId: 129, path id: 1 2026-01-04T02:39:17.746588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:895:2853], at schemeshard: 72057594046678944, txId: 129, path id: 41 2026-01-04T02:39:17.746658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 129:0, at schemeshard: 72057594046678944 2026-01-04T02:39:17.746697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 129:0 ProgressState 2026-01-04T02:39:17.746818Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-01-04T02:39:17.746858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-04T02:39:17.746895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#129:0 progress is 1/1 2026-01-04T02:39:17.746936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-04T02:39:17.746981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 129, ready parts: 1/1, is published: false 2026-01-04T02:39:17.747023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 129 ready parts: 1/1 2026-01-04T02:39:17.747061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 129:0 2026-01-04T02:39:17.747097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 129:0 2026-01-04T02:39:17.747163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-04T02:39:17.747202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 129, publications: 2, subscribers: 0 2026-01-04T02:39:17.747265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-04T02:39:17.747294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 129, [OwnerId: 72057594046678944, LocalPathId: 41], 3 2026-01-04T02:39:17.748359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:39:17.748462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:39:17.748499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 129 2026-01-04T02:39:17.748546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-04T02:39:17.748590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2026-01-04T02:39:17.749522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:39:17.749606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 4 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 129 2026-01-04T02:39:17.749637Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 129 2026-01-04T02:39:17.749680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 129, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 3 2026-01-04T02:39:17.749715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-04T02:39:17.749817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 129, subscribers: 0 2026-01-04T02:39:17.752756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 2026-01-04T02:39:17.753853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 129 TestModificationResult got TxId: 129, wait until txId: 129 >> KikimrIcGateway::TestSecretsExistingValidation-UseSchemaSecrets [GOOD] >> KikimrProvider::TestFillAuthPropertiesBasic [GOOD] >> KikimrProvider::TestFillAuthPropertiesAws [GOOD] >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/unittest |90.6%| [TA] $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} >> TTxDataShardUploadRows::RetryUploadRowsToShard [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets |90.6%| [TA] {RESULT} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest |90.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/ut_blobstorage/ut_read_only_vdisk/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KikimrIcGateway::TestLoadExternalTable [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets >> KikimrIcGateway::TestCreateSameExternalTable [GOOD] >> KikimrIcGateway::TestCreateResourcePool >> TExternalTableTest::Decimal [GOOD] >> YdbProxy::ReadNonExistentTopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrProvider::AlterTableAddIndexWithTableSettings [GOOD] Test command err: Trying to start YDB, gRPC: 23088, MsgBus: 14203 2026-01-04T02:39:13.482988Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330445666793320:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:13.483296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:13.699597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:13.699706Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:13.714940Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:13.732292Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330445666793290:2081] 1767494353481300 != 1767494353481303 2026-01-04T02:39:13.741656Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:13.757669Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:13.870942Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:13.965171Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:13.965196Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:13.965211Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:13.965304Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:14.483555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:14.489983Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:14.543928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:14.664195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:14.785937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:14.840769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.030564Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330458551697051:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.030680Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.030929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330458551697061:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.030965Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.494908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.519658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.542316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.565181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.589958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.619311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.649177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.717729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.773392Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330458551697930:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.773459Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.773508Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330458551697935:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.773663Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330458551697937:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.773705Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.776559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:16.785879Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330458551697938:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:39:16.882091Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330458551697990:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> HashShuffle::BackPressureWithSpillingLoad [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_external_table/unittest >> TExternalTableTest::Decimal [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-04T02:39:17.250645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:17.250763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:17.250811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:17.250848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:17.250882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:17.250910Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:17.251002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:17.251107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:17.252005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:17.252298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:17.381926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:39:17.382033Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:17.382873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:17.392780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 
2026-01-04T02:39:17.396917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:17.397142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:17.401889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:17.402108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:17.402814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:17.403112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:17.408097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:17.408293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:17.409682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:17.409750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:17.409876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:17.409929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:17.409972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:17.410108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:17.555613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:17.556706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:17.556828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { 
Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:17.556911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:17.556977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:17.557069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:17.557130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:17.557208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:17.557303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:17.557375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:17.557469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:17.557541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:17.557667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:17.557737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:17.557863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:210:2211] at schemeshard: 72057594046678944 Send TEvModif ... 
8944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:39:19.390772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:39:19.390856Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:19.390884Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:212:2212], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-04T02:39:19.390913Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:212:2212], at schemeshard: 72057594046678944, txId: 101, path id: 39 2026-01-04T02:39:19.390931Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:212:2212], at schemeshard: 72057594046678944, txId: 101, path id: 39 2026-01-04T02:39:19.390949Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:212:2212], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-04T02:39:19.390994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:19.391027Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:39:19.391115Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:39:19.391154Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:39:19.391193Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:39:19.391233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:39:19.391273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-04T02:39:19.391309Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:39:19.391339Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:39:19.391366Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:39:19.391418Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:39:19.391450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:39:19.391479Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 3, subscribers: 0 
2026-01-04T02:39:19.391525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-04T02:39:19.391549Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 2 2026-01-04T02:39:19.391565Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 39], 2 2026-01-04T02:39:19.392535Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:39:19.392594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:39:19.392625Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:39:19.392658Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-04T02:39:19.392691Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:39:19.393565Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:39:19.393626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:39:19.393649Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:39:19.393672Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 2 2026-01-04T02:39:19.393693Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:39:19.394235Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:39:19.394290Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:39:19.394312Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:39:19.394331Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-04T02:39:19.394359Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:39:19.394411Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:39:19.395626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:39:19.396759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:39:19.397080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:39:19.397247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:39:19.397278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:39:19.397529Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:39:19.397596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:39:19.397626Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:747:2736] TestWaitNotification: OK eventTxId 101 2026-01-04T02:39:19.397990Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ExternalTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:39:19.398166Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ExternalTable" took 202us result status StatusSuccess 2026-01-04T02:39:19.398455Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ExternalTable" PathDescription { Self { Name: "ExternalTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeExternalTable CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ExternalTableVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 
PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ExternalTableDescription { Name: "ExternalTable" PathId { OwnerId: 72057594046678944 LocalId: 39 } Version: 1 SourceType: "ObjectStorage" DataSourcePath: "/MyRoot/ExternalDataSource" Location: "/" Columns { Name: "key" Type: "Decimal(35,9)" TypeId: 4865 Id: 1 NotNull: false TypeInfo { DecimalPrecision: 35 DecimalScale: 9 } } Content: "" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_external_table/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yql/dq/runtime/ut/unittest >> HashShuffle::BackPressureWithSpillingLoad [GOOD] Test command err: Blocked 254975 time(s) emptyPops 0 Blocked 2895 time(s) emptyPops 0 |90.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.6%| [TM] {BAZEL_UPLOAD} ydb/library/yql/dq/runtime/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/ydb_proxy/ut/unittest >> YdbProxy::ReadNonExistentTopic [GOOD] Test command err: 2026-01-04T02:37:05.664030Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591329895230588900:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:05.676660Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:37:06.093949Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:06.238192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:37:06.238308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:37:06.249188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:37:06.408305Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:37:06.413958Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591329895230588794:2081] 1767494225641453 != 1767494225641456 2026-01-04T02:37:06.414019Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:37:06.676565Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:37:06.742622Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:37:07.110151Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:37:07.110187Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:37:07.110230Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:37:07.110360Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:37:07.539229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:37:07.798597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:2, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:09.423785Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329912410459129:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:09.423785Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329912410459114:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:09.423888Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329912410459130:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:09.424199Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:09.424455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591329912410459137:2362], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:09.424500Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:37:09.427608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:37:09.458581Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329912410459136:2640] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:37:09.463292Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329912410459133:2360], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-04T02:37:09.463292Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591329912410459134:2361], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-04T02:37:09.521788Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329912410459184:2671] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:37:09.550259Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591329912410459202:2679] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:37:10.505874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:37:10.657921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591329895230588900:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:37:10.661742Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:37:10.929538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:37:11.368000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:37:11.754450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:37:12.146514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:37:21.315031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:37:21.315066Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:16.448483Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330455584117394:2071];send_to=[0:7307199536658146131:7762515]; 
2026-01-04T02:39:16.448563Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:16.460446Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:16.538341Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:16.577393Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:16.577513Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:16.590696Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:16.686240Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:16.818800Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:16.818823Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:16.818832Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:16.818921Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:17.084279Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |90.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |90.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_external_table/test-results/unittest/{meta.json ... 
results_accumulator.log} |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataSplitThenPublish [GOOD] >> TTxDataShardUploadRows::UploadRowsToReplicatedTable >> KikimrIcGateway::TestDropExternalDataSource [GOOD] >> KikimrIcGateway::TestDropResourcePool >> TSchemeShardAuditSettings::CreateExtSubdomain |90.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/ydb_proxy/ut/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest |90.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterSubdomain >> TSchemeShardAuditSettings::CreateSubdomain >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression |90.6%| [TA] $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TableCreator::CreateTables [GOOD] >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] |90.6%| [TA] $(B)/ydb/library/yql/dq/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [TA] {RESULT} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/ydb_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/yql/dq/runtime/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] |90.6%| [TA] {RESULT} $(B)/ydb/library/yql/dq/runtime/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetry [GOOD] >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetryOnRetryableError >> KikimrIcGateway::TestCreateResourcePool [GOOD] >> KikimrIcGateway::TestAlterResourcePool >> TTicketParserTest::TicketFromCertificateWithValidationGood >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> CompressExecutor::TestReorderedExecutor >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] >> TTxDataShardUploadRows::TestUploadRowsLocks [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRows ------- [TM] {asan, default-linux-x86_64, release} ydb/library/table_creator/ut/unittest >> TableCreator::CreateTables [GOOD] Test command err: 2026-01-04T02:39:18.775163Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330465360030072:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:18.775213Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:19.023504Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:39:19.073421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:19.073528Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:19.102714Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:19.155562Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:19.265950Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:39:19.487495Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:19.487517Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:19.487615Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:19.487695Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:19.682630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:19.697647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:19.699416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:19.779064Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |90.6%| [TM] {BAZEL_UPLOAD} ydb/library/table_creator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecords1MBMessagesOneByOneBySeqNo [GOOD] Test command err: 2026-01-04T02:38:56.783043Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330373000624401:2092];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:56.791391Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:56.824343Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:38:57.062647Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:57.086709Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:57.086808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:57.093136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:57.227705Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:57.296008Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:57.296031Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:57.296069Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:57.296182Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:57.350240Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:57.488746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:57.566726Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:38:57.705597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:38:57.795129Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:57.862682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-04T02:38:57.930439Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-04T02:38:57.930486Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2026-01-04T02:38:57.931785Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-04T02:38:57.931827Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-04T02:38:57.937367Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2026-01-04T02:38:57.937444Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2026-01-04T02:38:57.937495Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2026-01-04T02:38:57.937525Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2026-01-04T02:39:00.690240Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591330389695229232:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:00.690298Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:00.704253Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:00.798104Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:00.820103Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2026-01-04T02:39:00.820173Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:00.844234Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:00.897320Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:00.897343Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:00.897350Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:00.897426Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:00.898618Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:01.099332Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:01.207850Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:01.406054Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:39:01.558988Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:39:01.611210Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-04T02:39:01.628614Z node 4 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037889 not found 2026-01-04T02:39:01.629979Z node 4 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037891 not found 2026-01-04T02:39:01.630371Z node 4 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037890 not found 2026-01-04T02:39:01.630394Z node 4 :HIVE WARN: 
hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 4, TabletId: 72075186224037888 not found 2026-01-04T02:39:01.636776Z node 4 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2026-01-04T02:39:01.636835Z node 4 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2026-01-04T02:39:01.636851Z node 4 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2026-01-04T02:39:01.636868Z node 4 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2026-01-04T02:39:01.696573Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:04.812714Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7591330405238966058:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:04.813162Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:04.826735Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:04.908681Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:04.923594Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:04.923669Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:04.942436Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:05.008509Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:05.008540Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:05.008547Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:05.008627Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:05.071654Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:05.224069Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:05.273387Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:05.463852Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:39:05.593152Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:39:05.633117Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:39:05.662621Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropPersQueueGroup, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_pq.cpp:422) 2026-01-04T02:39:05.677025Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037889 not found 2026-01-04T02:39:05.678209Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037890 not found 2026-01-04T02:39:05.678227Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037888 not found 2026-01-04T02:39:05.678234Z node 7 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 7, TabletId: 72075186224037891 not found 2026-01-04T02:39:05.683454Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2026-01-04T02:39:05.683503Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2026-01-04T02:39:05.683518Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2026-01-04T02:39:05.683538Z node 7 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,1) wasn't found 2026-01-04T02:39:09.399461Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591330428625927052:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:09.399968Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:09.410112Z node 10 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:09.489973Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:09.512258Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:09.512356Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:09.542271Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:09.595098Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:09.595122Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:09.595136Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:09.595236Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:09.644047Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:09.791746Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:09.845035Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:10.010742Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:39:10.402091Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:14.398294Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7591330428625927052:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:14.398382Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.6%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflow [GOOD] >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] |90.6%| 
[TA] $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.6%| [TA] {RESULT} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapFetchGroupsUseInvalidSearchFilterBad [GOOD] Test command err: 2026-01-04T02:38:47.209221Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330330600757206:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:47.209853Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:47.425547Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:47.459312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:47.459425Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:47.486720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:47.528510Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:47.638477Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:47.638501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:47.638507Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:47.638616Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:47.720564Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:47.862459Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:47.866810Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:47.866854Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:47.868406Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:24792, port: 24792 2026-01-04T02:38:47.868499Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:47.926333Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:47.974120Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: 
(member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:38:47.974660Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:38:47.974717Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:48.022284Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:48.070275Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:48.071998Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****2bmg (19889C38) () has now valid token of ldapuser@ldap 2026-01-04T02:38:48.210420Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:52.201361Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330330600757206:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:52.201486Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:38:53.213112Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****2bmg (19889C38) 2026-01-04T02:38:53.213296Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:24792, port: 24792 2026-01-04T02:38:53.213389Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:53.262863Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:53.263437Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:340: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:24792 return no entries 2026-01-04T02:38:53.263942Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****2bmg (19889C38) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldaps://localhost:24792 return no entries)' 2026-01-04T02:38:57.214849Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****2bmg (19889C38) 2026-01-04T02:38:58.593255Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330380999580700:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:58.593294Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:58.603315Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:58.658129Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:58.699577Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:58.699651Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:58.727215Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:58.739534Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:58.739558Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:58.739565Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:58.739646Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:58.818050Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:58.820687Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:58.820714Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:58.821200Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:30167, port: 30167 2026-01-04T02:38:58.821265Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:58.874394Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:58.875038Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldaps://localhost:30167. Server is busy 2026-01-04T02:38:58.875604Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****nmxQ (261063A5) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:30167. 
Server is busy)' 2026-01-04T02:38:58.875821Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:58.875847Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:58.876537Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:30167, port: 30167 2026-01-04T02:38:58.876599Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:58.930303Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:58.930781Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldaps://localhost:30167. Server is busy 2026-01-04T02:38:58.931157Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:58.931265Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****nmxQ (261063A5) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldaps://localhost:30167. Server is busy)' 2026-01-04T02:38:59.597708Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:00.595565Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****nmxQ (261063A5) 2026-01-04T02:39:00.595876Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:00.595921Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:00.597139Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:30167, port: 30167 2026-01-04T02:39:00.597227Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:00.654220Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filt ... 
ce] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:12.284954Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591330440831362433:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:12.285033Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:12.296344Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:12.347649Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:12.348589Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591330440831362405:2081] 1767494352284137 != 1767494352284140 2026-01-04T02:39:12.411542Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:12.411563Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:12.411569Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:12.411641Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:12.423125Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:12.423218Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:12.434717Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:12.454233Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:39:12.457002Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:12.457033Z node 4 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:12.457577Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:22399, port: 22399 2026-01-04T02:39:12.457649Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-04T02:39:12.465459Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:12.514233Z node 4 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:39:12.558495Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****-pbg (E8646B35) () has now valid token of ldapuser@ldap 2026-01-04T02:39:12.587728Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-04T02:39:15.225439Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591330454568048134:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:15.225493Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:15.238190Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:15.308178Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:15.309666Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591330454568048106:2081] 1767494355224749 != 1767494355224752 2026-01-04T02:39:15.340522Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:15.340593Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:15.342316Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:15.375471Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:15.375499Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:15.375506Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:15.375585Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:15.467679Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:15.690302Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:39:15.693973Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:15.694007Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:15.694715Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:32362, port: 32362 2026-01-04T02:39:15.694806Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-04T02:39:15.705790Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:15.750310Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-01-04T02:39:15.794176Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:39:15.794721Z node 5 
:LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:39:15.794769Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-04T02:39:15.838205Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-04T02:39:15.882237Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: groupDN 2026-01-04T02:39:15.883356Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****ae-A (B9107337) () has now valid token of ldapuser@ldap 2026-01-04T02:39:18.430817Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7591330467645064317:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:18.430898Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:18.443282Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:18.503806Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:18.505596Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7591330467645064289:2081] 1767494358430149 != 1767494358430152 2026-01-04T02:39:18.549093Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:18.549179Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:18.566394Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:18.566413Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:18.566428Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:18.566514Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:18.568403Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:18.742806Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:18.777566Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 
2026-01-04T02:39:18.780157Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:18.780186Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:18.780722Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:22067, port: 22067 2026-01-04T02:39:18.780777Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-04T02:39:18.788487Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:18.830355Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: &(uid=ldapuser)(), attributes: memberOf 2026-01-04T02:39:18.830441Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:22067. Bad search filter 2026-01-04T02:39:18.830909Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****WJbw (20D17E25) () has now permanent error message 'Could not login via LDAP (Could not perform search for filter &(uid=ldapuser)() on server ldap://localhost:22067. Bad search filter)' |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest |90.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/table_creator/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] >> ObjectDistribution::TestManyIrrelevantNodes [GOOD] >> Sequencer::Basic1 [GOOD] >> StoragePool::TestDistributionRandomProbability ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_LdapsScheme::LdapRefreshGroupsInfoDisableNestedGroupsGood [GOOD] Test command err: 2026-01-04T02:38:47.059209Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330330420549030:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:47.059391Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:47.279991Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:47.286881Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:47.287048Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:47.359692Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330330420549004:2081] 1767494327054038 != 1767494327054041 2026-01-04T02:38:47.369907Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:47.375319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:47.476033Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2026-01-04T02:38:47.476055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:47.476071Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:47.476193Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:47.494798Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:47.541072Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:47.545225Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:47.545269Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:47.546864Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:4793, port: 4793 2026-01-04T02:38:47.546960Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:47.610345Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: groupDN 2026-01-04T02:38:47.655032Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****9OEg (B1C5C1C3) () has now valid token of ldapuser@ldap 2026-01-04T02:38:50.193385Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330344728093048:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:50.193458Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:50.209860Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:38:50.311629Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:50.311712Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:50.313999Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330344728093022:2081] 1767494330192383 != 1767494330192386 2026-01-04T02:38:50.320942Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:50.322661Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:50.333033Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:50.412870Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or 
outdated, will use file: (empty maybe) 2026-01-04T02:38:50.412890Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:50.412897Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:50.412972Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:50.484597Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:50.535796Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:50.539389Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:50.539436Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:50.540127Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:14166, port: 14166 2026-01-04T02:38:50.540229Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=invalidRobouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:50.598305Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:14166. Invalid credentials 2026-01-04T02:38:50.598937Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****3U7Q (09D7E27B) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=invalidRobouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:14166. 
Invalid credentials)' 2026-01-04T02:38:53.795422Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591330357730877979:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:53.795481Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:53.807083Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:53.885411Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:53.886753Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591330357730877952:2081] 1767494333794581 != 1767494333794584 2026-01-04T02:38:53.919469Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:53.919527Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:53.924071Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:53.972919Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:53.972940Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:53.972948Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:53.973047Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:54.023895Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:54.248486Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:54.250623Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:54.250656Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:54.251242Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:9329, port: 9329 2026-01-04T02:38:54.251341Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:54.302322Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:201: Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:9329. Invalid credentials 2026-01-04T02:38:54.302818Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****UpKQ (325FB138) () has now permanent error message 'Could not login via LDAP (Could not perform initial LDAP bind for dn cn=robouser,dc=search,dc=yandex,dc=net on server ldaps://localhost:9329. 
Invalid credentials)' 2026-01-04T02:38:56.763085Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591330371037113699:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:56.763143Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:56.775258Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:56.835147Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:56.878287Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:56.878366Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:56.888593Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:56.921562Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty ... on server ldaps://localhost:7633 return no entries 2026-01-04T02:38:57.187464Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****zTlg (CD6553BB) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldaps://localhost:7633 return no entries)' 2026-01-04T02:39:00.272056Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591330388766906259:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:00.272136Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:00.282799Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:00.355032Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:00.387776Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:00.387862Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:00.389700Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:00.420991Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:00.421016Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:00.421023Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:00.421100Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-01-04T02:39:00.477159Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:00.544767Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:39:00.548915Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:00.548955Z node 5 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:00.549730Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:13712, port: 13712 2026-01-04T02:39:00.549827Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:00.602237Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:39:00.650133Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:39:00.650687Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:39:00.650737Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:00.694206Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:00.738176Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:00.739448Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****wTBw (05C3FC68) () has now valid token of ldapuser@ldap 2026-01-04T02:39:01.277585Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:04.274829Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****wTBw (05C3FC68) 2026-01-04T02:39:04.275009Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:13712, port: 13712 2026-01-04T02:39:04.275134Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:04.326358Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:39:04.370178Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: 
dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:39:04.370747Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:39:04.370781Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:04.418154Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:04.462076Z node 5 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:04.463044Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****wTBw (05C3FC68) () has now valid token of ldapuser@ldap 2026-01-04T02:39:05.272200Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591330388766906259:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:05.272276Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:39:09.277069Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****wTBw (05C3FC68) 2026-01-04T02:39:11.282972Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7591330436872028216:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:11.283042Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:11.299459Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:11.367809Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:11.404272Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:11.404337Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:11.426357Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:11.471625Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:11.471650Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:11.471658Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:11.471740Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-04T02:39:11.511205Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:11.636673Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:39:11.640840Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:11.640877Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:11.641651Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:13278, port: 13278 2026-01-04T02:39:11.641725Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:11.694373Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:39:11.742677Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****br5g (97D2AC28) () has now valid token of ldapuser@ldap 2026-01-04T02:39:12.288134Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:16.283290Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7591330436872028216:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:16.283388Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:39:17.287291Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****br5g (97D2AC28) 2026-01-04T02:39:17.287427Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldaps, uris: ldaps://localhost:13278, port: 13278 2026-01-04T02:39:17.287495Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:17.342251Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:39:17.390823Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****br5g (97D2AC28) () has now valid token of ldapuser@ldap 2026-01-04T02:39:21.289058Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****br5g (97D2AC28) |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataPublishThenSplit [GOOD] >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateExtSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:21.664527Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:21.664626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:21.664662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:21.664708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:21.664745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:21.664776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:21.664827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:21.664936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:21.665820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:21.667261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:21.758994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:21.759036Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:21.769546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:21.769939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:21.770095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:21.777135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:21.777396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:21.778086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:21.778347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:21.781017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:21.781173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 
2026-01-04T02:39:21.786064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:21.786127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:21.786194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:21.786244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:21.786357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:21.787095Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:21.950926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.951931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.953014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 112 at step: 5000049 FAKE_COORDINATOR: advance: minStep5000049 State->FrontStep: 5000048 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000049 2026-01-04T02:39:22.803837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000049, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:22.803935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000049 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:22.803984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:190: TDropExtSubdomain TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000049, at schemeshard: 72057594046678944 2026-01-04T02:39:22.804045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 43] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:22.804070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-01-04T02:39:22.804113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 112:0 128 -> 134 2026-01-04T02:39:22.805023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-04T02:39:22.805365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-04T02:39:22.806752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2026-01-04T02:39:22.806798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:137: TDropExtSubdomain TDeleteExternalShards, operationId: 112:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:39:22.806878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 112:0 134 -> 135 
2026-01-04T02:39:22.807009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:39:22.807055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 FAKE_COORDINATOR: Erasing txId 112 2026-01-04T02:39:22.808541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:22.808583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:22.808739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-01-04T02:39:22.808835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:22.808873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 112, path id: 1 2026-01-04T02:39:22.808906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 112, path id: 43 2026-01-04T02:39:22.809149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2026-01-04T02:39:22.809182Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:29: [72057594046678944] TDeleteSubdomainSystemShards opId# 112:0 ProgressState 2026-01-04T02:39:22.809240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 112:0 135 -> 240 2026-01-04T02:39:22.809779Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 30 PathOwnerId: 72057594046678944, cookie: 112 2026-01-04T02:39:22.809930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 30 PathOwnerId: 72057594046678944, cookie: 112 2026-01-04T02:39:22.809961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2026-01-04T02:39:22.809988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 30 2026-01-04T02:39:22.810017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:39:22.810634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 
Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-01-04T02:39:22.810703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-01-04T02:39:22.810739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 112 2026-01-04T02:39:22.810765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 18446744073709551615 2026-01-04T02:39:22.810790Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-01-04T02:39:22.810844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 112, ready parts: 0/1, is published: true 2026-01-04T02:39:22.812929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2026-01-04T02:39:22.812974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 112:0 ProgressState 2026-01-04T02:39:22.813042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#112:0 progress is 1/1 2026-01-04T02:39:22.813069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2026-01-04T02:39:22.813097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#112:0 progress is 1/1 2026-01-04T02:39:22.813122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2026-01-04T02:39:22.813153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: true 2026-01-04T02:39:22.813188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2026-01-04T02:39:22.813215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 112:0 2026-01-04T02:39:22.813237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 112:0 2026-01-04T02:39:22.813283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-04T02:39:22.813539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:39:22.813593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-04T02:39:22.813654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, 
LocalPathId: 43] was 1 2026-01-04T02:39:22.813923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:39:22.813955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-04T02:39:22.814016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:39:22.814567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-04T02:39:22.814645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-04T02:39:22.816507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:39:22.816581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2026-01-04T02:39:22.816879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2026-01-04T02:39:22.816915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2026-01-04T02:39:22.817369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2026-01-04T02:39:22.817463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-01-04T02:39:22.817494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:1064:3054] TestWaitNotification: OK eventTxId 112 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::CreateSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:21.664553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:21.664646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:21.664684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 
0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:21.664726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:21.664775Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:21.664805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:21.664882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:21.664960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:21.665900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:21.667215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:21.744562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:21.744619Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:21.758274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:21.758801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:21.760278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:21.768477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:21.768764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:21.771226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:21.773161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:21.779771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:21.780911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:21.786043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:21.786106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:21.786218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:21.786273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: 
unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:21.786312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:21.787051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:21.961493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.962649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.962796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.962898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.963002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.963078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.963160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.963238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.963355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.963479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.963574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.963652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.963732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.963838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:39:21.963959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... pp:665: Send tablet strongly msg operationId: 112:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:112 msg type: 269090816 2026-01-04T02:39:22.867730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 112, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 112 at step: 5000049 FAKE_COORDINATOR: advance: minStep5000049 State->FrontStep: 5000048 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 112 at step: 5000049 2026-01-04T02:39:22.868199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000049, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:22.868313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 112 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000049 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:22.868361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_unsafe.cpp:47: TDropForceUnsafe TPropose, operationId: 112:0 HandleReply TEvOperationPlan, step: 5000049, at schemeshard: 72057594046678944 2026-01-04T02:39:22.868406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 43] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 112 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:22.868461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-01-04T02:39:22.868561Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 112:0 128 -> 130 2026-01-04T02:39:22.868722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:39:22.868780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-04T02:39:22.869794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-04T02:39:22.870734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 FAKE_COORDINATOR: Erasing txId 112 2026-01-04T02:39:22.872281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:22.872320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 
2026-01-04T02:39:22.872461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-01-04T02:39:22.872569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:22.872609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 112, path id: 1 2026-01-04T02:39:22.872652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 112, path id: 43 2026-01-04T02:39:22.873033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 112:0, at schemeshard: 72057594046678944 2026-01-04T02:39:22.873078Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:418: [72057594046678944] TDeleteParts opId# 112:0 ProgressState 2026-01-04T02:39:22.873162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#112:0 progress is 1/1 2026-01-04T02:39:22.873194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2026-01-04T02:39:22.873225Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#112:0 progress is 1/1 2026-01-04T02:39:22.873251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2026-01-04T02:39:22.873280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 112, ready parts: 1/1, is published: false 2026-01-04T02:39:22.873322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 112 ready parts: 1/1 2026-01-04T02:39:22.873366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 112:0 2026-01-04T02:39:22.873394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 112:0 2026-01-04T02:39:22.873448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-01-04T02:39:22.873480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 112, publications: 2, subscribers: 0 2026-01-04T02:39:22.873515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 1], 30 2026-01-04T02:39:22.873549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 112, [OwnerId: 72057594046678944, LocalPathId: 43], 18446744073709551615 2026-01-04T02:39:22.874112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 30 PathOwnerId: 72057594046678944, cookie: 112 2026-01-04T02:39:22.874220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 30 PathOwnerId: 72057594046678944, cookie: 112 2026-01-04T02:39:22.874259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2026-01-04T02:39:22.874290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 30 2026-01-04T02:39:22.874322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:39:22.874923Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-01-04T02:39:22.875005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-01-04T02:39:22.875038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2026-01-04T02:39:22.875065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 18446744073709551615 2026-01-04T02:39:22.875102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-04T02:39:22.875184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2026-01-04T02:39:22.875410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:39:22.875464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-04T02:39:22.875579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-04T02:39:22.876003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:39:22.876043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-04T02:39:22.876099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:39:22.878740Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-04T02:39:22.878820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-04T02:39:22.880399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:39:22.880487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, wait until txId: 112 TestWaitNotification wait txId: 112 2026-01-04T02:39:22.880839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2026-01-04T02:39:22.880880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2026-01-04T02:39:22.881441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2026-01-04T02:39:22.881521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-01-04T02:39:22.881554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:1060:3050] TestWaitNotification: OK eventTxId 112 |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata+UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets >> KikimrIcGateway::TestDropResourcePool [GOOD] >> KikimrIcGateway::TestCreateStreamingQuery |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] |90.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] |90.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.6%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Select |90.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, 
release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink |90.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:21.664518Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:21.664617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:21.664659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:21.664703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:21.664756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:21.664789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:21.664828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:21.664890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:21.665729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:21.667226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:21.755579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:21.755642Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:21.769405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:21.769927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:21.770125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:21.777307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:21.777545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:21.778221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:21.778444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:21.781111Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:21.781278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:21.786108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:21.786173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:21.786224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:21.786273Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:21.786321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:21.787097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:21.946163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.948492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.948660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.948785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.948866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.948943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.949012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.949082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.949179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.949314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.949385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.949449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.949555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.949675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.949771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
65: Send tablet strongly msg operationId: 175:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:175 msg type: 269090816 2026-01-04T02:39:24.252777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 175, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 175 at step: 5000112 FAKE_COORDINATOR: advance: minStep5000112 State->FrontStep: 5000111 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000112 2026-01-04T02:39:24.254004Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000112, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:24.254109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000112 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:24.254161Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_unsafe.cpp:47: TDropForceUnsafe TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000112, at schemeshard: 72057594046678944 2026-01-04T02:39:24.254218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 62] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:24.254244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 62] 2026-01-04T02:39:24.254338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 175:0 128 -> 130 2026-01-04T02:39:24.254492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:39:24.254554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 2 2026-01-04T02:39:24.256151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-04T02:39:24.256564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 FAKE_COORDINATOR: Erasing txId 175 2026-01-04T02:39:24.259785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:24.259828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:24.260006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 62] 2026-01-04T02:39:24.260114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at 
schemeshard: 72057594046678944 2026-01-04T02:39:24.260146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 175, path id: 1 2026-01-04T02:39:24.260182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 175, path id: 62 2026-01-04T02:39:24.260501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2026-01-04T02:39:24.260557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:418: [72057594046678944] TDeleteParts opId# 175:0 ProgressState 2026-01-04T02:39:24.260627Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2026-01-04T02:39:24.260653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-04T02:39:24.260682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2026-01-04T02:39:24.260707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-04T02:39:24.260736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: false 2026-01-04T02:39:24.260763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-04T02:39:24.260789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 175:0 2026-01-04T02:39:24.260812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 175:0 2026-01-04T02:39:24.260879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 3 2026-01-04T02:39:24.260909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 175, publications: 2, subscribers: 0 2026-01-04T02:39:24.260934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 1], 106 2026-01-04T02:39:24.260959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 175, [OwnerId: 72057594046678944, LocalPathId: 62], 18446744073709551615 2026-01-04T02:39:24.262195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2026-01-04T02:39:24.262274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2026-01-04T02:39:24.262311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 175 
2026-01-04T02:39:24.262340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 106 2026-01-04T02:39:24.262373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:39:24.262907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2026-01-04T02:39:24.263003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2026-01-04T02:39:24.263033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 175 2026-01-04T02:39:24.263059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 62], version: 18446744073709551615 2026-01-04T02:39:24.263088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 2 2026-01-04T02:39:24.263155Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 175, subscribers: 0 2026-01-04T02:39:24.263665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:39:24.263707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2026-01-04T02:39:24.263803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 1 2026-01-04T02:39:24.264738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:39:24.264778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2026-01-04T02:39:24.264833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:39:24.267274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-04T02:39:24.267373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-04T02:39:24.268892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:39:24.268981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2026-01-04T02:39:24.270115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2026-01-04T02:39:24.270163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2026-01-04T02:39:24.271468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2026-01-04T02:39:24.271549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2026-01-04T02:39:24.271582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:2883:4872] TestWaitNotification: OK eventTxId 175 |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> KikimrIcGateway::TestAlterResourcePool [GOOD] >> KikimrIcGateway::TestAlterStreamingQuery |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:21.664536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:21.664664Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:21.664714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:21.664765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:21.664802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:21.664834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:21.664898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:21.664974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:21.665858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:21.667261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:21.756469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:21.756542Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:21.770145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:21.770681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:21.770830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:21.778510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:21.778748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:21.779396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:21.779612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:21.782269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:21.782431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:21.786137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:21.786197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:21.786247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:21.786290Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:21.786342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:21.787094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:21.951234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.952969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.953030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.953126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.953213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.953313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
at step: 5000112 FAKE_COORDINATOR: advance: minStep5000112 State->FrontStep: 5000111 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 175 at step: 5000112 2026-01-04T02:39:24.424363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000112, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:24.424457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 175 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000112 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:24.424510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:190: TDropExtSubdomain TPropose, operationId: 175:0 HandleReply TEvOperationPlan, step: 5000112, at schemeshard: 72057594046678944 2026-01-04T02:39:24.424597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 62] name: USER_0 type: EPathTypeExtSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 175 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:24.424642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 62] 2026-01-04T02:39:24.424678Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 175:0 128 -> 134 2026-01-04T02:39:24.425169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-04T02:39:24.426940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-04T02:39:24.428030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2026-01-04T02:39:24.428079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:137: TDropExtSubdomain TDeleteExternalShards, operationId: 175:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:39:24.428186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 175:0 134 -> 135 2026-01-04T02:39:24.428327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:39:24.428377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 2 FAKE_COORDINATOR: Erasing txId 175 2026-01-04T02:39:24.430083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:24.430129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:24.430268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 175, path id: [OwnerId: 72057594046678944, LocalPathId: 62] 
2026-01-04T02:39:24.430437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:24.430492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 175, path id: 1 2026-01-04T02:39:24.430530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 175, path id: 62 2026-01-04T02:39:24.430893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2026-01-04T02:39:24.430936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_extsubdomain.cpp:29: [72057594046678944] TDeleteSubdomainSystemShards opId# 175:0 ProgressState 2026-01-04T02:39:24.430966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 175:0 135 -> 240 2026-01-04T02:39:24.431575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2026-01-04T02:39:24.431653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2026-01-04T02:39:24.431683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2026-01-04T02:39:24.431725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 106 2026-01-04T02:39:24.431767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:39:24.432400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2026-01-04T02:39:24.432485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2026-01-04T02:39:24.432521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2026-01-04T02:39:24.432547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 62], version: 18446744073709551615 2026-01-04T02:39:24.432578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 3 2026-01-04T02:39:24.432637Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2026-01-04T02:39:24.435238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2026-01-04T02:39:24.435286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 175:0 ProgressState 2026-01-04T02:39:24.435362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2026-01-04T02:39:24.435392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-04T02:39:24.435444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2026-01-04T02:39:24.435472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-04T02:39:24.435501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2026-01-04T02:39:24.435531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-04T02:39:24.435560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 175:0 2026-01-04T02:39:24.435584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 175:0 2026-01-04T02:39:24.435643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 2 2026-01-04T02:39:24.436189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:39:24.436229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2026-01-04T02:39:24.436295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 1 2026-01-04T02:39:24.436907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:39:24.436954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2026-01-04T02:39:24.437015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:39:24.437499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-04T02:39:24.437647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-04T02:39:24.439602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:39:24.439679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2026-01-04T02:39:24.440840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2026-01-04T02:39:24.440879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2026-01-04T02:39:24.442168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2026-01-04T02:39:24.442259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2026-01-04T02:39:24.442290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:3034:5022] TestWaitNotification: OK eventTxId 175 |90.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::UploadRowsToReplicatedTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:39:18.032267Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:39:18.151213Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:39:18.151658Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:39:18.151733Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:39:18.380543Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:18.380675Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:18.438805Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494355958339 != 1767494355958342 2026-01-04T02:39:18.446714Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:18.488112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:18.588789Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:39:18.853012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:18.865748Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:18.968440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:19.002856Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:39:19.003939Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:39:19.004271Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:39:19.004553Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:39:19.038267Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:39:19.038887Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:39:19.039030Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:39:19.040299Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:39:19.040388Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:39:19.040450Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:39:19.040912Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:39:19.041048Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:39:19.041146Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:39:19.051864Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:39:19.074554Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:39:19.074731Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:39:19.074840Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:39:19.074902Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:39:19.074944Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:39:19.074982Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:39:19.075156Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:39:19.075208Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:39:19.075621Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:39:19.075719Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:39:19.075778Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:39:19.075808Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:39:19.075841Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:39:19.075865Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:39:19.075923Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:39:19.075969Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:39:19.076000Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-04T02:39:19.076083Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:39:19.076105Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:39:19.076164Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:39:19.076426Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-04T02:39:19.076457Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:39:19.076542Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:39:19.076697Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:39:19.076727Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:39:19.076784Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:39:19.076837Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:39:19.076879Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:39:19.076918Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T02:39:19.076959Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:39:19.077226Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:39:19.077258Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-04T02:39:19.077292Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:39:19.077321Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-04T02:39:19.077384Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-04T02:39:19.077403Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:39:19.077424Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-04T02:39:19.077445Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-04T02:39:19.077482Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-04T02:39:19.078504Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-04T02:39:19.078538Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:39:19.089160Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:39:19.089228Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474 ... 73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:39:23.950239Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:39:23.950398Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:39:24.136205Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:24.136292Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:24.152775Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767494361339517 != 1767494361339521 2026-01-04T02:39:24.156349Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:24.199801Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:24.280374Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:39:24.553884Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:24.567356Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:24.670546Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:24.693473Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:886:2765] 2026-01-04T02:39:24.693730Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:39:24.734269Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:39:24.734371Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:39:24.735469Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:39:24.735525Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:39:24.735565Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:39:24.735783Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:39:24.735890Z node 2 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:39:24.735947Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:901:2765] in generation 1 2026-01-04T02:39:24.746546Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:39:24.746611Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:39:24.746698Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:39:24.746758Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:903:2775] 2026-01-04T02:39:24.746785Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:39:24.746811Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:39:24.746843Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:39:24.747101Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:39:24.747173Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:39:24.747219Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:39:24.747250Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:39:24.747280Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:39:24.747308Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:39:24.747357Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:882:2762], serverId# [2:888:2766], sessionId# [0:0:0] 2026-01-04T02:39:24.747450Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:39:24.747607Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:39:24.747666Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:39:24.749174Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:39:24.759726Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:39:24.759816Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:39:24.875545Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:917:2783], serverId# 
[2:919:2785], sessionId# [0:0:0] 2026-01-04T02:39:24.876275Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:39:24.876349Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:39:24.876904Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:39:24.876941Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:39:24.876977Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:39:24.877159Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:39:24.877273Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:39:24.877522Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:39:24.877564Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:39:24.877994Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:39:24.878280Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:39:24.879617Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:39:24.879654Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:39:24.879944Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:39:24.879991Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:39:24.881167Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:39:24.881459Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:39:24.881500Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:39:24.881545Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:39:24.881617Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:39:24.881663Z node 2 
:TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:39:24.881750Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:39:24.883295Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:39:24.883433Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:39:24.883476Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:39:24.887650Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:953:2811], serverId# [2:954:2812], sessionId# [0:0:0] 2026-01-04T02:39:24.887707Z node 2 :TX_DATASHARD NOTICE: datashard__op_rows.cpp:209: Rejecting bulk upsert request on datashard: tablet# 72075186224037888, error# Can't execute bulk upsert at replicated table |90.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetryOnRetryableError [GOOD] |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> LdapAuthProviderTest_StartTls::LdapRefreshRemoveUserBad [GOOD] >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError |90.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink |90.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx |90.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink >> TTxDataShardUploadRows::TestUploadShadowRows [GOOD] |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |90.7%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::ShouldRejectOnChangeQueueOverflowAndRetryOnRetryableError [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:39:15.995183Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:39:16.087104Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:39:16.087547Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:39:16.087604Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:39:16.331267Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:16.331377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:16.384528Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494353375746 != 1767494353375749 2026-01-04T02:39:16.401127Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:16.443892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:16.538163Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:39:16.820168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:16.834135Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:16.938943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.984191Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:900:2774] 2026-01-04T02:39:16.984521Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:39:17.036370Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:39:17.036672Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:906:2776] 2026-01-04T02:39:17.036880Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:39:17.045218Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:39:17.046993Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:39:17.047084Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:39:17.047139Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:39:17.047574Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:39:17.047848Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:39:17.047958Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:947:2774] in generation 1 2026-01-04T02:39:17.049716Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:39:17.049994Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:39:17.051565Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-04T02:39:17.051665Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-04T02:39:17.051743Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-04T02:39:17.052176Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:39:17.052536Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:912:2780] 2026-01-04T02:39:17.052787Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:39:17.061854Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:39:17.061927Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:957:2776] in generation 1 2026-01-04T02:39:17.062643Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:915:2783] 2026-01-04T02:39:17.062849Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:39:17.071913Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:39:17.072082Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:39:17.073508Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2026-01-04T02:39:17.073627Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037891 2026-01-04T02:39:17.073717Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037891 2026-01-04T02:39:17.074072Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:39:17.074269Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:39:17.074336Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037891 persisting started state actor id [1:968:2780] in generation 1 2026-01-04T02:39:17.074658Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:39:17.074722Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:39:17.076018Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-04T02:39:17.076085Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-04T02:39:17.076136Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-04T02:39:17.076438Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:39:17.076534Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:39:17.076605Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:969:2783] in generation 1 2026-01-04T02:39:17.087676Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:39:17.117893Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:39:17.118168Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:39:17.118316Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:974:2816] 2026-01-04T02:39:17.118357Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:39:17.118398Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:39:17.118448Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:39:17.119109Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:39:17.119155Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:39:17.119234Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:39:17.119291Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:975:2817] 2026-01-04T02:39:17.119326Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:39:17.119358Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:39:17.119393Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:39:17.119465Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:39:17.119498Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2026-01-04T02:39:17.119539Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:39:17.119599Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037891, actorId: [1:976:2818] 2026-01-04T02:39:17.119621Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037891 2026-01-04T02:39:17.119641Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2026-01-04T02:39:17.119660Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037891 2026-01-04T02:39:17.119812Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:39:17.120022Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:39:17.120311Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:39:17.120364Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:39:17.120412Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:39:17.120455Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at ... 92 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 93 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 94 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 95 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 96 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-04T02:39:25.993784Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:39:25.994363Z node 3 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:71: [ChangeSender][72075186224037889:1][3:933:2795] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 33 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 34 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 35 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 36 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 37 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 38 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 39 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 40 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 41 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 42 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 43 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 44 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 45 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 46 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 47 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 48 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 49 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 50 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 51 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 
},{ Order: 52 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 53 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 54 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 55 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 56 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 57 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 58 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 59 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 60 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 61 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 62 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 63 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 64 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 65 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 66 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 67 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 68 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 69 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 70 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 71 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 72 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 73 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 74 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 75 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 76 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 77 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 78 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 79 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 80 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 81 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 82 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 83 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 84 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 85 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 86 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 87 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 88 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 89 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 90 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 91 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 92 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 93 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 94 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 95 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] 
BodySize: 28 },{ Order: 96 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 }] } 2026-01-04T02:39:25.994817Z node 3 :CHANGE_EXCHANGE DEBUG: change_sender_async_index.cpp:224: [AsyncIndexChangeSenderMain][72075186224037889:1][3:967:2821] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvEnqueueRecords { Records [{ Order: 33 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 34 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 35 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 36 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 37 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 38 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 39 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 40 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 41 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 42 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 43 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 44 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 45 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 46 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 47 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 48 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 49 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 50 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 51 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 52 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 53 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 54 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 55 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 56 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 57 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 58 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 59 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 60 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 61 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 62 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 63 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 64 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 65 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 66 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 67 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 68 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 69 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 70 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 71 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 72 PathId: [OwnerId: 72057594046644480, 
LocalPathId: 39] BodySize: 28 },{ Order: 73 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 74 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 75 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 76 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 77 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 78 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 79 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 80 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 81 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 82 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 83 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 84 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 85 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 86 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 87 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 88 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 89 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 90 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 91 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 92 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 93 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 94 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 95 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 },{ Order: 96 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 }] } 2026-01-04T02:39:25.995014Z node 3 :RPC_REQUEST DEBUG: upload_rows_common_impl.h:1282: Upload rows: got OK from shard 72075186224037889 description: 2026-01-04T02:39:25.995166Z node 3 :RPC_REQUEST DEBUG: upload_rows_common_impl.h:1412: completed with status SUCCESS >> KqpDataIntegrityTrails::Ddl |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:39:15.680516Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:39:15.853310Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:39:15.854444Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:39:15.855109Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:39:16.215590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:16.215716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:16.282537Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494353497321 != 1767494353497324 2026-01-04T02:39:16.295818Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:16.345896Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:16.446987Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:39:16.734787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:16.748641Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:16.860699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.906277Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:900:2774] 2026-01-04T02:39:16.906544Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:39:16.960862Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:39:16.961137Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:906:2776] 2026-01-04T02:39:16.961349Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:39:16.969762Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:39:16.971455Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:39:16.971550Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:39:16.971600Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:39:16.971977Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:39:16.972242Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:39:16.972330Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:947:2774] in generation 1 2026-01-04T02:39:16.974042Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:39:16.974291Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:39:16.975645Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-04T02:39:16.975717Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-04T02:39:16.975789Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-04T02:39:16.976111Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:39:16.976388Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:912:2780] 2026-01-04T02:39:16.976566Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:39:16.985328Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:39:16.985398Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:957:2776] in generation 1 2026-01-04T02:39:16.986140Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:915:2783] 2026-01-04T02:39:16.986333Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:39:16.995118Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:39:16.995291Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:39:16.996673Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037891 2026-01-04T02:39:16.996739Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037891 2026-01-04T02:39:16.996780Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037891 2026-01-04T02:39:16.997144Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:39:16.997375Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:39:16.997442Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037891 persisting started state actor id [1:968:2780] in generation 1 2026-01-04T02:39:16.997755Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:39:16.997839Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:39:16.999007Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-04T02:39:16.999064Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-04T02:39:16.999119Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-04T02:39:16.999416Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:39:16.999507Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:39:16.999581Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:969:2783] in generation 1 2026-01-04T02:39:17.010676Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:39:17.038965Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:39:17.039210Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:39:17.039315Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:974:2816] 2026-01-04T02:39:17.039347Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:39:17.039405Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:39:17.039445Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:39:17.040043Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:39:17.040122Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:39:17.040210Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:39:17.040265Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:975:2817] 2026-01-04T02:39:17.040290Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:39:17.040311Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:39:17.040362Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:39:17.040437Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:39:17.040464Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2026-01-04T02:39:17.040530Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037891 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:39:17.040597Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037891, actorId: [1:976:2818] 2026-01-04T02:39:17.040619Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037891 2026-01-04T02:39:17.040640Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2026-01-04T02:39:17.040661Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037891 2026-01-04T02:39:17.040806Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:39:17.040944Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:39:17.041233Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:39:17.041291Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:39:17.041343Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:39:17.041384Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at ... 2:39:26.292565Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:39:26.293392Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:39:26.293450Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:39:26.293503Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:39:26.293797Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:39:26.294011Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:39:26.294799Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:39:26.294875Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:39:26.295330Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:39:26.295741Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:39:26.297218Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:39:26.297271Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:39:26.298097Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:39:26.298194Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:39:26.299053Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:39:26.299098Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:39:26.299149Z node 3 :TX_DATASHARD INFO: 
datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:39:26.299205Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:400:2399], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:39:26.299253Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:39:26.299337Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:39:26.300262Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:39:26.301368Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:39:26.301833Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:39:26.301890Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:39:26.309058Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:26.309132Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:26.309181Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:26.309550Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:965:2817], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:26.309684Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:26.312812Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:26.317382Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:39:26.430239Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:39:26.432309Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:39:26.466737Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:26.546850Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1054:2873], serverId# [3:1055:2874], sessionId# [0:0:0] 2026-01-04T02:39:26.547302Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-04T02:39:26.547473Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=3 2026-01-04T02:39:26.558502Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:39:26.632835Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2026-01-04T02:39:26.633001Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=3 2026-01-04T02:39:26.644004Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:39:26.717721Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2026-01-04T02:39:26.717919Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=3 2026-01-04T02:39:26.728974Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:39:26.732682Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(36) Execute: at tablet# 72075186224037888 2026-01-04T02:39:26.743929Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2026-01-04T02:39:26.744023Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:39:26.745681Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:39:26.746137Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:39:26.746383Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:39:26.746436Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:39:26.746494Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715663] at 72075186224037888 for WaitForStreamClearance 2026-01-04T02:39:26.746831Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:39:26.746912Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:39:26.747614Z node 3 
:TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715663, MessageQuota: 1 2026-01-04T02:39:26.747957Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-04T02:39:26.748119Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715663, PendingAcks: 0 2026-01-04T02:39:26.748176Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715663, MessageQuota: 0 2026-01-04T02:39:26.790079Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-04T02:39:26.790159Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715663, at: 72075186224037888 2026-01-04T02:39:26.790356Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:39:26.790392Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:39:26.790432Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715663] at 72075186224037888 for ReadTableScan 2026-01-04T02:39:26.790576Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:39:26.790648Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:39:26.790694Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest >> KikimrIcGateway::TestLoadBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets >> StoragePool::TestDistributionRandomProbability [GOOD] >> StoragePool::TestDistributionRandomProbabilityWithOverflow [GOOD] >> StoragePool::TestDistributionExactMin >> DataStreams::TestGetRecords1MBMessagesOneByOneByTS [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] >> KikimrIcGateway::TestCreateStreamingQuery [GOOD] >> KqpSysColV0::InnerJoinTables |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelectAsterisk >> KqpSystemView::PartitionStatsRanges >> KqpSystemView::NodesOrderByDesc >> KqpSysColV1::InnerJoinSelect >> KqpSystemView::QueryStatsScan >> KqpSysColV1::SelectRowAsterisk >> KqpSystemView::QueryStatsSimple >> KqpSystemView::CompileCacheCheckWarnings-EnableCompileCacheView >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] >> KqpSystemView::CompileCacheBasic-EnableCompileCacheView >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] >> 
KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] >> KqpDataIntegrityTrails::Select [GOOD] >> BasicStatistics::StatisticsOnShardsRestart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::TestUploadShadowRowsShadowDataAlterSplitThenPublish [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:39:15.879657Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:39:15.968325Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:39:15.968655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:39:15.968703Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:39:16.215592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:16.215713Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:16.282526Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494353384239 != 1767494353384242 2026-01-04T02:39:16.296190Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:16.346481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:16.435263Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:39:16.734698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:16.748604Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:16.860803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.903188Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:39:16.904348Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:39:16.904701Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-04T02:39:16.905019Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:39:16.957553Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:39:16.959410Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:39:16.959583Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:39:16.962696Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:39:16.962816Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:39:16.963585Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:39:16.965331Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:39:16.965509Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:39:16.965626Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:39:16.976433Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:39:17.009868Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:39:17.011036Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:39:17.011189Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:39:17.011247Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:39:17.011284Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:39:17.011313Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:39:17.011439Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:39:17.011508Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:39:17.011820Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:39:17.012417Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:39:17.012551Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:39:17.012598Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:39:17.013175Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:39:17.013224Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:39:17.013259Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:39:17.014007Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:39:17.014056Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-04T02:39:17.014191Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:39:17.014222Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:39:17.014262Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:39:17.014629Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-04T02:39:17.015188Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:39:17.015344Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:39:17.016146Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:39:17.016203Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:39:17.016298Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:39:17.016355Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:39:17.016391Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:39:17.016420Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T02:39:17.016452Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:39:17.017263Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:39:17.017300Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-04T02:39:17.017328Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:39:17.017357Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-04T02:39:17.017889Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-04T02:39:17.017941Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:39:17.017978Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-04T02:39:17.018004Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-04T02:39:17.018046Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-04T02:39:17.019707Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-04T02:39:17.019744Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:39:17.030475Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:39:17.031178Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474 ... pp:1883: Execution status for [4500:281474976715668] at 72075186224037889 is DelayComplete 2026-01-04T02:39:29.425527Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037889 executing on unit CompleteOperation 2026-01-04T02:39:29.425574Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [4500:281474976715668] at 72075186224037889 to execution unit CompletedOperations 2026-01-04T02:39:29.425613Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037889 on unit CompletedOperations 2026-01-04T02:39:29.425653Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037889 is Executed 2026-01-04T02:39:29.425684Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037889 executing on unit CompletedOperations 2026-01-04T02:39:29.425716Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [4500:281474976715668] at 72075186224037889 has finished 2026-01-04T02:39:29.425757Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:39:29.425784Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037889 2026-01-04T02:39:29.425840Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037889 has no attached operations 2026-01-04T02:39:29.425881Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037889 2026-01-04T02:39:29.436923Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:39:29.437029Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:39:29.437092Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [4500:281474976715668] at 72075186224037889 on unit CompleteOperation 2026-01-04T02:39:29.437174Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [4500 : 281474976715668] from 72075186224037889 at tablet 72075186224037889 send result to client [3:1345:3113], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:39:29.437247Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:39:29.437698Z node 3 
:TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287940, Sender [3:1345:3113], Recipient [3:1172:2964]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715668 Cleared: true 2026-01-04T02:39:29.437770Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3174: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2026-01-04T02:39:29.437906Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 4500} 2026-01-04T02:39:29.437963Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-04T02:39:29.437995Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-04T02:39:29.438074Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [3:1172:2964], Recipient [3:1172:2964]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:39:29.438108Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:39:29.438174Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-04T02:39:29.438218Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:39:29.438258Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [4500:281474976715668] at 72075186224037890 for WaitForStreamClearance 2026-01-04T02:39:29.438291Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit WaitForStreamClearance 2026-01-04T02:39:29.438326Z node 3 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [4500:281474976715668] at 72075186224037890 2026-01-04T02:39:29.438368Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is Executed 2026-01-04T02:39:29.438401Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037890 executing on unit WaitForStreamClearance 2026-01-04T02:39:29.438430Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [4500:281474976715668] at 72075186224037890 to execution unit ReadTableScan 2026-01-04T02:39:29.438463Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit ReadTableScan 2026-01-04T02:39:29.438710Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is Continue 2026-01-04T02:39:29.438741Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:39:29.438768Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037890 2026-01-04T02:39:29.438800Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:52: TPlanQueueUnit at 72075186224037890 out-of-order limits exceeded 2026-01-04T02:39:29.438847Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037890 2026-01-04T02:39:29.439808Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received 
event# 2146435082, Sender [3:1365:3131], Recipient [3:1172:2964]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-01-04T02:39:29.439862Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2026-01-04T02:39:29.440137Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 1 2026-01-04T02:39:29.440879Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715668, Size: 54, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-04T02:39:29.480214Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715668, PendingAcks: 0 2026-01-04T02:39:29.480302Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715668, MessageQuota: 0 2026-01-04T02:39:29.481948Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2026-01-04T02:39:29.481988Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715668, at: 72075186224037890 2026-01-04T02:39:29.482097Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [3:1172:2964], Recipient [3:1172:2964]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:39:29.482145Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:39:29.482214Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-04T02:39:29.482243Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:39:29.482276Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [4500:281474976715668] at 72075186224037890 for ReadTableScan 2026-01-04T02:39:29.482302Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit ReadTableScan 2026-01-04T02:39:29.482330Z node 3 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [4500:281474976715668] at 72075186224037890 error: , IsFatalError: 0 2026-01-04T02:39:29.482365Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is Executed 2026-01-04T02:39:29.482395Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037890 executing on unit ReadTableScan 2026-01-04T02:39:29.482424Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [4500:281474976715668] at 72075186224037890 to execution unit CompleteOperation 2026-01-04T02:39:29.482448Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit CompleteOperation 2026-01-04T02:39:29.482602Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is DelayComplete 2026-01-04T02:39:29.482624Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037890 executing on unit CompleteOperation 2026-01-04T02:39:29.482647Z node 3 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [4500:281474976715668] at 72075186224037890 to execution unit CompletedOperations 2026-01-04T02:39:29.482669Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [4500:281474976715668] at 72075186224037890 on unit CompletedOperations 2026-01-04T02:39:29.482693Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [4500:281474976715668] at 72075186224037890 is Executed 2026-01-04T02:39:29.482718Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [4500:281474976715668] at 72075186224037890 executing on unit CompletedOperations 2026-01-04T02:39:29.482737Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [4500:281474976715668] at 72075186224037890 has finished 2026-01-04T02:39:29.482759Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:39:29.482782Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2026-01-04T02:39:29.482810Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2026-01-04T02:39:29.482834Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2026-01-04T02:39:29.493845Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-04T02:39:29.493925Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-04T02:39:29.493967Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [4500:281474976715668] at 72075186224037890 on unit CompleteOperation 2026-01-04T02:39:29.494032Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [4500 : 281474976715668] from 72075186224037890 at tablet 72075186224037890 send result to client [3:1345:3113], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:39:29.494086Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestCreateStreamingQuery [GOOD] Test command err: Trying to start YDB, gRPC: 18229, MsgBus: 15419 2026-01-04T02:39:15.329858Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330451356295377:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:15.329950Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:15.560817Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:15.574005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:15.574149Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-04T02:39:15.630471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:15.683038Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:15.684288Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330451356295348:2081] 1767494355328186 != 1767494355328189 2026-01-04T02:39:15.736284Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:15.736324Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:15.736341Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:15.736442Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:15.826839Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:16.151225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:16.170193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2026-01-04T02:39:16.185105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) 2026-01-04T02:39:16.337637Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Trying to start YDB, gRPC: 26120, MsgBus: 25270 2026-01-04T02:39:18.295755Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330465924514543:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:18.295865Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:18.310504Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:18.378859Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:18.380019Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification 
cookie mismatch for subscription [2:7591330465924514517:2081] 1767494358295279 != 1767494358295282 2026-01-04T02:39:18.415340Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:18.415418Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:18.421882Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:18.451454Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:18.451475Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:18.451504Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:18.451607Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:18.605239Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:18.780450Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:18.799920Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) Trying to start YDB, gRPC: 21402, MsgBus: 8633 2026-01-04T02:39:21.380691Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591330479214485315:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:21.380754Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:21.391959Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:21.477504Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591330479214485288:2081] 1767494361379920 != 1767494361379923 2026-01-04T02:39:21.492133Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:21.507498Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:21.507595Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:21.511233Z node 3 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:21.547531Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:21.547561Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:21.547568Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:21.547651Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:21.677965Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:21.927598Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:21.945687Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) Trying to start YDB, gRPC: 23315, MsgBus: 24044 2026-01-04T02:39:24.362176Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591330491732350273:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:24.362256Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:24.373081Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:24.455746Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:24.457106Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591330491732350246:2081] 1767494364361373 != 1767494364361376 2026-01-04T02:39:24.514741Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:24.514836Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:24.518520Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:24.550231Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:24.550259Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:24.550267Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: 
(empty maybe) 2026-01-04T02:39:24.550351Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:24.667959Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:24.938992Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:25.386041Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:25.390271Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:27.128068Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330504617253137:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:27.128068Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330504617253138:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:27.128145Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330504617253122:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:27.128258Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:27.128539Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330504617253144:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:27.128572Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:27.130730Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:27.140467Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591330504617253142:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:39:27.143388Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591330504617253147:2569] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:39:27.143746Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-04T02:39:27.143901Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591330504617253143:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:39:27.199543Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591330504617253193:2599] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:27.242591Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591330504617253211:2607] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:27.906502Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:28.205742Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:28.294164Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:29.362427Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591330491732350273:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:29.362503Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestAlterStreamingQuery [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDifferentIssuersBad [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_auditsettings/unittest >> TSchemeShardAuditSettings::AlterExtSubdomain-ExternalSchemeShard-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:21.664571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:21.664682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:21.664731Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:21.664768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:21.664809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:21.664839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:21.664913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:21.665005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:21.665917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:21.667263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:21.762874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:21.762954Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:21.777558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:21.777777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:21.778019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:21.786147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:21.786413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:21.787084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:21.787377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:21.790440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:21.790670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:21.791954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:21.792025Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:21.792136Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:21.792202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:21.792249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:21.792444Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:21.955441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.956462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.956576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.956658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.956737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.956813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.956882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.956946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.957007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.957120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.957188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.957269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.957334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.957412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:21.957521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 57594046678944, cookie: 175 2026-01-04T02:39:30.725723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 106 PathOwnerId: 72057594046678944, cookie: 175 2026-01-04T02:39:30.725760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2026-01-04T02:39:30.725798Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 106 2026-01-04T02:39:30.725863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:39:30.726827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2026-01-04T02:39:30.726925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 62 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 175 2026-01-04T02:39:30.726956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 175 2026-01-04T02:39:30.726988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 175, pathId: [OwnerId: 72057594046678944, LocalPathId: 62], version: 18446744073709551615 2026-01-04T02:39:30.727024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 6 2026-01-04T02:39:30.727105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 0/1, is published: true 2026-01-04T02:39:30.730536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 3, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-04T02:39:30.730651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:74 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:39:30.730692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:73 hive 72057594037968897 at ss 72057594046678944 
2026-01-04T02:39:30.730725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:75 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:39:30.731047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 175:0, at schemeshard: 72057594046678944 2026-01-04T02:39:30.731099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 175:0 ProgressState 2026-01-04T02:39:30.731193Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2026-01-04T02:39:30.731230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-04T02:39:30.731271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#175:0 progress is 1/1 2026-01-04T02:39:30.731306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-04T02:39:30.731342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 175, ready parts: 1/1, is published: true 2026-01-04T02:39:30.731382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 175 ready parts: 1/1 2026-01-04T02:39:30.731421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 175:0 2026-01-04T02:39:30.731454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 175:0 2026-01-04T02:39:30.731638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 5 2026-01-04T02:39:30.733406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-04T02:39:30.733701Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 74 TxId_Deprecated: 0 TabletID: 72075186233409619 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 74 TxId_Deprecated: 0 TabletID: 72075186233409619 2026-01-04T02:39:30.735583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 74, at schemeshard: 72057594046678944 2026-01-04T02:39:30.735842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 4 Forgetting tablet 72075186233409619 2026-01-04T02:39:30.736610Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 73 TxId_Deprecated: 0 TabletID: 72075186233409618 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 73 TxId_Deprecated: 0 TabletID: 72075186233409618 2026-01-04T02:39:30.737060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6958: Transaction 281474976840657 reset current state at schemeshard 72075186233409618 because pipe to tablet 72075186233409619 disconnected 
2026-01-04T02:39:30.737096Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6987: Pipe attached message is found and resent into the new pipe, opId:281474976840657:4294967295, dst tableId: 72075186233409619, msg type: 269090816, msg cookie: 0:281474976840657, at schemeshardId: 72075186233409618 2026-01-04T02:39:30.737216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 73, at schemeshard: 72057594046678944 2026-01-04T02:39:30.737415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 3 2026-01-04T02:39:30.737595Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 75 TxId_Deprecated: 0 TabletID: 72075186233409620 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 75 TxId_Deprecated: 0 TabletID: 72075186233409620 2026-01-04T02:39:30.738596Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:30.741916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 75, at schemeshard: 72057594046678944 2026-01-04T02:39:30.742134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 2 Forgetting tablet 72075186233409618 Forgetting tablet 72075186233409620 2026-01-04T02:39:30.743557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:39:30.743610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2026-01-04T02:39:30.743731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 62] was 1 2026-01-04T02:39:30.745175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:39:30.745222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 62], at schemeshard: 72057594046678944 2026-01-04T02:39:30.745285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:39:30.745928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 175 2026-01-04T02:39:30.747310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:74 2026-01-04T02:39:30.747351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:74 tabletId 72075186233409619 
2026-01-04T02:39:30.749587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:73 2026-01-04T02:39:30.749636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:73 tabletId 72075186233409618 2026-01-04T02:39:30.749735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:75 2026-01-04T02:39:30.749764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:75 tabletId 72075186233409620 2026-01-04T02:39:30.749855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:39:30.750105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 175, wait until txId: 175 TestWaitNotification wait txId: 175 2026-01-04T02:39:30.751569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 175: send EvNotifyTxCompletion 2026-01-04T02:39:30.751607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 175 2026-01-04T02:39:30.752830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 175, at schemeshard: 72057594046678944 2026-01-04T02:39:30.753003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 175: got EvNotifyTxCompletionResult 2026-01-04T02:39:30.753050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 175: satisfy waiter [1:8304:9332] TestWaitNotification: OK eventTxId 175 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_auditsettings/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Select [GOOD] Test command err: Trying to start YDB, gRPC: 3283, MsgBus: 62716 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_upload_rows/unittest >> TTxDataShardUploadRows::BulkUpsertDuringAddIndexRaceCorruption [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:39:15.736985Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:39:15.853426Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:39:15.854571Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:39:15.855132Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:39:16.215642Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:16.215775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:16.297775Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494353375755 != 1767494353375758 2026-01-04T02:39:16.310491Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:16.358460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:16.465419Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:39:16.753704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:16.767668Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:16.872133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.915502Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:39:16.916838Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:39:16.917155Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:892:2768] 2026-01-04T02:39:16.917449Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:39:16.960032Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:39:16.960115Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:880:2761], Recipient [1:895:2770]: 
NKikimr::TEvTablet::TEvBoot 2026-01-04T02:39:16.961036Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:880:2761], Recipient [1:895:2770]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:39:16.961248Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:895:2770] 2026-01-04T02:39:16.961441Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:39:16.967608Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:880:2761], Recipient [1:895:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:39:16.967996Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:39:16.968131Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:39:16.969372Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:39:16.969422Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:39:16.969462Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:39:16.969779Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:39:16.969908Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:39:16.969976Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:923:2768] in generation 1 2026-01-04T02:39:16.970307Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:39:16.970384Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:39:16.971317Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-04T02:39:16.971366Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-04T02:39:16.971404Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-04T02:39:16.971589Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:39:16.971660Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:39:16.971697Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:924:2770] in generation 1 2026-01-04T02:39:16.982508Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:39:17.009928Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:39:17.011022Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:39:17.011163Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:927:2789] 2026-01-04T02:39:17.011214Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:39:17.011249Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 
72075186224037888, state: WaitScheme 2026-01-04T02:39:17.011287Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:39:17.011573Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:892:2768], Recipient [1:892:2768]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:39:17.011635Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:39:17.011749Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:39:17.011791Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:39:17.011853Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:39:17.011966Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:928:2790] 2026-01-04T02:39:17.011993Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:39:17.012017Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:39:17.012039Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:39:17.012290Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:895:2770], Recipient [1:895:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:39:17.012340Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:39:17.012541Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:39:17.012632Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:39:17.012738Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:39:17.012799Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:39:17.013194Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:39:17.013242Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:39:17.013289Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:39:17.014032Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:39:17.014099Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:39:17.014160Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-04T02:39:17.014226Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to 
remove in 72075186224037889 2026-01-04T02:39:17.014328Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:896:2771], Recipient [1:892:2768]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:39:17.014367Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:39:17.014408Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:885:2764], serverId# [1:896:2771], sessionId# [0:0:0] 2026-01-04T02:39:17.014509Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:39:17.014548Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:39:17.014593 ... on plan for [0:10] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:39:30.237747Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:10] at 72075186224037888 has finished 2026-01-04T02:39:30.248649Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(36) Complete: at tablet# 72075186224037888 2026-01-04T02:39:30.248720Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:10] at 72075186224037888 on unit DirectOp 2026-01-04T02:39:30.248764Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 ... bulk upsert finished with status GENERIC_ERROR 2026-01-04T02:39:30.470547Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [3:68:2115] Handle TEvExecuteKqpTransaction 2026-01-04T02:39:30.470612Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [3:68:2115] TxId# 281474976715662 ProcessProposeKqpTransaction 2026-01-04T02:39:30.473936Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [3:1405:3187], Recipient [3:891:2770]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-04T02:39:30.474186Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-04T02:39:30.474248Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v23000/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v23000/18446744073709551615 ImmediateWriteEdgeReplied# v23000/18446744073709551615 2026-01-04T02:39:30.474298Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v23000/18446744073709551615 2026-01-04T02:39:30.474359Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on unit CheckRead 2026-01-04T02:39:30.474452Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2026-01-04T02:39:30.474494Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit CheckRead 2026-01-04T02:39:30.474533Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 72075186224037888 to execution unit BuildAndWaitDependencies 
2026-01-04T02:39:30.474571Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:39:30.474622Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:11] at 72075186224037888 2026-01-04T02:39:30.474663Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2026-01-04T02:39:30.474696Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:39:30.474718Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T02:39:30.474742Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on unit ExecuteRead 2026-01-04T02:39:30.474846Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 3 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-04T02:39:30.475136Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[3:1405:3187], 0} after executionsCount# 1 2026-01-04T02:39:30.475205Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[3:1405:3187], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:39:30.475301Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[3:1405:3187], 0} finished in read 2026-01-04T02:39:30.475370Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2026-01-04T02:39:30.475398Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T02:39:30.475425Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:11] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:39:30.475450Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:11] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:39:30.475493Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:11] at 72075186224037888 is Executed 2026-01-04T02:39:30.475513Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:11] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:39:30.475537Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:11] at 72075186224037888 has finished 2026-01-04T02:39:30.475580Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T02:39:30.475690Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-04T02:39:30.476458Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [3:1405:3187], Recipient [3:891:2770]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:39:30.476524Z node 3 :TX_DATASHARD TRACE: 
datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } 2026-01-04T02:39:30.622343Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [3:68:2115] Handle TEvExecuteKqpTransaction 2026-01-04T02:39:30.622421Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [3:68:2115] TxId# 281474976715663 ProcessProposeKqpTransaction 2026-01-04T02:39:30.628474Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [3:1435:3211], Recipient [3:1117:2932]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 44 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false RangesSize: 1 2026-01-04T02:39:30.628612Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-04T02:39:30.628676Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037889 CompleteEdge# v8000/281474976710759 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v7000/18446744073709551615 ImmediateWriteEdgeReplied# v7000/18446744073709551615 2026-01-04T02:39:30.628718Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037889 changed HEAD read to non-repeatable v23000/18446744073709551615 2026-01-04T02:39:30.628772Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CheckRead 2026-01-04T02:39:30.628856Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-04T02:39:30.628901Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CheckRead 2026-01-04T02:39:30.628932Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-04T02:39:30.628961Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-04T02:39:30.629000Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037889 2026-01-04T02:39:30.629028Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-04T02:39:30.629045Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-04T02:39:30.629064Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit ExecuteRead 2026-01-04T02:39:30.629085Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit ExecuteRead 2026-01-04T02:39:30.629185Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 44 SchemaVersion: 2 } Columns: 2 Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-01-04T02:39:30.629457Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[3:1435:3211], 0} after executionsCount# 1 2026-01-04T02:39:30.629507Z node 3 :TX_DATASHARD TRACE: 
datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[3:1435:3211], 0} sends rowCount# 2, bytes# 64, quota rows left# 32765, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:39:30.629579Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[3:1435:3211], 0} finished in read 2026-01-04T02:39:30.629629Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-04T02:39:30.629649Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit ExecuteRead 2026-01-04T02:39:30.629671Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037889 to execution unit CompletedOperations 2026-01-04T02:39:30.629698Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037889 on unit CompletedOperations 2026-01-04T02:39:30.629742Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037889 is Executed 2026-01-04T02:39:30.629763Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037889 executing on unit CompletedOperations 2026-01-04T02:39:30.629792Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037889 has finished 2026-01-04T02:39:30.629863Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-04T02:39:30.629995Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-04T02:39:30.632184Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [3:1435:3211], Recipient [3:1117:2932]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:39:30.632251Z node 3 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 4 } } |90.8%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_upload_rows/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28627, MsgBus: 6951 |90.8%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx [GOOD] >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::StatisticsOnShardsRestart [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:32:30.447331Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:30.545442Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:30.553548Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:30.553989Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:30.554183Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:30.987157Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:31.090696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:31.090825Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:31.137643Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:31.221389Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:31.877454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:31.878474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:31.878524Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:31.878554Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:31.878936Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:31.945614Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:32.528348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:32:35.914811Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:35.922696Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:32:35.926163Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:35.961335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:32:35.961465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:35.990801Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:35.992965Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:36.189280Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:36.190032Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:36.190598Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:36.191246Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:36.191458Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:36.191614Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:36.191752Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:36.191909Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:36.192001Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:36.375005Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:32:36.386750Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:36.386918Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:36.400892Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:36.556255Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:36.617753Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:32:36.617876Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:32:36.647784Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:32:36.648167Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:32:36.648397Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:32:36.648476Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:32:36.648541Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:32:36.648597Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:32:36.648651Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:32:36.648702Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:32:36.649404Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:32:36.803642Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2223:2620] 2026-01-04T02:32:36.804203Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:32:36.806637Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Describe result: PathErrorUnknown 2026-01-04T02:32:36.806689Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Creating table 2026-01-04T02:32:36.806777Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:36.810530Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2627], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:36.814770Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2234:2625] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:32:36.819404Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:32:36.888649Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:32:36.907213Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:36.907333Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2302:2660], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:36.923003Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2328:2683] 2026-01-04T02:32:36.924466Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2328:2683], schemeshard id = 72075186224037897 2026-01-04T02:32:37.242707Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:37.244770Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2594:2886], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:37.247652Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:37.250699Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:32:37.250776Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on create table tx: 28 ... 2026-01-04T02:38:43.503427Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:38:43.503497Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:38:43.503730Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-04T02:38:43.517254Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:38:45.242901Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:38:45.243072Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 42 2026-01-04T02:38:45.243146Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 42 2026-01-04T02:38:49.243793Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:38:50.090823Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:38:50.090907Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:38:50.091162Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-04T02:38:50.105575Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:38:51.897293Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:38:51.897497Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 43 2026-01-04T02:38:51.897598Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] 
EvPropagateStatisticsResponse, cookie: 43 2026-01-04T02:38:55.657219Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:38:56.474667Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:38:56.474738Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:38:56.474961Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-04T02:38:56.487781Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:38:58.162691Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:38:58.162898Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 44 2026-01-04T02:38:58.163058Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 44 2026-01-04T02:39:01.708395Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:39:02.446419Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:39:02.446481Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:39:02.446621Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-04T02:39:02.470812Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:04.176932Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:39:04.177122Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 45 2026-01-04T02:39:04.177301Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 45 2026-01-04T02:39:08.012291Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:39:08.012378Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:39:08.012416Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:39:08.012453Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-04T02:39:08.206098Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:39:09.027221Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:39:09.027307Z node 2 :STATISTICS DEBUG: 
schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:39:09.027538Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-04T02:39:09.041320Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:10.845106Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:39:10.845337Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 46 2026-01-04T02:39:10.845464Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 46 2026-01-04T02:39:14.583954Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:39:15.348124Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:39:15.348197Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:39:15.348442Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-04T02:39:15.361309Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:16.926753Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:39:16.926955Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 47 2026-01-04T02:39:16.927059Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 47 2026-01-04T02:39:20.718342Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:39:21.582521Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:39:21.582597Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:39:21.582865Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-04T02:39:21.596649Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:23.339096Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:39:23.339271Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 48 2026-01-04T02:39:23.339354Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 48 2026-01-04T02:39:27.511664Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:39:28.313400Z node 2 :STATISTICS 
DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 4, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:39:28.313471Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:39:28.313688Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 100, entries count: 4, are all stats full: 1 2026-01-04T02:39:28.327219Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:30.085304Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:39:30.085486Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 49 2026-01-04T02:39:30.085589Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 49 2026-01-04T02:39:30.101178Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:16546:9887]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:30.104571Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-04T02:39:30.104659Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:16546:9887], StatRequests.size() = 1 2026-01-04T02:39:30.108478Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:16562:9891]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:30.111948Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2026-01-04T02:39:30.112031Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 5, ReplyToActorId = [2:16562:9891], StatRequests.size() = 1 2026-01-04T02:39:30.115893Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 6 ], ReplyToActorId[ [2:16578:9895]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:30.119470Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] 2026-01-04T02:39:30.119567Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 6, ReplyToActorId = [2:16578:9895], StatRequests.size() = 1 |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> KqpSystemView::CompileCacheBasic+EnableCompileCacheView ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestAlterStreamingQuery [GOOD] Test command err: Trying to start YDB, gRPC: 23940, MsgBus: 12168 2026-01-04T02:39:13.473586Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330443335888278:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:13.474395Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:13.649221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:13.706312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:13.706420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:13.734760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:13.756794Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:13.817102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:13.965141Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:13.965163Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:13.965183Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:13.965281Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:14.480522Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:14.501493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:14.531434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2026-01-04T02:39:14.550802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) Trying to start YDB, gRPC: 62894, MsgBus: 64218 2026-01-04T02:39:16.385695Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330458747927152:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:16.385763Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:16.398866Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:16.466913Z node 2 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:16.468507Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330458747927127:2081] 1767494356385224 != 1767494356385227 2026-01-04T02:39:16.502394Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:16.502453Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:16.504306Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:16.537884Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:16.537913Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:16.537927Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:16.538044Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:16.598319Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:16.886263Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:16.900626Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2026-01-04T02:39:16.914845Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_table.cpp:352) 2026-01-04T02:39:16.962545Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330458747928031:2538] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/f1/f2/external_table\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeExternalTable, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:16.962693Z node 2 :KQP_GATEWAY ERROR: scheme.h:178: Unexpected error on scheme request, TxId: 281474976710660, ProxyStatus: ExecComplete, SchemeShardReason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeExternalTable, state: EPathStateNoChanges)
: Error: Scheme operation failed, status: ExecComplete, reason: Check failed: path: '/Root/f1/f2/external_table', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeExternalTable, state: EPathStateNoChanges) Trying to start YDB, gRPC: 19835, MsgBus: 24721 2026-01-04T02:39:19.130687Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591330469707019671:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:19.130833Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:19.144194Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:19.237916Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:19.249248Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:19.249314Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:19.251333Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:19.289210Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:19.289236Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:19.289265Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:19.289353Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:19.430800Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:19.671706Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:19.693996Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) Trying to start YDB, gRPC: 19259, MsgBus: 4552 2026-01-04T02:39:22.195684Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591330480886467547:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:22.195735Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:22.207427Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:22.272837Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591330480886467518:2081] 1767494362194829 != 1767494362194832 2026-01-04T02:39:22.277613Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:22.312474Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:22.312544Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:22.314332Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:22.345056Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:22.345076Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:22.345081Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:22.345142Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:22.377170Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:22.684312Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:22.698539Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:22.714477Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterResourcePool, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_resource_pool.cpp:155) Trying to start YDB, gRPC: 12898, MsgBus: 11827 2026-01-04T02:39:25.514865Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591330494209397728:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:25.515015Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-01-04T02:39:25.528870Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:25.610240Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591330494209397700:2081] 1767494365513718 != 1767494365513721 2026-01-04T02:39:25.623236Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:25.628611Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:25.628722Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:25.647561Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:25.689463Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:25.689491Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:25.689500Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:25.689610Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:25.819126Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:26.187756Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:26.528958Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:26.534353Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:28.606255Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591330507094300593:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:28.606343Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591330507094300579:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:28.606385Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591330507094300595:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:28.606540Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:28.611034Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:28.612976Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591330507094300602:2572] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:39:28.619468Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591330507094300599:2342], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:39:28.619563Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591330507094300600:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:39:28.700285Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591330507094300658:2604] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:28.713278Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591330507094300668:2611] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:29.377470Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:29.709222Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:29.832312Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:30.515287Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591330494209397728:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:30.515399Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest |90.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} |90.8%| [TA] $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpDataIntegrityTrails::Ddl [GOOD] |90.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... results_accumulator.log} |90.8%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} |90.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_upload_rows/test-results/unittest/{meta.json ... results_accumulator.log} |90.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_auditsettings/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSysColV1::InnerJoinTables >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] >> DataStreams::TestGetRecordsStreamWithMultipleShards [GOOD] >> DataStreams::TestGetRecordsWithBigSeqno ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11747, MsgBus: 28478 |90.8%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::BrokenReadLockAbortedTx [GOOD] Test command err: Trying to start YDB, gRPC: 27057, MsgBus: 29893 |90.8%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert+LogEnabled-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 31048, MsgBus: 6225 |90.8%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> StoragePool::TestDistributionExactMin [GOOD] >> StoragePool::TestDistributionExactMinWithOverflow [GOOD] >> StoragePool::TestDistributionRandomMin7p >> KqpSystemView::TopQueriesOrderByDesc ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Ddl [GOOD] Test command err: Trying to start YDB, gRPC: 29018, MsgBus: 24832 |90.8%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::Upsert-LogEnabled-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 7002, MsgBus: 64132 |90.8%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest >> KikimrIcGateway::TestLoadMdbBasicSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties-UseSchemaSecrets >> TTicketParserTest::TicketFromCertificateWithValidationDefaultGroupGood [GOOD] >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad >> CompressExecutor::TestReorderedExecutor [GOOD] >> CompressExecutor::TestExecutorMemUsage ------- [TS] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data_integrity/unittest >> KqpDataIntegrityTrails::UpsertEvWriteQueryService-isOlap+useOltpSink [GOOD] Test command err: Trying to start YDB, gRPC: 29634, MsgBus: 8023 |90.8%| [TS] {BAZEL_UPLOAD} ydb/core/kqp/ut/data_integrity/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn >> TSchemeShardTTLTests::ShouldSkipDroppedColumn >> 
TSchemeShardTTLTests::ConditionalErase >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL |90.8%| [TA] $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |90.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} |90.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/data_integrity/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSysColV1::SelectRowAsterisk [GOOD] >> KqpSysColV1::InnerJoinSelect [GOOD] >> KqpSysColV0::InnerJoinTables [GOOD] >> KqpSystemView::PartitionStatsRanges [GOOD] >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] >> KqpSystemView::CompileCacheCheckWarnings-EnableCompileCacheView [GOOD] |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ldap_auth_provider/ut/unittest >> LdapAuthProviderTest_StartTls::LdapRefreshGroupsInfoWithError [GOOD] Test command err: 2026-01-04T02:38:46.962909Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330329680526182:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:46.963183Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:47.217048Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:47.217147Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:47.268040Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:47.274073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:47.293599Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:47.296547Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330329680526074:2081] 1767494326957930 != 1767494326957933 2026-01-04T02:38:47.439194Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:47.439230Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:47.439238Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:47.439326Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:47.440188Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-04T02:38:47.521961Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:47.525829Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:47.525860Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:47.526747Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:4327, port: 4327 2026-01-04T02:38:47.527467Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:47.533533Z node 1 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:47.582815Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****E4lQ (E7EC487F) () has now valid token of ldapuser@ldap 2026-01-04T02:38:50.039364Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330343292778658:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:50.040332Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:50.059162Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:50.136476Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:50.138347Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330343292778629:2081] 1767494330036765 != 1767494330036768 2026-01-04T02:38:50.177728Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:50.177851Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:50.185775Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:50.261100Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:50.281851Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:50.281868Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:50.281875Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:50.281952Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:50.389964Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:50.393095Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: 
CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:50.393146Z node 2 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:50.394075Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:3802, port: 3802 2026-01-04T02:38:50.394156Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-04T02:38:50.413819Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:50.458146Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:50.458638Z node 2 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:340: LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:3802 return no entries 2026-01-04T02:38:50.458990Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****Zflw (B83CB67B) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:3802 return no entries)' 2026-01-04T02:38:53.389890Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591330357969091033:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:53.389942Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:53.406174Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:53.489188Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:53.490679Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591330357969091005:2081] 1767494333389254 != 1767494333389257 2026-01-04T02:38:53.535167Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:53.535269Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:53.541210Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:53.584353Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:53.584376Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:53.584387Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:53.584476Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:53.590230Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-04T02:38:53.864854Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:38:53.868408Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:38:53.868453Z node 3 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:38:53.869036Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:17984, port: 17984 2026-01-04T02:38:53.869091Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-04T02:38:53.879079Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:38:53.922354Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:38:53.966138Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:38:53.966567Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:38:53.966609Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:54.010209Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:54.054184Z node 3 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:38:54.055138Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****nE0A (E57ADAE7) () has now valid token of ldapuser@ldap 2026-01-04T02:38:54.394526Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:58.390185Z node 3 ... does not exist. LDAP search for filter uid=ldapuser on server ldap://localhost:25880 return no entries 2026-01-04T02:39:21.627395Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket eyJh****ibCA (85FC599D) () has now permanent error message 'Could not login via LDAP (LDAP user ldapuser does not exist. 
LDAP search for filter uid=ldapuser on server ldap://localhost:25880 return no entries)' 2026-01-04T02:39:26.650787Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7591330499919548707:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:26.650826Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:26.665397Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:26.737428Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:26.782069Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:26.782163Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:26.783971Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:26.824951Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:26.824978Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:26.824984Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:26.825065Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:26.918084Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:26.969154Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1553: Updated state for /Root keys 1 2026-01-04T02:39:26.971923Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:26.971953Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:26.972669Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:26212, port: 26212 2026-01-04T02:39:26.972728Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-04T02:39:26.987466Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:27.030254Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:39:27.030775Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:26212. 
Server is busy 2026-01-04T02:39:27.031448Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****5QHw (6435BF67) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:26212. Server is busy)' 2026-01-04T02:39:27.031794Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:27.031819Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:27.032486Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:26212, port: 26212 2026-01-04T02:39:27.032564Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-04T02:39:27.047965Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:27.090412Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:39:27.090980Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:26212. Server is busy 2026-01-04T02:39:27.091557Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****5QHw (6435BF67) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:26212. Server is busy)' 2026-01-04T02:39:27.657119Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:28.652476Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****5QHw (6435BF67) 2026-01-04T02:39:28.652812Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:28.652839Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:28.653990Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:26212, port: 26212 2026-01-04T02:39:28.654063Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-04T02:39:28.661697Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:28.706259Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:39:28.706774Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:323: Could not perform search for filter uid=ldapuser on server ldap://localhost:26212. Server is busy 2026-01-04T02:39:28.707240Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1893: Ticket eyJh****5QHw (6435BF67) () has now retryable error message 'Could not login via LDAP (Could not perform search for filter uid=ldapuser on server ldap://localhost:26212. 
Server is busy)' 2026-01-04T02:39:31.651238Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7591330499919548707:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:31.651343Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:39:32.657467Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****5QHw (6435BF67) 2026-01-04T02:39:32.657765Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:805: CanInitLoginToken, domain db /Root, request db , token db /Root, DomainLoginOnly 1 2026-01-04T02:39:32.657784Z node 6 :TICKET_PARSER TRACE: ticket_parser_impl.h:810: CanInitLoginToken, target database candidates(1): /Root 2026-01-04T02:39:32.658881Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:26212, port: 26212 2026-01-04T02:39:32.658971Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-04T02:39:32.681377Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:32.730418Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:39:32.775052Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:39:32.775687Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:39:32.775733Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:32.818178Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:32.862359Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:32.864777Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****5QHw (6435BF67) () has now valid token of ldapuser@ldap 2026-01-04T02:39:36.662141Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1595: Refreshing ticket eyJh****5QHw (6435BF67) 2026-01-04T02:39:36.662239Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:232: init: scheme: ldap, uris: ldap://localhost:26212, port: 26212 2026-01-04T02:39:36.662304Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:179: start TLS 2026-01-04T02:39:36.671762Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:195: bind: bindDn: cn=robouser,dc=search,dc=yandex,dc=net 2026-01-04T02:39:36.722102Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:308: search: baseDn: 
dc=search,dc=yandex,dc=net, scope: subtree, filter: uid=ldapuser, attributes: memberOf 2026-01-04T02:39:36.771527Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:357: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (member:1.2.840.113556.1.4.1941:=uid=ldapuser,dc=search,dc=yandex,dc=net), attributes: 1.1 2026-01-04T02:39:36.772203Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:382: Try to get nested groups - tree traversal 2026-01-04T02:39:36.772248Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managerOfProject1,cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=project1,cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:36.814198Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=managers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)(entryDn=cn=developers,cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:36.862153Z node 6 :LDAP_AUTH_PROVIDER DEBUG: ldap_auth_provider.cpp:404: search: baseDn: dc=search,dc=yandex,dc=net, scope: subtree, filter: (|(entryDn=cn=people,ou=groups,dc=search,dc=yandex,dc=net)), attributes: memberOf 2026-01-04T02:39:36.863473Z node 6 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket eyJh****5QHw (6435BF67) () has now valid token of ldapuser@ldap |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/security/ldap_auth_provider/ut/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] >> KqpSystemView::CompileCacheBasic-EnableCompileCacheView [GOOD] >> KqpSystemView::QueryStatsScan [GOOD] >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::SelectRowAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 24928, MsgBus: 21933 2026-01-04T02:39:31.090259Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330520344067037:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:31.090304Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:31.380831Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:31.421723Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:31.422005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:31.463471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:31.486403Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:31.521894Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330520344067011:2081] 
1767494371088711 != 1767494371088714 2026-01-04T02:39:31.620032Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:31.665061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:31.665076Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:31.665086Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:31.665152Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:32.099578Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:32.285669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:32.341601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.497824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.672843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.741110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.461774Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330533228970770:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.461903Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.462217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330533228970780:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.462262Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.832093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.865281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.902555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.933743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.965899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.000279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.034976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.119006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.202121Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330537523938950:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.202209Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.202338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330537523938955:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.202550Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330537523938957:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.202601Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.206092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:35.216379Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330537523938959:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:39:35.310398Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330537523939010:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:36.090756Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330520344067037:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:36.090843Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType |90.8%| [TA] $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.8%| [TA] {RESULT} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV0::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 4992, MsgBus: 15422 2026-01-04T02:39:31.079241Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330521744964705:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:31.079468Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:31.333735Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:31.353273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:31.353400Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:31.392769Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:31.461524Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:31.462888Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330521744964592:2081] 1767494371071021 != 1767494371071024 2026-01-04T02:39:31.525927Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:31.664447Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:31.664466Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:31.664483Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:31.664571Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:32.086641Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:32.280794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:32.288731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:39:32.357879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.547750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.732370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.807770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.300206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330534629868359:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.300336Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.300689Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330534629868368:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.300720Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.831625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.869327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.896178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.925250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.958472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.994986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.032797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.099980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.179457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330538924836540:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.179511Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.180391Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330538924836543:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.180467Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.180759Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330538924836547:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.184068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:35.194849Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330538924836549:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:39:35.268298Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330538924836602:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:36.079197Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330521744964705:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:36.079258Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::PartitionStatsRanges [GOOD] Test command err: Trying to start YDB, gRPC: 16681, MsgBus: 62706 2026-01-04T02:39:31.079489Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330520790491963:2179];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:31.079540Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:31.341895Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:31.369358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:31.369477Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:31.442979Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:31.451796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:31.452608Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330520790491818:2081] 1767494371071065 != 1767494371071068 2026-01-04T02:39:31.575566Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:31.664674Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:31.664694Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:31.664707Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:31.664769Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:32.083661Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:32.298393Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:32.312842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:39:32.358169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.501484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.655587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.746291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.473350Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330533675395579:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.473470Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.473731Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330533675395589:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.473788Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.831587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.866301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.896055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.925066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.958649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.003853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.041102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.121405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.206503Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330537970363758:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.206566Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.206903Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330537970363763:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.206909Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330537970363764:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.207001Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.210612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:35.220400Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330537970363767:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:39:35.313180Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330537970363818:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:36.079272Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330520790491963:2179];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:36.079371Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:39:37.395530Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494377378, txId: 281474976710673] shutting down |90.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/security/ldap_auth_provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::StreamInnerJoinSelectAsterisk [GOOD] Test command err: Trying to start YDB, gRPC: 9320, MsgBus: 4875 2026-01-04T02:39:31.080204Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330523326211066:2188];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:31.080276Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:31.352362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:31.352500Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:31.388166Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:31.395682Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330523326210910:2081] 1767494371071045 != 1767494371071048 2026-01-04T02:39:31.402840Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:31.403372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:31.618711Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:31.666381Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:31.666408Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:31.666421Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:31.666491Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:32.085183Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:32.324962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:32.332878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:39:32.387503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.536079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.712950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.795859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.411803Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330536211114676:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.412004Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.412477Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330536211114686:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.412549Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.835112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.867354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.896993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.930999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.968637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.015672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.053266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.103516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.174350Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330540506082850:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.174417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.174695Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330540506082855:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.174738Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330540506082856:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.174762Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.180855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:35.190582Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330540506082859:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:39:35.253202Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330540506082910:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:36.079726Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330523326211066:2188];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:36.079793Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:39:37.405465Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494377425, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheCheckWarnings-EnableCompileCacheView [GOOD] Test command err: Trying to start YDB, gRPC: 63620, MsgBus: 62583 2026-01-04T02:39:31.077921Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330519810486970:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:31.079030Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:31.301484Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:31.385112Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:31.385254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:31.422794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:31.450893Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:31.453137Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330519810486862:2081] 1767494371071055 != 1767494371071058 2026-01-04T02:39:31.572911Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:31.664823Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:31.664847Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:31.664865Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:31.665019Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2026-01-04T02:39:32.084074Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:32.251505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:32.259690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:39:32.330744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.481260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.653512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.721401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.339139Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330532695390623:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.339281Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.339951Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330532695390633:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.340007Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.831595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.865272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.896174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.925344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.958579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.992255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.027168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.073891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.173924Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330536990358807:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.174011Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.174315Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330536990358812:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.174348Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330536990358813:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.174454Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.180882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:35.193250Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330536990358816:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:39:35.273911Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330536990358867:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:36.077265Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330519810486970:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:36.077343Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:39:36.876905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:37.830091Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:37.830186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:37.830225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:37.830261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:37.830314Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:37.830338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:37.830433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:37.830499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:37.831302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:37.833148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:37.915759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:37.915828Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:37.931840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:37.932273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:37.932582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:37.943407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:37.943680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:37.946490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:37.948373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:37.957147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:37.958134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:37.964572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:37.964653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:37.964723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:37.964786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:37.964838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:37.965029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.142092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.143222Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.143346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.143455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.143550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.143653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.143732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.143806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.143909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.143986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.144064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.144160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.144246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.144344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.144434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
thId: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:38.723766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:39:38.723958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.724018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:39:38.724073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:39:38.724132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:39:38.725746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.725824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:39:38.725866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:39:38.727315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.727354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.727399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:39:38.727443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:39:38.727566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:39:38.729055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:39:38.729193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-04T02:39:38.729461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:38.729562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:38.729605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:39:38.729865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:39:38.729931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:39:38.730101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:39:38.730186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:39:38.731907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:38.731949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:38.732118Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:38.732160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-04T02:39:38.732249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.732291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-04T02:39:38.732382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:39:38.732414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:39:38.732448Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:39:38.732501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:39:38.732540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:39:38.732578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:39:38.732613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:39:38.732715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:39:38.732775Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:39:38.732817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-04T02:39:38.732859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-04T02:39:38.733569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:39:38.733667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:39:38.733719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:39:38.733755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:39:38.733825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:39:38.733914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:39:38.736562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:39:38.736974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-04T02:39:38.738527Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:675:2665] Bootstrap 2026-01-04T02:39:38.739548Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:675:2665] Become StateWork (SchemeCache [1:680:2670]) 2026-01-04T02:39:38.742531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:39:38.742866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.742996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } 
KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "created_at" } }, at schemeshard: 72057594046678944 2026-01-04T02:39:38.743425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', at schemeshard: 72057594046678944 2026-01-04T02:39:38.744411Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:675:2665] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:39:38.747237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Cannot enable TTL on unknown column: \'created_at\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:38.747483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Cannot enable TTL on unknown column: 'created_at', operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-01-04T02:39:38.748057Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnUnspecifiedTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:39:37.959766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:37.959892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:37.959936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:37.959973Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:37.960009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:37.960064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:37.960128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:37.960202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-04T02:39:37.961050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:37.961320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:38.050376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:38.050452Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:38.063739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:38.067951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:38.068150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:38.073775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:38.074042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:38.074750Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:38.074989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:38.077530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:38.077691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:38.078924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:38.078987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:38.079107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:38.079152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:38.079198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:38.079329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.203267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.204059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.204161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.204225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.204289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.204335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.204393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.204435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.204499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.204581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.204651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.204708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.204776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.204840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.204923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:38.726740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:39:38.726941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.727013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:39:38.727046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:39:38.727071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:39:38.728547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.728587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:39:38.728659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:39:38.730314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.730362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.730430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:39:38.730479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:39:38.730590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:39:38.731990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:39:38.732157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-04T02:39:38.732497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:38.732599Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969456 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:38.732639Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:39:38.732907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:39:38.732960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:39:38.733112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:39:38.733184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:38.734749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:38.734795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:38.734979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:38.735029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:208:2209], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:39:38.735405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.735473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-04T02:39:38.735581Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:39:38.735621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:39:38.735659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:39:38.735700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:39:38.735744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:39:38.735818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:39:38.735875Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 
2026-01-04T02:39:38.735909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:39:38.735989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:39:38.736060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-04T02:39:38.736107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-04T02:39:38.736551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:39:38.736629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:39:38.736661Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:39:38.736717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:39:38.736753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:39:38.736825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:39:38.739151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:39:38.739624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-04T02:39:38.740888Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:673:2663] Bootstrap 2026-01-04T02:39:38.741859Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:673:2663] Become StateWork (SchemeCache [1:678:2668]) 2026-01-04T02:39:38.744513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:39:38.744823Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.744921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: 
"TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { }, at schemeshard: 72057594046678944 2026-01-04T02:39:38.745313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: TTL status must be specified, at schemeshard: 72057594046678944 2026-01-04T02:39:38.746132Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:673:2663] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:39:38.748843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "TTL status must be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:38.749129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: TTL status must be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-01-04T02:39:38.749668Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinSelect [GOOD] Test command err: Trying to start YDB, gRPC: 18235, MsgBus: 6211 2026-01-04T02:39:31.074843Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330522251701296:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:31.074933Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:31.343907Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:31.352236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:31.352410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:31.357975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:31.421914Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330522251701260:2081] 1767494371070975 != 1767494371070978 2026-01-04T02:39:31.433685Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:31.557917Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:31.665581Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will 
use file: (empty maybe) 2026-01-04T02:39:31.665597Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:31.665606Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:31.665655Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:32.085900Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:32.310774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:32.330407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:39:32.373678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.508157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.661471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.729232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.315428Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330535136605024:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.315526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.316575Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330535136605034:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.316638Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.831583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.869211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.901867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.933452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.963356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.000172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.040192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.087953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.180195Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330539431573203:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.180300Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.180874Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330539431573208:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.180915Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330539431573209:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.181233Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.184855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:35.197343Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330539431573212:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:39:35.261795Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330539431573263:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:36.075176Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330522251701296:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:36.075262Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::LegacyTtlSettingsNoTiersAlterTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:39:37.830121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:37.830242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:37.830288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:37.830324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:37.830384Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:37.830442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:37.830538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:37.830612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:37.831506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:37.833176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:37.916505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:37.916562Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:37.933450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:37.937842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:37.938048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:37.942975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:37.943312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:37.946489Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:37.948367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:37.957204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:37.958110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:37.964086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:37.964194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:37.964960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:37.965052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:37.965117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:37.965304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.113596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.115484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.115591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.115638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.115711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.115763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.115827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.115882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.115933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.115992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: 
EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.116034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.116086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.116124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.116172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.116247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
94046678944, txId: 102, path id: 38 2026-01-04T02:39:38.940039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.940092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-04T02:39:38.940597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:39:38.940692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:39:38.940738Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:39:38.940772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-04T02:39:38.940808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:39:38.940867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-04T02:39:38.943036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:39:38.955209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 847 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-04T02:39:38.955270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:39:38.955485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 847 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-04T02:39:38.955583Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 847 } } CommitVersion { Step: 5000039 TxId: 102 } debug: 
NTableState::TProposedWaitParts operationId# 102:0 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:39:38.956225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 713 RawX2: 4294969995 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:39:38.956273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:39:38.956389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 713 RawX2: 4294969995 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:39:38.956438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:39:38.956537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 713 RawX2: 4294969995 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:39:38.956609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:38.956644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.956680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:39:38.956726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-04T02:39:38.959171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.959472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.959693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.959730Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:39:38.959818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:39:38.959854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:38.959911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:39:38.959948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:38.959978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:39:38.960038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:741:2719] message: TxId: 102 2026-01-04T02:39:38.960080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:38.960117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:39:38.960155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:39:38.960284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:39:38.961796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:39:38.961862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:802:2772] TestWaitNotification: OK eventTxId 102 2026-01-04T02:39:38.963182Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:39:38.963383Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 249us result status StatusSuccess 2026-01-04T02:39:38.963779Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheBasic-EnableCompileCacheView [GOOD] Test command err: Trying to start YDB, gRPC: 13659, MsgBus: 2108 2026-01-04T02:39:31.079092Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330521039254752:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:31.079151Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:31.357695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:31.357836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:31.426991Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:31.442962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:31.462427Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:31.463379Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330521039254723:2081] 1767494371077659 != 1767494371077662 2026-01-04T02:39:31.664389Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:31.664409Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:31.664490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:31.664624Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:31.704222Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:32.090020Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:32.268908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:32.274773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:39:32.326967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.492157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.692302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.773710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.469458Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330533924158511:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.469573Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.470046Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330533924158521:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.470090Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.831779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.868485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.899019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.933372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.961192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.003077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.035400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.081138Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.179534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330538219126689:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.179602Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.179911Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330538219126694:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.179958Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330538219126695:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.179981Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.182890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:35.192922Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330538219126698:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:39:35.278066Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330538219126749:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:36.079289Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330521039254752:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:36.079341Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> StoragePool::TestDistributionRandomMin7p [GOOD] >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] >> KqpSysColV1::InnerJoinTables [GOOD] >> BasicStatistics::ServerlessGlobalIndex [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsScan [GOOD] Test command err: Trying to start YDB, gRPC: 32257, MsgBus: 5141 2026-01-04T02:39:31.078075Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330519354982005:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:31.078348Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:31.296449Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:31.407458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:31.407601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:31.450429Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:31.455346Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330519354981897:2081] 1767494371071060 != 1767494371071063 2026-01-04T02:39:31.458248Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:31.467746Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:31.664786Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:31.664810Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:31.664821Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:31.664868Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-01-04T02:39:32.084395Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:32.351634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:32.365565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:39:32.418146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.581187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.754031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:32.836929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.397997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330532239885661:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.398160Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.398471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330532239885670:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.398515Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:34.831853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.870106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.902139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.933838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.965035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.002394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.032812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.088510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.183068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330536534853836:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.183142Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.183181Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330536534853841:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.183909Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330536534853844:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.183961Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.186879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:35.197697Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330536534853843:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:39:35.289301Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330536534853896:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:36.077531Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330519354982005:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:36.122423Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:39:38.055877Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494377264, txId: 281474976710673] shutting down 2026-01-04T02:39:38.194474Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494378186, txId: 281474976710676] shutting down |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheBasic+EnableCompileCacheView [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/datastreams/ut/unittest >> DataStreams::TestGetRecordsWithBigSeqno [GOOD] Test command err: 2026-01-04T02:38:58.869294Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330380576909139:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:58.869729Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:59.102329Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:59.124826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:59.124940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:59.148514Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:59.248531Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:59.301303Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:59.323161Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:59.323191Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:59.323209Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from 
file: (empty maybe) 2026-01-04T02:38:59.323338Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:59.520194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:59.605538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:38:59.763210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:38:59.880378Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:59.981378Z node 1 :PERSQUEUE ERROR: partition_read.cpp:831: [72075186224037888][Partition][0][StateIdle] reading from too big offset - topic stream_TestGetRecordsStreamWithSingleShard partition 0 client $without_consumer EndOffset 30 offset 100000 2026-01-04T02:38:59.981452Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'stream_TestGetRecordsStreamWithSingleShard' partition: 0 messageNo: 0 requestId: error: trying to read from future. 
ReadOffset 100000, 0 EndOffset 30 2026-01-04T02:39:03.012547Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591330400567661711:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:03.012659Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:03.026232Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:03.098450Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:03.124466Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:03.124559Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:03.151441Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:03.203291Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:03.203316Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:03.203323Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:03.203399Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:03.206588Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:03.403852Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:03.476216Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:03.712273Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:39:03.731973Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-01-04T02:39:04.019531Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:08.013111Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591330400567661711:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:08.013206Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:39:18.081061Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:39:18.081095Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:29.852717Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7591330512917800771:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:29.852761Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:29.867039Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:29.950692Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:29.966800Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:29.966882Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:29.985465Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:30.051831Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:30.051853Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:30.051860Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:30.051972Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:30.165781Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:30.311220Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:30.371858Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:30.579015Z node 7 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:39:30.598558Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715659, at schemeshard: 72057594046644480 2026-01-04T02:39:30.858042Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:34.540260Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591330533335796121:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:34.540368Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:34.591550Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:34.736829Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:34.756953Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:34.757039Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:34.780506Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:34.782043Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:34.911045Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:34.911073Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:34.911081Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:34.911178Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:35.206178Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:35.207782Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:35.264802Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:35.495548Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:39:35.549031Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |90.9%| [TM] {BAZEL_UPLOAD} ydb/services/datastreams/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.9%| [TA] $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.9%| [TA] {RESULT} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} |90.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/datastreams/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongColumnType [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:39.704320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:39.704427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:39.704470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:39.704503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:39.704538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:39.704566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:39.704619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:39.704711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:39.705558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:39.705859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:39.775032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:39.775077Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:39.786290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:39.786436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:39.786557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:39.793182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:39.793371Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:39.793862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:39.794048Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:39.796470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:39.796676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:39.797856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:39.797916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:39.798016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:39.798061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:39.798100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:39.798284Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:39.951934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.952903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.953020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.953108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.953183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.953274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.953368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.953430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.953509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.953578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.953646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.953727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.953841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.953922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.954023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
xId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:40.409480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:39:40.409643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:40.409695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:39:40.409723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:39:40.409757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:39:40.411397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:40.411470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:39:40.411550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:39:40.412970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:40.413009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:40.413038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:39:40.413072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:39:40.413182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:39:40.414175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:39:40.414283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-04T02:39:40.414510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:40.414584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: 
TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:40.414619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:39:40.414818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:39:40.414854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:39:40.414958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:39:40.415000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:39:40.416797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:40.416829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:40.416974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:40.417019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-04T02:39:40.417262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:40.417303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-04T02:39:40.417391Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:39:40.417421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:39:40.417452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:39:40.417474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:39:40.417499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:39:40.417525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:39:40.417555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:39:40.417621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:39:40.417679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:39:40.417727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-04T02:39:40.417766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-04T02:39:40.418129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:39:40.418217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:39:40.418251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:39:40.418276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:39:40.418308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:39:40.418371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:39:40.420505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:39:40.420865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-04T02:39:40.421870Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:669:2659] Bootstrap 2026-01-04T02:39:40.422718Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:669:2659] Become StateWork (SchemeCache [1:674:2664]) 2026-01-04T02:39:40.424717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:39:40.424974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:40.425065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" 
Type: "Uint64" } Columns { Name: "modified_at" Type: "String" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" } }, at schemeshard: 72057594046678944 2026-01-04T02:39:40.425401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Unsupported column type, at schemeshard: 72057594046678944 2026-01-04T02:39:40.426045Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:669:2659] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:39:40.428539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Unsupported column type" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:40.428770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Unsupported column type, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-01-04T02:39:40.429210Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TSchemeShardTTLTestsWithReboots::CopyTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSysColV1::InnerJoinTables [GOOD] Test command err: Trying to start YDB, gRPC: 23545, MsgBus: 14218 2026-01-04T02:39:33.876007Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330529822611528:2125];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:33.876312Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:34.118936Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:34.123086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:34.123187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:34.203707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:34.249207Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:34.324400Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:34.354443Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:34.354466Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:34.354476Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:34.354549Z node 
1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:34.745074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:34.822565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.887851Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:34.967650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.132015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.208963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.943856Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330542707515199:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:36.943993Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:36.944379Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330542707515209:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:36.944422Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:37.384372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:37.416840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:37.446625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:37.478259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:37.512686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:37.551673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:37.582504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:37.642136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:37.728355Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330547002483377:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:37.728450Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:37.728922Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330547002483382:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:37.728976Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330547002483383:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:37.729273Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:37.733127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:37.747954Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330547002483386:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:39:37.809050Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330547002483437:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:38.870454Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330529822611528:2125];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:38.870544Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTableNegative_UnknownColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:39.837327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:39.837414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:39.837456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:39.837492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:39.837522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:39.837546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:39.837606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:39.837675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:39.838438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:39.838684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:39.911118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot 
subscribe to console configs 2026-01-04T02:39:39.911183Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:39.920697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:39.920835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:39.920939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:39.927247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:39.927442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:39.927918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:39.928109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:39.930211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:39.930346Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:39.931149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:39.931188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:39.931255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:39.931288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:39.931321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:39.931473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:40.047003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:40.047662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:40.047739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:40.047807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:40.047875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:40.047963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:40.048010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:40.048050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:40.048129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:40.048207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:40.048270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:40.048353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:40.048432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:40.048510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:40.048580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
peration type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:40.531889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:40.531992Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:39:40.532144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:40.532196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:39:40.532226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:39:40.532251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:39:40.533735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:40.533786Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:39:40.533858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:39:40.535339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:40.535380Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:40.535422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:39:40.535463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:39:40.535601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:39:40.536985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:39:40.537121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for 
txId: 1 at step: 5000037 2026-01-04T02:39:40.537400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:40.537493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:40.537537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:39:40.537820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:39:40.537874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:39:40.538028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:39:40.538115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:39:40.539765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:40.539809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:40.540011Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:40.540085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-04T02:39:40.540186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:39:40.540230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-04T02:39:40.540325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:39:40.540368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:39:40.540405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:39:40.540431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:39:40.540465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:39:40.540502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:39:40.540531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:39:40.540560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:39:40.540622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:39:40.540667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-04T02:39:40.540699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-04T02:39:40.541437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:39:40.541549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:39:40.541586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:39:40.541631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:39:40.541679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:39:40.541752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:39:40.544334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:39:40.544779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-04T02:39:40.546232Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [1:677:2667] Bootstrap 2026-01-04T02:39:40.547315Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [1:677:2667] Become StateWork (SchemeCache [1:682:2672]) 2026-01-04T02:39:40.550262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TTLEnabledTable" Schema { Columns { Name: "key" Type: "Uint64" NotNull: true } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" } TtlSettings { Enabled { ColumnName: "created_at" } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:39:40.550611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: 
TCreateColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:40.550996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Incorrect ttl column - not found in scheme, at schemeshard: 72057594046678944 2026-01-04T02:39:40.551991Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [1:677:2667] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:39:40.554762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Incorrect ttl column - not found in scheme" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:40.554962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Incorrect ttl column - not found in scheme, operation: CREATE COLUMN TABLE, path: /MyRoot/ 2026-01-04T02:39:40.555487Z node 1 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccess >> KikimrIcGateway::TestLoadAwsSecretValueFromExternalDataSourceMetadata-UseSchemaSecrets [GOOD] >> KikimrIcGateway::TestLoadDataSourceProperties+UseSchemaSecrets >> TSchemeShardTTLTests::TtlTiersValidation >> KqpSystemView::NodesOrderByDesc [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::ServerlessGlobalIndex [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:38:52.991263Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:38:53.081224Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:38:53.087909Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:38:53.088175Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:38:53.088279Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:38:53.507578Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:53.608233Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:53.608401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:53.660410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:53.760698Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:38:54.410358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:54.411450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:54.411499Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:54.411539Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:54.411962Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:54.480131Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:55.042224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:38:58.340549Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:38:58.349655Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:38:58.354032Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:38:58.388650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:38:58.388787Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:58.418503Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:38:58.420620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:58.617134Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:38:58.618019Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:38:58.618771Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:38:58.619667Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:38:58.619820Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:38:58.619981Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:38:58.620127Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:38:58.620264Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:38:58.620351Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:38:58.772377Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-04T02:38:58.807119Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:58.807250Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:58.820815Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:58.979824Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:59.042904Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:38:59.043019Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:38:59.075700Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:38:59.076011Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:38:59.076238Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:38:59.076306Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:38:59.076365Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:38:59.076437Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:38:59.076497Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:38:59.076553Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:38:59.077285Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:38:59.223530Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2223:2620] 2026-01-04T02:38:59.224015Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-04T02:38:59.226250Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Describe result: PathErrorUnknown 2026-01-04T02:38:59.226304Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Creating table 2026-01-04T02:38:59.226396Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:38:59.229591Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2627], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:38:59.239822Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2234:2625] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:38:59.244116Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:38:59.309115Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:38:59.327440Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:38:59.327540Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2302:2660], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:38:59.340306Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2328:2683] 2026-01-04T02:38:59.341522Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2328:2683], schemeshard id = 72075186224037897 2026-01-04T02:38:59.694664Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. 
Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:38:59.696760Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2594:2886], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:38:59.700018Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:59.703276Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:38:59.703359Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on create table tx: 2814749767 ... atType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:23.367228Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 20 ] 2026-01-04T02:39:23.367271Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 20, ReplyToActorId = [2:4617:3826], StatRequests.size() = 1 2026-01-04T02:39:24.451295Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 21 ], ReplyToActorId[ [2:4653:3844]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:24.451612Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 21 ] 2026-01-04T02:39:24.451659Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [2:4653:3844], StatRequests.size() = 1 2026-01-04T02:39:24.549146Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:39:25.545150Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [2:4685:3859]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:25.545528Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-04T02:39:25.545578Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [2:4685:3859], StatRequests.size() = 1 2026-01-04T02:39:26.570914Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [2:4713:3870]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:26.571230Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-04T02:39:26.571279Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:4713:3870], StatRequests.size() = 1 2026-01-04T02:39:26.657379Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:39:26.657528Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2026-01-04T02:39:26.657946Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 
2026-01-04T02:39:26.658127Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-04T02:39:27.679658Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:4747:3886]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:27.679897Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-04T02:39:27.679939Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:4747:3886], StatRequests.size() = 1 2026-01-04T02:39:28.705704Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:4779:3898]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:28.705977Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-04T02:39:28.706019Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:4779:3898], StatRequests.size() = 1 2026-01-04T02:39:29.998128Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:4818:3914]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:29.998304Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-04T02:39:29.998332Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:4818:3914], StatRequests.size() = 1 2026-01-04T02:39:30.041693Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:39:30.041772Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:39:30.041831Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:39:30.041863Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-04T02:39:30.150114Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:39:31.158494Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:4851:3928]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:31.158726Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-04T02:39:31.158768Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4851:3928], StatRequests.size() = 1 2026-01-04T02:39:32.355790Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4882:3940]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:32.356145Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-04T02:39:32.356197Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:4882:3940], StatRequests.size() = 1 2026-01-04T02:39:32.545944Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 
2026-01-04T02:39:32.546509Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2026-01-04T02:39:32.546905Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-04T02:39:32.546986Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-04T02:39:32.729135Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:39:32.729232Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:39:32.729578Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:39:32.763081Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:33.831519Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4918:3958]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:33.831843Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-04T02:39:33.831905Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:4918:3958], StatRequests.size() = 1 2026-01-04T02:39:34.377335Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2026-01-04T02:39:34.377419Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.673000s, at schemeshard: 72075186224037899 2026-01-04T02:39:34.377581Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 50, entries count: 2, are all stats full: 1 2026-01-04T02:39:34.395414Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:35.056357Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4952:3974]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:35.056673Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-04T02:39:35.056727Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4952:3974], StatRequests.size() = 1 2026-01-04T02:39:36.279405Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4986:3988]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:36.279650Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-04T02:39:36.279690Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4986:3988], StatRequests.size() = 1 2026-01-04T02:39:36.355984Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:39:37.347820Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:5020:4003]], 
StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:37.348306Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-04T02:39:37.348363Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:5020:4003], StatRequests.size() = 1 2026-01-04T02:39:38.401750Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:5050:4015]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:38.402090Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-04T02:39:38.402132Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:5050:4015], StatRequests.size() = 1 2026-01-04T02:39:38.456511Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:39:38.457091Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 7 2026-01-04T02:39:38.457484Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-04T02:39:38.457566Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-04T02:39:38.534417Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:39:38.534495Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:39:38.534770Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:39:38.549450Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:39.508685Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], ReplyToActorId[ [2:5084:4032]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:39.509005Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2026-01-04T02:39:39.509055Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 34, ReplyToActorId = [2:5084:4032], StatRequests.size() = 1 |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable >> TSchemeShardTTLTestsWithReboots::MoveTable >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::CompileCacheBasic+EnableCompileCacheView [GOOD] Test command err: Trying to start YDB, gRPC: 13636, MsgBus: 17773 2026-01-04T02:39:33.297815Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330532148507613:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:33.299388Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:33.331465Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:39:33.571241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:33.571350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:33.604303Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:33.625690Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:33.636653Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:33.737408Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:33.737435Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:33.737446Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:33.737546Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:33.882218Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:34.234339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:34.294693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.306521Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:34.478140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.612869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.676230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.403201Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330545033411328:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:36.403314Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:36.403608Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330545033411338:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:36.403684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:36.751414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.793263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.817975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.848085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.877696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.915069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.955703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:37.022761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:37.102206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330549328379503:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:37.102288Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:37.102294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330549328379508:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:37.102645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330549328379510:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:37.102742Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:37.105370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:37.117372Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330549328379511:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:39:37.184407Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330549328379563:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:38.297351Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330532148507613:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:38.297421Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL ------- [TM] {asan, default-linux-x86_64, release} ydb/core/security/ut/unittest >> TTicketParserTest::TicketFromCertificateWithValidationCheckIssuerBad [GOOD] Test command err: 2026-01-04T02:39:22.569778Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330481583527406:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:22.569879Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:22.806578Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:22.811416Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:22.811541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:22.856819Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:22.858677Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330481583527379:2081] 1767494362568723 != 1767494362568726 2026-01-04T02:39:22.862223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:22.950789Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:22.950815Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:22.950835Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:22.950928Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:22.991557Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:23.132267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:23.140870Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket 13BD6299B910BC9271D033209D989BAFD76417507AFD97690A8338A79B5C8356 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2026-01-04T02:39:25.740941Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330494603517714:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:25.741078Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:25.757560Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:25.833915Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:25.836542Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330494603517687:2081] 1767494365739666 != 1767494365739669 2026-01-04T02:39:25.862597Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:25.862685Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:25.872667Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:25.932373Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:25.932415Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:25.932423Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:25.932494Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:25.964326Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:26.176127Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:26.183631Z node 2 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket FEE1C6658C9BEF6B9828FA33E9CA9A8115C804AF1D81EC4CB25D69BAFA257986 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2026-01-04T02:39:29.205198Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591330511142375577:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:29.205269Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:29.216373Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:29.278978Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:29.281130Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591330511142375550:2081] 1767494369204410 != 1767494369204413 2026-01-04T02:39:29.319010Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:29.319086Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:29.320372Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:29.347731Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:29.347764Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:29.347771Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:29.347843Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:29.484002Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:29.516186Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:29.522090Z node 3 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket F5541364055A7DF60D2D89C733926B514997B960A25B87D3151CA1D34CD01154 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2026-01-04T02:39:29.522772Z node 3 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket F5541364055A7DF60D2D89C733926B514997B960A25B87D3151CA1D34CD01154: Cannot create token from certificate. 
Client certificate failed verification 2026-01-04T02:39:32.516469Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591330523826465698:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:32.516568Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:32.533544Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:39:32.693800Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:32.696433Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591330523826465659:2081] 1767494372508469 != 1767494372508472 2026-01-04T02:39:32.705895Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:32.724973Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:32.725091Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:32.738742Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:32.811206Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:32.811232Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:32.811245Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:32.811337Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:32.998776Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:33.061722Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:33.067514Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:39:33.069994Z node 4 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket B49E1EFF0D1779740DB72FFE2F33BE13FB1758872CA036EB9E13955ACEBB88D1 () has now valid token of C=RU,ST=MSK,L=MSK,O=YA,OU=UtTest,CN=localhost@cert 2026-01-04T02:39:36.922307Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591330541320262854:2151];send_to=[0:7307199536658146131:7762515]; 
2026-01-04T02:39:36.922617Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:36.938061Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:37.012622Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:37.032596Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:37.032686Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:37.056191Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:37.116715Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:37.132018Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:37.132037Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:37.132050Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:37.132135Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:37.419927Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:37.432190Z node 5 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1906: Ticket 7321F0ED8569634C066C447E36BA30CD95B6D29B832E80A925E477A86D2E2294 () has now permanent error message 'Cannot create token from certificate. Client certificate failed verification' 2026-01-04T02:39:37.432820Z node 5 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket 7321F0ED8569634C066C447E36BA30CD95B6D29B832E80A925E477A86D2E2294: Cannot create token from certificate. Client certificate failed verification |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/security/ut/unittest |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |90.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase [GOOD] |90.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings |90.9%| [TA] $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [TA] {RESULT} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/security/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardTTLTestsWithReboots::AlterTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:37.830859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:37.830933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:37.830967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:37.831001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:37.831040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:37.831066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:37.831113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:37.831187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:37.831961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:37.833112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:37.932260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:37.932328Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:37.947982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:37.948612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:37.948811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:37.957061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:37.957346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:37.958170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 
1, at schemeshard: 72057594046678944 2026-01-04T02:39:37.958430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:37.961333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:37.961511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:37.963534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:37.963610Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:37.963669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:37.963732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:37.963876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:37.964731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.119783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.122718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.122860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.122947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.123031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.123085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.123141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.123181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.123265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.123359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.123440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.123554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.123627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.123703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.123796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 7Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:220: Run conditional erase, tabletId: 72075186233409551, request: TableId: 43 Expiration { ColumnId: 2 WallClockTimestamp: 1600466640225000 ColumnUnit: UNIT_MICROSECONDS } SchemaVersion: 1 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-01-04T02:39:42.017902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2026-01-04T02:39:42.018934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2026-01-04T02:39:42.019355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2026-01-04T02:39:42.019477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2026-01-04T02:39:42.019539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2026-01-04T02:39:42.019646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:39:42.019828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-04T02:39:42.019894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2026-01-04T02:39:42.024092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: 
TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-04T02:39:42.024143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409550, at schemeshard: 72057594046678944 2026-01-04T02:39:42.025835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-04T02:39:42.025881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2026-01-04T02:39:42.027553Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-04T02:39:42.027596Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2026-01-04T02:39:42.028435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-04T02:39:42.028496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:39:42.028897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-04T02:39:42.029015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-04T02:39:42.029061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:2, run at: 2020-09-18T23:04:00.225000Z, at schemeshard: 72057594046678944 2026-01-04T02:39:42.030613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-04T02:39:42.030714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-04T02:39:42.030759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:5, run at: 2020-09-18T23:04:00.226000Z, at schemeshard: 72057594046678944 2026-01-04T02:39:42.032625Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-04T02:39:42.032743Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-04T02:39:42.032781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:4, run at: 2020-09-18T23:04:00.226000Z, at schemeshard: 72057594046678944 2026-01-04T02:39:42.032926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-04T02:39:42.033008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-04T02:39:42.033050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: 
TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-04T02:39:42.033088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-04T02:39:42.033111Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-04T02:39:42.033211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-04T02:39:42.033241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2020-09-18T23:04:00.227000Z, at schemeshard: 72057594046678944 2026-01-04T02:39:42.033282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-04T02:39:42.033329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-04T02:39:42.033354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 2020-09-18T23:04:00.227000Z, at schemeshard: 72057594046678944 2026-01-04T02:39:42.033378Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-04T02:39:42.100870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 5 2026-01-04T02:39:42.101063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-01-04T02:39:42.101153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable1, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-04T02:39:42.101262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-04T02:39:42.101320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 603 row count 2 2026-01-04T02:39:42.101354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTable3, is column=0, is olap=0, RowCount 2, DataSize 603 2026-01-04T02:39:42.101415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2026-01-04T02:39:42.101452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 0 row count 0 2026-01-04T02:39:42.101480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from 
datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=TTLEnabledTable2, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-04T02:39:42.101516Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-01-04T02:39:42.101544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 41 shard idx 72057594046678944:4 data size 43 row count 1 2026-01-04T02:39:42.101595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], pathId map=TTLEnabledTable4, is column=0, is olap=0, RowCount 1, DataSize 43 2026-01-04T02:39:42.101657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable 2026-01-04T02:39:42.101689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 42 shard idx 72057594046678944:5 data size 627 row count 2 2026-01-04T02:39:42.101720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409550 maps to shardIdx: 72057594046678944:5 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 42], pathId map=TTLEnabledTable5, is column=0, is olap=0, RowCount 2, DataSize 627, with borrowed parts 2026-01-04T02:39:42.101846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409550: SplitByLoadNotEnabledForTable 2026-01-04T02:39:42.118912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-04T02:39:42.118975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409551, at schemeshard: 72057594046678944 2026-01-04T02:39:42.121092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-04T02:39:42.121297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-04T02:39:42.121392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:6, run at: 2020-09-18T23:04:00.230000Z, at schemeshard: 72057594046678944 2026-01-04T02:39:42.121478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::TtlTiersValidation [GOOD] |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::NodesOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 13648, MsgBus: 19584 2026-01-04T02:39:31.458109Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.014352s 2026-01-04T02:39:31.462021Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.007956s 2026-01-04T02:39:31.583370Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330522560461116:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:31.583422Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:31.625615Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330522984880996:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:31.625669Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:31.671808Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591330521527597223:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:31.675587Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:31.688216Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591330519832714338:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:31.688295Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:31.974611Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:31.977849Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:31.978070Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:31.978224Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:39:31.983575Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-01-04T02:39:31.983590Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:32.067427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:32.067564Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:32.068453Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:32.068535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:32.068665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:32.068709Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:32.068786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:32.068803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:32.070876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:32.070926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:32.118456Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:39:32.118495Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-04T02:39:32.118511Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-01-04T02:39:32.118639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:32.121202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:32.121438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:32.121613Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:32.170321Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-04T02:39:32.171414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:32.182969Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:32.200141Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:32.230519Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:32.260705Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:32.273772Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:32.341317Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:32.531013Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:32.531051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:32.531077Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:32.531194Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:32.598247Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:32.649185Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:32.742357Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:32.745935Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:32.742925Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:33.464281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976735657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:33.572645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:33.878291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.049425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:34.162183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.253610Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330544035299653:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:36.253741Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:36.254002Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330544035299663:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:36.254045Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:36.557388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.583213Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330522560461116:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:36.583285Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:39:36.605241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.625722Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330522984880996:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:36.625790Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:39:36.649171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.669230Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591330521527597223:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:36.669309Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:39:36.688145Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591330519832714338:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:36.688238Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:39:36.701587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.765004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735666:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.820189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.889752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:37.016836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976735669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:37.148086Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330548330267951:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:37.148184Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:37.148453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330548330267956:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:37.148496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330548330267957:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:37.148523Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:37.152161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976735670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:37.182813Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330548330267960:2398], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976735670 completed, doublechecking } 2026-01-04T02:39:37.266572Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330548330268034:4584] txid# 281474976735671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> BasicStatistics::TwoNodes [GOOD] >> KqpSystemView::TopQueriesOrderByDesc [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldCheckQuotas |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::TtlTiersValidation [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:42.121299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:42.121387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:42.121430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:42.121462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:42.121495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:42.121524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:42.121589Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:42.121673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:42.122468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:42.122745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:42.207888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot 
subscribe to console configs 2026-01-04T02:39:42.207955Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:42.220705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:42.220902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:42.221050Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:42.229062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:42.229283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:42.229999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:42.230253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:42.232940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:42.233100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:42.234200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:42.234259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:42.234394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:42.234438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:42.234479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:42.234670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:42.387123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.388145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.388270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.388363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.388443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.388541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.388630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.388695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.388779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.388876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.388950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.389046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.389128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.389244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.389335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:39:43.184774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:39:43.184851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:39:43.184896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:39:43.184922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:39:43.184957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:39:43.185038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-01-04T02:39:43.185369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1294 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-04T02:39:43.185402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-04T02:39:43.185550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1294 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-04T02:39:43.185683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1294 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-04T02:39:43.186190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:39:43.186236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation 
FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-04T02:39:43.186370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:39:43.186424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:39:43.186529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 715 RawX2: 4294969997 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:39:43.186594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:43.186632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:43.186683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:39:43.186737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-04T02:39:43.188976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:39:43.190688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:39:43.190806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:43.190905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:43.191189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:43.191254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:39:43.191358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:39:43.191396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:39:43.191432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:39:43.191464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:39:43.191498Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-04T02:39:43.191563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:743:2721] message: TxId: 101 2026-01-04T02:39:43.191610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:39:43.191649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:39:43.191681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:39:43.191911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:39:43.193660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:39:43.193706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:744:2722] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-04T02:39:43.197138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 Delete { } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:39:43.197367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:43.197706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, at schemeshard: 72057594046678944 2026-01-04T02:39:43.200135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Tier 0: only the last tier in TTL settings can have Delete action" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:43.200377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Tier 0: only the last tier in TTL settings can have Delete action, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2026-01-04T02:39:43.203797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" TTLSettings { Enabled { ColumnName: "modified_at" Tiers { ApplyAfterSeconds: 3600 EvictToExternalStorage { Storage: "/Root/abc" } } Tiers { ApplyAfterSeconds: 7200 Delete { } } } } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2026-01-04T02:39:43.204045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:39:43.204391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, at schemeshard: 72057594046678944 2026-01-04T02:39:43.206629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "Only DELETE via TTL is allowed for row-oriented tables" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:43.206831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Only DELETE via TTL is allowed for row-oriented tables, operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSuccess [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:42.040611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:42.040733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:42.040781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:42.040817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:42.040858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:42.040890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:42.040943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:42.041034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:42.042049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:42.042377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:42.130240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:42.130303Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:42.144380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:42.144593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:42.144751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:42.163495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:42.163721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:42.164450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:42.164722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:42.168202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:42.168433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:42.169663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:42.169716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:42.169842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:42.169890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:42.169935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:42.170138Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:42.298137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.299142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 
281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.299299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.299413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.299497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.299613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.299701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.299772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.299881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.299969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.300048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.300107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.300190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.300304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.300399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 104, path id: 38 2026-01-04T02:39:43.162184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:39:43.162247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72057594046678944 2026-01-04T02:39:43.162739Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:39:43.162819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:39:43.162851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:39:43.162896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-04T02:39:43.162944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:39:43.163018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-01-04T02:39:43.165267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:39:43.178736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000040 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 983 } } CommitVersion { Step: 5000040 TxId: 104 } 2026-01-04T02:39:43.178773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2026-01-04T02:39:43.178881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000040 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 983 } } CommitVersion { Step: 5000040 TxId: 104 } 2026-01-04T02:39:43.179022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 104 Step: 5000040 OrderId: 104 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 983 } } 
CommitVersion { Step: 5000040 TxId: 104 } debug: NTableState::TProposedWaitParts operationId# 104:0 FAKE_COORDINATOR: Erasing txId 104 2026-01-04T02:39:43.179504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 707 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-01-04T02:39:43.179533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409546, partId: 0 2026-01-04T02:39:43.179627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Source { RawX1: 707 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-01-04T02:39:43.179687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:39:43.179764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 104:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 707 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 104 Step: 0 Generation: 2 2026-01-04T02:39:43.179814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 104:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:43.179839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:39:43.179919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:39:43.179960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2026-01-04T02:39:43.181776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:39:43.182059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:39:43.182207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:39:43.182235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-04T02:39:43.182306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:39:43.182329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:39:43.182356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:39:43.182400Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:39:43.182434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-01-04T02:39:43.182474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:735:2713] message: TxId: 104 2026-01-04T02:39:43.182501Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:39:43.182526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-04T02:39:43.182547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-04T02:39:43.182638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:39:43.183879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:39:43.183927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:844:2814] TestWaitNotification: OK eventTxId 104 2026-01-04T02:39:43.184297Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:39:43.184455Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 163us result status StatusSuccess 2026-01-04T02:39:43.184814Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 3 TTLSettings { Disabled { } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::ValidateTiers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldFailOnSimultaneousDropColumnAndEnableTTL [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:42.423255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:42.423359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:42.423399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:42.423445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:42.423491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:42.423525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:42.423595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:42.423698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:42.424589Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:42.424923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:42.503920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:42.503978Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:42.515305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:42.515486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:42.515604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:42.521852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:42.522038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:42.522768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:42.523068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:42.525445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:42.525606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:42.526767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:42.526830Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:42.526937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:42.526978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:42.527017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:42.527223Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:42.683175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.684272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.684393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.684498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.684595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.684691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.684776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.684837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.684923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour 
} } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.685003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.685078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.685174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.685262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.685378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.685471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-04T02:39:43.479635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-04T02:39:43.480552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:43.480605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 101:0 ProgressState at tablet: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2026-01-04T02:39:43.481791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:39:43.481902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:39:43.481950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:39:43.481986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-04T02:39:43.482040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:39:43.482497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:39:43.482579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:39:43.482613Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:39:43.482632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:39:43.482660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:39:43.482715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-01-04T02:39:43.483172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 
ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1225 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-04T02:39:43.483199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-04T02:39:43.483305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1225 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-04T02:39:43.483394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1225 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-04T02:39:43.484597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 707 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:39:43.484637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-04T02:39:43.484749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 707 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:39:43.484791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:39:43.484868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 707 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:39:43.484913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:43.484939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:43.484979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:39:43.485011Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-04T02:39:43.487949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:39:43.488108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:39:43.488347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:43.488669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:43.488991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:43.489041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:39:43.489137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:39:43.489191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:39:43.489236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:39:43.489270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:39:43.489306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-04T02:39:43.489369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:735:2713] message: TxId: 101 2026-01-04T02:39:43.489411Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:39:43.489450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:39:43.489481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:39:43.489614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:39:43.491310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:39:43.491356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:736:2714] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-04T02:39:43.494766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterTable AlterTable { Name: "TTLEnabledTable" DropColumns { Name: "modified_at" } TTLSettings { Enabled { ColumnName: "modified_at" } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 
2026-01-04T02:39:43.494997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /MyRoot/TTLEnabledTable, pathId: , opId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:43.495326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', at schemeshard: 72057594046678944 2026-01-04T02:39:43.497510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Cannot enable TTL on dropped column: \'modified_at\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:43.497733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Cannot enable TTL on dropped column: 'modified_at', operation: ALTER TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] >> TSchemeShardColumnTableTTL::CreateColumnTable >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::ValidateTiers [GOOD] |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoNodes [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:38:55.467430Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:38:55.550913Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:38:55.558968Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:530:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:38:55.559238Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:38:55.559378Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:38:55.956598Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:56.066906Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:56.067065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:56.101930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:56.179124Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:38:56.836914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:56.837852Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:56.837906Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:56.837930Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:56.838417Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:56.904721Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:57.440488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:39:02.900740Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:39:02.900984Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:39:02.910195Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:39:02.910273Z node 3 :STATISTICS INFO: 
service_impl.cpp:234: Subscribed for config changes on node 3 2026-01-04T02:39:02.913789Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:39:02.913953Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:39:02.954208Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:02.954306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:02.954719Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:02.954778Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:03.005108Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:39:03.005370Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-04T02:39:03.007923Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:03.008196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:03.170623Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:03.170732Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:03.171793Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:03.171870Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:03.187104Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72075186224037888 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:39:03.187867Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:03.188407Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:03.268507Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:03.269100Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:03.269706Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:03.270285Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:03.270471Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:03.270739Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:03.270880Z node 3 :HIVE WARN: 
tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:03.271002Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:03.271096Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:03.468028Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:03.480303Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:39:03.480386Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:39:03.512508Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:39:03.514096Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:39:03.514295Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:39:03.514369Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:39:03.514428Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:39:03.514482Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:39:03.514539Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:39:03.514689Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:39:03.515526Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:39:03.519318Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2365:2411] 2026-01-04T02:39:03.538825Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:39:03.565279Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2505:2449] Owner: [2:2504:2448]. Describe result: PathErrorUnknown 2026-01-04T02:39:03.565348Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2505:2449] Owner: [2:2504:2448]. Creating table 2026-01-04T02:39:03.565437Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2505:2449] Owner: [2:2504:2448]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:39:03.568726Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:39:03.568832Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2553:2458], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:39:03.607272Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2618:2495] 2026-01-04T02:39:03.608157Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2618:2495], schemeshard id = 72075186224037897 2026-01-04T02:39:03.641903Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2661:2502], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:39:03.666049Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:03.717520Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2505:2449] Owner: [2:2504:2448]. TEvProposeTransacti ... ] RequestId[ 23 ], ReplyToActorId[ [2:4317:2989]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:28.621125Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-04T02:39:28.621157Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [2:4317:2989], StatRequests.size() = 1 2026-01-04T02:39:29.607494Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [2:4348:2995]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:29.607822Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-04T02:39:29.607884Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [2:4348:2995], StatRequests.size() = 1 2026-01-04T02:39:30.133426Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:39:30.133573Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2026-01-04T02:39:30.133950Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:39:30.134180Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-04T02:39:30.595992Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [2:4379:3001]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:30.596366Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-04T02:39:30.596419Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [2:4379:3001], StatRequests.size() = 1 
2026-01-04T02:39:31.640030Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [2:4412:3009]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:31.640376Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-04T02:39:31.640422Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [2:4412:3009], StatRequests.size() = 1 2026-01-04T02:39:32.730859Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [2:4447:3015]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:32.731254Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-04T02:39:32.731304Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [2:4447:3015], StatRequests.size() = 1 2026-01-04T02:39:33.566752Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:39:33.566834Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:39:33.566875Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:39:33.566918Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-04T02:39:33.580111Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:39:34.117558Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [2:4490:3024]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:34.117902Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-04T02:39:34.117951Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [2:4490:3024], StatRequests.size() = 1 2026-01-04T02:39:35.264884Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [2:4525:3030]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:35.265260Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-04T02:39:35.265308Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:4525:3030], StatRequests.size() = 1 2026-01-04T02:39:36.048973Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:39:36.049565Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2026-01-04T02:39:36.050000Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-04T02:39:36.050069Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-04T02:39:36.109569Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:39:36.109655Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: 
Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:39:36.110457Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-04T02:39:36.124941Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:36.738872Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:4558:3038]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:36.739191Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-04T02:39:36.739234Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:4558:3038], StatRequests.size() = 1 2026-01-04T02:39:37.883065Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:4591:3046]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:37.883406Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-04T02:39:37.883461Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:4591:3046], StatRequests.size() = 1 2026-01-04T02:39:38.898564Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:4622:3052]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:38.898960Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-04T02:39:38.899025Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:4622:3052], StatRequests.size() = 1 2026-01-04T02:39:39.523756Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:39:39.999996Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:4659:3060]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:40.000380Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-04T02:39:40.000434Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:4659:3060], StatRequests.size() = 1 2026-01-04T02:39:41.031519Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 34 ], ReplyToActorId[ [2:4694:3066]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:41.031856Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 34 ] 2026-01-04T02:39:41.031918Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 34, ReplyToActorId = [2:4694:3066], StatRequests.size() = 1 2026-01-04T02:39:41.618077Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 1 2026-01-04T02:39:41.618375Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 7 2026-01-04T02:39:41.618850Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:39:41.619092Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] 
EvPropagateStatisticsResponse, cookie: 7 2026-01-04T02:39:41.641298Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:39:41.641366Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:39:41.641638Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 49, entries count: 2, are all stats full: 1 2026-01-04T02:39:41.659202Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:42.107487Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 35 ], ReplyToActorId[ [2:4729:3074]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:42.107746Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 35 ] 2026-01-04T02:39:42.107787Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 35, ReplyToActorId = [2:4729:3074], StatRequests.size() = 1 2026-01-04T02:39:42.108196Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [3:4731:2970]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:42.111039Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:39:42.111128Z node 3 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [3:4741:2974] 2026-01-04T02:39:42.111187Z node 3 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [3:4741:2974] 2026-01-04T02:39:42.114290Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4749:3076] 2026-01-04T02:39:42.114989Z node 3 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 3, client id = [3:4741:2974], server id = [2:4749:3076], tablet id = 72075186224037894, status = OK 2026-01-04T02:39:42.115190Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4749:3076], node id = 3, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:39:42.115237Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 3, schemeshard count = 1 2026-01-04T02:39:42.115425Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 18446744073709551615 2026-01-04T02:39:42.115496Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [3:4731:2970], StatRequests.size() = 1 2026-01-04T02:39:42.115672Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BackupCopyHasNoTtlSettings [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 
2026-01-04T02:39:43.199934Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:43.200001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:43.200031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:43.200054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:43.200080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:43.200100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:43.200130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:43.200189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:43.200760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:43.200972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:43.279192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:43.279249Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:43.290195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:43.290350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:43.290466Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:43.296583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:43.296758Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:43.297258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:43.297475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:43.299799Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:43.299947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: 
[RootShredManager] Stop 2026-01-04T02:39:43.300968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:43.301015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:43.301122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:43.301163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:43.301201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:43.301365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:43.406759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.407465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.407567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.407642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.407687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.407744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] 
at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.407793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.407829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.407894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.407939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.407980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.408029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.408103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.408163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.408217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... AT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 813 RawX2: 4294970076 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:39:44.240445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-04T02:39:44.240603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 813 RawX2: 4294970076 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:39:44.240673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:39:44.240792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 813 RawX2: 4294970076 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:39:44.240883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 1, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:44.240932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1090: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged CollectSchemaChanged: false 2026-01-04T02:39:44.242866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.243005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.255290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 707 RawX2: 4294969989 } Origin: 
72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:39:44.255349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:39:44.255485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 707 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:39:44.255535Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:39:44.255634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 707 RawX2: 4294969989 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:39:44.255693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:44.255750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.255793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:39:44.255846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-04T02:39:44.255890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-04T02:39:44.257515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.257949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.258018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2026-01-04T02:39:44.258070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2026-01-04T02:39:44.258113Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2026-01-04T02:39:44.258192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2026-01-04T02:39:44.258234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: 
Change state for txid 102:0 240 -> 240 2026-01-04T02:39:44.259892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.259941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:39:44.260053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:39:44.260090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:44.260129Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:39:44.260164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:44.260201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:39:44.260291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:735:2713] message: TxId: 102 2026-01-04T02:39:44.260339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:44.260415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:39:44.260460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:39:44.260627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:39:44.260668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:39:44.262254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:39:44.262316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:841:2800] TestWaitNotification: OK eventTxId 102 2026-01-04T02:39:44.262844Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:39:44.263109Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 265us result status StatusSuccess 2026-01-04T02:39:44.263634Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" 
EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "ts" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: true IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::TopQueriesOrderByDesc [GOOD] Test command err: Trying to start YDB, gRPC: 17789, MsgBus: 24692 2026-01-04T02:39:35.350577Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330538173796068:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:35.350691Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:35.383844Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:39:35.398219Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330540431793496:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:35.399155Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:35.415536Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591330539723079516:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:35.415611Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:35.609243Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:35.611234Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:35.623923Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:35.662038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:35.662191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:35.662868Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:35.663023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:35.663795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:35.663839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:35.700016Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:39:35.700236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:35.701382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:35.735846Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-04T02:39:35.739140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:35.743636Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:35.771616Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:35.816887Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-04T02:39:35.839626Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:35.839651Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:35.839684Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:35.839802Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:35.885213Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:36.238373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:36.319424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.361644Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:36.415395Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:36.424081Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:36.485472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.617373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.722980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:38.794309Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330551058700199:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:38.794417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:38.794708Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330551058700209:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:38.794786Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:39.082403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:39.122692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:39.162933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:39.207685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:39.274125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:39.316901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:39.360497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:39.405643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:39.476669Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330555353668591:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:39.476739Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:39.477154Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330555353668596:2413], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:39.477222Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:39.477275Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330555353668597:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:39.480945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:39.499272Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330555353668600:2415], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:39:39.581143Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330555353668675:4709] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:40.350603Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330538173796068:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:40.350672Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:39:40.396167Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330540431793496:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:40.396268Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:39:40.415121Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591330539723079516:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:40.415207Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> TSchemeShardTTLTests::CheckCounters >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable_Negative [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:37.830086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:37.830188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:37.830227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:37.830261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:37.830304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:37.830334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:37.830417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:37.830528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:37.831178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:37.833115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:37.915824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:37.915886Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:37.931264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:37.931490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:37.932619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:37.951477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:37.951660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:37.952159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:37.952320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:37.957558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:37.958138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:37.964462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:37.964537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:37.964647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:37.964704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:37.964760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-01-04T02:39:37.964963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.134515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.135377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.135487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.135586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.135676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.135764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.135835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.135909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.135987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.136058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.136122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.136211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.136289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.136384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.136491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... :39:44.222257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.222375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.222466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.222551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.222615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.222679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.222733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.222906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.223012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.223128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.223186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:39:44.223335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:39:44.223401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:39:44.223450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:39:44.223485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:39:44.223517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-04T02:39:44.223594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:3110:4331] message: TxId: 101 2026-01-04T02:39:44.223654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:39:44.223744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:39:44.223786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:39:44.224864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2026-01-04T02:39:44.227934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:39:44.227977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:3111:4332] TestWaitNotification: OK eventTxId 101 2026-01-04T02:39:44.228444Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:39:44.228677Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 271us result status StatusSuccess 2026-01-04T02:39:44.229132Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: false StorageId: 
"" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "str" Type: "String" TypeId: 4097 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "key" NextColumnId: 4 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "key" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 WARNING: All log messages before y_absl::InitializeLog() is called are written to STDERR W0000 00:00:1767494384.229772 623898 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TAlterColumnTable: 6:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 102 2026-01-04T02:39:44.231871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterTtlSettings { Enabled { ColumnName: "str" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:39:44.232043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: alter_table.cpp:283: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 102:0, at schemeshard: 72057594046678944 
2026-01-04T02:39:44.232482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, at schemeshard: 72057594046678944 2026-01-04T02:39:44.234317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:44.234541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: ttl update error: Unsupported column type. in alter constructor STANDALONE_UPDATE, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 102, wait until txId: 102 >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLUtility::GetExpireAfter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldCheckQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:44.079320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:44.079418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:44.079470Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:44.079504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:44.079543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:44.079578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:44.079635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:44.079733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:44.080628Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:44.080940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:44.171755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:44.171816Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:44.186299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:44.186541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:44.186706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:44.195139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:44.195367Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:44.196154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:44.196402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:44.199471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:44.199648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:44.200772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:44.200838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:44.200944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:44.200988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:44.201040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:44.201253Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.358776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.359620Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.359703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.359762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.359830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.359928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.359995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.360052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.360128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.360209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.360299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.360358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.360412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.360477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.360598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
FO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:39:45.318556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:39:45.318600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:39:45.318650Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 7 2026-01-04T02:39:45.318701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-04T02:39:45.319421Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:39:45.319500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:39:45.319529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:39:45.319572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 3 2026-01-04T02:39:45.319608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-01-04T02:39:45.319675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-04T02:39:45.320421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 250 OrderId: 103 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1120 } } CommitVersion { Step: 250 TxId: 103 } 2026-01-04T02:39:45.320462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2026-01-04T02:39:45.320627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 250 OrderId: 103 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1120 } } CommitVersion { Step: 250 TxId: 103 } 
2026-01-04T02:39:45.320751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 103 Step: 250 OrderId: 103 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1120 } } CommitVersion { Step: 250 TxId: 103 } debug: NTableState::TProposedWaitParts operationId# 103:0 2026-01-04T02:39:45.321338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 951 RawX2: 4294970192 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-04T02:39:45.321388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409549, partId: 0 2026-01-04T02:39:45.321532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Source { RawX1: 951 RawX2: 4294970192 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-04T02:39:45.321595Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:39:45.321688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 103:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 951 RawX2: 4294970192 } Origin: 72075186233409549 State: 2 TxId: 103 Step: 0 Generation: 2 2026-01-04T02:39:45.321779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 103:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:45.321831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:39:45.321872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-01-04T02:39:45.321942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2026-01-04T02:39:45.325063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:39:45.325714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:39:45.325794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:39:45.325903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:39:45.326135Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:39:45.326172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-04T02:39:45.326250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:39:45.326279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:39:45.326310Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:39:45.326346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:39:45.326375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-04T02:39:45.326421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:816:2783] message: TxId: 103 2026-01-04T02:39:45.326458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:39:45.326493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:39:45.326532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:39:45.326625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-04T02:39:45.328003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:39:45.328049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:981:2918] TestWaitNotification: OK eventTxId 103 W0000 00:00:1767494385.328538 625704 text_format.cc:399] Warning parsing text-format NKikimrSchemeOp.TTableDescription: 9:35: text format contains deprecated field "ExpireAfterSeconds" TestModificationResults wait txId: 104 2026-01-04T02:39:45.330897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/SubDomain" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:39:45.331211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:39:45.331330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/SubDomain/Table4, opId: 104:0, schema: Name: "Table4" Columns { Name: "key" Type: "Uint64" } Columns { Name: 
"modified_at" Type: "Timestamp" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 SysSettings { RunInterval: 1799999999 } Tiers { ApplyAfterSeconds: 3600 Delete { } } } }, at schemeshard: 72057594046678944 2026-01-04T02:39:45.331630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, at schemeshard: 72057594046678944 2026-01-04T02:39:45.333323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "TTL run interval cannot be less than limit: 1800" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:45.333559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot/SubDomain, subject: , status: StatusSchemeError, reason: TTL run interval cannot be less than limit: 1800, operation: CREATE TABLE, path: /MyRoot/SubDomain/Table4 TestModificationResult got TxId: 104, wait until txId: 104 |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:44.612402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:44.612530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:44.612577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:44.612609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:44.612645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:44.612674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:44.612722Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:44.612798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:44.613748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:44.614091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:44.713379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:44.713445Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:44.729267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:44.729468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:44.729617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:44.738167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:44.738413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:44.739157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:44.739416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:44.742421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:44.742568Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:44.743629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:44.743686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:44.743988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:44.744032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:44.744062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:44.744254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.861214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.862273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.862391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.862490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.862579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.862664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.862760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.862823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.862906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.862982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.863056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.863145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.863232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.863322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.863424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ode 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:2 129 -> 240 2026-01-04T02:39:45.673544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 734 RawX2: 4294970012 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:39:45.673584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2026-01-04T02:39:45.673674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 734 RawX2: 4294970012 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:39:45.673704Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:39:45.673752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 734 RawX2: 4294970012 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:39:45.673889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:45.673922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:45.673967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-04T02:39:45.674001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-04T02:39:45.676926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:39:45.677006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:39:45.679585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:39:45.679713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2026-01-04T02:39:45.679843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:45.680007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:39:45.680142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2026-01-04T02:39:45.680424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:45.680616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2026-01-04T02:39:45.680666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:2 ProgressState 2026-01-04T02:39:45.680769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2026-01-04T02:39:45.680809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2026-01-04T02:39:45.680858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2026-01-04T02:39:45.680906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2026-01-04T02:39:45.680945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2026-01-04T02:39:45.681213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:45.681254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:39:45.681354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-01-04T02:39:45.681384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-04T02:39:45.681413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-01-04T02:39:45.681438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-04T02:39:45.681462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2026-01-04T02:39:45.681531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:787:2754] message: TxId: 101 2026-01-04T02:39:45.681584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-04T02:39:45.681626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:39:45.681658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:39:45.681843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:39:45.681893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2026-01-04T02:39:45.681916Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:1 2026-01-04T02:39:45.681970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:39:45.682014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 2026-01-04T02:39:45.682054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:2 2026-01-04T02:39:45.682122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-04T02:39:45.684315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:39:45.684365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:788:2755] TestWaitNotification: OK eventTxId 101 2026-01-04T02:39:45.684957Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:39:45.685227Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 241us result status StatusSuccess 2026-01-04T02:39:45.685848Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] >> TSchemeShardTTLTestsWithReboots::CreateTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceedAsyncOnIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:44.597544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:44.597632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:44.597679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:44.597716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:44.597754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:44.597784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:44.597855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:44.597942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:44.598727Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:44.598948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:44.669408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:44.669469Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:44.682651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:44.682896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:44.683065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:44.691525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:44.691757Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:44.692628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:44.692880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:44.695896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:44.696083Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:44.697261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:44.697321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:44.697432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:44.697479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:44.697525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:44.697719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.861748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.862920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.863051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.863167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.863270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.863376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.863469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.863546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.863647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.863746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.863830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.863934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.864029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.864150Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.864258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:2 129 -> 240 2026-01-04T02:39:45.763010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 734 RawX2: 4294970012 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:39:45.763047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409547, partId: 0 2026-01-04T02:39:45.763155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 734 RawX2: 4294970012 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:39:45.763199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:39:45.763261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 734 RawX2: 4294970012 } Origin: 72075186233409547 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:39:45.763387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:45.763429Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:45.763464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-04T02:39:45.763503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-04T02:39:45.773563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:39:45.773726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:39:45.779173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:39:45.779371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2026-01-04T02:39:45.779952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:45.780388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:39:45.780629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: 
TTxOperationReply complete, operationId: 101:2, at schemeshard: 72057594046678944 2026-01-04T02:39:45.781045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:45.781379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:2, at schemeshard: 72057594046678944 2026-01-04T02:39:45.781439Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:2 ProgressState 2026-01-04T02:39:45.781564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2026-01-04T02:39:45.781604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2026-01-04T02:39:45.781645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:2 progress is 2/3 2026-01-04T02:39:45.781680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 2/3 2026-01-04T02:39:45.781718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 2/3, is published: true 2026-01-04T02:39:45.782206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:39:45.782251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:39:45.782313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-01-04T02:39:45.782339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-04T02:39:45.782370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 3/3 2026-01-04T02:39:45.782410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-04T02:39:45.782441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 3/3, is published: true 2026-01-04T02:39:45.782551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:787:2754] message: TxId: 101 2026-01-04T02:39:45.782644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 3/3 2026-01-04T02:39:45.782693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:39:45.782738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:39:45.782906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:39:45.782946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:1 2026-01-04T02:39:45.782982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:5577: RemoveTx for txid 101:1 2026-01-04T02:39:45.783018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:39:45.783040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:2 2026-01-04T02:39:45.783066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:2 2026-01-04T02:39:45.783118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-04T02:39:45.786346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:39:45.786430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:788:2755] TestWaitNotification: OK eventTxId 101 2026-01-04T02:39:45.786988Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:39:45.787279Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 297us result status StatusSuccess 2026-01-04T02:39:45.787906Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 
WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::BuildAsyncIndexShouldSucceed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:45.073771Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:45.073874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:45.073915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:45.073945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:45.073978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:45.074006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:45.074049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:45.074133Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-04T02:39:45.074914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:45.075171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:45.143660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:45.143717Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:45.154896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:45.155064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:45.155209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:45.162493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:45.162706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:45.163312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:45.163559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:45.166539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:45.166694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:45.167894Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:45.167960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:45.168072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:45.168117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:45.168165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:45.168350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:45.308652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:45.309381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:45.309483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:45.309550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:45.309613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:45.309687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:45.309743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:45.309799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:45.309883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:45.309948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:45.310018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:45.310079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:45.310148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:45.310216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:45.310275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
CHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000042, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:46.386034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710760 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000042 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:46.386092Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72057594046678944] TDropLock TPropose opId# 281474976710760:0 HandleReply TEvOperationPlan: step# 5000042 2026-01-04T02:39:46.386171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710760:0 128 -> 240 2026-01-04T02:39:46.387780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710760:0, at schemeshard: 72057594046678944 2026-01-04T02:39:46.387825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 281474976710760:0 ProgressState 2026-01-04T02:39:46.387907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-04T02:39:46.387934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-04T02:39:46.387963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710760:0 progress is 1/1 2026-01-04T02:39:46.388007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-04T02:39:46.388041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710760, ready parts: 1/1, is published: true 2026-01-04T02:39:46.388088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:128:2153] message: TxId: 281474976710760 2026-01-04T02:39:46.388124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710760 ready parts: 1/1 2026-01-04T02:39:46.388150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710760:0 2026-01-04T02:39:46.388174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710760:0 2026-01-04T02:39:46.388228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 FAKE_COORDINATOR: Erasing txId 281474976710760 2026-01-04T02:39:46.389664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976710760 2026-01-04T02:39:46.389714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976710760 2026-01-04T02:39:46.389782Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 102, txId# 281474976710760 2026-01-04T02:39:46.389904Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:787:2757], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976710760 2026-01-04T02:39:46.391315Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking 2026-01-04T02:39:46.391428Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Unlocking TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:787:2757], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:39:46.391475Z node 1 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-04T02:39:46.392700Z node 1 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done 2026-01-04T02:39:46.392819Z node 1 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 102 Done TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalAsync, IndexName: UserDefinedIndexByValue, IndexColumn: value, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [1:787:2757], 
AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 5000040, ApplyTxId: 281474976710759, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976710760, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:39:46.392891Z node 1 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 102, subscribers count# 1 2026-01-04T02:39:46.392995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:39:46.393034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:880:2839] TestWaitNotification: OK eventTxId 102 2026-01-04T02:39:46.393583Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:39:46.393862Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 254us result status StatusSuccess 2026-01-04T02:39:46.394338Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByValue" LocalPathId: 39 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::AlterTableShouldSucceedOnAsyncIndexedTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:45.803517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:45.803611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:45.803650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:45.803682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:45.803714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:45.803741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:45.803789Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:45.803893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:45.804651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:45.804908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:45.881880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:45.881928Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:45.895440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:45.895638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:45.895796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:45.904254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:45.904505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:45.905216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:45.905508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:45.908337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:45.908480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:45.909472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:45.909523Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:45.909618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:45.909658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:45.909698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:45.909904Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:46.020601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.021310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.021422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.021494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.021539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.021600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.021661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.021726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.021828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.021897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.021941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.021992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.022045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.022143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.022231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-04T02:39:46.722461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:39:46.722528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:39:46.722553Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:39:46.722590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-04T02:39:46.722629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:39:46.722682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-04T02:39:46.724766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:39:46.736605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 841 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-04T02:39:46.736659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-04T02:39:46.736768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 841 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-04T02:39:46.736855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 4 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 841 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:39:46.737655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { 
RawX1: 727 RawX2: 4294970005 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:39:46.737692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409547, partId: 0 2026-01-04T02:39:46.737788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 727 RawX2: 4294970005 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:39:46.737864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:39:46.737958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 727 RawX2: 4294970005 } Origin: 72075186233409547 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:39:46.738011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:46.738051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:46.738090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-04T02:39:46.738124Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-04T02:39:46.739356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:46.739948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:46.740135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:39:46.740181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:39:46.740251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:39:46.740277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:46.740304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:39:46.740328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:46.740348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:39:46.740391Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:779:2746] message: TxId: 102 2026-01-04T02:39:46.740435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:39:46.740458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:39:46.740479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:39:46.740594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:39:46.741919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:39:46.741979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:910:2833] TestWaitNotification: OK eventTxId 102 2026-01-04T02:39:46.742515Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:39:46.742749Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 258us result status StatusSuccess 2026-01-04T02:39:46.743275Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "UserDefinedIndexByExpireAt" LocalPathId: 39 Type: EIndexTypeGlobalAsync State: EIndexStateReady KeyColumnNames: "modified_at" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> StoragePool::TestDistributionRandomMin7pWithOverflow [GOOD] Test command err: 2026-01-04T02:34:53.473495Z node 2 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:34:53.528092Z node 2 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:34:53.528557Z node 2 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 2 PDiskId# 1 Path# "SectorMap:1:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:34:53.534677Z node 2 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:34:53.535264Z node 2 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:34:53.536557Z node 2 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [2:49:2075] ControllerId# 72057594037932033 2026-01-04T02:34:53.536620Z node 2 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:34:53.536764Z node 2 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy 
GroupId# 4294967295 2026-01-04T02:34:53.537009Z node 2 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:34:53.558123Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:34:53.558214Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:34:53.561126Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:58:2079] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:53.561347Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:59:2080] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:53.561520Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:60:2081] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:53.561676Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:61:2082] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:53.561856Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:62:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:53.562044Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:63:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:53.562187Z node 2 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [2:48:2074] Create Queue# [2:64:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:53.562224Z node 2 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:34:53.562351Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [2:49:2075] 2026-01-04T02:34:53.562397Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [2:49:2075] 2026-01-04T02:34:53.562463Z node 2 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-04T02:34:53.562559Z node 2 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-04T02:34:53.563090Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:34:53.566360Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } PDisks { NodeID: 2 PDiskID: 1 Path: "SectorMap:1:3200" PDiskGuid: 2 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:34:53.566550Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:34:53.566975Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:34:53.567320Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 
2026-01-04T02:34:53.568760Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-04T02:34:53.568845Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:34:53.569854Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:73:2077] ControllerId# 72057594037932033 2026-01-04T02:34:53.569898Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:34:53.570002Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-04T02:34:53.570156Z node 1 :BS_NODE DEBUG: {NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:34:53.584124Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:34:53.584211Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:34:53.586426Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:81:2082] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:53.586624Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:82:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:53.586781Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:83:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:53.586952Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:84:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:53.587116Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:85:2086] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:53.587271Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:86:2087] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:53.587412Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:72:2076] Create Queue# [1:87:2088] targetNodeId# 1 Marker# DSP01 2026-01-04T02:34:53.587442Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:34:53.587521Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:73:2077] 2026-01-04T02:34:53.587554Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:73:2077] 2026-01-04T02:34:53.587603Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-04T02:34:53.587650Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-04T02:34:53.588797Z node 2 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-04T02:34:53.589056Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:34:53.589639Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:73:2077] 2026-01-04T02:34:53.589712Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# 
false PendingEvents.size# 0 2026-01-04T02:34:53.589865Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-04T02:34:53.590011Z node 2 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:34:53.594043Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [2:49:2075] 2026-01-04T02:34:53.594113Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:34:53.594479Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:34:53.594541Z node 2 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-04T02:34:53.602654Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:34:53.605158Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 1 2026-01-04T02:34:53.605217Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-04T02:34:53.605489Z node 2 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:34:53.605696Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:99:2093] 2026-01-04T02:34:53.605736Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:99:2093] 2026-01-04T02:34:53.606345Z node 2 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:34:53.606766Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:34:53.606946Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:34:53.607123Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:99:2093] 2026-01-04T02:34:53.614815Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [2:53:2064] 2026-01-04T02:34:53.614912Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [2:53:2064] 2026-01-04T02:34:53.615009Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [2:102:2089] 2026-01-04T02:34:53.615033Z node 2 :PIPE_CLIENT DEBU ... 
he tablet Unknown.65553.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-04T02:38:07.123806Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65552.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-04T02:38:07.123833Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65551.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-04T02:38:07.123859Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65550.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-04T02:38:07.123884Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65549.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-04T02:38:07.123922Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65548.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-04T02:38:07.123950Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65547.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-04T02:38:07.123977Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65546.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-04T02:38:07.124001Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65545.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-04T02:38:07.124027Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65544.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-04T02:38:07.124053Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65543.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-04T02:38:07.124078Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65542.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-04T02:38:07.124103Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65541.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-04T02:38:07.124129Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65540.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-04T02:38:07.124156Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65539.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-04T02:38:07.124182Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65538.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-04T02:38:07.124208Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65537.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-04T02:38:07.124234Z node 1 :HIVE WARN: hive_impl.cpp:572: HIVE#72057594037927937 The tablet Unknown.65536.Leader.1 is not ready for anything State:Unknown VolatileState:Stopped 2026-01-04T02:38:07.124361Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, NKikimr::NHive::TTxProcessPendingOperations} queued, type NKikimr::NHive::TTxProcessPendingOperations 2026-01-04T02:38:07.124418Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, 
NKikimr::NHive::TTxProcessPendingOperations} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:38:07.124493Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, NKikimr::NHive::TTxProcessPendingOperations} hope 1 -> done Change{10, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T02:38:07.124549Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{3, NKikimr::NHive::TTxProcessPendingOperations} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:38:07.124660Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:515:2352] 2026-01-04T02:38:07.124693Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:515:2352] 2026-01-04T02:38:07.124782Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StNormal leader: [1:146:2124] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:38:07.124835Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037932033 followers: 0 countLeader 1 allowFollowers 0 winner: [1:146:2124] 2026-01-04T02:38:07.124898Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:515:2352] 2026-01-04T02:38:07.124975Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594037927937 Active! Generation: 3, Type: Hive started in 21msec Marker# TSYS24 2026-01-04T02:38:07.125007Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:338: [72057594037927937] Activate 2026-01-04T02:38:07.125153Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037932033] forward result local node, try to connect [1:515:2352] 2026-01-04T02:38:07.125194Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037932033]::SendEvent [1:515:2352] 2026-01-04T02:38:07.125338Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [1:99:2093] 2026-01-04T02:38:07.125377Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [1:99:2093] 2026-01-04T02:38:07.125407Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [1:99:2093] 2026-01-04T02:38:07.125492Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037932033] Accept Connect Originator# [1:515:2352] 2026-01-04T02:38:07.125562Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [1:98:2093] EventType# 268959744 2026-01-04T02:38:07.125691Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037932033] connected with status OK role: Leader [1:515:2352] 2026-01-04T02:38:07.125719Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037932033] send queued [1:515:2352] 2026-01-04T02:38:07.125742Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037932033] push event to server [1:515:2352] 2026-01-04T02:38:07.125779Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [1:432:2281] EventType# 268637702 2026-01-04T02:38:07.126020Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 
2026-01-04T02:38:07.126077Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:38:07.126245Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{22, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T02:38:07.126302Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{31, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:38:07.126457Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [2:102:2089] 2026-01-04T02:38:07.126493Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [2:102:2089] 2026-01-04T02:38:07.126515Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [2:102:2089] 2026-01-04T02:38:07.126582Z node 2 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037927937]::SendEvent [2:102:2089] 2026-01-04T02:38:07.126682Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2026-01-04T02:38:07.126728Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:38:07.126848Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:07.126942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:07.127011Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{10, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-01-04T02:38:07.127073Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:4} Tx{4, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:38:07.127331Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:72: [72057594037927937] Push Sender# [2:101:2089] EventType# 268959744 2026-01-04T02:38:07.127456Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-01-04T02:38:07.127509Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:38:07.127566Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{11, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T02:38:07.127616Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{5, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:38:07.127743Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxRegisterNode} queued, type NKikimr::NHive::TTxRegisterNode 2026-01-04T02:38:07.127774Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxRegisterNode} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:38:07.127847Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037927937 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:07.127901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037927937 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:07.127967Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxRegisterNode} hope 1 -> done Change{11, redo 199b alter 0b annex 0, ~{ 4 } -{ }, 0 gb} 2026-01-04T02:38:07.128000Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{6, NKikimr::NHive::TTxRegisterNode} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:38:07.128121Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{7, NKikimr::NHive::TTxProcessBootQueue} queued, type NKikimr::NHive::TTxProcessBootQueue 2026-01-04T02:38:07.128153Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{7, NKikimr::NHive::TTxProcessBootQueue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:38:07.128196Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{7, NKikimr::NHive::TTxProcessBootQueue} hope 1 -> done Change{12, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T02:38:07.128228Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:3:5} Tx{7, NKikimr::NHive::TTxProcessBootQueue} release 4194304b of static, Memory{0 dyn 0} Took 23.119587 seconds |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |91.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |91.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndGroup >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest >> TMessageBusServerPersQueueGetPartitionOffsetsMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest >> KikimrIcGateway::TestLoadDataSourceProperties+UseSchemaSecrets [GOOD] >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetTopicMetadataMetaRequestTest::SuccessfullyReplies [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: 
"Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties-UseSchemaSecrets [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailesOnNotATopic [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2026-01-04T02:39:48.172208Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:39:48.175766Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:39:48.176120Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-04T02:39:48.176185Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:48.176252Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-04T02:39:48.176970Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:260:2254], now have 1 active actors on pipe 2026-01-04T02:39:48.177076Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:39:48.195047Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:39:48.195227Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:48.196072Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:39:48.196252Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:39:48.196644Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:39:48.197071Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:268:2225] 2026-01-04T02:39:48.199551Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:39:48.199621Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing 
completed. 2026-01-04T02:39:48.199667Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2225] 2026-01-04T02:39:48.199715Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:39:48.199773Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:39:48.199809Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. Count 0 2026-01-04T02:39:48.199850Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:48.199911Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:48.199966Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:48.200007Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:48.200039Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-04T02:39:48.200137Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:39:48.200356Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:48.200800Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:271:2259], now have 1 active actors on pipe 2026-01-04T02:39:48.243760Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:39:48.246982Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:39:48.247239Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-04T02:39:48.247280Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:48.247315Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-04T02:39:48.247893Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:399:2355], now have 1 active actors on pipe 2026-01-04T02:39:48.247957Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:39:48.249506Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-04T02:39:48.249589Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:48.250146Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 2 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-04T02:39:48.250247Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:39:48.250477Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:39:48.250644Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:407:2326] 2026-01-04T02:39:48.251878Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:39:48.251916Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-04T02:39:48.251947Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:407:2326] 2026-01-04T02:39:48.251979Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:39:48.252018Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:39:48.252047Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2026-01-04T02:39:48.252088Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-04T02:39:48.252114Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:48.252150Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:48.252182Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:48.252206Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-04T02:39:48.252262Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:39:48.252401Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-04T02:39:48.252752Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:410:2360], now have 1 active actors on pipe 2026-01-04T02:39:48.253589Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:416:2363], now have 1 active actors on pipe 2026-01-04T02:39:48.253739Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:418:2364], now have 1 active actors on pipe 2026-01-04T02:39:48.254144Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [2:416:2363] destroyed 2026-01-04T02:39:48.254308Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928139] server disconnected, pipe [2:418:2364] destroyed Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties+UseSchemaSecrets [GOOD] Test command err: Trying to start YDB, gRPC: 32646, MsgBus: 27811 2026-01-04T02:39:13.460332Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330445838769450:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:13.460453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:13.713111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:13.713325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:13.758872Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:13.766315Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:13.806131Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:13.806573Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330445838769416:2081] 1767494353456330 != 1767494353456333 2026-01-04T02:39:13.965260Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:13.965290Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:13.965307Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:13.965416Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:14.014545Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:14.467390Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:14.510115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:14.567814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:14.708736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:14.826234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:14.875122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:15.991423Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330454428705880:2398], DatabaseId: /Root, PoolId: default, Failed to 
fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:15.991553Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:15.991790Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330454428705889:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:15.991832Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.494763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.519185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.544036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.572982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.597042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.629130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.660033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.727116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.786173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330458723674054:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.786237Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.786273Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330458723674059:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.786428Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330458723674061:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.786471Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.789085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:16.798042Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330458723674063:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:39:16.888060Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330458723674114:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:18.226749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-04T02:39:18.260862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) Tr ... RN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:42.039693Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591330564160095434:2081] 1767494381944425 != 1767494381944428 2026-01-04T02:39:42.077767Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:42.077867Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:42.079795Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:42.119480Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:42.119498Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:42.119506Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:42.119579Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:42.202745Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:42.622626Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:42.633766Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:42.694557Z node 4 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:42.859875Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:42.933137Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:43.064070Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:45.592008Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330581339966499:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:45.592108Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:45.592405Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330581339966508:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:45.592456Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:45.660473Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:45.692466Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:45.721552Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:45.749998Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:45.782892Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:45.820469Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:45.855961Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:45.903574Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:45.982916Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330581339967379:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:45.982996Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:45.983120Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330581339967384:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:45.983142Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330581339967386:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:45.983181Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:45.987045Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:45.997541Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591330581339967388:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:39:46.086977Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591330585634934735:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:46.946081Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591330564160095463:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:46.946171Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:39:47.592937Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-04T02:39:47.618821Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-04T02:39:47.666333Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> TConsoleTests::TestCreateTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::AlterColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:42.131219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:42.131315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:42.131359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:42.131401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:42.131441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: 
OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:42.131467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:42.131544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:42.131635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:42.132479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:42.132772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:42.203189Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:42.203258Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:42.218220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:42.218460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:42.218624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:42.229036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:42.229288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:42.230095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:42.230397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:42.233455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:42.233642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:42.234782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:42.234844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:42.234945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:42.234991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:42.235034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 
2026-01-04T02:39:42.235247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:42.364267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.365316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.365453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.365575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.365664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.365768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.367452Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.367526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.367624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.367701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.367774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.367874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.367944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.368026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.368081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: 
true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 72057594046678944 2026-01-04T02:39:48.619735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:39:48.619818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:39:48.623524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:39:48.623664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:39:48.623745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:39:48.623833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:39:48.623969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:39:48.624026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-04T02:39:48.624137Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:39:48.624177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:39:48.624220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:39:48.624256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:39:48.624295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-04T02:39:48.624365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:3115:4335] message: TxId: 103 2026-01-04T02:39:48.624418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:39:48.624484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:39:48.624523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:39:48.625954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2026-01-04T02:39:48.628592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:39:48.628649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:4359:5514] TestWaitNotification: OK eventTxId 103 2026-01-04T02:39:48.629239Z node 1 :SCHEMESHARD_DESCRIBE 
DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:39:48.629494Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 278us result status StatusSuccess 2026-01-04T02:39:48.630206Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 11 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 11 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnTableVersion: 3 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 3 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 3 Name: "saved_at" Type: "Datetime" TypeId: 49 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 4 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 5 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } TtlSettings { Disabled { } Version: 3 } 
ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 104 2026-01-04T02:39:48.634006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnTable AlterColumnTable { Name: "TTLEnabledTable" AlterSchema { AlterColumns { Name: "data" DefaultValue: "10" } } } } TxId: 104 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:39:48.634184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: alter_table.cpp:283: TAlterColumnTable Propose, path: /MyRoot/TTLEnabledTable, opId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:39:48.637710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 104:1, propose status:StatusSchemeError, reason: schema update error: sparsed columns are disabled, at schemeshard: 72057594046678944 2026-01-04T02:39:48.640040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 104, response: Status: StatusSchemeError Reason: "schema update error: sparsed columns are disabled" TxId: 104 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:39:48.640255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_audit_log.cpp:222: AUDIT: txId: 104, database: /MyRoot, subject: , status: StatusSchemeError, reason: schema update error: sparsed columns are disabled, operation: ALTER COLUMN TABLE, path: /MyRoot/TTLEnabledTable TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-04T02:39:48.640621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-04T02:39:48.640669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-04T02:39:48.641113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-04T02:39:48.641228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:39:48.641265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:4705:5859] TestWaitNotification: OK eventTxId 104 >> TConsoleTests::TestCreateSharedTenant |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndEncryptedGroup [GOOD] Test command err: 2026-01-04T02:39:48.607887Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:1:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:39:48.616229Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:39:48.620482Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:39:48.620557Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:39:48.628351Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:3:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:39:48.629743Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 Sending TEvPut Sending TEvGet Sending TEvVGet Sending TEvPut Sending TEvGet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest >> TBlobStorageWardenTest::TestCreatePDiskAndGroup [GOOD] Test command err: 2026-01-04T02:39:48.618835Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# 
[3e000000:1:0:1:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:39:48.626639Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:39:48.630438Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:0:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:0:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:39:48.630557Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:3:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:39:48.638034Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:3:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:3:0] oldSyncState# [0 0] DbBirthLsn# 0 2026-01-04T02:39:48.639498Z node 1 :BS_SYNCLOG WARN: blobstorage_synclog.cpp:179: PDiskId# 0 VDISK[3e000000:_:0:1:0]: (1040187392) Handle(TEvSyncLogRead): FULL_RECOVER(unequal guid); sourceVDisk# [3e000000:1:0:2:0] targetVDisk# [3e000000:1:0:1:0] oldSyncState# [0 0] DbBirthLsn# 0 Sending TEvPut Sending TEvGet Sending TEvVGet Sending TEvPut Sending TEvGet |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest >> TPQTestInternal::TestPartitionedBigTest >> TPartitionTests::GetPartitionWriteInfoSuccess >> TPQTabletTests::Parallel_Transactions_1 >> TPQTest::TestUserInfoCompatibility >> TPQTabletTests::ProposeTx_Missing_Operations ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/provider/ut/unittest >> KikimrIcGateway::TestLoadDataSourceProperties-UseSchemaSecrets [GOOD] Test command err: Trying to start YDB, gRPC: 22187, MsgBus: 62803 2026-01-04T02:39:13.460153Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330445502760043:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:13.460223Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:13.678708Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:13.698483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:13.698654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:13.737002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:13.777285Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:13.779862Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330445502760009:2081] 
1767494353456292 != 1767494353456295 2026-01-04T02:39:13.942787Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:13.965057Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:13.965093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:13.965182Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:13.965266Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:14.468180Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:14.483733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:14.544293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:14.686956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:14.824804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:14.878824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.163806Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330458387663770:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.163916Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.164150Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330458387663780:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.164204Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.494844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.521862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.547590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.574072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.596451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.625229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.651313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.688450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.760264Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330458387664653:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.760337Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.760379Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330458387664658:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.760533Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330458387664660:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.760575Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:16.765676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:16.774892Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330458387664662:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:39:16.838420Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330458387664713:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:18.297406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2026-01-04T02:39:18.304233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_ta ... 37.429040Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:37.585605Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:37.596152Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:37.714443Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:40.401033Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330558473312119:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:40.401156Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:40.401529Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330558473312129:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:40.401597Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:40.456251Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:40.489170Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:40.519600Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:40.549375Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:40.578980Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:40.609133Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:40.640884Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:40.700016Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:40.782875Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330558473312997:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:40.782971Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:40.783261Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330558473313002:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:40.783296Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591330558473313003:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:40.783375Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:40.787225Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:40.801488Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591330558473313006:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:39:40.902603Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591330558473313057:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:41.562292Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591330541293441117:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:41.562385Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:39:42.682317Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:43.195508Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:43.720147Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:44.181635Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:44.697990Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:45.168071Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710692:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:39:45.856165Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976715758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:45.891583Z node 4 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976715759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-01-04T02:39:48.997029Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710722:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/provider/ut/unittest >> TPQTabletTests::Parallel_Transactions_1 [GOOD] >> TPQTabletTests::ProposeTx_Missing_Operations [GOOD] >> TPQTabletTests::ProposeTx_Unknown_Partition_1 >> TPQTabletTests::Parallel_Transactions_2 |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest |91.1%| [TA] $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> ExternalBlobsMultipleChannels::ChangeExternalCount >> ExternalBlobsMultipleChannels::Simple >> TPQTabletTests::ProposeTx_Unknown_Partition_1 [GOOD] >> TPQTabletTests::Parallel_Transactions_2 [GOOD] >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction >> ExternalBlobsMultipleChannels::SingleChannel |91.1%| [TA] {RESULT} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/provider/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TPQTestInternal::TestPartitionedBigTest [GOOD] >> TPQTestInternal::TestBatchPacking [GOOD] >> TPQTestInternal::TestKeyRange [GOOD] >> TPQTestInternal::TestAsInt [GOOD] >> TPQTestInternal::TestAsIntWide [GOOD] >> TPQTestInternal::StoreKeys [GOOD] >> TPQTabletTests::ProposeTx_Unknown_WriteId >> TPQTabletTests::Partition_Send_Predicate_With_False >> TPartitionTests::CommitOffsetRanges >> TPQTabletTests::ProposeTx_Unknown_WriteId [GOOD] >> TPQTabletTests::Partition_Send_Predicate_With_False [GOOD] >> TPQTest::TestCompaction >> TPQTabletTests::ProposeTx_Unknown_Partition_2 >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c3c >> TPartitionTests::TabletConfig_Is_Newer_That_PartitionConfig [GOOD] >> TPartitionTests::GetPartitionWriteInfoSuccess [GOOD] >> TSourceIdTests::SourceIdStorageAdd [GOOD] >> TSourceIdTests::SourceIdStorageMinDS [GOOD] >> TSourceIdTests::SourceIdStorageDeleteByMaxCount [GOOD] >> TSourceIdTests::SourceIdStorageComplexDelete >> TPQTabletTests::ProposeTx_Unknown_Partition_2 [GOOD] >> TPartitionTests::TestNonConflictingActsBatchOk >> TPartitionScaleManagerGraphCmpTest::ExtraRootPartitionsInTargetTopic [GOOD] >> TPartitionScaleManagerGraphCmpTest::SplittedTargetTopic [GOOD] >> TPartitionTests::Batching |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTestInternal::StoreKeys [GOOD] |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageComplexDelete [GOOD] >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] >> TPartitionTests::GetPartitionWriteInfoError >> TPQTabletTests::ProposeTx_Command_After_Propose >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c3c [GOOD] >> TPartitionTests::CommitOffsetRanges [GOOD] >> TPQTest::TestUserInfoCompatibility [GOOD] >> TPQTest::TestWaitInOwners >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5c >> TPartitionTests::ConflictingActsInSeveralBatches >> TPQTabletTests::ProposeTx_Command_After_Propose [GOOD] >> TPartitionTests::Batching [GOOD] >> TPQTabletTests::ReadQuoter_ExclusiveLock >> TPartitionTests::ShadowPartitionCounters |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageDeleteAndOwnersMark [GOOD] |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionTests::GetPartitionWriteInfoError [GOOD] >> TPartitionTests::ChangeConfig >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] >> TPartitionTests::FailedTxsDontBlock >> TPQTest::TestSourceIdDropByUserWrites >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5c [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5a >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions >> TMeteringSink::FlushPutEventsV1 [GOOD] >> PQCountersSimple::Partition >> TPartitionTests::ChangeConfig [GOOD] >> TPQTabletTests::Single_PQTablet_And_Multiple_Partitions [GOOD] >> TPartitionTests::After_TEvGetWriteInfoError_Comes_TEvTxCalcPredicateResult >> PQCountersSimple::Partition [GOOD] >> PQCountersSimple::PartitionLevelMetrics >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5a [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients >> TPartitionTests::After_TEvGetWriteInfoError_Comes_TEvTxCalcPredicateResult [GOOD] >> TPartitionTests::BlobKeyFilfer [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_Fewer_Senders_Than_Recipients [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match >> 
TPartitionTests::SetOffset >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_The_Number_Of_Senders_And_Recipients_Match [GOOD] >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::BlobKeyFilfer [GOOD] Test command err: 2026-01-04T02:39:53.076518Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.146884Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:53.146980Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:53.147028Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:53.147120Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:53.162475Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:181:2194] 2026-01-04T02:39:53.163400Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:181:2194] Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\002\030\003\"\014session-id-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-04T02:39:53.189640Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient-2" Value: "\010\000\020\004\030\005\"\014session-id-2(\0000\003@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-2" Value: "\000\000\000\000\000\000\000\000\004\000\000\000\005\000\000\000session-id-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000001cclient-3" Value: "\010\000\020\006\030\007\"\014session-id-3(\0000\004@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-3" Value: "\000\000\000\000\000\000\000\000\006\000\000\000\007\000\000\000session-id-3" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-04T02:39:53.232437Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.253336Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.254342Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: set offset in already dead session session-id-1 actual is session-id-2 2026-01-04T02:39:53.254405Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: 
[72057594037927937][Partition][1][StateIdle] Got error: set offset in already dead session session-id-3 actual is session-id-2 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient-1" Value: "\010\000\020\010\030\t\"\014session-id-2(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient-1" Value: "\000\000\000\000\000\000\000\000\010\000\000\000\t\000\000\000session-id-2" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-04T02:39:53.265464Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.630763Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.676531Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:53.676587Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:53.676646Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:53.676695Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:53.691671Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [2:183:2196] 2026-01-04T02:39:53.694633Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-04T02:39:53.000000Z 2026-01-04T02:39:53.694886Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:183:2196] 2026-01-04T02:39:53.716363Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.757447Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.778201Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.788619Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.829685Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.870842Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.901905Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send change config Wait cmd write (initial) Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\000\020\000\030\000\"\tsession-1(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\250\271\200\267\27038\001" StorageChannel: INLINE } 2026-01-04T02:39:54.048273Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\250\271\200\267\27038\001" StorageChannel: INLINE } Wait commit 1 done 2026-01-04T02:39:54.069644Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Wait cmd write (change config) Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-2" IncludeFrom: true To: "m0000000003cclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-2" IncludeFrom: true To: "m0000000003uclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: "c0000000003Sclient-2" IncludeFrom: true To: "c0000000003Sclient-2" IncludeTo: true } } CmdDeleteRange { Range { From: 
"c0000000003wclient-2|0000000000000000" IncludeFrom: true To: "c0000000003wclient-2|FFFFFFFFFFFFFFFF" IncludeTo: true } } CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\002\020\000\030\000\"\tsession-1(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-3" Value: "\010\000\020\000\030\000\"\000(\0000\007@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-3" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\014\n\010client-1@\000\252\002\014\n\010client-3@\007" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\250\271\200\267\27038\001" StorageChannel: INLINE } Wait config changed 2026-01-04T02:39:54.112142Z node 2 :PERSQUEUE WARN: partition.cpp:2983: [72057594037927937][Partition][3][StateIdle] Partition 3 Unknown consumer 'client-2' 2026-01-04T02:39:54.399192Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:54.443328Z node 3 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:54.443387Z node 3 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:54.443429Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:54.443475Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:54.459640Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [3:183:2196] 2026-01-04T02:39:54.460641Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [3:183:2196] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c5a [GOOD] Test command err: 2026-01-04T02:39:51.336095Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:51.437249Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:39:51.444437Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:39:51.445930Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:51.446764Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:51.446824Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-04T02:39:51.446875Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-04T02:39:51.446965Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:51.447049Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:51.482422Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:208:2212], now have 1 active actors on pipe 2026-01-04T02:39:51.483021Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:39:51.507232Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2026-01-04T02:39:51.510668Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2026-01-04T02:39:51.510823Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:51.511810Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: 
"federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2026-01-04T02:39:51.512123Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:39:51.512544Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:39:51.512964Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:216:2142] 2026-01-04T02:39:51.513917Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:39:51.513980Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-04T02:39:51.514022Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:216:2142] 2026-01-04T02:39:51.514086Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:39:51.514164Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:39:51.515639Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:39:51.516132Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-04T02:39:51.516185Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:51.516225Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:51.516275Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:51.516306Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-04T02:39:51.516343Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:51.516393Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:51.516466Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-04T02:39:51.516512Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-04T02:39:51.516552Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:51.516580Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer reinit request with generation 1 2026-01-04T02:39:51.516601Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer reinit with generation 1 done 2026-01-04T02:39:51.516630Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-04T02:39:51.516661Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-04T02:39:51.516700Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:51.516976Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:39:51.517019Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:39:51.517079Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:39:51.517262Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-04T02:39:51.517461Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:51.520782Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:39:51.521434Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:39:51.521504Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:51.521540Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:51.521576Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:51.521612Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:51.521646Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:51.521691Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:51.522692Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:223:2220], now have 1 active actors on pipe 2026-01-04T02:39:51.524460Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:226:2222], now have 1 active actors on pipe 2026-01-04T02:39:51.526230Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3186: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 179 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2026-01-04T02:39:51.526346Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3372: [PQ: 72057594037927937] distributed transaction 2026-01-04T02:39:51.526426Z node 1 :PQ_TX INFO: pq_impl.cpp:3696: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2026-01-04T02:39:51.526475Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2026-01-04T02:39:51.526514Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2026-01-04T02:39:51.526556Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3985: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2026-01-04T02:39:51.526622Z node 1 :PQ_TX INFO: pq_impl.cpp:4315: [ ... 
TxId 67890 State WAIT_RS FrontTxId 67890 2026-01-04T02:39:54.192951Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 1 2026-01-04T02:39:54.193017Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2026-01-04T02:39:54.193054Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 0/1 2026-01-04T02:39:54.193101Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2026-01-04T02:39:54.193138Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:54.193174Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:39:54.193211Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:54.193260Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-04T02:39:54.193327Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 67890 2026-01-04T02:39:54.193359Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2026-01-04T02:39:54.193393Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:39:54.193429Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:39:54.193466Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:54.193643Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-04T02:39:54.195750Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:39:54.195933Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:39:54.195999Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:54.196044Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:54.196088Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:54.196132Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:54.196173Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:54.196225Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:54.196313Z node 6 :PQ_TX INFO: pq_impl.cpp:3534: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2026-01-04T02:39:54.196400Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2026-01-04T02:39:54.196445Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2026-01-04T02:39:54.196493Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2026-01-04T02:39:54.196538Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 1/1 2026-01-04T02:39:54.196586Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4247: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2026-01-04T02:39:54.196641Z node 6 :PQ_TX INFO: pq_impl.cpp:4555: [PQ: 72057594037927937] Complete TxId 67890 2026-01-04T02:39:54.196686Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2026-01-04T02:39:54.196763Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4032: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2026-01-04T02:39:54.196826Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22225 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-04T02:39:54.196880Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22226 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-04T02:39:54.196920Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-04T02:39:54.196958Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22223 TabletDest# 72057594037927937 SetTabletConsumer# 
72057594037927937 Flags# 0 Seqno# 0} 2026-01-04T02:39:54.196999Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22224 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-04T02:39:54.197033Z node 6 :PQ_TX INFO: pq_impl.cpp:4586: [PQ: 72057594037927937] delete partitions for TxId 67890 2026-01-04T02:39:54.197110Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2026-01-04T02:39:54.197157Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2026-01-04T02:39:54.197194Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-04T02:39:54.197232Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2026-01-04T02:39:54.218261Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:54.249315Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:54.249392Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:54.249437Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:54.249488Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:54.249533Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:54.259847Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:54.280759Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:54.280842Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:54.280877Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:54.280929Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:54.280967Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:54.281053Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:54.291672Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:54.312709Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:54.312791Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2026-01-04T02:39:54.312830Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:54.312874Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:54.312908Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:54.333616Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:54.344292Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:54.344365Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:54.344406Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:54.344448Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:54.344483Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:54.365147Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:54.365217Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:54.365253Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:54.365294Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:54.365329Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:54.375697Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:54.396657Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:54.407744Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [6:386:2330], now have 1 active actors on pipe |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::TwoServerlessTwoSharedDbs [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:38:55.972967Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:38:56.070080Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:38:56.077111Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:531:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:38:56.077455Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:38:56.077652Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:38:56.403091Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:56.509245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:56.509411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:56.543629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:56.616458Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:38:57.332847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:57.333398Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:57.333431Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:57.333454Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:57.333737Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:57.397334Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:57.931034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:39:00.714675Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:39:00.722886Z node 3 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 3 2026-01-04T02:39:00.757704Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:00.757795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-04T02:39:00.786200Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-04T02:39:00.787999Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:00.976099Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:00.976198Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:00.977377Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:00.977985Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:00.978560Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:00.979343Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:00.979442Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:00.979698Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:00.979769Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:00.979968Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:00.980030Z node 3 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:00.995056Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:01.202636Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:01.241921Z node 3 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:39:01.242021Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:39:01.278858Z node 3 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:39:01.279423Z node 3 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:39:01.279641Z node 3 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:39:01.279698Z node 3 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:39:01.279770Z node 3 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:39:01.279833Z node 3 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:39:01.279900Z node 3 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:39:01.279955Z node 3 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:39:01.280744Z node 3 :STATISTICS INFO: aggregator_impl.cpp:69: 
[72075186224037894] Subscribed for config changes 2026-01-04T02:39:01.301724Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:39:01.301844Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [3:2212:2590], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:39:01.337500Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2285:2643] 2026-01-04T02:39:01.337685Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [3:2285:2643], schemeshard id = 72075186224037897 2026-01-04T02:39:01.392480Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [3:2337:2667] 2026-01-04T02:39:01.393846Z node 3 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared1 2026-01-04T02:39:01.405968Z node 3 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [3:2342:2671] Owner: [3:2341:2670]. Describe result: PathErrorUnknown 2026-01-04T02:39:01.406032Z node 3 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [3:2342:2671] Owner: [3:2341:2670]. Creating table 2026-01-04T02:39:01.406116Z node 3 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [3:2342:2671] Owner: [3:2341:2670]. Created ESchemeOpCreateTable transaction for path: /Root/Shared1/.metadata/_statistics 2026-01-04T02:39:01.411374Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [3:2394:2694], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:39:01.415230Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:01.422751Z node 3 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [3:2342:2671] Owner: [3:2341:2670]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:39:01.422871Z node 3 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [3:2342:2671] Owner: [3:2341:2670]. Subscribe on create table tx: 281474976720657 2026-01-04T02:39:01.434942Z node 3 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [3:2342:2671] Owner: [3:2341:2670]. Subscribe on tx: 281474976720657 registered 2026-01-04T02:39:01.568251Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared1/.metadata/script_executions 2026-01-04T02:39:01.626369Z node 3 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:39:01.972473Z node 3 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [3:2342:2671] Owner: [3:2341:2670]. Request: create. 
Transaction completed: 281474976720657. Doublechecking... 2026-01-04T02:39:02.052134Z node 3 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [3:2342:2671] Owner: [3:2341:2670]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:39:02.052231Z node 3 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [3:2342:2671] Owner: [3:2341:2670]. Column diff is empty, finishing 2026-01-04T02:39:02.693077Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:02.845099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe ... node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 21, ReplyToActorId = [3:7280:3781], StatRequests.size() = 1 2026-01-04T02:39:39.969361Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 22 ], ReplyToActorId[ [3:7329:3793]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:39.969924Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 22 ] 2026-01-04T02:39:39.969984Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 22, ReplyToActorId = [3:7329:3793], StatRequests.size() = 1 2026-01-04T02:39:41.235826Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:39:41.406117Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:39:41.406205Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:39:41.406252Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:39:41.406293Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-04T02:39:41.573620Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 23 ], ReplyToActorId[ [3:7384:3808]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:41.574156Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 23 ] 2026-01-04T02:39:41.574213Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 23, ReplyToActorId = [3:7384:3808], StatRequests.size() = 1 2026-01-04T02:39:43.075458Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 24 ], ReplyToActorId[ [3:7440:3824]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:43.075939Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 24 ] 2026-01-04T02:39:43.075987Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 24, ReplyToActorId = [3:7440:3824], StatRequests.size() = 1 2026-01-04T02:39:44.416169Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-04T02:39:44.416647Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2026-01-04T02:39:44.417047Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, 
node id: 3 cookie: 0 2026-01-04T02:39:44.417116Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-04T02:39:44.450680Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:39:44.450737Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:39:44.451019Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:39:44.465303Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:44.721222Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 25 ], ReplyToActorId[ [3:7486:3837]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:44.721771Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 25 ] 2026-01-04T02:39:44.721847Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 25, ReplyToActorId = [3:7486:3837], StatRequests.size() = 1 2026-01-04T02:39:46.146962Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 26 ], ReplyToActorId[ [3:7536:3852]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:46.147294Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 26 ] 2026-01-04T02:39:46.147337Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 26, ReplyToActorId = [3:7536:3852], StatRequests.size() = 1 2026-01-04T02:39:46.991457Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224038898 2026-01-04T02:39:46.991540Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224038898 2026-01-04T02:39:46.991924Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038898, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:39:47.007437Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2026-01-04T02:39:47.793015Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 27 ], ReplyToActorId[ [3:7585:3864]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:47.793480Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 27 ] 2026-01-04T02:39:47.793532Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 27, ReplyToActorId = [3:7585:3864], StatRequests.size() = 1 2026-01-04T02:39:48.930984Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2026-01-04T02:39:48.931052Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.024000s, at schemeshard: 72075186224037899 2026-01-04T02:39:48.931383Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 
72075186224037899, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-04T02:39:48.946650Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:49.026262Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:39:49.256333Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 28 ], ReplyToActorId[ [3:7632:3877]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:49.256680Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 28 ] 2026-01-04T02:39:49.256719Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 28, ReplyToActorId = [3:7632:3877], StatRequests.size() = 1 2026-01-04T02:39:50.747263Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 29 ], ReplyToActorId[ [3:7683:3894]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:50.747600Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-04T02:39:50.747646Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [3:7683:3894], StatRequests.size() = 1 2026-01-04T02:39:51.287549Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224038900 2026-01-04T02:39:51.287613Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 4.657000s, at schemeshard: 72075186224038900 2026-01-04T02:39:51.287944Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224038895] TTxSchemeShardStats::Execute: schemeshard id: 72075186224038900, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-04T02:39:51.302828Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224038895] TTxSchemeShardStats::Complete 2026-01-04T02:39:51.980755Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-04T02:39:51.981315Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2026-01-04T02:39:51.982052Z node 3 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 3 cookie: 0 2026-01-04T02:39:51.982267Z node 3 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-04T02:39:52.004751Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:39:52.004819Z node 3 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:39:52.005136Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:39:52.019691Z node 3 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:52.208373Z node 3 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [3:7736:3908]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:52.208695Z node 3 :STATISTICS DEBUG: service_impl.cpp:790: 
[TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-04T02:39:52.208742Z node 3 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [3:7736:3908], StatRequests.size() = 1 2026-01-04T02:39:52.209349Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:7738:3823]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:52.214106Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:39:52.214966Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-01-04T02:39:52.215035Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:7753:3828] 2026-01-04T02:39:52.215100Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:7753:3828] 2026-01-04T02:39:52.216676Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224038895] EvServerConnected, pipe server id = [2:7756:3829] 2026-01-04T02:39:52.216935Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:7753:3828], server id = [2:7756:3829], tablet id = 72075186224038895, status = OK 2026-01-04T02:39:52.217046Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224038895] EvConnectNode, pipe server id = [2:7756:3829], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:39:52.217101Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224038895] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:39:52.217283Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:39:52.217355Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:7738:3823], StatRequests.size() = 1 2026-01-04T02:39:52.217460Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224038895] EvPropagateStatisticsResponse, cookie: 18446744073709551615 |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TPQTabletTests::TEvReadSet_comes_before_TEvPlanStep [GOOD] >> CompressExecutor::TestExecutorMemUsage [GOOD] >> Compression::WriteRAW >> TPQTabletTests::Read_TEvTxCommit_After_Restart >> TPartitionTests::SetOffset [GOOD] >> TPQTabletTests::Read_TEvTxCommit_After_Restart [GOOD] >> TPartitionTests::OldPlanStep >> TConsoleTests::TestCreateSharedTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenant >> TConsoleTests::TestCreateTenant [GOOD] >> TConsoleTests::TestCreateTenantExtSubdomain >> TPQTabletTests::TEvReadSet_For_A_Non_Existent_Tablet >> TPQTabletTests::TEvReadSet_For_A_Non_Existent_Tablet [GOOD] >> TPartitionTests::ConflictingActsInSeveralBatches [GOOD] >> TPartitionTests::OldPlanStep [GOOD] >> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches >> TPartitionTests::IncorrectRange >> ExternalBlobsMultipleChannels::ChangeExternalCount [GOOD] >> TPartitionTests::ShadowPartitionCounters [GOOD] >> TPartitionTests::ShadowPartitionCountersFirstClass >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::TEvReadSet_For_A_Non_Existent_Tablet [GOOD] Test command err: 2026-01-04T02:39:54.140692Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 
other 0 disallowed 0 2026-01-04T02:39:54.197336Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:39:54.200303Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:39:54.200554Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:54.200604Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:54.200633Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-04T02:39:54.200682Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-04T02:39:54.200725Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:54.200770Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:54.213742Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:180:2193], now have 1 active actors on pipe 2026-01-04T02:39:54.213897Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:39:54.230681Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:39:54.233115Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:39:54.233243Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:54.234521Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 
6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:39:54.234658Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:39:54.234716Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:39:54.235126Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:39:54.235443Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:188:2142] 2026-01-04T02:39:54.236187Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:39:54.236223Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-04T02:39:54.236257Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:188:2142] 2026-01-04T02:39:54.236297Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:39:54.236353Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:39:54.236756Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-04T02:39:54.236791Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:54.236833Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:54.236895Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:39:54.236921Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:54.236957Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:54.237019Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-04T02:39:54.237051Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-04T02:39:54.237079Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:39:54.237107Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:39:54.237136Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:54.237316Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:39:54.237371Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:39:54.237500Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:39:54.237703Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-04T02:39:54.238305Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:39:54.238345Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:1:Initializer] Initializing completed. 2026-01-04T02:39:54.238370Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 2 [1:190:2142] 2026-01-04T02:39:54.238399Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:39:54.238429Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:39:54.238681Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. 
Count 0 2026-01-04T02:39:54.238706Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:39:54.238725Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:54.238752Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:39:54.238770Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:54.238792Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:54.238820Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2026-01-04T02:39:54.238848Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2026-01-04T02:39:54.238885Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:39:54.238904Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-04T02:39:54.238926Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-04T02:39:54.239054Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:39:54.239098Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:39:54.239209Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:39:54.239355Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:54.239469Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:39:54.239622Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-04T02:39:54.243169Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdl ... 
:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:56.143126Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:39:56.143163Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:56.143388Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3488: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2026-01-04T02:39:56.143427Z node 6 :PQ_TX DEBUG: transaction.cpp:244: [TxId: 67890] Handle TEvTxCalcPredicateResult 2026-01-04T02:39:56.143460Z node 6 :PQ_TX DEBUG: transaction.cpp:301: [TxId: 67890] Partition responses 1/1 2026-01-04T02:39:56.143497Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2026-01-04T02:39:56.143533Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2026-01-04T02:39:56.143574Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2026-01-04T02:39:56.143611Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4482: [PQ: 72057594037927937] Responses received from the partitions 1/1 2026-01-04T02:39:56.143648Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2026-01-04T02:39:56.143689Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2026-01-04T02:39:56.143734Z node 6 :PQ_TX INFO: pq_impl.cpp:4013: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2026-01-04T02:39:56.143778Z node 6 :PQ_TX INFO: pq_impl.cpp:4023: [PQ: 72057594037927937] Send TEvReadSet to tablet 72057594037950158 tx 67890 2026-01-04T02:39:56.143894Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 0 2026-01-04T02:39:56.144019Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-04T02:39:56.144722Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2817: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2026-01-04T02:39:56.144777Z node 6 :PQ_TX INFO: pq_impl.cpp:2842: [PQ: 72057594037927937] Assume tablet 72057594037950158 dead, sending read set acks for tx 67890 2026-01-04T02:39:56.144816Z node 6 :PQ_TX DEBUG: transaction.cpp:358: [TxId: 67890] Predicate acks 1/1 2026-01-04T02:39:56.144867Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2026-01-04T02:39:56.144906Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2026-01-04T02:39:56.144946Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2026-01-04T02:39:56.144982Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 0 2026-01-04T02:39:56.145019Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4382: [PQ: 72057594037927937] TxId 67890 status has not changed 2026-01-04T02:39:56.145225Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2817: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2026-01-04T02:39:56.145269Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2822: [PQ: 72057594037927937] Connected to tablet 72057594037950158 2026-01-04T02:39:56.145363Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [6:236:2229], now have 1 active actors on pipe 2026-01-04T02:39:56.145726Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3413: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 72057594037950158 TabletDest: 72057594037927937 TabletProducer: 72057594037950158 ReadSet: "\010\001" Seqno: 0 2026-01-04T02:39:56.145777Z node 6 :PQ_TX INFO: pq_impl.cpp:3423: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet tx 67890 tabletProducer 72057594037950158 2026-01-04T02:39:56.145830Z node 6 :PQ_TX DEBUG: transaction.cpp:308: [TxId: 67890] Handle TEvReadSet 67890 2026-01-04T02:39:56.145876Z node 6 :PQ_TX DEBUG: transaction.cpp:325: [TxId: 67890] Predicates 1/1 2026-01-04T02:39:56.145915Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2026-01-04T02:39:56.145951Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2026-01-04T02:39:56.145990Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2026-01-04T02:39:56.146025Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 1 2026-01-04T02:39:56.146099Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2026-01-04T02:39:56.146137Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 0/1 2026-01-04T02:39:56.146182Z node 6 :PERSQUEUE DEBUG: pqtablet_mock.cpp:72: Connected to tablet 72057594037927937 from tablet 72057594037950158 2026-01-04T02:39:56.146296Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2026-01-04T02:39:56.146340Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. 
Can't process user action and tx events 2026-01-04T02:39:56.147462Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:39:56.147543Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:39:56.147608Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:56.147650Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:39:56.147682Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:56.147733Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-04T02:39:56.147806Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 67890 2026-01-04T02:39:56.147844Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2026-01-04T02:39:56.147915Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:39:56.147949Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:39:56.147989Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:56.148178Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:56.148242Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-04T02:39:56.149796Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:39:56.149912Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:39:56.149945Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:56.149975Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:56.150001Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:56.150028Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:56.150054Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:56.150085Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:56.150136Z node 6 :PQ_TX INFO: pq_impl.cpp:3534: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2026-01-04T02:39:56.150182Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2026-01-04T02:39:56.150220Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2026-01-04T02:39:56.150257Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2026-01-04T02:39:56.150296Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 1/1 2026-01-04T02:39:56.150340Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4247: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2026-01-04T02:39:56.150378Z node 6 :PQ_TX INFO: pq_impl.cpp:4555: [PQ: 72057594037927937] Complete TxId 67890 2026-01-04T02:39:56.150414Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2026-01-04T02:39:56.150483Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4032: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2026-01-04T02:39:56.150537Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 72057594037950158 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-04T02:39:56.150580Z node 6 :PQ_TX INFO: pq_impl.cpp:4586: [PQ: 72057594037927937] delete partitions for TxId 67890 2026-01-04T02:39:56.150617Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2026-01-04T02:39:56.150658Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 1/1 2026-01-04T02:39:56.150687Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-04T02:39:56.150721Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 1/1 2026-01-04T02:39:56.150761Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4674: [PQ: 72057594037927937] add an TxId 67890 to the list for 
deletion 2026-01-04T02:39:56.150799Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS_ACKS to DELETING 2026-01-04T02:39:56.150847Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3886: [PQ: 72057594037927937] Delete key for TxId 67890 2026-01-04T02:39:56.150915Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-04T02:39:56.152512Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-04T02:39:56.152572Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state DELETING 2026-01-04T02:39:56.152612Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State DELETING 2026-01-04T02:39:56.152667Z node 6 :PQ_TX INFO: pq_impl.cpp:4619: [PQ: 72057594037927937] delete TxId 67890 |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTabletTests::PQTablet_Send_RS_With_Abort >> TSourceIdTests::SourceIdWriterAddMessage [GOOD] >> TSourceIdTests::SourceIdWriterClean [GOOD] >> TSourceIdTests::SourceIdWriterFormCommand [GOOD] >> TSourceIdTests::SourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::SourceIdStorageTestClean >> TPartitionTests::IncorrectRange [GOOD] >> TSourceIdTests::SourceIdStorageTestClean [GOOD] >> TPQTabletTests::Multiple_PQTablets_1 >> TPartitionTests::ReserveSubDomainOutOfSpace >> TPQTabletTests::PQTablet_Send_RS_With_Abort [GOOD] >> TPQTabletTests::ReadQuoter_ExclusiveLock [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::ChangeExternalCount [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:39:54.523434Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:39:54.624632Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:39:54.625030Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:39:54.625076Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:39:54.933078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:54.933196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:55.013854Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494392158769 != 1767494392158772 2026-01-04T02:39:55.026945Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:55.075104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:55.162218Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:39:55.463810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:55.476806Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:55.578577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:55.930635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:39:56.145792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 101:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> TPQTabletTests::Multiple_PQTablets_1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::ShouldSkipDroppedColumn [GOOD] Test command err: Leader for 
TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:37.830092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:37.830207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:37.830254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:37.830305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:37.830347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:37.830377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:37.830450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:37.830540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:37.831390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:37.833129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:37.924783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:37.924855Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:37.939299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:37.939537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:37.939685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:37.949611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:37.949864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:37.950606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:37.950828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 
72057594046678944 2026-01-04T02:39:37.957575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:37.958156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:37.964117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:37.964200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:37.964326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:37.964419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:37.964478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:37.964711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.136423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.137391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.137499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.137582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.137648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.137738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.137826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.137911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.137997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.138067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.138151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.138229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.138307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.138400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.138498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... console configs 2026-01-04T02:39:43.036683Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:44.428357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0521 2026-01-04T02:39:44.428494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0674 2026-01-04T02:39:44.459365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2026-01-04T02:39:44.459566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2026-01-04T02:39:44.459652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-04T02:39:44.459797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-04T02:39:44.459889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-01-04T02:39:44.459928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-04T02:39:44.459981Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-01-04T02:39:44.470421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-04T02:39:47.039738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.014 2026-01-04T02:39:47.039835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0173 2026-01-04T02:39:47.070538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2026-01-04T02:39:47.070662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2026-01-04T02:39:47.070730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-04T02:39:47.070797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-04T02:39:47.070841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-01-04T02:39:47.070865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-04T02:39:47.070894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-01-04T02:39:47.081215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-04T02:39:49.610950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.014 2026-01-04T02:39:49.611068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0173 2026-01-04T02:39:49.641843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2026-01-04T02:39:49.642022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: 
PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2026-01-04T02:39:49.642106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-04T02:39:49.642258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-04T02:39:49.642331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-01-04T02:39:49.642383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-04T02:39:49.642441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-01-04T02:39:49.652804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-04T02:39:53.112756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0047 2026-01-04T02:39:53.112851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.005 2026-01-04T02:39:53.153756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2026-01-04T02:39:53.153936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:2 data size 0 row count 0 2026-01-04T02:39:53.153984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=indexImplTable, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-04T02:39:53.154085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-04T02:39:53.154135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 0 row count 0 2026-01-04T02:39:53.154157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=TTLEnabledTable, is column=0, is olap=0, 
RowCount 0, DataSize 0 2026-01-04T02:39:53.154186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-01-04T02:39:53.164581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-04T02:39:56.584294Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-01-04T02:39:56.584413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-04T02:39:56.584648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-04T02:39:56.584856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:220: Run conditional erase, tabletId: 72075186233409547, request: TableId: 38 Expiration { ColumnId: 2 WallClockTimestamp: 60024000 ColumnUnit: UNIT_AUTO } SchemaVersion: 3 Indexes { OwnerId: 72057594046678944 PathId: 40 SchemaVersion: 1 KeyMap { IndexColumnId: 1 MainColumnId: 3 } KeyMap { IndexColumnId: 2 MainColumnId: 1 } } Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-01-04T02:39:56.585403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2026-01-04T02:39:56.586107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-04T02:39:56.586171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409547, at schemeshard: 72057594046678944 2026-01-04T02:39:56.589487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-04T02:39:56.589697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-04T02:39:56.589748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T01:01:00.024000Z, at schemeshard: 72057594046678944 2026-01-04T02:39:56.589862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::SourceIdStorageTestClean [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c0c |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Multiple_PQTablets_2 >> TPartitionTests::ReserveSubDomainOutOfSpace [GOOD] >> TPartitionTests::FailedTxsDontBlock [GOOD] >> TPartitionTests::NonConflictingCommitsBatch >> TPartitionTests::EndWriteTimestamp_DataKeysBody >> TPQTabletTests::Multiple_PQTablets_2 [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c0c [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c0a ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::ReadQuoter_ExclusiveLock [GOOD] Test 
command err: 2026-01-04T02:39:51.550003Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:51.620468Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:39:51.624524Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:39:51.624887Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:51.624985Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:51.625025Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-04T02:39:51.625096Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-04T02:39:51.625171Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:51.625237Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:51.642872Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:180:2193], now have 1 active actors on pipe 2026-01-04T02:39:51.643080Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:39:51.661539Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:39:51.664520Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:39:51.664661Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:51.665609Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 
LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:39:51.665774Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:39:51.666181Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:39:51.666596Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:188:2142] 2026-01-04T02:39:51.667555Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:39:51.667617Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-04T02:39:51.667659Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:188:2142] 2026-01-04T02:39:51.667713Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:39:51.667789Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:39:51.668394Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-04T02:39:51.668460Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:51.668501Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:51.668559Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:39:51.668610Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:51.668682Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:51.668752Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-04T02:39:51.668791Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-04T02:39:51.668832Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:39:51.668871Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:39:51.668909Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:51.669157Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:39:51.669235Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:39:51.669413Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-04T02:39:51.669620Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:51.672264Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:39:51.672371Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:39:51.672427Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:51.672479Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:51.672521Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:51.672563Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:51.672597Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:51.672663Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:51.673004Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:195:2201], now have 1 active actors on pipe 2026-01-04T02:39:51.673655Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:198:2203], now have 1 active actors on pipe 2026-01-04T02:39:51.674520Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3186: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 179 RawX2: 4294969488 } TxId: 2 Data { Immediate: false } 2026-01-04T02:39:51.674588Z node 1 :PQ_TX WARN: pq_impl.cpp:3283: [PQ: 72057594037927937] TxId 2 empty list of operations 2026-01-04T02:39:51.674693Z node 1 :PQ_TX INFO: pq_impl.cpp:4801: [PQ: 72057594037927937] TxId: 2 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2026-01-04T02:39:51.964325Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:52.002058Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:39:52.005562Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:39:52.005826Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:52.005865Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:52.005896Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-04T02:39:52.005930Z node 2 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-04T02:39:52.005972Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:52.006019Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:52.019907Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [2:182:2195], now have 1 active actors on pipe 2026-01-04T02:39:52.020030Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:39:52.020250Z node 2 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 2(current 0) received from actor [2:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicNam ... persist 2026-01-04T02:39:57.100949Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:57.101014Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.101048Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:57.101080Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.101105Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:57.121650Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:57.121718Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.121748Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:57.121798Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.121844Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:57.142367Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:57.142433Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.142456Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: 
[72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:57.142479Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.142498Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:57.163021Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:57.163083Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.163114Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:57.163154Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.163184Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:57.184816Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:57.184879Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.184905Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:57.184928Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.184948Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:57.216092Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:57.216168Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.216192Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:57.216215Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.216234Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:57.236721Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:57.236776Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.236806Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:57.236832Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.236851Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:57.257488Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:57.257552Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.257582Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:57.257616Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.257643Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:57.267941Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:57.288729Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:57.288796Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.288827Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:57.288861Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.288890Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:57.299154Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:57.319913Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:57.319995Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.320028Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:57.320060Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.320087Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:57.340576Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:57.340632Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.340655Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:57.340680Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.340701Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:57.361284Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 
2026-01-04T02:39:57.361355Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.361387Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:57.361422Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.361450Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:57.382021Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:57.382091Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.382122Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:57.382172Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.382200Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:57.402738Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:57.402803Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.402842Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:57.402875Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.402902Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:57.455475Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:57.455538Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.455568Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:57.455619Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.455650Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTabletTests::One_Tablet_For_All_Partitions >> TPartitionTests::TestNonConflictingActsBatchOk [GOOD] >> TPartitionTests::TestTxBatchInFederation >> TPQTabletTests::One_Tablet_For_All_Partitions [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c0a [GOOD] >> TPartitionTests::EndWriteTimestamp_DataKeysBody [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c3a >> TPQTabletTests::One_New_Partition_In_Another_Tablet >> TPartitionTests::EndWriteTimestamp_FromMeta >> TPQTabletTests::UpdateConfig_1 >> 
TPQTabletTests::One_New_Partition_In_Another_Tablet [GOOD] >> TPQTabletTests::Limit_On_The_Number_Of_Transactons >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5c3a [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4c >> TPQTabletTests::UpdateConfig_1 [GOOD] >> TPartitionTests::EndWriteTimestamp_FromMeta [GOOD] >> TPartitionChooserSuite::TBoundaryChooserTest [GOOD] >> TPartitionChooserSuite::TBoundaryChooser_GetTabletIdTest [GOOD] >> TPQTestInternal::TestPartitionedBlobSimpleTest [GOOD] >> TPQTestInternal::TestToHex [GOOD] >> TPQUserInfoTest::UserDataDeprecatedSerializaion [GOOD] >> TPQUtilsTest::TLastCounter [GOOD] >> TPQTabletTests::UpdateConfig_2 >> TPartitionTests::ConflictingSrcIdForTxInDifferentBatches [GOOD] >> TPartitionTests::ConflictingCommitsInSeveralBatches >> TPartitionTests::EndWriteTimestamp_HeadKeys >> TPQTabletTests::PQTablet_App_SendReadSet_With_Commit >> TPQTabletTests::Kafka_Transaction_Supportive_Partitions_Should_Be_Deleted_After_Timeout >> TPQTabletTests::UpdateConfig_2 [GOOD] >> TPQTabletTests::Limit_On_The_Number_Of_Transactons [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4c [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients >> TPQTabletTests::Non_Kafka_Transaction_Supportive_Partitions_Should_Not_Be_Deleted_After_Timeout >> TPartitionTests::ShadowPartitionCountersFirstClass [GOOD] >> TPQTabletTests::PQTablet_App_SendReadSet_With_Commit [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_When_There_Are_More_Senders_Than_Recipients [GOOD] |91.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQUtilsTest::TLastCounter [GOOD] |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionTests::ShadowPartitionCountersRestore >> TPQTabletTests::PQTablet_App_SendReadSet_With_Abort >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4a >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] >> TPQTabletTests::DropTablet_And_Tx >> TPQTabletTests::PQTablet_App_SendReadSet_With_Abort [GOOD] >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Recipients [GOOD] >> TPQTabletTests::PQTablet_App_SendReadSet_With_Commit_After_Abort >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders >> TPartitionTests::ShadowPartitionCountersRestore [GOOD] >> TPQTabletTests::DropTablet_And_Tx [GOOD] >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4a [GOOD] >> TPQTabletTests::Kafka_Transaction_Supportive_Partitions_Should_Be_Deleted_After_Timeout [GOOD] >> TPQTabletTests::Non_Kafka_Transaction_Supportive_Partitions_Should_Not_Be_Deleted_After_Timeout [GOOD] >> TPQTabletTests::DropTablet_Before_Write >> TConsoleTests::TestCreateTenantExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantWrongName >> TPQTabletTests::PQTablet_App_SendReadSet_With_Commit_After_Abort [GOOD] >> TPartitionTests::NonConflictingCommitsBatch [GOOD] >> TPQTabletTests::Kafka_Transaction_Supportive_Partitions_Should_Be_Deleted_With_Delete_Partition_Done_Event_Drop >> TPQTabletTests::Test_Waiting_For_TEvReadSet_Without_Senders [GOOD] >> TPartitionTests::TEvTxCalcPredicate_Without_Conflicts >> TPQTabletTests::DropTablet_Before_Write [GOOD] >> TPQTabletTests::PQTablet_App_SendReadSet_With_Abort_After_Commit >> TPQTest::DirectReadBadSessionOrPipe >> TPartitionTests::GetUsedStorage >> 
TPQTest::TestWaitInOwners [GOOD] >> TPQTest::TestWritePQCompact >> TPQTabletTests::DropTablet_And_UnplannedConfigTransaction >> TPQTabletTests::PQTablet_App_SendReadSet_With_Abort_After_Commit [GOOD] >> PQCountersSimple::PartitionLevelMetrics [GOOD] >> PQCountersSimple::PartitionWriteQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::EndWriteTimestamp_HeadKeys [GOOD] Test command err: 2026-01-04T02:39:51.332196Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:51.445956Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:51.446784Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:51.446863Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:51.446971Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:51.473154Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:39:51.475206Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:39:51.475572Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] bootstrapping {2, {0, 10}, 100001} [1:180:2193] 2026-01-04T02:39:51.477363Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:39:51.477434Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:Initializer] Initializing completed. 2026-01-04T02:39:51.477488Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [1:180:2193] 2026-01-04T02:39:51.477576Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition {2, {0, 10}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:39:51.477665Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:39:51.478463Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process pending events. 
Count 0 2026-01-04T02:39:51.479069Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2026-01-04T02:39:51.479132Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:51.479203Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:51.479381Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:51.479441Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2026-01-04T02:39:51.480519Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:39:51.480696Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|5a576283-7829d9f-7440536a-37839cce_0 generated for partition {2, {0, 10}, 100001} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2026-01-04T02:39:51.481600Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2026-01-04T02:39:51.481657Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:39:51.482499Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:39:51.482558Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:51.483402Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:39:51.483493Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:39:51.484326Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Batch completed (1) 2026-01-04T02:39:51.484379Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2026-01-04T02:39:51.484443Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ReplyOwnerOk. 
Partition: {2, {0, 10}, 100001} 2026-01-04T02:39:51.484654Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] No data for blobs compaction 2026-01-04T02:39:51.484983Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Received TPartition::TEvWrite 2026-01-04T02:39:51.485062Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2026-01-04T02:39:51.485100Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:39:51.485965Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:39:51.486020Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:51.486075Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:39:51.486707Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob processing sourceId 'SourceId' seqNo 2 partNo 0 2026-01-04T02:39:51.489006Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} part blob complete sourceId 'SourceId' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 118 count 1 nextOffset 101 batches 1 2026-01-04T02:39:51.489111Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:39:51.489165Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Batch completed (1) 2026-01-04T02:39:51.489208Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2026-01-04T02:39:51.489876Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 D0000100001_00000000000000000100_00000_0000000001_00000? size 104 WTime 127 2026-01-04T02:39:51.490817Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:39:51.511816Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:51.543061Z node 1 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:39:51.543192Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 
2026-01-04T02:39:51.543308Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:39:51.543419Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] TPartition::ReplyWrite. Partition: {2, {0, 10}, 100001} 2026-01-04T02:39:51.543542Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: {2, {0, 10}, 100001}, SeqNo: 2, partNo: 0, Offset: 100 is stored on disk 2026-01-04T02:39:51.543782Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2026-01-04T02:39:51.543828Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:51.543894Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:51.543959Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:51.544013Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2026-01-04T02:39:51.544071Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] No data for blobs compaction 2026-01-04T02:39:51.554418Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:51.575401Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx events 2026-01-04T02:39:51.575493Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:51.575539Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:51.575592Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:51.575634Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateIdle] Try persist 2026-01-04T02:39:51.575770Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:51.586378Z node ... 
PackedSize 0 count 0 nextOffset 80 batches 0 2026-01-04T02:39:57.989700Z node 3 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 12 2026-01-04T02:39:57.989753Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:39:57.989793Z node 3 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:39:57.989875Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Got batch complete: 1 Got KV request Got KV request Send disk status response with cookie: 0 Wait tx rollback for tx 8 Wait immediate tx complete 10 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 10 Errors { Kind: BAD_REQUEST Reason: "[TxId: (empty maybe), Topic: \'Root/PQ/rt3.dc1--account--topic\', Partition 0, SourceId \'src2\', SeqNo 10] MinSeqNo violation failure. SeqNo 5" } Wait tx committed for tx 12 2026-01-04T02:39:57.990422Z node 3 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:39:58.011003Z node 3 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:39:58.021349Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-04T02:39:58.021526Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2026-01-04T02:39:58.021591Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:58.021640Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:58.021700Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:58.021759Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:58.021818Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:58.021883Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:58.322015Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:58.373042Z node 4 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:58.373111Z node 4 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:58.373165Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:58.373222Z node 4 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:58.399565Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] 
bootstrapping 2 [4:182:2194] 2026-01-04T02:39:58.401496Z node 4 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-04T02:39:58.000000Z 2026-01-04T02:39:58.401752Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [4:182:2194] 2026-01-04T02:39:58.412712Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:58.443779Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:58.464577Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:58.495608Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:58.529084Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:58.560048Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:58.632082Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:58.663123Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:59.122585Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:59.175886Z node 5 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:59.175941Z node 5 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:59.175986Z node 5 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:59.176037Z node 5 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:59.191165Z node 5 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [5:184:2196] 2026-01-04T02:39:59.192788Z node 5 :PERSQUEUE INFO: partition_init.cpp:1016: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-01-04T02:39:59.193045Z node 5 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [5:184:2196] 2026-01-04T02:39:59.203605Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:59.234942Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:59.255705Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:59.286757Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:59.317861Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:59.338559Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:59.390091Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:59.462161Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:59.895471Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:59.931229Z node 6 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:59.931283Z node 6 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:59.931328Z node 6 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:59.931378Z node 6 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:59.945893Z node 6 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [6:182:2194] >>>> ADD BLOB 0 writeTimestamp=2026-01-04T02:39:59.937350Z >>>> ADD BLOB 1 writeTimestamp=2026-01-04T02:39:59.937369Z >>>> ADD BLOB 2 writeTimestamp=2026-01-04T02:39:59.937381Z >>>> ADD BLOB 3 writeTimestamp=2026-01-04T02:39:59.937390Z >>>> ADD BLOB 4 writeTimestamp=2026-01-04T02:39:59.937399Z >>>> ADD BLOB 5 writeTimestamp=2026-01-04T02:39:59.937409Z >>>> ADD BLOB 6 writeTimestamp=2026-01-04T02:39:59.937417Z >>>> ADD BLOB 7 writeTimestamp=2026-01-04T02:39:59.937426Z >>>> ADD BLOB 8 writeTimestamp=2026-01-04T02:39:59.937435Z >>>> ADD BLOB 9 writeTimestamp=2026-01-04T02:39:59.937444Z 2026-01-04T02:39:59.948629Z node 6 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-04T02:39:59.000000Z 2026-01-04T02:39:59.948883Z node 6 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 2 generation 0 [6:182:2194] 2026-01-04T02:39:59.959402Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:59.990368Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:00.011382Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:00.042400Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:00.073294Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:00.104525Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:00.176885Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:00.207996Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 |91.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTabletTests::DropTablet_And_UnplannedConfigTransaction [GOOD] >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Tx >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::PQTablet_Send_ReadSet_Via_App_5a4a [GOOD] Test command err: 2026-01-04T02:39:57.232025Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:57.302179Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:39:57.306052Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:39:57.306382Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:57.306454Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:57.306494Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-04T02:39:57.306558Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-04T02:39:57.306631Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:57.306705Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:57.329958Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:208:2212], now have 1 active actors on pipe 2026-01-04T02:39:57.330087Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:39:57.345642Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:39:57.348304Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:39:57.348409Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:57.349467Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: 
"user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:39:57.349634Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:39:57.350050Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:39:57.350421Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:216:2142] 2026-01-04T02:39:57.351110Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:39:57.351148Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-04T02:39:57.351199Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:216:2142] 2026-01-04T02:39:57.351240Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:39:57.351318Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:39:57.351896Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-04T02:39:57.351954Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:57.351998Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:57.352069Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:39:57.352105Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:57.352151Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:57.352210Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-04T02:39:57.352248Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-04T02:39:57.352279Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:39:57.352312Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:39:57.352362Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:57.352548Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:39:57.352592Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-04T02:39:57.352728Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:39:57.352868Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:57.355044Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:39:57.355172Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:39:57.355237Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:57.355271Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.355301Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:57.355338Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.355384Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:57.355439Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:57.355755Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:223:2220], now have 1 active actors on pipe 2026-01-04T02:39:57.356364Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:226:2222], now have 1 active actors on pipe 2026-01-04T02:39:57.357344Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3186: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 179 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2026-01-04T02:39:57.357417Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3372: [PQ: 72057594037927937] distributed transaction 2026-01-04T02:39:57.357494Z node 1 :PQ_TX INFO: pq_impl.cpp:3696: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2026-01-04T02:39:57.357534Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2026-01-04T02:39:57.357588Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2026-01-04T02:39:57.357640Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3985: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2026-01-04T02:39:57.357695Z node 1 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from UNKNOWN to PREPARING 2026-01-04T02:39:57.357746Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3864: [PQ: 72057594037927937] Persist state PREPARED for TxId 67890 2026-01-04T02:39:57.357923Z node 1 :PQ_TX DEBUG: transaction.cpp:414: [TxId: 67890] Save tx TxId: 67890 State: PREPARED MinStep: 133 MaxStep: 30133 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 179 RawX2: 
4294969488 } Partitions { } 2026-01-04T02:39:57.358014Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-04T02:39:57.360326Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-04T02:39:57.360385Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state PREPARING 2026-01-04T02:39:57.360445Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State PREPARING 2026-01-04T02:39:57.360487Z node 1 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from PREPARING to PREPARED 2026-01-04T02:39:57.364425Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3398: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 179 RawX2: 4294969488 } } St ... partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:00.675237Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.675265Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:00.675286Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:00.685607Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:00.706449Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:00.716937Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:00.716988Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:00.717021Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.717046Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:00.717067Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:00.737611Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:00.737674Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:00.737705Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.737752Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:00.737786Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:00.758365Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 
disallowed 0 2026-01-04T02:40:00.768921Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:00.768973Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:00.768995Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.769030Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:00.769059Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:00.769391Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [6:380:2325], now have 1 active actors on pipe 2026-01-04T02:40:00.769677Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3413: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22226 TabletDest: 72057594037927937 TabletProducer: 22226 ReadSet: "\010\002" Seqno: 0 2026-01-04T02:40:00.769711Z node 6 :PQ_TX INFO: pq_impl.cpp:3423: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet tx 67890 tabletProducer 22226 2026-01-04T02:40:00.769735Z node 6 :PQ_TX DEBUG: transaction.cpp:308: [TxId: 67890] Handle TEvReadSet 67890 2026-01-04T02:40:00.769765Z node 6 :PQ_TX DEBUG: transaction.cpp:325: [TxId: 67890] Predicates 5/5 2026-01-04T02:40:00.769795Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2026-01-04T02:40:00.769867Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2026-01-04T02:40:00.769899Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2026-01-04T02:40:00.769930Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 1 2026-01-04T02:40:00.769999Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2026-01-04T02:40:00.770034Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 0/1 2026-01-04T02:40:00.770093Z node 6 :PERSQUEUE DEBUG: partition.cpp:1485: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxRollback Step 100, TxId 67890 2026-01-04T02:40:00.770133Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:00.770162Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:00.770282Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.770321Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-04T02:40:00.770383Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2026-01-04T02:40:00.770412Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:00.770437Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: 
[72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:40:00.770466Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:00.770639Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:40:00.772790Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:00.772917Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:00.772958Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:00.772984Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.773012Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.773042Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.773071Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:00.773104Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:40:00.773154Z node 6 :PQ_TX INFO: pq_impl.cpp:3534: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2026-01-04T02:40:00.773195Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2026-01-04T02:40:00.773233Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2026-01-04T02:40:00.773273Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2026-01-04T02:40:00.773313Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 1/1 2026-01-04T02:40:00.773360Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4247: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2026-01-04T02:40:00.773397Z node 6 :PQ_TX INFO: pq_impl.cpp:4555: [PQ: 72057594037927937] Complete TxId 67890 2026-01-04T02:40:00.773427Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2026-01-04T02:40:00.773481Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4032: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2026-01-04T02:40:00.773525Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22224 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-04T02:40:00.773569Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22226 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-04T02:40:00.773593Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22225 TabletDest# 
72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-04T02:40:00.773617Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-04T02:40:00.773638Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22223 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-04T02:40:00.773672Z node 6 :PQ_TX INFO: pq_impl.cpp:4586: [PQ: 72057594037927937] delete partitions for TxId 67890 2026-01-04T02:40:00.773698Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2026-01-04T02:40:00.773726Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2026-01-04T02:40:00.773749Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-04T02:40:00.773773Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2026-01-04T02:40:00.773915Z node 6 :PQ_TX INFO: pq_impl.cpp:3459: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSetAck Step: 100 TxId: 67890 TabletSource: 22226 TabletDest: 72057594037927937 TabletConsumer: 72057594037927937 Flags: 0 Seqno: 0 2026-01-04T02:40:00.773945Z node 6 :PQ_TX DEBUG: transaction.cpp:344: [TxId: 67890] Handle TEvReadSetAck txId 67890 2026-01-04T02:40:00.773971Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state WAIT_RS_ACKS 2026-01-04T02:40:00.773999Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State WAIT_RS_ACKS 2026-01-04T02:40:00.774040Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 2026-01-04T02:40:00.774061Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-04T02:40:00.774087Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/5 |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Tx [GOOD] >> TPQTabletTests::DropTablet_And_PlannedConfigTransaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Non_Kafka_Transaction_Supportive_Partitions_Should_Not_Be_Deleted_After_Timeout [GOOD] Test command err: 2026-01-04T02:39:57.415718Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:57.481851Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:39:57.485564Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:39:57.485920Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:57.485998Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:57.486042Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-04T02:39:57.486093Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-04T02:39:57.486141Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:57.486214Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:57.513637Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:208:2212], now have 1 active actors on pipe 2026-01-04T02:39:57.513772Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:39:57.534778Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2026-01-04T02:39:57.537500Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2026-01-04T02:39:57.537636Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:57.538580Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: 
"federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "consumer" Generation: 1 Important: true } 2026-01-04T02:39:57.538735Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:39:57.539126Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:39:57.539516Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:216:2142] 2026-01-04T02:39:57.540600Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:39:57.540648Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-04T02:39:57.540689Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:216:2142] 2026-01-04T02:39:57.540739Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:39:57.540819Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:39:57.541402Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:39:57.541850Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-04T02:39:57.541898Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:57.541936Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:57.541987Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:57.542019Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-04T02:39:57.542050Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:57.542128Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:57.542197Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-04T02:39:57.542233Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-04T02:39:57.542267Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:57.542295Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer reinit request with generation 1 2026-01-04T02:39:57.542316Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer reinit with generation 1 done 2026-01-04T02:39:57.542339Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-04T02:39:57.542391Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-04T02:39:57.542425Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:57.542688Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:39:57.542731Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user consumer readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:39:57.542782Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:39:57.542942Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-04T02:39:57.543114Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:57.545471Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:39:57.545558Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:39:57.545611Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:57.545641Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.545673Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:57.545721Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:57.545753Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:57.545792Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:57.546156Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:223:2220], now have 1 active actors on pipe 2026-01-04T02:39:57.546862Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:226:2222], now have 1 active actors on pipe 2026-01-04T02:39:57.547686Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3186: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 179 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "consumer" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2026-01-04T02:39:57.547758Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3372: [PQ: 72057594037927937] distributed transaction 2026-01-04T02:39:57.547830Z node 1 :PQ_TX INFO: pq_impl.cpp:3696: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2026-01-04T02:39:57.547886Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2026-01-04T02:39:57.547918Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2026-01-04T02:39:57.547963Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3985: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2026-01-04T02:39:57.547998Z node 1 :PQ_TX INFO: pq_impl.cpp:4315: [ ... 
0001} generation 2 [6:223:2142] 2026-01-04T02:40:00.207912Z node 6 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateInit] SYNC INIT topic topic partitition {0, KafkaTransactionWriteId{1, 0}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:40:00.207953Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:40:00.207981Z node 6 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process pending events. Count 0 2026-01-04T02:40:00.208009Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-04T02:40:00.208047Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.208072Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.208105Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.208128Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2026-01-04T02:40:00.208181Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process MLP pending events. 
Count 0 2026-01-04T02:40:00.208316Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] No data for blobs compaction 2026-01-04T02:40:00.208461Z node 6 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie -=[ 0wn3r ]=-|c870a3d8-fe17448b-567737c5-25780f24_0 generated for partition {0, KafkaTransactionWriteId{1, 0}, 100001} topic 'topic' owner -=[ 0wn3r ]=- 2026-01-04T02:40:00.208507Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-04T02:40:00.208547Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:40:00.208589Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:00.208619Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.208653Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:40:00.208696Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:00.208727Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Batch completed (1) 2026-01-04T02:40:00.208756Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2026-01-04T02:40:00.208804Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ReplyOwnerOk. Partition: {0, KafkaTransactionWriteId{1, 0}, 100001} 2026-01-04T02:40:00.208859Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 4 2026-01-04T02:40:00.209394Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3103: [PQ: 72057594037927937] Transaction for Kafka producer {Id: 1, Epoch: 0} is expired 2026-01-04T02:40:00.209436Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5321: [PQ: 72057594037927937] send TEvPQ::TEvDeletePartition to partition {0, KafkaTransactionWriteId{1, 0}, 100001} 2026-01-04T02:40:00.209874Z node 6 :PERSQUEUE DEBUG: partition.cpp:4395: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Handle TEvPQ::TEvDeletePartition 2026-01-04T02:40:00.212448Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:40:00.212546Z node 6 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. 
Delete blobs from D0000100001(+) to D0000100002(-) 2026-01-04T02:40:00.213162Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:00.213333Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] No data for blobs compaction 2026-01-04T02:40:00.213401Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-04T02:40:00.213450Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.213494Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.213547Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.213594Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2026-01-04T02:40:00.213638Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] No data for blobs compaction 2026-01-04T02:40:00.214033Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1199: [PQ: 72057594037927937] Topic 'topic' counters. CacheSize 0 CachedBlobs 0 2026-01-04T02:40:00.214165Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5243: [PQ: 72057594037927937] Handle TEvPQ::TEvDeletePartitionDone {0, KafkaTransactionWriteId{1, 0}, 100001} 2026-01-04T02:40:00.214225Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:5237: [PQ: 72057594037927937] DeletePartition {0, KafkaTransactionWriteId{1, 0}, 100001} 2026-01-04T02:40:00.214283Z node 6 :PQ_TX INFO: pq_impl.cpp:4656: [PQ: 72057594037927937] delete WriteId KafkaTransactionWriteId{1, 0} 2026-01-04T02:40:00.214365Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-04T02:40:00.216788Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-04T02:40:00.237741Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:00.268908Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:00.268990Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.269044Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.269102Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.269147Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:00.279475Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Process user action and tx events 2026-01-04T02:40:00.279534Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.279562Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.279593Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.279617Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Try persist 2026-01-04T02:40:00.394715Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:40:00.405229Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] No data for blobs compaction 2026-01-04T02:40:00.662319Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:00.683140Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:00.683203Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.683236Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.683267Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.683296Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:00.683366Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:00.693866Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Process user action and tx events 2026-01-04T02:40:00.693925Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.693957Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.693986Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.694025Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, {0, 3}, 100000}][StateIdle] Try persist 2026-01-04T02:40:00.714532Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionTests::GetUsedStorage [GOOD] >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Step >> 
TPQTabletTests::Huge_ProposeTransacton >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Step [GOOD] >> TConsoleTests::TestCreateServerlessTenant [GOOD] >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb >> TPQTest::TestWriteSplit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::GetUsedStorage [GOOD] Test command err: 2026-01-04T02:39:55.132053Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:55.182799Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:55.182868Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:55.182912Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:55.182971Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:55.199798Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:181:2194] 2026-01-04T02:39:55.201707Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-04T02:39:55.000000Z 2026-01-04T02:39:55.201974Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:181:2194] 2026-01-04T02:39:55.223007Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:55.264207Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:55.285082Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:55.295550Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:55.336653Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:55.377797Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:55.398455Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\370\310\200\267\27038\001" StorageChannel: INLINE } 2026-01-04T02:39:55.547034Z node 1 :TABLET_RESOLVER INFO: 
tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:55.567708Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\370\310\200\267\27038\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\005\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\005\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\370\310\200\267\27038\001" StorageChannel: INLINE } 2026-01-04T02:39:55.622018Z node 1 :PERSQUEUE WARN: partition.cpp:3781: [72057594037927937][Partition][0][StateIdle] commit to future - topic Root/PQ/rt3.dc1--account--topic partition 0 client client EndOffset 10 offset 13 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\n\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\n\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\370\310\200\267\27038\001" StorageChannel: INLINE } 2026-01-04T02:39:55.876626Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:55.912725Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:55.912785Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:55.912847Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:55.912909Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:55.927566Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [2:183:2196] 2026-01-04T02:39:55.929233Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-04T02:39:55.000000Z 2026-01-04T02:39:55.929449Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:183:2196] 2026-01-04T02:39:55.950450Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:55.991803Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:56.012788Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:56.023422Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:56.064842Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:56.106266Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:56.137341Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:56.681555Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:56.717432Z node 3 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:56.717481Z node 3 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:56.717520Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:56.717559Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:56.730493Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [3:183:2196] 2026-01-04T02:39:56.731804Z node 3 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-04T02:39:56.000000Z 2026-01-04T02:39:56.731990Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [3:183:2196] 2026-01-04T02:39:56.752966Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:56.794304Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:56.815248Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:56.825835Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:56.867251Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:56.908575Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:56.939525Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\340\320\200\267\27038\001" StorageChannel: INLINE } 2026-01-04T02:39:57.085700Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:57.096912Z node 3 :PERSQUEUE WARN: partition.cpp:3014: [72057594037927937][Partition][3][StateIdle] Partition 3 Consumer 'client' Bad request (invalid range) Begin 4 End 2 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" ... 
:PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-04T02:40:00.859835Z node 5 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:00.859885Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-04T02:40:00.859939Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-04T02:40:00.859964Z node 5 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:00.859986Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2026-01-04T02:40:00.860013Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 6, PendingWrites: 0 2026-01-04T02:40:00.860042Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.860077Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 6, PendingWrites: 0 2026-01-04T02:40:00.860107Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Wait kv request Wait kv request 2026-01-04T02:40:00.860456Z node 5 :PERSQUEUE DEBUG: partition.cpp:1485: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxRollback Step 1, TxId 0 2026-01-04T02:40:00.860491Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:00.860527Z node 5 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 3 2026-01-04T02:40:00.860563Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:00.860593Z node 5 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 4 2026-01-04T02:40:00.860611Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:00.860645Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:00.870973Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:00.871034Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 
2026-01-04T02:40:00.871103Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:00.871145Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:00.871178Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 6, PendingWrites: 0 2026-01-04T02:40:00.871210Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.871257Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-04T02:40:00.871300Z node 5 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 0 2026-01-04T02:40:00.871357Z node 5 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:00.871429Z node 5 :PERSQUEUE DEBUG: partition.cpp:3880: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset is set to 5 (startOffset 0) session session-client-0 2026-01-04T02:40:00.871479Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-04T02:40:00.871510Z node 5 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 3 2026-01-04T02:40:00.871544Z node 5 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 3 2026-01-04T02:40:00.871588Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-04T02:40:00.871611Z node 5 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 4 2026-01-04T02:40:00.871629Z node 5 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 4 2026-01-04T02:40:00.871647Z node 5 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:00.871669Z node 5 :PERSQUEUE DEBUG: partition.cpp:3880: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset is set to 10 (startOffset 0) session session-client-0 2026-01-04T02:40:00.871692Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ImmediateTx]) 2026-01-04T02:40:00.890402Z node 5 :PERSQUEUE DEBUG: partition.cpp:3941: [72057594037927937][Partition][0][StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(ABORTED), reason=incorrect offset range (gap) 2026-01-04T02:40:00.890487Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 6 2026-01-04T02:40:00.890520Z node 5 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] 
Batch completed (6) 2026-01-04T02:40:00.890572Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:00.890782Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got KV request Got batch complete: 6 Got KV request Got KV request Wait tx rollback for tx 0 2026-01-04T02:40:00.890972Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:00.911600Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:00.911688Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-04T02:40:00.911838Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:00.911892Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:00.911929Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.911962Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.912018Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.912052Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:00.912090Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Wait tx committed for tx 3 Wait tx committed for tx 4 Wait immediate tx complete 6 Got propose resutl: Origin: 72057594037927937 Status: ABORTED TxId: 6 Errors { Kind: BAD_REQUEST Reason: "incorrect offset range (gap)" } 2026-01-04T02:40:01.297919Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:01.342818Z node 6 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:01.342867Z node 6 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:01.342904Z node 6 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:01.342945Z node 6 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:40:01.356441Z node 6 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] bootstrapping {2, {0, 10}, 100001} [6:182:2194] 2026-01-04T02:40:01.358432Z node 6 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:{2, {0, 10}, 100001}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-04T02:40:01.000000Z 2026-01-04T02:40:01.358513Z node 6 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{2, {0, 10}, 100001}][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {2, {0, 10}, 100001} generation 0 [6:182:2194] 2026-01-04T02:40:01.369082Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:01.400203Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:01.421088Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:01.452337Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:01.483526Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:01.514741Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:01.587032Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:01.618192Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::RacyAlterTableAndConditionalErase [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:42.272577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:42.272676Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:42.272720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:42.272752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:42.272787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:42.272816Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:42.272872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:42.272956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:42.273790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:42.274106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:42.354642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:42.354706Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:42.366314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:42.366466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:42.366598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:42.374816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:42.375073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:42.375764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:42.376021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:42.378773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:42.378945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:42.380076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:42.380133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:42.380231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:42.380276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:42.380313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:42.380505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:42.501243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.503820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.503959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.504015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.504059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.504116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.504166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.504206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.504260Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.504307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.504351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.504400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.504463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.504541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.504597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
d: 102 at step: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000039 2026-01-04T02:40:01.534813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:40:01.534920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:40:01.534962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_table.cpp:373: TAlterTable TPropose operationId# 102:0 HandleReply TEvOperationPlan, operationId: 102:0, stepId: 5000039, at schemeshard: 72057594046678944 2026-01-04T02:40:01.535155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 129 2026-01-04T02:40:01.535274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 2026-01-04T02:40:01.539251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:40:01.539303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:40:01.539575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:40:01.539610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-04T02:40:01.540093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:40:01.540146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 102:0 ProgressState at tablet: 72057594046678944 2026-01-04T02:40:01.540984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:40:01.541073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:40:01.541121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:40:01.541165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-04T02:40:01.541236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount 
reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:40:01.541318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:40:01.542132Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 907 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-04T02:40:01.542184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:40:01.542277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 907 } } CommitVersion { Step: 5000039 TxId: 102 } 2026-01-04T02:40:01.542382Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 102 Step: 5000039 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 907 } } CommitVersion { Step: 5000039 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 2026-01-04T02:40:01.542847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 705 RawX2: 4294969987 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:40:01.542878Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409546, partId: 0 2026-01-04T02:40:01.542986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 705 RawX2: 4294969987 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:40:01.543041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:40:01.543122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 705 RawX2: 4294969987 } Origin: 72075186233409546 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:40:01.543180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, 
txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:40:01.543220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:40:01.543252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:40:01.543280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-04T02:40:01.545246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:40:01.546122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:40:01.546226Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:40:01.546495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:40:01.546534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:40:01.546638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:40:01.546671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:40:01.546711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:40:01.546734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:40:01.546760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:40:01.546819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:733:2711] message: TxId: 102 2026-01-04T02:40:01.546854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:40:01.546882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:40:01.546916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:40:01.547022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:40:01.548370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:40:01.548432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1008:2961] TestWaitNotification: OK eventTxId 102 2026-01-04T02:40:01.548724Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-04T02:40:01.548801Z node 1 :FLAT_TX_SCHEMESHARD ERROR: schemeshard__conditional_erase.cpp:398: Unsuccessful conditional erase: tabletId: 72075186233409546, status: SCHEME_ERROR, error: Schema version mismatch: got 1, expected 2, retry after: 300.000000s, at schemeshard: 72057594046678944 2026-01-04T02:40:01.550298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-04T02:40:01.550428Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-04T02:40:01.550468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:1, run at: 1970-01-01T00:06:00.045500Z, at schemeshard: 72057594046678944 2026-01-04T02:40:01.550505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::PQTablet_App_SendReadSet_Invalid_Step [GOOD] Test command err: 2026-01-04T02:39:59.938819Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:00.001732Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:40:00.005700Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:40:00.006093Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:00.006187Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:00.006251Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-04T02:40:00.006315Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-04T02:40:00.006383Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:00.006470Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:40:00.032759Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:208:2212], now have 1 active actors on pipe 2026-01-04T02:40:00.032931Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:40:00.052070Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:40:00.055115Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:40:00.055266Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:00.056234Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: 
"user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:40:00.056426Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:40:00.056849Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:40:00.057285Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:216:2142] 2026-01-04T02:40:00.058343Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:40:00.058401Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-04T02:40:00.058457Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:216:2142] 2026-01-04T02:40:00.058513Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:40:00.058605Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:40:00.059167Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-04T02:40:00.059218Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:00.059258Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:00.059317Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:00.059362Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.059418Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:00.059481Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-04T02:40:00.059523Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-04T02:40:00.059560Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:00.059593Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:40:00.059653Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:00.059889Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:40:00.059956Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-04T02:40:00.060118Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:40:00.060320Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:40:00.062754Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:00.062869Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:00.062934Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:00.062979Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.063025Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.063069Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.063107Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:00.063153Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:40:00.063487Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:223:2220], now have 1 active actors on pipe 2026-01-04T02:40:00.064147Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:226:2222], now have 1 active actors on pipe 2026-01-04T02:40:00.064952Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3186: [PQ: 72057594037927937] Handle TEvPersQueue::TEvProposeTransaction SourceActor { RawX1: 179 RawX2: 4294969488 } TxId: 67890 Data { Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } SendingShards: 22222 ReceivingShards: 22222 Immediate: false } 2026-01-04T02:40:00.065028Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3372: [PQ: 72057594037927937] distributed transaction 2026-01-04T02:40:00.065123Z node 1 :PQ_TX INFO: pq_impl.cpp:3696: [PQ: 72057594037927937] Propose TxId 67890, WriteId (empty maybe) 2026-01-04T02:40:00.065166Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state UNKNOWN 2026-01-04T02:40:00.065205Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State UNKNOWN 2026-01-04T02:40:00.065244Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3985: [PQ: 72057594037927937] schedule TEvProposeTransactionResult(PREPARED) 2026-01-04T02:40:00.065299Z node 1 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from UNKNOWN to PREPARING 2026-01-04T02:40:00.065357Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3864: [PQ: 72057594037927937] Persist state PREPARED for TxId 67890 2026-01-04T02:40:00.065494Z node 1 :PQ_TX DEBUG: transaction.cpp:414: [TxId: 67890] Save tx TxId: 67890 State: PREPARED MinStep: 133 MaxStep: 30133 PredicatesReceived { TabletId: 22222 } PredicateRecipients: 22222 Operations { PartitionId: 0 CommitOffsetsBegin: 0 CommitOffsetsEnd: 0 Consumer: "user" Path: "/topic" } Kind: KIND_DATA SourceActor { RawX1: 179 RawX2: 
4294969488 } Partitions { } 2026-01-04T02:40:00.065634Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-04T02:40:00.067885Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-04T02:40:00.067948Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state PREPARING 2026-01-04T02:40:00.068004Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State PREPARING 2026-01-04T02:40:00.068061Z node 1 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from PREPARING to PREPARED 2026-01-04T02:40:00.071794Z node 1 :PQ_TX DEBUG: pq_impl.cpp:3398: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvPlanStep Transactions { TxId: 67890 AckTo { RawX1: 179 RawX2: 4294969488 } } St ... 01-04T02:40:01.989472Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state PREPARED 2026-01-04T02:40:01.989511Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State PREPARED 2026-01-04T02:40:01.989553Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from PREPARED to PLANNING 2026-01-04T02:40:01.989590Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from PLANNING to PLANNED 2026-01-04T02:40:01.989633Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4471: [PQ: 72057594037927937] TxQueue.size 1 2026-01-04T02:40:01.989675Z node 6 :PQ_TX INFO: pq_impl.cpp:650: [PQ: 72057594037927937] New ExecStep 100, ExecTxId 67890 2026-01-04T02:40:01.989740Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from PLANNED to CALCULATING 2026-01-04T02:40:01.989786Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3791: [PQ: 72057594037927937] PlanStep 100, PlanTxId 67890 2026-01-04T02:40:01.989876Z node 6 :PERSQUEUE DEBUG: partition.cpp:1381: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCalcPredicate Step 100, TxId 67890 2026-01-04T02:40:01.989931Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:01.989972Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-04T02:40:01.990041Z node 6 :PQ_TX DEBUG: partition.cpp:3048: [Partition][0][StateIdle] TxId 67890 affect consumer user 2026-01-04T02:40:01.990088Z node 6 :PQ_TX DEBUG: partition.cpp:1708: [Partition][0][StateIdle] The long answer to TEvTxCalcPredicate. TxId: 67890 2026-01-04T02:40:01.990129Z node 6 :PQ_TX DEBUG: partition.cpp:1711: [Partition][0][StateIdle] Send TEvTxCalcPredicateResult. 
TxId: 67890 2026-01-04T02:40:01.990190Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:01.990229Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:01.990274Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:01.990308Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:01.990537Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3488: [PQ: 72057594037927937] Handle TEvPQ::TEvTxCalcPredicateResult Step 100, TxId 67890, Partition 0, Predicate 1 2026-01-04T02:40:01.990576Z node 6 :PQ_TX DEBUG: transaction.cpp:244: [TxId: 67890] Handle TEvTxCalcPredicateResult 2026-01-04T02:40:01.990610Z node 6 :PQ_TX DEBUG: transaction.cpp:301: [TxId: 67890] Partition responses 1/1 2026-01-04T02:40:01.990648Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state CALCULATING 2026-01-04T02:40:01.990684Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State CALCULATING 2026-01-04T02:40:01.990721Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State CALCULATING FrontTxId 67890 2026-01-04T02:40:01.990759Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4482: [PQ: 72057594037927937] Responses received from the partitions 1/1 2026-01-04T02:40:01.990804Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from CALCULATING to CALCULATED 2026-01-04T02:40:01.990845Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from CALCULATED to WAIT_RS 2026-01-04T02:40:01.990890Z node 6 :PQ_TX INFO: pq_impl.cpp:4013: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 1 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2026-01-04T02:40:01.990934Z node 6 :PQ_TX INFO: pq_impl.cpp:4023: [PQ: 72057594037927937] Send TEvReadSet to tablet 22222 tx 67890 2026-01-04T02:40:01.991045Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 0 2026-01-04T02:40:01.991119Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-04T02:40:01.992176Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2817: [PQ: 72057594037927937] Handle TEvTabletPipe::TEvClientConnected 2026-01-04T02:40:01.992229Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2822: [PQ: 72057594037927937] Connected to tablet 22222 2026-01-04T02:40:01.993307Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [6:237:2230], now have 1 active actors on pipe 2026-01-04T02:40:01.994308Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [6:238:2231], now have 1 active actors on pipe 2026-01-04T02:40:01.994376Z node 6 :PERSQUEUE DEBUG: pqtablet_mock.cpp:72: Connected to tablet 72057594037927937 from tablet 22222 2026-01-04T02:40:01.994680Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3413: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet Step: 100 TxId: 67890 TabletSource: 22222 TabletDest: 72057594037927937 TabletProducer: 22222 ReadSet: "\010\002" Seqno: 0 2026-01-04T02:40:01.994725Z node 6 :PQ_TX INFO: pq_impl.cpp:3423: [PQ: 72057594037927937] Handle TEvTxProcessing::TEvReadSet tx 67890 tabletProducer 22222 2026-01-04T02:40:01.994782Z node 6 :PQ_TX DEBUG: transaction.cpp:308: [TxId: 67890] Handle TEvReadSet 67890 2026-01-04T02:40:01.994823Z node 6 :PQ_TX DEBUG: transaction.cpp:325: [TxId: 67890] Predicates 1/1 2026-01-04T02:40:01.994863Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state WAIT_RS 2026-01-04T02:40:01.994901Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State WAIT_RS 2026-01-04T02:40:01.994945Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State WAIT_RS FrontTxId 67890 2026-01-04T02:40:01.994987Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 1 2026-01-04T02:40:01.995063Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2026-01-04T02:40:01.995109Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 0/1 2026-01-04T02:40:01.995206Z node 6 :PERSQUEUE DEBUG: partition.cpp:1485: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxRollback Step 100, TxId 67890 2026-01-04T02:40:01.995247Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. 
Can't process user action and tx events 2026-01-04T02:40:01.995553Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:01.995627Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:01.995682Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:01.995718Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:01.995750Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:01.995803Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-04T02:40:01.995892Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2026-01-04T02:40:01.995933Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:01.995975Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:40:01.996016Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:01.996173Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:40:01.996257Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-04T02:40:01.998546Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:01.998657Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:01.998702Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:01.998732Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:01.998758Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:01.998786Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:01.998813Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:01.998858Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:40:01.998923Z node 6 :PQ_TX INFO: pq_impl.cpp:3534: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2026-01-04T02:40:01.998970Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2026-01-04T02:40:01.999014Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2026-01-04T02:40:01.999057Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2026-01-04T02:40:01.999102Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 1/1 2026-01-04T02:40:01.999147Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4247: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(ABORTED) 2026-01-04T02:40:01.999192Z node 6 :PQ_TX INFO: pq_impl.cpp:4555: [PQ: 72057594037927937] Complete TxId 67890 2026-01-04T02:40:01.999231Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2026-01-04T02:40:01.999303Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4032: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2026-01-04T02:40:01.999361Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-04T02:40:01.999406Z node 6 :PQ_TX INFO: pq_impl.cpp:4586: [PQ: 72057594037927937] delete partitions for TxId 67890 2026-01-04T02:40:01.999444Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2026-01-04T02:40:01.999504Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/1 2026-01-04T02:40:01.999537Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 0, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-04T02:40:01.999578Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/1 >> TPQTest::DirectReadBadSessionOrPipe [GOOD] |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> 
TPQTabletTests::Kafka_Transaction_Supportive_Partitions_Should_Be_Deleted_With_Delete_Partition_Done_Event_Drop [GOOD] >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_TEvDeletePartitionDone_Came_Should_Be_Processed_After_Previous_Complete_Erasure >> PQCountersSimple::PartitionWriteQuota [GOOD] >> PQCountersSimple::PartitionFirstClass >> TPartitionTests::CorrectRange_Multiple_Transactions >> TPartitionTests::ConflictingCommitsInSeveralBatches [GOOD] >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] >> ExternalBlobsMultipleChannels::Simple [GOOD] >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] >> TPartitionTests::ConflictingCommitFails >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_TEvDeletePartitionDone_Came_Should_Be_Processed_After_Previous_Complete_Erasure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::DirectReadBadSessionOrPipe [GOOD] Test command err: 2026-01-04T02:39:59.261183Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:59.315411Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:39:59.320070Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:39:59.320369Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:59.320442Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:59.320513Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-04T02:39:59.320550Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-04T02:39:59.320589Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:59.320650Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:59.334502Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:180:2193], now have 1 active actors on pipe 2026-01-04T02:39:59.334619Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:39:59.348923Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:39:59.351761Z node 
1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:39:59.351898Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:59.353133Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:39:59.353273Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:39:59.353377Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:39:59.353867Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:39:59.354178Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:188:2142] 2026-01-04T02:39:59.354871Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:39:59.354904Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-04T02:39:59.354944Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:188:2142] 2026-01-04T02:39:59.355000Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:39:59.355064Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:39:59.355493Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-04T02:39:59.355530Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:59.355557Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:59.355599Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:39:59.355629Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:59.355659Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:59.355711Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-04T02:39:59.355755Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-04T02:39:59.355789Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:39:59.355822Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:39:59.355884Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:59.356059Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:39:59.356116Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:39:59.356250Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:39:59.356467Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-04T02:39:59.356954Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:39:59.356991Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:1:Initializer] Initializing completed. 2026-01-04T02:39:59.357023Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 2 [1:190:2142] 2026-01-04T02:39:59.357058Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:39:59.357088Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:39:59.357353Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. 
Count 0 2026-01-04T02:39:59.357377Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:39:59.357398Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:59.357425Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:39:59.357444Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:59.357466Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:59.357498Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2026-01-04T02:39:59.357544Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2026-01-04T02:39:59.357569Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:39:59.357587Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-04T02:39:59.357627Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-04T02:39:59.357728Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:39:59.357758Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:39:59.357896Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:39:59.358079Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:59.358195Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:39:59.358323Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-04T02:39:59.361486Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdl ... 
w have 1 active actors on pipe 2026-01-04T02:40:02.623134Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-04T02:40:02.623175Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-04T02:40:02.623211Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with empty session id 2026-01-04T02:40:02.623268Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1253: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 6, Error Read prepare request with empty session id 2026-01-04T02:40:02.623292Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with empty session id Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "bad-session" Offset: 0 Count: 1 Bytes: 99999 DirectReadId: 1 PartitionSessionId: 1 } PipeClient { RawX1: 215 RawX2: 34359740585 } Cookie: 123 } via pipe: [8:180:2192] 2026-01-04T02:40:02.623583Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [8:226:2225], now have 1 active actors on pipe 2026-01-04T02:40:02.623639Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-04T02:40:02.623663Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-04T02:40:02.623695Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with unknown(old?) session id bad-session 2026-01-04T02:40:02.623745Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1253: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 7, Error Read prepare request with unknown(old?) session id bad-session 2026-01-04T02:40:02.623768Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with unknown(old?) 
session id bad-session 2026-01-04T02:40:02.624006Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [8:229:2227], now have 1 active actors on pipe 2026-01-04T02:40:02.624039Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-04T02:40:02.624062Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-04T02:40:02.624103Z node 8 :PERSQUEUE INFO: pq_impl.cpp:1702: [PQ: 72057594037927937] Got cmd delete session: ClientId: "user2" SessionId: "session2" 2026-01-04T02:40:02.624133Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72057594037927937] Destroy direct read session session2 2026-01-04T02:40:02.624188Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:02.624215Z node 8 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:02.624244Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:02.624267Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:02.624292Z node 8 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:02.624342Z node 8 :PERSQUEUE DEBUG: partition.cpp:3880: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user2 session is set to 0 (startOffset 0) session session2 2026-01-04T02:40:02.624367Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:02.624391Z node 8 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:40:02.624416Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:02.624577Z node 8 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-04T02:40:02.626514Z node 8 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:02.626612Z node 8 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:02.626646Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:02.626671Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:02.626694Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:02.626718Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:02.626740Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:02.626771Z node 8 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:40:02.626819Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user2" SessionId: "session2" Offset: 0 Count: 1 Bytes: 99999 DirectReadId: 1 PartitionSessionId: 1 } PipeClient { RawX1: 215 RawX2: 34359740585 } Cookie: 123 } via pipe: [8:180:2192] 2026-01-04T02:40:02.627136Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [8:235:2232], now have 1 active actors on pipe 2026-01-04T02:40:02.627226Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-04T02:40:02.627254Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-04T02:40:02.627287Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with unknown(old?) session id session2 2026-01-04T02:40:02.627336Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1253: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 9, Error Read prepare request with unknown(old?) session id session2 2026-01-04T02:40:02.627357Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with unknown(old?) 
session id session2 Publish read Send publish read request: Partition: 0 PipeClient { RawX1: 215 RawX2: 34359740585 } Cookie: 123 CmdPublishRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2026-01-04T02:40:02.627577Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [8:238:2234], now have 1 active actors on pipe 2026-01-04T02:40:02.627633Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-04T02:40:02.627656Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-04T02:40:02.627712Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 215 RawX2: 34359740585 } Cookie: 123 CmdPublishRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2026-01-04T02:40:02.627754Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1253: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 10, Error Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 215 RawX2: 34359740585 } Cookie: 123 CmdPublishRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2026-01-04T02:40:02.627776Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 215 RawX2: 34359740585 } Cookie: 123 CmdPublishRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } Got direct read response: Status: 128 ErrorReason: "Read prepare request with unknown(old?) session id session2Partition: 0\nPipeClient {\n RawX1: 215\n RawX2: 34359740585\n}\nCookie: 123\nCmdPublishRead {\n SessionKey {\n SessionId: \"session2\"\n PartitionSessionId: 1\n }\n DirectReadId: 1\n}\n" ErrorCode: BAD_REQUEST PartitionResponse { Cookie: 123 } Forget read Send forget read request: Partition: 0 PipeClient { RawX1: 215 RawX2: 34359740585 } Cookie: 123 CmdForgetRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2026-01-04T02:40:02.628163Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [8:240:2236], now have 1 active actors on pipe 2026-01-04T02:40:02.628275Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-04T02:40:02.628317Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-04T02:40:02.628414Z node 8 :PERSQUEUE WARN: event_helpers.cpp:42: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic error: Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 215 RawX2: 34359740585 } Cookie: 123 CmdForgetRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2026-01-04T02:40:02.628500Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:1253: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 11, Error Read prepare request with unknown(old?) 
session id session2Partition: 0 PipeClient { RawX1: 215 RawX2: 34359740585 } Cookie: 123 CmdForgetRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } 2026-01-04T02:40:02.628531Z node 8 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: Read prepare request with unknown(old?) session id session2Partition: 0 PipeClient { RawX1: 215 RawX2: 34359740585 } Cookie: 123 CmdForgetRead { SessionKey { SessionId: "session2" PartitionSessionId: 1 } DirectReadId: 1 } Got direct read response: Status: 128 ErrorReason: "Read prepare request with unknown(old?) session id session2Partition: 0\nPipeClient {\n RawX1: 215\n RawX2: 34359740585\n}\nCookie: 123\nCmdForgetRead {\n SessionKey {\n SessionId: \"session2\"\n PartitionSessionId: 1\n }\n DirectReadId: 1\n}\n" ErrorCode: BAD_REQUEST PartitionResponse { Cookie: 123 } |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> PQCountersSimple::PartitionFirstClass [GOOD] >> PQCountersSimple::SupportivePartitionCountersPersist >> TPQTabletTests::Kafka_Transaction_Several_Partitions_One_Tablet_Deleting_State >> TPartitionChooserSuite::THashChooserTest [GOOD] >> TPartitionChooserSuite::THashChooser_GetTabletIdTest [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test >> TPartitionTests::TEvTxCalcPredicate_Without_Conflicts [GOOD] >> TPQTest::TestReadRuleVersions >> TPartitionTests::CorrectRange_Multiple_Transactions [GOOD] >> TPartitionTests::CorrectRange_Rollback >> TPQTabletTests::Kafka_Transaction_Several_Partitions_One_Tablet_Deleting_State [GOOD] >> TPartitionTests::TestTxBatchInFederation [GOOD] >> TPartitionTests::TEvTxCalcPredicate_With_Conflicts >> TPQTabletTests::Kafka_Transaction_Several_Partitions_One_Tablet_Successful_Commit >> TPartitionTests::TestBatchingWithChangeConfig >> TPQTest::TestWriteSplit [GOOD] >> TPQTest::TestWriteTimeStampEstimate >> TConsoleTests::TestCreateTenantWrongName [GOOD] >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:39.704217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:39.704315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:39.704361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:39.704404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:39.704443Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type 
TxMergeTablePartition, limit 10000 2026-01-04T02:39:39.704471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:39.704525Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:39.704610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:39.705440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:39.705728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:39.800129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:39.800198Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:39.815329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:39.815558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:39.815703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:39.823854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:39.824080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:39.824743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:39.824984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:39.828190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:39.828403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:39.829676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:39.829747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:39.829886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:39.829935Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:39.829979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:39.830192Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:39.967290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.968392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.968519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.968638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.968718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.968834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.968920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.968985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.969071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.969151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.969230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.969345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.969447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.969572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:39.969666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... T02:40:02.899186Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:40:02.899254Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:40:02.899283Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:40:02.899311Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:40:02.899340Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:40:02.899408Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-01-04T02:40:02.899844Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1124 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-04T02:40:02.899899Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-04T02:40:02.900011Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1124 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-04T02:40:02.900112Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1124 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-04T02:40:02.901366Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 723 RawX2: 77309414037 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:40:02.901416Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 
72075186233409546, partId: 0 2026-01-04T02:40:02.901540Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 723 RawX2: 77309414037 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:40:02.901597Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:40:02.901692Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 723 RawX2: 77309414037 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:40:02.901766Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:40:02.901830Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:02.901877Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:40:02.901926Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-04T02:40:02.905683Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:40:02.905895Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:40:02.906134Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:02.906446Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:02.906576Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:02.906627Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:40:02.906741Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:40:02.906786Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:40:02.906833Z node 18 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:40:02.906874Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:40:02.906919Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-04T02:40:02.907006Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [18:749:2727] message: TxId: 101 2026-01-04T02:40:02.907064Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:40:02.907114Z node 18 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:40:02.907153Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:40:02.907318Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:40:02.909482Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:40:02.909533Z node 18 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [18:750:2728] TestWaitNotification: OK eventTxId 101 2026-01-04T02:40:02.910048Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:40:02.910268Z node 18 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" took 251us result status StatusSuccess 2026-01-04T02:40:02.910808Z node 18 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathDescription { Self { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithDyNumberColumn_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "DyNumber" TypeId: 4866 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 
LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TPQTabletTests::Kafka_Transaction_Several_Partitions_One_Tablet_Successful_Commit [GOOD] >> TPQTabletTests::Huge_ProposeTransacton [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::SingleChannel [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:39:54.602740Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:39:54.736253Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:39:54.736735Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:39:54.736800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:39:55.032275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:55.032404Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:55.093077Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494392158746 != 1767494392158749 2026-01-04T02:39:55.101519Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:55.142687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:55.231981Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:39:55.501491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:55.514258Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:55.616623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:55.922786Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:958:2815], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:55.922941Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:55.923058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:968:2820], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:55.924895Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:974:2825], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:55.925110Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:55.930646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:56.052648Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:972:2823], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:39:56.113016Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1030:2862] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:39:54.494039Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:39:54.624449Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:39:54.624984Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:39:54.625043Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:39:54.934718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:54.934840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:55.002358Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494392158807 != 1767494392158810 2026-01-04T02:39:55.015787Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:55.064216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:55.167806Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:39:55.452350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:55.465615Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:55.578713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:55.922759Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:960:2816], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:55.922931Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:55.923058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:970:2821], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:55.924922Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:976:2826], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:55.925115Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:55.930712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:56.053482Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:974:2824], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:39:56.122914Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1032:2863] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest >> ExternalBlobsMultipleChannels::WithNewColumnFamilyAndCompaction [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:39:54.534007Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:39:54.650279Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:39:54.650691Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:39:54.650745Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:39:54.933064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:54.933197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:55.004777Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494392183275 != 1767494392183278 2026-01-04T02:39:55.016997Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:55.064492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:55.168489Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:39:55.452930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:55.466487Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:55.578572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:55.904679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 100:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:39:56.150590Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1014:2860], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:56.150713Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:56.150817Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:56.151604Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1030:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:56.151687Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:56.156486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:56.291486Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1028:2868], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:39:56.339535Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1086:2907] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_Is_In_DELETED_State_Should_Be_Processed_After_Previous_Complete_Erasure >> TPQTest::TestWriteTimeStampEstimate [GOOD] >> TPQTest::TestWriteTimeLag >> TPQTabletTests::In_Kafka_Txn_Only_Supportive_Partitions_That_Exceeded_Timeout_Should_Be_Deleted >> PQCountersLabeled::Partition >> TPartitionTests::CorrectRange_Rollback [GOOD] |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> TMultiBucketCounter::ManyCounters [GOOD] >> TPQRBDescribes::PartitionLocations >> TPartitionTests::DataTxCalcPredicateOk >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_Is_In_DELETED_State_Should_Be_Processed_After_Previous_Complete_Erasure [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test >> TPQTabletTests::In_Kafka_Txn_Only_Supportive_Partitions_That_Exceeded_Timeout_Should_Be_Deleted [GOOD] >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] >> TConsoleTests::TestCreateServerlessTenantWrongSharedDb [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::Kafka_Transaction_Incoming_Before_Previous_Is_In_DELETED_State_Should_Be_Processed_After_Previous_Complete_Erasure [GOOD] Test command err: 2026-01-04T02:40:00.077835Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:00.146107Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:40:00.149705Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:40:00.150056Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:00.150127Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:00.150188Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-04T02:40:00.150232Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-04T02:40:00.150279Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:00.150339Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:40:00.167714Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:180:2193], now have 1 active actors on pipe 2026-01-04T02:40:00.167879Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:40:00.192646Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:40:00.195230Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:40:00.195348Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:00.196137Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: 
"user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:40:00.196271Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:40:00.196558Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:40:00.196927Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:188:2142] 2026-01-04T02:40:00.197647Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:40:00.197683Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 2026-01-04T02:40:00.197716Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:188:2142] 2026-01-04T02:40:00.197762Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:40:00.197846Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:40:00.198364Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-04T02:40:00.198405Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:00.198434Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:00.198478Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:00.198515Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.198574Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:00.198632Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-04T02:40:00.198677Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-04T02:40:00.198708Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:00.198739Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:40:00.198783Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:00.198959Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:40:00.199020Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-04T02:40:00.199140Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:40:00.199284Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:40:00.201301Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:00.201383Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:00.201430Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:00.201460Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.201488Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.201517Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:00.201544Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:00.201582Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:40:00.201850Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:195:2201], now have 1 active actors on pipe 2026-01-04T02:40:00.202344Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:198:2203], now have 1 active actors on pipe 2026-01-04T02:40:00.202440Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic' requestId: 2026-01-04T02:40:00.202473Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2026-01-04T02:40:00.202512Z node 1 :PQ_TX INFO: pq_impl.cpp:2616: [PQ: 72057594037927937] partition {0, KafkaTransactionWriteId{1, 0}, 100000} for WriteId KafkaTransactionWriteId{1, 0} 2026-01-04T02:40:00.202643Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-04T02:40:00.204389Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-04T02:40:00.205049Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:40:00.205294Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:40:00.205488Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateInit] bootstrapping {0, KafkaTransactionWriteId{1, 0}, 100000} [1:204:2142] 2026-01-04T02:40:00.206152Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitDiskStatusStep 2026-01-04T02:40:00.207064Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, 
KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitMetaStep 2026-01-04T02:40:00.207336Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitInfoRangeStep 2026-01-04T02:40:00.207433Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From M0000100000 to M0000100001 2026-01-04T02:40:00.207617Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitDataRangeStep 2026-01-04T02:40:00.207694Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From D0000100000 to D0000100001 2026-01-04T02:40:00.207880Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100000}:Initializer] Start initializing step TInitDataStep 2026-01-04T ... 72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:04.752229Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:04.752260Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:04.752291Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Try persist 2026-01-04T02:40:04.752328Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:173: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Blobs compaction is stopped 2026-01-04T02:40:04.752796Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5243: [PQ: 72057594037927937] Handle TEvPQ::TEvDeletePartitionDone {0, KafkaTransactionWriteId{1, 0}, 100000} 2026-01-04T02:40:04.752865Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:5237: [PQ: 72057594037927937] DeletePartition {0, KafkaTransactionWriteId{1, 0}, 100000} 2026-01-04T02:40:04.752916Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state WAIT_RS_ACKS 2026-01-04T02:40:04.752971Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State WAIT_RS_ACKS 2026-01-04T02:40:04.753021Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/0 2026-01-04T02:40:04.753061Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4642: [PQ: 72057594037927937] WriteId KafkaTransactionWriteId{1, 0} Partitions.size=0 2026-01-04T02:40:04.753090Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-04T02:40:04.753139Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/0 2026-01-04T02:40:04.753180Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4642: [PQ: 72057594037927937] WriteId KafkaTransactionWriteId{1, 0} Partitions.size=0 2026-01-04T02:40:04.753231Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4674: [PQ: 72057594037927937] add an TxId 67890 to the list for deletion 2026-01-04T02:40:04.753282Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS_ACKS to DELETING 2026-01-04T02:40:04.753327Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3886: [PQ: 72057594037927937] Delete key for TxId 67890 2026-01-04T02:40:04.753415Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send 
TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-04T02:40:04.755805Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic' requestId: 2026-01-04T02:40:04.755896Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2026-01-04T02:40:04.755941Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2547: [PQ: 72057594037927937] GetOwnership request for the next Kafka transaction while previous is being deleted. Saving it till the complete delete of the previous tx.%01 2026-01-04T02:40:04.756071Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-04T02:40:04.756146Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state DELETING 2026-01-04T02:40:04.756191Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State DELETING 2026-01-04T02:40:04.756230Z node 6 :PQ_TX INFO: pq_impl.cpp:4656: [PQ: 72057594037927937] delete WriteId KafkaTransactionWriteId{1, 0} 2026-01-04T02:40:04.756271Z node 6 :PQ_TX INFO: pq_impl.cpp:4619: [PQ: 72057594037927937] delete TxId 67890 2026-01-04T02:40:04.756339Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'topic' requestId: 2026-01-04T02:40:04.756392Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'topic' partition 0 2026-01-04T02:40:04.756459Z node 6 :PQ_TX INFO: pq_impl.cpp:2616: [PQ: 72057594037927937] partition {0, KafkaTransactionWriteId{1, 0}, 100001} for WriteId KafkaTransactionWriteId{1, 0} 2026-01-04T02:40:04.756578Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-04T02:40:04.758622Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-04T02:40:04.759244Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:40:04.759583Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:40:04.759831Z node 6 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateInit] bootstrapping {0, KafkaTransactionWriteId{1, 0}, 100001} [6:241:2142] 2026-01-04T02:40:04.760676Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitDiskStatusStep 2026-01-04T02:40:04.761775Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitMetaStep 2026-01-04T02:40:04.762055Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitInfoRangeStep 2026-01-04T02:40:04.762169Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From M0000100001 to M0000100002 2026-01-04T02:40:04.762382Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitDataRangeStep 2026-01-04T02:40:04.762472Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. 
From D0000100001 to D0000100002 2026-01-04T02:40:04.762663Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitDataStep 2026-01-04T02:40:04.762720Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-04T02:40:04.762764Z node 6 :PERSQUEUE INFO: partition_init.cpp:1016: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:40:04.762822Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-04T02:40:04.762865Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TDeleteKeysStep 2026-01-04T02:40:04.762915Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:40:04.762972Z node 6 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:{0, KafkaTransactionWriteId{1, 0}, 100001}:Initializer] Initializing completed. 2026-01-04T02:40:04.763026Z node 6 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateInit] init complete for topic 'topic' partition {0, KafkaTransactionWriteId{1, 0}, 100001} generation 2 [6:241:2142] 2026-01-04T02:40:04.763089Z node 6 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateInit] SYNC INIT topic topic partitition {0, KafkaTransactionWriteId{1, 0}, 100001} so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:40:04.763140Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:40:04.763179Z node 6 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process pending events. Count 0 2026-01-04T02:40:04.763221Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-04T02:40:04.763268Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:04.763308Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:04.763357Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:04.763406Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2026-01-04T02:40:04.763470Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process MLP pending events. 
Count 0 2026-01-04T02:40:04.763641Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] No data for blobs compaction 2026-01-04T02:40:04.763817Z node 6 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie -=[ 0wn3r ]=-|ca1f36a6-f5cf5576-b8a9f3d4-4438d6a7_0 generated for partition {0, KafkaTransactionWriteId{1, 0}, 100001} topic 'topic' owner -=[ 0wn3r ]=- 2026-01-04T02:40:04.763893Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-04T02:40:04.763944Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:40:04.764001Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:04.764051Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:04.764105Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:40:04.764161Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:04.764196Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Batch completed (1) 2026-01-04T02:40:04.764237Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] Try persist 2026-01-04T02:40:04.764289Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100001}][StateIdle] TPartition::ReplyOwnerOk. 
Partition: {0, KafkaTransactionWriteId{1, 0}, 100001} 2026-01-04T02:40:04.764412Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 5 |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTest::TestPQPartialRead >> TPQTest::TestReadRuleVersions [GOOD] >> TPQTest::TestSeveralOwners ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersSimple::SupportivePartitionCountersPersist [GOOD] Test command err: 2026-01-04T02:39:54.138946Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:54.192259Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:54.192337Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:54.192389Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:54.192431Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:54.205420Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:54.220873Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:205:2218] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:39:54.221643Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:213:2166] 2026-01-04T02:39:54.223306Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:213:2166] 2026-01-04T02:39:54.224741Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:214:2166] 2026-01-04T02:39:54.225766Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:214:2166] 2026-01-04T02:39:54.230779Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:39:54.231060Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|33c4b4af-9b1333a3-ae04ed4b-6a94e69c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:39:54.236082Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:39:54.236341Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie 
default|8b0b424b-9f86ed92-8f9bc57b-4d7f258f_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:39:54.240672Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:39:54.240958Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|79bf56ce-1042a521-b2e8a0d-59c59de0_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:39:54.245221Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:39:54.245462Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|44348e31-30f755d0-2e2e0b1e-36d15d6c_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:39:54.246697Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:39:54.246955Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|eddbdf97-414fc66c-1ae84aae-6350f516_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 ACTUAL:
subsystem=SLI:
    Account=asdfgs:
        sensor=WriteBigLatency: 0
        sensor=WritesTotal: 5
    Account=total:
        sensor=WriteBigLatency: 0
        sensor=WritesTotal: 5
    sensor=Write:
        Account=asdfgs:
            Duration=10000ms: 0
            Duration=1000ms: 0
            Duration=100ms: 3
            Duration=1500ms: 0
            Duration=2000ms: 0
            Duration=200ms: 1
            Duration=30000ms: 0
            Duration=5000ms: 0
            Duration=500ms: 1
            Duration=550ms: 0
            Duration=99999999ms: 0
        Account=total:
            Duration=10000ms: 0
            Duration=1000ms: 0
            Duration=100ms: 3
            Duration=1500ms: 0
            Duration=2000ms: 0
            Duration=200ms: 1
            Duration=30000ms: 0
            Duration=5000ms: 0
            Duration=500ms: 1
            Duration=550ms: 0
            Duration=99999999ms: 0
subsystem=partitionWriteQuotaWait:
    Account=asdfgs:
        Producer=asdfgs:
            Topic=asdfgs--topic:
                TopicPath=asdfgs/topic:
                    OriginDC=Dc1:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
                    OriginDC=cluster:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
        Producer=total:
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
    Account=total:
        Producer=total:
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        sensor=PartitionWriteQuotaWaitOriginal:
                            Interval=0ms: 50
                            Interval=10000ms: 0
                            Interval=1000ms: 0
                            Interval=100ms: 0
                            Interval=10ms: 0
                            Interval=1ms: 0
                            Interval=20ms: 0
                            Interval=2500ms: 0
                            Interval=5000ms: 0
                            Interval=500ms: 0
                            Interval=50ms: 0
                            Interval=5ms: 0
                            Interval=9999999ms: 0
subsystem=readSession:
    Account=asdfgs:
        Producer=asdfgs:
            Topic=asdfgs--topic:
                TopicPath=asdfgs/topic:
                    OriginDC=Dc1:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
                    OriginDC=cluster:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
        Producer=total:
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
    Account=total:
        Producer=total:
            Topic=total:
                TopicPath=total:
                    OriginDC=cluster:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=BytesRead: 0
                                sensor=MessagesRead: 0
subsystem=readTimeLag:
    Account=asdfgs:
        Producer=asdfgs:
            Topic=asdfgs--topic:
                TopicPath=asdfgs/topic:
                    OriginDC=Dc1:
                        Client=user:
                            ConsumerPath=shared/user:
                                sensor=TimeLags:
                                    Interval=10000ms: 0
                                    Interval=1000ms: 0
                                    Interval=100ms: 0
                                    Interval=10ms: 0
                                    Interval=180000ms: 0
                                    Interval=2000ms: 0
                                    Interval=200ms: 0
                                    Interval=20ms: 0
                                    Interval=30000ms: 0
                                    Interval=5000ms: 0
                                    Interval=500ms: 0
                                    Interval=50ms: 0
                                    Interval=60000ms: 0
                                    Interval=9999999ms: 0
                    OriginDC=cluster:
                        Client=user:

...
5:2218] txId 12345 config:
CacheSize: 10485760
PartitionConfig {
  MaxCountInPartition: 20000000
  MaxSizeInPartition: 104857600
  LifetimeSeconds: 0
  LowWatermark: 6291456
  SourceIdLifetimeSeconds: 3600
  MaxWriteInflightSize: 90000000
}
PartitionIds: 0
PartitionIds: 1
TopicName: "topic"
Version: 39
LocalDC: true
Topic: "topic"
TopicPath: "/topic"
YcCloudId: "somecloud"
YcFolderId: "somefolder"
YdbDatabaseId: "PQ"
YdbDatabasePath: "/Root/PQ"
Partitions {
  PartitionId: 0
}
Partitions {
  PartitionId: 1
}
FederationAccount: "federationAccount"
MeteringMode: METERING_MODE_RESERVED_CAPACITY
AllPartitions {
  PartitionId: 0
}
AllPartitions {
  PartitionId: 1
}
Consumers {
  Name: "user"
  ReadFromTimestampsMs: 0
  Generation: 39
}
2026-01-04T02:40:03.246294Z node 15 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [15:213:2166]
2026-01-04T02:40:03.247056Z node 15 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [15:213:2166]
2026-01-04T02:40:03.247772Z node 15 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [15:214:2166]
2026-01-04T02:40:03.248333Z node 15 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 2 [15:214:2166]
2026-01-04T02:40:03.253206Z node 15 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'topic' partition: 0 messageNo: 0 requestId:  error: new GetOwnership request needed for owner 
2026-01-04T02:40:03.253544Z node 15 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ddd67f36-c0428a4d-2d1efd08-2e9af488_0 generated for partition 0 topic 'topic' owner default
2026-01-04T02:40:03.259109Z node 15 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId:  error: new GetOwnership request needed for owner 
2026-01-04T02:40:03.259414Z node 15 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|5ecea779-fb8a02a7-5ac7567b-e84e5b4a_1 generated for partition 0 topic 'topic' owner default
2026-01-04T02:40:03.264825Z node 15 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId:  error: new GetOwnership request needed for owner 
2026-01-04T02:40:03.265232Z node 15 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|7c4757ff-abba2753-78723a7f-b5bf46ff_2 generated for partition 0 topic 'topic' owner default
2026-01-04T02:40:03.270222Z node 15 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'topic' partition: 0 messageNo: 1 requestId:  error: new GetOwnership request needed for owner 
2026-01-04T02:40:03.270508Z node 15 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|8b44528a-3341eada-e2304a3b-ce1519e5_3 generated for partition 0 topic 'topic' owner default
Got start offset = 0
COUNTERS: 
subsystem=SLI:
    Account=federationAccount:
        name=WriteBigLatency: 0
        name=WritesTotal: 5
    Account=total:
        name=WriteBigLatency: 0
        name=WritesTotal: 5
    sensor=Write:
        Account=federationAccount:
            Duration=10000ms: 0
            Duration=1000ms: 0
            Duration=100ms: 3
            Duration=1500ms: 0
            Duration=2000ms: 0
            Duration=200ms: 1
            Duration=30000ms: 0
            Duration=5000ms: 0
            Duration=500ms: 1
            Duration=550ms: 0
            Duration=99999999ms: 0
        Account=total:
            Duration=10000ms: 0
            Duration=1000ms: 0
            Duration=100ms: 3
            Duration=1500ms: 0
            Duration=2000ms: 0
            Duration=200ms: 1
            Duration=30000ms: 0
            Duration=5000ms: 0
            Duration=500ms: 1
            Duration=550ms: 0
            Duration=99999999ms: 0
2026-01-04T02:40:03.586432Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:03.628248Z node 16 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:03.628306Z node 16 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:03.628361Z node 16 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:03.628415Z node 16 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:40:03.644773Z node 16 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:03.645453Z node 16 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 40 actor [16:205:2218] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 30720 BurstSize: 30720 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 40 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 40 } 2026-01-04T02:40:03.646122Z node 16 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [16:214:2166] 2026-01-04T02:40:03.649065Z node 16 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [16:214:2166] 2026-01-04T02:40:03.654516Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:03.654913Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|9ec859e8-2620b7de-af608fa3-3faf0b3e_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:230:2166] Captured TEvRequest, cmd write size: 4 2026-01-04T02:40:03.661894Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:03.662136Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|3636e792-17adb003-ff819fca-3f81ade2_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:03.683359Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:03.724647Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:03.745556Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:03.756130Z node 16 :TABLET_RESOLVER INFO: 
tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:03.797335Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:03.838543Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:03.869397Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:230:2166] Captured TEvRequest, cmd write size: 4 2026-01-04T02:40:04.000918Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:04.001275Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1bfe9062-fb51cd65-89864bc6-84cb7f4c_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:04.032833Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:04.074168Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:230:2166] Captured TEvRequest, cmd write size: 4 2026-01-04T02:40:04.277155Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:04.277503Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|3a4d37b7-f51e4de7-10846c2d-d9b49af8_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:04.329588Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:04.340149Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:230:2166] Captured TEvRequest, cmd write size: 4 2026-01-04T02:40:04.538657Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:04.538904Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1b9dd173-59099fc3-7b717abb-664f48f7_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:04.622754Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:230:2166] Captured TEvRequest, cmd write size: 4 2026-01-04T02:40:04.797332Z node 16 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 
2026-01-04T02:40:04.797773Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|eeea30a3-163c29cc-60774348-d522192c_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:04.851144Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:04.933942Z node 16 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 16 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Captured TEvRequest, cmd write size: 4 Captured kesus quota request event from [16:230:2166] Captured TEvRequest, cmd write size: 4 Got start offset = 0 |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> PQCountersLabeled::Partition [GOOD] >> PQCountersLabeled::PartitionFirstClass >> TPartitionTests::CorrectRange_Commit >> TPQTest::TestCmdReadWithLastOffset >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline >> TPQTest::TestWriteTimeLag [GOOD] >> TPQTest::The_Value_Of_CreationUnixTime_Must_Not_Decrease ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::In_Kafka_Txn_Only_Supportive_Partitions_That_Exceeded_Timeout_Should_Be_Deleted [GOOD] Test command err: 2026-01-04T02:40:00.534439Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:00.596141Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:40:00.600066Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:40:00.600406Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:00.600486Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:00.600531Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-04T02:40:00.600574Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-04T02:40:00.600626Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:00.600697Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:40:00.617846Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:180:2193], now have 1 active actors on pipe 2026-01-04T02:40:00.618004Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:40:00.639241Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } 
FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:40:00.642326Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:40:00.642458Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:00.644094Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:40:00.644276Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:40:00.644364Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:40:00.644958Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:40:00.645392Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:188:2142] 2026-01-04T02:40:00.646355Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:40:00.646403Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:0:Initializer] Initializing completed. 
2026-01-04T02:40:00.646448Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 2 [1:188:2142] 2026-01-04T02:40:00.646508Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:40:00.646598Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:40:00.647169Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-04T02:40:00.647222Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:00.647260Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:00.647314Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:00.647351Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.647388Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:00.647457Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit request with generation 1 2026-01-04T02:40:00.647511Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user reinit with generation 1 done 2026-01-04T02:40:00.647553Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:00.647605Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:40:00.647663Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:00.647918Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:40:00.647980Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:40:00.648175Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:40:00.648459Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-04T02:40:00.648945Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:40:00.648978Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [topic:1:Initializer] Initializing completed. 
2026-01-04T02:40:00.649013Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 2 [1:190:2142] 2026-01-04T02:40:00.649039Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:40:00.649074Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:40:00.649295Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. Count 0 2026-01-04T02:40:00.649315Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:40:00.649332Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:00.649358Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:00.649378Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:00.649397Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:00.649424Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit request with generation 1 2026-01-04T02:40:00.649471Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user reinit with generation 1 done 2026-01-04T02:40:00.649503Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:00.649521Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-04T02:40:00.649552Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-04T02:40:00.649639Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'topic' partition 1 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:40:00.649668Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:40:00.649790Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:40:00.649972Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:40:00.650076Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:40:00.650201Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-04T02:40:00.653338Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdl ... 
:40:04.775395Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Try persist 2026-01-04T02:40:04.775912Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Add new write blob: topic 'topic' partition {0, KafkaTransactionWriteId{2, 0}, 100001} compactOffset 0,1 HeadOffset 0 endOffset 0 curOffset 1 D0000100001_00000000000000000000_00000_0000000001_00000? size 64 WTime 1800332 2026-01-04T02:40:04.776156Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:40:04.776306Z node 6 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 100001 offset 0 partNo 0 count 1 size 64 2026-01-04T02:40:04.778263Z node 6 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 100001 offset 0 count 1 size 64 actorID [6:138:2142] 2026-01-04T02:40:04.778357Z node 6 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 100001 offset 0 partno 0 count 1 parts 0 suffix '63' size 64 2026-01-04T02:40:04.778401Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:04.778449Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] TPartition::HandleWriteResponse writeNewSize# 11 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:04.778499Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] TPartition::ReplyWrite. Partition: {0, KafkaTransactionWriteId{2, 0}, 100001} 2026-01-04T02:40:04.778541Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Answering for message sourceid: '2', Topic: 'topic', Partition: {0, KafkaTransactionWriteId{2, 0}, 100001}, SeqNo: 0, partNo: 0, Offset: 0 is stored on disk 2026-01-04T02:40:04.778686Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-04T02:40:04.778720Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:04.778750Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:04.778781Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:04.778816Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Try persist 2026-01-04T02:40:04.778856Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] No data for blobs compaction 2026-01-04T02:40:04.778909Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1199: [PQ: 72057594037927937] Topic 'topic' counters. 
CacheSize 128 CachedBlobs 2 2026-01-04T02:40:04.778950Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'topic' partition: 0 messageNo: 0 requestId: cookie: 123 2026-01-04T02:40:04.779393Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3103: [PQ: 72057594037927937] Transaction for Kafka producer {Id: 1, Epoch: 0} is expired 2026-01-04T02:40:04.779434Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5321: [PQ: 72057594037927937] send TEvPQ::TEvDeletePartition to partition {0, KafkaTransactionWriteId{1, 0}, 100000} 2026-01-04T02:40:04.779723Z node 6 :PERSQUEUE DEBUG: partition.cpp:4395: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Handle TEvPQ::TEvDeletePartition 2026-01-04T02:40:04.779939Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:40:04.779978Z node 6 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from D0000100000(+) to D0000100001(-) 2026-01-04T02:40:04.781843Z node 6 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 100000 offset 0 count 1 actorID [6:138:2142] 2026-01-04T02:40:04.781934Z node 6 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 100000 offset 0 partno 0 count 1 parts 0 suffix '63' size 64 2026-01-04T02:40:04.781988Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:04.782087Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] No data for blobs compaction 2026-01-04T02:40:04.782123Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Process user action and tx events 2026-01-04T02:40:04.782154Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:04.782182Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:04.782212Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:04.782241Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] Try persist 2026-01-04T02:40:04.782277Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{0, KafkaTransactionWriteId{1, 0}, 100000}][StateIdle] No data for blobs compaction 2026-01-04T02:40:04.782606Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1199: [PQ: 72057594037927937] Topic 'topic' counters. 
CacheSize 64 CachedBlobs 1 2026-01-04T02:40:04.782688Z node 6 :PQ_TX DEBUG: pq_impl.cpp:5243: [PQ: 72057594037927937] Handle TEvPQ::TEvDeletePartitionDone {0, KafkaTransactionWriteId{1, 0}, 100000} 2026-01-04T02:40:04.782725Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:5237: [PQ: 72057594037927937] DeletePartition {0, KafkaTransactionWriteId{1, 0}, 100000} 2026-01-04T02:40:04.782765Z node 6 :PQ_TX INFO: pq_impl.cpp:4656: [PQ: 72057594037927937] delete WriteId KafkaTransactionWriteId{1, 0} 2026-01-04T02:40:04.782825Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-04T02:40:04.784446Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-04T02:40:04.805395Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:04.836495Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:04.836570Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:04.836616Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:04.836662Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:04.836707Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:04.942213Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:40:05.211073Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-04T02:40:05.211151Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:05.211190Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:05.211230Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:05.211268Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Try persist 2026-01-04T02:40:05.234391Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] No data for blobs compaction 2026-01-04T02:40:05.265541Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:05.286405Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:05.286477Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, 
PendingWrites: 0 2026-01-04T02:40:05.286507Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:05.286540Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:05.286568Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:05.286635Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:05.297083Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx events 2026-01-04T02:40:05.297144Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:05.297187Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:05.297219Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:05.297253Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][{0, KafkaTransactionWriteId{2, 0}, 100001}][StateIdle] Try persist 2026-01-04T02:40:05.307530Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPQTest::TestSeveralOwners [GOOD] >> TPQTest::TestReserveBytes >> TMeteringSink::FlushResourcesReservedV1 [GOOD] >> TMeteringSink::FlushThroughputV1 [GOOD] >> TMeteringSink::FlushStorageV1 [GOOD] >> TMeteringSink::UsedStorageV1 [GOOD] >> TMeteringSink::UnusedStorageV1 [GOOD] >> TMultiBucketCounter::InsertAndUpdate [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace_IgnoreQuotaDeadline [GOOD] >> TPartitionTests::CorrectRange_Commit [GOOD] >> TPartitionTests::CorrectRange_Multiple_Consumers >> TSourceIdTests::ProtoSourceIdStorageParseAndAdd [GOOD] >> TSourceIdTests::HeartbeatEmitter [GOOD] >> TSourceIdTests::SourceIdMinSeqNo [GOOD] >> TSourceIdTests::ExpensiveCleanup >> TConsoleTests::TestCreateTenantWrongNameExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantWrongPool |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TMultiBucketCounter::InsertAndUpdate [GOOD] |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionTests::UserActCount >> TSourceIdTests::ExpensiveCleanup [GOOD] >> TPartitionTests::CorrectRange_Multiple_Consumers [GOOD] >> TPartitionTests::ConflictingTxIsAborted >> TPQTest::DirectReadOldPipe >> TPartitionTests::TestBatchingWithChangeConfig [GOOD] >> TPartitionTests::TEvTxCalcPredicate_With_Conflicts [GOOD] >> TPartitionTests::ConflictingTxIsAborted [GOOD] >> TPartitionTests::TestBatchingWithProposeConfig >> Compression::WriteRAW [GOOD] >> Compression::WriteGZIP >> TPartitionTests::ConflictingTxProceedAfterRollback ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TSourceIdTests::ExpensiveCleanup [GOOD] Test command err: 
2026-01-04T02:40:06.803078Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:06.867089Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:06.867172Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:06.867215Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:06.867291Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:40:06.883995Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:40:06.884432Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:40:06.884887Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:181:2194] 2026-01-04T02:40:06.885917Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:40:06.885982Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [Root/PQ/rt3.dc1--account--topic:1:Initializer] Initializing completed. 2026-01-04T02:40:06.886025Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:181:2194] 2026-01-04T02:40:06.886072Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:40:06.886135Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:40:06.886690Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. 
Count 0 2026-01-04T02:40:06.886767Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:40:06.886817Z node 1 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ChangeConfig]) 2026-01-04T02:40:06.886876Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:06.886906Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:06.886972Z node 1 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ChangeConfig]) 2026-01-04T02:40:06.887025Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:06.887053Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-04T02:40:06.887105Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-04T02:40:06.887352Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:40:06.887422Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:40:06.887531Z node 1 :PERSQUEUE INFO: partition.cpp:4331: [72057594037927937][Partition][1][StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 1 2026-01-04T02:40:06.887621Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|30dd76fc-428b3c27-7e468007-5ea0aec_0 generated for partition 1 topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 2026-01-04T02:40:06.887667Z node 1 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][1][StateIdle] Writing. 
Can't process user action and tx events 2026-01-04T02:40:06.887845Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction Send disk status response with cookie: 0 2026-01-04T02:40:06.887999Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][1][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:06.888123Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:40:06.888195Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][1][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:06.888244Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:40:06.888283Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:40:06.888355Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:06.888403Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:06.888439Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:40:06.888493Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:06.888522Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-04T02:40:06.888555Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-04T02:40:06.888618Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][1][StateIdle] TPartition::ReplyOwnerOk. Partition: 1 2026-01-04T02:40:06.888674Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-04T02:40:06.888883Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][1][StateIdle] Received TPartition::TEvWrite 2026-01-04T02:40:06.888952Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1792: [72057594037927937][Partition][1][StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 24. Cookie: 1 2026-01-04T02:40:06.889030Z node 1 :PERSQUEUE DEBUG: partition.cpp:4259: [72057594037927937][Partition][1][StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". 
Partition: 1: Cookie: 1 2026-01-04T02:40:06.889069Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:40:06.889099Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:40:06.889174Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:06.889225Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:06.889264Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:40:06.889350Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 0 partNo 0 2026-01-04T02:40:06.890240Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 0 partNo 0 FormedBlobsCount 0 NewHead: Offset 100 PartNo 0 PackedSize 120 count 1 nextOffset 101 batches 1 2026-01-04T02:40:06.890305Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:06.890338Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-04T02:40:06.890376Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-04T02:40:06.890873Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72057594037927937][Partition][1][StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 100,1 HeadOffset 0 endOffset 0 curOffset 101 d0000000001_00000000000000000100_00000_0000000001_00000? size 106 WTime 127 2026-01-04T02:40:06.891155Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][1][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:06.911819Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:06.942683Z node 1 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][1][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:06.942754Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][1][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-04T02:40:06.942858Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][1][StateIdle] TPartition::HandleWriteResponse writeNewSize# 24 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:06.942937Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][1][StateIdle] TPartition::ReplyWrite. 
Partition: 1 2026-01-04T02:40:06.943001Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][1][StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 0, partNo: 0, Offset: 100 is stored on disk 2026-01-04T02:40:06.943181Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 user client-1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 100 ReadingTimestamp 0 rrg 0 2026-01-04T02:40:06.943209Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:40:06.943236Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:06.943261Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:06.943294Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:06.943319Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partit ... 2 Iteration 143 Iteration 144 Iteration 145 Iteration 146 Iteration 147 Iteration 148 Iteration 149 Iteration 150 Iteration 151 Iteration 152 Iteration 153 Iteration 154 Iteration 155 Iteration 156 Iteration 157 Iteration 158 Iteration 159 Iteration 160 Iteration 161 Iteration 162 Iteration 163 Iteration 164 Iteration 165 Iteration 166 Iteration 167 Iteration 168 Iteration 169 Iteration 170 Iteration 171 Iteration 172 Iteration 173 Iteration 174 Iteration 175 Iteration 176 Iteration 177 Iteration 178 Iteration 179 Iteration 180 Iteration 181 Iteration 182 Iteration 183 Iteration 184 Iteration 185 Iteration 186 Iteration 187 Iteration 188 Iteration 189 Iteration 190 Iteration 191 Iteration 192 Iteration 193 Iteration 194 Iteration 195 Iteration 196 Iteration 197 Iteration 198 Iteration 199 Iteration 200 Iteration 201 Iteration 202 Iteration 203 Iteration 204 Iteration 205 Iteration 206 Iteration 207 Iteration 208 Iteration 209 Iteration 210 Iteration 211 Iteration 212 Iteration 213 Iteration 214 Iteration 215 Iteration 216 Iteration 217 Iteration 218 Iteration 219 Iteration 220 Iteration 221 Iteration 222 Iteration 223 Iteration 224 Iteration 225 Iteration 226 Iteration 227 Iteration 228 Iteration 229 Iteration 230 Iteration 231 Iteration 232 Iteration 233 Iteration 234 Iteration 235 Iteration 236 Iteration 237 Iteration 238 Iteration 239 Iteration 240 Iteration 241 Iteration 242 Iteration 243 Iteration 244 Iteration 245 Iteration 246 Iteration 247 Iteration 248 Iteration 249 Iteration 250 Iteration 251 Iteration 252 Iteration 253 Iteration 254 Iteration 255 Iteration 256 Iteration 257 Iteration 258 Iteration 259 Iteration 260 Iteration 261 Iteration 262 Iteration 263 Iteration 264 Iteration 265 Iteration 266 Iteration 267 Iteration 268 Iteration 269 Iteration 270 Iteration 271 Iteration 272 Iteration 273 Iteration 274 Iteration 275 Iteration 276 Iteration 277 Iteration 278 Iteration 279 Iteration 280 Iteration 281 Iteration 282 Iteration 283 Iteration 284 Iteration 285 Iteration 286 Iteration 287 Iteration 288 Iteration 289 Iteration 290 Iteration 291 Iteration 292 Iteration 293 Iteration 294 Iteration 295 Iteration 296 Iteration 297 Iteration 298 Iteration 299 Iteration 300 Iteration 301 Iteration 302 
Iteration 303 Iteration 304 Iteration 305 Iteration 306 Iteration 307 Iteration 308 Iteration 309 Iteration 310 Iteration 311 Iteration 312 Iteration 313 Iteration 314 Iteration 315 Iteration 316 Iteration 317 Iteration 318 Iteration 319 Iteration 320 Iteration 321 Iteration 322 Iteration 323 Iteration 324 Iteration 325 Iteration 326 Iteration 327 Iteration 328 Iteration 329 Iteration 330 Iteration 331 Iteration 332 Iteration 333 Iteration 334 Iteration 335 Iteration 336 Iteration 337 Iteration 338 Iteration 339 Iteration 340 Iteration 341 Iteration 342 Iteration 343 Iteration 344 Iteration 345 Iteration 346 Iteration 347 Iteration 348 Iteration 349 Iteration 350 Iteration 351 Iteration 352 Iteration 353 Iteration 354 Iteration 355 Iteration 356 Iteration 357 Iteration 358 Iteration 359 Iteration 360 Iteration 361 Iteration 362 Iteration 363 Iteration 364 Iteration 365 Iteration 366 Iteration 367 Iteration 368 Iteration 369 Iteration 370 Iteration 371 Iteration 372 Iteration 373 Iteration 374 Iteration 375 Iteration 376 Iteration 377 Iteration 378 Iteration 379 Iteration 380 Iteration 381 Iteration 382 Iteration 383 Iteration 384 Iteration 385 Iteration 386 Iteration 387 Iteration 388 Iteration 389 Iteration 390 Iteration 391 Iteration 392 Iteration 393 Iteration 394 Iteration 395 Iteration 396 Iteration 397 Iteration 398 Iteration 399 Iteration 400 Iteration 401 Iteration 402 Iteration 403 Iteration 404 Iteration 405 Iteration 406 Iteration 407 Iteration 408 Iteration 409 Iteration 410 Iteration 411 Iteration 412 Iteration 413 Iteration 414 Iteration 415 Iteration 416 Iteration 417 Iteration 418 Iteration 419 Iteration 420 Iteration 421 Iteration 422 Iteration 423 Iteration 424 Iteration 425 Iteration 426 Iteration 427 Iteration 428 Iteration 429 Iteration 430 Iteration 431 Iteration 432 Iteration 433 Iteration 434 Iteration 435 Iteration 436 Iteration 437 Iteration 438 Iteration 439 Iteration 440 Iteration 441 Iteration 442 Iteration 443 Iteration 444 Iteration 445 Iteration 446 Iteration 447 Iteration 448 Iteration 449 Iteration 450 Iteration 451 Iteration 452 Iteration 453 Iteration 454 Iteration 455 Iteration 456 Iteration 457 Iteration 458 Iteration 459 Iteration 460 Iteration 461 Iteration 462 Iteration 463 Iteration 464 Iteration 465 Iteration 466 Iteration 467 Iteration 468 Iteration 469 Iteration 470 Iteration 471 Iteration 472 Iteration 473 Iteration 474 Iteration 475 Iteration 476 Iteration 477 Iteration 478 Iteration 479 Iteration 480 Iteration 481 Iteration 482 Iteration 483 Iteration 484 Iteration 485 Iteration 486 Iteration 487 Iteration 488 Iteration 489 Iteration 490 Iteration 491 Iteration 492 Iteration 493 Iteration 494 Iteration 495 Iteration 496 Iteration 497 Iteration 498 Iteration 499 Iteration 500 Iteration 501 Iteration 502 Iteration 503 Iteration 504 Iteration 505 Iteration 506 Iteration 507 Iteration 508 Iteration 509 Iteration 510 Iteration 511 Iteration 512 Iteration 513 Iteration 514 Iteration 515 Iteration 516 Iteration 517 Iteration 518 Iteration 519 Iteration 520 Iteration 521 Iteration 522 Iteration 523 Iteration 524 Iteration 525 Iteration 526 Iteration 527 Iteration 528 Iteration 529 Iteration 530 Iteration 531 Iteration 532 Iteration 533 Iteration 534 Iteration 535 Iteration 536 Iteration 537 Iteration 538 Iteration 539 Iteration 540 Iteration 541 Iteration 542 Iteration 543 Iteration 544 Iteration 545 Iteration 546 Iteration 547 Iteration 548 Iteration 549 Iteration 550 Iteration 551 Iteration 552 Iteration 553 Iteration 554 Iteration 555 Iteration 
556 Iteration 557 Iteration 558 Iteration 559 Iteration 560 Iteration 561 Iteration 562 Iteration 563 Iteration 564 Iteration 565 Iteration 566 Iteration 567 Iteration 568 Iteration 569 Iteration 570 Iteration 571 Iteration 572 Iteration 573 Iteration 574 Iteration 575 Iteration 576 Iteration 577 Iteration 578 Iteration 579 Iteration 580 Iteration 581 Iteration 582 Iteration 583 Iteration 584 Iteration 585 Iteration 586 Iteration 587 Iteration 588 Iteration 589 Iteration 590 Iteration 591 Iteration 592 Iteration 593 Iteration 594 Iteration 595 Iteration 596 Iteration 597 Iteration 598 Iteration 599 Iteration 600 Iteration 601 Iteration 602 Iteration 603 Iteration 604 Iteration 605 Iteration 606 Iteration 607 Iteration 608 Iteration 609 Iteration 610 Iteration 611 Iteration 612 Iteration 613 Iteration 614 Iteration 615 Iteration 616 Iteration 617 Iteration 618 Iteration 619 Iteration 620 Iteration 621 Iteration 622 Iteration 623 Iteration 624 Iteration 625 Iteration 626 Iteration 627 Iteration 628 Iteration 629 Iteration 630 Iteration 631 Iteration 632 Iteration 633 Iteration 634 Iteration 635 Iteration 636 Iteration 637 Iteration 638 Iteration 639 Iteration 640 Iteration 641 Iteration 642 Iteration 643 Iteration 644 Iteration 645 Iteration 646 Iteration 647 Iteration 648 Iteration 649 Iteration 650 Iteration 651 Iteration 652 Iteration 653 Iteration 654 Iteration 655 Iteration 656 Iteration 657 Iteration 658 Iteration 659 Iteration 660 Iteration 661 Iteration 662 Iteration 663 Iteration 664 Iteration 665 Iteration 666 Iteration 667 Iteration 668 Iteration 669 Iteration 670 Iteration 671 Iteration 672 Iteration 673 Iteration 674 Iteration 675 Iteration 676 Iteration 677 Iteration 678 Iteration 679 Iteration 680 Iteration 681 Iteration 682 Iteration 683 Iteration 684 Iteration 685 Iteration 686 Iteration 687 Iteration 688 Iteration 689 Iteration 690 Iteration 691 Iteration 692 Iteration 693 Iteration 694 Iteration 695 Iteration 696 Iteration 697 Iteration 698 Iteration 699 Iteration 700 Iteration 701 Iteration 702 Iteration 703 Iteration 704 Iteration 705 Iteration 706 Iteration 707 Iteration 708 Iteration 709 Iteration 710 Iteration 711 Iteration 712 Iteration 713 Iteration 714 Iteration 715 Iteration 716 Iteration 717 Iteration 718 Iteration 719 Iteration 720 Iteration 721 Iteration 722 Iteration 723 Iteration 724 Iteration 725 Iteration 726 Iteration 727 Iteration 728 Iteration 729 Iteration 730 Iteration 731 Iteration 732 Iteration 733 Iteration 734 Iteration 735 Iteration 736 Iteration 737 Iteration 738 Iteration 739 Iteration 740 Iteration 741 Iteration 742 Iteration 743 Iteration 744 Iteration 745 Iteration 746 Iteration 747 Iteration 748 Iteration 749 Iteration 750 Iteration 751 Iteration 752 Iteration 753 Iteration 754 Iteration 755 Iteration 756 Iteration 757 Iteration 758 Iteration 759 Iteration 760 Iteration 761 Iteration 762 Iteration 763 Iteration 764 Iteration 765 Iteration 766 Iteration 767 Iteration 768 Iteration 769 Iteration 770 Iteration 771 Iteration 772 Iteration 773 Iteration 774 Iteration 775 Iteration 776 Iteration 777 Iteration 778 Iteration 779 Iteration 780 Iteration 781 Iteration 782 Iteration 783 Iteration 784 Iteration 785 Iteration 786 Iteration 787 Iteration 788 Iteration 789 Iteration 790 Iteration 791 Iteration 792 Iteration 793 Iteration 794 Iteration 795 Iteration 796 Iteration 797 Iteration 798 Iteration 799 Iteration 800 Iteration 801 Iteration 802 Iteration 803 Iteration 804 Iteration 805 Iteration 806 Iteration 807 Iteration 808 Iteration 809 
Iteration 810 Iteration 811 Iteration 812 Iteration 813 Iteration 814 Iteration 815 Iteration 816 Iteration 817 Iteration 818 Iteration 819 Iteration 820 Iteration 821 Iteration 822 Iteration 823 Iteration 824 Iteration 825 Iteration 826 Iteration 827 Iteration 828 Iteration 829 Iteration 830 Iteration 831 Iteration 832 Iteration 833 Iteration 834 Iteration 835 Iteration 836 Iteration 837 Iteration 838 Iteration 839 Iteration 840 Iteration 841 Iteration 842 Iteration 843 Iteration 844 Iteration 845 Iteration 846 Iteration 847 Iteration 848 Iteration 849 Iteration 850 Iteration 851 Iteration 852 Iteration 853 Iteration 854 Iteration 855 Iteration 856 Iteration 857 Iteration 858 Iteration 859 Iteration 860 Iteration 861 Iteration 862 Iteration 863 Iteration 864 Iteration 865 Iteration 866 Iteration 867 Iteration 868 Iteration 869 Iteration 870 Iteration 871 Iteration 872 Iteration 873 Iteration 874 Iteration 875 Iteration 876 Iteration 877 Iteration 878 Iteration 879 Iteration 880 Iteration 881 Iteration 882 Iteration 883 Iteration 884 Iteration 885 Iteration 886 Iteration 887 Iteration 888 Iteration 889 Iteration 890 Iteration 891 Iteration 892 Iteration 893 Iteration 894 Iteration 895 Iteration 896 Iteration 897 Iteration 898 Iteration 899 Iteration 900 Iteration 901 Iteration 902 Iteration 903 Iteration 904 Iteration 905 Iteration 906 Iteration 907 Iteration 908 Iteration 909 Iteration 910 Iteration 911 Iteration 912 Iteration 913 Iteration 914 Iteration 915 Iteration 916 Iteration 917 Iteration 918 Iteration 919 Iteration 920 Iteration 921 Iteration 922 Iteration 923 Iteration 924 Iteration 925 Iteration 926 Iteration 927 Iteration 928 Iteration 929 Iteration 930 Iteration 931 Iteration 932 Iteration 933 Iteration 934 Iteration 935 Iteration 936 Iteration 937 Iteration 938 Iteration 939 Iteration 940 Iteration 941 Iteration 942 Iteration 943 Iteration 944 Iteration 945 Iteration 946 Iteration 947 Iteration 948 Iteration 949 Iteration 950 Iteration 951 Iteration 952 Iteration 953 Iteration 954 Iteration 955 Iteration 956 Iteration 957 Iteration 958 Iteration 959 Iteration 960 Iteration 961 Iteration 962 Iteration 963 Iteration 964 Iteration 965 Iteration 966 Iteration 967 Iteration 968 Iteration 969 Iteration 970 Iteration 971 Iteration 972 Iteration 973 Iteration 974 Iteration 975 Iteration 976 Iteration 977 Iteration 978 Iteration 979 Iteration 980 Iteration 981 Iteration 982 Iteration 983 Iteration 984 Iteration 985 Iteration 986 Iteration 987 Iteration 988 Iteration 989 Iteration 990 Iteration 991 Iteration 992 Iteration 993 Iteration 994 Iteration 995 Iteration 996 Iteration 997 Iteration 998 Iteration 999 |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::TEvTxCalcPredicate_With_Conflicts [GOOD] Test command err: 2026-01-04T02:39:53.565490Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.620141Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:53.620211Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:53.620250Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:53.620310Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 
72057594037927937] doesn't have tx writes info 2026-01-04T02:39:53.633319Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{0, {0, 1111}, 123}][StateInit] bootstrapping {0, {0, 1111}, 123} [1:181:2194] 2026-01-04T02:39:53.636138Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-04T02:39:53.000000Z 2026-01-04T02:39:53.636187Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{0, {0, 1111}, 123}][StateInit] init complete for topic 'rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [1:181:2194] 2026-01-04T02:39:53.657192Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.698332Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.719155Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.729574Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.770657Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.811824Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.832561Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.967482Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|80874727-6bb474eb-230043fa-ab289df9_0 generated for partition {0, {0, 1111}, 123} topic 'rt3.dc1--account--topic' owner owner1 Send write: 0 2026-01-04T02:39:53.979828Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:54.000607Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:54.195089Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 1 2026-01-04T02:39:54.238782Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 2 2026-01-04T02:39:54.498289Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:54.672189Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 3 2026-01-04T02:39:54.788195Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 
0 Send write: 4 2026-01-04T02:39:55.088679Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 5 2026-01-04T02:39:55.388412Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 6 2026-01-04T02:39:55.647547Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:55.781389Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 7 2026-01-04T02:39:55.923702Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 8 2026-01-04T02:39:56.175610Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 9 2026-01-04T02:39:56.510089Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:56.924657Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:56.966587Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:56.966652Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:56.966696Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:56.966745Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:56.981408Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{0, {0, 1111}, 123}][StateInit] bootstrapping {0, {0, 1111}, 123} [2:183:2196] 2026-01-04T02:39:56.982879Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:{0, {0, 1111}, 123}:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-04T02:39:56.000000Z 2026-01-04T02:39:56.982932Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{0, {0, 1111}, 123}][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {0, {0, 1111}, 123} generation 0 [2:183:2196] 2026-01-04T02:39:57.003787Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:57.044798Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:57.065566Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:57.076083Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:57.117197Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:57.158445Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:57.189401Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:57.313926Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie owner1|6407d8b1-abeaa7f5-a7394cde-4590c48_0 generated for partition {0, {0, 1111}, 123} topic 'Root/PQ/rt3.dc1--account--topic' owner owner1 Send write: 0 2026-01-04T02:39:57.335972Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:57.356922Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 1 2026-01-04T02:39:57.575165Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:57.606367Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 2 2026-01-04T02:39:57.847980Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 3 2026-01-04T02:39:58.125530Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:58.157564Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 4 2026-01-04T02:39:58.481606Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 5 2026-01-04T02:39:58.746743Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 6 
2026-01-04T02:39:58.997255Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:59.193706Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 7 2026-01-04T02:39:59.270526Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 8 2026-01-04T02:39:59.521757Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Send write: 9 2026-01-04T02:39:59.878602Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:00.313828Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2: ... [StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:07.952940Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:07.973578Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:07.973676Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-04T02:40:07.973733Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:07.973765Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:07.973820Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:07.973876Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:07.994534Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:07.994613Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-04T02:40:07.994659Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:07.994706Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:07.994746Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:07.994782Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:08.015390Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:08.026101Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 
2026-01-04T02:40:08.026182Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-04T02:40:08.026244Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:08.026298Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:08.026343Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:08.026385Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:08.046980Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:08.047067Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-04T02:40:08.047137Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:08.047183Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:08.047236Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:08.047288Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:08.067912Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:08.067980Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-04T02:40:08.068067Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:08.068126Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:08.068191Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:08.068225Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:08.088812Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:08.088897Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-04T02:40:08.088944Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:08.088977Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:08.089013Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:08.089060Z node 5 
:PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:08.120836Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:08.120930Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-04T02:40:08.121002Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:08.121029Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:08.121083Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:08.121115Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:08.141766Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:08.141858Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-04T02:40:08.141931Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:08.141976Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:08.142016Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:08.142046Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:08.142658Z node 5 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2026-01-04T02:40:08.142725Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:08.142783Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-04T02:40:08.142845Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:08.142904Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:08.142963Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-04T02:40:08.143006Z node 5 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 0 2026-01-04T02:40:08.143081Z node 5 :PERSQUEUE DEBUG: partition.cpp:3095: [72057594037927937][Partition][0][StateIdle] Head=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0, NewHead=Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 2026-01-04T02:40:08.143140Z node 5 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 0 
2026-01-04T02:40:08.143183Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:08.143221Z node 5 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:40:08.143275Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Got batch complete: 1 Got KV request Got KV request 2026-01-04T02:40:08.143639Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:08.164354Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:08.175260Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-04T02:40:08.175430Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2026-01-04T02:40:08.175494Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:08.175566Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-04T02:40:08.175621Z node 5 :PERSQUEUE DEBUG: partition.cpp:1616: [72057594037927937][Partition][0][StateIdle] TxId 2 affect SourceId sourceid 2026-01-04T02:40:08.175686Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:08.175768Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:08.175831Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:08.175885Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:08.175947Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] >> TPartitionTests::ConflictingCommitFails [GOOD] >> TPartitionTests::ConflictingCommitProccesAfterRollback >> TPQTest::DirectReadOldPipe [GOOD] >> TPQTest::Read_From_Different_Zones_What_Was_Written_With_Gaps >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test >> TPartitionTests::DataTxCalcPredicateOk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:37.830084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:37.830190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:37.830235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:37.830269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:37.830312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:37.830347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:37.830435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:37.830522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:37.831325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:37.833137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:37.923687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:37.923756Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:37.938689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:37.938883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:37.939029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:37.949104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:37.949333Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:37.950042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:37.950296Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:37.957095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:37.958136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:37.963920Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:37.963989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:37.964112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:37.964164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:37.964222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:37.964713Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:38.128177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.131051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.131191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.131288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.131364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.131459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.131531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.131587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.131672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.131746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.131816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.131907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.131992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: 
ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.132097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:38.132189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 26-01-04T02:40:08.535227Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:40:08.535325Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:40:08.535410Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:40:08.535476Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:40:08.537600Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:40:08.537691Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:40:08.537770Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:40:08.539604Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:40:08.539660Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:40:08.539739Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:40:08.539833Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:40:08.540047Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:40:08.541495Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 
msg type: 269090816 2026-01-04T02:40:08.541669Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-04T02:40:08.542094Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:40:08.542244Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 115964119150 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:40:08.542321Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:40:08.542639Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:40:08.542710Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:40:08.542966Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:40:08.543087Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:40:08.544899Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:40:08.544964Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:40:08.545221Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:40:08.545287Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [27:209:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-04T02:40:08.545678Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:40:08.545744Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-04T02:40:08.545943Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:40:08.546008Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:40:08.546075Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation 
is done id#1:0 progress is 1/1 2026-01-04T02:40:08.546141Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:40:08.546202Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:40:08.546269Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:40:08.546337Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:40:08.546392Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:40:08.546496Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:40:08.546550Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-04T02:40:08.546614Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-04T02:40:08.547358Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:40:08.547502Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:40:08.547564Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:40:08.547622Z node 27 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:40:08.547695Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:40:08.547805Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:40:08.550216Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:40:08.550773Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-04T02:40:08.551884Z node 27 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [27:684:2674] Bootstrap 2026-01-04T02:40:08.553505Z node 27 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [27:684:2674] Become StateWork (SchemeCache [27:689:2679]) 2026-01-04T02:40:08.556622Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:40:08.557128Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:08.557289Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "DyNumber" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2026-01-04T02:40:08.557887Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2026-01-04T02:40:08.559184Z node 27 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [27:684:2674] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:40:08.562389Z node 27 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:40:08.562835Z node 27 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-01-04T02:40:08.563686Z node 27 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 >> TPartitionTests::DataTxCalcPredicateError |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPending [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain >> TPQTest::TestCmdReadWithLastOffset [GOOD] >> TPQTest::TestAccountReadQuota >> TConsoleTests::TestCreateTenantWrongPool [GOOD] >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] >> TPartitionTests::ConflictingTxProceedAfterRollback [GOOD] >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches >> TestMalformedRequest::ContentLengthHigher [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingCommitProccesAfterRollback [GOOD] Test command err: 2026-01-04T02:39:52.478305Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No 
candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:52.550104Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:52.550172Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:52.550224Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:52.550294Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:52.565682Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:181:2194] 2026-01-04T02:39:52.567585Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-04T02:39:52.000000Z 2026-01-04T02:39:52.567881Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [1:181:2194] 2026-01-04T02:39:52.589045Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:52.630200Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:52.651167Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:52.661754Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:52.703126Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:52.744528Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:52.765320Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\300\261\200\267\27038\001" StorageChannel: INLINE } 2026-01-04T02:39:52.916612Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:52.937364Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } 
CmdWrite { Key: "m0000000000cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\300\261\200\267\27038\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000000|0000000000000000" IncludeFrom: true To: "e0000000000|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000000cclient" Value: "\010\004\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000000uclient" Value: "\004\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000000" Value: "\030\000(\300\261\200\267\27038\001" StorageChannel: INLINE } 2026-01-04T02:39:53.238877Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.282487Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:53.282542Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:53.282606Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:53.282655Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:53.297180Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2026-01-04T02:39:53.297411Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:39:53.297683Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:182:2195] 2026-01-04T02:39:53.298691Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request Got KV request Got KV request Got KV request 2026-01-04T02:39:53.298894Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2026-01-04T02:39:53.299041Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep 2026-01-04T02:39:53.299139Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000000 to m0000000001 Got KV request 2026-01-04T02:39:53.299250Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep 2026-01-04T02:39:53.299319Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. 
From d0000000000 to d0000000001 Got KV request Got KV request 2026-01-04T02:39:53.299537Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:523: key[0]: d0000000000_00000000000000000000_00000_0000000001_00000 2026-01-04T02:39:53.299610Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:531: add key d0000000000_00000000000000000000_00000_0000000001_00000 2026-01-04T02:39:53.299717Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:629: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] check key d0000000000_00000000000000000000_00000_0000000001_00000 2026-01-04T02:39:53.299813Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:659: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 1 size 684 so 0 eo 1 d0000000000_00000000000000000000_00000_0000000001_00000 2026-01-04T02:39:53.299974Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2026-01-04T02:39:53.300013Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-04T02:39:53.300062Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-04T02:39:53.000000Z 2026-01-04T02:39:53.300102Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-04T02:39:53.300190Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000000|0000000000000000 to e0000000001 Got KV request 2026-01-04T02:39:53.300345Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TDeleteKeysStep 2026-01-04T02:39:53.300389Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:39:53.300430Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2026-01-04T02:39:53.300472Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:182:2195] 2026-01-04T02:39:53.300525Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 1 Head Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000001_00000 size 684 2026-01-04T02:39:53.300581Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:39:53.300620Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-04T02:39:53.300658Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:53.300715Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:53.300752Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:53.300791Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:53.300822Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:53.300905Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:39:53.301085Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:53.321774Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.352700Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:53.352783Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:53.352824Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:53.352863Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:53.352898Z node 2 :PERSQUEUE DEBUG: partition.cp ... artition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 
2026-01-04T02:40:10.703936Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:10.703989Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:10.704027Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:10.704072Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:10.704125Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-04T02:40:10.704179Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 1 2026-01-04T02:40:10.704221Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 1 2026-01-04T02:40:10.704258Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:10.704296Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:40:10.704354Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:10.704568Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got KV request Got batch complete: 1 Got KV request Got KV request Got KV request Got KV request Got KV request Wait tx committed for tx 1 2026-01-04T02:40:10.705397Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:10.705505Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:10.705553Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:10.705589Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.705625Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:10.705667Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.705704Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:10.705750Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Wait for no tx committed 2026-01-04T02:40:10.726372Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:10.726442Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2026-01-04T02:40:10.726485Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:10.726526Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.726565Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:10.736879Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:10.757634Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:10.757694Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.757736Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:10.757769Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.757798Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:10.778343Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:10.778399Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.778429Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:10.778461Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.778491Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:10.798971Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:10.799019Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.799042Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:10.799066Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.799088Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:10.819524Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:10.819579Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.819619Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:10.819651Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] 
Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.819672Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:10.840194Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:10.840254Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.840286Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:10.840320Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.840352Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:10.872197Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:10.872257Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.872287Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:10.872318Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.872347Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:10.892829Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:10.892895Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.892920Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:10.892944Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.892976Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:10.913422Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:10.913494Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.913524Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:10.913553Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.913581Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:10.934122Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:10.934200Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, 
PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.934230Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:10.934263Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.934293Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:10.954834Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:10.954892Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.954925Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:10.954960Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:10.954989Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_bsvolume/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_bsvolume/unittest >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardColumnTableTTL::CreateColumnTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:44.780711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:44.780805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:44.780849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:44.780883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:44.780917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:44.780943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:44.780987Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2026-01-04T02:39:44.781073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:44.781826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:44.782075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:44.846728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:44.846770Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:44.856024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:44.856174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:44.856285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:44.862196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:44.862361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:44.862840Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:44.863036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:44.865185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:44.865307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:44.866164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:44.866204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:44.866271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:44.866300Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:44.866326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:44.866477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.976254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir 
MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.976879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.976943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.976996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.977049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.977112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.977161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.977200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.977269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.977328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.977371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.977419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.977472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.977533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.977592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
d: 72057594046678944 2026-01-04T02:40:10.003046Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:10.003147Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:10.003305Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:10.003382Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:10.003475Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:10.003556Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:10.003623Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:10.003698Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:10.003765Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:10.003863Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:10.006773Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:10.006919Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:10.006981Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:10.007073Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:10.007145Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:10.007214Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:10.007283Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:10.007387Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:10.007432Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:40:10.007564Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:40:10.007612Z node 4 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:40:10.007654Z node 4 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:40:10.007693Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:40:10.007738Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-04T02:40:10.007814Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [4:3113:4331] message: TxId: 101 2026-01-04T02:40:10.007901Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:40:10.007966Z node 4 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:40:10.008024Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:40:10.009097Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2026-01-04T02:40:10.011720Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:40:10.011780Z node 4 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [4:3114:4332] TestWaitNotification: OK eventTxId 101 2026-01-04T02:40:10.012353Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:40:10.012633Z node 4 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 312us result status StatusSuccess 2026-01-04T02:40:10.013268Z node 4 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 64 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "TTLEnabledTable" Schema { Columns { Id: 1 Name: "key" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "modified_at" Type: "Uint64" TypeId: 4 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "modified_at" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_SECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } Version: 1 } ColumnShardCount: 64 Sharding { ColumnShards: 72075186233409546 ColumnShards: 72075186233409547 ColumnShards: 72075186233409548 ColumnShards: 72075186233409549 ColumnShards: 72075186233409550 ColumnShards: 72075186233409551 ColumnShards: 72075186233409552 ColumnShards: 72075186233409553 ColumnShards: 72075186233409554 ColumnShards: 72075186233409555 ColumnShards: 72075186233409556 ColumnShards: 72075186233409557 ColumnShards: 72075186233409558 ColumnShards: 72075186233409559 ColumnShards: 72075186233409560 ColumnShards: 72075186233409561 ColumnShards: 72075186233409562 ColumnShards: 72075186233409563 ColumnShards: 72075186233409564 ColumnShards: 72075186233409565 ColumnShards: 72075186233409566 ColumnShards: 72075186233409567 ColumnShards: 72075186233409568 ColumnShards: 72075186233409569 ColumnShards: 72075186233409570 ColumnShards: 72075186233409571 ColumnShards: 72075186233409572 ColumnShards: 72075186233409573 ColumnShards: 72075186233409574 ColumnShards: 72075186233409575 ColumnShards: 72075186233409576 ColumnShards: 72075186233409577 ColumnShards: 72075186233409578 ColumnShards: 72075186233409579 ColumnShards: 72075186233409580 ColumnShards: 72075186233409581 ColumnShards: 72075186233409582 ColumnShards: 72075186233409583 ColumnShards: 72075186233409584 ColumnShards: 72075186233409585 ColumnShards: 72075186233409586 ColumnShards: 72075186233409587 ColumnShards: 72075186233409588 ColumnShards: 72075186233409589 ColumnShards: 72075186233409590 ColumnShards: 72075186233409591 ColumnShards: 72075186233409592 ColumnShards: 72075186233409593 ColumnShards: 72075186233409594 ColumnShards: 72075186233409595 ColumnShards: 72075186233409596 ColumnShards: 72075186233409597 ColumnShards: 72075186233409598 ColumnShards: 72075186233409599 ColumnShards: 72075186233409600 ColumnShards: 72075186233409601 ColumnShards: 
72075186233409602 ColumnShards: 72075186233409603 ColumnShards: 72075186233409604 ColumnShards: 72075186233409605 ColumnShards: 72075186233409606 ColumnShards: 72075186233409607 ColumnShards: 72075186233409608 ColumnShards: 72075186233409609 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "modified_at" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest |91.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} |91.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} >> PQCountersLabeled::PartitionFirstClass [GOOD] >> PQCountersLabeled::ImportantFlagSwitching >> TPartitionTests::DataTxCalcPredicateError [GOOD] |91.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_bsvolume/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQTest::TestCompaction [GOOD] >> TPQTest::TestDirectReadHappyWay >> TPartitionTests::DataTxCalcPredicateOrder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::TestBatchingWithProposeConfig [GOOD] Test command err: 2026-01-04T02:39:52.195707Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:52.269198Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:52.269286Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:52.269342Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:52.269410Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:39:52.285548Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [1:181:2194] 2026-01-04T02:39:52.287583Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-04T02:39:52.000000Z 2026-01-04T02:39:52.287769Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:181:2194] 2026-01-04T02:39:52.309598Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:52.351012Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:52.372014Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:52.382551Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:52.423821Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:52.465111Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:52.485918Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "m0000000003cclient-1" IncludeFrom: true To: "m0000000003cclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "m0000000003uclient-1" IncludeFrom: true To: "m0000000003uclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "c0000000003Sclient-1" IncludeFrom: true To: "c0000000003Sclient-1" IncludeTo: true } } CmdDeleteRange { Range { From: "c0000000003wclient-1|0000000000000000" IncludeFrom: true To: "c0000000003wclient-1|FFFFFFFFFFFFFFFF" IncludeTo: true } } CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\000\030\000\"\000(\0000\000@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000" StorageChannel: INLINE } CmdWrite { Key: "_config_3" Value: "\022\t\030\200\243\0058\200\200\200\005\030\000\"\027rt3.dc1--account--topic(\0020\001\272\001 /Root/PQ/rt3.dc1--account--topic\352\001\000\372\001\002\010\000\212\002\007account\220\002\001\242\002\002\010\000\252\002\014\n\010client-2@\000" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\300\261\200\267\27038\001" StorageChannel: INLINE } 2026-01-04T02:39:52.950991Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:52.984505Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:52.984548Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:52.984593Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:52.984628Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] 
doesn't have tx writes info 2026-01-04T02:39:52.996841Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2026-01-04T02:39:52.997000Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:39:52.997240Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:182:2195] 2026-01-04T02:39:52.997924Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request Got KV request Got KV request Got KV request 2026-01-04T02:39:52.998058Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMetaStep Got KV request 2026-01-04T02:39:52.998157Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInfoRangeStep 2026-01-04T02:39:52.998223Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000000 to m0000000001 Got KV request 2026-01-04T02:39:52.998297Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataRangeStep 2026-01-04T02:39:52.998343Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000000 to d0000000001 Got KV request Got KV request 2026-01-04T02:39:52.998503Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:523: key[0]: d0000000000_00000000000000000000_00000_0000000050_00000 2026-01-04T02:39:52.998558Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:531: add key d0000000000_00000000000000000000_00000_0000000050_00000 2026-01-04T02:39:52.998628Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:629: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] check key d0000000000_00000000000000000000_00000_0000000050_00000 2026-01-04T02:39:52.998691Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:659: [Root/PQ/rt3.dc1--account--topic:0:TInitDataRangeStep] Got data offset 0 count 50 size 684 so 0 eo 50 d0000000000_00000000000000000000_00000_0000000050_00000 2026-01-04T02:39:52.998781Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDataStep 2026-01-04T02:39:52.998811Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-04T02:39:52.998841Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-04T02:39:52.000000Z 2026-01-04T02:39:52.998868Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-04T02:39:52.998931Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. 
From e0000000000|0000000000000000 to e0000000001 Got KV request 2026-01-04T02:39:52.999063Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TDeleteKeysStep 2026-01-04T02:39:52.999103Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:39:52.999136Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Initializing completed. 2026-01-04T02:39:52.999168Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 0 generation 0 [2:182:2195] 2026-01-04T02:39:52.999203Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic Root/PQ/rt3.dc1--account--topic partitition 0 so 0 endOffset 50 Head Offset 50 PartNo 0 PackedSize 0 count 0 nextOffset 50 batches 0 SYNC INIT DATA KEY: d0000000000_00000000000000000000_00000_0000000050_00000 size 684 2026-01-04T02:39:52.999247Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:39:52.999276Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-04T02:39:52.999314Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:52.999341Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:52.999365Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:52.999393Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:52.999422Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:52.999516Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-04T02:39:52.999648Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:53.020335Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.051177Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:53.051241Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:53.051275Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:53.051303Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:53.051335Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:53.061576Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:53.082288Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:53.082350Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:53.082381Z node 2 ... rocess user action and tx events 2026-01-04T02:40:11.859099Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:11.879661Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:11.900242Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:11.920804Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:11.942432Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:11.963690Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:11.984240Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:12.004848Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:12.025476Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:12.046033Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:12.056624Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. 
Can't process user action and tx events 2026-01-04T02:40:12.077202Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:12.097778Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:12.118314Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:12.138840Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:12.201476Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:12.222174Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Send disk status response with cookie: 0 Wait tx rollback for tx 0 2026-01-04T02:40:12.222414Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:12.222546Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:12.222603Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:12.222651Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ProposeConfig]) 2026-01-04T02:40:12.222722Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:12.222775Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:12.222814Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:12.222850Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:12.222901Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Wait immediate tx complete 2 Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 2 2026-01-04T02:40:12.223149Z node 5 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 3 2026-01-04T02:40:12.223198Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:12.223237Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:12.223274Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:12.223319Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-04T02:40:12.223430Z node 5 
:PERSQUEUE DEBUG: partition.cpp:3831: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-1 drop done 2026-01-04T02:40:12.223470Z node 5 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 3 2026-01-04T02:40:12.223512Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:12.223546Z node 5 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:40:12.223583Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Got batch complete: 1 Got KV request Got KV request Send disk status response with cookie: 0 2026-01-04T02:40:12.224116Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:12.234526Z node 5 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:12.234773Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-04T02:40:12.234980Z node 5 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:40:12.235109Z node 5 :PERSQUEUE DEBUG: event_helpers.cpp:40: tablet 72057594037927937 topic 'Root/PQ/rt3.dc1--account--topic' partition 0 error: cannot finish read request. Consumer client-1 is gone from partition 2026-01-04T02:40:12.235283Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:12.235333Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:12.235375Z node 5 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2026-01-04T02:40:12.235436Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:12.235471Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:12.235515Z node 5 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ImmediateTx]) 2026-01-04T02:40:12.235571Z node 5 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2026-01-04T02:40:12.235611Z node 5 :PERSQUEUE DEBUG: partition.cpp:3941: [72057594037927937][Partition][0][StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2026-01-04T02:40:12.235657Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:12.235692Z node 5 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:40:12.235731Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: 
[72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:12.235928Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got KV request Got KV request Got KV request 2026-01-04T02:40:12.236822Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 readTimeStamp for offset 5 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:40:12.236880Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:976: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 send read request for offset 5 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 0 Got KV request Got KV request Got KV request Got batch complete: 1 Got KV request Got KV request Got KV request 2026-01-04T02:40:12.237114Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:891: [72057594037927937][Partition][0][StateIdle] read cookie 4 Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 user client-0 offset 5 partno 0 count 1 size 1024000 endOffset 50 max time lag 0ms effective offset 5 2026-01-04T02:40:12.237337Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72057594037927937][Partition][0][StateIdle] read cookie 4 added 1 blobs, size 0 count 45 last offset 0, current partition end offset: 50 2026-01-04T02:40:12.237381Z node 5 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72057594037927937][Partition][0][StateIdle] Reading cookie 4. Send blob request. Got KV request Got KV request Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 4 2026-01-04T02:40:12.237594Z node 5 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:12.237697Z node 5 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:12.237743Z node 5 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:12.237784Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:12.237843Z node 5 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:12.237880Z node 5 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:12.237912Z node 5 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:12.237955Z node 5 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 4 |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TSchemeShardTTLTests::CheckCounters [GOOD] >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_WriteEncoded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::ContentLengthHigher [GOOD] Test command err: 2026-01-04T02:38:00.483970Z node 1 :METADATA_PROVIDER WARN: 
log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330129448824276:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:00.484454Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:00.518723Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:38:00.782433Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:00.782537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:00.788323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:00.880969Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:00.881270Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:00.882238Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330129448824247:2081] 1767494280480470 != 1767494280480473 2026-01-04T02:38:00.932654Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:00.932690Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:00.932703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:00.932810Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:01.069516Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:01.100609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:01.284167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:38:01.289157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:38:01.360870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.456448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.492542Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:01.495241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.537683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.566197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.594322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.623782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.652668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.681780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:01.710041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:03.582189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330142333727787:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:03.582192Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330142333727798:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:03.582297Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:03.582558Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330142333727802:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:03.582626Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:03.585684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:03.594294Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330142333727801:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:38:03.694521Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330142333727854:3103] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:03.999758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:04.028262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:04.055538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:04.083731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:04.112414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemesh ... 
:10.791917Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:40:10.791957Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 4ms 2026-01-04T02:40:10.792434Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { 
Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:40:10.792486Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-04T02:40:10.792626Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 6ms 2026-01-04T02:40:10.793141Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:40:10.861075Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330690699979638:2814], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:10.861093Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7591330690699979639:2815], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:40:10.861167Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:10.861548Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330690699979642:2816], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:10.861592Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:10.865073Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591330690699979636:2813]: Pool not found 2026-01-04T02:40:10.865316Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-04T02:40:11.233090Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330694994946958:2821], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:11.233099Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7591330694994946959:2822], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:40:11.233219Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:11.233543Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330694994946962:2823], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:11.233623Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:11.237816Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591330694994946956:2820]: Pool not found 2026-01-04T02:40:11.238105Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-04T02:40:11.240766Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330694994946982:2828], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:11.240793Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7591330694994946983:2829], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:40:11.240853Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:11.241123Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330694994946986:2830], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:11.241200Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:11.243972Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591330694994946980:2827]: Pool not found 2026-01-04T02:40:11.244275Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-04T02:40:11.700770Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:219: (#37,[::1]:36338) connection closed by inactivity timeout |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest >> TConsoleTests::TestCreateTenantWrongPoolExtSubdomain [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CheckCounters [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2145] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:121:2058] recipient: [1:116:2145] Leader for TabletID 72057594046678944 is [1:139:2160] sender: [1:141:2058] recipient: [1:116:2145] 2026-01-04T02:39:45.750598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:45.750683Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:45.750721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:45.750755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:45.750797Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:45.750825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:45.750876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:45.750945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:45.751778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:45.752063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:45.842553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:45.842604Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:45.847483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:45.847633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:45.847742Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:45.852531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:45.852732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:45.853396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:45.853586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:45.855263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:45.855446Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:45.856484Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:45.856542Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:45.856702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:45.856749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:45.856806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:45.856961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:46.023217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.024196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.024334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: 
EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.024420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.024499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.024566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.024625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.024695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.024781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.024870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.024975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.025080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.025162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.025247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:46.025330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2214] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
tifyTxCompletionResult 2026-01-04T02:40:12.608772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1753:3653] 2026-01-04T02:40:12.609210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 107 Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2026-01-04T02:40:12.721754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0001 2026-01-04T02:40:12.722597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0001 2026-01-04T02:40:12.722741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 0 row count 0 2026-01-04T02:40:12.722801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-04T02:40:12.722881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2026-01-04T02:40:12.723431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:4 data size 0 row count 0 2026-01-04T02:40:12.723473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-04T02:40:12.723518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2026-01-04T02:40:12.811418Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-01-04T02:40:12.811521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-04T02:40:12.811626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: 
at schemeshard: 72057594046678944 2026-01-04T02:40:12.811795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:220: Run conditional erase, tabletId: 72075186233409549, request: TableId: 40 Expiration { ColumnId: 2 WallClockTimestamp: 1767507676768683 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-01-04T02:40:12.811922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:220: Run conditional erase, tabletId: 72075186233409548, request: TableId: 40 Expiration { ColumnId: 2 WallClockTimestamp: 1767507676768683 ColumnUnit: UNIT_AUTO } SchemaVersion: 4 Limits { BatchMaxBytes: 512000 BatchMinKeys: 1 BatchMaxKeys: 256 } DatabaseName: "/MyRoot", at schemeshard: 72057594046678944 2026-01-04T02:40:12.812426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2026-01-04T02:40:12.812907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7100: Conditional erase accepted: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2026-01-04T02:40:12.813569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-04T02:40:12.813622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409548, at schemeshard: 72057594046678944 2026-01-04T02:40:12.814210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:354: TTxScheduleConditionalErase Execute: at schemeshard: 72057594046678944 2026-01-04T02:40:12.814249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:404: Successful conditional erase: tabletId: 72075186233409549, at schemeshard: 72057594046678944 2026-01-04T02:40:12.816764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-04T02:40:12.816999Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-04T02:40:12.817050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2026-01-04T07:21:16.768683Z, at schemeshard: 72057594046678944 2026-01-04T02:40:12.818232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:458: TTxScheduleConditionalErase Complete: at schemeshard: 72057594046678944 2026-01-04T02:40:12.818309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-04T02:40:12.818387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-04T02:40:12.818421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__conditional_erase.cpp:113: Skip conditional erase: shardIdx: 72057594046678944:3, run at: 2026-01-04T07:21:16.768683Z, at schemeshard: 72057594046678944 2026-01-04T02:40:12.818464Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-04T02:40:12.839990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at 
tablet 72057594046678944, queue size# 0 2026-01-04T02:40:12.893513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-01-04T02:40:12.893646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-01-04T02:40:12.893722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 0 row count 0 2026-01-04T02:40:12.893783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-04T02:40:12.893905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2026-01-04T02:40:12.894080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:4 data size 0 row count 0 2026-01-04T02:40:12.894124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-04T02:40:12.894200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 2026-01-04T02:40:12.926214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-04T02:40:12.989850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-01-04T02:40:12.989946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409549 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 40] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0 2026-01-04T02:40:12.989993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:3 data size 0 row count 0 2026-01-04T02:40:12.990042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 
followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-04T02:40:12.990153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable 2026-01-04T02:40:12.990309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 40 shard idx 72057594046678944:4 data size 0 row count 0 2026-01-04T02:40:12.990334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409549 maps to shardIdx: 72057594046678944:4 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], pathId map=TTLEnabledTableMoved, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-04T02:40:12.990390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409549: SplitByLoadNotEnabledForTable Name: "SchemeShard/NumShardsByTtlLag" Ranges: "0" Ranges: "900" Ranges: "1800" Ranges: "3600" Ranges: "7200" Ranges: "14400" Ranges: "28800" Ranges: "57600" Ranges: "86400" Ranges: "inf" Values: 0 Values: 2 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TPQTest::TestAccountReadQuota [GOOD] >> TPQTest::TestAlreadyWritten >> BasicStatistics::ServerlessTimeIntervals [GOOD] >> THiveTest::TestCreateSubHiveCreateManyTabletsWithReboots [GOOD] >> THiveTest::TestCheckSubHiveMigrationWithReboots >> TPartitionTests::ConflictingSrcIdTxAndWritesDifferentBatches [GOOD] >> TPartitionTests::ConflictingSrcIdForTxWithHead >> TPQTest::TestDirectReadHappyWay [GOOD] >> TPQTest::TestMessageNo >> TPQTest::TestReserveBytes [GOOD] >> TPQTest::TestSetClientOffset |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Basic >> S3SettingsConversion::Basic [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForPendingExtSubdomain [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning >> KqpScan::ScanDuringSplit10 >> S3SettingsConversion::FoldersStyleDeduction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::ServerlessTimeIntervals [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:38:56.772090Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:38:56.864708Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:38:56.873307Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:38:56.873669Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:38:56.873833Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:38:57.268318Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:57.360596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:57.360727Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:57.408802Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:57.492338Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:38:58.183831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:58.184879Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:58.184932Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:58.184967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:58.185378Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:58.252710Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:58.809466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:39:01.717347Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:39:01.725290Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:39:01.728355Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:39:01.765279Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:39:01.765399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:01.796372Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:39:01.798431Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:01.982616Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:01.983242Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:01.983809Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:01.984412Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:01.984633Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:01.984852Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:01.985061Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:01.985273Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:01.985340Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:02.107632Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-04T02:39:02.174555Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:02.174672Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:02.187585Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:02.328012Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:02.415491Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:39:02.415611Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:39:02.446837Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:39:02.447161Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:39:02.447369Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:39:02.447429Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:39:02.447484Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:39:02.447535Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:39:02.447596Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:39:02.447652Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:39:02.448227Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:39:02.591933Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2223:2620] 2026-01-04T02:39:02.592281Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-04T02:39:02.593933Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Describe result: PathErrorUnknown 2026-01-04T02:39:02.593979Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Creating table 2026-01-04T02:39:02.594036Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:39:02.596771Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2627], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:39:02.599535Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2234:2625] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:39:02.603254Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:39:02.667356Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:39:02.685143Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:39:02.685239Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2302:2660], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:39:02.700309Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2328:2683] 2026-01-04T02:39:02.701650Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2328:2683], schemeshard id = 72075186224037897 2026-01-04T02:39:02.995230Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. 
Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:39:02.996993Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2594:2886], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:39:02.999647Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:03.003181Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:39:03.003303Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on create table tx: 2814749767 ... 6Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 29 ] 2026-01-04T02:39:31.751327Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 29, ReplyToActorId = [2:5661:4406], StatRequests.size() = 1 2026-01-04T02:39:32.138152Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037905 2026-01-04T02:39:32.138242Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 29.216000s, at schemeshard: 72075186224037905 2026-01-04T02:39:32.138528Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037905, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-04T02:39:32.154562Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:32.442559Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:39:32.907305Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 30 ], ReplyToActorId[ [2:5704:4428]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:32.907788Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 30 ] 2026-01-04T02:39:32.907844Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 30, ReplyToActorId = [2:5704:4428], StatRequests.size() = 1 2026-01-04T02:39:33.927107Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 31 ], ReplyToActorId[ [2:5733:4440]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:33.927560Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 31 ] 2026-01-04T02:39:33.927601Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 31, ReplyToActorId = [2:5733:4440], StatRequests.size() = 1 2026-01-04T02:39:34.480486Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-04T02:39:34.480923Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 
cookie: 7 2026-01-04T02:39:34.481428Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 0 2026-01-04T02:39:34.481520Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-04T02:39:34.551438Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:39:34.551508Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:39:34.551793Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:39:34.570249Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:34.858512Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 32 ], ReplyToActorId[ [2:5766:4456]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:34.858909Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 32 ] 2026-01-04T02:39:34.858964Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 32, ReplyToActorId = [2:5766:4456], StatRequests.size() = 1 2026-01-04T02:39:34.859509Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 33 ], ReplyToActorId[ [2:5768:4458]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:34.863999Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 33 ] 2026-01-04T02:39:34.864070Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 33, ReplyToActorId = [2:5768:4458], StatRequests.size() = 1 2026-01-04T02:39:39.228619Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:39:39.228721Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:39:39.229156Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:39:39.244158Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:43.760050Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:39:43.760132Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:39:43.760460Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:39:43.775291Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:48.415718Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 
72075186224037897 2026-01-04T02:39:48.415783Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:39:48.416077Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:39:48.430818Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:50.976063Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:39:52.733572Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:39:52.733650Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:39:52.733995Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:39:52.748026Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:53.492681Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037899 2026-01-04T02:39:53.492745Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 24.266000s, at schemeshard: 72075186224037899 2026-01-04T02:39:53.492907Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-04T02:39:53.507243Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:39:55.422134Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:39:55.422221Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:39:55.422267Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:39:55.422309Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-04T02:39:58.963917Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:39:58.963981Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:39:58.964237Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:39:58.978254Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:40:02.672047Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, 
paths with incomplete stats: 0, at schemeshard: 72075186224037905 2026-01-04T02:40:02.672114Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 23.010000s, at schemeshard: 72075186224037905 2026-01-04T02:40:02.672295Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037905, stats byte size: 26, entries count: 1, are all stats full: 1 2026-01-04T02:40:02.685743Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:40:07.145000Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 2, schemeshard count = 2 2026-01-04T02:40:07.145433Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 8 2026-01-04T02:40:07.145931Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 0 2026-01-04T02:40:07.146164Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 8 2026-01-04T02:40:07.221839Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:40:07.221924Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:40:07.222195Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:40:07.236029Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:40:12.816392Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:40:12.816468Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:40:12.816661Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:40:12.829133Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TPartitionTests::DataTxCalcPredicateOrder [GOOD] >> TPQTest::TestMessageNo [GOOD] >> TPQTest::TestGetTimestamps |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Basic [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TPQRBDescribes::PartitionLocations [GOOD] >> TPQTabletTests::DropTablet >> TPartitionTests::DifferentWriteTxBatchingOptions |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStyleDeduction [GOOD] |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TPQTabletTests::DropTablet [GOOD] >> TPQTabletTests::Cancel_Tx >> TPQTest::The_Value_Of_CreationUnixTime_Must_Not_Decrease [GOOD] >> 
TPQTest::The_Keys_Are_Loaded_In_Several_Iterations >> TPQTabletTests::Cancel_Tx [GOOD] >> TPQTabletTests::Config_TEvTxCommit_After_Restart |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage >> TPQTabletTests::Config_TEvTxCommit_After_Restart [GOOD] >> TPartitionTests::ConflictingSrcIdForTxWithHead [GOOD] >> S3SettingsConversion::FoldersStrictStyle [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TPQTabletTests::All_New_Partitions_In_Another_Tablet |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TPQTest::TestWritePQCompact [GOOD] >> TPQTest::TestWritePQBigMessage |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::FoldersStrictStyle [GOOD] >> TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] >> TPQTest::TestGetTimestamps [GOOD] >> TPQTest::TestMaxTimeLagRewind |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::ConflictingSrcIdForTxWithHead [GOOD] Test command err: 2026-01-04T02:40:06.631507Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:06.708085Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:06.708161Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:06.708203Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:06.708252Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:40:06.722933Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [1:181:2194] 2026-01-04T02:40:06.724483Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-04T02:40:06.000000Z 2026-01-04T02:40:06.724693Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:181:2194] 2026-01-04T02:40:06.745769Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:06.787179Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:06.808111Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:06.818656Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:06.860124Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:06.901485Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:06.922258Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\360\236\201\267\27038\001" StorageChannel: INLINE } 2026-01-04T02:40:07.074890Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:07.095813Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\360\236\201\267\27038\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\002\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\002\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\360\236\201\267\27038\001" StorageChannel: INLINE } 
2026-01-04T02:40:07.452540Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:07.496560Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:07.496615Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:07.496678Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:07.496737Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:40:07.514702Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [2:183:2196] 2026-01-04T02:40:07.516437Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. Value 2026-01-04T02:40:07.000000Z 2026-01-04T02:40:07.516655Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:183:2196] 2026-01-04T02:40:07.537636Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:07.578923Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:07.599745Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:07.610290Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:07.651521Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:07.692684Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:07.723616Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\000\020\001\030\001\"\tsession-1(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\330\246\201\267\27038\001" StorageChannel: INLINE } 2026-01-04T02:40:07.869064Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-2" Value: 
"\010\000\020\001\030\001\"\tsession-2(\0000\003@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\330\246\201\267\27038\001" StorageChannel: INLINE } 2026-01-04T02:40:07.890737Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\003\020\001\030\001\"\tsession-1(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\003\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\330\246\201\267\27038\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\000\020\001\030\001\"\tsession-2(\0000\003@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\330\246\201\267\27038\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-2" Value: "\010\001\020\001\030\001\"\tsession-2(\0000\003@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-2" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-2" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient-1" Value: "\010\006\020\001\030\001\"\tsession-1(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient-1" Value: "\006\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session-1" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\330\246\201\267\27038\001" StorageChannel: INLINE } 2026-01-04T02:40:08.242356Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:08.282856Z node 3 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:08.282916Z node 3 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:08.282964Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:08.283017Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:40:08.618368Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candida ... 
RSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:17.215230Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-04T02:40:17.215278Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 4, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:17.215311Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:17.215350Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 4, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:17.215384Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:17.236129Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:40:17.236315Z node 6 :PERSQUEUE DEBUG: partition.cpp:1672: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2026-01-04T02:40:17.236375Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:17.236429Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-04T02:40:17.236484Z node 6 :PERSQUEUE DEBUG: partition.cpp:1616: [72057594037927937][Partition][0][StateIdle] TxId 0 affect SourceId src1 2026-01-04T02:40:17.236567Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:40:17.236615Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 3, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:17.236679Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:17.236724Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 3, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:17.236763Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Wait 1st KV request Wait kv request 2026-01-04T02:40:17.236918Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 0 2026-01-04T02:40:17.236967Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:17.237007Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:40:17.237054Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 3, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:17.237097Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:17.237157Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-04T02:40:17.237201Z node 6 
:PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 0 2026-01-04T02:40:17.237266Z node 6 :PERSQUEUE DEBUG: partition.cpp:3095: [72057594037927937][Partition][0][StateIdle] Head=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0, NewHead=Offset 1 PartNo 0 PackedSize 0 count 0 nextOffset 1 batches 0 2026-01-04T02:40:17.237330Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 0 2026-01-04T02:40:17.237377Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 3, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:17.237419Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:40:17.237464Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Got batch complete: 1 Got KV request Got KV request Wait tx committed for tx 0 2026-01-04T02:40:17.237877Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:17.248235Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:17.248345Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-04T02:40:17.248436Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2026-01-04T02:40:17.248478Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:17.248514Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:40:17.248557Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:40:17.248578Z node 6 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:40:17.248606Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 3, PendingWrites: 0 2026-01-04T02:40:17.248638Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:17.248673Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:40:17.248732Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1227: [72057594037927937][Partition][0][StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src1'. Message seqNo: 8. InitialSeqNo: (NULL). Committed seqNo: 10. Writing seqNo: (NULL). EndOffset: 1. CurOffset: 1. 
Offset: 20 2026-01-04T02:40:17.248779Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:40:17.248803Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1227: [72057594037927937][Partition][0][StateIdle] Already written message. Topic: 'Root/PQ/rt3.dc1--account--topic' Partition: 0 SourceId: 'src1'. Message seqNo: 10. InitialSeqNo: (NULL). Committed seqNo: 10. Writing seqNo: (NULL). EndOffset: 1. CurOffset: 1. Offset: 30 2026-01-04T02:40:17.248821Z node 6 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:40:17.248886Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob processing sourceId 'src1' seqNo 11 partNo 0 2026-01-04T02:40:17.249602Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][0][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 0 part blob complete sourceId 'src1' seqNo 11 partNo 0 FormedBlobsCount 0 NewHead: Offset 40 PartNo 0 PackedSize 84 count 1 nextOffset 41 batches 1 2026-01-04T02:40:17.249664Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 3 2026-01-04T02:40:17.249694Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (3) 2026-01-04T02:40:17.249746Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:17.250280Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72057594037927937][Partition][0][StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 0 compactOffset 40,1 HeadOffset 1 endOffset 1 curOffset 41 d0000000000_00000000000000000040_00000_0000000001_00000? size 70 WTime 10238 2026-01-04T02:40:17.250487Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got batch complete: 3 Got KV request Got KV request Got KV request Wait 2nd KV request Wait kv request 2026-01-04T02:40:17.250675Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:17.271253Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:17.271360Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-04T02:40:17.271458Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 17 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:17.271515Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][0][StateIdle] TPartition::ReplyWrite. 
Partition: 0 2026-01-04T02:40:17.271596Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][0][StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 8, partNo: 0, Offset: 1 is already written 2026-01-04T02:40:17.271640Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2026-01-04T02:40:17.271678Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][0][StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 10, partNo: 0, Offset: 1 is already written 2026-01-04T02:40:17.271715Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][0][StateIdle] TPartition::ReplyWrite. Partition: 0 2026-01-04T02:40:17.271756Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][0][StateIdle] Answering for message sourceid: 'src1', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 0, SeqNo: 11, partNo: 0, Offset: 40 is stored on disk 2026-01-04T02:40:17.271997Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:17.272048Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:17.272089Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:17.272136Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:17.272175Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:17.272234Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TestMalformedRequest::CompressedDeflateContentLengthHigher [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_NewSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] >> TPQTest::TestPQPartialRead [GOOD] >> TPQTest::TestPQRead >> TConsoleTests::TestCreateTenantAlreadyExists [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_1_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldSucceed-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:43.822550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:43.822625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:43.822655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:43.822684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:43.822709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:43.822729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:43.822766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:43.822841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:43.823541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:43.823780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:43.892650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:43.892696Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:43.905393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:43.905538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:43.905660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:43.917124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:43.917298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:43.917824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:43.918064Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:43.920428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:43.920555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:43.921443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:43.921485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:43.921556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:43.921594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:43.921627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:43.921763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:44.050889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.051875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.051998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.052083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.052155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.052271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.052352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.052421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.052520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.052589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.052656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.052732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 
281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.052823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.052919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:44.053010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:40:17.984391Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:40:17.984430Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:40:17.984469Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:40:17.984511Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:40:17.984599Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 0/1, is published: true 2026-01-04T02:40:17.985848Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1126 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-04T02:40:17.985897Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-04T02:40:17.986051Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: 
TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1126 } } CommitVersion { Step: 5000038 TxId: 101 } 2026-01-04T02:40:17.986183Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 101 Step: 5000038 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1126 } } CommitVersion { Step: 5000038 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-04T02:40:17.988159Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 717 RawX2: 120259086991 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:40:17.988209Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-04T02:40:17.988331Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 717 RawX2: 120259086991 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:40:17.988395Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:40:17.988492Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 717 RawX2: 120259086991 } Origin: 72075186233409546 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:40:17.988563Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:40:17.988612Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:17.988658Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:40:17.988702Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-04T02:40:17.990047Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:40:17.990154Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 101 2026-01-04T02:40:17.991843Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:17.992017Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:17.992274Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:17.992325Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:40:17.992423Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:40:17.992459Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:40:17.992499Z node 28 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:40:17.992535Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:40:17.992574Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-04T02:40:17.992640Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [28:745:2723] message: TxId: 101 2026-01-04T02:40:17.992691Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:40:17.992736Z node 28 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:40:17.992769Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:40:17.992925Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:40:17.994812Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:40:17.994863Z node 28 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [28:746:2724] TestWaitNotification: OK eventTxId 101 2026-01-04T02:40:17.995348Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:40:17.995591Z node 28 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" took 272us result status StatusSuccess 2026-01-04T02:40:17.996171Z node 28 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathDescription { Self { Name: 
"TTLTableWithpgint8Column_UNIT_NANOSECONDS" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLTableWithpgint8Column_UNIT_NANOSECONDS" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "pgint8" TypeId: 12288 Id: 2 NotNull: false TypeInfo { PgTypeId: 20 } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 ColumnUnit: UNIT_NANOSECONDS Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTabletTests::All_New_Partitions_In_Another_Tablet [GOOD] Test command err: Bucket: 100 elems count: 97 Bucket: 200 elems count: 104 Bucket: 500 elems count: 288 Bucket: 1000 elems count: 528 Bucket: 2000 elems count: 1008 Bucket: 5000 elems count: 2976 2026-01-04T02:40:04.959749Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330663053081844:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:04.960631Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:04.993030Z 
node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:40:04.993584Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330661560995152:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:04.993930Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:05.002376Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:40:05.169672Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:05.171101Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:05.211432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:05.211606Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:05.233227Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:05.233333Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:05.277760Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:05.278070Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:40:05.279831Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:05.293102Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:05.387226Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001c6f/r3tmp/yandexrdegix.tmp 2026-01-04T02:40:05.387280Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001c6f/r3tmp/yandexrdegix.tmp 2026-01-04T02:40:05.387522Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001c6f/r3tmp/yandexrdegix.tmp 2026-01-04T02:40:05.387619Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:05.409203Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:05.421504Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:05.422463Z INFO: 
TTestServer started on Port 4066 GrpcPort 18456 PQClient connected to localhost:18456 2026-01-04T02:40:05.605781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:05.699157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:05.970357Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:05.999010Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:07.874879Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330674445897346:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:07.874886Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330674445897354:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:07.874983Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:07.875221Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330674445897361:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:07.875260Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:07.879218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:07.893969Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330674445897360:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-04T02:40:07.951217Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330674445897389:2182] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:08.313468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:08.317240Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591330674445897403:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:40:08.317832Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZTBiMzA5NTYtYjUyMzI4ZjgtZWMwMjBiN2UtZDY0NGYxYWU=, ActorId: [2:7591330674445897344:2304], ActorState: ExecuteState, LegacyTraceId: 01ke3e0np1agh4sf9fc885dbyq, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:40:08.320533Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:40:08.322836Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330680232952464:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:40:08.323290Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NTFjYzBiNDUtOTgxM2Q5ZjUtNjE1MmIxYjctYmZjMTcwODg=, ActorId: [1:7591330680232952439:2330], ActorState: ExecuteState, LegacyTraceId: 01ke3e0nvt766866rwsmnetw8f, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:40:08.323616Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At ... WAIT_RS 2026-01-04T02:40:17.906448Z node 6 :PQ_TX INFO: pq_impl.cpp:4013: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSet to 0 receivers. Wait TEvTxProcessing::TEvReadSet from 1 senders. 2026-01-04T02:40:17.906503Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4519: [PQ: 72057594037927937] HaveParticipantsDecision 1 2026-01-04T02:40:17.906738Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS to EXECUTING 2026-01-04T02:40:17.906796Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 0/2 2026-01-04T02:40:17.906886Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2026-01-04T02:40:17.906941Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:17.906985Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:17.907027Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:17.907092Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-04T02:40:17.907377Z node 6 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user client-1 reinit with generation 2 done 2026-01-04T02:40:17.907422Z node 6 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user client-3 reinit with generation 2 done 2026-01-04T02:40:17.907453Z node 6 :PERSQUEUE DEBUG: partition.cpp:3831: [72057594037927937][Partition][0][StateIdle] Topic 'topic' partition 0 user user drop done 2026-01-04T02:40:17.907498Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 67890 2026-01-04T02:40:17.907542Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, 
PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:17.907584Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:40:17.907633Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:17.908110Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][1][StateIdle] Handle TEvPQ::TEvTxCommit Step 100, TxId 67890 2026-01-04T02:40:17.908152Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:40:17.908179Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:17.908205Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:17.908242Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-04T02:40:17.908316Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][1][StateIdle] Schedule reply tx done 67890 2026-01-04T02:40:17.908348Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:17.908377Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-04T02:40:17.908409Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-04T02:40:17.908611Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:40:17.908697Z node 6 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-04T02:40:17.912404Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:17.912714Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:40:17.913457Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:17.913509Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:17.913542Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:17.913572Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:17.913608Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:17.913640Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:17.913677Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:40:17.914213Z node 6 :PQ_TX INFO: pq_impl.cpp:3534: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 0 2026-01-04T02:40:17.914284Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2026-01-04T02:40:17.914332Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2026-01-04T02:40:17.914382Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2026-01-04T02:40:17.914432Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 1/2 2026-01-04T02:40:17.914484Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4382: [PQ: 72057594037927937] TxId 67890 status has not changed 2026-01-04T02:40:17.914771Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][1][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:17.914937Z node 6 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:40:17.915037Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][1][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:17.915071Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:40:17.915102Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:17.915130Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:17.915161Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:17.915187Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 
2026-01-04T02:40:17.915223Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-04T02:40:17.915367Z node 6 :PQ_TX INFO: pq_impl.cpp:3534: [PQ: 72057594037927937] Handle TEvPQ::TEvTxDone Step 100, TxId 67890, Partition 1 2026-01-04T02:40:17.915407Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state EXECUTING 2026-01-04T02:40:17.915438Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State EXECUTING 2026-01-04T02:40:17.915469Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4361: [PQ: 72057594037927937] TxId 67890 State EXECUTING FrontTxId 67890 2026-01-04T02:40:17.915500Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4546: [PQ: 72057594037927937] Responses received from the partitions 2/2 2026-01-04T02:40:17.915542Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4247: [PQ: 72057594037927937] TxId: 67890 send TEvPersQueue::TEvProposeTransactionResult(COMPLETE) 2026-01-04T02:40:17.915622Z node 6 :PQ_TX INFO: pq_impl.cpp:4555: [PQ: 72057594037927937] Complete TxId 67890 2026-01-04T02:40:17.915905Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config PartitionConfig { LifetimeSeconds: 86400 WriteSpeedInBytesPerSecond: 10485760 } TopicName: "rt3.dc1--account--topic" Version: 2 LocalDC: true TopicPath: "/Root/PQ/rt3.dc1--account--topic" YdbDatabasePath: "" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "account" MeteringMode: METERING_MODE_REQUEST_UNITS AllPartitions { PartitionId: 0 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 1 ParentPartitionIds: 2 TabletId: 72057594037927937 } AllPartitions { PartitionId: 2 ChildPartitionIds: 0 ChildPartitionIds: 1 TabletId: 22222 } Consumers { Name: "client-1" Generation: 2 } Consumers { Name: "client-3" Generation: 2 } 2026-01-04T02:40:17.916005Z node 6 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:17.916101Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTING to EXECUTED 2026-01-04T02:40:17.916190Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4032: [PQ: 72057594037927937] TPersQueue::SendEvReadSetAckToSenders 2026-01-04T02:40:17.916257Z node 6 :PQ_TX INFO: pq_impl.cpp:4034: [PQ: 72057594037927937] Send TEvTxProcessing::TEvReadSetAck {TEvReadSet step# 100 txid# 67890 TabletSource# 22222 TabletDest# 72057594037927937 SetTabletConsumer# 72057594037927937 Flags# 0 Seqno# 0} 2026-01-04T02:40:17.916314Z node 6 :PQ_TX INFO: pq_impl.cpp:4586: [PQ: 72057594037927937] delete partitions for TxId 67890 2026-01-04T02:40:17.916366Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from EXECUTED to WAIT_RS_ACKS 2026-01-04T02:40:17.916421Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/0 2026-01-04T02:40:17.916463Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4595: [PQ: 72057594037927937] HaveAllRecipientsReceive 1, AllSupportivePartitionsHaveBeenDeleted 1 2026-01-04T02:40:17.916505Z node 6 :PQ_TX DEBUG: transaction.cpp:406: [TxId: 67890] PredicateAcks: 0/0 2026-01-04T02:40:17.916554Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4674: [PQ: 72057594037927937] add an TxId 67890 to the list for deletion 2026-01-04T02:40:17.916604Z node 6 :PQ_TX INFO: pq_impl.cpp:4315: [PQ: 72057594037927937] TxId 67890 moved from WAIT_RS_ACKS to DELETING 2026-01-04T02:40:17.916654Z node 6 :PQ_TX DEBUG: pq_impl.cpp:3886: [PQ: 
72057594037927937] Delete key for TxId 67890 2026-01-04T02:40:17.916745Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72057594037927937] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-04T02:40:17.919136Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72057594037927937] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-04T02:40:17.919212Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4369: [PQ: 72057594037927937] Try execute txs with state DELETING 2026-01-04T02:40:17.919271Z node 6 :PQ_TX DEBUG: pq_impl.cpp:4414: [PQ: 72057594037927937] TxId 67890, State DELETING 2026-01-04T02:40:17.919323Z node 6 :PQ_TX INFO: pq_impl.cpp:4619: [PQ: 72057594037927937] delete TxId 67890 |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> ColumnShardTiers::DSConfigs |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedDeflateContentLengthHigher [GOOD] Test command err: 2026-01-04T02:38:07.943992Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330159279017139:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:07.944724Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:08.096552Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:08.101827Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:08.101895Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:08.166400Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:08.234684Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:08.237418Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330159279017109:2081] 1767494287941775 != 1767494287941778 2026-01-04T02:38:08.272910Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:08.272944Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:08.272953Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:08.273071Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:08.397258Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:08.458610Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:08.636324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:38:08.641719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:38:08.698742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.783816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.824532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.862364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.889896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.916714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.944950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:08.950036Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:08.974489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:09.002094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:09.026705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:10.231761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330172163920646:2367], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:10.231762Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330172163920654:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:10.231839Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:10.232070Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330172163920661:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:10.232114Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:10.234789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:10.244032Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330172163920660:2371], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:38:10.336999Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330172163920713:3099] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:10.597294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:10.623353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:10.649607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:10.677573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:10.703482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:10.729522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeO ... 
:17.233762Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:40:17.233798Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 4ms 2026-01-04T02:40:17.234146Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { 
Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:40:17.234177Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-04T02:40:17.234303Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 5ms 2026-01-04T02:40:17.234815Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:40:17.288293Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7591330718078175376:2807], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:40:17.288318Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330718078175375:2806], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:17.288419Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:17.288762Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330718078175379:2808], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:17.288832Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:17.292494Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591330718078175373:2805]: Pool not found 2026-01-04T02:40:17.292783Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-04T02:40:17.703647Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330718078175398:2813], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:17.703677Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7591330718078175399:2814], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:40:17.703755Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:17.704056Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330718078175402:2815], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:17.704134Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:17.709656Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591330718078175396:2812]: Pool not found 2026-01-04T02:40:17.710081Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-04T02:40:17.712941Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330718078175422:2820], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:17.713032Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7591330718078175423:2821], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:40:17.713109Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:17.714036Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330718078175426:2822], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:17.714395Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:17.718030Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591330718078175420:2819]: Pool not found 2026-01-04T02:40:17.718249Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-04T02:40:18.150697Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:219: (#37,[::1]:38280) connection closed by inactivity timeout |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::StyleDeduction [GOOD] |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::StyleDeduction [GOOD] |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsStub >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_Active_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test >> Compression::WriteGZIP [GOOD] >> Compression::WriteZSTD >> HttpRequest::ProbeBaseStats [GOOD] >> TConsoleTests::TestCreateTenantAlreadyExistsExtSubdomain [GOOD] >> TConsoleTests::TestGetUnknownTenantStatus |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> PQCountersLabeled::ImportantFlagSwitching [GOOD] >> PQCountersLabeled::PartitionKeyCompaction |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.4%| 
[TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TieringUsage |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryTrue_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::ProbeBaseStats [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:38:56.748636Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:38:56.870263Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:38:56.878884Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:38:56.879270Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:38:56.879415Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:38:57.237693Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:57.334646Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:57.334786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:57.381783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:57.451504Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:38:58.098313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:58.099160Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:58.099203Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:58.099229Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:58.099531Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:58.162302Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:58.686326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:39:01.784143Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:39:01.792859Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:39:01.797174Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:39:01.830728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:39:01.830856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:01.859830Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:39:01.861894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:02.045958Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:02.046631Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:02.047227Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:02.048044Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:02.048266Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:02.048458Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:02.048681Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:02.048903Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:02.048988Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:02.191858Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:39:02.237690Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:02.237835Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:02.251465Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:02.410535Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:02.517666Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:39:02.517757Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:39:02.547342Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:39:02.547681Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:39:02.547948Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:39:02.548021Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:39:02.548079Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:39:02.548145Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:39:02.548206Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:39:02.548264Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:39:02.549029Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:39:02.695727Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2223:2620] 2026-01-04T02:39:02.696301Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:39:02.698935Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Describe result: PathErrorUnknown 2026-01-04T02:39:02.699001Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Creating table 2026-01-04T02:39:02.699091Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:39:02.703249Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2627], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:39:02.707583Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2234:2625] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:39:02.712166Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:39:02.780937Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:39:02.799171Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:39:02.799282Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2302:2660], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:39:02.813580Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2328:2683] 2026-01-04T02:39:02.814867Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2328:2683], schemeshard id = 72075186224037897 2026-01-04T02:39:03.132410Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:39:03.134416Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2594:2886], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:39:03.136996Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:03.139981Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:39:03.140075Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on create table tx: 28 ... request id = 55, ReplyToActorId = [2:5641:4637], StatRequests.size() = 1 2026-01-04T02:40:04.701541Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 56 ], ReplyToActorId[ [2:5676:4651]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:40:04.701907Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 56 ] 2026-01-04T02:40:04.701955Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 56, ReplyToActorId = [2:5676:4651], StatRequests.size() = 1 2026-01-04T02:40:05.670401Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:40:05.670474Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:40:05.670504Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:40:05.670536Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-04T02:40:05.971296Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:40:06.175657Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 57 ], ReplyToActorId[ [2:5716:4670]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:40:06.176026Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 57 ] 2026-01-04T02:40:06.176072Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 57, ReplyToActorId = [2:5716:4670], StatRequests.size() = 1 2026-01-04T02:40:07.523770Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 58 ], ReplyToActorId[ [2:5750:4686]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:40:07.524133Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 58 ] 2026-01-04T02:40:07.524182Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 58, ReplyToActorId = [2:5750:4686], StatRequests.size() = 1 2026-01-04T02:40:08.887147Z node 2 
:STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:40:08.887396Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 12 2026-01-04T02:40:08.887497Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 2026-01-04T02:40:09.127287Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-04T02:40:09.127361Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:40:09.127550Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-04T02:40:09.141881Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:40:09.929927Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 59 ], ReplyToActorId[ [2:5784:4702]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:40:09.930266Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 59 ] 2026-01-04T02:40:09.930311Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 59, ReplyToActorId = [2:5784:4702], StatRequests.size() = 1 2026-01-04T02:40:11.525866Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 60 ], ReplyToActorId[ [2:5827:4728]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:40:11.526222Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 60 ] 2026-01-04T02:40:11.526267Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 60, ReplyToActorId = [2:5827:4728], StatRequests.size() = 1 2026-01-04T02:40:12.595354Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 61 ], ReplyToActorId[ [2:5860:4742]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:40:12.595656Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 61 ] 2026-01-04T02:40:12.595700Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 61, ReplyToActorId = [2:5860:4742], StatRequests.size() = 1 2026-01-04T02:40:13.612259Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:40:13.844425Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 62 ], ReplyToActorId[ [2:5897:4758]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:40:13.844729Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 62 ] 2026-01-04T02:40:13.844778Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 62, ReplyToActorId = [2:5897:4758], StatRequests.size() = 1 2026-01-04T02:40:14.902263Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 63 ], ReplyToActorId[ [2:5930:4774]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:40:14.902639Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 63 ] 2026-01-04T02:40:14.902693Z node 2 
:STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 63, ReplyToActorId = [2:5930:4774], StatRequests.size() = 1 2026-01-04T02:40:15.855977Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:40:15.856193Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 13 2026-01-04T02:40:15.856292Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 13 2026-01-04T02:40:15.933753Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:40:15.933833Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:40:15.934044Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 50, entries count: 2, are all stats full: 1 2026-01-04T02:40:15.947049Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:40:16.088060Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 64 ], ReplyToActorId[ [2:5965:4792]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:40:16.088410Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 64 ] 2026-01-04T02:40:16.088456Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 64, ReplyToActorId = [2:5965:4792], StatRequests.size() = 1 2026-01-04T02:40:17.260870Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 65 ], ReplyToActorId[ [2:5999:4808]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:40:17.261064Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 65 ] 2026-01-04T02:40:17.261094Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 65, ReplyToActorId = [2:5999:4808], StatRequests.size() = 1 2026-01-04T02:40:18.260036Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 66 ], ReplyToActorId[ [2:6030:4822]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:40:18.260327Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 66 ] 2026-01-04T02:40:18.260370Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 66, ReplyToActorId = [2:6030:4822], StatRequests.size() = 1 2026-01-04T02:40:19.279216Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:40:19.505390Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 67 ], ReplyToActorId[ [2:6068:4837]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:40:19.505702Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 67 ] 2026-01-04T02:40:19.505752Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 67, ReplyToActorId = [2:6068:4837], StatRequests.size() = 1 2026-01-04T02:40:20.710743Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 68 ], ReplyToActorId[ [2:6106:4853]], StatType[ 0 ], StatRequestsCount[ 1 ] 
2026-01-04T02:40:20.711054Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 68 ] 2026-01-04T02:40:20.711099Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 68, ReplyToActorId = [2:6106:4853], StatRequests.size() = 1 2026-01-04T02:40:21.683989Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:40:21.684185Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 14 2026-01-04T02:40:21.684275Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 14 2026-01-04T02:40:21.782867Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:40:21.782939Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:40:21.783176Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 50, entries count: 2, are all stats full: 1 2026-01-04T02:40:21.799264Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:40:21.929929Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 69 ], ReplyToActorId[ [2:6142:4871]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:40:21.930243Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 69 ] 2026-01-04T02:40:21.930287Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 69, ReplyToActorId = [2:6142:4871], StatRequests.size() = 1 2026-01-04T02:40:21.931165Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 70 ], ReplyToActorId[ [2:6145:4874]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:40:21.931343Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 70 ] 2026-01-04T02:40:21.931394Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 70, ReplyToActorId = [2:6145:4874], StatRequests.size() = 1 Answer: 'HTTP/1.1 200 Ok Content-Type: application/json Connection: Close { "row_count":1000, "bytes_size":11272 }' |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Port [GOOD] |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> TPQTestSlow::TestOnDiskStoredSourceIds [GOOD] Test command err: 2026-01-04T02:39:13.235701Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 
2026-01-04T02:39:13.348638Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:13.349638Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:13.349793Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:13.349889Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-04T02:39:13.373932Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:13.399868Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:39:13.401621Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-04T02:39:13.405193Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-04T02:39:13.411505Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-04T02:39:13.412995Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2026-01-04T02:39:13.425621Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:39:13.426279Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1704766-c3b2ec83-d3bbb9d2-92435353_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:39:13.433907Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|6052b199-490de20-46d0043f-39d44877_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:39:13.457865Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:39:13.458342Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|24954009-7632b739-b7ca1f17-39a55977_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' 
owner default 2026-01-04T02:39:13.467618Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:39:13.468043Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|6dbeb2b3-56a98c0b-e9b31f96-ec53e080_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:39:13.475478Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:39:13.475911Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|bb1e30a9-4b81028d-2a151f94-5c9d87b9_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:39:13.483958Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:39:13.484368Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|7458ed96-733525d8-2e5759ae-150cc4_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:39:13.846542Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:113:2057] recipient: [2:106:2138] 2026-01-04T02:39:13.898239Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:13.898306Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:13.898365Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:13.898419Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927938 is [2:158:2176] sender: [2:159:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:182:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:112:2142]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! 
Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:184:2057] recipient: [2:104:2137] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:187:2057] recipient: [2:186:2195] Leader for TabletID 72057594037927937 is [2:188:2196] sender: [2:189:2057] recipient: [2:186:2195] 2026-01-04T02:39:13.948946Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:13.949011Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:13.949070Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:13.949126Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:112:2142]) rebooted! 2026-01-04T02:39:13.959648Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 !Reboot 72057594037927937 (actor [2:112:2142]) tablet resolver refreshed! new actor is[2:188:2196] 2026-01-04T02:39:14.021574Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:14.052779Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:14.063357Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:14.094489Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:14.146291Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:14.177368Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:14.321602Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:14.332252Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:14.614240Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:14.686656Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:15.048851Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:15.215885Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:15.401642Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 
disallowed 0 Leader for TabletID 72057594037927937 is [2:188:2196] sender: [2:269:2057] recipient: [2:14:2061] 2026-01-04T02:39:15.527757Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:15.528800Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 2 actor [2:180:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } 2026-01-04T02:39:15.530039Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:275:2196] 2026-01-04T02:39:15.532637Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition ... R INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:22.214159Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:22.318874Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:22.436274Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:22.671828Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:22.776503Z node 47 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 47 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [47:292:2279] sender: [47:392:2057] recipient: [47:14:2061] 2026-01-04T02:40:23.219225Z node 48 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 48 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:107:2057] recipient: [48:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:107:2057] recipient: [48:105:2138] Leader for TabletID 72057594037927937 is [48:111:2142] sender: [48:112:2057] recipient: [48:105:2138] 2026-01-04T02:40:23.280468Z node 48 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:23.280540Z node 48 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:23.280589Z node 48 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:23.280654Z node 48 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't 
have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:153:2057] recipient: [48:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [48:153:2057] recipient: [48:151:2172] Leader for TabletID 72057594037927938 is [48:157:2176] sender: [48:158:2057] recipient: [48:151:2172] Leader for TabletID 72057594037927937 is [48:111:2142] sender: [48:183:2057] recipient: [48:14:2061] 2026-01-04T02:40:23.302754Z node 48 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:23.303683Z node 48 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 48 actor [48:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 48 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 48 } 2026-01-04T02:40:23.304803Z node 48 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [48:189:2142] 2026-01-04T02:40:23.308004Z node 48 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [48:189:2142] 2026-01-04T02:40:23.310171Z node 48 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [48:190:2142] 2026-01-04T02:40:23.312587Z node 48 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [48:190:2142] 2026-01-04T02:40:23.322900Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:23.323448Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|86019d64-87165237-250ce369-55d0eed4_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:23.331702Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|f403b24e-e310c63f-d1be9c76-9044090a_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:23.370336Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:23.370945Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|721cd3da-547bbfcf-3e027c2e-ad863011_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:23.384346Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:23.384899Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|16763e0-a579fc1e-d3021b06-4d8f9cd1_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:23.399013Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:203: 
Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:23.399524Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|c3a6d5e2-2d62ddd2-e871e415-4c19d777_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:23.409685Z node 48 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:23.410289Z node 48 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e21b8dcf-154be9b3-ce5bea97-5436b801_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:23.859164Z node 49 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 49 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:107:2057] recipient: [49:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:107:2057] recipient: [49:105:2138] Leader for TabletID 72057594037927937 is [49:111:2142] sender: [49:112:2057] recipient: [49:105:2138] 2026-01-04T02:40:23.907283Z node 49 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:23.907345Z node 49 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:23.907383Z node 49 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:23.907426Z node 49 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:153:2057] recipient: [49:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [49:153:2057] recipient: [49:151:2172] Leader for TabletID 72057594037927938 is [49:157:2176] sender: [49:158:2057] recipient: [49:151:2172] Leader for TabletID 72057594037927937 is [49:111:2142] sender: [49:183:2057] recipient: [49:14:2061] 2026-01-04T02:40:23.927974Z node 49 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:23.928622Z node 49 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 49 actor [49:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 SourceIdMaxCounts: 3 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 49 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 49 } 2026-01-04T02:40:23.929415Z node 49 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [49:189:2142] 2026-01-04T02:40:23.931592Z node 49 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [49:189:2142] 2026-01-04T02:40:23.933019Z node 49 :PERSQUEUE INFO: partition_init.cpp:1155: 
[72057594037927937][Partition][1][StateInit] bootstrapping 1 [49:190:2142] 2026-01-04T02:40:23.934496Z node 49 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [49:190:2142] 2026-01-04T02:40:23.941482Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:23.941971Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|3a61c4e8-b1a50dc8-1326a6f3-88d157ad_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:23.947296Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|291cfb95-4450af24-73be3336-534101af_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:23.970559Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:23.971118Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|71ac7c71-10fa43e3-45fd92a3-bae7edfe_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:23.978462Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:23.978870Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|85e4254b-9cfd2e46-8319d03e-c9a561da_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:23.988425Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:23.988888Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|7d3cc1fa-731f41d6-3381703-ae564bed_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:23.998653Z node 49 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:23.999257Z node 49 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a7a03cbe-edcb95b1-c1e51757-183a3979_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> S3SettingsConversion::Port [GOOD] |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsWithQueryServiceDdl |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} 
ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> TConsoleTests::TestGetUnknownTenantStatus [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] >> TPQTest::TestWritePQBigMessage [GOOD] >> TPQTest::TestWritePQ >> TConsoleTests::TestAlterTenantModifyStorageResourcesForRunningExtSubdomain [GOOD] >> TConsoleTests::TestAlterUnknownTenant |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> TPQTest::TestSetClientOffset [GOOD] >> TPQTest::TestReadSessions >> TPartitionTests::DifferentWriteTxBatchingOptions [GOOD] |91.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> KqpScan::ScanDuringSplit10 [GOOD] >> KqpScan::ScanDuringSplitThenMerge ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTests::CreateTableShouldFailOnWrongUnit-EnableTablePgTypes-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:39:41.227274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:41.227368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:41.227408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:41.227436Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:41.227471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:41.227499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:41.227549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:41.227619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: 
Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:41.228324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:41.228545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:41.303140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:41.303195Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:41.316850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:41.317091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:41.317249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:41.326731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:41.326958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:41.327626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:41.327893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:41.330745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:41.330943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:41.332306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:41.332365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:41.332472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:41.332517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:41.332559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:41.332747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:41.456570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:39:41.457274Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.457350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.457409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.457464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.457521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.457579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.457616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.457669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.457714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.457770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.457852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.457904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.457976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.458054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
-04T02:40:27.003045Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:40:27.003161Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:40:27.003258Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:40:27.003332Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:40:27.005546Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:40:27.005626Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:40:27.005713Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:40:27.007663Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:40:27.007727Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:40:27.007820Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:40:27.007940Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:40:27.008177Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:40:27.009955Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:40:27.010219Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-04T02:40:27.010800Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:40:27.011014Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 158913792111 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:40:27.011107Z node 37 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:40:27.011519Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:40:27.011627Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:40:27.012015Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:40:27.012150Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:40:27.014494Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:40:27.014592Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:40:27.014957Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:40:27.015048Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [37:209:2210], at schemeshard: 72057594046678944, txId: 1, path id: 1 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:40:27.015785Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:40:27.015893Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-04T02:40:27.016154Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:40:27.016241Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:40:27.016335Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:40:27.016416Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:40:27.016500Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:40:27.016586Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:40:27.016673Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:40:27.016743Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:40:27.016852Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:40:27.016940Z 
node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-04T02:40:27.017026Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-04T02:40:27.017840Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:40:27.018044Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:40:27.018131Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:40:27.018218Z node 37 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:40:27.018310Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:40:27.018463Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:40:27.022268Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:40:27.023086Z node 37 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-04T02:40:27.024716Z node 37 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [37:684:2674] Bootstrap 2026-01-04T02:40:27.027292Z node 37 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [37:684:2674] Become StateWork (SchemeCache [37:689:2679]) 2026-01-04T02:40:27.031583Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTable CreateTable { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:40:27.032282Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:40:27.032518Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:452: TCreateTable Propose, path: /MyRoot/TTLEnabledTable, opId: 101:0, schema: Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" } Columns { Name: "modified_at" Type: "pgint8" } KeyColumnNames: "key" TTLSettings { Enabled { ColumnName: "modified_at" ColumnUnit: UNIT_AUTO } }, at schemeshard: 72057594046678944 2026-01-04T02:40:27.033476Z node 37 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, at schemeshard: 72057594046678944 2026-01-04T02:40:27.035439Z node 37 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [37:684:2674] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:40:27.038233Z node 37 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "To enable TTL on integral PG type column \'ValueSinceUnixEpochModeSettings\' should be specified" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:40:27.038723Z node 37 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: To enable TTL on integral PG type column 'ValueSinceUnixEpochModeSettings' should be specified, operation: CREATE TABLE, path: /MyRoot/TTLEnabledTable 2026-01-04T02:40:27.040266Z node 37 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::DifferentWriteTxBatchingOptions [GOOD] Test command err: 2026-01-04T02:40:03.272308Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:03.322658Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:03.322739Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:03.322794Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:03.322852Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:40:03.341389Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [1:181:2194] 2026-01-04T02:40:03.343391Z node 1 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-04T02:40:03.000000Z 2026-01-04T02:40:03.343642Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [1:181:2194] 2026-01-04T02:40:03.364789Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:03.406038Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:03.426865Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:03.437393Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:03.478514Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:03.519678Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:03.540547Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\270\207\201\267\27038\001" StorageChannel: INLINE } 2026-01-04T02:40:03.681904Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:03.702654Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\270\207\201\267\27038\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "m0000000003cclient" Value: "\010\001\020\001\030\001\"\007session(\0000\001@\001" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\001\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\270\207\201\267\27038\001" StorageChannel: INLINE } 
2026-01-04T02:40:03.756245Z node 1 :PERSQUEUE WARN: partition.cpp:3024: [72057594037927937][Partition][3][StateIdle] Partition 3 Consumer 'client' Bad request (gap) Offset 1 Begin 0 2026-01-04T02:40:03.756390Z node 1 :PERSQUEUE WARN: partition.cpp:3024: [72057594037927937][Partition][3][StateIdle] Partition 3 Consumer 'client' Bad request (gap) Offset 1 Begin 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\263\222\004" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\270\207\201\267\27038\001" StorageChannel: INLINE } Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\264\222\004" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\270\207\201\267\27038\001" StorageChannel: INLINE } 2026-01-04T02:40:04.091044Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:04.136919Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:04.136981Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:04.137048Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:04.137111Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:40:04.150731Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [2:183:2196] 2026-01-04T02:40:04.152633Z node 2 :PERSQUEUE INFO: partition_init.cpp:1032: [Root/PQ/rt3.dc1--account--topic:3:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp from keys completed. 
Value 2026-01-04T02:40:04.000000Z 2026-01-04T02:40:04.152876Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 3 generation 0 [2:183:2196] 2026-01-04T02:40:04.173974Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:04.216475Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:04.237318Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:04.247826Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:04.289142Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:04.330579Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:04.361637Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\240\217\201\267\27038\001" StorageChannel: INLINE } 2026-01-04T02:40:04.510070Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000003cclient" Value: "\010\000\020\001\030\001\"\007session(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000003uclient" Value: "\000\000\000\000\000\000\000\000\001\000\000\000\001\000\000\000session" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\240\217\201\267\27038\001" StorageChannel: INLINE } 2026-01-04T02:40:04.532322Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000003|0000000000000000" IncludeFrom: true To: "e0000000003|0000000000000001" IncludeTo: false } } CmdWrite { Key: "I0000000003" Value: "\010\271`\020\262\222\004" StorageChannel: INLINE } CmdWrite { Key: "i0000000003" Value: "\030\000(\240\217\201\267\27038\001" StorageChannel: INLINE } 2026-01-04T02:40:04.867472Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:04.901188Z node 3 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] 
doesn't have tx info 2026-01-04T02:40:04.901234Z node 3 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:04.901263Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:04.901299Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:40:04.911667Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitConfigStep Got KV request 2026-01-04T02:40:04.911841Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:40:04.912040Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [3:182:2195] 2026-01-04T02:40:04.912662Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [Root/PQ/rt3.dc1--account--topic:0:Initializer] Start initializing step TInitDiskStatusStep Got KV request Go ... action and tx events 2026-01-04T02:40:28.145286Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-04T02:40:28.145334Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 2, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:28.145363Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:28.145404Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 2, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:28.145448Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:28.145504Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:40:28.145707Z node 6 :PERSQUEUE DEBUG: partition.cpp:1672: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2026-01-04T02:40:28.145754Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:28.145791Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[Tx]) 2026-01-04T02:40:28.145868Z node 6 :PERSQUEUE DEBUG: partition.cpp:1616: [72057594037927937][Partition][0][StateIdle] TxId 26 affect SourceId src1 2026-01-04T02:40:28.145930Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2026-01-04T02:40:28.145955Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:28.145982Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:28.146014Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:28.146041Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 
2026-01-04T02:40:28.146090Z node 6 :PERSQUEUE DEBUG: partition.cpp:1672: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvGetWriteInfoResponse 2026-01-04T02:40:28.146111Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:28.146149Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2026-01-04T02:40:28.146181Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:28.146204Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:28.146240Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:28.146266Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Wait for no tx committed 2026-01-04T02:40:28.146425Z node 6 :PERSQUEUE DEBUG: partition.cpp:1433: [72057594037927937][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1, TxId 26 2026-01-04T02:40:28.146488Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:28.146531Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2026-01-04T02:40:28.146566Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:28.146594Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:28.146632Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[Tx]) 2026-01-04T02:40:28.146676Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: 26 2026-01-04T02:40:28.146742Z node 6 :PERSQUEUE DEBUG: partition.cpp:3095: [72057594037927937][Partition][0][StateIdle] Head=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0, NewHead=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0 2026-01-04T02:40:28.146785Z node 6 :PERSQUEUE DEBUG: partition.cpp:3950: [72057594037927937][Partition][0][StateIdle] Schedule reply tx done 26 2026-01-04T02:40:28.146826Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 1, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:28.146864Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:40:28.146900Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist Got KV request Got batch complete: 1 Got KV request Got KV request 2026-01-04T02:40:28.167641Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:28.188270Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. 
Can't process user action and tx events 2026-01-04T02:40:28.198626Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:28.219448Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:28.240156Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:28.260827Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:28.281865Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:28.312586Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:28.333156Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:28.364776Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:28.385268Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events Send disk status response with cookie: 0 Wait tx committed for tx 26 2026-01-04T02:40:28.385553Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:28.385699Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2026-01-04T02:40:28.385764Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:28.385830Z node 6 :PERSQUEUE DEBUG: partition.cpp:2416: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ImmediateTx]) 2026-01-04T02:40:28.385886Z node 6 :PERSQUEUE DEBUG: partition.cpp:1608: [72057594037927937][Partition][0][StateIdle] TxId (empty maybe) affect SourceId src1 2026-01-04T02:40:28.385939Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:28.385999Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:28.386064Z node 6 :PERSQUEUE DEBUG: partition.cpp:2480: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ImmediateTx]) 2026-01-04T02:40:28.386123Z node 6 :PERSQUEUE DEBUG: partition.cpp:3067: [72057594037927937][Partition][0][StateIdle] TPartition::CommitWriteOperations TxId: (empty maybe) 2026-01-04T02:40:28.386190Z node 6 :PERSQUEUE DEBUG: partition.cpp:3095: [72057594037927937][Partition][0][StateIdle] Head=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0, NewHead=Offset 231 PartNo 0 PackedSize 0 count 0 nextOffset 231 batches 0 2026-01-04T02:40:28.386267Z node 6 :PERSQUEUE DEBUG: partition.cpp:3941: 
[72057594037927937][Partition][0][StateIdle] schedule TEvPersQueue::TEvProposeTransactionResult(COMPLETE), reason= 2026-01-04T02:40:28.386314Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:28.386359Z node 6 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:40:28.386411Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:28.386613Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got KV request Got batch complete: 1 Got KV request Got KV request Send disk status response with cookie: 0 Wait immediate tx complete 28 2026-01-04T02:40:28.386877Z node 6 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:28.407399Z node 6 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][0][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:28.407487Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][0][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-04T02:40:28.407632Z node 6 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 1 2026-01-04T02:40:28.407708Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:28.407754Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:28.407810Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:28.407900Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:28.407945Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:28.408011Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction Got propose resutl: Origin: 72057594037927937 Status: COMPLETE TxId: 28 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> KqpSystemView::QueryStatsSimple [GOOD] >> TConsoleTests::TestGetUnknownTenantStatusExtSubdomain [GOOD] >> TConsoleTests::TestListTenants >> KqpScripting::EndOfQueryCommit >> KqpYql::ColumnNameConflict >> KqpYql::UuidPrimaryKeyDisabled >> KqpYql::JsonNumberPrecision >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce >> KqpYql::TableRange >> KqpScripting::StreamExecuteYqlScriptScanCancelation >> KqpScripting::ScanQuery >> KqpScripting::StreamExecuteYqlScriptMixed >> KqpScripting::StreamScanQuery >> KqpScripting::UnsafeTimestampCast >> TConsoleTests::TestAlterUnknownTenant [GOOD] >> TConsoleTests::TestAlterUnknownTenantExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/sysview/unittest >> KqpSystemView::QueryStatsSimple [GOOD] Test command err: Trying to start YDB, gRPC: 13014, MsgBus: 7801 2026-01-04T02:39:31.296342Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330521137983727:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:31.296380Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:31.442477Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330522384924320:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:31.442622Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:31.486464Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591330522987074478:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:31.488161Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:31.661297Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:31.673039Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:31.680766Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:31.798644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:31.798764Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:31.799399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:31.799603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:31.800457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:31.800516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:31.837540Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-04T02:39:31.837589Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:39:31.839897Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:31.840245Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:31.843600Z 
node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:31.881589Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:31.910738Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:31.922397Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:31.928836Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:32.110602Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:39:32.110623Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:39:32.110637Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:39:32.110721Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:32.310910Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:32.484608Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:32.503133Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:32.688652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976725657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:32.839137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:33.074088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:33.262927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-04T02:39:33.361964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.417640Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330538317855199:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.417746Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.418059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330538317855209:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.418123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:35.744894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.794586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.843306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.892336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.930349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:35.985045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.044351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.148382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976725669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:36.223378Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330542612823590:2410], DatabaseId ... 
Connecting 2026-01-04T02:40:17.972262Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:17.974054Z node 16 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 18 Cookie 18 2026-01-04T02:40:17.974089Z node 16 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 17 Cookie 17 2026-01-04T02:40:17.975427Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(17, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:17.976124Z node 16 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(18, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:17.991646Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:18.034539Z node 17 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:18.048275Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:18.048304Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:18.048315Z node 16 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:18.048422Z node 16 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:18.342554Z node 18 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:18.669569Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:18.699743Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:18.752579Z node 16 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:18.775003Z node 18 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:18.775634Z node 17 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:18.811131Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:18.934771Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:19.011068Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:22.643911Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7591330738898118735:2357], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:22.644038Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:22.645186Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7591330738898118745:2358], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:22.645276Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:22.739763Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:22.747369Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[16:7591330717423279960:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:22.747460Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:22.796525Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:22.854822Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:22.914462Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:22.978067Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:23.044602Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:23.108172Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:23.314653Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:23.415206Z node 16 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7591330743193087142:2411], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:23.415302Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:23.415490Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7591330743193087147:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:23.415533Z node 16 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [16:7591330743193087148:2415], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:23.415830Z node 16 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:23.419857Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:23.442356Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [16:7591330743193087151:2416], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:40:23.541538Z node 16 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [16:7591330743193087226:4769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:27.065017Z node 16 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494427053, txId: 281474976715675] shutting down |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/sysview/unittest >> KqpScripting::StreamExecuteYqlScriptData |91.5%| [TA] $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} |91.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} |91.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/sysview/test-results/unittest/{meta.json ... results_accumulator.log} >> PQCountersLabeled::PartitionKeyCompaction [GOOD] >> PQCountersLabeled::PartitionBlobCompactionCounters >> KqpScripting::QueryStats >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_RegisteredSourceId_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test >> TConsoleTests::TestAlterUnknownTenantExtSubdomain [GOOD] >> TConsoleTests::TestAlterBorrowedStorage >> TConsoleTests::TestListTenants [GOOD] >> TConsoleTests::TestListTenantsExtSubdomain >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveConfig_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionNotExists_Test [GOOD] >> TPartitionGraphTest::BuildGraph [GOOD] >> TPartitionScaleManagerGraphCmpTest::Equal [GOOD] >> TPartitionScaleManagerGraphCmpTest::ExtraRootPartitionsInSourceTopic [GOOD] >> TPartitionScaleManagerGraphCmpTest::EqualSplitted [GOOD] >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> KqpYql::UuidPrimaryKeyDisabled [GOOD] >> TPartitionTests::UserActCount [GOOD] >> TPartitionTests::TooManyImmediateTxs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyDisabled [GOOD] Test command err: Trying to start YDB, gRPC: 23602, MsgBus: 11567 2026-01-04T02:40:30.735971Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330776413066940:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:30.736029Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:31.059062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:31.059222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:31.126099Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:31.130450Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:31.178595Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:31.180034Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330776413066893:2081] 1767494430730226 != 1767494430730229 2026-01-04T02:40:31.364589Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:31.368224Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:31.368254Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:31.368277Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:31.368389Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:31.745851Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:31.997192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:32.006537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:40:33.838608Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330789297969667:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.838714Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.838964Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330789297969676:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.839014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.290049Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330793592936987:2503] txid# 281474976710658, issues: { message: "Uuid as primary key is forbiden by configuration: key" severity: 1 } 2026-01-04T02:40:34.323939Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330793592936995:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.324037Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.324255Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330793592936997:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.324318Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.341686Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330793592937004:2512] txid# 281474976710659, issues: { message: "Uuid as primary key is forbiden by configuration: key" severity: 1 } 2026-01-04T02:40:34.358908Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330793592937012:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.359010Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.359382Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330793592937015:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.359471Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.380273Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330793592937021:2521] txid# 281474976710660, issues: { message: "Uuid as primary key is forbiden by configuration: val" severity: 1 } 2026-01-04T02:40:34.390300Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330793592937029:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.390372Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.390618Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330793592937031:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.390685Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.407651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.542492Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330793592937120:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.542605Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.543117Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330793592937122:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.543199Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] Test command err: 2026-01-04T02:39:10.003285Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1767494350003255 2026-01-04T02:39:10.196681Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330432751452199:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:10.196793Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:10.228399Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330430717140931:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:10.228699Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:39:10.228694Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:10.234734Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:39:10.338977Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:10.392466Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:10.421732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:10.421905Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:10.423244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:10.423308Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:10.431754Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:10.435131Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:39:10.439144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:10.515012Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:10.531386Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:10.557335Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/00269b/r3tmp/yandexYB5pfI.tmp 2026-01-04T02:39:10.557392Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/00269b/r3tmp/yandexYB5pfI.tmp 2026-01-04T02:39:10.557555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/00269b/r3tmp/yandexYB5pfI.tmp 2026-01-04T02:39:10.557640Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:10.582462Z INFO: TTestServer started on Port 22134 GrpcPort 24822 2026-01-04T02:39:10.628543Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:24822 2026-01-04T02:39:10.745599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:11.205261Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:11.232382Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:12.695672Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330441341388101:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:12.695677Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330441341388093:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:12.695812Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:12.696225Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330441341388110:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:12.696295Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:12.698951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:12.717698Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330441341388108:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-04T02:39:12.999028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:13.000037Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330441341388195:2982] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:13.006072Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591330439307075783:2306], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:39:13.006698Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZmQyOTE2OTYtZmQ0ZTZiOWMtOGNkN2FkNzQtZmQ1OWFlZTI=, ActorId: [2:7591330439307075758:2300], ActorState: ExecuteState, LegacyTraceId: 01ke3dyzty5f0022hpbqeg0xyz, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:39:13.012534Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:39:13.025121Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330445636355546:2345], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:39:13.025525Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=N2M0MDdjNC0xMTBiYzBlYi00ZmU4ZjkxNC00MjljMjExNQ==, ActorId: [1:7591330441341388090:2327], ActorState: ExecuteState, LegacyTraceId: 01ke3dyzspbt6epqwnar61kdyv, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:39:13.025916Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2 ... on cookie 1 consumer shared/user session shared/user_11_1_1367276890512542068_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 4 2026-01-04T02:40:33.120845Z node 11 :PQ_READ_PROXY DEBUG: partition_actor.cpp:902: session cookie 1 consumer shared/user session shared/user_11_1_1367276890512542068_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 1db35269-9942e974-57d90251-cdf6cfdf has messages 1 2026-01-04T02:40:33.120949Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1951: session cookie 1 consumer shared/user session shared/user_11_1_1367276890512542068_v1 read done: guid# 1db35269-9942e974-57d90251-cdf6cfdf, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 549 2026-01-04T02:40:33.120991Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2113: session cookie 1 consumer shared/user session shared/user_11_1_1367276890512542068_v1 response to read: guid# 1db35269-9942e974-57d90251-cdf6cfdf 2026-01-04T02:40:33.121207Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_11_1_1367276890512542068_v1 Process answer. 
Aval parts: 0 2026-01-04T02:40:33.121932Z :DEBUG: [/Root] [/Root] [6997239f-a8fa06c0-5a8ab13a-36e914c8] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:40:33.122084Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_11_1_1367276890512542068_v1 grpc read done: success# 1, data# { read { } } 2026-01-04T02:40:33.122237Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (0-3) 2026-01-04T02:40:33.122211Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_11_1_1367276890512542068_v1 got read request: guid# 7caedd-bed89c16-27782b4d-b86f8cee 2026-01-04T02:40:33.122440Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (0-0) 2026-01-04T02:40:33.122503Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (1-1) 2026-01-04T02:40:33.122557Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 0} (2-2) 2026-01-04T02:40:33.122587Z :DEBUG: [/Root] Take Data. Partition 0. Read: {2, 1} (3-3) 2026-01-04T02:40:33.122659Z :DEBUG: [/Root] [/Root] [6997239f-a8fa06c0-5a8ab13a-36e914c8] [null] The application data is transferred to the client. Number of messages 4, size 14 bytes 2026-01-04T02:40:33.122935Z :INFO: [/Root] [/Root] [6997239f-a8fa06c0-5a8ab13a-36e914c8] Closing read session. Close timeout: 0.000000s 2026-01-04T02:40:33.122998Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:test-topic:0:1:3:0 2026-01-04T02:40:33.123063Z :INFO: [/Root] [/Root] [6997239f-a8fa06c0-5a8ab13a-36e914c8] Counters: { Errors: 0 CurrentSessionLifetimeMs: 28 BytesRead: 14 MessagesRead: 4 BytesReadCompressed: 74 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:40:33.123203Z :NOTICE: [/Root] [/Root] [6997239f-a8fa06c0-5a8ab13a-36e914c8] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-04T02:40:33.123264Z :DEBUG: [/Root] [/Root] [6997239f-a8fa06c0-5a8ab13a-36e914c8] [null] Abort session to cluster 2026-01-04T02:40:33.123990Z :NOTICE: [/Root] [/Root] [6997239f-a8fa06c0-5a8ab13a-36e914c8] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:40:33.124249Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f07a82bb-73295778-a8a54450-cb037137_0] Write session: close. Timeout = 0 ms 2026-01-04T02:40:33.124299Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f07a82bb-73295778-a8a54450-cb037137_0] Write session will now close 2026-01-04T02:40:33.124359Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f07a82bb-73295778-a8a54450-cb037137_0] Write session: aborting 2026-01-04T02:40:33.124493Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_11_1_1367276890512542068_v1 grpc read done: success# 0, data# { } 2026-01-04T02:40:33.124518Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_11_1_1367276890512542068_v1 grpc read failed 2026-01-04T02:40:33.124543Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_11_1_1367276890512542068_v1 grpc closed 2026-01-04T02:40:33.124592Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_11_1_1367276890512542068_v1 is DEAD 2026-01-04T02:40:33.124802Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f07a82bb-73295778-a8a54450-cb037137_0] Write session: gracefully shut down, all writes complete 2026-01-04T02:40:33.124858Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|f07a82bb-73295778-a8a54450-cb037137_0] Write session: destroy 2026-01-04T02:40:33.125015Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_11_1_1367276890512542068_v1 2026-01-04T02:40:33.125084Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [11:7591330786308352936:2532] destroyed 2026-01-04T02:40:33.125140Z node 12 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_11_1_1367276890512542068_v1 2026-01-04T02:40:33.125319Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [11:7591330786308352933:2529] disconnected. 
2026-01-04T02:40:33.125353Z node 11 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [11:7591330786308352933:2529] disconnected; active server actors: 1 2026-01-04T02:40:33.125378Z node 11 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [11:7591330786308352933:2529] client user disconnected session shared/user_11_1_1367276890512542068_v1 2026-01-04T02:40:33.125478Z node 11 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-message-group-id|f07a82bb-73295778-a8a54450-cb037137_0 grpc read done: success: 0 data: 2026-01-04T02:40:33.125491Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: test-message-group-id|f07a82bb-73295778-a8a54450-cb037137_0 grpc read failed 2026-01-04T02:40:33.125631Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:824: session v1 closed cookie: 2 sessionId: test-message-group-id|f07a82bb-73295778-a8a54450-cb037137_0 2026-01-04T02:40:33.125658Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-message-group-id|f07a82bb-73295778-a8a54450-cb037137_0 is DEAD 2026-01-04T02:40:33.126057Z node 11 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:40:33.126322Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [11:7591330760538548874:2474] destroyed 2026-01-04T02:40:33.126399Z node 12 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-04T02:40:33.126436Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:33.126455Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:33.126472Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:33.126505Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:33.126533Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:40:33.139546Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:33.139590Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:33.139607Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:33.139632Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:33.139648Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:40:33.239906Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:33.239961Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:33.239980Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:33.240008Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:33.240022Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:40:33.340338Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:33.340372Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:33.340384Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:33.340405Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:33.340420Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:40:33.626501Z node 11 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1259} ActorId: [11:7591330786308352962:2533] TxId: 281474976720681. Ctx: { TraceId: 01ke3e1ectatk4c199ke39nan8, Database: /Root, SessionId: ydb://session/3?node_id=11&id=MmFiNjkwMzAtYzM1MzViNWUtNGU5NmRjNjktZDBmOTRjOTc=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 12 trace_id# 2026-01-04T02:40:33.626665Z node 11 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [11:7591330786308352967:2533], TxId: 281474976720681, task: 3. Ctx: { CheckpointId : . TraceId : 01ke3e1ectatk4c199ke39nan8. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=11&id=MmFiNjkwMzAtYzM1MzViNWUtNGU5NmRjNjktZDBmOTRjOTc=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [11:7591330786308352962:2533], status: UNAVAILABLE, reason: {
: Error: Terminate execution } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionScaleManagerGraphCmpTest::EqualSplitted [GOOD] Test command err: 2026-01-04T02:40:05.133006Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330667438915962:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:05.133051Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:05.180052Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:40:05.188671Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330667018132692:2175];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:05.189197Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:05.195368Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:40:05.376671Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:05.390678Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:05.430404Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:05.430569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:05.431040Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:05.431103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:05.437549Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:40:05.437756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:05.458849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:05.520285Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:05.570211Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:05.587967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001c6d/r3tmp/yandex68pRoh.tmp 2026-01-04T02:40:05.588000Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
/home/runner/.ya/build/build_root/c5zo/001c6d/r3tmp/yandex68pRoh.tmp 2026-01-04T02:40:05.588206Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001c6d/r3tmp/yandex68pRoh.tmp 2026-01-04T02:40:05.588338Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:05.625765Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:05.627284Z INFO: TTestServer started on Port 14132 GrpcPort 11273 PQClient connected to localhost:11273 2026-01-04T02:40:05.853785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:05.946345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:06.147533Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:06.192320Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:08.061098Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330679903034792:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:08.061099Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330679903034801:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:08.061202Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:08.061446Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330679903034807:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:08.061508Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:08.067344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:08.083185Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330679903034806:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-04T02:40:08.157014Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330679903034836:2137] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:08.523666Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591330679903034851:2310], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:40:08.524043Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZWE0OWNlODctYTI4Mzg0NzEtOTgwZDQyYWQtZjljNTg3ZjM=, ActorId: [2:7591330679903034790:2300], ActorState: ExecuteState, LegacyTraceId: 01ke3e0nvv8z30dsbb0h3aybhe, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:40:08.534301Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:40:08.579260Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330680323819371:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:40:08.580898Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=N2I0MzQ2NmEtMzBiOTU0M2YtNjIwZjgyMGQtZGRiNTUxODQ=, ActorId: [1:7591330680323819331:2334], ActorState: ExecuteState, LegacyTraceId: 01ke3e0pbfa4e5bjbgz3wnaqe7, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:40:08.581168Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:40:08.608653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but p ... ror=timeout; === CheckClustersList. 
Ok 2026-01-04T02:40:29.710461Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:30.144566Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:40:30.592343Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:31.187360Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:40:31.723760Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:40:32.401599Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715692:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) Run query: --!syntax_v1 UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (11131928866524144434, "Root", "00415F536F757263655F35", 1767494433124, 1767494433124, 0, 13); 2026-01-04T02:40:33.255799Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-01-04T02:40:33.255837Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-01-04T02:40:33.255854Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: 
--!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-01-04T02:40:33.255871Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__sm_chooser_actor.h:116: TPartitionChooser [3:7591330786622032339:4141] (SourceId=A_Source_5, PreferedPartition=(NULL)) GetOwnershipFast Partition=1 TabletId=1001 2026-01-04T02:40:33.256021Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [3:7591330786622032340:4141], Recipient [3:7591330769442162244:3600]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [3:7591330786622032339:4141] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2026-01-04T02:40:33.256123Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 271188557, Sender [3:7591330786622032339:4141], Recipient [3:7591330769442162244:3600]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "A_Source_5" 2026-01-04T02:40:33.256202Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__sm_chooser_actor.h:139: StateOwnershipFast, received event# 271188558, Sender [3:7591330769442162244:3600], Recipient [3:7591330786622032339:4141]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2026-01-04T02:40:33.256225Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:88: TPartitionChooser [3:7591330786622032339:4141] (SourceId=A_Source_5, PreferedPartition=(NULL)) InitTable: SourceId=A_Source_5 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2026-01-04T02:40:33.256279Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [3:7591330786622032339:4141], Recipient [3:7591330769442162244:3600]: NActors::TEvents::TEvPoison 2026-01-04T02:40:33.256345Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:101: StateInitTable, received event# 277020685, Sender [3:7591330726492486943:2072], Recipient [3:7591330786622032339:4141]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2026-01-04T02:40:33.256362Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:111: TPartitionChooser [3:7591330786622032339:4141] (SourceId=A_Source_5, PreferedPartition=(NULL)) StartKqpSession 2026-01-04T02:40:33.258936Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:132: StateCreateKqpSession, received event# 271646728, Sender [3:7591330726492487153:2269], Recipient [3:7591330786622032339:4141]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: "ydb://session/3?node_id=3&id=MmU0YTYwYmYtM2ExYzg2MjMtMThhOTY2MDctNGQ4ZDZhNQ==" NodeId: 3 } YdbStatus: SUCCESS ResourceExhausted: false 2026-01-04T02:40:33.258961Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:142: TPartitionChooser [3:7591330786622032339:4141] (SourceId=A_Source_5, PreferedPartition=(NULL)) Select from the table 2026-01-04T02:40:33.517359Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:163: StateSelect, received event# 271646721, Sender [3:7591330726492487153:2269], Recipient [3:7591330786622032339:4141]: 
NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=3&id=MmU0YTYwYmYtM2ExYzg2MjMtMThhOTY2MDctNGQ4ZDZhNQ==" PreparedQuery: "cd37f21d-5420089d-1c164c9c-d67aa08f" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01ke3e1emzd7pbbkq7j8s24nh2" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1767494433124 } items { uint64_value: 1767494433124 } items { uint64_value: 13 } } format: FORMAT_VALUE } QueryDiagnostics: "" } YdbStatus: SUCCESS C 2026-01-04T02:40:33.517545Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:151: TPartitionChooser [3:7591330786622032339:4141] (SourceId=A_Source_5, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2026-01-04T02:40:33.517571Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__sm_chooser_actor.h:209: TPartitionChooser [3:7591330786622032339:4141] (SourceId=A_Source_5, PreferedPartition=(NULL)) OnPartitionChosen 2026-01-04T02:40:33.517716Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [3:7591330786622032405:4141], Recipient [3:7591330769442162244:3600]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [3:7591330786622032339:4141] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2026-01-04T02:40:33.517795Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 271188557, Sender [3:7591330786622032339:4141], Recipient [3:7591330769442162244:3600]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 2026-01-04T02:40:33.517894Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:240: StateCheckPartition, received event# 271188558, Sender [3:7591330769442162244:3600], Recipient [3:7591330786622032339:4141]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2026-01-04T02:40:33.517931Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:174: TPartitionChooser [3:7591330786622032339:4141] (SourceId=A_Source_5, PreferedPartition=(NULL)) Update the table 2026-01-04T02:40:33.518218Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [3:7591330786622032339:4141], Recipient [3:7591330769442162244:3600]: NActors::TEvents::TEvPoison Received TEvChooseResult: 2026-01-04T02:40:33.636314Z node 3 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:212: StateUpdate, received event# 271646721, Sender [3:7591330726492487153:2269], Recipient [3:7591330786622032339:4141]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=3&id=MmU0YTYwYmYtM2ExYzg2MjMtMThhOTY2MDctNGQ4ZDZhNQ==" PreparedQuery: "4e22c0a3-38dbf71d-2620a087-ac546ab6" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 
} } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 69 1 2026-01-04T02:40:33.636357Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:183: TPartitionChooser [3:7591330786622032339:4141] (SourceId=A_Source_5, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2026-01-04T02:40:33.636392Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [3:7591330786622032339:4141] (SourceId=A_Source_5, PreferedPartition=(NULL)) ReplyResult: Partition=1, SeqNo=13 2026-01-04T02:40:33.636420Z node 3 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [3:7591330786622032339:4141] (SourceId=A_Source_5, PreferedPartition=(NULL)) Start idle Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 11131928866524144434 AND Topic = "Root" AND ProducerId = "00415F536F757263655F35" |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |91.5%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> KqpYql::TableRange [GOOD] >> KqpYql::TableNameConflict >> KqpYql::ColumnNameConflict [GOOD] >> KqpYql::ColumnTypeMismatch >> KqpYql::JsonNumberPrecision [GOOD] >> KqpYql::JsonCast >> KqpScripting::UnsafeTimestampCast [GOOD] >> KqpScripting::SystemTables >> TPQTest::The_Keys_Are_Loaded_In_Several_Iterations [GOOD] >> TPQTestInternal::RestoreKeys [GOOD] >> Compression::WriteZSTD [GOOD] >> Compression::WriteWithMixedCodecs >> KqpScripting::EndOfQueryCommit [GOOD] >> KqpScripting::ExecuteYqlScriptPg >> KqpScripting::ScanQuery [GOOD] >> KqpScripting::ScanQueryDisable >> KqpScripting::StreamScanQuery [GOOD] >> KqpScripting::SyncExecuteYqlScriptSeveralQueries >> KqpScripting::StreamExecuteYqlScriptData [GOOD] >> KqpScripting::StreamExecuteYqlScriptEmptyResults >> TPartitionTests::TooManyImmediateTxs [GOOD] >> TPartitionTests::WriteSubDomainOutOfSpace >> KqpYql::InsertCV-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTestInternal::RestoreKeys [GOOD] Test command err: 2026-01-04T02:40:02.419062Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-04T02:40:02.468824Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:02.468886Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:02.468924Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:02.468970Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: 
[1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-04T02:40:02.485225Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:02.503120Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-04T02:40:02.504003Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-04T02:40:02.505777Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-04T02:40:02.508676Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-04T02:40:02.510066Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2026-01-04T02:40:02.549882Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:02.550398Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|b81d262a-4984fe09-dd584ac5-ff5aff28_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:236:2057] recipient: [1:103:2137] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:239:2057] recipient: [1:238:2234] Leader for TabletID 72057594037927937 is [1:240:2235] sender: [1:241:2057] recipient: [1:238:2234] 2026-01-04T02:40:02.663480Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:02.663540Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:02.664332Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:02.664375Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:40:02.664870Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:289:2235] 2026-01-04T02:40:02.666257Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:290:2235] 
2026-01-04T02:40:02.672088Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:40:02.672955Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [1:290:2235] 2026-01-04T02:40:02.684763Z node 1 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:40:02.685420Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [1:289:2235] 2026-01-04T02:40:02.685997Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [1:240:2235] sender: [1:318:2057] recipient: [1:14:2061] Got start offset = 40000 2026-01-04T02:40:03.006224Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:113:2057] recipient: [2:106:2138] 2026-01-04T02:40:03.054996Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:03.055077Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:03.055140Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:03.055246Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927938 is [2:158:2176] sender: [2:159:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:182:2057] recipient: [2:14:2061] 2026-01-04T02:40:03.069091Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:03.069975Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 2 actor [2:180:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } Consumers { Name: "user1" Generation: 2 Important: true } 2026-01-04T02:40:03.070633Z node 2 :PERSQUEUE INFO: 
partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:188:2142] 2026-01-04T02:40:03.072536Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [2:188:2142] 2026-01-04T02:40:03.074691Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:189:2142] 2026-01-04T02:40:03.075889Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [2:189:2142] 2026-01-04T02:40:03.105846Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:03.106316Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e97e5b82-f03635c-48be48da-d9fff80c_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:235:2057] recipient: [2:104:2137] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:238:2057] recipient: [2:237:2232] Leader for TabletID 72057594037927937 is [2:239:2233] sender: [2:240:2057] recipient: [2:237:2232] 2026-01-04T02:40:03.225275Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:03.225333Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:03.226007Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:03.226053Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:40:03.226529Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:288:2233] 2026-01-04T02:40:03.228128Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:289:2233] 2026-01-04T02:40:03.234468Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:40:03.235037Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:289:2233] 2026-01-04T02:40:03.248870Z node 2 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-01-04T02:40:03.249577Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:288:2233] 2026-01-04T02:40:03.250119Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [2:239:2233] sender: [2:317:2057] recipient: [2:14:2061] Got start offset = 40000 2026-01-04T02:40:03.604972Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:107:2057] recipient: [3:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:107:2057] recipient: [3:105:2138] Le ... 0000000000000000234_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000235_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000236_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000237_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000238_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000239_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000240_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000241_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000242_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000243_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000244_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000245_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000246_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000247_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000248_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000249_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000250_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000251_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000252_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000253_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000254_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000255_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000256_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000257_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000258_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000259_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000260_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000261_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000262_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000263_00000_0000000001_00000? 
size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000264_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000265_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000266_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000267_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000268_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000269_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000270_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000271_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000272_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000273_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000274_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000275_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000276_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000277_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000278_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000279_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000280_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000281_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000282_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000283_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000284_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000285_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000286_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000287_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000288_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000289_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000290_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000291_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000292_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000293_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000294_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000295_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000296_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000297_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000298_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000299_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000300_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000301_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000302_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000303_00000_0000000001_00000? 
size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000304_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000305_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000306_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000307_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000308_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000309_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000310_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000311_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000312_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000313_00000_0000000001_00000? size 50062 SYNC INIT DATA KEY: d0000000000_00000000000000000314_00000_0000000001_00000? size 28702 2026-01-04T02:40:37.018164Z node 9 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:40:37.018201Z node 9 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-04T02:40:37.018232Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:37.018260Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:37.018285Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:37.018310Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:37.018335Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:37.018381Z node 9 :PERSQUEUE DEBUG: partition.cpp:757: [72057594037927937][Partition][0][StateIdle] Init complete for topic 'rt3.dc1--asdfgs--topic' Partition: 0 SourceId: sourceid SeqNo: 315 offset: 314 MaxOffset: 315 2026-01-04T02:40:37.018411Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 13 2026-01-04T02:40:37.018453Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:976: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user send read request for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 1 rrg 13 2026-01-04T02:40:37.018508Z node 9 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-04T02:40:37.018866Z node 9 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:40:37.019850Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:891: [72057594037927937][Partition][0][StateIdle] read cookie 3 Topic 'rt3.dc1--asdfgs--topic' partition 0 user user offset 0 partno 0 count 1 size 1024000 endOffset 315 max time lag 0ms effective offset 0 2026-01-04T02:40:37.020077Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72057594037927937][Partition][0][StateIdle] read cookie 3 added 1 blobs, size 8352628 count 168 last offset 0, current partition end offset: 315 2026-01-04T02:40:37.020108Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72057594037927937][Partition][0][StateIdle] Reading cookie 3. Send blob request. 2026-01-04T02:40:37.020263Z node 9 :PERSQUEUE DEBUG: cache_eviction.h:464: No blob in L1. Partition 0 offset 0 partno 0 count 167 parts_count 0 actorID [9:3737:5380] 2026-01-04T02:40:37.020327Z node 9 :PERSQUEUE DEBUG: read.h:142: [72057594037927937][PQCacheProxy]Reading cookie 3. Have to read 1 of 1 from KV 2026-01-04T02:40:37.020422Z node 9 :PERSQUEUE DEBUG: pq_l2_cache.cpp:223: PQ Cache (L2). Missed blob. tabletId '72057594037927937' partition 0 offset 0 partno 0 count 167 parts_count 0 2026-01-04T02:40:37.049105Z node 9 :PERSQUEUE DEBUG: read.h:178: [72057594037927937][PQCacheProxy]Got results. 1 of 1 from KV. Status 1 2026-01-04T02:40:37.049168Z node 9 :PERSQUEUE DEBUG: read.h:195: [72057594037927937][PQCacheProxy]Got results. result 0 from KV. Status 0 2026-01-04T02:40:37.049266Z node 9 :PERSQUEUE DEBUG: cache_eviction.h:398: Prefetched blob in L1. Partition 0 offset 0 count 167 size 8352628 actorID [9:3737:5380] 2026-01-04T02:40:37.049421Z node 9 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 167 parts 0 suffix '0' size 8352628 2026-01-04T02:40:37.049581Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-04T02:40:37.066630Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 167 count 11 size 550146 from pos 0 cbcount 11 2026-01-04T02:40:37.072448Z node 9 :PERSQUEUE DEBUG: partition_read.cpp:1026: Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp done, result 130 queuesize 0 startOffset 0 2026-01-04T02:40:37.073264Z node 9 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 9 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [9:3696:5380] sender: [9:3914:2057] recipient: [9:14:2061] 2026-01-04T02:40:37.073790Z node 9 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [9:3913:5430], now have 1 active actors on pipe Got start offset = 0 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionTests::WriteSubDomainOutOfSpace [GOOD] >> KqpScripting::StreamExecuteYqlScriptMixed [GOOD] >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan >> KqpScripting::StreamExecuteYqlScriptScanCancelation [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration >> KqpPragma::OrderedColumns >> KqpYql::RefSelect >> TConsoleTests::TestListTenantsExtSubdomain [GOOD] >> TConsoleTests::TestMergeConfig >> TPartitionTests::WriteSubDomainOutOfSpace_DisableExpiration [GOOD] >> TPartitionTests::The_DeletePartition_Message_Arrives_Before_The_ApproveWriteQuota_Message >> TConsoleTests::TestAlterBorrowedStorage [GOOD] >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant >> KqpScripting::QueryStats [GOOD] >> KqpScripting::Pure >> PQCountersLabeled::PartitionBlobCompactionCounters [GOOD] >> PQCountersLabeled::NewConsumersCountersAppear >> KqpScan::ScanDuringSplitThenMerge [GOOD] >> KqpScan::ScanPg >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionActive_BoundaryFalse_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test >> KqpScripting::StreamExecuteYqlScriptClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce >> TPartitionTests::The_DeletePartition_Message_Arrives_Before_The_ApproveWriteQuota_Message [GOOD] >> KqpYql::FlexibleTypes >> TPQTest::TestAlreadyWritten [GOOD] >> TPQTest::TestAlreadyWrittenWithoutDeduplication ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionTests::The_DeletePartition_Message_Arrives_Before_The_ApproveWriteQuota_Message [GOOD] Test command err: 2026-01-04T02:40:07.775157Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:07.841302Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:07.841376Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:07.841429Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:07.841474Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] 
doesn't have tx writes info 2026-01-04T02:40:07.855827Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:181:2194] 2026-01-04T02:40:07.856858Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition 1 generation 0 [1:181:2194] Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-04T02:40:07.883199Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:07.914993Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-04T02:40:07.925766Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:07.946662Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:07.947287Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-04T02:40:07.957977Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:07.979008Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE 
} CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-04T02:40:07.999866Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:08.010702Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-04T02:40:08.031834Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-04T02:40:08.042488Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:08.063328Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:08.074081Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-04T02:40:08.095240Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" 
StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-04T02:40:08.116386Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-04T02:40:08.137487Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-04T02:40:08.204264Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-04T02:40:08.215043Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:08.236002Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:08.257036Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end offset is 0 Got cmd write: CmdDeleteRange { Range { From: "e0000000001|0000000000000000" IncludeFrom: true To: "e0000000001|0000000000000001" IncludeTo: false } } CmdWrite { Key: "m0000000001cclient" Value: "\010\000\020\002\030\003\"\nsession-id(\0000\001@\000" StorageChannel: INLINE } CmdWrite { Key: "m0000000001uclient" Value: "\000\000\000\000\000\000\000\000\002\000\000\000\003\000\000\000session-id" StorageChannel: INLINE } CmdWrite { Key: "i0000000001" Value: "\030\000(\0008\001" StorageChannel: INLINE } 2026-01-04T02:40:08.278322Z node 1 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][1][StateIdle] Got error: strict commit can't set offset 1 to future, consumer client, actual end 
offset is 0 Got cmd write: CmdDeleteRange { Ra ... cess user action and tx pending commits 2026-01-04T02:40:39.416074Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:39.416109Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-04T02:40:39.416157Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-04T02:40:39.416323Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][1][StateIdle] Received TPartition::TEvWrite 2026-01-04T02:40:39.427418Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:39.458572Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:39.490545Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:39.522050Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:39.602026Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:39.633192Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:39.759097Z node 4 :PERSQUEUE INFO: partition.cpp:4331: [72057594037927937][Partition][1][StateIdle] SubDomainOutOfSpace was changed. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. SubDomainOutOfSpace: 0 2026-01-04T02:40:39.759165Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:40:39.759214Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:39.759252Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:39.759293Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:39.759329Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-04T02:40:39.782194Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1792: [72057594037927937][Partition][1][StateIdle] Send write quota request. Topic: "Root/PQ/rt3.dc1--account--topic". Partition: 1. Amount: 22. Cookie: 2 2026-01-04T02:40:39.782340Z node 4 :PERSQUEUE DEBUG: partition.cpp:4259: [72057594037927937][Partition][1][StateIdle] Got quota. Topic: "Root/PQ/rt3.dc1--account--topic". 
Partition: 1: Cookie: 2 2026-01-04T02:40:39.782391Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:40:39.782430Z node 4 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:40:39.782484Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:39.782521Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:39.782568Z node 4 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:40:39.782693Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob processing sourceId 'SourceId' seqNo 1 partNo 0 2026-01-04T02:40:39.783531Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][1][StateIdle] Topic 'Root/PQ/rt3.dc1--account--topic' partition 1 part blob complete sourceId 'SourceId' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 200 PartNo 0 PackedSize 118 count 1 nextOffset 201 batches 1 2026-01-04T02:40:39.783614Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:39.783657Z node 4 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][1][StateIdle] Batch completed (1) 2026-01-04T02:40:39.783700Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-04T02:40:39.784235Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:1711: [72057594037927937][Partition][1][StateIdle] Add new write blob: topic 'Root/PQ/rt3.dc1--account--topic' partition 1 compactOffset 200,1 HeadOffset 101 endOffset 101 curOffset 201 d0000000001_00000000000000000200_00000_0000000001_00000? size 105 WTime 1328 2026-01-04T02:40:39.784604Z node 4 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][1][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:39.806456Z node 4 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 4 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:39.817250Z node 4 :PERSQUEUE DEBUG: partition.cpp:2305: [72057594037927937][Partition][1][StateIdle] Writing. Can't process user action and tx events 2026-01-04T02:40:39.817339Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:492: [72057594037927937][Partition][1][StateIdle] Received TPartition::Handle TEvHandleWriteResponse. 2026-01-04T02:40:39.817419Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][1][StateIdle] TPartition::HandleWriteResponse writeNewSize# 22 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:39.817510Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:60: [72057594037927937][Partition][1][StateIdle] TPartition::ReplyWrite. 
Partition: 1 2026-01-04T02:40:39.817587Z node 4 :PERSQUEUE DEBUG: partition_write.cpp:365: [72057594037927937][Partition][1][StateIdle] Answering for message sourceid: 'SourceId', Topic: 'Root/PQ/rt3.dc1--account--topic', Partition: 1, SeqNo: 1, partNo: 0, Offset: 200 is stored on disk 2026-01-04T02:40:39.817820Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:40:39.817863Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:39.817900Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:39.817939Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:39.817975Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][1][StateIdle] Try persist 2026-01-04T02:40:39.818022Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:193: [72057594037927937][Partition][1][StateIdle] No data for blobs compaction 2026-01-04T02:40:40.091882Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:40.137370Z node 5 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:40.137434Z node 5 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:40.137479Z node 5 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:40.137529Z node 5 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:40:40.151314Z node 5 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][{1, {2, 3}, 4}][StateInit] bootstrapping {1, {2, 3}, 4} [5:184:2196] 2026-01-04T02:40:40.152056Z node 5 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][{1, {2, 3}, 4}][StateInit] init complete for topic 'Root/PQ/rt3.dc1--account--topic' partition {1, {2, 3}, 4} generation 0 [5:184:2196] 2026-01-04T02:40:40.163213Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:40.194362Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:40.215133Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:40.247115Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:40.278262Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:40.299102Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:40.350954Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates 
for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:40.423479Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:40.501930Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:40.705408Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:40.736461Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:40.964499Z node 5 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 5 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:40.986495Z node 5 :PERSQUEUE ERROR: partition.cpp:3926: [72057594037927937][Partition][{1, {2, 3}, 4}][StateIdle] Got error: The transaction is completed Got cmd write: CmdDeleteRange { Range { From: "M0000000004" IncludeFrom: true To: "M0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "D0000000004" IncludeFrom: true To: "D0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "X0000000004" IncludeFrom: true To: "X0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "J0000000004" IncludeFrom: true To: "J0000000005" IncludeTo: false } } CmdDeleteRange { Range { From: "K0000000004" IncludeFrom: true To: "K0000000005" IncludeTo: false } } |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> KqpYql::TableNameConflict [GOOD] >> KqpYql::ColumnTypeMismatch [GOOD] >> KqpYql::JsonCast [GOOD] >> TConsoleTests::TestMergeConfig [GOOD] >> TConsoleTests::TestCreateSubSubDomain >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] >> KqpScripting::ExecuteYqlScriptPg [GOOD] >> KqpScripting::ScriptValidate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableNameConflict [GOOD] Test command err: Trying to start YDB, gRPC: 10823, MsgBus: 15245 2026-01-04T02:40:30.736256Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330773523308396:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:30.736333Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:31.133003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:31.133114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:31.179909Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:31.193076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:31.217919Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription 
[1:7591330773523308349:2081] 1767494430728374 != 1767494430728377 2026-01-04T02:40:31.221605Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:31.343462Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:31.367435Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:31.367463Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:31.367469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:31.367532Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:31.755101Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:32.002882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:32.012674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:40:32.086466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.246295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.394405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.473628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:33.878502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330786408212111:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.878610Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.878930Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330786408212121:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.878992Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.309466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.337408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.364641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.391792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.418731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.452719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.489335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.563633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.631012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330790703180287:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.631109Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.631413Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330790703180292:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.631459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330790703180293:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.631491Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.635183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:34.645154Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330790703180296:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:34.705574Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330790703180347:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:35.736514Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330773523308396:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:35.736578Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Table intent determination, code: 1040
:3:27: Error: RANGE is not supported on Kikimr clusters. Trying to start YDB, gRPC: 2318, MsgBus: 17746 2026-01-04T02:40:37.164472Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330803133437748:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:37.164535Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:37.177267Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:37.220788Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:37.221884Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330803133437713:2081] 1767494437163587 != 1767494437163590 2026-01-04T02:40:37.275340Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:37.275409Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:37.280294Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:37.329821Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:37.329848Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:37.329856Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:37.329941Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:37.397625Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:37.710927Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:37.716980Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:40:37.720817Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:37.778753Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:37.927902Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:37.982446Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.169505Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:40.286901Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330816018341485:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.286973Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.287347Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330816018341495:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.287442Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.342097Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.376277Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.405014Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.434658Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.466120Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.503874Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.530936Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.579825Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.639437Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330816018342362:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.639522Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.639658Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330816018342367:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.639699Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330816018342368:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.639777Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.642308Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:40.652040Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330816018342371:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:40:40.730617Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330816018342422:3762] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:42.165924Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330803133437748:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:42.167055Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Type annotation, code: 1030
:12:30: Error: At function: KiCreateTable!
:12:30: Error: Table name conflict: db.[/Root/Test] is used to reference multiple tables. |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::JsonCast [GOOD] Test command err: Trying to start YDB, gRPC: 10993, MsgBus: 30088 2026-01-04T02:40:30.736352Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330776782219438:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:30.736523Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:31.057855Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:31.066984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:31.067101Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:31.148261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:31.156622Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:31.239237Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:31.367045Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:31.367074Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:31.367079Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:31.367155Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:31.746004Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:31.988630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:32.059370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.253403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.390710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.457836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:33.838693Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330789667123160:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.838816Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.839025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330789667123169:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.839071Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.285237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.311813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.339837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.369179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.398998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.428938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.461370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.507219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.580593Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330793962091342:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.580687Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330793962091347:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.580708Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.580999Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330793962091350:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.581068Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.584967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:34.597249Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330793962091349:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:34.698453Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330793962091402:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:35.736585Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330776782219438:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:35.736655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 3226, MsgBus: 21148 2026-01-04T02:40:37.219885Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330803253304696:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:37.219929Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:37.230307Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:37.307353Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:37.340813Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:37.340890Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:37.346656Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:37.387955Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:37.387973Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:37.387979Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:37.388043Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:37.479177Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:37.813502Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:37.831734Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:37.893516Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.046244Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.163062Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.281934Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:40.452004Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330816138208435:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.452124Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.452458Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330816138208444:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.452544Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.524781Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.557056Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.588366Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.619768Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.648684Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.718426Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.750944Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.790154Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.884520Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330816138209316:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.884610Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.884812Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330816138209321:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.884855Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.884935Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330816138209322:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.888451Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:40.896817Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330816138209325:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:40.960455Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330816138209376:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:42.220018Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330803253304696:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:42.220084Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; [[#]] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::ColumnTypeMismatch [GOOD] Test command err: Trying to start YDB, gRPC: 7974, MsgBus: 21002 2026-01-04T02:40:30.735949Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330774790429144:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:30.736014Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:31.042032Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:31.081318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:31.081464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:31.128390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:31.132557Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:31.139192Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330774790429098:2081] 1767494430728371 != 1767494430728374 2026-01-04T02:40:31.299030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:31.367033Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:31.367071Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:31.367078Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:31.367144Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:31.749279Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
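Note on the recurring "[WorkloadService] ... Resource pool default not found or you don't have access permissions" warnings throughout this run: they are emitted while the default pool at /Root/.metadata/workload_manager/pools/default is still being auto-created; the later TX_PROXY "path exist, request accepts it" message shows the concurrent creation resolving benignly. Purely as a hypothetical sketch (the CREATE RESOURCE POOL statement and its settings are assumed from recent YDB workload-manager support, not taken from this test run), an explicitly created pool would look roughly like:

    -- Hypothetical sketch only: explicit workload-manager pool creation.
    -- Statement form and parameter names are assumptions, not from this log.
    CREATE RESOURCE POOL example_pool WITH (
        CONCURRENT_QUERY_LIMIT = 10,       -- max queries running at once
        QUEUE_SIZE = 100,                  -- max queries waiting in the pool
        DATABASE_LOAD_CPU_THRESHOLD = 80   -- throttle when DB CPU load exceeds 80%
    );

The tests here do not create such a pool themselves; they simply proceed once the default pool exists.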
2026-01-04T02:40:31.963526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:32.049995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.220651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.365528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.442162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:33.885659Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330787675332853:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.885740Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.886270Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330787675332863:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.886334Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.303484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.335837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.363644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.388795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.416733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.450243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.482832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.532089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.616301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791970301029:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.616427Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.616729Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791970301034:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.616776Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791970301035:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.617066Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.619990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:34.629960Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330791970301038:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:34.714346Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330791970301089:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:35.736661Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330774790429144:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:35.736714Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
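The error block just below is the expected failure for the first half of KqpYql::ColumnTypeMismatch: type annotation (code 1030) rejects a CREATE TABLE whose column list names Value twice, which KiCreateTable reports as "Duplicate column: Value". A hypothetical YQL sketch (not the test's actual query) that trips the same check:

    -- Hypothetical example: a duplicate column name fails at type annotation (code 1030).
    CREATE TABLE `/Root/DuplicateColumn` (
        Key Uint64,
        Value String,
        Value String,      -- second column named Value -> "Duplicate column: Value"
        PRIMARY KEY (Key)
    );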
: Error: Type annotation, code: 1030
:7:30: Error: At function: KiCreateTable!
:7:30: Error: Duplicate column: Value. Trying to start YDB, gRPC: 21505, MsgBus: 1104 2026-01-04T02:40:37.235234Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:75913308043 ... R WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:37.410357Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:37.410426Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:37.481891Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:37.777919Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:37.791394Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:37.844448Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.007896Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.066617Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.242618Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:40.286357Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330817212873286:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.286443Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.286607Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330817212873295:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.286639Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.346586Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.376631Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.404692Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.435021Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.464883Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.499010Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.563389Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.642128Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.709673Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330817212874168:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.709764Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.710078Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330817212874173:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.710121Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330817212874174:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.710335Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.713384Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:40.726501Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330817212874177:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:40.824505Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330817212874228:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:42.235985Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330804327969587:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:42.236137Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:42.294758Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591330825802809132:2535], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 2026-01-04T02:40:42.295273Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=OTdiOTE2OTgtY2VjNTA5OGItODgwN2VhM2QtN2FlZDlmZDU=, ActorId: [2:7591330825802809124:2530], ActorState: ExecuteState, LegacyTraceId: 01ke3e1q8y69frp86wdzvectwm, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 26 } message: "At function: KiWriteTable!" end_position { row: 5 column: 26 } severity: 1 issues { position { row: 6 column: 27 } message: "Failed to convert type: Struct<\'Key\':Uint64,\'Value\':Uint64> to Struct<\'Key\':Uint64?,\'Value\':String?>" end_position { row: 6 column: 27 } severity: 1 issues { position { row: 6 column: 27 } message: "Failed to convert \'Value\': Uint64 to Optional" end_position { row: 6 column: 27 } severity: 1 } } issues { position { row: 6 column: 27 } message: "Failed to convert input columns types to scheme types" end_position { row: 6 column: 27 } issue_code: 2031 severity: 1 } } } tx_id# trace_id#
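The GENERIC_ERROR above (echoed again below) is the second half of KqpYql::ColumnTypeMismatch: KiWriteTable cannot convert the input row type Struct<'Key':Uint64,'Value':Uint64> to the table row type Struct<'Key':Uint64?,'Value':String?>, because Uint64 has no implicit conversion to String? (code 2031). A hypothetical YQL sketch (assumed table layout, not the test's actual query) that reproduces the same failure, and the CAST that would satisfy the column type:

    -- Assumed table: Key Uint64 (primary key), Value String.
    -- Fails with code 2031: the Value literal is Uint64, the target column is String?.
    UPSERT INTO `/Root/TypeMismatch` (Key, Value) VALUES (1ul, 2ul);

    -- Casting the Value expression to the column type is the usual fix:
    UPSERT INTO `/Root/TypeMismatch` (Key, Value) VALUES (1ul, CAST(2ul AS String));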
: Error: Type annotation, code: 1030
:5:26: Error: At function: KiWriteTable!
:6:27: Error: Failed to convert type: Struct<'Key':Uint64,'Value':Uint64> to Struct<'Key':Uint64?,'Value':String?>
:6:27: Error: Failed to convert 'Value': Uint64 to Optional
:6:27: Error: Failed to convert input columns types to scheme types, code: 2031 >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] >> KqpScripting::SystemTables [GOOD] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryDisable [GOOD] >> KqpYql::InsertCV-useSink [GOOD] >> KqpYql::InsertCVList+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SyncExecuteYqlScriptSeveralQueries [GOOD] Test command err: Trying to start YDB, gRPC: 30760, MsgBus: 17570 2026-01-04T02:40:30.812584Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330774595267557:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:30.814512Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:31.089929Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:31.113358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:31.113473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:31.164159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:31.168522Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:31.257018Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:31.367257Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:31.367287Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:31.367293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:31.367386Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:31.832013Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:31.996828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:32.003583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:40:32.068906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.225924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.391203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.472771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:33.972613Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330787480171249:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.972745Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.973185Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330787480171259:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.973240Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.285605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.312373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.339520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.367376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.392956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.424493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.461174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.508218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.592404Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791775139422:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.592490Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.592901Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791775139427:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.592944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791775139428:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.593046Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.596263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:34.605486Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330791775139431:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:34.701327Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330791775139482:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:35.811707Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330774595267557:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:35.811772Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:36.886456Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494436890, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 22785, MsgBus: 29296 2026-01-04T02:40:37.857642Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330803195849702:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:37.857711Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:37.893927Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:37.972324Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:37.983374Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:37.983449Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:38.007576Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:38.064138Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:38.064162Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:38.064169Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:38.064246Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:38.173016Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:38.491413Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:38.498136Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:40:38.504791Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.563181Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.757532Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.829656Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.919492Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:41.034999Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330820375720698:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.035124Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.035445Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330820375720708:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.035508Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.109760Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.139289Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.182909Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.218834Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.259502Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.299453Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.331106Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.384169Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.495526Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330820375721587:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.495658Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.495987Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330820375721593:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.496013Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330820375721592:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.496083Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.499780Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:41.513955Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330820375721596:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:40:41.594915Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330820375721647:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:42.857904Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330803195849702:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:42.857969Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 64650, MsgBus: 9515 2026-01-04T02:40:30.738242Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330774323200270:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:30.738324Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:31.022194Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:31.069453Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:31.069562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:31.142573Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:31.153059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:31.197892Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:31.366667Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:31.366744Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:31.366749Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:31.366827Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:31.755508Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:31.982721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:32.005882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:40:32.054267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.221234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.374403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.443126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:33.850919Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330787208103980:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.851017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.851302Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330787208103990:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.851346Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.285733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.316646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.344918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.374428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.403411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.437244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.472966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.530519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.603619Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791503072155:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.603726Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.603988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791503072161:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.604012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791503072160:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.604088Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.607640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:34.617649Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330791503072164:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:34.713385Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330791503072215:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:35.738302Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330774323200270:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:35.738380Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:36.230677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:36.771627Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494436806, txId: 281474976710675] shutting down Trying to start YDB, gRPC: 11722, MsgBus: 25270 2026-01-04T02:40:37.659936Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330806159574411:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:37.660022Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:37.676592Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:37.732456Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:37.779881Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:37.779964Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:37.785008Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:37.887183Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:37.887777Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:37.887787Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:37.887795Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:37.887907Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-04T02:40:38.314667Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:38.330461Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.389053Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.578799Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.661664Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.667986Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:40.868751Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330819044478123:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.868846Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.869129Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330819044478133:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.869192Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.922820Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.949094Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.979505Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.009108Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.044160Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.083090Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.135953Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.192124Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.287162Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330823339446299:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.287244Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330823339446304:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.287257Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.287430Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330823339446306:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.287474Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.290667Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:41.301937Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330823339446308:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:41.386930Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330823339446359:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:42.660010Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330806159574411:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:42.660084Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpYql::RefSelect [GOOD] >> KqpYql::ScriptUdf |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TConsoleTests::TestAlterStorageUnitsOfSharedTenant [GOOD] >> TConsoleTests::TestAlterServerlessTenant >> KqpPragma::OrderedColumns [GOOD] >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptEmptyResults [GOOD] Test command err: Trying to start YDB, gRPC: 5598, MsgBus: 24899 2026-01-04T02:40:31.393641Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330778182735842:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:31.393798Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:31.633888Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:31.638380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:31.638502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:31.655764Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:31.728821Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330778182735803:2081] 1767494431392099 != 1767494431392102 2026-01-04T02:40:31.736303Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:31.810502Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:31.810563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:31.810573Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:31.810697Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:31.921643Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:32.289492Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:32.301959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:40:32.363984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.430151Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:32.525043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.678274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.742550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.545717Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791067639565:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.545842Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.546568Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791067639575:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.546617Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.910476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.944015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.971251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.998670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:35.031524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:35.063459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:35.095871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:35.135019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:35.196188Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330795362607739:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:35.196273Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:35.196351Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330795362607744:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:35.196546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330795362607746:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:35.196594Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:35.200285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:35.210320Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330795362607748:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:35.310049Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330795362607799:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:36.394020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330778182735842:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:36.394101Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:37.170639Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494437198, ... 9846 2026-01-04T02:40:38.059532Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330810510201146:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:38.059594Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:38.083042Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:38.172481Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:38.183120Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:38.183205Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:38.193230Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:38.245509Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:38.299350Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:38.299380Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:38.299389Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:38.299470Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:38.740046Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:38.752921Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:40:38.774932Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.845592Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:39.003680Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:39.062105Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:39.073794Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:41.458892Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330823395104839:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.458984Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.459293Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330823395104849:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.459365Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.529763Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.563594Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.591725Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.623040Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.687889Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.722471Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.759587Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.809300Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.893023Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330823395105719:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.893102Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.893323Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330823395105725:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.893367Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330823395105726:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.893424Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.896792Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:41.907592Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330823395105729:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:42.009482Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330827690073076:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:43.059711Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330810510201146:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:43.059795Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:43.719464Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494443757, txId: 281474976710673] shutting down 2026-01-04T02:40:43.863087Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494443897, txId: 281474976710675] shutting down |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SystemTables [GOOD] Test command err: Trying to start YDB, gRPC: 22126, MsgBus: 6760 2026-01-04T02:40:31.025266Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330778330856644:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:31.025403Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:31.262411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:31.262530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:31.262956Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:31.269153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:31.325324Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330774035889245:2081] 1767494430987898 != 1767494430987901 2026-01-04T02:40:31.325910Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:31.421363Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:31.421384Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:31.421388Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:31.421437Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:31.531436Z node 
1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:31.988155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:31.997491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:40:32.033960Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:32.066176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.210253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.393652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.477476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.280027Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791215760311:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.280154Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.280459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791215760321:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.280522Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.623790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.649772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.678359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.703411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.729829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.758778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.791855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.858488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.922323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791215761193:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.922390Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.922454Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791215761198:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.922611Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791215761200:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.922654Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.926274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:34.937266Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330791215761202:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:35.035295Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330795510728549:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:36.025343Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330778330856644:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:36.025432Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:36.342054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... rations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:37.360818Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:37.422541Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:37.433325Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330803345263837:2081] 1767494437344710 != 1767494437344713 2026-01-04T02:40:37.463227Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:37.463308Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:37.468515Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:37.518356Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:37.518383Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:37.518391Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:37.518489Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:37.575172Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:37.923218Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:37.928600Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:40:37.944087Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.003482Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.161787Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.227914Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.362574Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:40.604567Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330816230167592:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.604644Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.605045Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330816230167601:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.605099Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.683620Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.718102Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.751386Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.805333Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.833467Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.866823Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.894858Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.928918Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.990460Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330816230168469:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.990524Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.990582Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330816230168474:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.990712Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330816230168476:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.990755Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.993509Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:41.002255Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330816230168477:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:41.080236Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330820525135825:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:42.345464Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330803345263866:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:42.345520Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:42.872421Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494442861, txId: 281474976710673] shutting down 2026-01-04T02:40:43.000541Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494442990, txId: 281474976710675] shutting down 2026-01-04T02:40:43.970128Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494443981, txId: 281474976710677] shutting down >> KqpScripting::Pure [GOOD] |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryDisable [GOOD] Test command err: Trying to start YDB, gRPC: 16414, MsgBus: 16911 2026-01-04T02:40:30.736426Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330773558774832:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:30.736485Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:31.028309Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:31.036173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:31.036302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:31.150452Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:31.162238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:31.210843Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:31.367562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:31.367590Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from 
file: (empty maybe) 2026-01-04T02:40:31.367595Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:31.367658Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:31.749680Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:32.006024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:32.015660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:40:32.065095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.217296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.380607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.451118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:33.929798Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330786443678548:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.929894Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.930153Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330786443678558:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.930212Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.285446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.315577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.338951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.363678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.389893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.418718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.448817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.492571Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.578348Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330790738646719:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.578424Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.578568Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330790738646724:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.578606Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330790738646725:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.578643Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.584930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:34.597511Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330790738646728:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:34.654320Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330790738646779:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:35.738417Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330773558774832:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:35.738483Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:36.851540Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494436862, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 16820, MsgBus: 11718 2026-01-04T02:40:37.785182Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330804997899841:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:37.785235Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:37.845518Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:37.869642Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:37.903038Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:37.903109Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:37.908068Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:38.029472Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:38.029499Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:38.029506Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:38.029578Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:38.078526Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:38.399105Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:38.406774Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:40:38.420161Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.483804Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.615853Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.713615Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.840800Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:40.989563Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330817882803548:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.989666Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.990019Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330817882803558:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:40.990135Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.044042Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.071781Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.103584Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.139161Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.171297Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.208856Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.246945Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.330959Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.411770Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330822177771727:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.411868Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.412060Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330822177771732:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.412092Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.412118Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330822177771733:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.416048Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:41.426266Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330822177771736:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:41.498141Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330822177771787:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:42.785630Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330804997899841:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:42.785698Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:43.627562Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494443659, txId: 281474976710673] shutting down |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::TableConcat >> KqpYql::EvaluateExpr3 >> SlowTopicAutopartitioning::CDC_Write [GOOD] >> KqpYql::BinaryJsonOffsetBound >> KqpYql::FlexibleTypes [GOOD] >> KqpYql::EvaluateIf ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptLeadingEmptyScan [GOOD] Test command err: Trying to start YDB, gRPC: 10514, MsgBus: 25083 2026-01-04T02:40:30.780013Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330774470833837:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:30.780080Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:31.029943Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:31.035975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:31.036088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:31.057842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:31.149030Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:31.282471Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:31.367123Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:31.367147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:31.367166Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:31.367277Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-01-04T02:40:31.786617Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:31.992390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:32.049070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.205155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.368675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.439224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.164035Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791650704845:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.164163Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.164535Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791650704855:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.164598Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.563754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.601345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.627577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.651622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.682605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.713245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.745524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.804605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.874223Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791650705725:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.874347Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.875121Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791650705730:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.875169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330791650705731:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.875225Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.878348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:34.888025Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330791650705734:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:34.959284Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330791650705785:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:35.780194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330774470833837:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:35.780261Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:36.915952Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494436925, txId: 281474976710673] shutting down 2026-01-04T02:40:37.320965Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494437352, txId: 281474976710675] shutting down 2026-01-04T02:40:38.037842Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot ... 5049 2026-01-04T02:40:39.092599Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330811690624176:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:39.092654Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:39.104239Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:39.197871Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:39.219785Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:39.219876Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:39.225005Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:39.274937Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:39.294125Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:39.294147Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:39.294156Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:39.294222Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:39.708773Z node 2 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:39.715031Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:40:39.721679Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:39.778621Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:39.917795Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:39.976287Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.100013Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:42.554970Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330824575527901:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.555068Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.555344Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330824575527911:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.555413Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.620543Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.647450Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.673078Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.703581Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.732277Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.759618Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.787920Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.846122Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.913054Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330824575528779:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.913105Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330824575528784:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.913125Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.913270Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330824575528786:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.913307Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.916402Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:42.930658Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330824575528787:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:40:42.997012Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330824575528839:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:44.092496Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330811690624176:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:44.092562Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:44.616294Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494444639, txId: 281474976715673] shutting down 2026-01-04T02:40:45.050050Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494445080, txId: 281474976715675] shutting down |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 63439, MsgBus: 63121 2026-01-04T02:40:33.576464Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330787148857030:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:33.576575Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:33.770875Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:33.862605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:33.862720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:33.865681Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330787148856987:2081] 1767494433575039 != 1767494433575042 2026-01-04T02:40:33.873559Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:33.879509Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:33.951317Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:33.951673Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:33.951691Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:33.951790Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:34.053323Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:34.305101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:34.315526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:40:34.373042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.504765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.582864Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:34.640799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.691911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:36.247065Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330800033760751:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:36.247178Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:36.247510Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330800033760761:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:36.247559Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:36.532876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:36.563237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:36.595642Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:36.623171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:36.651957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:36.683511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:36.738956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:36.778277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:36.848176Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330800033761632:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:36.848311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:36.848405Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330800033761637:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:36.848440Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330800033761639:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:36.848477Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:36.852008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:36.861748Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330800033761641:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:36.943113Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330800033761692:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:38.576880Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330787148857030:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:38.578743Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:39.419967Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494439445, txId: 281474976710675] shutting down Trying to start YDB, gRPC: 28467, MsgBus: 8636 2026-01-04T02:40:40.197321Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330818888442616:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:40.197370Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:40.210992Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:40.279906Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:40.324591Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:40.324676Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:40.326686Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:40.391437Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:40.391465Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:40.391473Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:40.391550Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:40.461984Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:40.782837Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:40.787497Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:40:40.798121Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.845993Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.013133Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.077358Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.237477Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:43.269298Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330831773346333:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:43.269378Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:43.269568Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330831773346343:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:43.269592Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:43.325048Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:43.386449Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:43.419706Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:43.448178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:43.473873Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:43.503091Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:43.533772Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:43.578024Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:43.653417Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330831773347213:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:43.653514Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330831773347218:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:43.653534Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:43.653797Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330831773347220:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:43.653911Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:43.656530Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:43.665869Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330831773347221:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:40:43.750251Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330831773347273:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:45.197528Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330818888442616:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:45.197592Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScan >> KqpScripting::LimitOnShard >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_InactiveActor_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test >> KqpScripting::SelectNullType >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] >> KqpPragma::ResetPerQuery >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/slow/unittest >> SlowTopicAutopartitioning::CDC_Write [GOOD] Test command err: 2026-01-04T02:39:13.157868Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330445669694973:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:13.159595Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:13.206923Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:39:13.383645Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:13.415995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:13.416107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:13.462890Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:13.472367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:13.607151Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:13.699489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001be0/r3tmp/yandexVZWxdE.tmp 2026-01-04T02:39:13.699607Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
/home/runner/.ya/build/build_root/c5zo/001be0/r3tmp/yandexVZWxdE.tmp 2026-01-04T02:39:13.701643Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001be0/r3tmp/yandexVZWxdE.tmp 2026-01-04T02:39:13.701796Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:13.899458Z INFO: TTestServer started on Port 16706 GrpcPort 31506 PQClient connected to localhost:31506 2026-01-04T02:39:14.072906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:14.162148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:14.164361Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:15.663245Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330454259630555:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:15.663284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330454259630550:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:15.663798Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:15.664080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330454259630567:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:15.664152Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:15.672837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:15.683268Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330454259630565:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-04T02:39:15.785571Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330454259630631:2644] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:16.104954Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330454259630639:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:39:16.111230Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MWI3ZDI2OGYtZTMxMDhjZDUtZjExYWUzOTUtZDc2MTc5YjY=, ActorId: [1:7591330454259630548:2329], ActorState: ExecuteState, LegacyTraceId: 01ke3dz2p8889cv7tnkst1s7wb, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:39:16.132821Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:39:16.145546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.175160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:16.248401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. 
Subcribe to ClusterTracker from [1:7591330458554598223:2822] 2026-01-04T02:39:18.157936Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330445669694973:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:18.158011Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-04T02:39:22.402722Z :TopicSplitMerge INFO: TTopicSdkTestSetup started 2026-01-04T02:39:22.544021Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037894][] pipe [1:7591330484324402333:2988] connected; active server actors: 1 2026-01-04T02:39:22.544296Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037894][streamImpl] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-04T02:39:22.545126Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037894][streamImpl] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037894 2026-01-04T02:39:22.545227Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037894][streamImpl] BALANCER INIT DONE for streamImpl: (0, 72075186224037893) 2026-01-04T02:39:22.547937Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037894][streamImpl] TEvClientConnected TabletId 72075186224037893, NodeId 1, Generation 1 2026-01-04T02:39:22.614180Z node 1 :CHANGE_EXCHANGE WARN: change_sender_cdc_stream.cpp:398: [CdcChangeSenderMain][72075186224037892:1][1:7591330484324402395:2477] Failed entry at 'ResolveTopic': entry# { Path: TableId: [72057594046644480:51:0] RequestType: ByTableId Operation: OpTopic RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo } 2026-01-04T02:39:22.639283Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037894][streamImpl] updating configuration. Deleted partitions []. Added partitions [] >>>>> 2026-01-04T02:39:22.649669Z Start table insert 2026-01-04T02:39:23.561331Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:159: [72075186224037894][streamImpl] TPersQueueReadBalancer::HandleWakeup 2026-01-04T02:39:23.561387Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:445: [72075186224037894][streamImpl] Send TEvPersQueue::TEvStatus TabletId: 72075186224037893 Cookie: 1 2026-01-04T02:39:23.575 ... 3} (1084-1084) 2026-01-04T02:40:45.555600Z :DEBUG: [/Root] Take Data. Partition 61. Read: {119, 4} (1085-1085) 2026-01-04T02:40:45.555618Z :DEBUG: [/Root] Take Data. Partition 61. Read: {119, 5} (1086-1086) 2026-01-04T02:40:45.555637Z :DEBUG: [/Root] Take Data. Partition 61. Read: {119, 6} (1087-1087) 2026-01-04T02:40:45.555659Z :DEBUG: [/Root] Take Data. Partition 61. Read: {119, 7} (1088-1088) 2026-01-04T02:40:45.555678Z :DEBUG: [/Root] Take Data. Partition 61. Read: {119, 8} (1089-1089) 2026-01-04T02:40:45.555694Z :DEBUG: [/Root] Take Data. Partition 61. Read: {119, 9} (1090-1090) 2026-01-04T02:40:45.555713Z :DEBUG: [/Root] Take Data. Partition 61. Read: {119, 10} (1091-1091) 2026-01-04T02:40:45.555731Z :DEBUG: [/Root] Take Data. Partition 61. Read: {119, 11} (1092-1092) 2026-01-04T02:40:45.555747Z :DEBUG: [/Root] Take Data. Partition 61. 
Read: {119, 12} (1093-1093) 2026-01-04T02:40:45.555761Z :DEBUG: [/Root] Take Data. Partition 61. Read: {119, 13} (1094-1094) 2026-01-04T02:40:45.555779Z :DEBUG: [/Root] Take Data. Partition 61. Read: {119, 14} (1095-1095) 2026-01-04T02:40:45.555797Z :DEBUG: [/Root] Take Data. Partition 61. Read: {120, 0} (1096-1096) 2026-01-04T02:40:45.555818Z :DEBUG: [/Root] Take Data. Partition 61. Read: {121, 0} (1097-1097) 2026-01-04T02:40:45.555854Z :DEBUG: [/Root] Take Data. Partition 61. Read: {121, 1} (1098-1098) 2026-01-04T02:40:45.555881Z :DEBUG: [/Root] Take Data. Partition 61. Read: {121, 2} (1099-1099) 2026-01-04T02:40:45.555905Z :DEBUG: [/Root] Take Data. Partition 61. Read: {121, 3} (1100-1100) 2026-01-04T02:40:45.555924Z :DEBUG: [/Root] Take Data. Partition 61. Read: {121, 4} (1101-1101) 2026-01-04T02:40:45.555940Z :DEBUG: [/Root] Take Data. Partition 61. Read: {121, 5} (1102-1102) 2026-01-04T02:40:45.555956Z :DEBUG: [/Root] Take Data. Partition 61. Read: {121, 6} (1103-1103) 2026-01-04T02:40:45.555970Z :DEBUG: [/Root] Take Data. Partition 61. Read: {121, 7} (1104-1104) 2026-01-04T02:40:45.555984Z :DEBUG: [/Root] Take Data. Partition 61. Read: {121, 8} (1105-1105) 2026-01-04T02:40:45.555998Z :DEBUG: [/Root] Take Data. Partition 61. Read: {121, 9} (1106-1106) 2026-01-04T02:40:45.556012Z :DEBUG: [/Root] Take Data. Partition 61. Read: {121, 10} (1107-1107) 2026-01-04T02:40:45.556030Z :DEBUG: [/Root] Take Data. Partition 61. Read: {121, 11} (1108-1108) 2026-01-04T02:40:45.556045Z :DEBUG: [/Root] Take Data. Partition 61. Read: {121, 12} (1109-1109) 2026-01-04T02:40:45.556062Z :DEBUG: [/Root] Take Data. Partition 61. Read: {121, 13} (1110-1110) 2026-01-04T02:40:45.556080Z :DEBUG: [/Root] Take Data. Partition 61. Read: {121, 14} (1111-1111) 2026-01-04T02:40:45.556097Z :DEBUG: [/Root] Take Data. Partition 61. Read: {121, 15} (1112-1112) 2026-01-04T02:40:45.556119Z :DEBUG: [/Root] Take Data. Partition 61. Read: {122, 0} (1113-1113) 2026-01-04T02:40:45.556138Z :DEBUG: [/Root] Take Data. Partition 61. Read: {123, 0} (1114-1114) 2026-01-04T02:40:45.556156Z :DEBUG: [/Root] Take Data. Partition 61. Read: {123, 1} (1115-1115) 2026-01-04T02:40:45.556177Z :DEBUG: [/Root] Take Data. Partition 61. Read: {123, 2} (1116-1116) 2026-01-04T02:40:45.556195Z :DEBUG: [/Root] Take Data. Partition 61. Read: {123, 3} (1117-1117) 2026-01-04T02:40:45.556214Z :DEBUG: [/Root] Take Data. Partition 61. Read: {123, 4} (1118-1118) 2026-01-04T02:40:45.556235Z :DEBUG: [/Root] Take Data. Partition 61. Read: {123, 5} (1119-1119) 2026-01-04T02:40:45.556255Z :DEBUG: [/Root] Take Data. Partition 61. Read: {123, 6} (1120-1120) 2026-01-04T02:40:45.556273Z :DEBUG: [/Root] Take Data. Partition 61. Read: {123, 7} (1121-1121) 2026-01-04T02:40:45.556291Z :DEBUG: [/Root] Take Data. Partition 61. Read: {123, 8} (1122-1122) 2026-01-04T02:40:45.556308Z :DEBUG: [/Root] Take Data. Partition 61. Read: {123, 9} (1123-1123) 2026-01-04T02:40:45.556327Z :DEBUG: [/Root] Take Data. Partition 61. Read: {123, 10} (1124-1124) 2026-01-04T02:40:45.556344Z :DEBUG: [/Root] Take Data. Partition 61. Read: {123, 11} (1125-1125) 2026-01-04T02:40:45.556463Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] The application data is transferred to the client. Number of messages 1123, size 206787 bytes 2026-01-04T02:40:45.556491Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] The application data is transferred to the client. 
Number of messages 891, size 164094 bytes 2026-01-04T02:40:45.556504Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] The application data is transferred to the client. Number of messages 1126, size 207389 bytes 2026-01-04T02:40:45.556515Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] The application data is transferred to the client. Number of messages 1448, size 266680 bytes 2026-01-04T02:40:45.556526Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] The application data is transferred to the client. Number of messages 691, size 127290 bytes 2026-01-04T02:40:45.556536Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] The application data is transferred to the client. Number of messages 701, size 129079 bytes 2026-01-04T02:40:45.556549Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] The application data is transferred to the client. Number of messages 1126, size 207327 bytes 2026-01-04T02:40:45.556560Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] The application data is transferred to the client. Number of messages 1463, size 269453 bytes 2026-01-04T02:40:45.556573Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] The application data is transferred to the client. Number of messages 1428, size 262960 bytes 2026-01-04T02:40:45.556585Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] The application data is transferred to the client. Number of messages 1343, size 247338 bytes 2026-01-04T02:40:45.556622Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] Returning serverBytesSize = 0 to budget 2026-01-04T02:40:45.559708Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] Returning serverBytesSize = 0 to budget 2026-01-04T02:40:45.564781Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] Returning serverBytesSize = 0 to budget 2026-01-04T02:40:45.570841Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] Returning serverBytesSize = 0 to budget 2026-01-04T02:40:45.576196Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] Returning serverBytesSize = 0 to budget 2026-01-04T02:40:45.579088Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] Returning serverBytesSize = 0 to budget 2026-01-04T02:40:45.585299Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] Returning serverBytesSize = 0 to budget 2026-01-04T02:40:45.590311Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] Returning serverBytesSize = 0 to budget 2026-01-04T02:40:45.595576Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] Returning serverBytesSize = 0 to budget 2026-01-04T02:40:45.602196Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] Returning serverBytesSize = 0 to budget 2026-01-04T02:40:45.671550Z :INFO: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] Closing read session. 
Close timeout: 0.000000s 2026-01-04T02:40:45.671773Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:origin/feed/streamImpl:64:81:1342:0 -:origin/feed/streamImpl:78:80:837:0 -:origin/feed/streamImpl:76:79:890:0 -:origin/feed/streamImpl:62:78:1427:0 -:origin/feed/streamImpl:63:77:1122:0 -:origin/feed/streamImpl:77:76:690:0 -:origin/feed/streamImpl:57:75:1231:0 -:origin/feed/streamImpl:55:74:1484:0 -:origin/feed/streamImpl:61:73:1125:0 -:origin/feed/streamImpl:75:72:700:0 -:origin/feed/streamImpl:68:71:1361:0 -:origin/feed/streamImpl:58:70:1462:0 -:origin/feed/streamImpl:56:69:1447:0 -:origin/feed/streamImpl:67:68:1104:0 -:origin/feed/streamImpl:65:67:1125:0 -:origin/feed/streamImpl:70:66:1345:0 -:origin/feed/streamImpl:69:65:1167:0 -:origin/feed/streamImpl:59:64:1399:0 -:origin/feed/streamImpl:66:63:1264:0 -:origin/feed/streamImpl:60:62:1429:0 -:origin/feed/streamImpl:80:61:797:0 -:origin/feed/streamImpl:79:60:659:0 -:origin/feed/streamImpl:34:59:1789:0 -:origin/feed/streamImpl:42:58:1692:0 -:origin/feed/streamImpl:48:57:2730:0 -:origin/feed/streamImpl:35:56:1731:0 -:origin/feed/streamImpl:47:55:2693:0 -:origin/feed/streamImpl:41:54:2770:0 -:origin/feed/streamImpl:33:53:1786:0 -:origin/feed/streamImpl:44:52:2728:0 -:origin/feed/streamImpl:38:51:1770:0 -:origin/feed/streamImpl:43:50:2848:0 -:origin/feed/streamImpl:36:49:1737:0 -:origin/feed/streamImpl:40:48:1689:0 -:origin/feed/streamImpl:39:47:2885:0 -:origin/feed/streamImpl:28:45:1816:0 -:origin/feed/streamImpl:27:44:1793:0 -:origin/feed/streamImpl:72:43:973:0 -:origin/feed/streamImpl:71:42:840:0 -:origin/feed/streamImpl:53:41:2778:0 -:origin/feed/streamImpl:45:40:2866:0 -:origin/feed/streamImpl:54:39:1661:0 -:origin/feed/streamImpl:46:38:2747:0 -:origin/feed/streamImpl:51:37:2770:0 -:origin/feed/streamImpl:52:36:2760:0 -:origin/feed/streamImpl:49:35:2741:0 -:origin/feed/streamImpl:74:34:938:0 -:origin/feed/streamImpl:73:33:746:0 -:origin/feed/streamImpl:50:32:2743:0 -:origin/feed/streamImpl:20:31:1796:0 -:origin/feed/streamImpl:16:30:1999:0 -:origin/feed/streamImpl:19:29:1849:0 -:origin/feed/streamImpl:31:28:1726:0 -:origin/feed/streamImpl:15:27:2037:0 -:origin/feed/streamImpl:18:26:2023:0 -:origin/feed/streamImpl:21:25:1792:0 -:origin/feed/streamImpl:22:24:1808:0 -:origin/feed/streamImpl:26:23:1850:0 -:origin/feed/streamImpl:32:21:3109:0 -:origin/feed/streamImpl:23:20:1851:0 -:origin/feed/streamImpl:25:19:1844:0 -:origin/feed/streamImpl:24:18:1803:0 -:origin/feed/streamImpl:29:17:3127:0 -:origin/feed/streamImpl:30:16:1740:0 -:origin/feed/streamImpl:12:15:1969:0 -:origin/feed/streamImpl:10:14:1745:0 -:origin/feed/streamImpl:13:13:1831:0 -:origin/feed/streamImpl:8:12:1705:0 -:origin/feed/streamImpl:7:9:2004:0 -:origin/feed/streamImpl:14:8:1880:0 -:origin/feed/streamImpl:6:7:1816:0 -:origin/feed/streamImpl:5:6:1720:0 -:origin/feed/streamImpl:3:5:2457:0 -:origin/feed/streamImpl:4:4:1789:0 -:origin/feed/streamImpl:11:11:2022:0 -:origin/feed/streamImpl:37:46:1766:0 -:origin/feed/streamImpl:2:3:2935:0 -:origin/feed/streamImpl:1:2:4235:0 -:origin/feed/streamImpl:9:10:2045:0 -:origin/feed/streamImpl:17:22:1869:0 -:origin/feed/streamImpl:0:1:103350:0 2026-01-04T02:40:45.671807Z :INFO: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] Counters: { Errors: 0 CurrentSessionLifetimeMs: 19416 BytesRead: 46043904 MessagesRead: 250000 BytesReadCompressed: 46043904 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 
2026-01-04T02:40:45.671902Z :NOTICE: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-04T02:40:45.671995Z :DEBUG: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] [] Abort session to cluster 2026-01-04T02:40:45.672938Z :NOTICE: [/Root] [/Root] [a3e5fedc-af619500-8204994e-bf4ca678] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } >>>>> 2026-01-04T02:40:45.675624Z End 2026-01-04T02:40:45.676564Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037894][streamImpl] pipe [1:7591330763497299527:4487] disconnected. 2026-01-04T02:40:45.676614Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037894][streamImpl] pipe [1:7591330763497299527:4487] disconnected; active server actors: 1 2026-01-04T02:40:45.676638Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037894][streamImpl] pipe [1:7591330763497299527:4487] client consumer-1 disconnected session consumer-1_1_1_2357415800594510060_v1 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/slow/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 28635, MsgBus: 6810 2026-01-04T02:40:30.754056Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330773658535409:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:30.756839Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:31.024658Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:31.041739Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:31.041884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:31.067438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:31.164532Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:31.200438Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:31.367083Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:31.367126Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:31.367135Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:31.367218Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:31.762597Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:31.972856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:31.987234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:40:32.050183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.202280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.397294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.470472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.018128Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330790838406403:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.018321Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.018691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330790838406413:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.018786Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.340084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.370650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.400437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.429768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.461340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.493038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.530175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.606810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.674323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330790838407284:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.674404Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.674526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330790838407289:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.674578Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330790838407290:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.674617Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.678023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:34.688663Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330790838407293:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:40:34.788189Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330790838407346:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:35.751238Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330773658535409:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:35.751314Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:37.332551Z node 1 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [1:7591330799428342244:2530] 2026-01-04T02:40:37.333941Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7591330799428342292:2536] TxId: 281474976715674. Ctx: { TraceId: 01ke3e1hfbd6qncan85yhtd4zm, Database: /Root, SessionId: ydb:// ... thin specified timeout 76ms, session id ydb://session/3?node_id=2&id=ZTQ3M2VjZC03NjAwYzY3MS1mMDg5ZjYxZC1hNzA2OTZhOQ== } 2026-01-04T02:40:45.514018Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7591330837499747820:2669] 2026-01-04T02:40:45.572802Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 79ms, session id ydb://session/3?node_id=2&id=ZjkzODg2YjMtYzk3MTk1NWYtY2ExYWUwYzEtODU3NzEyYzk= } 2026-01-04T02:40:45.599545Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7591330837499747841:2678] 2026-01-04T02:40:45.603728Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 82ms, session id ydb://session/3?node_id=2&id=MWY1YzdlMWQtYTY2ZWUzNDktZmI5N2JlZmQtOWM1YjU4ZDk= } 2026-01-04T02:40:45.684001Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7591330837499747855:2684] 2026-01-04T02:40:45.693024Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 85ms, session id ydb://session/3?node_id=2&id=ZTQ3ZmZkNDgtNzg2YTUzNDgtYjU4MTg3NjctZTEyNmI1NmM= } 2026-01-04T02:40:45.777846Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7591330837499747876:2693] 2026-01-04T02:40:45.785414Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 88ms, session id ydb://session/3?node_id=2&id=NWQ1MGUyNGEtZjdjZWRhMjUtMzVkYTRkMzAtMjE4NDdjODg= } 2026-01-04T02:40:45.932502Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 91ms, session id ydb://session/3?node_id=2&id=ZGUwY2IzZTQtOGZlMmE1MGQtYzExM2E1MS1jYzY4YmM4Nw== } 2026-01-04T02:40:45.967739Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7591330837499747911:2708] 2026-01-04T02:40:45.978999Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 94ms, session id ydb://session/3?node_id=2&id=OGM0YzgxYjAtZTE3YTVmOTctMjA5MzQ2NmEtOWFiODhmNWE= } 2026-01-04T02:40:46.071219Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7591330837499747926:2714] 2026-01-04T02:40:46.081391Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 97ms, session id ydb://session/3?node_id=2&id=OTE4NDI5NGMtZWMwZDlkYTktNDBhZTg4ZWUtNGQxNjg5NGI= } 2026-01-04T02:40:46.181958Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7591330841794715243:2723] 2026-01-04T02:40:46.182637Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [2:7591330841794715282:2728] TxId: 281474976710674. Ctx: { TraceId: 01ke3e1v07b9c9g49nws29naz2, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Zjk5ZGM4NmEtZDYzZjE3MjEtNDYwZThkM2EtNzM5MmUzMmM=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Client lost } trace_id# 2026-01-04T02:40:46.182987Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=Zjk5ZGM4NmEtZDYzZjE3MjEtNDYwZThkM2EtNzM5MmUzMmM=, ActorId: [2:7591330841794715253:2728], ActorState: ExecuteState, LegacyTraceId: 01ke3e1v07b9c9g49nws29naz2, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Client lost" severity: 1 } trace_id# 2026-01-04T02:40:46.183516Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494446214, txId: 281474976710673] shutting down 2026-01-04T02:40:46.183598Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [2:7591330841794715287:2730], TxId: 281474976710674, task: 2. Ctx: { TraceId : 01ke3e1v07b9c9g49nws29naz2. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=Zjk5ZGM4NmEtZDYzZjE3MjEtNDYwZThkM2EtNzM5MmUzMmM=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [2:7591330841794715282:2728], status: ABORTED, reason: {
: Error: Terminate execution } 2026-01-04T02:40:46.184104Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [2:7591330841794715289:2731], TxId: 281474976710674, task: 3. Ctx: { TraceId : 01ke3e1v07b9c9g49nws29naz2. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=Zjk5ZGM4NmEtZDYzZjE3MjEtNDYwZThkM2EtNzM5MmUzMmM=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [2:7591330841794715282:2728], status: ABORTED, reason: {
: Error: Terminate execution } 2026-01-04T02:40:46.184354Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [2:7591330841794715290:2732], TxId: 281474976710674, task: 4. Ctx: { CheckpointId : . TraceId : 01ke3e1v07b9c9g49nws29naz2. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=Zjk5ZGM4NmEtZDYzZjE3MjEtNDYwZThkM2EtNzM5MmUzMmM=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [2:7591330841794715282:2728], status: ABORTED, reason: {
: Error: Terminate execution } 2026-01-04T02:40:46.184560Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [2:7591330841794715291:2733], TxId: 281474976710674, task: 5. Ctx: { TraceId : 01ke3e1v07b9c9g49nws29naz2. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=Zjk5ZGM4NmEtZDYzZjE3MjEtNDYwZThkM2EtNzM5MmUzMmM=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [2:7591330841794715282:2728], status: ABORTED, reason: {
: Error: Terminate execution } 2026-01-04T02:40:46.185934Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [2:7591330841794715286:2729], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3e1v07b9c9g49nws29naz2. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=Zjk5ZGM4NmEtZDYzZjE3MjEtNDYwZThkM2EtNzM5MmUzMmM=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [2:7591330841794715282:2728], status: ABORTED, reason: {
: Error: Terminate execution } 2026-01-04T02:40:46.190420Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 100ms, session id ydb://session/3?node_id=2&id=ODgxYmY1ODgtOTFkZTY5ZTMtZTAxNWM2M2ItNzY3NjljOWM= } 2026-01-04T02:40:46.287888Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7591330841794715331:2739] 2026-01-04T02:40:46.293078Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 103ms, session id ydb://session/3?node_id=2&id=YWE1YjQ2NTQtODljMWM3LTMyY2NmZjFlLTM2MDI2NzJi } 2026-01-04T02:40:46.392416Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7591330841794715360:2749] 2026-01-04T02:40:46.405096Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 106ms, session id ydb://session/3?node_id=2&id=ODE5ZTNhNDItNTlmYjEwM2MtNjZlOTFiYTItNzlhYTZkM2U= } 2026-01-04T02:40:46.506394Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7591330841794715376:2756] 2026-01-04T02:40:46.514529Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 109ms, session id ydb://session/3?node_id=2&id=ZWEwODc3OGItZTAxNzAxZjgtY2E5NGYxMjYtNDRlYzMzOWI= } 2026-01-04T02:40:46.605938Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494446634, txId: 281474976710676] shutting down 2026-01-04T02:40:46.729971Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7591330841794715499:2780] 2026-01-04T02:40:46.736270Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 115ms, session id ydb://session/3?node_id=2&id=YmYyZmJlOWItMWMxNmMwNmEtMzY5MmI5NTAtMmMyY2UwNzE= } 2026-01-04T02:40:46.850677Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494446851, txId: 281474976710678] shutting down 2026-01-04T02:40:46.851202Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7591330841794715520:2789] 2026-01-04T02:40:46.856559Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 118ms, session id ydb://session/3?node_id=2&id=NjM5NTJkODAtZjg0ZjhmZWMtMTIzNTJhMDYtYTZmZTEyNjE= } 2026-01-04T02:40:46.975759Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7591330841794715622:2804] 2026-01-04T02:40:46.981447Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 121ms, session id ydb://session/3?node_id=2&id=MWY1ZTA0ZGYtNmIyNWFlMGItN2ZlZmQ0ZmMtMjNhNjg5ZQ== } 2026-01-04T02:40:47.060979Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494447096, txId: 281474976710680] shutting down 2026-01-04T02:40:47.197328Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7591330846089683042:2828] 2026-01-04T02:40:47.204523Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 127ms, session id ydb://session/3?node_id=2&id=M2IyZjFiN2EtYjk0YzQxMGQtYTZkODUzNjAtOGE3ZmU2ZjM= } 2026-01-04T02:40:47.330740Z node 2 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [2:7591330846089683067:2839] 2026-01-04T02:40:47.340471Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 130ms, session id ydb://session/3?node_id=2&id=Y2MzZDEyNTktNWZhZWY4ZjQtMTMyMmNmNTQtNDFjNzA1Mjc= } 2026-01-04T02:40:47.427053Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494447453, txId: 281474976710682] shutting down >> KqpYql::EvaluateExpr1 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TConsoleTests::TestCreateSubSubDomain [GOOD] >> TConsoleTests::TestCreateSubSubDomainExtSubdomain >> KqpYql::TestUuidPrimaryKeyPrefixSearch >> TPQTest::TestReadSessions [GOOD] >> TPQTest::TestReadSubscription >> KqpScripting::ScriptValidate [GOOD] >> KqpScripting::ScriptStats |91.5%| [TA] $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] |91.5%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpYql::ScriptUdf [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_PreferedPartition_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test |91.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/ut/slow/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpYql::InsertCVList+useSink [GOOD] >> KqpYql::Discard+DiscardSelectIsOn >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced >> TConsoleTests::TestAlterServerlessTenant [GOOD] >> TConsoleTests::TestAuthorization >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::ScriptUdf [GOOD] Test command err: Trying to start YDB, gRPC: 17500, MsgBus: 20053 2026-01-04T02:40:39.306237Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330815384308303:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:39.306301Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:39.343538Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:40:39.559744Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:39.559869Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:39.615069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:39.632144Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:39.654085Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330815384308182:2081] 1767494439299349 != 1767494439299352 2026-01-04T02:40:39.660024Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:39.730165Z 
node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:39.730198Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:39.730214Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:39.731962Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:40.160710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:40.167346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:40:40.223425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.312621Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:40.363962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.508844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.581704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.447340Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330828269211948:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.447440Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.447762Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330828269211958:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.447813Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.729861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.759635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.790330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.820243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.849221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.878705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.927778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.968963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:43.046514Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330832564180123:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:43.046615Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:43.046691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330832564180128:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:43.046815Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330832564180130:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:43.046850Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:43.050558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:43.060803Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330832564180132:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:43.133689Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330832564180183:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:44.305656Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330815384308303:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:44.305723Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Optimization, code: 1070
:4:20: Error: RefSelect mode isn't supported by provider: kikimr Trying to start YDB, gRPC: 1617, MsgBus: 31259 2026-01-04T02:40:45.553254Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330839046564135:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:45.553515Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:45.574789Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:45.658144Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:45.660474Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330839046564022:2081] 1767494445550107 != 1767494445550110 2026-01-04T02:40:45.672209Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:45.672292Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:45.698523Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:45.754436Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:45.754466Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:45.754474Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:45.754545Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:45.849205Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:46.166458Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:46.184479Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:46.240715Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:46.368865Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:46.427040Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:46.583826Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:48.469142Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330851931467780:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.469214Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.469364Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330851931467790:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.469402Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.530161Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.562601Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.590980Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.615617Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.640697Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.698436Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.728730Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.783163Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.880154Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330851931468660:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.880309Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.880393Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330851931468665:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.880738Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330851931468667:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.880801Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.883789Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:48.897367Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330851931468668:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:48.955991Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330851931468720:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 }
: Error: Type annotation, code: 1030
:10:13: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At tuple, At function: SqlProjectItem, At lambda
:10:20: Error: At function: Apply
:8:28: Error: At function: ScriptUdf
:8:28: Error: Module not loaded for script type: Python3 2026-01-04T02:40:50.553357Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330839046564135:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:50.553446Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptClientOperationTimeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 28064, MsgBus: 25269 2026-01-04T02:40:30.736341Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330775348635893:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:30.736588Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:31.008441Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:31.102314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:31.102421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:31.153338Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:31.157620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:31.276705Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:31.368932Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:31.368959Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:31.368967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:31.369045Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:31.746370Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:31.985113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:32.049285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.187515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.334215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:32.414440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:33.872564Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330788233539608:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.872659Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.872940Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330788233539618:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:33.872999Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.285163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.311464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.337550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.363591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.388175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.422391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.462837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.510787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:34.581607Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330792528507782:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.581657Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.581736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330792528507787:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.581826Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330792528507789:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.581880Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:34.584891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:34.594648Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330792528507791:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:34.661495Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330792528507842:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:35.736749Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330775348635893:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:35.736808Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:36.727571Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494436694, txId: 281474976710674] shutting down 2026-01-04T02:40:36.728802Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494436694, txId: 281474976710675] shutting down 2026-01-04T02:40:36.730188Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot ... T02:40:47.583024Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494447264, txId: 281474976715695] shutting down 2026-01-04T02:40:47.583308Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 79ms, session id ydb://session/3?node_id=2&id=NzBlMjJiZTktNTVkMmE4ZTgtODE2MjMwYjItMTJhZjk4NA== } 2026-01-04T02:40:47.586769Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494447264, txId: 281474976715696] shutting down 2026-01-04T02:40:47.625502Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494447264, txId: 281474976715697] shutting down 2026-01-04T02:40:47.627064Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 82ms, session id ydb://session/3?node_id=2&id=YzgyOTk1NmMtNTVjNzMxMGMtOTczZGMxMzItOGQzYWIwNjg= } 2026-01-04T02:40:47.630886Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 85ms, session id ydb://session/3?node_id=2&id=YTNmNTUwNzQtZjcxY2U1ZGYtZjZkMmY1NTgtMTVlZjZkMzA= } 2026-01-04T02:40:47.642448Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 88ms, session id ydb://session/3?node_id=2&id=YjMwNjhhMGMtN2U2MDVlZGItYmJhMmRmYzItYTdhNzY0ZjA= } 2026-01-04T02:40:47.642544Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 91ms, session id ydb://session/3?node_id=2&id=MTE5OWJmOTUtMjMzNjk5ODYtNjkyZjUzMjktYThiNDg3YmQ= } 2026-01-04T02:40:47.724744Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 94ms, session id ydb://session/3?node_id=2&id=ZTM3ZmQ4NzItNGRjMWI0MS02ODgzNjdiNi1hYmI1OTMyYQ== } 2026-01-04T02:40:47.774936Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494447775, txId: 281474976715702] shutting down 2026-01-04T02:40:47.778442Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494447775, txId: 281474976715703] shutting down 2026-01-04T02:40:47.779211Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494447768, txId: 281474976715701] shutting down 2026-01-04T02:40:47.878844Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 97ms, session id ydb://session/3?node_id=2&id=YzEyMzNhZWItODQ3MTI5OGYtYWFkYTdkYmMtN2NiOTlhODA= } 2026-01-04T02:40:47.922915Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 100ms, session id ydb://session/3?node_id=2&id=Njk3NjhiMTAtNGQ0MTFlYWQtZjI4ZGE5YmItMTFjOTZmOTA= } 2026-01-04T02:40:47.973363Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494447985, txId: 281474976715707] shutting down 2026-01-04T02:40:47.975057Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494447985, txId: 281474976715708] shutting down 2026-01-04T02:40:48.032271Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 103ms, session id ydb://session/3?node_id=2&id=YjRjYzJlMWUtMjdmNjc1OTktNmU5MjI2MjYtOTdiYWQ2ZGY= } 2026-01-04T02:40:48.143125Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 106ms, session id ydb://session/3?node_id=2&id=NWZhYmNjYTItYzc4YjAzNzItNGU1Mjg3NDQtYjVkOTNjZDY= } 2026-01-04T02:40:48.163416Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494448181, txId: 281474976715712] shutting down 2026-01-04T02:40:48.163643Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494448181, txId: 281474976715711] shutting down 2026-01-04T02:40:48.257438Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 109ms, session id ydb://session/3?node_id=2&id=ZjY4OWRhNjQtNmE1NjYxODYtOGY3N2M2YWQtM2UwODFhNmI= } 2026-01-04T02:40:48.346784Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494448377, txId: 281474976715716] shutting down 2026-01-04T02:40:48.347031Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494448377, txId: 281474976715715] shutting down 2026-01-04T02:40:48.478035Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 115ms, session id ydb://session/3?node_id=2&id=NTM4ZTI1MGMtZTE1NmVlNTQtNDY2YTNiOC01ZWFmYTZiMQ== } 2026-01-04T02:40:48.596135Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 118ms, session id ydb://session/3?node_id=2&id=MjJlZDdmNC1lMjRjZGEzZC03ODU0MjRkMy1jNDg5ZTExMQ== } 2026-01-04T02:40:48.614070Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494448629, txId: 281474976715719] shutting down 2026-01-04T02:40:48.615493Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494448629, txId: 281474976715720] shutting down 2026-01-04T02:40:48.783396Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 121ms, session id ydb://session/3?node_id=2&id=ZTQ3ZjZkNGYtODUzNzg2MDYtMTc2ODE3NTctOWJlMWI2ZA== } 2026-01-04T02:40:48.817937Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494448846, txId: 281474976715723] shutting down 2026-01-04T02:40:48.822967Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494448846, txId: 281474976715724] shutting down 2026-01-04T02:40:49.018501Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 127ms, session id ydb://session/3?node_id=2&id=ZGI0MTc0NzYtZDY5NTA4NTUtNzYzMzkyNGYtOGFjYzZhYWE= } 2026-01-04T02:40:49.052773Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494449077, txId: 281474976715728] shutting down 2026-01-04T02:40:49.097479Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494449077, txId: 281474976715727] shutting down 2026-01-04T02:40:49.101179Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 130ms, session id ydb://session/3?node_id=2&id=ODg0ZmEzMmUtNDJkMWI5MzUtNzEyOWNhYmYtMjQ1MjI1MDE= } 2026-01-04T02:40:49.287932Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 133ms, session id ydb://session/3?node_id=2&id=MzBhYTIwY2EtYTgyZWM4ZjItMzQzMGZjNjItZDlkZTY1Zjc= } 2026-01-04T02:40:49.291979Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494449322, txId: 281474976715731] shutting down 2026-01-04T02:40:49.395420Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 136ms, session id ydb://session/3?node_id=2&id=ZWQ2YmE2OWQtODM5NjE3ZS1jY2JjMDU4OS1hMjRlYmVmZA== } 2026-01-04T02:40:49.439863Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494449469, txId: 281474976715733] shutting down 2026-01-04T02:40:49.572569Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 139ms, session id ydb://session/3?node_id=2&id=NmM5NGM1YWYtZmM2NDU1M2EtYzllYjU4NmYtZTUyMGEyOWE= } 2026-01-04T02:40:49.577543Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494449602, txId: 281474976715735] shutting down 2026-01-04T02:40:49.667284Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 142ms, session id ydb://session/3?node_id=2&id=YWNhZWNlNzMtMTBhM2Q3ZGQtMTQ1OTJlZjEtYzc2YzQ0NjQ= } 2026-01-04T02:40:49.713775Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494449742, txId: 281474976715737] shutting down 2026-01-04T02:40:49.816503Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 145ms, session id ydb://session/3?node_id=2&id=NTQzYzY0ZjYtNTk3NTQzYWQtMjIxM2NmMmQtYzUzY2M3OTY= } 2026-01-04T02:40:49.880645Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494449910, txId: 281474976715739] shutting down 2026-01-04T02:40:50.040731Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 148ms, session id ydb://session/3?node_id=2&id=MTk4YTU3MmMtM2I0NzdmOTctYzRlOGMzYmQtMzY1M2I4MTE= } 2026-01-04T02:40:50.088955Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494450113, txId: 281474976715741] shutting down 2026-01-04T02:40:50.089655Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494450113, txId: 281474976715742] shutting down >> KqpScan::ScanPg [GOOD] >> KqpYql::TableConcat [GOOD] >> KqpYql::SelectNoAsciiValue |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateIf [GOOD] >> KqpYql::BinaryJsonOffsetBound [GOOD] >> KqpYql::AnsiIn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::InsertCVList+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 62229, MsgBus: 15009 2026-01-04T02:40:38.851072Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330807860568624:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:38.855759Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:39.095757Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:39.095886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:39.134941Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:39.151602Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:39.154505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:39.155152Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330807860568587:2081] 1767494438838460 != 1767494438838463 2026-01-04T02:40:39.253909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:39.253937Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:39.253954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:39.254072Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:39.418787Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:39.640171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:39.697601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:39.846360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:39.855797Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:39.997234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.056549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.747867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330820745472346:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.747990Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.748416Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330820745472356:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.748484Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.176118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.208886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.237400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.262933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.288002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.318749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.354382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.394563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.465601Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330825040440521:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.465696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.465960Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330825040440526:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.466006Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330825040440527:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.466091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.470064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:42.479687Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330825040440530:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:42.547019Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330825040440581:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:43.840694Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330807860568624:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:43.840769Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:44.377261Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7591330833630375475:2535], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3e1s0aazwh12mtxyc231vy. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=ZDEwNWZhOWItM2QxZTQwNzMtZj ... eration type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:45.967780Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:46.116189Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:46.181170Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:46.350591Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:48.627064Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330853255346300:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.627168Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.627501Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330853255346310:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.627559Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.705210Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.736551Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.765934Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.802417Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.838159Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.884980Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.954538Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:49.005186Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:49.079052Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330857550314476:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:49.079135Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:49.079139Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330857550314481:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:49.079255Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330857550314483:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:49.079296Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:49.082971Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:49.092498Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330857550314484:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:40:49.173523Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330857550314536:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:50.341654Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330840370442590:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:50.341726Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:50.789902Z node 2 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2026-01-04T02:40:50.790196Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037914 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-04T02:40:50.790352Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037914 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-04T02:40:50.790532Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [2:7591330861845282166:2536], Table: `/Root/Test` ([72057594046644480:45:1]), SessionActorId: [2:7591330861845282150:2536]Got CONSTRAINT VIOLATION for table `/Root/Test`. ShardID=72075186224037914, Sink=[2:7591330861845282166:2536].{
: Error: Conflict with existing key., code: 2012 } 2026-01-04T02:40:50.790634Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [2:7591330861845282159:2536], SessionActorId: [2:7591330861845282150:2536], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[2:7591330861845282150:2536]. 2026-01-04T02:40:50.790896Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=2&id=ZmQxZDBkY2QtNzRiMjY3NmQtZTJiYjFjNjktYTVjNDFhMWU=, ActorId: [2:7591330861845282150:2536], ActorState: ExecuteState, LegacyTraceId: 01ke3e1zfzez03j10jr1vq1cbs, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7591330861845282160:2536] from: [2:7591330861845282159:2536] trace_id# 2026-01-04T02:40:50.791010Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [2:7591330861845282160:2536] TxId: 281474976715673. Ctx: { TraceId: 01ke3e1zfzez03j10jr1vq1cbs, Database: /Root, SessionId: ydb://session/3?node_id=2&id=ZmQxZDBkY2QtNzRiMjY3NmQtZTJiYjFjNjktYTVjNDFhMWU=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/Test`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-04T02:40:50.791336Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=ZmQxZDBkY2QtNzRiMjY3NmQtZTJiYjFjNjktYTVjNDFhMWU=, ActorId: [2:7591330861845282150:2536], ActorState: ExecuteState, LegacyTraceId: 01ke3e1zfzez03j10jr1vq1cbs, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/Test`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id#
: Error: Execution, code: 1060
: Error: Constraint violated. Table: `/Root/Test`., code: 2012
: Error: Conflict with existing key., code: 2012 |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TPQTest::TestMaxTimeLagRewind [GOOD] >> TPQTest::TestManyConsumers >> KqpYql::EvaluateExpr3 [GOOD] >> KqpYql::EvaluateExprPgNull >> PQCountersLabeled::NewConsumersCountersAppear [GOOD] >> KqpScripting::LimitOnShard [GOOD] >> KqpScripting::NoAstSizeLimit >> KqpScripting::StreamExecuteYqlScriptScan [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce >> Compression::WriteWithMixedCodecs [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithoutTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 15111, MsgBus: 25420 2026-01-04T02:40:39.321207Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330812718288741:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:39.323535Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:39.533962Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:39.539994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:39.540078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:39.561939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:39.636269Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:39.649633Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330812718288699:2081] 1767494439319622 != 1767494439319625 2026-01-04T02:40:39.745715Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:39.745744Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:39.745756Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:39.745841Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:39.763734Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:40.168606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:40.217248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.327673Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:40.350046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.495038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.567508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.464293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330825603192468:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.464445Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.465269Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330825603192478:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.465368Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:42.778377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.809666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.838070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.865971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.892763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.922659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.979932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:43.023118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:43.093168Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330829898160646:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:43.093241Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:43.093328Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330829898160651:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:43.093406Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330829898160653:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:43.093454Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:43.097241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:43.107457Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330829898160655:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:43.178843Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330829898160706:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:44.321453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330812718288741:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:44.321529Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:44.657820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... ath=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:45.767525Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:45.830146Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:45.832351Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330840006299719:2081] 1767494445754278 != 1767494445754281 2026-01-04T02:40:45.871582Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:45.871663Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:45.875635Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:45.922815Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:45.922845Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:45.922852Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:45.922943Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:45.924418Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:46.293583Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:46.299861Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:40:46.306724Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:46.361016Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:46.474649Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:46.529083Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:46.760471Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:48.583710Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330852891203472:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.583797Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.584009Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330852891203481:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.584068Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.660248Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.693006Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.722911Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.795932Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.829108Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.871527Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.940693Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.992023Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:49.067216Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330857186171655:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:49.067290Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:49.067508Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330857186171660:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:49.067515Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330857186171661:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:49.067555Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:49.070475Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:49.079521Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330857186171664:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:40:49.161407Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330857186171715:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:50.756101Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330840006299760:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:50.756173Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:50.930161Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.754159Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494451779, txId: 281474976715675] shutting down |91.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateIf [GOOD] Test command err: Trying to start YDB, gRPC: 18901, MsgBus: 26589 2026-01-04T02:40:41.163938Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330821326159176:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:41.169022Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:41.421235Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:41.427330Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:41.427494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:41.457708Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:41.479067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:41.573734Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:41.573757Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:41.573766Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:41.573867Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:41.676433Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:41.952895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:42.013383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.174917Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:42.184971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.350296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.417850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:44.305600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330834211062896:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:44.305766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:44.306059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330834211062906:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:44.306138Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:44.637784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:44.664389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:44.689093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:44.718451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:44.746561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:44.779782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:44.835166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:44.878416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:44.942566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330834211063775:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:44.942656Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:44.942765Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330834211063781:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:44.942796Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330834211063783:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:44.942827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:44.946312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:44.957971Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330834211063785:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:45.023935Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330838506031132:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:46.164207Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330821326159176:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:46.164274Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 13331, MsgBus: 8360 2026-01-04T02:40:47.266513Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330847398606046:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:47.266590Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:47.280769Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:47.366968Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:47.389115Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:47.389177Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:47.396535Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:47.443531Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:47.443558Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:47.443565Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:47.443664Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:47.548071Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:47.808540Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:47.829101Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:47.901674Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.025991Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.099362Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.272128Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:50.185443Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330860283509749:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.185527Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.185766Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330860283509758:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.185821Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.259608Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.285064Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.312406Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.339754Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.370868Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.397911Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.426586Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.463961Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.524279Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330860283510625:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.524360Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.524373Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330860283510630:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.524513Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330860283510632:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.524551Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.528266Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:50.538435Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330860283510633:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:50.591702Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330860283510685:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:52.266759Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330847398606046:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:52.266878Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_kqp_scan/unittest >> KqpScan::ScanPg [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:40:20.871859Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:40:20.872831Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:40:20.881363Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:685:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:40:21.012557Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:40:21.023445Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:40:21.024431Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2225], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:40:21.024763Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:40:21.024946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:40:21.026542Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:40:21.026583Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:40:21.580372Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:21.701903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:21.702062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:21.702507Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:21.702630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:21.763395Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:40:21.764108Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:21.764560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:21.876571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:21.902834Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:40:22.567999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:22.636542Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:22.637988Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:22.981004Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:224:2183] Handle TEvProposeTransaction 2026-01-04T02:40:22.981083Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:224:2183] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:40:22.982208Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:224:2183] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:1563:3023] 2026-01-04T02:40:23.098732Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:1563:3023] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T02:40:23.098854Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:1563:3023] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:40:23.100870Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:1563:3023] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T02:40:23.101050Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:1563:3023] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:40:23.101640Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:1563:3023] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:40:23.101838Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:1563:3023] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:40:23.102037Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:1563:3023] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:40:23.108631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:23.109875Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:1563:3023] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:40:23.112911Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:1563:3023] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:40:23.113020Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:1563:3023] txid# 281474976715657 SEND to# [1:1490:2980] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:40:23.202345Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1614:2394] 2026-01-04T02:40:23.202753Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:40:23.250937Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:40:23.251238Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:40:23.253735Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:40:23.253878Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:40:23.254026Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:40:23.256025Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:40:23.256846Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:40:23.256955Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:1638:2394] in generation 1 2026-01-04T02:40:23.271266Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:40:23.309517Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:40:23.310985Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:40:23.311150Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:1641:2411] 2026-01-04T02:40:23.311207Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:40:23.311252Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:40:23.311290Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:40:23.312898Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:40:23.313015Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:40:23.313202Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:40:23.313253Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:40:23.313295Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:40:23.313337Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:40:23.313495Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1600:3053], serverId# [2:1611:2392], sessionId# [0:0:0] 2026-01-04T02:40:23.314051Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:40:23.314459Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:40:23.314588Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:40:23.317861Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 720 ... d: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [3:1910:3235] 2026-01-04T02:40:39.420984Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [3:1903:3206] TxId: 281474976710662. Ctx: { TraceId: 01ke3e1k30dr1e027pj9tv41m8, Database: , SessionId: ydb://session/3?node_id=3&id=Y2FiNTg1OGMtOTViYTc1NjQtYmVhYWRlYzUtODJhYTYwNTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [4:1913:2440], trace_id# 2026-01-04T02:40:39.421384Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [3:1903:3206] TxId: 281474976710662. Ctx: { TraceId: 01ke3e1k30dr1e027pj9tv41m8, Database: , SessionId: ydb://session/3?node_id=3&id=Y2FiNTg1OGMtOTViYTc1NjQtYmVhYWRlYzUtODJhYTYwNTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [4:1913:2440] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 225759 DurationUs: 8000 Tasks { TaskId: 1 CpuTimeUs: 222428 FinishTimeMs: 1767494439362 OutputRows: 1 OutputBytes: 6 Tables { TablePath: "/Root/table-1" ReadRows: 100 ReadBytes: 800 } ComputeCpuTimeUs: 106 BuildCpuTimeUs: 222322 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-46vtxmsa7i" NodeId: 4 StartTimeMs: 1767494439354 CreateTimeMs: 1767494439094 UpdateTimeMs: 1767494439362 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:40:39.421473Z node 3 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01ke3e1k30dr1e027pj9tv41m8, Database: , SessionId: ydb://session/3?node_id=3&id=Y2FiNTg1OGMtOTViYTc1NjQtYmVhYWRlYzUtODJhYTYwNTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [4:1913:2440] 2026-01-04T02:40:39.421693Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [3:1903:3206] TxId: 281474976710662. Ctx: { TraceId: 01ke3e1k30dr1e027pj9tv41m8, Database: , SessionId: ydb://session/3?node_id=3&id=Y2FiNTg1OGMtOTViYTc1NjQtYmVhYWRlYzUtODJhYTYwNTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-04T02:40:39.421778Z node 3 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1505} ActorId: [3:1903:3206] TxId: 281474976710662. Ctx: { TraceId: 01ke3e1k30dr1e027pj9tv41m8, Database: , SessionId: ydb://session/3?node_id=3&id=Y2FiNTg1OGMtOTViYTc1NjQtYmVhYWRlYzUtODJhYTYwNTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Terminate, become ZombieState trace_id# 2026-01-04T02:40:39.421864Z node 3 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [3:1903:3206] TxId: 281474976710662. Ctx: { TraceId: 01ke3e1k30dr1e027pj9tv41m8, Database: , SessionId: ydb://session/3?node_id=3&id=Y2FiNTg1OGMtOTViYTc1NjQtYmVhYWRlYzUtODJhYTYwNTU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.697587s ReadRows: 100 ReadBytes: 800 RequestUnits# 465 ForceFlag# true trace_id# 2026-01-04T02:40:39.423030Z node 3 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976710661] shutting down 2026-01-04T02:40:39.423127Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [3:225:2183] Handle TEvProposeTransaction 2026-01-04T02:40:39.423168Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [3:225:2183] TxId# 0 ProcessProposeTransaction 2026-01-04T02:40:39.423309Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:288: actor# [3:225:2183] Cookie# 0 userReqId# "" txid# 0 reqId# [3:1934:3245] SnapshotReq marker# P0 2026-01-04T02:40:39.424419Z node 3 :TX_PROXY DEBUG: resolvereq.cpp:152: Actor# [3:1937:3245] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2026-01-04T02:40:39.424673Z node 3 :TX_PROXY DEBUG: resolvereq.cpp:272: Actor# [3:1937:3245] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2026-01-04T02:40:39.424783Z node 3 :TX_PROXY DEBUG: snapshotreq.cpp:1451: Actor# [3:1934:3245] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 ... waiting for SysViewsRoster update finished 2026-01-04T02:40:46.752822Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:40:46.754066Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:40:46.756434Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:40:46.768870Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:40:46.771397Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:40:46.773506Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:40:46.773903Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:40:46.774044Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:40:46.776269Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:40:46.776387Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:40:47.069373Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:47.172749Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:47.172890Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:47.173772Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:47.173873Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:47.219807Z node 5 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2026-01-04T02:40:47.220694Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:47.221108Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:47.314646Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:47.362027Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:40:47.987642Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:48.051810Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:48.051973Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:48.400072Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.966365Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1724:3110], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.966493Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1735:3115], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.966572Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.967725Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1739:3119], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.967980Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.974020Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:49.424390Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:1738:3118], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:40:49.538573Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:1832:3184] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:51.508232Z node 5 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976715661] shutting down |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_kqp_scan/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> PQCountersLabeled::NewConsumersCountersAppear [GOOD] Test command err: 2026-01-04T02:40:04.738975Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:133:2057] recipient: [1:131:2164] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:133:2057] recipient: [1:131:2164] Leader for TabletID 72057594037927937 is [1:137:2168] sender: [1:138:2057] recipient: [1:131:2164] 2026-01-04T02:40:04.818920Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:04.818996Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:04.819051Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:04.819108Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:179:2057] recipient: [1:177:2198] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:179:2057] recipient: [1:177:2198] Leader for TabletID 72057594037927938 is [1:183:2202] sender: [1:184:2057] recipient: [1:177:2198] Leader for TabletID 72057594037927937 is [1:137:2168] sender: [1:209:2057] recipient: [1:14:2061] 2026-01-04T02:40:04.839507Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:04.861590Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:207:2220] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:40:04.862675Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:215:2168] 2026-01-04T02:40:04.865414Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:215:2168] 
2026-01-04T02:40:04.867611Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:216:2168] 2026-01-04T02:40:04.869490Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:216:2168] 2026-01-04T02:40:04.876717Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:04.877135Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|c4e015-f592804-5df428b9-102d93c9_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:04.884507Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:04.884917Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|89cdeec7-bc125ba2-5ca35f90-a8220c33_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:04.891210Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:04.891688Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|c66ee43d-98324a18-494cd64e-bf54b77_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 2026-01-04T02:40:04.918731Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:04.970592Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:04.991377Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:05.012093Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:05.063749Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:05.084586Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:05.182018Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:05.269841Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:05.332863Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:05.471771Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:05.544427Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 
local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:05.691610Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:05.939323Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Expected: { "sensors": [ { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByCommitted" }, "value": 30 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/MessageLagByLastRead" }, "value": 29 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/PartitionMaxReadQuotaUsage" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgMin" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesAvailAvgSec" }, "value": 1000000000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesMaxPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerDay" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerHour" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerMin" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesPerSec" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadBytesQuota" }, "value": 1000000000 }, { "kind": "RATE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadOffsetRewindSum" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/ReadTimeLagMs" }, "value": 0 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": 
"PQ/SizeLagByCommitted" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/SizeLagByLastRead" }, "value": 747 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "sensor": "PQ/TimeSinceLastReadMs" }, "value": 5000 }, { "kind": "GAUGE", "labels": { "user_counters": "PersQueue", "client": "total", "important": "0", "topic": "rt3.dc1--asdfgs--topic", "s ... let_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:41.232443Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:41.438919Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:41.611313Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:41.706076Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:41.950715Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:42.181102Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:42.409938Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:42.463833Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:42.652505Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:42.954682Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:43.185282Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:43.410124Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:43.558069Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:43.602027Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:43.878198Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:44.107415Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, 
node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:44.307824Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:44.502170Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:44.601412Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:44.755100Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:45.010301Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:45.208141Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:45.447744Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:45.628726Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:45.711335Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:46.003817Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:46.233117Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:46.456664Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:46.664898Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:46.706689Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:46.750410Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][topic] pipe [6:432:2377] connected; active server actors: 1 2026-01-04T02:40:47.094040Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:47.353597Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:47.597296Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:47.856028Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 
disallowed 0 2026-01-04T02:40:47.990772Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:48.159185Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:48.408894Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:48.674514Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:48.968367Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:49.194415Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:49.325658Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:49.604020Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:49.846310Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:50.095332Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:50.340364Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:50.414088Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:50.666222Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:50.907084Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:51.152981Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:51.455708Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:51.507692Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:51.688505Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:51.966840Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:52.215360Z node 6 
:TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:52.465354Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:52.643802Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:52.757275Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:53.019315Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:53.252469Z node 6 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 6 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> KqpYql::UuidPrimaryKeyBulkUpsert |91.6%| [TA] $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} |91.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQTest::TestReadSubscription [GOOD] >> KqpYql::InsertCV+useSink >> KqpYql::EvaluateExprYsonAndType |91.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_kqp_scan/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScripting::SelectNullType [GOOD] >> KqpScripting::StreamDdlAndDml >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] >> KqpPragma::ResetPerQuery [GOOD] >> KqpPragma::Warning >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] >> KqpYql::EvaluateExpr1 [GOOD] >> KqpYql::EvaluateExpr2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestReadSubscription [GOOD] Test command err: 2026-01-04T02:40:03.930987Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-04T02:40:03.992320Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:03.992395Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:03.992441Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:03.992494Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-04T02:40:04.011012Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 
72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:04.031242Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "test" Generation: 1 Important: false } Consumers { Name: "another-user" Generation: 1 Important: false } 2026-01-04T02:40:04.032378Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-04T02:40:04.034725Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-04T02:40:04.038781Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-04T02:40:04.039916Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2026-01-04T02:40:04.042214Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [1:191:2142] 2026-01-04T02:40:04.043678Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [1:191:2142] 2026-01-04T02:40:04.051647Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:211:2209], now have 1 active actors on pipe 2026-01-04T02:40:04.051748Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-04T02:40:04.051789Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-04T02:40:04.051996Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2026-01-04T02:40:04.052050Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2026-01-04T02:40:04.052108Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][0][StateIdle] Received TPartition::TEvWrite 2026-01-04T02:40:04.052154Z node 1 :PERSQUEUE DEBUG: event_helpers.cpp:40: tablet 72057594037927937 topic 'rt3.dc1--asdfgs--topic' partition 0 error: new GetOwnership request needed for owner 2026-01-04T02:40:04.052194Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:04.052239Z 
node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:04.052263Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:04.052292Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:04.052313Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:04.052355Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1253: [PQ: 72057594037927937] Handle TEvPQ::TEvError Cookie 1, Error new GetOwnership request needed for owner 2026-01-04T02:40:04.052419Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:04.052679Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:213:2211], now have 1 active actors on pipe 2026-01-04T02:40:04.052743Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-04T02:40:04.052777Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-04T02:40:04.052850Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|dfd0295c-cf8ae0b8-3510f8d0-96e596d5_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:04.052892Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:04.052922Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:40:04.052954Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:40:04.053009Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:04.053039Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:40:04.053081Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:40:04.053106Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:40:04.053133Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:04.053194Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:37: [72057594037927937][Partition][0][StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2026-01-04T02:40:04.053259Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-04T02:40:04.053469Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:215:2213], now have 1 active actors on pipe 2026-01-04T02:40:04.053562Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-04T02:40:04.053595Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-04T02:40:04.053634Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 1 partNo : 0 messageNo: 0 size 1 offset: -1 2026-01-04T02:40:04.053666Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2074: [PQ: 72057594037927937] got client message topic: rt3.dc1--asdfgs--topic partition: 0 SourceId: 'sourceid' SeqNo: 2 partNo : 0 messageNo: 0 size 1 offset: -1 2026-01-04T02:40:04.053699Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:636: [72057594037927937][Partition][0][StateIdle] Received TPartition::TEvWrite 2026-01-04T02:40:04.053732Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:04.053753Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:40:04.053786Z node 1 :PERSQUEUE DEBUG: partition.cpp:2423: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:40:04.053821Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-04T02:40:04.053841Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:04.053859Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:40:04.053936Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 1 partNo 0 2026-01-04T02:40:04.054690Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 1 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 72 count 1 nextOffset 1 batches 1 2026-01-04T02:40:04.054741Z node 1 :PERSQUEUE DEBUG: partition.cpp:2487: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:40:04.054767Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1348: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob processing sourceId 'sourceid' seqNo 2 partNo 0 2026-01-04T02:40:04.054798Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:1452: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 part blob complete sourceId 'sourceid' seqNo 2 partNo 0 FormedBlobsCount 0 NewHead: Offset 0 PartNo 0 PackedSize 112 count 2 nextOffset 2 batches 1 2026-01-04T02:40:04.054815Z node 1 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-04T02:40:04.054835Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-04T02:40:04.054874Z node 1 :PERSQU ... ion][4][StateInit] bootstrapping 4 [56:193:2142] 2026-01-04T02:40:52.210923Z node 56 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][4][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [56:193:2142] 2026-01-04T02:40:52.220267Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:52.220719Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|4a48941-a800b26a-1db9bc66-701c1cb3_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:52.248065Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:52.331277Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:52.354201Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:52.450050Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:52.502376Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:52.554617Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:52.648633Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:52.659337Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:52.808138Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:52.891730Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:53.009909Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:53.166722Z node 56 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 56 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:53.201841Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:53.202437Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|512b7001-1208a920-98aaff93-2d196157_1 generated for partition 0 topic 
'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:53.211142Z node 56 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:53.211738Z node 56 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|682e3911-875af94-b7b9f904-35976f37_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:53.531913Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:107:2057] recipient: [57:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:107:2057] recipient: [57:105:2138] Leader for TabletID 72057594037927937 is [57:111:2142] sender: [57:112:2057] recipient: [57:105:2138] 2026-01-04T02:40:53.585165Z node 57 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:53.585220Z node 57 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:53.585264Z node 57 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:53.585314Z node 57 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [57:153:2057] recipient: [57:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [57:153:2057] recipient: [57:151:2172] Leader for TabletID 72057594037927938 is [57:157:2176] sender: [57:158:2057] recipient: [57:151:2172] Leader for TabletID 72057594037927937 is [57:111:2142] sender: [57:183:2057] recipient: [57:14:2061] 2026-01-04T02:40:53.603445Z node 57 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:53.605016Z node 57 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 60 actor [57:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 60 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 60 } Consumers { Name: "user1" Generation: 60 Important: true } 2026-01-04T02:40:53.606143Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [57:189:2142] 2026-01-04T02:40:53.608983Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [57:189:2142] 2026-01-04T02:40:53.612060Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: 
[72057594037927937][Partition][1][StateInit] bootstrapping 1 [57:190:2142] 2026-01-04T02:40:53.614265Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [57:190:2142] 2026-01-04T02:40:53.616833Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [57:191:2142] 2026-01-04T02:40:53.619030Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [57:191:2142] 2026-01-04T02:40:53.621571Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [57:192:2142] 2026-01-04T02:40:53.623721Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [57:192:2142] 2026-01-04T02:40:53.626287Z node 57 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][4][StateInit] bootstrapping 4 [57:193:2142] 2026-01-04T02:40:53.628443Z node 57 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][4][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [57:193:2142] 2026-01-04T02:40:53.639796Z node 57 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:53.640162Z node 57 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|b8424540-a45ccb2-479c8f11-3c1685b6_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:53.667589Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:53.750009Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:53.771054Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:53.865707Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:53.917766Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:53.970407Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:54.064501Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:54.075441Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:54.223342Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:54.307450Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 
selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:54.416913Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:54.575802Z node 57 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 57 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:54.610796Z node 57 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:54.611327Z node 57 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|734ddfba-fde8d76-ff19192c-14cc2e96_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:40:54.621258Z node 57 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:54.621620Z node 57 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ce92723b-1ce59db9-a285f376-6778d15_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TConsoleTests::TestCreateSubSubDomainExtSubdomain [GOOD] >> TConsoleTests::TestDatabaseQuotas >> KqpYql::UuidPrimaryKey ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidPrimaryKeyPrefixSearch [GOOD] Test command err: Trying to start YDB, gRPC: 28360, MsgBus: 11633 2026-01-04T02:40:50.095309Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330859355169262:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:50.095363Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:50.275226Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:50.312086Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:50.312247Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:50.348210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:50.401147Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:50.435465Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:50.466001Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:50.466026Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:50.466033Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-04T02:40:50.466144Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:50.835812Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:51.105470Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:53.022000Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330872240071986:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:53.022271Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:53.022665Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330872240071996:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:53.022720Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:53.243357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.338266Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330872240072091:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:53.338343Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:53.338461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330872240072096:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:53.338497Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330872240072097:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:53.338535Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:53.342045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:53.351375Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330872240072100:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:40:53.457251Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330872240072151:2595] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidDefaultColumn >> KqpScripting::ExecuteYqlScriptScanScalar >> KqpScripting::ScriptingCreateAndAlterTableTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::AlterTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:123:2058] recipient: [1:116:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:123:2058] recipient: [1:116:2145] Leader for TabletID 72057594046316545 is [1:126:2150] sender: [1:131:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:115:2144] Leader for TabletID 72057594046447617 is [1:139:2160] sender: [1:141:2058] recipient: [1:116:2145] 2026-01-04T02:39:43.330901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:43.331006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:43.331047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:43.331156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:43.331215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:43.331267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:43.331331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:43.331399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:43.332391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: 
HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:43.332708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:43.439508Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:39:43.439590Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:43.440626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:139:2160] sender: [1:187:2058] recipient: [1:15:2062] 2026-01-04T02:39:43.454635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:43.454779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:43.454937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:43.460210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:43.460747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:43.461512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:43.461946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:43.465534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:43.465715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:43.467001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:43.467067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:43.467198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:43.467243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:43.467310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:43.467544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:43.617444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:39:43.618436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.618561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.618746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.618841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.618926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.619026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.619121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.619243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.619314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.619432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.619519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:43.619599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "m ... 
pp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2026-01-04T02:40:55.110523Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 1003 2026-01-04T02:40:55.110562Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2026-01-04T02:40:55.110601Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-04T02:40:55.110641Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:40:55.110728Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2026-01-04T02:40:55.111301Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1040 } } CommitVersion { Step: 5000040 TxId: 1003 } 2026-01-04T02:40:55.111404Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2026-01-04T02:40:55.111545Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1040 } } CommitVersion { Step: 5000040 TxId: 1003 } 2026-01-04T02:40:55.111655Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 1040 } } CommitVersion { Step: 5000040 TxId: 1003 } debug: NTableState::TProposedWaitParts operationId# 1003:0 2026-01-04T02:40:55.112254Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 738 RawX2: 219043334820 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-01-04T02:40:55.112300Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409546, partId: 0 2026-01-04T02:40:55.112406Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, 
operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 738 RawX2: 219043334820 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-01-04T02:40:55.112475Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:40:55.112556Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 738 RawX2: 219043334820 } Origin: 72075186233409546 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-01-04T02:40:55.112618Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:40:55.112657Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-04T02:40:55.112698Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:40:55.112744Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1003:0 129 -> 240 2026-01-04T02:40:55.116410Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2026-01-04T02:40:55.116557Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-04T02:40:55.117004Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-04T02:40:55.117344Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-04T02:40:55.117392Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1003:0 ProgressState 2026-01-04T02:40:55.117484Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-01-04T02:40:55.117517Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-04T02:40:55.117558Z node 51 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-01-04T02:40:55.117588Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-04T02:40:55.117626Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2026-01-04T02:40:55.117667Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-04T02:40:55.117710Z node 51 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1003:0 2026-01-04T02:40:55.117742Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1003:0 2026-01-04T02:40:55.117875Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2026-01-04T02:40:55.128490Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2026-01-04T02:40:55.128552Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2026-01-04T02:40:55.128944Z node 51 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2026-01-04T02:40:55.129047Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2026-01-04T02:40:55.129082Z node 51 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [51:861:2833] TestWaitNotification: OK eventTxId 1003 2026-01-04T02:40:55.129571Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:40:55.129793Z node 51 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 270us result status StatusSuccess 2026-01-04T02:40:55.130345Z node 51 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> KqpYql::Discard+DiscardSelectIsOn [GOOD] >> KqpYql::Discard-DiscardSelectIsOn >> KqpScripting::ScriptExplainCreatedTable >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_PartitionInactive_0_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test >> KqpScripting::NoAstSizeLimit [GOOD] >> TConsoleTests::TestAuthorization [GOOD] >> TConsoleTests::TestAuthorizationExtSubdomain >> KqpPragma::Auth >> KqpYql::AnsiIn [GOOD] >> KqpYql::SelectNoAsciiValue [GOOD] >> KqpYql::EvaluateExprPgNull [GOOD] >> KqpScripting::ScriptStats [GOOD] >> KqpYql::UpdatePk >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::NoAstSizeLimit [GOOD] Test command err: Trying to start YDB, gRPC: 8410, MsgBus: 3219 2026-01-04T02:40:47.759506Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330846773115026:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:47.759828Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:48.009134Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:48.013551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:48.013632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:48.019942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:48.105018Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:48.106257Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330846773114989:2081] 1767494447758789 != 1767494447758792 
2026-01-04T02:40:48.195934Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:48.195979Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:48.195988Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:48.196077Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:48.244031Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:48.559952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:48.632370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.770397Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:48.772472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.901921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.963243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.813238Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330859658018750:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.813365Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.813688Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330859658018760:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.813755Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.124556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.150837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.175994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.203024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.231145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.261044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.315774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.351517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.413792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330863952986928:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.413878Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330863952986933:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.413879Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.414028Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330863952986935:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.414068Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.416920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:51.424643Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330863952986937:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:51.505667Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330863952986988:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:52.759654Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330846773115026:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:52.759727Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:53.191374Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494453228, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 22903, MsgBus: 20055 2026-01-04T02:40:53.959734Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330875489665132:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:53.960381Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:53.965101Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:40:54.044964Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:54.046696Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330875489665019:2081] 1767494453955861 != 1767494453955864 2026-01-04T02:40:54.056024Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:54.077249Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:54.077309Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:54.079248Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:54.121997Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:54.122027Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:54.122034Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:54.122115Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:54.356737Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:54.499156Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:54.962337Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:57.009620Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330892669535087:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.009731Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.009992Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330892669535096:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.010053Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.032849Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:57.089513Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330892669535192:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.089600Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.089884Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330892669535194:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.089964Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.119997Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330892669535205:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.120063Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330892669535210:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.120076Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.120233Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330892669535213:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.120266Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.122584Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:57.130340Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330892669535212:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:40:57.220191Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330892669535265:2602] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::AnsiIn [GOOD] Test command err: Trying to start YDB, gRPC: 15895, MsgBus: 31017 2026-01-04T02:40:46.964029Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330845539405246:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:46.964093Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:47.167672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:47.167774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:47.192491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:47.229233Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:47.246888Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:47.247642Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330845539405214:2081] 1767494446962539 != 1767494446962542 2026-01-04T02:40:47.340600Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:47.340627Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:47.340635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:47.340756Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:47.383936Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:47.753458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:47.759820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:40:47.793132Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:47.921227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.019760Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:48.080467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.161934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:49.932626Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330858424308969:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:49.932818Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:49.933182Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330858424308979:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:49.933238Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.248234Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.284369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.317001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.346385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.377539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.412076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.445621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.490229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.551641Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330862719277144:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.551781Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.552143Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330862719277149:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.552203Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330862719277150:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.552321Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.556293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:50.567701Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330862719277153:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:50.655810Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330862719277204:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:51.964110Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330845539405246:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:51.964168Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:52.118179Z node 1 :KQP_SESSION ERROR: {KQPSA@kqp_session_actor.cpp:3404} SessionId: ydb://session/3?node_id=1&id=NjU0Y2I0YjAtN2I0NzI1ODAtNzE3ZTg0MTUtNG ... ://session/3?node_id=1&id=NjU0Y2I0YjAtN2I0NzI1ODAtNzE3ZTg0MTUtNGM1MzgzMTI=, ActorId: [1:7591330867014244803:2530], ActorState: ExecuteState, LegacyTraceId: 01ke3e20rc1635k8eqf2j1s4t8, Create QueryResponse for error on request, msg: yql/essentials/types/binary_json/read.cpp:159: StringOffset must be inside buffer status# INTERNAL_ERROR issues# trace_id# Trying to start YDB, gRPC: 27871, MsgBus: 14023 2026-01-04T02:40:52.935087Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330871326341256:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:52.935202Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:52.946269Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:53.029686Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:53.031172Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330871326341130:2081] 1767494452931900 != 1767494452931903 2026-01-04T02:40:53.069777Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:53.069880Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:53.074945Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:53.099394Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:53.113180Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:53.113211Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:53.113218Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2026-01-04T02:40:53.113295Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:53.447696Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:53.456295Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.507176Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.631110Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.686147Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.939434Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:55.690500Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330884211244884:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.690631Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.694079Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330884211244894:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.694183Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.752280Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.784921Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.814030Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.843629Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.873275Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.903790Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.966906Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.016676Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.091082Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330888506213059:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:56.091170Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:56.091325Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330888506213064:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:56.091694Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330888506213066:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:56.091771Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:56.095825Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:56.107489Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330888506213067:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:56.162896Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330888506213119:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:57.934575Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330871326341256:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:57.934698Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::SelectNoAsciiValue [GOOD] Test command err: Trying to start YDB, gRPC: 64394, MsgBus: 31228 2026-01-04T02:40:46.642032Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330841693771504:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:46.642108Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:46.855436Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:46.908510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:46.908640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:46.910816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:46.979328Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:46.980106Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330841693771472:2081] 1767494446640320 != 1767494446640323 2026-01-04T02:40:47.054766Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:47.054794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:47.054800Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:47.054879Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:47.095668Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:47.482494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:47.488981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:40:47.534789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:47.649631Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:47.674845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:47.817601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:47.882191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:49.747064Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330854578675231:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:49.747223Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:49.747593Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330854578675241:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:49.747663Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.085632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.119758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.152569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.185062Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.211507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.241381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.300804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.370697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.447943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330858873643409:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.448052Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.448254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330858873643414:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.448291Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330858873643415:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.448529Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.451323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:50.462253Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330858873643418:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:50.539201Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330858873643469:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:51.642201Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330841693771504:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:51.642270Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Table intent determination, code: 1040
:3:27: Error: CONCAT is not supported on Kikimr clusters. Trying to start YDB, gRPC: 19976, MsgBus: 8651 ... ;send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:52.800661Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:52.821730Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:52.907378Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330869601655179:2081] 1767494452799698 != 1767494452799701 2026-01-04T02:40:52.912029Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:52.947566Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:52.947653Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:52.955505Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:53.015546Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:53.015569Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:53.015577Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:53.015663Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:53.030356Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:53.375950Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:53.384449Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.436155Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.555278Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.614730Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.812571Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:55.745946Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330882486558940:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.746032Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.746356Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330882486558950:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.746427Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.823951Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.885313Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.914886Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.945030Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.973837Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.004921Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.036405Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.079291Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.145622Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330886781527115:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:56.145705Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:56.145854Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330886781527120:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:56.145915Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330886781527122:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:56.145977Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:56.149653Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:56.159791Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330886781527124:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:40:56.250982Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330886781527175:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:57.646203Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:57.800537Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330869601655202:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:57.800595Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:57.914612Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494457953, txId: 281474976715675] shutting down |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExprPgNull [GOOD] Test command err: Trying to start YDB, gRPC: 29118, MsgBus: 9006 2026-01-04T02:40:46.903030Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330841454765116:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:46.903107Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:47.107121Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:47.137786Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:47.137929Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:47.171907Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:47.173917Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330841454765085:2081] 1767494446901853 != 1767494446901856 2026-01-04T02:40:47.182845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:47.262890Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:47.286463Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2026-01-04T02:40:47.286484Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:47.286490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:47.286574Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:47.674673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:47.723008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:47.825645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:47.927121Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:48.005720Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.085955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:49.803613Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330854339668848:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:49.803715Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:49.810109Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330854339668858:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:49.810261Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.266359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.297088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.327014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.356293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.386023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.419201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.461699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.501350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.578136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330858634637023:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.578249Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.578491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330858634637029:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.578538Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330858634637028:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.578639Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.582074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:50.592212Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330858634637032:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:40:50.677661Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330858634637083:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:51.906011Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330841454765116:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:51.906086Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 64535, MsgBus: 4980 2026-01-04T02:40:53.104813Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330875567514829:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:53.105054Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:53.117460Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:53.194094Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:53.226601Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:53.226694Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:53.240725Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:53.286382Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:53.286408Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:53.286416Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:53.286499Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:53.390080Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:53.650761Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:53.659568Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.707655Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.830336Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.880216Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:54.111292Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:56.384375Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330888452418541:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:56.384469Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:56.384699Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330888452418551:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:56.384739Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:56.449018Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.477552Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.508567Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.535548Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.563923Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.595090Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.627340Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.674067Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.743587Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330888452419418:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:56.743673Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:56.743743Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330888452419423:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:56.743954Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330888452419425:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:56.744008Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:56.749089Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:56.759291Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330888452419426:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:56.821386Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330888452419478:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:58.104864Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330875567514829:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:58.104931Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKeyBulkUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 18469, MsgBus: 24236 2026-01-04T02:40:54.806394Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330878344195864:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:54.806506Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:55.034026Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:55.065530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:55.065672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:55.113346Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:55.117947Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330878344195839:2081] 1767494454805581 != 1767494454805584 2026-01-04T02:40:55.134486Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:55.265949Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:55.266533Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:55.266555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:55.266563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:55.266640Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:55.723626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:55.819548Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:57.822658Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330891229098611:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.822775Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.823319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330891229098621:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.823394Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.056085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.174223Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330895524066020:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.174309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.174626Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330895524066025:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.174641Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330895524066026:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.174704Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.178839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:58.189741Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330895524066029:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:40:58.304342Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330895524066080:2602] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptStats [GOOD] Test command err: Trying to start YDB, gRPC: 61858, MsgBus: 18692 2026-01-04T02:40:44.386077Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330833859409003:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:44.386140Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:44.625884Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:44.645577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:44.645719Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:44.675973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:44.753947Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:44.760452Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330833859408963:2081] 1767494444384767 != 1767494444384770 2026-01-04T02:40:44.797268Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:44.830174Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:44.830196Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:44.830204Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:44.830273Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:45.224401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:45.284711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:45.392182Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:45.406435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:45.564942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:45.637268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:47.509755Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330846744312746:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:47.509967Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:47.510553Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330846744312756:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:47.510613Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:47.827197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:47.858625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:47.895495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:47.926226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:47.959900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:47.995167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.059301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.103233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.177148Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330851039280919:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.177228Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.177394Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330851039280924:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.177457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330851039280925:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.177731Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:48.181217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:48.191984Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330851039280928:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:40:48.248233Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330851039280979:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:49.386287Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330833859409003:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:49.386360Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 65384, MsgBus: 21807 2026-01-04T02:40:50.540447Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330859826561745:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:50.540494Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_s ... iled to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:50.624348Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:50.626166Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330859826561716:2081] 1767494450539361 != 1767494450539364 2026-01-04T02:40:50.637651Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:50.637732Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:50.668318Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:50.700501Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:50.700521Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:50.700529Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:50.700602Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:50.843039Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:51.016273Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:51.024431Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.073623Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.206592Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.260787Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.576082Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:53.795712Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330872711465471:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:53.795817Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:53.796087Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330872711465481:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:53.796145Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:53.850323Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.873898Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.901209Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.927199Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.954040Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:54.005719Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:54.035412Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:54.077571Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:54.147005Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330877006433654:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:54.147082Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:54.147089Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330877006433659:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:54.147273Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330877006433661:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:54.147315Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:54.150258Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:54.159665Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330877006433662:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:40:54.244908Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330877006433714:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:55.540562Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330859826561745:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:55.540662Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:56.479806Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:57.037835Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494457043, txId: 281474976715676] shutting down 2026-01-04T02:40:57.727711Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494457743, txId: 281474976715680] shutting down 2026-01-04T02:40:58.114024Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494458107, txId: 281474976715684] shutting down |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidDefaultColumn [GOOD] >> KqpYql::InsertCV+useSink [GOOD] >> KqpYql::FromBytes >> KqpYql::EvaluateExprYsonAndType [GOOD] >> KqpYql::EvaluateFor >> KqpYql::EvaluateExpr2 [GOOD] >> KqpYql::UuidPrimaryKey [GOOD] >> KqpPragma::Warning [GOOD] >> KqpScripting::StreamDdlAndDml [GOOD] >> TConsoleTests::TestDatabaseQuotas [GOOD] >> TConsoleTests::TestDatabaseQuotasBadOverallQuota >> KqpScripting::ScanQueryInvalid ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanCancelAfterBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 3489, MsgBus: 17816 2026-01-04T02:40:47.820797Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330846182084619:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:47.827545Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:48.058582Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:48.058697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:48.101400Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:48.112896Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330846182084576:2081] 1767494447818640 != 1767494447818643 2026-01-04T02:40:48.127903Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:48.130241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:48.212692Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:48.212718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:48.212730Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:48.212829Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:48.370250Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:48.596244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:48.643531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.753753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.834287Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:48.913055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:48.981374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.772349Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330859066988335:2398], DatabaseId: /Root, 
PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.772464Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.772843Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330859066988345:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:50.772926Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.044074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.071592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.100437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.128396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.154065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.183458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.211945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.256973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.321959Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330863361956517:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.322038Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.322128Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330863361956522:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.322229Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330863361956524:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.322268Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.325190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:51.335185Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330863361956526:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:51.411719Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330863361956577:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:52.821078Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330846182084619:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:52.821143Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:53.259459Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494453277, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 29598, MsgBus: 29885 2026-01-04T02:40:53.973596Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;acto ... 146131:7762515]; 2026-01-04T02:40:53.973672Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:53.984704Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:54.092411Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:54.104733Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:54.104820Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:54.107447Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:54.146275Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:54.146295Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:54.146300Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:54.146360Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:54.285834Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:54.563021Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:54.570471Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:40:54.575810Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:54.627575Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:54.752704Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:54.812376Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:54.979684Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:57.092147Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330890565304820:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.092267Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.092625Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330890565304829:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.092710Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.165103Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:57.190940Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:57.220176Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:57.248021Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:57.275459Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:57.303983Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:57.339720Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:57.388846Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:57.464353Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330890565305699:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.464435Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.464444Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330890565305704:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.464600Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330890565305706:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.464646Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:57.468054Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:57.477663Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330890565305707:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:57.545129Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330890565305759:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:58.973825Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330873385433819:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:58.973891Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:59.214339Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494459248, txId: 281474976710673] shutting down 2026-01-04T02:40:59.419951Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494459451, txId: 281474976710675] shutting down 2026-01-04T02:40:59.622786Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494459661, txId: 281474976710677] shutting down |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::TestUuidDefaultColumn [GOOD] Test command err: Trying to start YDB, gRPC: 6805, MsgBus: 9530 2026-01-04T02:40:56.457594Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330887584601202:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:56.458436Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:56.693710Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:56.697067Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:56.697182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:56.791428Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:56.837688Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330887584601097:2081] 1767494456454022 != 1767494456454025 2026-01-04T02:40:56.841887Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:56.873669Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:56.907932Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:56.907953Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:56.907958Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:56.908043Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:57.285766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:57.462350Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:59.653566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330900469503869:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.653671Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.653958Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330900469503879:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.654040Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.034120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.155906Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330904764471270:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.156003Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.156186Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330904764471275:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.156238Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330904764471276:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.156309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.163639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:00.174591Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330904764471279:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:41:00.293097Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330904764471330:2594] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::BinaryJsonOffsetNormal >> KqpYql::DdlDmlMix >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateExpr2 [GOOD] Test command err: Trying to start YDB, gRPC: 4166, MsgBus: 5849 2026-01-04T02:40:49.816580Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330856495938587:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:49.816628Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:50.121908Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:50.122028Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:50.125295Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:50.126012Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:50.162827Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:50.226369Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:50.226396Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:50.226403Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:50.226526Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:50.343621Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:50.632554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:50.675261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.780121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.884356Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:50.927685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:50.985205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.625987Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330869380842307:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.626115Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.626490Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330869380842317:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.626559Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.944508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.979668Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.007385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.032429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.059295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.092857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.152672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.191355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.260457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330873675810488:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:53.260545Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:53.260564Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330873675810493:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:53.260803Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330873675810495:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:53.260846Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:53.264231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:53.274694Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330873675810496:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:53.345015Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330873675810548:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:54.816552Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330856495938587:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:54.816633Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9660, MsgBus: 3892 2026-01-04T02:40:55.675454Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330880645527125:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:55.675545Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:55.708286Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:55.812261Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:55.812349Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:55.822896Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:55.823221Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330880645527086:2081] 1767494455674046 != 1767494455674049 2026-01-04T02:40:55.824585Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:55.877664Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:55.877682Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:55.877689Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:55.877759Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:55.967559Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:56.226965Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:40:56.239677Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.292468Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.437135Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.496160Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.682026Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:58.753638Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330893530430839:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.753742Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.754779Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330893530430849:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.754850Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.832857Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.866788Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.900373Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.929085Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.959674Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:59.017056Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:59.050723Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:59.094032Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:59.168114Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330897825399017:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.168207Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.168363Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330897825399022:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.168369Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330897825399024:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.168408Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.172188Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:59.182017Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330897825399026:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:59.261638Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330897825399077:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:00.675613Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330880645527125:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:00.675687Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UuidPrimaryKey [GOOD] Test command err: Trying to start YDB, gRPC: 28270, MsgBus: 10687 2026-01-04T02:40:56.328998Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330886605508540:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:56.329077Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:56.535027Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:56.563133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:56.563228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:56.621881Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330886605508512:2081] 1767494456327716 != 1767494456327719 2026-01-04T02:40:56.630944Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:56.637161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:56.734342Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:56.745160Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:56.745207Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:56.745218Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:56.745300Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:57.112079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:57.345969Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:59.028126Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330899490411285:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.028262Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.028969Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330899490411295:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.029054Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.280882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:59.386398Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330899490411389:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.386485Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.386849Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330899490411395:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.386866Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330899490411394:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.386930Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.390937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:59.404641Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330899490411398:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:40:59.534938Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330899490411449:2592] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:59.940144Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330899490411545:2362], status: GENERIC_ERROR, issues:
:3:25: Error: Invalid value "invalid-uuid" for type Uuid 2026-01-04T02:40:59.940511Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NTA3ZGMyNWMtMzRjZjY5YWYtNjA2NDk5MDAtZGFhYzQ1YmU=, ActorId: [1:7591330895195443961:2319], ActorState: ExecuteState, LegacyTraceId: 01ke3e28gv2ggmcvaj9a50rvxj, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 3 column: 25 } message: "Invalid value \"invalid-uuid\" for type Uuid" end_position { row: 3 column: 25 } severity: 1 } tx_id# trace_id# |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamDdlAndDml [GOOD] Test command err: Trying to start YDB, gRPC: 16229, MsgBus: 18700 2026-01-04T02:40:48.461201Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330854025577691:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:48.461303Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:48.671370Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:48.687499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:48.687617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:48.766343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:48.823221Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:48.824889Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330854025577660:2081] 1767494448459881 != 1767494448459884 2026-01-04T02:40:48.909551Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:48.930739Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:48.930773Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:48.930784Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:48.930865Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:49.334900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:49.385618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:49.469507Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:49.481709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:49.618013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:49.677765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.604422Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330866910481424:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.604523Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.605053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330866910481434:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.605091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.964979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.990651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.022141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.051937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.081442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.115093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.151025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.226037Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.293422Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330871205449602:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.293532Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.293589Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330871205449607:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.293714Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330871205449609:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.293773Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.298017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:52.308242Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330871205449611:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:40:52.375680Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330871205449662:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:53.461693Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330854025577691:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:53.461768Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:53.896429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... ;send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:55.033180Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:55.044401Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:55.137049Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:55.138928Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330882407772774:2081] 1767494455023253 != 1767494455023256 2026-01-04T02:40:55.144594Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:55.144678Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:55.168149Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:55.238447Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:55.238471Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:55.238478Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:55.238555Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:55.243215Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:55.623822Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:55.639162Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.700989Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.823322Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.881579Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.038926Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:58.068692Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330895292676538:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.068771Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.068971Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330895292676547:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.069025Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.138546Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.168695Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.196712Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.222769Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.250792Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.284923Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.316328Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.402355Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.476164Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330895292677419:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.476261Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.476723Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330895292677425:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.476787Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330895292677424:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.476898Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.480598Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:58.493771Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330895292677428:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:40:58.586081Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330895292677479:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:00.025555Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330882407772893:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:00.026629Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:00.222117Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.773869Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494460809, txId: 281474976715675] shutting down |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::Warning [GOOD] Test command err: Trying to start YDB, gRPC: 15788, MsgBus: 14935 2026-01-04T02:40:48.535934Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330851055235909:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:48.536068Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:48.806338Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:48.811915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:48.812054Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:48.844525Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:48.895261Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:48.986715Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:48.986741Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:48.986752Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:48.986888Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:49.102496Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:49.406899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:49.411943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:40:49.455991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:49.544030Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:49.565490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:49.711818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:49.762499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.693513Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330863940139618:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.693616Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.693893Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330863940139628:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.693956Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.116650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.148587Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.178399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.204549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.233400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.265607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.312687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.347500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.417856Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330868235107793:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.417942Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330868235107798:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.417947Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.418104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330868235107800:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.418155Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.421507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:52.430831Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330868235107801:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:52.498478Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330868235107853:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:53.537904Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330851055235909:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:53.537983Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:54.319481Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330876825042778:2542], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: RemovePrefixMembers, At function: PersistableRepr, At function: SqlProject
:2:50: Error: At function: ... : "At function: PersistableRepr" end_position { row: 2 column: 20 } severity: 1 issues { position { row: 2 column: 26 } message: "At function: Member" end_position { row: 2 column: 26 } severity: 1 issues { position { row: 2 column: 26 } message: "Member not found: _yql_partition_id" end_position { row: 2 column: 26 } severity: 1 } } } } } } } } } } tx_id# trace_id# Trying to start YDB, gRPC: 16205, MsgBus: 26398 2026-01-04T02:40:55.218930Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330881322505364:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:55.222120Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:55.234984Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:55.325030Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:55.359141Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:55.359220Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:55.368707Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:55.403882Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:55.421590Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:55.421611Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:55.421620Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:55.421696Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:55.800256Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:55.810359Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:40:55.827384Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.893061Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.023277Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.088681Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.227040Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:58.287436Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330894207409068:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.287512Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.287812Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330894207409078:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.287876Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.356606Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.388858Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.418029Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.482585Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.516528Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.548239Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.580775Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.628779Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.713294Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330894207409948:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.713385Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.713465Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330894207409953:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.713522Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330894207409955:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.713569Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.717025Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:58.729222Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330894207409957:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:58.831604Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330894207410008:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:00.218182Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330881322505364:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:00.218255Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TPQTest::TestAlreadyWrittenWithoutDeduplication [GOOD] >> TPQTest::TestCompactifiedWithRetention >> KqpYql::Discard-DiscardSelectIsOn [GOOD] >> KqpScripting::ExecuteYqlScriptScanScalar [GOOD] >> KqpScripting::JoinIndexLookup >> KqpYql::UpdateBadType >> TPQTest::TestManyConsumers [GOOD] >> TConsoleTests::TestAuthorizationExtSubdomain [GOOD] >> TConsoleTests::TestAttributes >> KqpPragma::Auth [GOOD] >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer >> TOlap::StoreStats >> TOlap::CreateStoreWithDirs >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] >> KqpScripting::ScriptingCreateAndAlterTableTest [GOOD] >> KqpScripting::SecondaryIndexes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Discard-DiscardSelectIsOn [GOOD] Test command err: Trying to start YDB, gRPC: 6337, MsgBus: 20908 2026-01-04T02:40:51.539095Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330864622185449:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:51.539328Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:51.769344Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:51.824720Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:51.837973Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:51.838088Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:51.842225Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:51.918510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:51.918540Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:51.918548Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-04T02:40:51.918630Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:52.028035Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:52.315244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:52.373157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.511899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.610326Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:52.648384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.712390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:54.377003Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330877507089171:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:54.377116Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:54.377340Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330877507089181:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:54.377380Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:54.802726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:54.832712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:54.860233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:54.887570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:54.912385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:54.946019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.009154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.052150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.138198Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330881802057346:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.138284Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.138825Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330881802057351:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.138881Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330881802057352:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.139166Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.142608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:55.159093Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330881802057355:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:40:55.237767Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330881802057406:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:56.540120Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330864622185449:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:56.540176Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 24664, MsgBus: 5161 2026-01-04T02:40:57.572829Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330892119372812:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:57.572955Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:57.586887Z node 2 :KQP_PROXY WARN: kqp_final ... status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:57.651800Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:57.654050Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330892119372772:2081] 1767494457572129 != 1767494457572132 2026-01-04T02:40:57.680919Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:57.680982Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:57.684319Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:57.718595Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:57.718617Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:57.718625Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:57.718701Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:57.877407Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:58.087248Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:58.092285Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:40:58.104443Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.159553Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.276955Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.334080Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.582174Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:00.365084Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330905004276522:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.365180Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.365398Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330905004276531:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.365446Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.438000Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.469436Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.497244Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.525158Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.551683Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.579448Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.630337Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.675746Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.758082Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330905004277401:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.758179Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.758430Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330905004277406:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.758431Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330905004277407:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.758472Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.761860Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:00.771311Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330905004277410:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:00.850423Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330905004277461:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:02.307028Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591330913594212362:2533], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:13: Error: DISCARD not supported in YDB queries, code: 2008 2026-01-04T02:41:02.307492Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=OTliNTQ1MzQtZDM5ZjQ5OGMtMjk0ZGQ1ZTUtZjM4NDgwZTY=, ActorId: [2:7591330913594212355:2529], ActorState: ExecuteState, LegacyTraceId: 01ke3e2arzbwezafhedj3cz2kd, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 2 column: 13 } message: "DISCARD not supported in YDB queries" end_position { row: 2 column: 13 } issue_code: 2008 severity: 1 } } tx_id# trace_id# 2026-01-04T02:41:02.573229Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330892119372812:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:02.575960Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TOlap::CreateDropStandaloneTable >> TPQTest::TestCompactifiedWithRetention [GOOD] >> TPQTest::TestChangeConfig >> TOlapNaming::AlterColumnTableFailed >> TConsoleTests::TestDatabaseQuotasBadOverallQuota [GOOD] >> TConsoleTests::TestDatabaseQuotasBadStorageQuota >> TOlap::CreateStore >> TOlapNaming::CreateColumnTableOk >> KqpYql::UpdatePk [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestManyConsumers [GOOD] Test command err: 2026-01-04T02:39:52.699273Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-04T02:39:52.767970Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:39:52.771526Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:39:52.771803Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:52.771880Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:52.771921Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-04T02:39:52.771978Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-04T02:39:52.772059Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:52.772124Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-04T02:39:52.789119Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2026-01-04T02:39:52.789239Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:39:52.809961Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 52428800 BurstSize: 52428800 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-04T02:39:52.812649Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 52428800 BurstSize: 52428800 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-04T02:39:52.812795Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:52.813620Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 
LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 52428800 BurstSize: 52428800 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-04T02:39:52.813753Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:39:52.814121Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:39:52.814469Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-04T02:39:52.817071Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:39:52.817126Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2026-01-04T02:39:52.817169Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-04T02:39:52.817220Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:39:52.817282Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:39:52.818794Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:39:52.820089Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-04T02:39:52.820155Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:52.820203Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:52.820264Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:52.820297Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-04T02:39:52.820330Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:52.820370Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:52.820434Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-01-04T02:39:52.820470Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-01-04T02:39:52.820501Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:52.820528Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2026-01-04T02:39:52.820551Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2026-01-04T02:39:52.820579Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-04T02:39:52.820625Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-04T02:39:52.820677Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:52.820948Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:39:52.820983Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:39:52.821032Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:39:52.821204Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-04T02:39:52.821400Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:52.824193Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:39:52.824299Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:39:52.824356Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:52.824391Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:52.824442Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:52.824478Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:39:52.824511Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:52.824577Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:39:52.824899Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:196:2203], now have 1 active actors on pipe 2026-01-04T02:39:52.840112Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:199:2205], now have 1 active actors on pipe 2026-01-04T02:39:52.840268Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-04T02:39:52.840324Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1-- ... 
Q: 72057594037927937] server connected, pipe [28:998:2987], now have 1 active actors on pipe 2026-01-04T02:41:02.201325Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.226812Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.289389Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1001:2990], now have 1 active actors on pipe 2026-01-04T02:41:02.291673Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.314845Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.348175Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1004:2993], now have 1 active actors on pipe 2026-01-04T02:41:02.350406Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.368291Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.406491Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1007:2996], now have 1 active actors on pipe 2026-01-04T02:41:02.408845Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.426526Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 
MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.464784Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1010:2999], now have 1 active actors on pipe 2026-01-04T02:41:02.466973Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.484735Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.551163Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1013:3002], now have 1 active actors on pipe 2026-01-04T02:41:02.553445Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.579278Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.618102Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1016:3005], now have 1 active actors on pipe 2026-01-04T02:41:02.620432Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.638095Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.676123Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1019:3008], now have 1 active actors on pipe 2026-01-04T02:41:02.678428Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.688681Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 
MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.710094Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1022:3011], now have 1 active actors on pipe 2026-01-04T02:41:02.711676Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.722825Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.757282Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1025:3014], now have 1 active actors on pipe 2026-01-04T02:41:02.759479Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.810163Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.850661Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1028:3017], now have 1 active actors on pipe 2026-01-04T02:41:02.852836Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.876812Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.911993Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1031:3020], now have 1 active actors on pipe 2026-01-04T02:41:02.914231Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.932551Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic 
PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.971448Z node 28 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [28:1034:3023], now have 1 active actors on pipe 2026-01-04T02:41:02.973662Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:02.991227Z node 28 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037927937][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:41:03.030427Z node 28 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [28:1037:3026] connected; active server actors: 1 |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> KqpScripting::ScriptExplainCreatedTable [GOOD] >> KqpScripting::ScriptExplain >> TOlap::CreateStoreWithDirs [GOOD] >> TOlap::CreateTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeDisabled_BadSourceId_Test [GOOD] Test command err: 2026-01-04T02:40:03.766276Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330660296049710:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:03.766370Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:03.802861Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:40:03.803063Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330657945740784:2175];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:03.803156Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:03.810728Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:40:03.979076Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:03.981115Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:04.007979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:04.008089Z 
node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:04.009393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:04.009468Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:04.018050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:04.045585Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:40:04.045832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:04.080004Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:04.157245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001c77/r3tmp/yandexYFR72V.tmp 2026-01-04T02:40:04.157283Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001c77/r3tmp/yandexYFR72V.tmp 2026-01-04T02:40:04.157503Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001c77/r3tmp/yandexYFR72V.tmp 2026-01-04T02:40:04.157630Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:04.178045Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:04.188644Z INFO: TTestServer started on Port 64823 GrpcPort 29674 2026-01-04T02:40:04.262888Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:29674 2026-01-04T02:40:04.361384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:04.446763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:04.775692Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:04.807355Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:06.751397Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[2:7591330670830642962:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:06.751402Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330670830642954:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:06.751478Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:06.751397Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330673180952979:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:06.751486Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330673180952990:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:06.751535Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:06.751771Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330670830642968:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:06.751839Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:06.752216Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330673180952994:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:06.752281Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:06.755746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:06.773473Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330673180952993:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T02:40:06.813414Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330670830642971:2175] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:40:06.814285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710662, at schemeshard: 72057594046644480 2026-01-04T02:40:06.814547Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330670830642969:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T02:40:06.874804Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330673180953091:3043] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:06.915119Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330670830642999:2182] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:07.055349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:07.061277Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330673180953101:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:40:07.061797Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NTcyMzhlZjctMTc1YWRmM2YtNGY5YWZiNjctOWFmZjk4MDQ=, ActorId: [1:7591330673180952977:2329], ActorState: ExecuteState, LegacyTraceId: 01ke3e0mjxe67058k53vpvbq8r, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" ... node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:51.714694Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:51.732849Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001c77/r3tmp/yandexPUG7kU.tmp 2026-01-04T02:40:51.732872Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001c77/r3tmp/yandexPUG7kU.tmp 2026-01-04T02:40:51.732997Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001c77/r3tmp/yandexPUG7kU.tmp 2026-01-04T02:40:51.733081Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:51.769035Z INFO: TTestServer started on Port 62699 GrpcPort 13667 PQClient connected to localhost:13667 2026-01-04T02:40:51.999084Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:52.026374Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.187404Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:52.499673Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:52.512584Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:55.319870Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7591330883386882592:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.319954Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7591330883386882581:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.320296Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.321887Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7591330883386882596:2311], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.321978Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.328244Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:55.371167Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [8:7591330883386882595:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-04T02:40:55.462048Z node 7 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [7:7591330880712140322:2337], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:40:55.462470Z node 7 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=7&id=ZTU0ODAyMTMtMzJkZTMwYTQtOTAyMDJjNTItYzJlZjZjMDQ=, ActorId: [7:7591330880712140296:2330], ActorState: ExecuteState, LegacyTraceId: 01ke3e244246shse4tkx462qw7, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:40:55.462887Z node 7 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:40:55.462164Z node 8 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [8:7591330883386882624:2183] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:55.476775Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.507929Z node 8 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [8:7591330883386882631:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:40:55.509856Z node 8 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=8&id=YjBkZjNkLWMxOWI5YjBjLTMyYWNjMGExLTNlNmVlNmUy, ActorId: [8:7591330883386882578:2305], ActorState: ExecuteState, LegacyTraceId: 01ke3e240nbytfgg5jcyhysge5, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:40:55.510389Z node 8 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:40:55.590969Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.722142Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. 
Subcribe to ClusterTracker from [7:7591330885007108080:3401] 2026-01-04T02:40:56.493896Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7591330863532269665:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:56.493972Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:56.505216Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[8:7591330866207013013:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:56.505315Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok Received TEvChooseError: Bad SourceId 2026-01-04T02:41:02.881865Z node 7 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [7:7591330910776912227:3607] (SourceId=base64:a***, PreferedPartition=(NULL)) Start idle 2026-01-04T02:41:02.881915Z node 7 :PQ_PARTITION_CHOOSER INFO: partition_chooser_impl__abstract_chooser_actor.h:312: TPartitionChooser [7:7591330910776912227:3607] (SourceId=base64:a***, PreferedPartition=(NULL)) ReplyError: Bad SourceId |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> KqpYql::FromBytes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdatePk [GOOD] Test command err: Trying to start YDB, gRPC: 28519, MsgBus: 12729 2026-01-04T02:40:59.298750Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330897996118691:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:59.298805Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:59.534367Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:59.543370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:59.543445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:59.609332Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:59.654485Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:59.655539Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330897996118655:2081] 1767494459297413 != 1767494459297416 2026-01-04T02:40:59.725122Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:59.725149Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:59.725180Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from 
file: (empty maybe) 2026-01-04T02:40:59.725273Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:59.798988Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:00.110031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:00.173236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.292991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.326811Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:00.440662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.498047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:02.251626Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330910881022416:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:02.251745Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:02.252487Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330910881022426:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:02.252530Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:02.646065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:02.674790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:02.703401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:02.735761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:02.766591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:02.799435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:02.834185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:02.873213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:02.952584Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330910881023296:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:02.952668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:02.953018Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330910881023301:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:02.953043Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330910881023302:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:02.953127Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:02.956871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:02.968356Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330910881023305:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:03.062380Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330915175990652:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:04.299246Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330897996118691:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:04.299305Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Type annotation, code: 1030
:3:20: Warning: At lambda, At function: AsStruct, At tuple
:4:31: Warning: At function: +
:4:31: Warning: Integral type implicit bitcast: Optional and Int32, code: 1107
:5:27: Error: At function: KiUpdateTable!
:5:27: Error: Cannot update primary key column: Group |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TOlap::CreateStore [GOOD] >> TOlap::CreateDropTable >> TPQTest::TestChangeConfig [GOOD] >> KqpYql::EvaluateFor [GOOD] >> THiveTest::TestCheckSubHiveMigrationWithReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots >> TOlap::CreateDropStandaloneTable [GOOD] >> TOlap::AlterStore >> TOlapNaming::CreateColumnStoreFailed >> TOlap::CreateTable [GOOD] >> TOlap::CreateTableTtl >> KqpScripting::ScanQueryInvalid [GOOD] >> KqpScripting::ScanQueryTruncate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestChangeConfig [GOOD] Test command err: 2026-01-04T02:40:06.684179Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-04T02:40:06.751502Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:40:06.754280Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:40:06.754491Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:06.754543Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:06.754572Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-04T02:40:06.754637Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-04T02:40:06.754693Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:06.754760Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:184:2057] recipient: [1:14:2061] 2026-01-04T02:40:06.773460Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:183:2196], now have 1 active actors on pipe 2026-01-04T02:40:06.773642Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:40:06.793247Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: 
"/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-04T02:40:06.796220Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-04T02:40:06.796392Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:06.797310Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-04T02:40:06.797455Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:40:06.797894Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:40:06.798292Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:190:2142] 2026-01-04T02:40:06.801035Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:40:06.801102Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 
2026-01-04T02:40:06.801150Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:190:2142] 2026-01-04T02:40:06.801209Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:40:06.801311Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:40:06.802913Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:40:06.804329Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-04T02:40:06.804416Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:06.804461Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:06.804518Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:06.804549Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-04T02:40:06.804590Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:06.804635Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:06.804721Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-01-04T02:40:06.804771Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-01-04T02:40:06.804810Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:06.804844Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2026-01-04T02:40:06.804869Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2026-01-04T02:40:06.804902Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-04T02:40:06.804981Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-04T02:40:06.805028Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:06.805338Z node 1 :PERSQUEUE DEBUG: 
partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:40:06.805386Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:40:06.805447Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:40:06.805648Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:40:06.805880Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:40:06.808542Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:06.808639Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:06.808699Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:06.808741Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:06.808775Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:06.808824Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:06.808875Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:06.808931Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:40:06.809357Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:197:2204], now have 1 active actors on pipe 2026-01-04T02:40:06.821744Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:200:2206], now have 1 active actors on pipe 2026-01-04T02:40:06.821925Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-04T02:40:06.822016Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topi ... 
4 PartitionIds: 5 PartitionIds: 6 PartitionIds: 7 PartitionIds: 8 PartitionIds: 9 TopicName: "rt3.dc1--asdfgs--topic" Version: 45 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } Partitions { PartitionId: 5 } Partitions { PartitionId: 6 } Partitions { PartitionId: 7 } Partitions { PartitionId: 8 } Partitions { PartitionId: 9 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } AllPartitions { PartitionId: 5 } AllPartitions { PartitionId: 6 } AllPartitions { PartitionId: 7 } AllPartitions { PartitionId: 8 } AllPartitions { PartitionId: 9 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 44 } Consumers { Name: "bbb" Generation: 45 Important: true } Consumers { Name: "ccc" Generation: 45 Important: true } 2026-01-04T02:41:06.298666Z node 40 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:41:06.299196Z node 40 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ff9dd61a-525a512e-7daa7f89-802d7b67_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:41:06.305983Z node 40 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:41:06.306520Z node 40 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|afa4fbb1-f41ad7df-44bd6ab5-27fd54bb_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:41:06.319223Z node 40 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 9 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:41:06.319768Z node 40 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|fda91665-8b50b696-8b874eaf-e2b1b8cf_0 generated for partition 9 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:41:06.715661Z node 41 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 41 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:107:2057] recipient: [41:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:107:2057] recipient: [41:105:2138] Leader for TabletID 72057594037927937 is [41:111:2142] sender: [41:112:2057] recipient: [41:105:2138] 2026-01-04T02:41:06.775157Z node 41 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:41:06.775221Z node 41 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:41:06.775266Z node 41 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:41:06.775325Z node 41 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [41:153:2057] recipient: [41:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [41:153:2057] recipient: [41:151:2172] Leader for TabletID 72057594037927938 is [41:157:2176] sender: 
[41:158:2057] recipient: [41:151:2172] Leader for TabletID 72057594037927937 is [41:111:2142] sender: [41:183:2057] recipient: [41:14:2061] 2026-01-04T02:41:06.796800Z node 41 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:41:06.798593Z node 41 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 46 actor [41:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 100 MaxSizeInPartition: 104857600 LifetimeSeconds: 172800 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 TopicName: "rt3.dc1--asdfgs--topic" Version: 46 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 46 } Consumers { Name: "aaa" Generation: 46 Important: true } 2026-01-04T02:41:06.799963Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [41:189:2142] 2026-01-04T02:41:06.803180Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [41:189:2142] 2026-01-04T02:41:06.806629Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [41:190:2142] 2026-01-04T02:41:06.809100Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [41:190:2142] 2026-01-04T02:41:06.811965Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [41:191:2142] 2026-01-04T02:41:06.814412Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [41:191:2142] 2026-01-04T02:41:06.817218Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][3][StateInit] bootstrapping 3 [41:192:2142] 2026-01-04T02:41:06.819650Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][3][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 3 generation 2 [41:192:2142] 2026-01-04T02:41:06.822480Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][4][StateInit] bootstrapping 4 [41:193:2142] 2026-01-04T02:41:06.824879Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][4][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 4 generation 2 [41:193:2142] 2026-01-04T02:41:06.838165Z node 41 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:41:06.838576Z node 41 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|38e02a6-9a21100e-b62ecfcc-4078fc4d_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 
2026-01-04T02:41:06.848092Z node 41 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:41:06.856038Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][5][StateInit] bootstrapping 5 [41:237:2142] 2026-01-04T02:41:06.858567Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][5][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 5 generation 2 [41:237:2142] 2026-01-04T02:41:06.863128Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][6][StateInit] bootstrapping 6 [41:238:2142] 2026-01-04T02:41:06.865548Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][6][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 6 generation 2 [41:238:2142] 2026-01-04T02:41:06.869884Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][7][StateInit] bootstrapping 7 [41:239:2142] 2026-01-04T02:41:06.872305Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][7][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 7 generation 2 [41:239:2142] 2026-01-04T02:41:06.876884Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][8][StateInit] bootstrapping 8 [41:240:2142] 2026-01-04T02:41:06.879326Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][8][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 8 generation 2 [41:240:2142] 2026-01-04T02:41:06.883401Z node 41 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][9][StateInit] bootstrapping 9 [41:241:2142] 2026-01-04T02:41:06.885755Z node 41 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][9][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 9 generation 2 [41:241:2142] 2026-01-04T02:41:06.916803Z node 41 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 47 actor [41:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 5 MaxSizeInPartition: 1048576 LifetimeSeconds: 86400 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 PartitionIds: 4 PartitionIds: 5 PartitionIds: 6 PartitionIds: 7 PartitionIds: 8 PartitionIds: 9 TopicName: "rt3.dc1--asdfgs--topic" Version: 47 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } Partitions { PartitionId: 4 } Partitions { PartitionId: 5 } Partitions { PartitionId: 6 } Partitions { PartitionId: 7 } Partitions { PartitionId: 8 } Partitions { PartitionId: 9 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } AllPartitions { PartitionId: 4 } AllPartitions { PartitionId: 5 } AllPartitions { PartitionId: 6 } AllPartitions { PartitionId: 7 } AllPartitions { PartitionId: 8 } AllPartitions { PartitionId: 9 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 46 } Consumers { Name: "bbb" Generation: 47 Important: true } Consumers { Name: "ccc" Generation: 47 Important: true } 2026-01-04T02:41:06.919046Z node 41 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 
'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:41:06.919450Z node 41 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|23afc073-a9e93246-630f3b1a-fb1374ba_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:41:06.925453Z node 41 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:41:06.925871Z node 41 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|71a04577-e7f526b7-aab040ea-3fe6a054_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:41:06.936612Z node 41 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 9 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:41:06.937020Z node 41 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|919c8ee6-5cfb9813-5454c43e-5eae385e_0 generated for partition 9 topic 'rt3.dc1--asdfgs--topic' owner default |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_PreferedPartition_OtherPartition_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::FromBytes [GOOD] Test command err: Trying to start YDB, gRPC: 25088, MsgBus: 15199 2026-01-04T02:40:54.992102Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330878862005183:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:54.992169Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:55.266422Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:55.270924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:55.271127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:55.295906Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:55.331873Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:55.334791Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330878862005151:2081] 1767494454990575 != 1767494454990578 2026-01-04T02:40:55.452909Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:55.452945Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:55.452953Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:55.453074Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-04T02:40:55.510529Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:55.897006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:55.902520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:40:55.960933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.023426Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:56.121057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.273170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.341473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.123680Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330896041876204:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.123795Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.124156Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330896041876214:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.124212Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.457144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.492662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.526714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.564454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.599109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.639595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.690581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.738567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.818440Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330896041877084:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.818543Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.818819Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330896041877090:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.818944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330896041877089:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.818972Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.823904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:58.834385Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330896041877093:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:58.936996Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330896041877144:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:59.992321Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330878862005183:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:59.992393Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:00.372078Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing ... : log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330909115305090:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:01.184319Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:01.194078Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:41:01.269259Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330909115305065:2081] 1767494461183755 != 1767494461183758 2026-01-04T02:41:01.278535Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:01.278607Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:01.280163Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:01.281765Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:01.289485Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-04T02:41:01.289509Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-04T02:41:01.296642Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:01.318244Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:01.318270Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:01.318276Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:01.318336Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:01.481465Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:01.723743Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:01.740041Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:01.811852Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:01.942386Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:01.992413Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:02.190848Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:04.053432Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330922000208824:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.053516Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.053816Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330922000208833:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.053864Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.121908Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.153176Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.178211Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.207719Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.238997Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.271680Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.299717Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.338846Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.407274Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330922000209705:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.407353Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330922000209710:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.407361Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.407546Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330922000209713:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.407623Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.411019Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:04.421554Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330922000209712:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:04.518848Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330922000209765:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:06.184444Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330909115305090:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:06.184532Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TOlap::CreateDropTable [GOOD] >> TOlap::CreateDropStandaloneTableDefaultSharding >> KqpYql::BinaryJsonOffsetNormal [GOOD] >> KqpYql::Closure ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::EvaluateFor [GOOD] Test command err: Trying to start YDB, gRPC: 10974, MsgBus: 29334 2026-01-04T02:40:55.086254Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330880983715998:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:55.086689Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:55.340364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:55.340470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:55.393795Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:55.394432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:55.412444Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:55.511719Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:55.511754Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:55.511782Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:55.511938Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:55.562438Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:55.955123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:56.020654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.091771Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:56.172910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.321139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:56.378536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.074887Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330893868619717:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.075004Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.075350Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330893868619727:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.075425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.401568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.439390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.471760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.502012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.537937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.576486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.631581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.678421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.755302Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330893868620599:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.755366Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.755549Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330893868620604:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.755643Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330893868620605:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.755681Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:58.759211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:58.771231Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330893868620608:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:58.852284Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330893868620659:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:00.083403Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330880983715998:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:00.083514Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10771, MsgBus: 7568 2026-01-04T02:41:01.366206Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330907791455036:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:01.366963Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:01.375686Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:41:01.450848Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:01.452281Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330907791455007:2081] 1767494461363831 != 1767494461363834 2026-01-04T02:41:01.483894Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:01.489938Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:01.490017Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:01.491742Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:01.523208Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:01.523233Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:01.523240Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:01.523334Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:01.896895Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:41:01.915055Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:01.973740Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:02.110308Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:02.166061Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:02.372473Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:04.461263Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330920676358766:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.461346Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.461603Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330920676358775:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.461652Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.549284Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.580201Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.605558Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.629693Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.656026Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.681664Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.713213Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.752771Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.821189Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330920676359647:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.821256Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.821323Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330920676359652:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.821394Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330920676359654:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.821421Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.824598Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:04.833972Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330920676359656:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:41:04.896854Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330920676359707:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:06.366588Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330907791455036:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:06.366650Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpYql::DdlDmlMix [GOOD] >> KqpYql::CreateUseTable >> TOlap::AlterStore [GOOD] >> TOlap::AlterTtl >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] >> TOlapNaming::CreateColumnStoreFailed [GOOD] >> TOlapNaming::AlterColumnTableOk >> TOlap::CreateTableTtl [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleTests::TestDatabaseQuotasBadStorageQuota [GOOD] Test command err: 2026-01-04T02:39:51.013355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:51.013434Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:51.286031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:52.218470Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:39:52.219072Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpCaCott/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:39:52.219791Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpCaCott/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpCaCott/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13880977764702771737 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:39:52.350522Z node 4 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000000:_:0:0:0]: (2147483648) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 
1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpCaCott/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:39:52.371739Z node 4 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000001:_:0:0:0]: (2147483649) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpCaCott/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:39:52.371907Z node 4 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000002:_:0:0:0]: (2147483650) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpCaCott/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:39:52.435297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:39:52.605826Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:39:52.606215Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpCaCott/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:39:52.606470Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpCaCott/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpCaCott/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1630858142091080618 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunk ... Weight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:40:58.547989Z node 140 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:40:58.548545Z node 140 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpmOsDQp/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:40:58.548760Z node 140 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpmOsDQp/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpmOsDQp/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4641485042737105413 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:40:58.591357Z node 143 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:40:58.591918Z node 143 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpmOsDQp/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:40:58.592160Z node 143 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpmOsDQp/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpmOsDQp/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7116293933552347686 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:40:58.654076Z node 139 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:40:58.654646Z node 139 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpmOsDQp/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:40:58.654860Z node 139 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpmOsDQp/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpmOsDQp/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10179477157000012143 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:40:59.045899Z node 136 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:40:59.045994Z node 136 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:59.059945Z node 136 :STATISTICS WARN: tx_init.cpp:298: [72075186233409554] TTxInit::Complete. EnableColumnStatistics=false 2026-01-04T02:41:02.249049Z node 145 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:41:02.249144Z node 145 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:02.356638Z node 145 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:03.331985Z node 146 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:41:03.332515Z node 146 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpGNwVKK/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:41:03.332799Z node 146 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpGNwVKK/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/002410/r3tmp/tmpGNwVKK/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15906599236966580253 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:41:05.938627Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:41:05.938741Z node 154 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:06.073090Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> KqpYql::UpdateBadType [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateTableTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:41:04.774181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:41:04.774289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:41:04.774335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:41:04.774379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:41:04.774427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:41:04.774465Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:41:04.774538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:41:04.774667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:41:04.775701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:41:04.776007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:41:04.869715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:41:04.869783Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:04.884962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:41:04.889427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:41:04.889643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:41:04.896196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:41:04.896469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:41:04.897259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:41:04.897534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:41:04.900792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:04.901016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:41:04.902427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:41:04.902490Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:04.902643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:41:04.902708Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, 
LocalPathId: 1] 2026-01-04T02:41:04.902771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:41:04.902930Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:41:05.026419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.027457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.027613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.027709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.027797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.027887Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.027957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:41:05.028029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.028135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.028238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.028329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.028425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.028514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.028608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.028676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 105 CreateStep: 5000042 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 ColumnTableTtlSettingsVersion: 1 } ChildrenExist: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "Table3" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } TtlSettings { Enabled { ColumnName: "timestamp" ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 360 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } Version: 1 } SchemaPresetId: 1 SchemaPresetName: "default" ColumnStorePathId { OwnerId: 72057594046678944 LocalId: 38 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 106 2026-01-04T02:41:09.039045Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot/OlapStore" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "Table4" TtlSettings { Enabled { ColumnName: "timestamp" 
ColumnUnit: UNIT_AUTO Tiers { ApplyAfterSeconds: 3600000000 EvictToExternalStorage { Storage: "/MyRoot/Tier1" } } } } ColumnShardCount: 1 } } TxId: 106 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:41:09.039448Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: TCreateColumnTable Propose, path: /MyRoot/OlapStore/Table4, opId: 106:0, at schemeshard: 72057594046678944 2026-01-04T02:41:09.039970Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 38], parent name: OlapStore, child name: Table4, child id: [OwnerId: 72057594046678944, LocalPathId: 43], at schemeshard: 72057594046678944 2026-01-04T02:41:09.040060Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 0 2026-01-04T02:41:09.040136Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 106:0 type: TxCreateColumnTable target path: [OwnerId: 72057594046678944, LocalPathId: 43] source path: 2026-01-04T02:41:09.040425Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 1 2026-01-04T02:41:09.040763Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:41:09.040847Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 106:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:747) 2026-01-04T02:41:09.040997Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-04T02:41:09.041067Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 2 2026-01-04T02:41:09.044258Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusAccepted TxId: 106 SchemeshardId: 72057594046678944 PathId: 43, at schemeshard: 72057594046678944 2026-01-04T02:41:09.044631Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE COLUMN TABLE, path: /MyRoot/OlapStore/ 2026-01-04T02:41:09.044917Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:41:09.044968Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:41:09.045217Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 43] 2026-01-04T02:41:09.045333Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:09.045383Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 106, path id: 38 2026-01-04T02:41:09.045446Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 106, path id: 43 2026-01-04T02:41:09.045862Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-04T02:41:09.045929Z node 3 :FLAT_TX_SCHEMESHARD INFO: create_table.cpp:237: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState at tabletId# 72057594046678944 2026-01-04T02:41:09.046138Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: create_table.cpp:323: TCreateColumnTable TConfigureParts operationId# 106:0 ProgressState Propose modify scheme on shard tabletId: 72075186233409546 2026-01-04T02:41:09.046947Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2026-01-04T02:41:09.047055Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 10 PathOwnerId: 72057594046678944, cookie: 106 2026-01-04T02:41:09.047101Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2026-01-04T02:41:09.047145Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 10 2026-01-04T02:41:09.047205Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2026-01-04T02:41:09.048075Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 1 PathOwnerId: 72057594046678944, cookie: 106 2026-01-04T02:41:09.048155Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 1 PathOwnerId: 72057594046678944, cookie: 106 2026-01-04T02:41:09.048190Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2026-01-04T02:41:09.048222Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 1 2026-01-04T02:41:09.048257Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-01-04T02:41:09.048358Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2026-01-04T02:41:09.050701Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382272 2026-01-04T02:41:09.050864Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 106, partId: 0, tablet: 72075186233409546 2026-01-04T02:41:09.051548Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;self_id=[3:719:2705];ev=NActors::IEventHandle;tablet_id=72075186233409546;tx_id=106;this=136237277893056;method=TTxController::StartProposeOnExecute;tx_info=106:TX_KIND_SCHEMA;min=5000043;max=18446744073709551615;plan=0;src=[3:129:2154];cookie=12:5;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:41:09.052247Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-04T02:41:09.052366Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> KqpScripting::JoinIndexLookup [GOOD] |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |91.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |91.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> BasicStatistics::NotFullStatisticsColumnshard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:38:57.699002Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:38:57.787191Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:38:57.795746Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:38:57.796091Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:38:57.796259Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:38:58.206659Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:58.297323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:58.297508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:58.344806Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:58.429321Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:38:59.092996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:59.093599Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:59.093648Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:59.093682Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:59.094248Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:59.161332Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:59.715732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:39:03.192506Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:39:03.201647Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:39:03.206017Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:39:03.241063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:39:03.241209Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:03.270591Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:39:03.272918Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:03.456543Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:03.456666Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:03.458241Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:03.458932Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:03.459507Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:03.460578Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:03.460863Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:03.460982Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:03.461266Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:03.461407Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:03.461501Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:03.488734Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:03.761600Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:03.788393Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:39:03.788559Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:39:03.820879Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:39:03.822259Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:39:03.822560Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:39:03.822624Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:39:03.822696Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:39:03.822760Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:39:03.822816Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:39:03.822883Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:39:03.823431Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:39:03.823706Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:39:03.826164Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:39:03.835388Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:39:03.835460Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:39:03.835549Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:39:03.848573Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:39:03.860615Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:39:03.865029Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:39:03.868891Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:39:03.869016Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:39:03.900771Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:39:03.901216Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:39:04.027476Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:39:04.178830Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:39:04.377541Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:39:04.379848Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:39:04.383584Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:04.387462Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:39:04.387567Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... or_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:6013:4688], schemeshard id = 72075186224037897 ... waiting for TEvPeriodicTableStats2 2026-01-04T02:40:10.568623Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:40:12.756088Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:40:12.756308Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 3 2026-01-04T02:40:12.756403Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 3 2026-01-04T02:40:15.957249Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:40:18.032326Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:40:18.032501Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 4 2026-01-04T02:40:18.032592Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 4 2026-01-04T02:40:21.270322Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:40:21.456743Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:40:21.456860Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:40:21.456912Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:40:21.456961Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-04T02:40:23.584385Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:40:23.584617Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 5 2026-01-04T02:40:23.584705Z node 2 :STATISTICS DEBUG: 
aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-04T02:40:23.595378Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:40:23.595464Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:26.879714Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:40:28.892964Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:40:28.893243Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 6 2026-01-04T02:40:28.893365Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-04T02:40:32.017296Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:40:34.352384Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:40:34.352554Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 7 2026-01-04T02:40:34.352621Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 7 2026-01-04T02:40:37.403881Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:40:39.940225Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:40:39.940438Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 8 2026-01-04T02:40:39.940596Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 8 2026-01-04T02:40:39.986432Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-04T02:40:39.986528Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:40:39.986760Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-04T02:40:40.007338Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:40:43.305017Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:40:45.543725Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:40:45.543930Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 9 2026-01-04T02:40:45.544068Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 9 2026-01-04T02:40:45.565545Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-04T02:40:45.565657Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:40:45.565952Z node 2 
:STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-04T02:40:45.590648Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:40:48.890290Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:40:51.033788Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:40:51.034057Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 10 2026-01-04T02:40:51.034147Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 10 2026-01-04T02:40:51.055631Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-04T02:40:51.055702Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:40:51.055942Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-04T02:40:51.069132Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:40:54.323227Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:40:54.496639Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:40:54.496707Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:40:54.496739Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:40:54.496770Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-04T02:40:56.834640Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:40:56.834833Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 11 2026-01-04T02:40:56.834922Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 11 2026-01-04T02:40:56.857553Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-04T02:40:56.857626Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:40:56.857857Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-04T02:40:56.875066Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete ... 
blocking NKikimr::TEvDataShard::TEvPeriodicTableStats from TX_COLUMNSHARD_ACTOR to FLAT_SCHEMESHARD_ACTOR cookie 0 ... waiting for TEvPeriodicTableStats2 (done) ... waiting for stats update from SchemeShard 2026-01-04T02:41:00.302241Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:41:02.472816Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:41:02.473077Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 12 2026-01-04T02:41:02.473182Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 12 2026-01-04T02:41:02.483875Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-04T02:41:02.483939Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:41:02.484159Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-04T02:41:02.497754Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete ... waiting for stats update from SchemeShard (done) ... waiting for TEvPropagateStatistics 2026-01-04T02:41:05.692877Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:41:07.967456Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:41:07.967691Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 13 ... 
waiting for TEvPropagateStatistics (done) 2026-01-04T02:41:07.968130Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:7806:5443]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:41:07.968366Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 13 2026-01-04T02:41:07.968479Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:41:07.968542Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:7806:5443], StatRequests.size() = 1 >> TOlap::AlterTtl [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_object_storage_listing/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_object_storage_listing/unittest >> KqpScripting::StreamExecuteYqlScriptScanClientTimeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce >> TConsoleTests::TestAttributes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2026-01-04T02:39:11.144602Z :WriteAndReadSomeMessagesWithSyncCompression INFO: Random seed for debugging is 1767494351144570 2026-01-04T02:39:11.318853Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330434038959719:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:11.318934Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:11.348004Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:39:11.348083Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330435994426468:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:11.348648Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:11.356920Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:39:11.527854Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:11.536934Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:11.564119Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:11.564272Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:11.566849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:11.566934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:11.598494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:11.612923Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:11.628675Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:39:11.630081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:11.712038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/0025aa/r3tmp/yandexbVMTvP.tmp 2026-01-04T02:39:11.712082Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/0025aa/r3tmp/yandexbVMTvP.tmp 2026-01-04T02:39:11.712282Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/0025aa/r3tmp/yandexbVMTvP.tmp 2026-01-04T02:39:11.712393Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:11.741447Z INFO: TTestServer started on Port 26300 GrpcPort 22734 2026-01-04T02:39:11.771513Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:11.804647Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:22734 2026-01-04T02:39:11.885686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:12.327407Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:12.353440Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:14.040689Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330448879328665:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:14.040688Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330448879328656:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:14.040798Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:14.041088Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330448879328671:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:14.041155Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:14.045877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:14.061041Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330448879328670:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-04T02:39:14.224339Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330448879328700:2137] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:14.493003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:14.495169Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591330448879328715:2310], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:39:14.495501Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZDA0ODc5YTUtNjlhZjg5ZWItM2ZlYjE4YTktNzk3MDdkNzI=, ActorId: [2:7591330448879328653:2300], ActorState: ExecuteState, LegacyTraceId: 01ke3dz13pekb7y70z00r2afh3, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:39:14.497407Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:39:14.498381Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330446923862985:2335], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:39:14.498708Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NzdiMDczZDgtNzhkNjVlYTItY2IzMjMwNDctZThhMWE4ODg=, ActorId: [1:7591330446923862951:2328], ActorState: ExecuteState, LegacyTraceId: 01ke3dz18f28gbeah4strzcnz0, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:39:14.499015Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 20 ... tive actors on pipe 2026-01-04T02:41:07.779313Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:839: TPartitionWriter 72075186224037892 (partition=0) TEvClientConnected Status OK, TabletId: 72075186224037892, NodeId 16, Generation: 1 2026-01-04T02:41:07.779514Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-01-04T02:41:07.779543Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-01-04T02:41:07.779612Z node 16 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie src|9fdf7ca6-91e45ddb-47cfb2fc-7d565ce8_0 generated for partition 0 topic 'rt3.dc1--test-topic' owner src 2026-01-04T02:41:07.779658Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:41:07.779672Z node 16 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:41:07.779689Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:41:07.779702Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:41:07.779720Z node 16 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:41:07.779757Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:41:07.779771Z node 16 :PERSQUEUE DEBUG: partition.cpp:2342: 
[72075186224037892][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:41:07.779785Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:41:07.779818Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:37: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyOwnerOk. Partition: 0 2026-01-04T02:41:07.779875Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-04T02:41:07.780326Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-01-04T02:41:07.780348Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-01-04T02:41:07.780398Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-04T02:41:07.780614Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|9fdf7ca6-91e45ddb-47cfb2fc-7d565ce8_0 2026-01-04T02:41:07.781241Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1767494467781 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:41:07.781368Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|9fdf7ca6-91e45ddb-47cfb2fc-7d565ce8_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2026-01-04T02:41:07.781582Z :INFO: [] MessageGroupId [src] SessionId [src|9fdf7ca6-91e45ddb-47cfb2fc-7d565ce8_0] Write session: close. 
Timeout = 0 ms 2026-01-04T02:41:07.781649Z :INFO: [] MessageGroupId [src] SessionId [src|9fdf7ca6-91e45ddb-47cfb2fc-7d565ce8_0] Write session will now close 2026-01-04T02:41:07.781703Z :DEBUG: [] MessageGroupId [src] SessionId [src|9fdf7ca6-91e45ddb-47cfb2fc-7d565ce8_0] Write session: aborting 2026-01-04T02:41:07.782241Z :INFO: [] MessageGroupId [src] SessionId [src|9fdf7ca6-91e45ddb-47cfb2fc-7d565ce8_0] Write session: gracefully shut down, all writes complete 2026-01-04T02:41:07.782309Z :DEBUG: [] MessageGroupId [src] SessionId [src|9fdf7ca6-91e45ddb-47cfb2fc-7d565ce8_0] Write session: destroy 2026-01-04T02:41:07.783571Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|9fdf7ca6-91e45ddb-47cfb2fc-7d565ce8_0 grpc read done: success: 0 data: 2026-01-04T02:41:07.783592Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|9fdf7ca6-91e45ddb-47cfb2fc-7d565ce8_0 grpc read failed 2026-01-04T02:41:07.783741Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:824: session v1 closed cookie: 1 sessionId: src|9fdf7ca6-91e45ddb-47cfb2fc-7d565ce8_0 2026-01-04T02:41:07.783770Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|9fdf7ca6-91e45ddb-47cfb2fc-7d565ce8_0 is DEAD 2026-01-04T02:41:07.784117Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:41:07.784439Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [15:7591330933325111477:2456] destroyed 2026-01-04T02:41:07.784483Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-04T02:41:07.784507Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:41:07.784520Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:41:07.784531Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:41:07.784544Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:41:07.784557Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:41:07.816963Z :INFO: [/Root] [/Root] [a328be16-9749ff61-ccf0cb34-945d4c71] Starting read session 2026-01-04T02:41:07.817013Z :DEBUG: [/Root] [/Root] [a328be16-9749ff61-ccf0cb34-945d4c71] Starting cluster discovery 2026-01-04T02:41:07.817272Z :INFO: [/Root] [/Root] [a328be16-9749ff61-ccf0cb34-945d4c71] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16504: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:16504
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:16504. " 2026-01-04T02:41:07.817319Z :DEBUG: [/Root] [/Root] [a328be16-9749ff61-ccf0cb34-945d4c71] Restart cluster discovery in 0.005308s 2026-01-04T02:41:07.823048Z :DEBUG: [/Root] [/Root] [a328be16-9749ff61-ccf0cb34-945d4c71] Starting cluster discovery 2026-01-04T02:41:07.823433Z :INFO: [/Root] [/Root] [a328be16-9749ff61-ccf0cb34-945d4c71] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16504: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:16504
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:16504. " 2026-01-04T02:41:07.823508Z :DEBUG: [/Root] [/Root] [a328be16-9749ff61-ccf0cb34-945d4c71] Restart cluster discovery in 0.012916s 2026-01-04T02:41:07.837001Z :DEBUG: [/Root] [/Root] [a328be16-9749ff61-ccf0cb34-945d4c71] Starting cluster discovery 2026-01-04T02:41:07.837222Z :INFO: [/Root] [/Root] [a328be16-9749ff61-ccf0cb34-945d4c71] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16504: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:16504
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:16504. " 2026-01-04T02:41:07.837256Z :DEBUG: [/Root] [/Root] [a328be16-9749ff61-ccf0cb34-945d4c71] Restart cluster discovery in 0.029023s 2026-01-04T02:41:07.867003Z :DEBUG: [/Root] [/Root] [a328be16-9749ff61-ccf0cb34-945d4c71] Starting cluster discovery 2026-01-04T02:41:07.867366Z :NOTICE: [/Root] [/Root] [a328be16-9749ff61-ccf0cb34-945d4c71] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16504: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:16504
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:16504. " } 2026-01-04T02:41:07.867545Z :NOTICE: [/Root] [/Root] [a328be16-9749ff61-ccf0cb34-945d4c71] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:16504: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:16504
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:16504. " } 2026-01-04T02:41:07.867671Z :INFO: [/Root] [/Root] [a328be16-9749ff61-ccf0cb34-945d4c71] Closing read session. Close timeout: 0.000000s 2026-01-04T02:41:07.867765Z :NOTICE: [/Root] [/Root] [a328be16-9749ff61-ccf0cb34-945d4c71] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:41:07.877920Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:41:07.877962Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:41:07.877982Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:41:07.878005Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:41:07.878023Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:41:08.467226Z node 15 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1259} ActorId: [15:7591330937620078813:2467] TxId: 281474976710674. Ctx: { TraceId: 01ke3e2getdvxvr3mzp5nmb590, Database: /Root, SessionId: ydb://session/3?node_id=15&id=NDY0MTVlMzktYTE5ZGRiMjctMjAwNzA5NmYtYmM1MDQ3ZTU=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 16 trace_id# 2026-01-04T02:41:08.467431Z node 15 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [15:7591330937620078820:2467], TxId: 281474976710674, task: 3. Ctx: { TraceId : 01ke3e2getdvxvr3mzp5nmb590. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=15&id=NDY0MTVlMzktYTE5ZGRiMjctMjAwNzA5NmYtYmM1MDQ3ZTU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [15:7591330937620078813:2467], status: UNAVAILABLE, reason: {
: Error: Terminate execution } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::AlterTtl [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:41:05.400352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:41:05.400438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:41:05.400472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:41:05.400532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:41:05.400586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:41:05.400616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:41:05.400669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:41:05.400729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:41:05.401649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:41:05.401960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:41:05.479942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:41:05.479999Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:05.495315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:41:05.495495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:41:05.495692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:41:05.504661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:41:05.504912Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:41:05.505597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:41:05.505866Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:41:05.508994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:05.509190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:41:05.510420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:41:05.510483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:05.510608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:41:05.510656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:41:05.510755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:41:05.510957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:41:05.617336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.618286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.618394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.618462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.618531Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.618642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.618721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.618784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.618834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.618896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.618965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.619045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.619088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.619152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.619215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... hard::TEvProposeTransactionResult> complete, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-04T02:41:10.303433Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-04T02:41:10.303497Z node 3 :FLAT_TX_SCHEMESHARD INFO: alter_table.cpp:149: TAlterColumnTable TPropose operationId# 106:0 HandleReply ProgressState at tablet: 72057594046678944 2026-01-04T02:41:10.303560Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 106 ready parts: 1/1 2026-01-04T02:41:10.303714Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } AffectedSet { TabletId: 72075186233409546 Flags: 2 } ExecLevel: 0 TxId: 106 MinStep: 5000042 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:41:10.305206Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 106:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:106 msg type: 269090816 2026-01-04T02:41:10.305327Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 106, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 106 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 106 at step: 5000043 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 106 at step: 5000043 2026-01-04T02:41:10.306095Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=106;fline=column_engine_logs.cpp:113;event=double_schema_version;v=1; 2026-01-04T02:41:10.315059Z node 
3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:41:10.315210Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 106 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 12884904048 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:41:10.315291Z node 3 :FLAT_TX_SCHEMESHARD INFO: alter_table.cpp:110: TAlterColumnTable TPropose operationId# 106:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000043 2026-01-04T02:41:10.316180Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 106:0 128 -> 129 2026-01-04T02:41:10.316446Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:41:10.316528Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000043 2026-01-04T02:41:10.319838Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:41:10.319887Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:41:10.320089Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:41:10.320233Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:10.320270Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 106, path id: 38 2026-01-04T02:41:10.320333Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [3:210:2211], at schemeshard: 72057594046678944, txId: 106, path id: 39 2026-01-04T02:41:10.320738Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-04T02:41:10.320785Z node 3 :FLAT_TX_SCHEMESHARD INFO: alter_table.cpp:200: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState at tablet: 72057594046678944 2026-01-04T02:41:10.320841Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: alter_table.cpp:223: TAlterColumnTable TProposedWaitParts operationId# 106:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-01-04T02:41:10.321793Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2026-01-04T02:41:10.321913Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: 
TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 106 2026-01-04T02:41:10.321948Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2026-01-04T02:41:10.321987Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 2026-01-04T02:41:10.322029Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:41:10.322962Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2026-01-04T02:41:10.323057Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 14 PathOwnerId: 72057594046678944, cookie: 106 2026-01-04T02:41:10.323085Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 106 2026-01-04T02:41:10.323111Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 14 2026-01-04T02:41:10.323150Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:41:10.323213Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 0/1, is published: true 2026-01-04T02:41:10.327151Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 106:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-01-04T02:41:10.328109Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-04T02:41:10.329205Z node 3 :TX_TIERING ERROR: log.cpp:841: fline=manager.cpp:170;error=cannot_read_secrets;reason=Can't read access key: No such secret: SId:secret; 2026-01-04T02:41:10.330754Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-04T02:41:10.343040Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 106 2026-01-04T02:41:10.343113Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 106, tablet: 72075186233409546, partId: 0 2026-01-04T02:41:10.343250Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 106:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 
106 FAKE_COORDINATOR: Erasing txId 106 2026-01-04T02:41:10.345616Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-04T02:41:10.345797Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-04T02:41:10.345870Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 106:0 ProgressState 2026-01-04T02:41:10.346001Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-04T02:41:10.346041Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-04T02:41:10.346111Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-04T02:41:10.346161Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-04T02:41:10.346208Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2026-01-04T02:41:10.346293Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:752:2729] message: TxId: 106 2026-01-04T02:41:10.346358Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-04T02:41:10.346414Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-01-04T02:41:10.346455Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 106:0 2026-01-04T02:41:10.346598Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:41:10.348708Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-04T02:41:10.348764Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [3:960:2928] TestWaitNotification: OK eventTxId 106 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/MyRoot/Tier1' stopped at tablet 72075186233409546 >> TConsoleTests::TestAttributesExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::UpdateBadType [GOOD] Test command err: Trying to start YDB, gRPC: 2349, MsgBus: 12838 2026-01-04T02:41:04.023014Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330917428242293:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:04.026640Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:04.237585Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:04.286761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:04.286927Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:04.322833Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330917428242267:2081] 1767494463997309 != 1767494463997312 2026-01-04T02:41:04.329856Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:04.334140Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:04.416460Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:04.416488Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:04.416508Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:04.416606Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:04.528597Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:04.832605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:04.909240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.032467Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:05.042499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.176897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.250602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.903923Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330930313146020:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.904044Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.904648Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330930313146030:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.904777Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.171353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.198529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.226498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.254373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.282935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.313592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.368233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.407035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.470368Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330934608114197:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.470443Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.470501Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330934608114202:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.470648Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330934608114204:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.470701Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.474053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:07.484350Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330934608114206:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:07.587148Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330934608114257:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:09.005573Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330917428242293:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:09.005629Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Type annotation, code: 1030
:4:26: Error: At function: KiUpdateTable!
:3:20: Error: Failed to convert type: Struct<'Amount':String?> to Struct<'Amount':Uint64?>
:3:20: Error: Failed to convert 'Amount': Optional to Optional
:3:20: Error: Row type mismatch for table: db.[/Root/Test] |91.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::JoinIndexLookup [GOOD] Test command err: Trying to start YDB, gRPC: 19107, MsgBus: 28292 2026-01-04T02:40:56.724415Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330884490376585:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:56.725059Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:56.921931Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:56.960130Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:56.960257Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:56.984113Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:57.019228Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:57.078382Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:57.078416Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:57.078423Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:57.078520Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:57.131369Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:57.460442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:57.509214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:57.626224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:57.738608Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:57.776152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:57.844343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:59.739942Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330897375280295:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.740022Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.740293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330897375280304:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:59.740356Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.090486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.117524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.147249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.178549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.207566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.240582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.300122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.342009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.408927Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330901670248474:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.409193Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.409301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330901670248479:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.409454Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330901670248481:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.409505Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.413151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:00.427663Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330901670248483:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:41:00.527225Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330901670248534:3778] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:01.724911Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330884490376585:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:01.724981Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:02.910736Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494462923, txId: 281474976715673] shutting down Trying to start YDB, gRPC: 22748, MsgBus: 11991 2026-01-04T02:41:03.762049Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330914663941797:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:03.762102Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:03.777796Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:03.855298Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:03.866943Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:03.867021Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:03.889404Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:03.962492Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:03.962520Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:03.962528Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:03.962616Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:04.005981Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:04.363535Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:04.375016Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.425527Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.549451Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.638740Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.786175Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:06.908742Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330927548845515:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.908844Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.909081Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330927548845524:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.909159Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.968013Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.998646Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.030208Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.063045Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.094706Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.130117Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.163556Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.211361Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.284739Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330931843813695:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.284824Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.284826Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330931843813700:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.285061Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330931843813702:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.285131Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.288171Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:07.297876Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330931843813703:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:41:07.395387Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330931843813755:3776] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:08.762305Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330914663941797:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:08.762381Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.7%| [TA] $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |91.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_object_storage_listing/test-results/unittest/{meta.json ... results_accumulator.log} >> THiveTest::TestCreateAndDeleteTabletWithStoragePoolsReboots [GOOD] >> THiveTest::TestCreateAndDeleteTabletWithStoragePools >> KqpScripting::StreamExecuteYqlScriptOperationTmeoutBruteForce [GOOD] >> KqpScripting::StreamExecuteYqlScriptPg >> TOlapNaming::CreateColumnTableOk [GOOD] >> TOlapNaming::CreateColumnTableFailed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpPragma::MatchRecognizeWithTimeOrderRecoverer [GOOD] Test command err: Trying to start YDB, gRPC: 2750, MsgBus: 64373 2026-01-04T02:40:58.619948Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330896130260950:2173];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:58.620726Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:58.883176Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:58.886441Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:58.886621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:58.891234Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:58.981417Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:59.058664Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:59.058688Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:59.058693Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-04T02:40:59.058751Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:59.127230Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:59.445118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:59.506773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:59.621568Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:59.654723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:59.797497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:59.860401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:01.486471Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330909015164559:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:01.486558Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:01.486872Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330909015164569:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:01.486918Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:01.770395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:01.795891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:01.820977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:01.848026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:01.877878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:01.911598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:01.947005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:02.022735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:02.091315Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330913310132735:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:02.091419Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:02.091643Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330913310132740:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:02.091676Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330913310132741:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:02.091700Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:02.095403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:02.107367Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330913310132744:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:02.179150Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330913310132795:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:03.563303Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330917605100402:2535], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:2:34: Error: Pragma auth not supported inside Kikimr query., code: 2016 2026-01-04T02:41:03.563728Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=Y2I2ZTE4Ny02N2YyMDFiLTdjMGI4MGVlLWNmZjk5MTAz, ActorId: [1:7591330917605100394:2530], ActorState: ExecuteState, LegacyTraceId: 01ke3e2c0g79qzt13mmpsxdanv, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Pre type annotation" issue_code: 1020 severity: 1 issues { position { row: 2 column: 34 } message: "Pragma auth not supported inside Kikimr query." end_position { row: 2 column: 34 } issue_code: 2016 severity: 1 } } tx_id# trace_id# 2026-01-04T02:41:03.618303Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_e ... ath=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:04.446275Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:04.505290Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:04.506471Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330921302484850:2081] 1767494464434145 != 1767494464434148 2026-01-04T02:41:04.548910Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:04.549010Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:04.577248Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:04.584625Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:04.584645Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:04.584651Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:04.584719Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:04.616882Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:04.925307Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:04.930983Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:04.942058Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.991567Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.112879Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.169305Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.441478Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:07.440535Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330934187388603:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.440624Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.440929Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330934187388613:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.440964Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.494425Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.522981Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.549744Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.577263Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.602650Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.649149Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.674360Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.716711Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.778374Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330934187389480:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.778464Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.778483Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330934187389485:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.778702Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330934187389487:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.778739Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:07.781854Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:07.791626Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330934187389488:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:07.854671Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330934187389540:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:09.419733Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:09.435572Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330921302484879:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:09.435628Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:10.095507Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494470133, txId: 281474976710675] shutting down |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |91.7%| [TA] $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpScripting::ScriptExplain [GOOD] |91.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.7%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TOlapNaming::AlterColumnTableFailed [GOOD] >> TOlapNaming::AlterColumnStoreOk >> KqpScripting::SecondaryIndexes [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectAll >> GenericFederatedQuery::IcebergHadoopTokenSelectAll >> TOlapNaming::CreateColumnTableFailed [GOOD] >> GenericFederatedQuery::PostgreSQLOnPremSelectAll >> GenericFederatedQuery::IcebergHadoopBasicSelectAll >> GenericFederatedQuery::IcebergHiveBasicSelectAll >> THiveTest::TestCreateAndDeleteTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWithStoragePools >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId1 >> GenericFederatedQuery::IcebergHadoopSaSelectAll >> KqpScripting::ScanQueryTruncate [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScriptExplain [GOOD] Test command err: Trying to start YDB, gRPC: 62921, MsgBus: 19704 2026-01-04T02:40:57.999948Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330891334799039:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:58.000023Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:58.208365Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:58.208472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:58.212845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:58.262129Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:58.309731Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:58.309899Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330891334799007:2081] 1767494457998718 != 1767494457998721 2026-01-04T02:40:58.367812Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:58.367841Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:58.367856Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:58.368051Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:58.545708Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:58.777127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:58.828377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.962524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:59.058180Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:59.103598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:59.168538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.958362Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330904219702762:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.958501Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.958919Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330904219702772:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.958993Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:01.224092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:01.256064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:01.284096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:01.312667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:01.341463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:01.374498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:01.434738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:01.477701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:01.553138Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330908514670940:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:01.553207Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:01.553264Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330908514670945:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:01.553422Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330908514670947:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:01.553466Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:01.557303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:01.569136Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330908514670949:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:01.674216Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330908514671000:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:03.000140Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330891334799039:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:03.000215Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:03.165215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:06.210549Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:06.210554Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:06.210614Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:06.369531Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:06.585877Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:06.598957Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.645749Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.757008Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.804906Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:07.065434Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:09.089406Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330942696832322:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:09.089562Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:09.089889Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330942696832332:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:09.089987Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:09.150328Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:09.182692Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:09.214024Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:09.243097Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:09.274585Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:09.311541Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:09.343659Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:09.389224Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:09.479784Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330942696833202:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:09.479931Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:09.480051Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330942696833207:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:09.480110Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330942696833209:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:09.480159Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:09.483288Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:09.495627Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330942696833211:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:41:09.587000Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330942696833262:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:11.060067Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330929811928615:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:11.060146Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:11.634874Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591330951286768178:2536], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:168: Error: At function: DataQueryBlocks
:1:185: Error: At function: TKiDataQueryBlock
:1:208: Error: At function: KiEffects
:1:219: Error: At function: KiWriteTable!
:1:219: Error: Cannot find table 'db.[/Root/ScriptingTest]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:41:11.635328Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ODk0ZjJjNWYtYjQxMjgxYjUtZjkxNTQzZjktZDQ5MmU0OTg=, ActorId: [2:7591330951286768176:2535], ActorState: ExecuteState, LegacyTraceId: 01ke3e2kxw42j0hfeyhthyzcev, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 168 } message: "At function: DataQueryBlocks" end_position { row: 1 column: 168 } severity: 1 issues { position { row: 1 column: 185 } message: "At function: TKiDataQueryBlock" end_position { row: 1 column: 185 } severity: 1 issues { position { row: 1 column: 208 } message: "At function: KiEffects" end_position { row: 1 column: 208 } severity: 1 issues { position { row: 1 column: 219 } message: "At function: KiWriteTable!" end_position { row: 1 column: 219 } severity: 1 issues { position { row: 1 column: 219 } message: "Cannot find table \'db.[/Root/ScriptingTest]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 219 } issue_code: 2003 severity: 1 } } } } } } tx_id# trace_id# |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> GenericFederatedQuery::YdbManagedSelectAll >> TOlapNaming::AlterColumnStoreOk [GOOD] >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::SecondaryIndexes [GOOD] Test command err: Trying to start YDB, gRPC: 27269, MsgBus: 13948 2026-01-04T02:40:57.109309Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330888741813943:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:57.109391Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:57.310187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:57.310319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:57.329152Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:57.420030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:57.425866Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:57.428936Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330888741813912:2081] 1767494457108004 != 1767494457108007 2026-01-04T02:40:57.476506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:57.476545Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
(empty maybe) 2026-01-04T02:40:57.476552Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:57.476635Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:57.576531Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:57.825829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:57.866177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:57.993259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.119863Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:58.133554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:58.199702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.161399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330901626717675:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.161526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.161932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330901626717685:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.161993Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.454290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.483617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.512708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.541693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.568058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.598398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.628533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.683581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:00.748810Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330901626718560:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.748928Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.749029Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330901626718565:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.749348Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330901626718567:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.749406Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:00.753445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:00.763166Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330901626718569:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:00.864156Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330901626718620:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:02.109586Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330888741813943:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:02.109672Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:02.324187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... _script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:04.945468Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:04.959615Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:04.959709Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:04.966099Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:05.011144Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:05.011167Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:05.011176Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:05.011259Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:05.127807Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:05.381785Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:05.399780Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.461397Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.599380Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.665533Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.828671Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:08.013920Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330938311203280:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:08.013998Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:08.014234Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330938311203290:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:08.014299Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:08.080561Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:08.108229Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:08.133230Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:08.156517Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:08.179872Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:08.208463Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:08.261676Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:08.303234Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:08.378052Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330938311204159:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:08.378126Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330938311204164:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:08.378146Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:08.378335Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330938311204167:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:08.378411Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:08.381009Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:08.388708Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330938311204166:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:08.482296Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330938311204219:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:09.823327Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330921131332270:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:09.823400Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:09.937019Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:09.967146Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:09.997000Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TAuditTest::OptionsRequestsAreNotAudited [GOOD] >> TAuditTest::ModifyingMethodsAlwaysAuditable [GOOD] >> TUrlMatcherTest::MatchExactPathOnly [GOOD] >> TAuditTest::DeniedPathsAreNotAudited [GOOD] >> TUrlMatcherTest::MatchRecursive [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::CreateColumnTableFailed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:41:05.405570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:41:05.405665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:41:05.405718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, 
StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:41:05.405783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:41:05.405830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:41:05.405855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:41:05.405894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:41:05.405941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:41:05.406631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:41:05.406903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:41:05.477742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:41:05.477796Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:05.491719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:41:05.491913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:41:05.492106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:41:05.500860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:41:05.501088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:41:05.501831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:41:05.502079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:41:05.505373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:05.505582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:41:05.506811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:41:05.506875Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:05.507012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:41:05.507059Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to 
make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:41:05.507170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:41:05.507350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:41:05.657880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.658839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.659017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.659098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.659161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.659251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.659321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.659415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.659488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.659557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.659630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.659712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.659764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.659879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:41:05.659971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-04T02:41:12.868139Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:41:12.868259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 8589936750 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:41:12.868313Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:41:12.868579Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:41:12.868635Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:41:12.868843Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:41:12.868919Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:41:12.870598Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:41:12.870647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:41:12.870848Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:12.870899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-04T02:41:12.871363Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:41:12.871419Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-04T02:41:12.871536Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:41:12.871578Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:41:12.871637Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:41:12.871679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:41:12.871722Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:41:12.871768Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:41:12.871812Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:41:12.871865Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:41:12.871955Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:41:12.872004Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-04T02:41:12.872047Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-04T02:41:12.872490Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:41:12.872595Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:41:12.872645Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:41:12.872693Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:41:12.872741Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:41:12.872835Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:41:12.875343Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:41:12.875867Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-04T02:41:12.876712Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: 
actor# [2:678:2667] Bootstrap 2026-01-04T02:41:12.877938Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:678:2667] Become StateWork (SchemeCache [2:683:2672]) 2026-01-04T02:41:12.881080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TestTable" Schema { Columns { Name: "Id" Type: "Int32" NotNull: true } Columns { Name: "mess age" Type: "Utf8" } KeyColumnNames: "Id" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:41:12.881391Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: TCreateColumnTable Propose, path: /MyRoot/TestTable, opId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:41:12.881661Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusSchemeError, reason: Invalid name for column 'mess age', at schemeshard: 72057594046678944 2026-01-04T02:41:12.882610Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:678:2667] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:41:12.885586Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSchemeError Reason: "Invalid name for column \'mess age\'" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:41:12.885884Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column 'mess age', operation: CREATE COLUMN TABLE, path: /MyRoot/ 2026-01-04T02:41:12.886319Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:41:12.886533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:41:12.886591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:41:12.886959Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:41:12.887059Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:41:12.887104Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:693:2682] TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-04T02:41:12.890543Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateColumnTable CreateColumnTable { Name: "TestTable" Schema { Columns { Name: "Id" Type: "Int32" NotNull: true } Columns { Name: "~!@#$%^&*()+=asdfa" Type: "Utf8" } KeyColumnNames: "Id" } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:41:12.890800Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: create_table.cpp:595: TCreateColumnTable Propose, path: /MyRoot/TestTable, opId: 102:0, at schemeshard: 
72057594046678944 2026-01-04T02:41:12.891007Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusSchemeError, reason: Invalid name for column '~!@#$%^&*()+=asdfa', at schemeshard: 72057594046678944 2026-01-04T02:41:12.893123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusSchemeError Reason: "Invalid name for column \'~!@#$%^&*()+=asdfa\'" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:41:12.893322Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSchemeError, reason: Invalid name for column '~!@#$%^&*()+=asdfa', operation: CREATE COLUMN TABLE, path: /MyRoot/ TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:41:12.893588Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:41:12.893627Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:41:12.893978Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:41:12.894055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:41:12.894091Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:700:2689] TestWaitNotification: OK eventTxId 102 >> KqpYql::Closure [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> THiveTest::TestCreateAndReassignTabletWithStoragePools [GOOD] >> THiveTest::TestCreateAndReassignTabletWhileStarting >> KqpYql::CreateUseTable [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::OptionsRequestsAreNotAudited [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> TPQTest::TestSourceIdDropByUserWrites [GOOD] >> TPQTest::TestSourceIdDropBySourceIdCount |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TUrlMatcherTest::MatchExactPathOnly [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::DeniedPathsAreNotAudited [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TUrlMatcherTest::MatchRecursive [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::ModifyingMethodsAlwaysAuditable [GOOD] |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnStoreOk [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:41:05.478254Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:41:05.478359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:41:05.478403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:41:05.478455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:41:05.478497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:41:05.478529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:41:05.478595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:41:05.478675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:41:05.479600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:41:05.479947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:41:05.567716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:41:05.567779Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:05.581681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:41:05.581883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:41:05.582080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:41:05.593814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:41:05.594065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:41:05.594788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:41:05.595028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:41:05.598259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:05.598487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 
2026-01-04T02:41:05.599670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:41:05.599731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:05.599881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:41:05.599931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:41:05.600785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:41:05.601015Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:41:05.749737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.750834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.750996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.751078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.751149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.751244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.751325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.751429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.751508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.751574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.751649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.751736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.751803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.751917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.752013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... hemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 129 2026-01-04T02:41:13.643623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:41:13.643785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:41:13.644889Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=103;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=1;to_version=2;diff=Version: 2 UpsertColumns { Id: 3 Name: "comment" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:13.645030Z node 2 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=72075186233409546;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=103;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=5000040;tx_id=103;;switch_optimizer=0;switch_accessors=1; 2026-01-04T02:41:13.647095Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:41:13.647153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:41:13.647401Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:41:13.647601Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:13.647653Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 1 2026-01-04T02:41:13.647706Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 FAKE_COORDINATOR: 
advance: minStep5000040 State->FrontStep: 5000040 2026-01-04T02:41:13.648448Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:41:13.648517Z node 2 :FLAT_TX_SCHEMESHARD INFO: alter_store.cpp:305: TAlterOlapStore TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2026-01-04T02:41:13.648585Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: alter_store.cpp:332: TAlterOlapStore TProposedWaitParts operationId# 103:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409546 2026-01-04T02:41:13.648705Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:41:13.649433Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:41:13.649544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:41:13.649589Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:41:13.649637Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-04T02:41:13.649683Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:41:13.650969Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:41:13.651054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:41:13.651087Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:41:13.651118Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 7 2026-01-04T02:41:13.651149Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:41:13.654064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 275382275 2026-01-04T02:41:13.654643Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2026-01-04T02:41:13.654686Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:41:13.655285Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:41:13.655535Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:41:13.655609Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:13.655650Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-04T02:41:13.656186Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:41:13.656271Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:41:13.656304Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:41:13.656334Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 5 2026-01-04T02:41:13.656368Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:41:13.656451Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-04T02:41:13.657785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:41:13.674876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 103 2026-01-04T02:41:13.674947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 103, tablet: 72075186233409546, partId: 0 2026-01-04T02:41:13.675098Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 103 2026-01-04T02:41:13.675159Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 FAKE_COORDINATOR: Erasing txId 103 2026-01-04T02:41:13.678580Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 
2026-01-04T02:41:13.678757Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:41:13.678803Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-04T02:41:13.678923Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:41:13.678982Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:41:13.679026Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:41:13.679064Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:41:13.679107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-04T02:41:13.679184Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:751:2727] message: TxId: 103 2026-01-04T02:41:13.679242Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:41:13.679287Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:41:13.679319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:41:13.679450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:41:13.681412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:41:13.681467Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [2:856:2824] TestWaitNotification: OK eventTxId 103 |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::ScanQueryTruncate [GOOD] Test command err: Trying to start YDB, gRPC: 12828, MsgBus: 28556 2026-01-04T02:41:01.838120Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330906753096141:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:01.838193Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:02.070942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:02.071059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:02.073663Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:02.167669Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:02.170934Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:02.224401Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:02.224545Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:02.224556Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:02.224679Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:02.396803Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:02.657176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:02.724503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:02.833496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:02.900545Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:02.988111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:03.053225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.819410Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330919637999855:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.819577Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.819865Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330919637999865:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:04.819915Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:05.119890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.154904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.188919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.219280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.246626Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.278321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.345356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.387874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.478520Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330923932968042:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:05.478606Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:05.478655Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330923932968047:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:05.479150Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330923932968050:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:05.479240Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:05.482530Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:05.496502Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330923932968049:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:05.578190Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330923932968102:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:06.848078Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330906753096141:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:06.848546Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:07.057749Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330932522903018:2539], status: PRECONDITION_FAILED, issues:
: Error: Default error
:1:752: Error: Scan query should have a single result set., code: 2029
: Error: Default error
:1:752: Error: Scan query should have a single result set., code: 2029 2026-01-04T02:41:07.058269Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MTZkMDM4Mi1jZDIzMjVjOS0yNGVkOT ... cution, code: 1060
: Error: Default error
:1:375: Error: Scan query cannot have data modifications., code: 2029 Trying to start YDB, gRPC: 10618, MsgBus: 8714 2026-01-04T02:41:07.981537Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330933321352032:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:07.981593Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:07.993182Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:08.061956Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:08.097183Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:08.097264Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:08.103789Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:08.138033Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:08.138056Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:08.138063Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:08.138149Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:08.223183Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:08.486056Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:08.494810Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:08.550549Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:08.690896Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:08.753285Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:08.987405Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:11.253174Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330950501223034:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:11.253271Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:11.253589Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330950501223044:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:11.253634Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:11.304920Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:11.334844Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:11.363535Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:11.392925Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:11.422754Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:11.453188Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:11.483650Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:11.523975Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:11.640437Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330950501223913:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:11.640536Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:11.640769Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330950501223918:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:11.640817Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330950501223919:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:11.640870Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:11.644185Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:11.654002Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330950501223922:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:11.717826Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330950501223973:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:12.981996Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330933321352032:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:12.982647Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:13.210570Z node 2 :TX_DATASHARD ERROR: datashard__kqp_scan.cpp:163: Undelivered event: 65542, at: [2:7591330959091158966:2057], tablet: [2:7591330937616320297:2313], scanId: 3, table: /Root/EightShard 2026-01-04T02:41:13.218976Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494473248, txId: 281474976710673] shutting down |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test >> TPQTest::TestPQRead [GOOD] >> TPQTest::TestPQReadAhead |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::Closure [GOOD] Test command err: Trying to start YDB, gRPC: 12362, MsgBus: 26679 2026-01-04T02:41:02.336892Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330914296255248:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:02.336946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:02.529295Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:02.562747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:02.562945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:02.602380Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:02.641362Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:02.698428Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:02.708679Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: 
distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:02.708709Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:02.708732Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:02.708835Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:03.123620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:03.169069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:03.284293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:03.384051Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:03.423938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:03.485752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.436391Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330927181158979:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:05.436518Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:05.436894Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330927181158989:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:05.436956Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:05.740503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.773364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.803265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.832406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.856913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.888076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.923545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.973499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.035880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330931476127155:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.035958Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.036041Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330931476127160:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.036113Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330931476127162:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.036164Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.039714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:06.049420Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330931476127164:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:41:06.130189Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330931476127215:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:07.337318Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330914296255248:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:07.338871Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 27114, MsgBus: 61132 2026-01-04T02:41:08.494722Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330937981621138:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:08.494919Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:08.507267Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:08.590308Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:08.591735Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330937981621096:2081] 1767494468493310 != 1767494468493313 2026-01-04T02:41:08.604781Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:08.604856Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:08.628015Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:08.676501Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:08.676520Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:08.676528Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:08.676656Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:08.801538Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:09.052874Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:41:09.057673Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:09.070607Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:09.126540Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:09.258185Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:09.314876Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:09.506252Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:11.630179Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330950866524855:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:11.630280Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:11.630535Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330950866524865:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:11.630593Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:11.700047Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:11.763554Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:11.791842Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:11.820452Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:11.846717Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:11.877064Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:11.905014Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:11.949215Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:12.023807Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330955161493029:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.023918Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.023971Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330955161493034:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.024112Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330955161493036:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.024164Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.027299Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:12.037092Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330955161493038:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:12.106281Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330955161493089:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:13.495217Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330937981621138:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:13.495291Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> THiveTest::TestCreateAndReassignTabletWhileStarting [GOOD] >> THiveTest::TestCreateTabletAndReassignGroups >> TAuditTest::OtherGetRequestsAreAudited [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::MoveTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:123:2058] recipient: [1:116:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:123:2058] recipient: [1:116:2145] Leader for TabletID 72057594046316545 is [1:126:2150] sender: [1:131:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:115:2144] Leader for TabletID 72057594046447617 is [1:139:2160] sender: [1:141:2058] recipient: [1:116:2145] 2026-01-04T02:39:42.214486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:42.214551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:42.214580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:42.214673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:42.214708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:42.214740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:42.214794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10 2026-01-04T02:39:42.214852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:42.215507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:42.215739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:42.310737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:39:42.310810Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:42.311555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:139:2160] sender: [1:187:2058] recipient: [1:15:2062] 2026-01-04T02:39:42.322408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:42.322504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:42.322638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:42.326506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:42.326945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:42.327472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:42.327760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:42.330461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:42.330613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:42.331597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:42.331638Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:42.331722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:42.331755Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:42.331796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:42.331991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: 
TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:42.449739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.450543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.450631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.450725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.450787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.450853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.450927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.450993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.451116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.451184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.451269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.451357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:42.451439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "m ... 
4T02:41:13.866881Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:13.866921Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [62:210:2211], at schemeshard: 72057594046678944, txId: 1003, path id: 1 2026-01-04T02:41:13.866970Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [62:210:2211], at schemeshard: 72057594046678944, txId: 1003, path id: 39 2026-01-04T02:41:13.867424Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-04T02:41:13.867479Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 1003:0 ProgressState at tablet: 72057594046678944 2026-01-04T02:41:13.867574Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-04T02:41:13.867613Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:41:13.867655Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1003:0 129 -> 240 2026-01-04T02:41:13.868946Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 1003 2026-01-04T02:41:13.869037Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 1003 2026-01-04T02:41:13.869074Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2026-01-04T02:41:13.869113Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-04T02:41:13.869155Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-04T02:41:13.870917Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2026-01-04T02:41:13.871016Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 1003 2026-01-04T02:41:13.871053Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1003 2026-01-04T02:41:13.871091Z node 62 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1003, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-04T02:41:13.871131Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:41:13.871222Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 0/1, is published: true 2026-01-04T02:41:13.874230Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-04T02:41:13.874296Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_move_table.cpp:581: TMoveTable TDone, operationId: 1003:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:41:13.874348Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_move_table.cpp:591: TMoveTable TDone, operationId: 1003:0 ProgressState, SourcePathId: [OwnerId: 72057594046678944, LocalPathId: 39], TargetPathId: [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-04T02:41:13.874456Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-01-04T02:41:13.874496Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-04T02:41:13.874539Z node 62 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-01-04T02:41:13.874574Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-04T02:41:13.874612Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2026-01-04T02:41:13.874657Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-04T02:41:13.874699Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1003:0 2026-01-04T02:41:13.874734Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1003:0 2026-01-04T02:41:13.874867Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-04T02:41:13.874908Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:41:13.875955Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:41:13.876009Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-04T02:41:13.876091Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:41:13.876759Z node 62 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2026-01-04T02:41:13.877105Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1003 2026-01-04T02:41:13.879731Z node 62 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2026-01-04T02:41:13.880113Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2026-01-04T02:41:13.880159Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2026-01-04T02:41:13.880526Z node 62 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2026-01-04T02:41:13.880617Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2026-01-04T02:41:13.880653Z node 62 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [62:878:2850] TestWaitNotification: OK eventTxId 1003 2026-01-04T02:41:13.881123Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableMoved" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:41:13.881340Z node 62 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableMoved" took 255us result status StatusSuccess 2026-01-04T02:41:13.881955Z node 62 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableMoved" PathDescription { Self { Name: "TTLEnabledTableMoved" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 2 } ChildrenExist: false } Table { Name: "TTLEnabledTableMoved" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 2 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpYql::CreateUseTable [GOOD] Test command err: Trying to start YDB, gRPC: 2766, MsgBus: 12330 2026-01-04T02:41:02.667463Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330913938478727:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:02.667658Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:02.875810Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:02.902802Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:02.902909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:02.920563Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:03.011028Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:03.066864Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:03.073824Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:03.073846Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:03.073859Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:03.073951Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-01-04T02:41:03.462332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:03.515821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:03.642768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:03.721049Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:03.779799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:03.853291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.786031Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330926823382365:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:05.786165Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:05.787336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330926823382375:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:05.787435Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.103216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.130474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.157355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.183087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.209959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.240379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.294133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.332811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.394868Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330931118350540:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.394934Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.394994Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330931118350545:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.395052Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330931118350547:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.395079Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.398183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:06.406508Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330931118350549:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:06.484303Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330931118350600:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:07.665852Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330913938478727:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:07.665907Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Optimization, code: 1070
:4:24: Error: Queries with mixed data and scheme operations are not supported. Use separate queries for different types of operations., code: 2009 Trying to start YDB, gRPC: 7133, MsgBus: 1951 2026-01-04T02:41:08.853064Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330940097148832:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:08.854055Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:08.871621Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:08.950337Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:08.965122Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:08.965204Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:08.989971Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:09.049878Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:09.049905Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:09.049912Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:09.049985Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:09.084494Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:09.446562Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:09.463403Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:09.513268Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:09.621749Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:09.671897Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:09.872138Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:11.847803Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330952982052545:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:11.847929Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:11.848266Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330952982052555:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:11.848331Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:11.905975Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:11.935644Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:11.964452Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:12.017914Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:12.044631Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:12.075128Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:12.104117Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:12.148793Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:12.216819Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330957277020726:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.216895Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.216901Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330957277020731:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.217085Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330957277020733:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.217141Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.219941Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:12.230346Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330957277020734:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:41:12.318189Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330957277020786:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:13.781373Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:13.853904Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330940097148832:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:13.853976Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:14.004539Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494474046, txId: 281474976715675] shutting down |91.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest >> TAuditTest::OtherGetRequestsAreAudited [GOOD] |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId1 [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId2 >> TOlapNaming::AlterColumnTableOk [GOOD] |91.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/mon/audit/ut/unittest |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/mon/audit/ut/unittest >> THiveTest::TestCreateTabletAndReassignGroups [GOOD] >> THiveTest::TestCreateTabletAndMixedReassignGroups3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::CreateDropStandaloneTableDefaultSharding [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:41:05.411713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:41:05.411809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:41:05.411870Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:41:05.411911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: 
OperationsProcessing config: using default configuration 2026-01-04T02:41:05.411950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:41:05.411976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:41:05.412047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:41:05.412114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:41:05.413003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:41:05.413316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:41:05.505430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:41:05.505486Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:05.520906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:41:05.521111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:41:05.521312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:41:05.528615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:41:05.528867Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:41:05.529697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:41:05.529986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:41:05.532981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:05.533182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:41:05.534415Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:41:05.534485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:05.534601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:41:05.534648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2026-01-04T02:41:05.534752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:41:05.534951Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:41:05.695046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.696097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.696228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.696306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.696391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.696487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.696585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:41:05.696669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.696752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.696835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.696919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.696987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.697049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.697154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.697262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... Close pipe to deleted shardIdx 72057594046678944:59 tabletId 72075186233409604 2026-01-04T02:41:16.069129Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-04T02:41:16.069152Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-01-04T02:41:16.069219Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:41:16.069237Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-04T02:41:16.069286Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-04T02:41:16.069308Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-04T02:41:16.069372Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:23 2026-01-04T02:41:16.069388Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:23 tabletId 72075186233409568 2026-01-04T02:41:16.069425Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:21 2026-01-04T02:41:16.069439Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:21 tabletId 72075186233409566 2026-01-04T02:41:16.069491Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:19 2026-01-04T02:41:16.069516Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:19 tabletId 72075186233409564 2026-01-04T02:41:16.069566Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:17 2026-01-04T02:41:16.069582Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:17 tabletId 72075186233409562 2026-01-04T02:41:16.070131Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:15 2026-01-04T02:41:16.070161Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:15 tabletId 72075186233409560 2026-01-04T02:41:16.070218Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:13 2026-01-04T02:41:16.070235Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:13 tabletId 72075186233409558 2026-01-04T02:41:16.070267Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:11 2026-01-04T02:41:16.070283Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted 
shardIdx 72057594046678944:11 tabletId 72075186233409556 2026-01-04T02:41:16.070315Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:9 2026-01-04T02:41:16.070333Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:9 tabletId 72075186233409554 2026-01-04T02:41:16.070409Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:36 2026-01-04T02:41:16.070435Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:36 tabletId 72075186233409581 2026-01-04T02:41:16.070507Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:38 2026-01-04T02:41:16.070532Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:38 tabletId 72075186233409583 2026-01-04T02:41:16.073313Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:40 2026-01-04T02:41:16.073349Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:40 tabletId 72075186233409585 2026-01-04T02:41:16.073408Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:32 2026-01-04T02:41:16.073423Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:32 tabletId 72075186233409577 2026-01-04T02:41:16.073463Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:34 2026-01-04T02:41:16.073478Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:34 tabletId 72075186233409579 2026-01-04T02:41:16.073635Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:28 2026-01-04T02:41:16.073650Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:28 tabletId 72075186233409573 2026-01-04T02:41:16.074124Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:30 2026-01-04T02:41:16.074152Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:30 tabletId 72075186233409575 2026-01-04T02:41:16.074475Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:26 2026-01-04T02:41:16.074497Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:26 tabletId 72075186233409571 2026-01-04T02:41:16.074535Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:24 2026-01-04T02:41:16.074549Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:24 tabletId 72075186233409569 2026-01-04T02:41:16.075383Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:57 2026-01-04T02:41:16.075407Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:57 tabletId 72075186233409602 2026-01-04T02:41:16.076200Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:53 2026-01-04T02:41:16.076232Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:53 tabletId 72075186233409598 2026-01-04T02:41:16.076276Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:55 2026-01-04T02:41:16.076291Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:55 tabletId 72075186233409600 2026-01-04T02:41:16.076326Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:49 2026-01-04T02:41:16.076352Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:49 tabletId 72075186233409594 2026-01-04T02:41:16.076410Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:47 2026-01-04T02:41:16.076425Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:47 tabletId 72075186233409592 2026-01-04T02:41:16.076461Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:51 2026-01-04T02:41:16.076476Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:51 tabletId 72075186233409596 2026-01-04T02:41:16.076506Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:45 2026-01-04T02:41:16.076521Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:45 tabletId 72075186233409590 2026-01-04T02:41:16.076560Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:43 2026-01-04T02:41:16.076574Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:43 tabletId 72075186233409588 2026-01-04T02:41:16.076602Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:41 2026-01-04T02:41:16.076637Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:41 tabletId 72075186233409586 2026-01-04T02:41:16.077745Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 109 2026-01-04T02:41:16.078841Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MyDir/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:41:16.079042Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MyDir/ColumnTable" took 226us result status 
StatusPathDoesNotExist 2026-01-04T02:41:16.079177Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/MyDir/ColumnTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/MyDir\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/MyDir/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/MyDir" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "MyDir" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:41:16.079693Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 42 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2026-01-04T02:41:16.079760Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72057594046678944 describe pathId 42 took 82us result status StatusPathDoesNotExist 2026-01-04T02:41:16.079820Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'\', error: path is empty" Path: "" PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.8%| [TA] $(B)/ydb/core/mon/audit/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.8%| [TA] {RESULT} $(B)/ydb/core/mon/audit/ut/test-results/unittest/{meta.json ... results_accumulator.log} |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest |91.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mon/audit/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlapNaming::AlterColumnTableOk [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:41:07.985795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:41:07.985911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:41:07.985953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:41:07.986000Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:41:07.986031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:41:07.986054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:41:07.986095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:41:07.986154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:41:07.987052Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:41:07.987279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:41:08.082827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:41:08.082889Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:08.097950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:41:08.098145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:41:08.098368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:41:08.107330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:41:08.107568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:41:08.108321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2026-01-04T02:41:08.108611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:41:08.111878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:08.112079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:41:08.113326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:41:08.113384Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:08.113508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:41:08.113552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:41:08.113658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:41:08.113843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:41:08.267373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:08.268395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:08.268546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:08.268628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:08.268694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:08.268779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:08.268846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:08.268934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:08.269005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:08.269062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:08.269134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:08.269222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:08.269286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:08.269401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:08.269491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... -04T02:41:16.355171Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.355221Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.355901Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.356009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.356107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.356180Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.356247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.356296Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.356341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.356437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.359121Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.359206Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.359251Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.359460Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.359526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.361095Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.361159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.361277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.363703Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.363842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.363887Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.363987Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.364061Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.364138Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.364209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.364320Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.364905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.364984Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.365033Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.365208Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.365880Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.365969Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.366367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.368055Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.368137Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.368187Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.368231Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.368284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.368384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.368471Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.368533Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.370777Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.370915Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:41:16.370966Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:41:16.371064Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:41:16.371103Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:41:16.371157Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:41:16.371191Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:41:16.371219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:41:16.371292Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:3130:4348] message: TxId: 102 2026-01-04T02:41:16.371329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:41:16.371373Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 
2026-01-04T02:41:16.371402Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:41:16.372401Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 66 2026-01-04T02:41:16.374944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:41:16.374988Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:4040:5202] TestWaitNotification: OK eventTxId 102 |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] >> TConsoleTests::TestAttributesExtSubdomain [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning >> THiveTest::TestCreateTabletAndMixedReassignGroups3 [GOOD] >> THiveTest::TestCreateExternalTablet >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] >> KqpLimits::KqpMkqlMemoryLimitException ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptPg [GOOD] Test command err: Trying to start YDB, gRPC: 28937, MsgBus: 12498 2026-01-04T02:41:02.870421Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330910272764153:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:02.870520Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:03.096194Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:03.100582Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:03.100710Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:03.153737Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:03.183103Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:03.185966Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330910272764129:2081] 1767494462869469 != 1767494462869472 2026-01-04T02:41:03.255073Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:03.266589Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:03.266616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:03.266623Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:03.266700Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got 
bad distributable configuration 2026-01-04T02:41:03.657268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:03.705472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:03.844174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:03.943925Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:03.989371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.056466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:05.691744Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330923157667890:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:05.691881Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:05.692302Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330923157667900:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:05.692368Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.058669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.086686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.111318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.136345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.164859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.210367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.240785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.284624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.352426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330927452636068:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.352492Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.352542Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330927452636073:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.352720Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330927452636075:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.352783Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.355769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:06.365521Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330927452636076:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:06.442566Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330927452636128:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:07.870827Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330910272764153:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:07.870911Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:07.878307Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MzE4MWE0MTAtM2M3MmJiMGItMzEwMThiNi1mOTU5NWViNg==, ActorId: [1:7591330931747603728:2530], ActorState: ExecuteState, LegacyTraceId: 01ke3e2g90atveva49es6re8h0, Create QueryResponse for error on request, msg: status# TIMEOUT issue ... OUT issues# { message: "Request timeout 115ms exceeded" severity: 1 }{ message: "Cancelling after 115ms in ExecuteState" severity: 1 } trace_id# 2026-01-04T02:41:11.470162Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494471498, txId: 281474976710742] shutting down 2026-01-04T02:41:11.470734Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494471498, txId: 281474976710741] shutting down Trying to start YDB, gRPC: 25790, MsgBus: 13974 2026-01-04T02:41:12.229843Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330956057540914:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:12.230327Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:12.249815Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:12.302898Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:12.342556Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:12.342640Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:12.364686Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:12.368910Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:12.368935Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:12.368944Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:12.369027Z node 2 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:12.411836Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:12.769792Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:12.784281Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:12.836604Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:12.990249Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:13.058555Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:13.238098Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:15.748942Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330968942444630:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:15.749051Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:15.749342Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330968942444640:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:15.749395Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:15.815961Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:15.843947Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:15.870613Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:15.896665Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:15.923548Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:15.955554Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:15.988385Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:16.030428Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:16.102733Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330973237412810:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:16.102824Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330973237412815:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:16.102845Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:16.103104Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330973237412818:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:16.103153Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:16.106147Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:16.115266Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330973237412817:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:16.169893Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330973237412870:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:17.230152Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330956057540914:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:17.230225Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpQuery::Now >> KqpQuery::DecimalOutOfPrecisionBulk+EnableParameterizedDecimal >> KqpQueryDiscard::DiscardSelectSupport >> KqpTypes::QuerySpecialTypes >> KqpLimits::BigParameter >> KqpLimits::TooBigQuery+useSink >> KqpExplain::Explain >> KqpParams::RowsList >> KqpStats::RequestUnitForBadRequestExecute >> KqpQuery::PreparedQueryInvalidate >> KqpExplain::PrecomputeRange >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink >> THiveTest::TestCreateExternalTablet [GOOD] >> KqpExplain::UpdateSecondaryConditional+UseSink >> KqpParams::DefaultParameterValue >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionOperationId2 [GOOD] >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken >> KqpQuery::DecimalOutOfPrecision+UseOltpSink-EnableParameterizedDecimal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanOperationTmeoutBruteForce [GOOD] Test command err: Trying to start YDB, gRPC: 19703, MsgBus: 11589 2026-01-04T02:41:02.885349Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330913713604694:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:02.885439Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:02.925386Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:41:03.191191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:03.191348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:03.195029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:03.244344Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:03.244543Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:03.248699Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch 
for subscription [1:7591330913713604651:2081] 1767494462884121 != 1767494462884124 2026-01-04T02:41:03.301859Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:03.301904Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:03.301917Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:03.302031Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:03.498382Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:03.711066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:03.772519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:03.896600Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:03.897979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.031705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:04.104331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.043821Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330930893475713:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.043989Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.044313Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330930893475723:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.044360Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.344870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.373360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.400708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.429920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.458611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.488239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.548245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.589591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:06.652621Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330930893476591:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.652696Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.652737Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330930893476596:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.652841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330930893476598:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.652866Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:06.656371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:06.666392Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330930893476600:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:06.743270Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330930893476651:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:07.885626Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330913713604694:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:07.885698Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:08.117235Z node 1 :RPC_REQUEST WARN: rpc_stream_execute_yql_script.cpp:377: Client lost, ActorId: [1:759133 ... 9591689774:2537], ActorState: ExecuteState, LegacyTraceId: 01ke3e2r5q7gh6s9mm43z386fw, Create QueryResponse for error on request, msg: status# TIMEOUT issues# { message: "Request timeout 7ms exceeded" severity: 1 }{ message: "Cancelling after 13ms in ExecuteState" severity: 1 } trace_id# 2026-01-04T02:41:15.984766Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 1ms, session id ydb://session/3?node_id=2&id=OWQ4MDMwOTQtYjcwMjkwM2ItZWEwY2Q1ZWQtNjc3Mzg2YjE= } 2026-01-04T02:41:15.995977Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591330948116850211:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:15.996051Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:16.024492Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 13ms, session id ydb://session/3?node_id=2&id=MjE4N2ExYjMtNDdkOWE1MTAtYTIwZjQ5OGUtNDUwYjNjYjE= } 2026-01-04T02:41:16.063920Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 10ms, session id ydb://session/3?node_id=2&id=ZWZlNmMzMTUtMjA0MDIzMGItZDFmNTIxZTUtNjJlNGQ0NTU= } 2026-01-04T02:41:16.098569Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 16ms, session id ydb://session/3?node_id=2&id=NWRkYzEzMzktYjcyMGJkNDYtYWNmYzUyZjItZWY0NTdiNWI= } 2026-01-04T02:41:16.110198Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=OTdkNDM5OC00NDAxMGEzZS05YTAyNjNiZS1mYTMyOWVkOA==, ActorId: [2:7591330973886657109:2553], ActorState: ExecuteState, LegacyTraceId: 01ke3e2r8f2vxg91hxmg8ky5sb, Create QueryResponse for error on request, msg: status# TIMEOUT issues# { message: "Request timeout 19ms exceeded" severity: 1 }{ message: "Cancelling after 59ms in ExecuteState" severity: 1 } trace_id# 2026-01-04T02:41:16.164147Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 22ms, session id ydb://session/3?node_id=2&id=YjMyZmVjNS1kZjM2MmU3MC0xNjJkNTJkZS1lM2FjMjlhNQ== } 2026-01-04T02:41:16.201400Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 25ms, session id ydb://session/3?node_id=2&id=OTZjNmM0MTQtN2MxMzBhMWEtYTM0OGIwMzYtM2IzMmQ1ZjA= } 2026-01-04T02:41:16.238103Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 28ms, session id ydb://session/3?node_id=2&id=OGU1ODJlOTgtMzMzMzY2NmEtMmE0Y2FlOGItZGVjZTFlYzQ= } 2026-01-04T02:41:16.274852Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 31ms, session id ydb://session/3?node_id=2&id=ODBkOTA2NGUtNDk0NDFhYmEtNTVhZWQwMWEtNmEyNDk2NjE= } 2026-01-04T02:41:16.275219Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=ODAyYzVhZGYtY2M3NTVmNTEtZDQzNGY1ZjUtOTE1MmQ4OWI=, ActorId: [2:7591330973886657180:2583], ActorState: ExecuteState, LegacyTraceId: 01ke3e2rdzdd33z3v9geq07x69, Create QueryResponse for error on request, msg: status# TIMEOUT issues# { message: "Request timeout 34ms exceeded" severity: 1 }{ message: "Cancelling after 49ms in ExecuteState" severity: 1 } trace_id# 2026-01-04T02:41:16.370468Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 37ms, session id ydb://session/3?node_id=2&id=ZDgxN2Q2YTAtNzBiNGE2MmMtYmE0NmM5NDMtMjA4MTk3NmM= } 2026-01-04T02:41:16.419716Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 40ms, session id ydb://session/3?node_id=2&id=ZjBmOTQ5N2MtMWNhZDZjNzUtM2FjNTk2NjMtZjlhNGNjM2E= } 2026-01-04T02:41:16.470297Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 43ms, session id ydb://session/3?node_id=2&id=NzRmNjNiZTMtNjUwOTdjZDEtNWMzYjY3ZTgtODQ2MjFiOGY= } 2026-01-04T02:41:16.470572Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=ZDE1YjJkOGMtZjdkNTM4YTItNzdjZmU2ZjktODFjN2UwYjQ=, ActorId: [2:7591330973886657247:2612], ActorState: ExecuteState, LegacyTraceId: 01ke3e2rm53svvmzeq9deqb5qz, Create QueryResponse for error on request, msg: status# TIMEOUT issues# { message: "Request timeout 46ms exceeded" severity: 1 }{ message: "Cancelling after 46ms in ExecuteState" severity: 1 } trace_id# 2026-01-04T02:41:16.579399Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 49ms, session id ydb://session/3?node_id=2&id=ZTc1YTBlZTctODYwZDk5ZjEtMWNlNDRiYTEtNzViYTE3ZDc= } 2026-01-04T02:41:16.643696Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 52ms, session id ydb://session/3?node_id=2&id=MTVlNzBjNTMtNGUxZjQ5My1lZDUyMzg4Ni03NTEwNzNlNA== } 2026-01-04T02:41:16.702770Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 55ms, session id ydb://session/3?node_id=2&id=YmE2ZjM3YzItNWQ0NWRjZjMtZmY4OWY1YWQtOWU2NGUzYTQ= } 2026-01-04T02:41:16.768801Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 58ms, session id ydb://session/3?node_id=2&id=OGY5YjViOTQtN2JiNDMyMC04ZTQ5ZmEyZC1lZWI3YjNmZA== } 2026-01-04T02:41:16.837484Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 61ms, session id ydb://session/3?node_id=2&id=YWEwNTZkNmYtZGVmYWQyMjAtZTEyYzdmMDYtZTQ2M2FlOTc= } 2026-01-04T02:41:16.912333Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 64ms, session id ydb://session/3?node_id=2&id=MWQxOTQwODUtYWJmMGFlZjctMzdmNmI1YTUtZDc0OTk1NTM= } 2026-01-04T02:41:16.983372Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 67ms, session id ydb://session/3?node_id=2&id=MzU2NjI5YTktNTg5ZjI5OTctY2ZmNWU3ZmEtMjg2MDE2ZTI= } 2026-01-04T02:41:17.058133Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 70ms, session id ydb://session/3?node_id=2&id=ZjIzMmFkZjQtZWIzN2JiYjQtNjg4NDEwLTRhYzI3Mjdl } 2026-01-04T02:41:17.139517Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 73ms, session id ydb://session/3?node_id=2&id=OGNkZjg4OS0yMjMxZTk0LWYxZmQzOGNjLTk4ZDM3OWZm } 2026-01-04T02:41:17.223733Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 76ms, session id ydb://session/3?node_id=2&id=NGYwZDY3MjMtOTkxYTM5YTEtOGY4NDE4NjctYjUxNWI5Mg== } 2026-01-04T02:41:17.311522Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 79ms, session id ydb://session/3?node_id=2&id=OWUyN2M3ZTItNzY4MjY5YTUtNmE5ZTNjMzMtMTlmMDg2Mzg= } 2026-01-04T02:41:17.394525Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 82ms, session id ydb://session/3?node_id=2&id=Y2ZlMjdjNjEtOTE3NjE1NDctN2Y4NjE0YzAtYjcwZDhkMTc= } 2026-01-04T02:41:17.415067Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494477448, txId: 281474976710673] shutting down 2026-01-04T02:41:17.577693Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 88ms, session id ydb://session/3?node_id=2&id=ZWEzNzg5NWEtNDI4NzZhZDYtNDBiODZjZi1mNzU0Njdh } 2026-01-04T02:41:17.662859Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 91ms, session id ydb://session/3?node_id=2&id=Yjg2NWMyODgtYTRjMDNlMjItM2E4MmEwMDYtNDI0MjYzY2U= } 2026-01-04T02:41:17.765123Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 94ms, session id ydb://session/3?node_id=2&id=OTdiMTA3N2YtOTk2NmE4MGYtZTk4OTc1NjAtNTVhZTA5NTc= } 2026-01-04T02:41:17.817039Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494477854, txId: 281474976710676] shutting down 2026-01-04T02:41:17.976658Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 100ms, session id ydb://session/3?node_id=2&id=ZTcwODI0MmMtOTUyZDY5NmEtODM5ZDRiZS01YmZjZjQyMw== } 2026-01-04T02:41:18.087235Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 103ms, session id ydb://session/3?node_id=2&id=OWM0Y2QyN2MtYThhZGI4YTEtZGI2NTQwZWUtMjNmNWVmNTI= } 2026-01-04T02:41:18.129756Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494478162, txId: 281474976710678] shutting down 2026-01-04T02:41:18.304802Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 109ms, session id ydb://session/3?node_id=2&id=NGI2MjY1YzUtZWYxNGU0YWEtNDA0MjUwZGEtMjQ4MGIwZDE= } 2026-01-04T02:41:18.346618Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494478365, txId: 281474976710680] shutting down |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> GenericFederatedQuery::IcebergHadoopSaSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectConstant >> GenericFederatedQuery::IcebergHiveBasicSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveBasicSelectConstant >> GenericFederatedQuery::PostgreSQLOnPremSelectAll [GOOD] >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectConstant >> KqpQuery::CreateAsSelectTypes-NotNull-IsOlap >> GenericFederatedQuery::TestFailsOnIncorrectScriptExecutionFetchToken [GOOD] >> GenericFederatedQuery::TestConnectorNotConfigured >> GenericFederatedQuery::YdbManagedSelectAll [GOOD] >> GenericFederatedQuery::YdbManagedSelectConstant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_BadSourceId_Test [GOOD] Test command err: 2026-01-04T02:39:55.479318Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330625047787381:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:55.479716Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:55.532258Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:39:55.535171Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330625999804962:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:39:55.536538Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:39:55.542639Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:39:55.694367Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:55.696320Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:39:55.844874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:55.845864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:55.853058Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-04T02:39:55.856607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:55.856705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:55.865612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:55.867481Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:39:55.868473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:55.936602Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:55.949312Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:39:56.104299Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001c98/r3tmp/yandexK5Ol7R.tmp 2026-01-04T02:39:56.104322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001c98/r3tmp/yandexK5Ol7R.tmp 2026-01-04T02:39:56.106426Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001c98/r3tmp/yandexK5Ol7R.tmp 2026-01-04T02:39:56.106545Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:39:56.292269Z INFO: TTestServer started on Port 8033 GrpcPort 18764 PQClient connected to localhost:18764 2026-01-04T02:39:56.454462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:56.484778Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:56.540619Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:39:56.545339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:58.110178Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330638884707234:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:58.110286Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:58.110713Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330638884707243:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:58.110817Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:58.111201Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330638884707248:2309], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:39:58.121935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:58.140461Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330638884707250:2310], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2026-01-04T02:39:58.246718Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330638884707277:2182] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:58.643241Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330637932690750:2336], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:39:58.643261Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591330638884707291:2314], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:39:58.643879Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ODFhZjk2MWYtM2ZlZWQxZGMtYzExMzU1YmEtM2UyZGZhNjM=, ActorId: [2:7591330638884707232:2304], ActorState: ExecuteState, LegacyTraceId: 01ke3e0c4w6dfhw2hjpp5awrws, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:39:58.647608Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:39:58.653775Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZDFiMzg5ZC01ZWI2ZWFkZS0yMzIyYzVlZi01MDZiODk1Zg==, ActorId: [1:7591330637932690700:2329], ActorState: ExecuteState, LegacyTraceId: 01ke3e0c8s5nrrwjemc2mg0e75, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:39:58.654163Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:39:58.687688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but pr ... WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.915509Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.916224Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591330953460386167:2312], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.916310Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.917271Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591330956272404377:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.917494Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.921226Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:12.928068Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7591330953460386168:2177] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exists but creating right now (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateCreate)" severity: 1 } 2026-01-04T02:41:12.928167Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591330956272404417:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.928295Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.928718Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591330956272404419:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.928796Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:12.944624Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7591330956272404376:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T02:41:12.944747Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7591330953460386166:2311], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T02:41:12.965649Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:13.008282Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7591330957755353491:2184] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:13.033350Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:13.046123Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7591330960567371942:3178] txid# 281474976710665, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:13.049480Z node 12 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [12:7591330957755353505:2316], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:41:13.050003Z node 12 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=12&id=NzQ0ZjE2ZmUtOWVlODEwYjItNzM1ZGNkOTUtNDczYmM2NA==, ActorId: [12:7591330953460386125:2304], ActorState: ExecuteState, LegacyTraceId: 01ke3e2n6gfmy6cqdsvrr83jyh, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:41:13.050573Z node 12 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:41:13.082333Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [11:7591330960567371966:2357], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:5:17: Error: At function: KiReadTable!
:5:17: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Versions]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:41:13.082746Z node 11 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=11&id=NTExMDZiNGYtYWI2ZGEzYTgtZGM2N2UyY2UtYzMxYjRlOWM=, ActorId: [11:7591330956272404351:2331], ActorState: ExecuteState, LegacyTraceId: 01ke3e2n6h0g4hsfxe6d142kca, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:41:13.083128Z node 11 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 5 column: 17 } message: "At function: KiReadTable!" end_position { row: 5 column: 17 } severity: 1 issues { position { row: 5 column: 17 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Versions]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 5 column: 17 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:41:13.157741Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); 2026-01-04T02:41:13.413990Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7591330939092533805:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:13.414084Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:13.429923Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7591330936280516677:2170];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:13.430037Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Subcribe to ClusterTracker from [11:7591330960567372228:3384] === CheckClustersList. 
Ok Received TEvChooseError: Bad SourceId 2026-01-04T02:41:20.687142Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [11:7591330990632143689:3603] (SourceId=base64:a***, PreferedPartition=(NULL)) Start idle 2026-01-04T02:41:20.687181Z node 11 :PQ_PARTITION_CHOOSER INFO: partition_chooser_impl__abstract_chooser_actor.h:312: TPartitionChooser [11:7591330990632143689:3603] (SourceId=base64:a***, PreferedPartition=(NULL)) ReplyError: Bad SourceId |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> GenericFederatedQuery::IcebergHadoopBasicSelectAll [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectConstant >> GenericFederatedQuery::IcebergHiveTokenSelectAll [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectConstant >> HttpRequest::Probe [GOOD] >> KqpQuery::DecimalOutOfPrecisionBulk+EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecisionBulk-EnableParameterizedDecimal >> KqpLimits::ComputeActorMemoryAllocationFailure+useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink >> KqpQuery::DecimalOutOfPrecision+UseOltpSink-EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecision-UseOltpSink+EnableParameterizedDecimal >> TOlap::StoreStats [GOOD] >> TOlap::Decimal >> KqpStats::RequestUnitForBadRequestExecute [GOOD] >> KqpStats::RequestUnitForBadRequestExplicitPrepare >> GenericFederatedQuery::IcebergHiveBasicSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveBasicSelectCount >> GenericFederatedQuery::PostgreSQLOnPremSelectConstant [GOOD] >> GenericFederatedQuery::PostgreSQLSelectCount >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal-IsColumn >> GenericFederatedQuery::IcebergHadoopSaSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopSaSelectCount ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> HttpRequest::Probe [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:38:56.384049Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:38:56.497492Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:38:56.505679Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:38:56.506125Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:38:56.506294Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:38:56.938877Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:57.032867Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:57.033031Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:57.080581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:57.167468Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:38:57.825022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:57.825615Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:57.825675Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:57.825710Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:57.826323Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:57.895686Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:58.423636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:39:01.366542Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:39:01.373937Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:39:01.377016Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:39:01.406700Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:39:01.406830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:01.435146Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:39:01.437214Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:01.621990Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:39:01.622110Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:39:01.623671Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:01.624412Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:01.625015Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:01.626132Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:01.626433Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:01.626586Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:01.626873Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:01.627014Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:01.627108Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:39:01.654125Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:39:01.870832Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:01.941551Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:39:01.941659Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:39:01.972911Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:39:01.974250Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:39:01.974473Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:39:01.974535Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:39:01.974598Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:39:01.974664Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:39:01.974718Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:39:01.974772Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:39:01.975372Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:39:01.975684Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:39:01.978150Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:39:01.986821Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:39:01.986902Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:39:01.986988Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:39:01.999582Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:39:02.012108Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:39:02.016615Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:39:02.020520Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:39:02.020622Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:39:02.051617Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:39:02.052032Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:39:02.174312Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:39:02.214756Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:39:02.499146Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:39:02.501386Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:39:02.505102Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:39:02.508931Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:39:02.509039Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... etDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:39:36.884158Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:5401:4745], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720659 completed, doublechecking } 2026-01-04T02:39:37.038128Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:5481:4793], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:39:37.068114Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:5480:4792] txid# 281474976720660, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:39:37.376371Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:5502:4806]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:39:37.376704Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:39:37.376795Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:5504:4808] 2026-01-04T02:39:37.376872Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:5504:4808] 2026-01-04T02:39:37.377305Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5505:4809] 2026-01-04T02:39:37.377430Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5504:4808], server id = [2:5505:4809], tablet id = 72075186224037894, status = OK 2026-01-04T02:39:37.377550Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5505:4809], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:39:37.377622Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:39:37.377767Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:39:37.377884Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:5502:4806], StatRequests.size() = 1 2026-01-04T02:39:37.378036Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:40:33.542269Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5364:2462], ActorId: [2:5374:4731], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:40:33.542473Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5364:2462], ActorId: [2:5374:4731], Start read next stream part 2026-01-04T02:40:33.542635Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3dzqb3emy209km8a0dbxfb", SessionId: ydb://session/3?node_id=2&id=Nzc3YjU2MjEtZmYzMDQzYmQtYzg3NDUyZDEtNDRiOTM1ZDE=, Slow query, duration: 56.734223s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:40:33.543855Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 37090, txId: 18446744073709551615] shutting down 2026-01-04T02:40:33.543987Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5364:2462], 
ActorId: [2:5762:4981], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:40:33.546363Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:5364:2462], ActorId: [2:5762:4981], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-04T02:40:33.546448Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5364:2462], ActorId: [2:5762:4981], Start read next stream part 2026-01-04T02:40:33.549263Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5364:2462], ActorId: [2:5374:4731], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:40:33.549359Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5364:2462], ActorId: [2:5374:4731], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTJhODliYS01NWMxODdmZi0xOWQ5MTkxNC0xYzc5MGNmNg==, TxId: 2026-01-04T02:40:33.646878Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5786:5001]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:40:33.647216Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:40:33.647271Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5786:5001], StatRequests.size() = 1 2026-01-04T02:41:23.266157Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5364:2462], ActorId: [2:5762:4981], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:41:23.266323Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5364:2462], ActorId: [2:5762:4981], Start read next stream part 2026-01-04T02:41:23.267190Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3e1era1zmzw2wwa9yf3r3p", SessionId: ydb://session/3?node_id=2&id=NjgwNzNhYjItMjlmZDUyNjYtYWQ0YzVkY2UtNzIwYjczZTY=, Slow query, duration: 49.717167s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:41:23.270539Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:6005:5139], ActorId: [2:6006:5140], Starting query actor #1 [2:6007:5141] 2026-01-04T02:41:23.270619Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:6006:5140], ActorId: [2:6007:5141], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:41:23.273871Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 37190, txId: 18446744073709551615] shutting down 2026-01-04T02:41:23.274464Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:6006:5140], ActorId: [2:6007:5141], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZTEwYjdkMDAtNjNjMmYyODgtYmM2YjI4YjgtZWQxZGQ1Zg==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:41:23.275613Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5364:2462], ActorId: [2:5762:4981], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:41:23.275673Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5364:2462], ActorId: [2:5762:4981], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTNkODcxYTktNzY0NGVlODgtM2MwYzJjNDctZTIzMTBlZmE=, TxId: 2026-01-04T02:41:23.357116Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:6024:5155]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:41:23.357495Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:41:23.357550Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:6024:5155], StatRequests.size() = 1 2026-01-04T02:41:23.550913Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:6006:5140], ActorId: [2:6007:5141], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTEwYjdkMDAtNjNjMmYyODgtYmM2YjI4YjgtZWQxZGQ1Zg==, TxId: 2026-01-04T02:41:23.551024Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6006:5140], ActorId: [2:6007:5141], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTEwYjdkMDAtNjNjMmYyODgtYmM2YjI4YjgtZWQxZGQ1Zg==, TxId: 2026-01-04T02:41:23.551527Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:6005:5139], ActorId: [2:6006:5140], Got response [2:6007:5141] SUCCESS 2026-01-04T02:41:23.551999Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:41:23.589907Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:41:23.589995Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=GbUj, ActorId=[1:3734:3495] 2026-01-04T02:41:23.590983Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:6042:4147]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-04T02:41:23.591381Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:41:23.591455Z node 1 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:41:23.591777Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:41:23.591865Z node 1 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:41:23.591943Z node 1 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-04T02:41:23.604871Z node 1 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 Answer: '/Root/Database/Table[Value]=100' |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/hive/ut/unittest >> THiveTest::TestCreateExternalTablet [GOOD] Test command err: 2026-01-04T02:35:03.681118Z node 1 :BS_NODE DEBUG: {NW26@node_warden_impl.cpp:348} Bootstrap 2026-01-04T02:35:03.708950Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# false Origin# initial ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:35:03.709288Z node 1 :BS_NODE DEBUG: {NW04@node_warden_pdisk.cpp:235} StartLocalPDisk NodeId# 1 PDiskId# 1 Path# "SectorMap:0:3200" PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} Temporary# false 2026-01-04T02:35:03.710152Z node 1 :BS_NODE WARN: {NW89@node_warden_pdisk.cpp:124} Can't write new MockDevicesConfig to file Path# /Berkanavt/kikimr/testing/mock_devices.txt 2026-01-04T02:35:03.710578Z node 1 :BS_NODE DEBUG: {NW23@node_warden_vdisk.cpp:69} StartLocalVDiskActor SlayInFlight# false VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 DonorMode# false PDiskRestartInFlight# false PDisksWaitingToStart# false 2026-01-04T02:35:03.711717Z node 1 :BS_NODE DEBUG: {NW24@node_warden_vdisk.cpp:287} StartLocalVDiskActor done VDiskId# [0:1:0:0:0] VSlotId# 1:1:0 PDiskGuid# 1 2026-01-04T02:35:03.711781Z node 1 :BS_NODE DEBUG: {NW12@node_warden_proxy.cpp:36} StartLocalProxy GroupId# 0 HasGroupInfo# true GroupInfoGeneration# 1 2026-01-04T02:35:03.712826Z node 1 :BS_NODE DEBUG: {NW21@node_warden_pipe.cpp:23} EstablishPipe AvailDomainId# 0 PipeClientId# [1:28:2075] ControllerId# 72057594037932033 2026-01-04T02:35:03.712868Z node 1 :BS_NODE DEBUG: {NW20@node_warden_pipe.cpp:73} SendRegisterNode 2026-01-04T02:35:03.712969Z node 1 :BS_NODE DEBUG: {NW11@node_warden_impl.cpp:323} StartInvalidGroupProxy GroupId# 4294967295 2026-01-04T02:35:03.713126Z node 1 :BS_NODE DEBUG: 
{NW62@node_warden_impl.cpp:335} StartRequestReportingThrottler 2026-01-04T02:35:03.737032Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:184: Group# 0 TEvConfigureProxy received GroupGeneration# 1 IsLimitedKeyless# false Marker# DSP02 2026-01-04T02:35:03.737100Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:344: EnsureMonitoring Group# 0 IsLimitedKeyless# 0 fullIfPossible# 0 Marker# DSP58 2026-01-04T02:35:03.755831Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:36:2080] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:03.756111Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:37:2081] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:03.756275Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:38:2082] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:03.756439Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:39:2083] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:03.756592Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:40:2084] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:03.756744Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:41:2085] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:03.756993Z node 1 :BS_PROXY DEBUG: group_sessions.cpp:83: Group# 0 Actor# [1:27:2074] Create Queue# [1:42:2086] targetNodeId# 1 Marker# DSP01 2026-01-04T02:35:03.757019Z node 1 :BS_PROXY INFO: dsproxy_state.cpp:31: Group# 0 SetStateEstablishingSessions Marker# DSP03 2026-01-04T02:35:03.757114Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [1:28:2075] 2026-01-04T02:35:03.757198Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [1:28:2075] 2026-01-04T02:35:03.757247Z node 1 :BS_PROXY NOTICE: dsproxy_state.cpp:284: Group# 4294967295 HasInvalidGroupId# 1 Bootstrap -> StateEjected Marker# DSP42 2026-01-04T02:35:03.757299Z node 1 :BS_NODE DEBUG: {NWDC00@distconf.cpp:27} Bootstrap 2026-01-04T02:35:03.758237Z node 1 :BS_NODE DEBUG: {NWDC40@distconf_persistent_storage.cpp:25} TReaderActor bootstrap Paths# [] 2026-01-04T02:35:03.758732Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:03.794039Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-04T02:35:03.794122Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:35:03.814411Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435074 StorageConfigLoaded# false NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:35:03.814478Z node 1 :BS_NODE DEBUG: {NWDC32@distconf_persistent_storage.cpp:219} TEvStorageConfigLoaded Cookie# 0 NumItemsRead# 0 2026-01-04T02:35:03.814930Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 268639258 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 0 2026-01-04T02:35:03.815045Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037932033 Cookie: 0 ProxyOptions: SigNone} 
2026-01-04T02:35:03.815232Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [1:28:2075] 2026-01-04T02:35:03.815684Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 0} 2026-01-04T02:35:03.815739Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 1} 2026-01-04T02:35:03.815770Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037932033 Cookie: 2} 2026-01-04T02:35:03.815815Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:03.815892Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037936129] ::Bootstrap [1:32:2063] 2026-01-04T02:35:03.815917Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037936129] lookup [1:32:2063] 2026-01-04T02:35:03.822330Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037927937] ::Bootstrap [1:52:2092] 2026-01-04T02:35:03.822380Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037927937] lookup [1:52:2092] 2026-01-04T02:35:03.822446Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:03.822592Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037936129 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037936129 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:03.822711Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 131082 StorageConfigLoaded# true NodeListObtained# false PendingEvents.size# 1 2026-01-04T02:35:03.822740Z node 1 :BS_NODE DEBUG: {NWDC11@distconf_binding.cpp:8} TEvNodesInfo 2026-01-04T02:35:03.822812Z node 1 :BS_NODE DEBUG: {NWDC13@distconf_binding.cpp:26} ApplyNewNodeList NewNodeList# [[::1:12001/1:DC=1/M=1/R=1/U=1/]] 2026-01-04T02:35:03.838768Z node 1 :BS_NODE DEBUG: {NWDC18@distconf_binding.cpp:509} UpdateBound RefererNodeId# 1 NodeId# ::1:12001/1 Meta# {Fingerprint: "\363\365\\\016\336\205\240m2\241c\3010\003\261\342\227\n\267}" } 2026-01-04T02:35:03.839009Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037927937] queue send [1:52:2092] 2026-01-04T02:35:03.839066Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037932033 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:35:03.839200Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037927937 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72057594037927937 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:35:03.840450Z node 1 :BS_NODE DEBUG: {NWDC53@distconf.cpp:358} StateWaitForInit event Type# 2146435072 StorageConfigLoaded# true NodeListObtained# true PendingEvents.size# 1 2026-01-04T02:35:03.840506Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639258 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2026-01-04T02:35:03.840767Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 
72057594037932033 leader: [0:0:0] followers: 0 2026-01-04T02:35:03.840859Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72057594037932033] forward result error, check reconnect [1:28:2075] 2026-01-04T02:35:03.840905Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:569: TClient[72057594037932033] schedule retry [1:28:2075] 2026-01-04T02:35:03.855151Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037936129 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:35:03.855470Z node 1 :BS_NODE DEBUG: {NW18@node_warden_resource.cpp:51} ApplyServiceSet IsStatic# true Comprehensive# true Origin# distconf ServiceSet# {PDisks { NodeID: 1 PDiskID: 1 Path: "SectorMap:0:3200" PDiskGuid: 1 } VDisks { VDiskID { GroupID: 0 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VDiskLocation { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } Groups { GroupID: 0 GroupGeneration: 1 ErasureSpecies: 0 Rings { FailDomains { VDiskLocations { NodeID: 1 PDiskID: 1 VDiskSlotID: 0 PDiskGuid: 1 } } } } AvailabilityDomains: 0 } 2026-01-04T02:35:03.855646Z node 1 :BS_NODE DEBUG: {NW110@node_warden_pdisk.cpp:540} ApplyServiceSetPDisks PDiskId# 1 NewExpectedSlotCount# 0 OldExpectedSlotCount# 0 NewSlotSizeInUnits# 0 OldSlotSizeInUnits# 0 2026-01-04T02:35:03.855789Z node 1 :BS_NODE DEBUG: {NWDC15@distconf.cpp:427} StateFunc Type# 268639248 Sender# [1:12:2059] SessionId# [0:0:0] Cookie# 0 2026-01-04T02:35:03.856036Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 0} 2026-01-04T02:35:03.856085Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 1} 2026-01-04T02:35:03.856112Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037936129 Cookie: 2} 2026-01-04T02:35:03.856242Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72057594037927937 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:35:03.861995Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 0} 2026-01-04T02:35:03.862057Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 1} 2026-01-04T02:35:03.862101Z node 1 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72057594037927937 Cookie: 2} 2026-01-04T02:35:03.862158Z node 1 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72057594037936129 ClusterStateGeneration: 0 ClusterStateGuid: 0} 202 ... 
153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037927937] connected with status OK role: Leader [153:271:2264] 2026-01-04T02:41:19.700324Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037927937] send queued [153:271:2264] 2026-01-04T02:41:19.700384Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037927937] push event to server [153:271:2264] 2026-01-04T02:41:19.700472Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72057594037927937] shutdown pipe due to pending shutdown request [153:271:2264] 2026-01-04T02:41:19.700531Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594037927937] notify reset [153:271:2264] 2026-01-04T02:41:19.700675Z node 153 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:141: [72057594037927937] HandleSend Sender# [153:270:2263] EventType# 268697601 2026-01-04T02:41:19.700922Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} queued, type NKikimr::NHive::TTxCreateTablet 2026-01-04T02:41:19.701002Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:41:19.701700Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} hope 1 -> done Change{4, redo 1157b alter 0b annex 0, ~{ 14, 0, 1, 2 } -{ }, 0 gb} 2026-01-04T02:41:19.701790Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:4} Tx{4, NKikimr::NHive::TTxCreateTablet} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:41:19.701952Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72057594037932033] ::Bootstrap [153:310:2290] 2026-01-04T02:41:19.701983Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72057594037932033] lookup [153:310:2290] 2026-01-04T02:41:19.702048Z node 153 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72057594037932033 entry.State: StNormal leader: [153:93:2123] followers: 0 ev: {EvForward TabletID: 72057594037932033 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:41:19.702099Z node 153 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 153 selfDC leaderDC 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 72057594037932033 followers: 0 countLeader 1 allowFollowers 0 winner: [153:93:2123] 2026-01-04T02:41:19.702180Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:148: TClient[72057594037932033] queue send [153:310:2290] 2026-01-04T02:41:19.702212Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[72057594037932033] forward result local node, try to connect [153:310:2290] 2026-01-04T02:41:19.702248Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[72057594037932033]::SendEvent [153:310:2290] 2026-01-04T02:41:19.702310Z node 153 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [72057594037932033] Accept Connect Originator# [153:310:2290] 2026-01-04T02:41:19.702419Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:308: TClient[72057594037932033] connected with status OK role: Leader [153:310:2290] 2026-01-04T02:41:19.702451Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72057594037932033] send queued [153:310:2290] 2026-01-04T02:41:19.702476Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037932033] push event to server [153:310:2290] 2026-01-04T02:41:19.702517Z node 153 :PIPE_SERVER DEBUG: 
tablet_pipe_server.cpp:141: [72057594037932033] HandleSend Sender# [153:280:2269] EventType# 268637702 2026-01-04T02:41:19.702636Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} queued, type NKikimr::NBsController::TBlobStorageController::TTxSelectGroups 2026-01-04T02:41:19.702719Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:41:19.702922Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} hope 1 -> done Change{20, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T02:41:19.703003Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037932033:2:9} Tx{28, NKikimr::NBsController::TBlobStorageController::TTxSelectGroups} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:41:19.703303Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} queued, type NKikimr::NHive::TTxUpdateTabletGroups 2026-01-04T02:41:19.703385Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:41:19.703787Z node 153 :HIVE NOTICE: tx__update_tablet_groups.cpp:326: HIVE#72057594037927937 THive::TTxUpdateTabletGroups{137061022783904}(72075186224037888)::Execute - TryToBoot was not successfull 2026-01-04T02:41:19.703918Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} hope 1 -> done Change{5, redo 698b alter 0b annex 0, ~{ 2, 1, 3 } -{ }, 0 gb} 2026-01-04T02:41:19.704003Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} Tx{5, NKikimr::NHive::TTxUpdateTabletGroups} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:41:19.715138Z node 153 :BS_PROXY_PUT INFO: dsproxy_put.cpp:665: [8729fbeaec2f6015] bootstrap ActorId# [153:313:2293] Group# 0 BlobCount# 1 BlobIDs# [[72057594037927937:2:4:0:0:698:0]] HandleClass# TabletLog Tactic# MinLatency RestartCounter# 0 Marker# BPP13 2026-01-04T02:41:19.715318Z node 153 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:42: [8729fbeaec2f6015] Id# [72057594037927937:2:4:0:0:698:0] restore disk# 0 part# 0 situation# ESituation::Unknown Marker# BPG51 2026-01-04T02:41:19.715384Z node 153 :BS_PROXY_PUT DEBUG: dsproxy_strategy_restore.h:65: [8729fbeaec2f6015] restore Id# [72057594037927937:2:4:0:0:698:0] optimisticReplicas# 1 optimisticState# EBS_FULL Marker# BPG55 2026-01-04T02:41:19.715459Z node 153 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:329: [8729fbeaec2f6015] partPlacement record partSituation# ESituation::Unknown to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG33 2026-01-04T02:41:19.715511Z node 153 :BS_PROXY_PUT DEBUG: dsproxy_strategy_base.cpp:350: [8729fbeaec2f6015] Sending missing VPut part# 0 to# 0 blob Id# [72057594037927937:2:4:0:0:698:1] Marker# BPG32 2026-01-04T02:41:19.715665Z node 153 :BS_PROXY DEBUG: group_sessions.h:193: Send to queueActorId# [153:36:2080] NKikimr::TEvBlobStorage::TEvVPut# {ID# [72057594037927937:2:4:0:0:698:1] FDS# 698 HandleClass# TabletLog {MsgQoS ExtQueueId# PutTabletLog} DataSize# 0 Data# } cookie# 0 2026-01-04T02:41:19.716984Z node 153 :BS_PROXY_PUT DEBUG: dsproxy_put.cpp:265: [8729fbeaec2f6015] received {EvVPutResult Status# OK ID# 
[72057594037927937:2:4:0:0:698:1] {MsgQoS MsgId# { SequenceId: 1 MsgId: 18 } Cost# 85496 ExtQueueId# PutTabletLog IntQueueId# IntPutLog Window# { Status# Processed ActualWindowSize# 0 MaxWindowSize# 150000000 ExpectedMsgId# { SequenceId: 1 MsgId: 19 }}}} from# [0:1:0:0:0] Marker# BPP01 2026-01-04T02:41:19.717104Z node 153 :BS_PROXY_PUT DEBUG: dsproxy_put_impl.cpp:72: [8729fbeaec2f6015] Result# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} GroupId# 0 Marker# BPP12 2026-01-04T02:41:19.717170Z node 153 :BS_PROXY_PUT INFO: dsproxy_put.cpp:499: [8729fbeaec2f6015] SendReply putResult# TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} ResponsesSent# 0 PutImpl.Blobs.size# 1 Last# true Marker# BPP21 2026-01-04T02:41:19.717362Z node 153 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594037927937:2:4:0:0:698:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0.998955} 2026-01-04T02:41:19.717502Z node 153 :TABLET_EXECUTOR DEBUG: Leader{72057594037927937:2:5} commited cookie 1 for step 4 2026-01-04T02:41:19.717883Z node 153 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:41:19.718021Z node 153 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-04T02:41:19.718084Z node 153 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-04T02:41:19.718131Z node 153 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-04T02:41:19.718186Z node 153 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:41:19.718265Z node 153 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:41:19.718325Z node 153 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:41:19.718772Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[72075186224037888] ::Bootstrap [153:317:2296] 2026-01-04T02:41:19.718860Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[72075186224037888] lookup [153:317:2296] 2026-01-04T02:41:19.719112Z node 153 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 72075186224037888 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 72075186224037888 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:41:19.719286Z node 153 :STATESTORAGE DEBUG: statestorage_proxy.cpp:287: ProxyRequest::HandleInit ringGroup:0 ev: {EvLookup TabletID: 72075186224037888 Cookie: 0 ProxyOptions: SigNone} 2026-01-04T02:41:19.719472Z node 153 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 2} 2026-01-04T02:41:19.719608Z node 153 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: 
ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:41:19.719692Z node 153 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 0} 2026-01-04T02:41:19.719742Z node 153 :STATESTORAGE DEBUG: statestorage_replica.cpp:185: Replica::Handle ev: {EvReplicaLookup TabletID: 72075186224037888 Cookie: 1} 2026-01-04T02:41:19.719801Z node 153 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:41:19.719869Z node 153 :STATESTORAGE DEBUG: statestorage_proxy.cpp:399: ProxyRequest::HandleLookup ringGroup:0 ev: {EvReplicaInfo Status: 1 TabletID: 72075186224037888 ClusterStateGeneration: 0 ClusterStateGuid: 0} 2026-01-04T02:41:19.719981Z node 153 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 72075186224037888 leader: [0:0:0] followers: 0 2026-01-04T02:41:19.720113Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:170: TClient[72075186224037888] forward result error, check reconnect [153:317:2296] 2026-01-04T02:41:19.720192Z node 153 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[72075186224037888] connect failed [153:317:2296] >> KqpParams::DefaultParameterValue [GOOD] >> KqpParams::Decimal-QueryService-UseSink >> KqpTypes::QuerySpecialTypes [GOOD] >> KqpTypes::SelectNull >> KqpQuery::Now [GOOD] >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows >> KqpExplain::PrecomputeRange [GOOD] >> KqpExplain::PureExpr >> KqpQuery::PreparedQueryInvalidate [GOOD] >> KqpQuery::OlapTemporary >> GenericFederatedQuery::IcebergHadoopTokenSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenSelectCount >> TOlap::Decimal [GOOD] >> TOlap::MoveTableStats >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] >> KqpParams::RowsList [GOOD] >> KqpParams::ParameterTypes >> KqpExplain::Explain [GOOD] >> KqpExplain::ExplainDataQuery |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/mind/hive/ut/unittest >> KqpLimits::BigParameter [GOOD] >> KqpLimits::AffectedShardsLimit >> ColumnStatistics::CountMinSketchStatistics [GOOD] >> KqpQuery::CreateAsSelectTypes-NotNull-IsOlap [GOOD] >> KqpQuery::CreateAsSelectTypes+NotNull-IsOlap >> GenericFederatedQuery::YdbManagedSelectConstant [GOOD] >> GenericFederatedQuery::YdbSelectCount >> GenericFederatedQuery::IcebergHiveTokenSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHiveTokenSelectCount >> GenericFederatedQuery::IcebergHadoopBasicSelectConstant [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicSelectCount >> KqpExplain::UpdateSecondaryConditional+UseSink [GOOD] >> KqpExplain::UpdateOnSecondary+UseSink >> KqpParams::ImplicitParameterTypes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CreateTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:123:2058] recipient: 
[1:116:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:123:2058] recipient: [1:116:2145] Leader for TabletID 72057594046316545 is [1:126:2150] sender: [1:131:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:115:2144] Leader for TabletID 72057594046447617 is [1:139:2160] sender: [1:141:2058] recipient: [1:116:2145] 2026-01-04T02:39:46.997447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:46.997560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:46.997598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:46.997697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:46.997748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:46.997794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:46.997871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:46.997932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:46.998701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:46.998947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:47.113554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:39:47.113618Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:47.114526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:139:2160] sender: [1:187:2058] recipient: [1:15:2062] 2026-01-04T02:39:47.126883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:47.126965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:47.127059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:47.130607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:47.131188Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:47.131814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:47.132173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:47.134598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:47.134716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:47.135505Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:47.135557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:47.135670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:47.135701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:47.135742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:47.135931Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:47.254506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:47.255188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:47.255295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:47.255433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:47.255530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:47.255615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:47.255713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:47.255795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:47.255934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:47.255977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:47.256032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:47.256086Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:47.256137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "m ... 02:41:26.782159Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 3 PathOwnerId: 72057594046678944, cookie: 1002 2026-01-04T02:41:26.782192Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 1002 2026-01-04T02:41:26.782226Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1002, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 3 2026-01-04T02:41:26.782251Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:41:26.782306Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1002, ready parts: 0/1, is published: true 2026-01-04T02:41:26.783563Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000039 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 822 } } CommitVersion { Step: 5000039 TxId: 1002 } 2026-01-04T02:41:26.783598Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2026-01-04T02:41:26.783693Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000039 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 822 } } CommitVersion { Step: 5000039 TxId: 1002 } 2026-01-04T02:41:26.783778Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409546 Status: COMPLETE TxId: 1002 Step: 5000039 OrderId: 1002 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409546 CpuTimeUsec: 822 } } CommitVersion { Step: 5000039 TxId: 1002 } debug: 
NTableState::TProposedWaitParts operationId# 1002:0 2026-01-04T02:41:26.784672Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 735 RawX2: 309237648033 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2026-01-04T02:41:26.784704Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1002, tablet: 72075186233409546, partId: 0 2026-01-04T02:41:26.784780Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1002:0, at schemeshard: 72057594046678944, message: Source { RawX1: 735 RawX2: 309237648033 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2026-01-04T02:41:26.784820Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:41:26.784881Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 1002:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 735 RawX2: 309237648033 } Origin: 72075186233409546 State: 2 TxId: 1002 Step: 0 Generation: 2 2026-01-04T02:41:26.784924Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1002:0, shardIdx: 72057594046678944:1, shard: 72075186233409546, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:41:26.784952Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1002:0, at schemeshard: 72057594046678944 2026-01-04T02:41:26.784983Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1002:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:41:26.785017Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1002:0 129 -> 240 2026-01-04T02:41:26.786353Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2026-01-04T02:41:26.786416Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1002 2026-01-04T02:41:26.787578Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2026-01-04T02:41:26.787681Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1002:0, at schemeshard: 72057594046678944 2026-01-04T02:41:26.787752Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1002:0, at schemeshard: 72057594046678944 2026-01-04T02:41:26.787781Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1002:0 ProgressState 2026-01-04T02:41:26.787871Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1002:0 progress is 
1/1 2026-01-04T02:41:26.787897Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2026-01-04T02:41:26.787926Z node 72 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1002:0 progress is 1/1 2026-01-04T02:41:26.787948Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2026-01-04T02:41:26.787977Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1002, ready parts: 1/1, is published: true 2026-01-04T02:41:26.788004Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1002 ready parts: 1/1 2026-01-04T02:41:26.788035Z node 72 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1002:0 2026-01-04T02:41:26.788061Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1002:0 2026-01-04T02:41:26.788164Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 TestModificationResult got TxId: 1002, wait until txId: 1002 TestWaitNotification wait txId: 1002 2026-01-04T02:41:26.790606Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1002: send EvNotifyTxCompletion 2026-01-04T02:41:26.790651Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1002 2026-01-04T02:41:26.790952Z node 72 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1002, at schemeshard: 72057594046678944 2026-01-04T02:41:26.791037Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1002: got EvNotifyTxCompletionResult 2026-01-04T02:41:26.791070Z node 72 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1002: satisfy waiter [72:812:2784] TestWaitNotification: OK eventTxId 1002 2026-01-04T02:41:26.791471Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:41:26.791663Z node 72 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTable" took 228us result status StatusSuccess 2026-01-04T02:41:26.792178Z node 72 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTable" PathDescription { Self { Name: "TTLEnabledTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1002 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: 
false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpQuery::DecimalOutOfPrecisionBulk-EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecision-UseOltpSink-EnableParameterizedDecimal >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> KqpLimits::ComputeActorMemoryAllocationFailure-useSink [GOOD] >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink >> KqpQuery::DecimalOutOfPrecision-UseOltpSink+EnableParameterizedDecimal [GOOD] >> KqpQuery::DecimalOutOfPrecision+UseOltpSink+EnableParameterizedDecimal >> KqpLimits::KqpMkqlMemoryLimitException [GOOD] >> KqpLimits::LargeParametersAndMkqlFailure >> KqpLimits::QSReplySizeEnsureMemoryLimits+useSink [GOOD] >> KqpLimits::QSReplySizeEnsureMemoryLimits-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchStatistics [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:36.024971Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:36.149973Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:36.160096Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:36.160488Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:36.160659Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:36.566346Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:36.659547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:36.659704Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:36.706773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:36.790389Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:37.417504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:37.418167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:37.418215Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:37.418249Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:37.418792Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:37.483729Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:38.012181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:32:41.295651Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:41.301867Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:32:41.304666Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:41.328116Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:32:41.328241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:41.355587Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:41.357587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:41.529028Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:41.529163Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:41.531405Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:41.532183Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:41.532772Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:41.533920Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:41.534246Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:41.534378Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:41.534632Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:41.534847Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:41.534986Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:41.563594Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:41.806802Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:41.882611Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:32:41.882719Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:32:41.917226Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:32:41.918780Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:32:41.919032Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:32:41.919102Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:32:41.919160Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:32:41.919255Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:32:41.919327Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:32:41.919386Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:32:41.920131Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:32:41.920530Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:32:41.923535Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:32:41.942832Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:32:41.942914Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:32:41.943007Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:41.958734Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:41.971123Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:32:41.975168Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:32:41.979298Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:41.979418Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:42.014284Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:32:42.014743Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:32:42.137619Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:32:42.361154Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:32:42.634212Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:42.636818Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:42.641624Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:42.646767Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:32:42.646909Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... ode 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:33:04.044419Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4619:4107], StatRequests.size() = 1 2026-01-04T02:33:04.044594Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:37:11.223029Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4474:2462], ActorId: [2:4484:4032], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:37:11.223348Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4474:2462], ActorId: [2:4484:4032], Start read next stream part 2026-01-04T02:37:11.224212Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4474:2462], ActorId: [2:4824:4233], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:37:11.224562Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3dkq1n7rdmv0tyrrpx0s31", SessionId: ydb://session/3?node_id=2&id=NTI4ODc3ZWMtNDZmYzcxZmMtNzUxNzVkZDItNTI1ZGJkMGY=, Slow query, duration: 247.934552s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-04T02:37:11.229045Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32120, txId: 18446744073709551615] shutting down 2026-01-04T02:37:11.229195Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4474:2462], ActorId: [2:4824:4233], RunStreamQuery with text: $f1 = ($p0,$p1,$p2) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::EWHMerge, StatisticsInternal::EWHFinalize, StatisticsInternal::EWHSerialize, StatisticsInternal::EWHDeserialize, ) }; $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(LowCardinalityString,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0l,9l)),AGGREGATE_BY(Float,$f0(4096,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(4096,8)),AGGREGATE_BY(Date,$f1(32,10000ul,10099ul)),AGGREGATE_BY(NearNumericLimits,$f0(4096,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808l,9223372036854775807l)) FROM `/Root/Database/Table1` 2026-01-04T02:37:11.229319Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4474:2462], ActorId: [2:4824:4233], Start read next stream part 2026-01-04T02:37:11.232941Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4474:2462], ActorId: [2:4484:4032], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:37:11.233017Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4474:2462], ActorId: [2:4484:4032], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTUzNTNmZmUtOTZiOWI5NjctZWIxYTA1ZmItOTdhZWNlZjk=, TxId: 2026-01-04T02:37:11.487770Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4851:4256]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:37:11.488044Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:37:11.488097Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4851:4256], StatRequests.size() = 1 2026-01-04T02:41:26.169069Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: 
[2:4474:2462], ActorId: [2:4824:4233], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:41:26.169298Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4474:2462], ActorId: [2:4824:4233], Start read next stream part 2026-01-04T02:41:26.179816Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3dv95x22ywwdx7my3k52vj", SessionId: ydb://session/3?node_id=2&id=ZWIyNjVmNDctODJjNjI2OGMtOWE2YzBkNzAtZTkyNTFkZWU=, Slow query, duration: 254.945118s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f1 = ($p0,$p1,$p2) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::EWHMerge,\n StatisticsInternal::EWHFinalize,\n StatisticsInternal::EWHSerialize,\n StatisticsInternal::EWHDeserialize,\n)\n};\n$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(LowCardinalityString,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0l,9l)),AGGREGATE_BY(Float,$f0(4096,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(4096,8)),AGGREGATE_BY(Date,$f1(32,10000ul,10099ul)),AGGREGATE_BY(NearNumericLimits,$f0(4096,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808l,9223372036854775807l)) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-04T02:41:26.182807Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5028:4358], ActorId: [2:5029:4359], Starting query actor #1 [2:5030:4360] 2026-01-04T02:41:26.182885Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5029:4359], ActorId: [2:5030:4360], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:41:26.185428Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32180, txId: 18446744073709551615] shutting down 2026-01-04T02:41:26.186829Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5029:4359], ActorId: [2:5030:4360], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ODdlOThiOTMtZTdiN2IxMzEtZDUzMjMyOGYtYmM5ZDk2NjU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:41:26.189073Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4474:2462], ActorId: [2:4824:4233], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:41:26.189170Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4474:2462], ActorId: [2:4824:4233], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmNlZTFhNDgtZjg0ZDZkOWUtZjA3MTZjZi04MzliZTk2NA==, TxId: 2026-01-04T02:41:26.259839Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5047:4374]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:41:26.260199Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:41:26.260244Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:5047:4374], StatRequests.size() = 1 2026-01-04T02:41:26.472078Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5029:4359], ActorId: [2:5030:4360], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODdlOThiOTMtZTdiN2IxMzEtZDUzMjMyOGYtYmM5ZDk2NjU=, TxId: 2026-01-04T02:41:26.472157Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5029:4359], ActorId: [2:5030:4360], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODdlOThiOTMtZTdiN2IxMzEtZDUzMjMyOGYtYmM5ZDk2NjU=, TxId: 2026-01-04T02:41:26.472506Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5028:4358], ActorId: [2:5029:4359], Got response [2:5030:4360] SUCCESS 2026-01-04T02:41:26.472896Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:41:26.510046Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:41:26.510141Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3411:3459] 2026-01-04T02:41:26.511164Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:5066:4384]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-04T02:41:26.511610Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-04T02:41:26.511685Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:41:26.512032Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-04T02:41:26.512086Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 4 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:41:26.512131Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:41:26.512410Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-04T02:41:26.517847Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-04T02:41:26.519053Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 2026-01-04T02:41:26.519250Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> TOlap::MoveTableStats [GOOD] >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal-IsColumn [GOOD] >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal-IsColumn >> GenericFederatedQuery::TestConnectorNotConfigured [GOOD] >> KqpQuery::QueryClientTimeout >> GenericFederatedQuery::IcebergHadoopSaSelectCount [GOOD] >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown >> GenericFederatedQuery::IcebergHiveBasicSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown >> GenericFederatedQuery::PostgreSQLSelectCount [GOOD] >> GenericFederatedQuery::PostgreSQLFilterPushdown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_olap/unittest >> TOlap::MoveTableStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:41:04.741392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:41:04.741473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:41:04.741519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, 
StatsMaxExecuteTime# 0.010000s 2026-01-04T02:41:04.741556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:41:04.741584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:41:04.741603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:41:04.741640Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:41:04.741688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:41:04.742352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:41:04.742572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:41:04.843572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:41:04.843649Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:04.859916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:41:04.860309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:41:04.861399Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:41:04.871051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:41:04.871311Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:41:04.872795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:41:04.873107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:41:04.877062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:04.877319Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:41:04.880065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:41:04.880131Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:41:04.880271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:41:04.880325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not 
a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:41:04.880551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:41:04.880771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:41:05.037750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.038852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.039003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.039087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.039155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.039254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.039329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: 
EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.039441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.039527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.039596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.039678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.039758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.039816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.039938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:41:05.040031Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 28: Part operation is done id#203:0 progress is 1/1 2026-01-04T02:41:30.154626Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 203 ready parts: 1/1 2026-01-04T02:41:30.154667Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#203:0 progress is 1/1 2026-01-04T02:41:30.154709Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 203 ready parts: 1/1 2026-01-04T02:41:30.154766Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 203, ready parts: 1/1, is published: true 2026-01-04T02:41:30.154856Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [3:876:2835] message: TxId: 203 2026-01-04T02:41:30.154913Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 203 ready parts: 1/1 2026-01-04T02:41:30.154957Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 203:0 2026-01-04T02:41:30.155009Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 203:0 2026-01-04T02:41:30.155162Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-01-04T02:41:30.155205Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:41:30.155605Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:41:30.155656Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-04T02:41:30.155745Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:41:30.160639Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:41:30.160722Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 203: got EvNotifyTxCompletionResult 2026-01-04T02:41:30.160779Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 203: satisfy waiter [3:1053:3002] TestWaitNotification: OK eventTxId 203 2026-01-04T02:41:30.161597Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false 
ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:41:30.161794Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 229us result status StatusPathDoesNotExist 2026-01-04T02:41:30.162017Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ColumnTable\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ColumnTable" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:41:30.162837Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: PathId: 41 SchemeshardId: 72057594046678944 Options { }, at schemeshard: 72057594046678944 2026-01-04T02:41:30.163135Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:44: Tablet 72057594046678944 describe pathId 41 took 304us result status StatusSuccess 2026-01-04T02:41:30.163637Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MovedColumnTable" PathDescription { Self { Name: "MovedColumnTable" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 203 CreateStep: 5000043 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ColumnTableVersion: 2 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1225216 RowCount: 100000 IndexSize: 0 LastAccessTime: 226 LastUpdateTime: 226 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 1 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 
MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:41:30.177781Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 1225216 rowCount 100000 cpuUsage 0 2026-01-04T02:41:30.178007Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:279: PersistSingleStats for pathId [OwnerId: 72057594046678944, LocalPathId: 39], tabletId 72075186233409546, followerId 0: unknown pathId 2026-01-04T02:41:30.188521Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-04T02:41:30.510756Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/MovedColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:41:30.511118Z node 3 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/MovedColumnTable" took 416us result status StatusSuccess 2026-01-04T02:41:30.511646Z node 3 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/MovedColumnTable" PathDescription { Self { Name: "MovedColumnTable" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 203 CreateStep: 5000043 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 ColumnTableVersion: 2 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1225216 RowCount: 100000 IndexSize: 0 LastAccessTime: 226 LastUpdateTime: 226 ImmediateTxCompleted: 1 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 1 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_olap/unittest >> GenericFederatedQuery::IcebergHadoopTokenSelectCount [GOOD] >> KqpQueryDiscard::DiscardSelectSupport [GOOD] >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown >> KqpQueryDiscard::DiscardSelectEnsureExecuted >> KqpQuery::YqlSyntaxV0 >> KqpStats::RequestUnitForBadRequestExplicitPrepare [GOOD] >> KqpStats::RequestUnitForSuccessExplicitPrepare >> TPQTest::TestWritePQ [GOOD] >> TPQTest::TestWriteOffsetWithBigMessage ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::TestConnectorNotConfigured [GOOD] Test command err: Trying to start YDB, gRPC: 64332, MsgBus: 3293 2026-01-04T02:41:13.496366Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330958236124122:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:13.496447Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:13.826145Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:13.843916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:13.844069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:13.860526Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded 2026-01-04T02:41:13.863793Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330958236124092:2081] 1767494473491683 != 1767494473491686 2026-01-04T02:41:13.880909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:14.011776Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:14.060615Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:14.060661Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:14.060706Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:14.060787Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:14.507532Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:14.671656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:41:14.672135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:41:14.674302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:14.675130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:41:14.677515Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767494474725, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:41:14.679533Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7591330958236124626:2250] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-04T02:41:14.679694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-04T02:41:14.679744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2026-01-04T02:41:14.680014Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330958236124066:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.680115Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330958236124060:2049] Update description: path# /Root, 
pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.680260Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330958236124063:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.680401Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330958236124775:2288][/Root] Path was updated to new version: owner# [1:7591330958236124765:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:14.680445Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330958236124543:2204][/Root] Path was updated to new version: owner# [1:7591330958236124382:2120], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:14.680556Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7591330958236124626:2250] Ack update: ack to# [1:7591330958236124459:2154], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-04T02:41:14.680755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-01-04T02:41:14.680767Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330958236124779:2289][/Root] Path was updated to new version: owner# [1:7591330958236124766:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } Trying to start YDB, gRPC: 3732, MsgBus: 22937 2026-01-04T02:41:16.844121Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330974272640707:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:16.844509Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:16.855004Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:16.913099Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:16.914527Z node 2 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591330974272640599:2081] 1767494476840592 != 1767494476840595 2026-01-04T02:41:16.960958Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:16.961041Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:16.979331Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:17.006933Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:17.006966Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:17.006974Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:17.007057Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:17.053974Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:17.362014Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:41:17.362357Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:41:17.362394Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:17.363044Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:41:17.365358Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767494477413, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:41:17.366228Z node 2 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [2:7591330974272641127:2250] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-04T02:41:17.366303Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-04T02:41:17.366351Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2026-01-04T02:41:17.366422Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7591330974272640567:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:17.366528Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: 
[2:7591330974272640570:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:17.366529Z node 2 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [2:7591330974272640573:2055 ... _CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:24.419671Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:24.419760Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:24.474072Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:24.854553Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:24.890595Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.940067Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:25.132906Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:25.194470Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:25.209827Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:27.583658Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331018100992837:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:27.583752Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:27.584060Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331018100992847:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:27.584084Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:28.056555Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:28.099384Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:28.138886Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:28.174567Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:28.210838Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:28.283002Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:28.322810Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:28.372475Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:28.478143Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331022395961023:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:28.478219Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:28.478240Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331022395961028:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:28.478511Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331022395961030:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:28.478551Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:28.482476Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:28.493986Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591331022395961031:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:28.560982Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591331022395961085:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:29.188146Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591331005216089093:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:29.188227Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:30.355601Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSecret, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_secret.cpp:267) 2026-01-04T02:41:30.364011Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710673, at schemeshard: 72057594046644480 2026-01-04T02:41:30.368608Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) 2026-01-04T02:41:30.471447Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7591331030985896070:2542], status: GENERIC_ERROR, issues:
: Error: Table metadata loading, code: 1050
:2:17: Error: Unsupported. Failed to load metadata for table: /Root/external_data_source.[example_1] data source generic doesn't exist, please contact internal support 2026-01-04T02:41:30.472186Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=M2UwNGNlZDUtMjQwNGMwNmItZTFkNTJlY2ItZDRjNWM1YWM=, ActorId: [4:7591331030985896068:2541], ActorState: ExecuteState, LegacyTraceId: 01ke3e368tc8p9s8w1m4ntnx7j, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Table metadata loading" issue_code: 1050 severity: 1 issues { position { row: 2 column: 17 } message: "Unsupported. Failed to load metadata for table: /Root/external_data_source.[example_1] data source generic doesn\'t exist, please contact internal support" end_position { row: 2 column: 17 } severity: 1 } } tx_id# trace_id# |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> KqpTypes::SelectNull [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64-IsColumn |91.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQuery::CreateAsSelectTypes+NotNull-IsOlap [GOOD] >> KqpQuery::CreateAsSelectTypes-NotNull+IsOlap >> KqpExplain::PureExpr [GOOD] >> KqpExplain::ReadTableRangesFullScan >> KqpQuery::DecimalOutOfPrecision-UseOltpSink-EnableParameterizedDecimal [GOOD] >> KqpQuery::DeleteWhereInSubquery |91.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} |91.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_olap/test-results/unittest/{meta.json ... results_accumulator.log} >> GenericFederatedQuery::YdbSelectCount [GOOD] >> GenericFederatedQuery::YdbFilterPushdown >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService+useSink [GOOD] >> KqpLimits::CancelAfterRwTx+useSink >> KqpLimits::ComputeActorMemoryAllocationFailureQueryService-useSink [GOOD] >> KqpLimits::DatashardProgramSize+useSink >> GenericFederatedQuery::IcebergHadoopBasicSelectCount [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown >> GenericFederatedQuery::IcebergHiveTokenSelectCount [GOOD] >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown >> KqpParams::ParameterTypes [GOOD] >> KqpQuery::CreateAsSelectBadTypes+IsOlap >> KqpExplain::UpdateConditional-UseSink >> KqpQuery::GenericQueryNoRowsLimitLotsOfRows [GOOD] >> KqpQuery::NoEvaluate >> KqpQuery::DecimalOutOfPrecision+UseOltpSink+EnableParameterizedDecimal [GOOD] >> KqpQuery::CurrentUtcTimestamp >> KqpQuery::OlapTemporary [GOOD] >> KqpQuery::OlapCreateAsSelect_Simple >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotWritten_Test [GOOD] >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test >> KqpStats::JoinNoStatsYql >> KqpLimits::AffectedShardsLimit [GOOD] >> KqpLimits::CancelAfterRoTx >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal-IsColumn [GOOD] >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal+IsColumn >> KqpParams::Decimal-QueryService-UseSink [GOOD] >> KqpParams::Decimal+QueryService-UseSink >> KqpParams::ImplicitParameterTypes [GOOD] >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck >> KqpExplain::ExplainDataQuery [GOOD] >> KqpExplain::ExplainDataQueryWithParams >> ColumnStatistics::SimpleColumnStatistics [GOOD] >> KqpLimits::LargeParametersAndMkqlFailure [GOOD] >> KqpLimits::DatashardReplySize >> 
KqpLimits::QSReplySizeEnsureMemoryLimits-useSink [GOOD] >> KqpLimits::QueryReplySize >> KqpTypes::Time64Columns+EnableTableDatetime64-IsColumn [GOOD] >> KqpTypes::Time64Columns+EnableTableDatetime64+IsColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::SimpleColumnStatistics [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:24.519332Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:24.668296Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:24.677545Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:24.677973Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:24.678150Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:25.274753Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:25.408869Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:25.409033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:25.467858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:25.573570Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:26.445666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:26.446781Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:26.446843Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:26.446882Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:26.447342Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:26.505588Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:27.075938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:32:30.634378Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:30.641839Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:32:30.645119Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:30.684199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:32:30.684358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:30.714601Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:30.717550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:30.941426Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:30.941539Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:30.942904Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:30.943462Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:30.944189Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:30.945182Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:30.945479Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:30.945616Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:30.945756Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:30.948367Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:30.948488Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:30.970935Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:31.244935Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:31.284754Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:32:31.284871Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:32:31.320748Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:32:31.322148Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:32:31.322402Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:32:31.322531Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:32:31.322592Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:32:31.322671Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:32:31.322738Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:32:31.322796Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:32:31.323364Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:32:31.323638Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:32:31.325243Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:32:31.338451Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:32:31.338517Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:32:31.338611Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:31.359066Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2142:2596], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:31.363449Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:31.363590Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2146:2599], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:31.373663Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2120:2582] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:32:31.379409Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:32:31.405311Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2220:2650] 2026-01-04T02:32:31.406363Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2220:2650], schemeshard id = 72075186224037897 2026-01-04T02:32:31.531227Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:32:31.582591Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:32:31.842910Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:31.845138Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2569:2882], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:31.848605Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:31.852304Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:32:31.852398Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... BUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:32:59.423186Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:32:59.423275Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4648:4123], StatRequests.size() = 1 2026-01-04T02:32:59.423390Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:37:11.374936Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4503:2462], ActorId: [2:4513:4048], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:37:11.375320Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4503:2462], ActorId: [2:4513:4048], Start read next stream part 2026-01-04T02:37:11.376419Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4503:2462], ActorId: [2:4851:4248], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:37:11.379362Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3dkjaqecskcjfbfnhatb8j", SessionId: ydb://session/3?node_id=2&id=M2JjYmIzZGQtMmNmN2MyMGYtNWEwZjY0ZmItMWM5Yzg1M2Q=, Slow query, duration: 252.918695s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-04T02:37:11.381069Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32120, txId: 18446744073709551615] shutting down 2026-01-04T02:37:11.381200Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4503:2462], ActorId: [2:4851:4248], RunStreamQuery with text: $f1 = ($p0,$p1,$p2) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::EWHMerge, StatisticsInternal::EWHFinalize, StatisticsInternal::EWHSerialize, StatisticsInternal::EWHDeserialize, ) }; $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(LowCardinalityString,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0l,9l)),AGGREGATE_BY(Float,$f0(4096,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(4096,8)),AGGREGATE_BY(Date,$f1(32,10000ul,10099ul)),AGGREGATE_BY(NearNumericLimits,$f0(4096,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808l,9223372036854775807l)) FROM `/Root/Database/Table1` 2026-01-04T02:37:11.381317Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4503:2462], ActorId: [2:4851:4248], Start read next stream part 2026-01-04T02:37:11.384784Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4503:2462], ActorId: [2:4513:4048], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:37:11.384857Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4503:2462], ActorId: [2:4513:4048], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTZiODM4N2YtMWU0NGEwYTQtOWZkYmNhNzUtYTZiMGQ2ODA=, TxId: 2026-01-04T02:37:11.683560Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4878:4271]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:37:11.683832Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:37:11.683885Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4878:4271], StatRequests.size() = 1 2026-01-04T02:41:34.506649Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: 
[2:4503:2462], ActorId: [2:4851:4248], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:41:34.506958Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4503:2462], ActorId: [2:4851:4248], Start read next stream part 2026-01-04T02:41:34.507201Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3dv9andzm4retarp38rby9", SessionId: ydb://session/3?node_id=2&id=YjZlYzM3YmEtNTY0ZjBhYTctZmVhYTZhOWQtMzE3ZTBjNDY=, Slow query, duration: 263.121548s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f1 = ($p0,$p1,$p2) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::EWHMerge,\n StatisticsInternal::EWHFinalize,\n StatisticsInternal::EWHSerialize,\n StatisticsInternal::EWHDeserialize,\n)\n};\n$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(LowCardinalityString,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0l,9l)),AGGREGATE_BY(Float,$f0(4096,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(4096,8)),AGGREGATE_BY(Date,$f1(32,10000ul,10099ul)),AGGREGATE_BY(NearNumericLimits,$f0(4096,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808l,9223372036854775807l)) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-04T02:41:34.511346Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32200, txId: 18446744073709551615] shutting down 2026-01-04T02:41:34.512198Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5056:4374], ActorId: [2:5060:4376], Starting query actor #1 [2:5061:4377] 2026-01-04T02:41:34.512285Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5060:4376], ActorId: [2:5061:4377], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:41:34.512886Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4503:2462], ActorId: [2:4851:4248], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:41:34.512949Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4503:2462], ActorId: [2:4851:4248], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDljZmJkMzAtZjRmOTNmOTEtNTI5OWRiNTctOTM5YzU5Mjc=, TxId: 2026-01-04T02:41:34.518186Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5060:4376], ActorId: [2:5061:4377], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NTBkNGRmMzYtZjU1N2JmMmItYzIzMTk1ZmItZDY5NDcwMjQ=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:41:34.592643Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5074:4389]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:41:34.592918Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:41:34.592971Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:5074:4389], StatRequests.size() = 1 2026-01-04T02:41:34.743634Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5060:4376], ActorId: [2:5061:4377], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTBkNGRmMzYtZjU1N2JmMmItYzIzMTk1ZmItZDY5NDcwMjQ=, TxId: 2026-01-04T02:41:34.743735Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5060:4376], ActorId: [2:5061:4377], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTBkNGRmMzYtZjU1N2JmMmItYzIzMTk1ZmItZDY5NDcwMjQ=, TxId: 2026-01-04T02:41:34.744179Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5056:4374], ActorId: [2:5060:4376], Got response [2:5061:4377] SUCCESS 2026-01-04T02:41:34.744674Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:41:34.787733Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:41:34.787845Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3440:3472] 2026-01-04T02:41:34.788684Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:5093:4399]], StatType[ 1 ], StatRequestsCount[ 2 ] 2026-01-04T02:41:34.789148Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-04T02:41:34.789212Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:41:34.789488Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-04T02:41:34.789544Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 4 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:41:34.789621Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 1 ], ColumnTag[ 1 ] 2026-01-04T02:41:34.790057Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 1 ], ColumnTag[ 2 ] 2026-01-04T02:41:34.796399Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 2026-01-04T02:41:34.796590Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> KqpExplain::UpdateOnSecondary+UseSink [GOOD] >> KqpExplain::UpdateOnSecondary-UseSink >> KqpBatchUpdate::ManyPartitions_3 [GOOD] >> KqpQuery::CreateAsSelectBadTypes+IsOlap [GOOD] >> KqpQuery::CreateAsSelectBadTypes-IsOlap >> KqpQueryDiscard::DiscardSelectEnsureExecuted [GOOD] >> KqpQueryDiscard::NoChannelDataEventsWhenDiscard >> KqpQuery::YqlSyntaxV0 [GOOD] >> KqpQuery::YqlTableSample >> KqpStats::RequestUnitForSuccessExplicitPrepare [GOOD] >> KqpStats::RequestUnitForExecute |91.8%| [TA] $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpQuery::DeleteWhereInSubquery [GOOD] >> KqpExplain::ReadTableRangesFullScan [GOOD] >> KqpQuery::DictJoin >> KqpLimits::DatashardProgramSize+useSink [GOOD] >> KqpLimits::OutOfSpaceBulkUpsertFail >> KqpExplain::ReadTableRanges >> KqpLimits::DatashardProgramSize-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_3 [GOOD] Test command err: Trying to start YDB, gRPC: 21527, MsgBus: 20603 2026-01-04T02:38:30.576762Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330257680450081:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:30.577506Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:30.761833Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:30.766664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:30.766798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:30.792987Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:30.842954Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:30.843833Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330257680449977:2081] 1767494310572705 != 1767494310572708 2026-01-04T02:38:30.906644Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:30.906666Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:30.906676Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:30.906762Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:30.993892Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:31.289247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:31.341329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:31.481994Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:31.582365Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:31.617710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:31.681766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:33.257932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330270565353733:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.258078Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.258407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330270565353743:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.258473Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.537322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:33.565606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:33.589917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:33.616639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:33.643853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:33.674896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:33.707310Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:33.753361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:33.820016Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330270565354611:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.820093Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.820187Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330270565354616:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.820243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330270565354618:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.820294Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:33.823337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:33.835563Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330270565354620:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:33.924939Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330270565354671:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:35.352447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:35.984786Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330257680450081:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:36.013978Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migratio ... PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7591330991526089059:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:20.957509Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:20.983453Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:21.091128Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:21.133192Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:21.133321Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:21.143669Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:21.287646Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:21.388656Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:21.388685Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:21.388697Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:21.388822Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:21.977994Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:22.225353Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:22.250349Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:22.343244Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:22.668733Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:22.781992Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:25.956272Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7591330991526089059:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:25.956389Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:27.042236Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591331021590861970:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:27.042380Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:27.042803Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591331021590861980:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:27.042871Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:27.165181Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:27.225302Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:27.277267Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:27.332340Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:27.388038Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:27.454063Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:27.521395Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:27.612822Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:27.758296Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591331021590862868:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:27.758468Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:27.758841Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591331021590862873:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:27.758913Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591331021590862874:2490], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:27.759388Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:27.766357Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:27.789955Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7591331021590862877:2491], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:41:27.881172Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7591331021590862928:3777] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:30.812227Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:36.069989Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:41:36.070023Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded >> KqpQuery::QueryClientTimeout [GOOD] >> KqpQuery::QueryClientTimeoutPrecompiled >> KqpQuery::CreateAsSelectTypes-NotNull+IsOlap [GOOD] >> KqpQuery::CreateAsSelectTypes+NotNull+IsOlap |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> KqpExplain::UpdateConditional-UseSink [GOOD] >> KqpExplain::UpdateConditionalKey+UseSink >> KqpQuery::CurrentUtcTimestamp [GOOD] >> KqpStats::JoinNoStatsYql [GOOD] >> ColumnStatistics::EqWidthHistogram [GOOD] >> KqpQuery::NoEvaluate [GOOD] >> KqpQuery::DdlInDataQuery >> KqpTypes::ParametrizedDecimalColumns-EnableParameterizedDecimal+IsColumn [GOOD] >> KqpQuery::CreateAsSelectBadTypes-IsOlap [GOOD] >> KqpParams::ExplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpQuery::OlapCreateAsSelect_Simple [GOOD] >> KqpExplain::ExplainDataQueryWithParams [GOOD] >> KqpStats::DataQueryWithEffects+UseSink >> KqpStats::JoinStatsBasicYql+StreamLookupJoin >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal+IsColumn >> KqpQuery::OlapCreateAsSelect_Complex >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck >> KqpQuery::OltpCreateAsSelect_Simple >> KqpExplain::CreateTableAs+Stats >> KqpParams::Decimal+QueryService-UseSink [GOOD] >> KqpParams::Decimal-QueryService+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::EqWidthHistogram [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:24.278308Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:24.399148Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:24.408866Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:24.409997Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:24.410216Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:24.914509Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:25.025190Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:25.025340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:25.075677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:25.184014Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:26.027413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:26.028406Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:26.028469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:26.028512Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:26.028940Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:26.117116Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:26.659976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:32:29.754956Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:29.762220Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:32:29.764346Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:29.794131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:32:29.794219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:29.833149Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:32:29.835811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:30.031869Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:30.031987Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:30.050574Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:30.142885Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:30.143619Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:30.144558Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:30.145141Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:30.145895Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:30.146075Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:30.146254Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:30.146455Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:30.146774Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:30.376330Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:30.427473Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:32:30.427577Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:32:30.477934Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:32:30.479536Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:32:30.479776Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:32:30.479844Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:32:30.479912Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:32:30.479978Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:32:30.480050Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:32:30.480111Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:32:30.481152Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:32:30.493541Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:30.493661Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2134:2592], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:30.518459Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2200:2640] 2026-01-04T02:32:30.519108Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2200:2640], schemeshard id = 72075186224037897 2026-01-04T02:32:30.607900Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2255:2666] 2026-01-04T02:32:30.610724Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:32:30.627140Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2261:2671] Owner: [2:2260:2670]. Describe result: PathErrorUnknown 2026-01-04T02:32:30.627207Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2261:2671] Owner: [2:2260:2670]. Creating table 2026-01-04T02:32:30.627292Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2261:2671] Owner: [2:2260:2670]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:32:30.633086Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2313:2694], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:30.637374Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:30.645139Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2261:2671] Owner: [2:2260:2670]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:32:30.645282Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2261:2671] Owner: [2:2260:2670]. Subscribe on create table tx: 281474976720657 2026-01-04T02:32:30.657033Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2261:2671] Owner: [2:2260:2670]. 
Subscribe on tx: 281474976720657 registered 2026-01-04T02:32:30.675949Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:32:30.831012Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:32:31.294407Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2261:2671] Owner: [2:2260:2670]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-04T02:32:31.405614Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2261:2671] Owner: [2:2260:2670]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:32:31.405681Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2261:2671] Owner: [2:2260:2670]. Column diff is empty, finishing 2026-01-04T02:32:32.081255Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-04T02:37:14.276504Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4484:2467], ActorId: [2:4818:4227], RunStreamQuery with text: $f1 = ($p0,$p1,$p2) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::EWHMerge, StatisticsInternal::EWHFinalize, StatisticsInternal::EWHSerialize, StatisticsInternal::EWHDeserialize, ) }; $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(LowCardinalityString,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0l,9l)),AGGREGATE_BY(Float,$f0(4096,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(4096,8)),AGGREGATE_BY(Date,$f1(32,10000ul,10099ul)),AGGREGATE_BY(NearNumericLimits,$f0(4096,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808l,9223372036854775807l)) FROM `/Root/Database/Table1` 2026-01-04T02:37:14.276640Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4484:2467], ActorId: [2:4818:4227], Start read next stream part 2026-01-04T02:37:14.277202Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32310, txId: 18446744073709551615] shutting down 2026-01-04T02:37:14.281296Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4484:2467], ActorId: [2:4494:4029], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:37:14.281381Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4484:2467], ActorId: [2:4494:4029], Finish with SUCCESS, SessionId: 
ydb://session/3?node_id=2&id=NWY5NWQwMmUtNjhiYWFiNTYtNjVhM2U0MmYtYmEzOWY5ODM=, TxId: 2026-01-04T02:37:14.574570Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4845:4250]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:37:14.574893Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:37:14.574965Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4845:4250], StatRequests.size() = 1 2026-01-04T02:41:40.280042Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4484:2467], ActorId: [2:4818:4227], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:41:40.280379Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4484:2467], ActorId: [2:4818:4227], Start read next stream part 2026-01-04T02:41:40.282225Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3dvc540x3k2h3jc8kwf3sq", SessionId: ydb://session/3?node_id=2&id=MjU3NWJiYmUtNjhmNWRiZS1hNjQ4NGZiMy1iYWI3MGMwOQ==, Slow query, duration: 266.001816s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f1 = ($p0,$p1,$p2) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::EWHCreate, $parent as Depends)($item,$p0,$p1,$p2) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::EWHAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::EWHMerge,\n StatisticsInternal::EWHFinalize,\n StatisticsInternal::EWHSerialize,\n StatisticsInternal::EWHDeserialize,\n)\n};\n$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(LowCardinalityString,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f0(4096,8)),AGGREGATE_BY(LowCardinalityInt,$f1(9,0l,9l)),AGGREGATE_BY(Float,$f0(4096,8)),AGGREGATE_BY(Float,$f1(32,CAST(0 AS Float),CAST(99 AS Float))),AGGREGATE_BY(Date,$f0(4096,8)),AGGREGATE_BY(Date,$f1(32,10000ul,10099ul)),AGGREGATE_BY(NearNumericLimits,$f0(4096,8)),AGGREGATE_BY(NearNumericLimits,$f1(32,-9223372036854775808l,9223372036854775807l)) FROM `/Root/Database/Table1`", parameters: 0b 2026-01-04T02:41:40.285970Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5022:4352], ActorId: [2:5023:4353], Starting query actor #1 [2:5024:4354] 2026-01-04T02:41:40.286062Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5023:4353], ActorId: [2:5024:4354], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:41:40.286489Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32400, txId: 18446744073709551615] shutting down 2026-01-04T02:41:40.291587Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5023:4353], ActorId: [2:5024:4354], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YjI2OTYxYzMtMWMxYjU1ZWUtOTE5ZmEzZjctYzk4YTI3MmU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:41:40.292723Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4484:2467], ActorId: [2:4818:4227], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:41:40.292784Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4484:2467], ActorId: [2:4818:4227], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTkwZDE2MjktOGUxMGIxMDItYzQ3M2YzNzMtNmY3YWZmM2M=, TxId: 2026-01-04T02:41:40.380810Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5041:4368]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:41:40.381198Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:41:40.381256Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:5041:4368], StatRequests.size() = 1 2026-01-04T02:41:40.624123Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5023:4353], ActorId: [2:5024:4354], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjI2OTYxYzMtMWMxYjU1ZWUtOTE5ZmEzZjctYzk4YTI3MmU=, TxId: 2026-01-04T02:41:40.624221Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5023:4353], ActorId: [2:5024:4354], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjI2OTYxYzMtMWMxYjU1ZWUtOTE5ZmEzZjctYzk4YTI3MmU=, TxId: 2026-01-04T02:41:40.624555Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5022:4352], ActorId: [2:5023:4353], Got response [2:5024:4354] SUCCESS 2026-01-04T02:41:40.624956Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:41:40.662499Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:41:40.662592Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3349:3460] 2026-01-04T02:41:40.663424Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:5060:4378]], StatType[ 3 ], StatRequestsCount[ 6 ] 2026-01-04T02:41:40.663976Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-04T02:41:40.664042Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:41:40.664429Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-04T02:41:40.664493Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 4 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:41:40.664560Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 3 ], ColumnTag[ 1 ] 2026-01-04T02:41:40.664853Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 3 ], ColumnTag[ 2 ] 2026-01-04T02:41:40.665014Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 3 ], ColumnTag[ 3 ] 2026-01-04T02:41:40.665145Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 3 ], ColumnTag[ 4 ] 2026-01-04T02:41:40.665280Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 5 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 3 ], ColumnTag[ 5 ] 2026-01-04T02:41:40.665401Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 6 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 3 ], ColumnTag[ 6 ] 2026-01-04T02:41:40.674612Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-04T02:41:40.675029Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-01-04T02:41:40.676833Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 2026-01-04T02:41:40.677071Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 2026-01-04T02:41:40.677201Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 2026-01-04T02:41:40.677276Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 2026-01-04T02:41:40.677394Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 2026-01-04T02:41:40.677480Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest >> KqpQuery::YqlTableSample [GOOD] >> KqpQuery::UpdateWhereInSubquery >> KqpStats::RequestUnitForExecute [GOOD] >> KqpLimits::DatashardProgramSize-useSink [GOOD] >> KqpParams::CheckQueryCacheForPreparedQuery >> KqpLimits::ComputeNodeMemoryLimit |91.8%| [TA] {RESULT} 
$(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQTest::TestPQReadAhead [GOOD] >> TPQTest::TestOwnership >> KqpExplain::UpdateConditionalKey+UseSink [GOOD] >> KqpExplain::UpdateConditionalKey-UseSink |91.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/hive/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpExplain::UpdateOnSecondary-UseSink [GOOD] >> KqpExplain::CreateTableAs+Stats [GOOD] >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey+UseSink >> KqpExplain::CreateTableAs-Stats >> KqpExplain::ReadTableRanges [GOOD] >> KqpExplain::Predicates >> KqpLimits::QueryReplySize [GOOD] >> KqpLimits::QueryExecTimeoutCancel ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::RequestUnitForExecute [GOOD] Test command err: Trying to start YDB, gRPC: 12086, MsgBus: 62687 2026-01-04T02:41:20.140709Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330989146367383:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:20.140887Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:20.473873Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:20.475492Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:20.475581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:20.492097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:20.664027Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:20.676189Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330989146367334:2081] 1767494480132074 != 1767494480132077 2026-01-04T02:41:20.737135Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:20.802447Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:20.802481Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:20.802487Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:20.802570Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:21.142013Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:21.388182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:21.396477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:21.462529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.738744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.894730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.980399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.557403Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331002031271099:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.557509Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.557766Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331002031271109:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.557856Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.918542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.948387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.982302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.010783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.036375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.082659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.123152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.175904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.251066Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331006326239278:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.251141Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.251292Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331006326239283:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.251319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331006326239285:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.251373Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.255055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:24.266933Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331006326239287:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:24.322304Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331006326239338:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:25.140800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330989146367383:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:25.140870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:25.829411Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591331010621206947:2536], status: GENERIC_ERROR, i ... 644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:38.022041Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591331047077757822:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:38.022113Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 21609, MsgBus: 4650 2026-01-04T02:41:39.753844Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591331069869487813:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:39.753890Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:39.798041Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:39.895937Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:39.910112Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:39.910198Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:39.936579Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:39.960182Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:39.985863Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:39.985885Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:39.985895Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-04T02:41:39.985969Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:40.442003Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:40.451951Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:40.506373Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:40.638828Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:40.692412Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:40.856237Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:43.435904Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331087049358807:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:43.435994Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:43.436342Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331087049358817:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:43.436386Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:43.515602Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:43.546691Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:43.580985Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:43.648724Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:43.695243Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:43.740586Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:43.800625Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:43.859116Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:43.954771Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331087049359686:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:43.954850Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:43.954931Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331087049359691:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:43.955209Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331087049359693:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:43.955259Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:43.958877Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:43.972001Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591331087049359694:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:44.071520Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591331091344327042:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:44.753849Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591331069869487813:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:44.753927Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Consumed units: 297 Consumed units: 6 |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpQuery::DictJoin [GOOD] >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix [GOOD] >> KqpQuery::OltpCreateAsSelect_Simple [GOOD] >> KqpQuery::OltpCreateAsSelect_Disable >> KqpQuery::DdlInDataQuery [GOOD] >> KqpStats::JoinStatsBasicYql+StreamLookupJoin [GOOD] >> KqpStats::JoinStatsBasicYql-StreamLookupJoin >> KqpQuery::CreateAsSelectTypes+NotNull+IsOlap [GOOD] >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix >> KqpParams::ImplicitDifferentParameterTypesQueryCacheCheck [GOOD] >> KqpParams::EmptyListForListParameterExecuteDataQuery >> KqpStats::DataQueryWithEffects+UseSink [GOOD] >> KqpStats::DataQueryMulti >> KqpTypes::Time64Columns+EnableTableDatetime64+IsColumn [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::DictJoin [GOOD] Test command err: Trying to start YDB, gRPC: 30574, MsgBus: 19157 2026-01-04T02:41:20.062097Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330990153476876:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:20.066983Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:20.344666Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:20.421388Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:20.421512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:20.477579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:20.533754Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:20.574880Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:20.770396Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:20.770430Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:20.770440Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:20.770508Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:21.085585Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:21.427544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:23.337932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331003038379617:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.338118Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.338596Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331003038379627:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.338673Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.338902Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331003038379631:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.347099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:23.356496Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331003038379633:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:41:23.458991Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331003038379684:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:23.932141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 32166, MsgBus: 8365 2026-01-04T02:41:24.958469Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331007403129351:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:24.958590Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:24.975914Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:25.044244Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:25.111721Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:25.111813Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:25.114853Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:25.178432Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:25.178477Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:25.178494Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:25.178565Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:25.199318Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:25.537181Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:25.966460Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:28.126078Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: 
[WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331024582999383:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:28.126159Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331024582999374:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:28.126228Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:28.126470Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331024582999389:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:28.126531Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:28.129722Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:28.140223Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591331024582999388:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:41:28.244843Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591331024582999441:2534] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:28.268519Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 1135, MsgBus: 23036 2026-01-04T02:41:29.168848Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591331027052542328:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:29.168896Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:29.189781Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme ... DATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10323, MsgBus: 2435 2026-01-04T02:41:40.940301Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591331076893059917:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:40.940372Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:40.957052Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:41.037495Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:41.044023Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591331076893059890:2081] 1767494500939359 != 1767494500939362 2026-01-04T02:41:41.071056Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:41.071175Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:41.091437Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:41.163811Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:41.163853Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:41.163861Z node 5 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:41.163959Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:41.204215Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:41.652980Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:41.658731Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:41.680654Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:41.762664Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:41.922471Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:41.979002Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:42.005621Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:44.801950Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331094072930937:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:44.802063Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:44.804156Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331094072930947:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:44.804226Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:44.875102Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:44.921445Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:44.968356Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:45.012190Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:45.062297Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:45.115422Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:45.186124Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:45.242678Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:45.333298Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331098367899115:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:45.333390Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:45.333503Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331098367899120:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:45.333754Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331098367899122:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:45.333794Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:45.338593Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:45.354268Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331098367899123:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:45.455789Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331098367899175:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:45.940557Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331076893059917:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:45.940637Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectPath+UseTablePathPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 14655, MsgBus: 13676 2026-01-04T02:41:20.074421Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330990637626072:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:20.074484Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:20.451462Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:20.456844Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:20.456967Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:20.537974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:20.617148Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:20.780956Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:20.801031Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:20.801076Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:20.801089Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:20.801204Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:21.083551Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:21.405941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:21.420148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:21.471529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.753764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.937977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:22.011293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.655405Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331003522529723:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.655535Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.655819Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331003522529733:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.655873Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.970183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.002774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.029922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.060928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.094638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.136326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.193955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.233326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.301711Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331007817497899:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.301763Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.301965Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331007817497904:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.301972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331007817497905:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.302020Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.305338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:24.314935Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331007817497908:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:24.407011Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331007817497959:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:25.074575Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330990637626072:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:25.074632Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26000, MsgBus: 20264 2026-01-04T02:41:27.338937Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331018202531939:2179];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:27.339253Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.c ... ], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:62: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031 2026-01-04T02:41:42.703746Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=M2RkYTM4ZmEtNWU2MTBjZjItNWI1MDFhOTYtZTFiOTQzMDI=, ActorId: [4:7591331085213936988:2322], ActorState: ExecuteState, LegacyTraceId: 01ke3e3fbf4np16fpmfkyfa059, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 6 column: 62 } message: "At function: KiCreateTable!" end_position { row: 6 column: 62 } severity: 1 issues { position { row: 6 column: 20 } message: "Invalid type for column: Value. Only YQL data types and PG types are currently supported" end_position { row: 6 column: 20 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:41:42.764884Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7591331085213937102:2343], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:45: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031 2026-01-04T02:41:42.766936Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=Y2M5ZmE4NDItNjlkMjMyMTUtMTBlMzAwMjQtN2IxODZiYWQ=, ActorId: [4:7591331085213937091:2337], ActorState: ExecuteState, LegacyTraceId: 01ke3e3ja2fazxyqeymn0ayymy, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 6 column: 45 } message: "At function: KiCreateTable!" end_position { row: 6 column: 45 } severity: 1 issues { position { row: 6 column: 20 } message: "Invalid type for column: Value. Only YQL data types and PG types are currently supported" end_position { row: 6 column: 20 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:41:42.831733Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7591331085213937121:2349], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:6:43: Error: At function: KiCreateTable!
:6:20: Error: Invalid type for column: Value. Only YQL data types and PG types are currently supported, code: 2031 2026-01-04T02:41:42.833055Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=ZjU3ODM2OTItNDc4MTY1LWY1NzVkNzBhLTdmZGMzYjkw, ActorId: [4:7591331085213937115:2346], ActorState: ExecuteState, LegacyTraceId: 01ke3e3jbw6g64a2g0qdpg7zpn, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 6 column: 43 } message: "At function: KiCreateTable!" end_position { row: 6 column: 43 } severity: 1 issues { position { row: 6 column: 20 } message: "Invalid type for column: Value. Only YQL data types and PG types are currently supported" end_position { row: 6 column: 20 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# Trying to start YDB, gRPC: 24787, MsgBus: 30274 2026-01-04T02:41:43.745581Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591331087785425710:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:43.745644Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:43.757234Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:41:43.884101Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:43.886767Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591331087785425684:2081] 1767494503744266 != 1767494503744269 2026-01-04T02:41:43.891314Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:43.920864Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:43.920954Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:43.924679Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:43.972714Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:43.972737Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:43.972745Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:43.972840Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:44.172667Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:44.436550Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:44.443685Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:41:44.450281Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:41:44.511772Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:41:44.750353Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:47.397759Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331104965295843:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:47.397848Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331104965295830:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:47.398268Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:47.398562Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331104965295847:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:47.398668Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:47.402180Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:47.416658Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331104965295846:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-04T02:41:47.518796Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331104965295899:2597] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:47.554913Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:47.821631Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:48.043710Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:41:48.085758Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:41:48.093793Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715671, at schemeshard: 72057594046644480 2026-01-04T02:41:48.095256Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) >> KqpQuery::OlapCreateAsSelect_Complex [GOOD] >> KqpQuery::MixedCreateAsSelect >> KqpQuery::QueryClientTimeoutPrecompiled [GOOD] >> KqpQuery::QueryCancelWrite |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::DdlInDataQuery [GOOD] Test command err: Trying to start YDB, gRPC: 16791, MsgBus: 19982 2026-01-04T02:41:20.767120Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330990370473663:2218];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:20.767245Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:20.988474Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-01-04T02:41:21.016743Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:21.016861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:21.095256Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:21.110352Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:21.128199Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330990370473464:2081] 1767494480733436 != 1767494480733439 2026-01-04T02:41:21.233654Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:21.233712Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:21.233724Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:21.233837Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:21.252310Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:21.735271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:21.754748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:21.768186Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:23.811900Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331003255376237:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.811904Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331003255376247:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.812015Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.812243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331003255376252:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.812290Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.815796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:23.825965Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331003255376251:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:41:23.922718Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331003255376304:2535] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:24.181869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.505543Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=N2FiZWI0ZjItZjFhZjkzOWEtOWQ2MzAwNDEtNDQyNzlkZjA=, ActorId: [1:7591331007550343711:2340], ActorState: ExecuteState, LegacyTraceId: 01ke3e30awb22w0qdqazapf13t, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1295: Invalid Decimal value for precision:  status# BAD_REQUEST issues# trace_id# Trying to start YDB, gRPC: 6431, MsgBus: 2120 2026-01-04T02:41:25.200643Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331009991907063:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:25.200835Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:25.222975Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:25.318785Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591331009991907036:2081] 1767494485199572 != 1767494485199575 2026-01-04T02:41:25.331145Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:25.331207Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:25.332485Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:25.338643Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:25.358638Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-04T02:41:25.358755Z node 2 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-04T02:41:25.386166Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:25.386192Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:25.386203Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:25.386281Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2026-01-04T02:41:25.421892Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:25.714562Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:25.722178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:26.212172Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:28.314173Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331022876809804:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:28.314255Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331022876809815:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:28.314266Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:28.314535Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331022876809819:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:28.314593Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:28.317168Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:28.326786Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591331022876809818:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 28147497671065 ... : schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:42.993749Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:43.109915Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:43.168870Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:43.232028Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:45.963430Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331095961910641:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:45.963534Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:45.963862Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331095961910652:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:45.963924Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:46.014043Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:46.050557Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:46.090987Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:46.130002Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:46.174186Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:46.219089Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:46.263448Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:46.324875Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:46.415353Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331100256878823:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:46.415447Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:46.415660Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331100256878828:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:46.415718Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331100256878829:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:46.415757Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:46.419591Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:46.431486Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331100256878832:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:46.511050Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331100256878883:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:47.104560Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331083077007016:2170];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:47.104616Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2026-01-04T02:41:48.306405Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7591331108846813805:2538], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:5:30: Error: Operation 'CreateTable' can't be performed in data query, code: 2008 2026-01-04T02:41:48.307594Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=NGNkYTdhZDMtODE5MDQzMzQtMjE1MDQwMTQtODkxMDA1ZDQ=, ActorId: [5:7591331108846813797:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3e3qqs4rjv10yd5zqsrg3r, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 5 column: 30 } message: "Operation \'CreateTable\' can\'t be performed in data query" end_position { row: 5 column: 30 } issue_code: 2008 severity: 1 } } tx_id# trace_id# 2026-01-04T02:41:48.334969Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7591331108846813818:2541], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2026-01-04T02:41:48.335344Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=NGNkYTdhZDMtODE5MDQzMzQtMjE1MDQwMTQtODkxMDA1ZDQ=, ActorId: [5:7591331108846813797:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3e3qrw9pd8w3jc3zw25b4k, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 2 column: 24 } message: "Operation \'DropTable\' can\'t be performed in data query" end_position { row: 2 column: 24 } issue_code: 2008 severity: 1 } } tx_id# trace_id#
: Error: Execution, code: 1060
:2:24: Error: Operation 'DropTable' can't be performed in data query, code: 2008 2026-01-04T02:41:48.368900Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7591331108846813827:2545], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008
: Error: Execution, code: 1060
:2:54: Error: Operation 'AlterTable' can't be performed in data query, code: 2008 2026-01-04T02:41:48.370151Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=NGNkYTdhZDMtODE5MDQzMzQtMjE1MDQwMTQtODkxMDA1ZDQ=, ActorId: [5:7591331108846813797:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3e3qsw0hrwpyxkxrm6xn8b, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 2 column: 54 } message: "Operation \'AlterTable\' can\'t be performed in data query" end_position { row: 2 column: 54 } issue_code: 2008 severity: 1 } } tx_id# trace_id# |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpLimits::StreamWrite+Allowed >> KqpQueryDiscard::NoChannelDataEventsWhenDiscard [GOOD] >> KqpQueryDiscard::DiscardSelectMultiLine ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns+EnableTableDatetime64+IsColumn [GOOD] Test command err: Trying to start YDB, gRPC: 24424, MsgBus: 22829 2026-01-04T02:41:20.069480Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330990267327055:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:20.069954Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:20.103273Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:41:20.410310Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:20.415883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:20.415989Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:20.471129Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:20.506721Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:20.575475Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:20.770675Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:20.770707Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:20.770713Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:20.770834Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:21.073637Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-04T02:41:21.441569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:21.449584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:41:21.494799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.718767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.885326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.961042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.520570Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331003152230701:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.520694Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.521080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331003152230711:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.521130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.918497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.945666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.978331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.014154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.042395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.082230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.154119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.191275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.269179Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331007447198880:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.269253Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.269333Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331007447198885:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.269519Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331007447198887:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.269606Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.272884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:24.282925Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331007447198888:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:41:24.369282Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331007447198940:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:25.068514Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330990267327055:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:25.068594Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17368, MsgBus: 24920 2026-01-04T02:41:26.903552Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exist ... } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.774299Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.774921Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037985;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.775228Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.775881Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037973;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.776145Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.776769Z node 4 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037959;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.777369Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037975;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.777633Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038010;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.778357Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037987;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.778425Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037961;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.779286Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.779298Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037989;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.780157Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037963;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 
UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.780164Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037997;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.780947Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037977;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.780985Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.781763Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038004;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.782682Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.783287Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037965;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.783591Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037991;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 
2026-01-04T02:41:48.784147Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037979;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.784505Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.785098Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:48.785531Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037981;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Timestamp" Type: "Timestamp64" TypeId: 66 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpStats::MultiTxStatsFullExpYql >> KqpQuery::UpdateWhereInSubquery [GOOD] >> KqpQuery::UpdateThenDelete-UseSink >> KqpQuery::QuerySkipHasNoColumns [GOOD] >> KqpQuery::QueryResultsTruncated >> KqpLimits::DatashardReplySize [GOOD] >> KqpLimits::ManyPartitions >> KqpQuery::RewriteIfPresentToMap >> KqpParams::Decimal-QueryService+UseSink [GOOD] >> KqpParams::Decimal+QueryService+UseSink >> KqpExplain::CreateTableAs-Stats [GOOD] >> KqpParams::CheckQueryCacheForPreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForUnpreparedQuery >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal+IsColumn [GOOD] >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] >> KqpAnalyze::AnalyzeTable+ColumnStore >> KqpExplain::UpdateConditionalKey-UseSink [GOOD] >> KqpExplain::UpdateOn+UseSink >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::CreateTableAs-Stats [GOOD] Test command err: Trying to start YDB, gRPC: 11099, MsgBus: 9581 2026-01-04T02:41:20.063138Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330989893273621:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:20.063218Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:20.106510Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:41:20.423258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:20.423395Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:20.436577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:20.497075Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:20.516593Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:20.763835Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:20.768338Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:20.768362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:20.768368Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:20.768430Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:21.078056Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:21.444732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:21.504900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.725334Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.889453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.970225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.516820Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331002778177344:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.516937Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.517208Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331002778177354:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.517251Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.918676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.949162Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.982416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.012352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.083208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.115421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.177702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.223136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.283926Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331007073145523:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.283996Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331007073145528:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.284004Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.284186Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331007073145530:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.284233Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.287584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:24.296399Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331007073145531:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:41:24.355056Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331007073145583:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:25.063324Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330989893273621:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:25.063388Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":6,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":5,"Plans" ... lans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination2","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} PLAN::{"Plan":{"Plans":[{"Tables":["test\/test2\/Destination3"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/test2\/Destination3","Name":"FillTable","Table":"test\/test2\/Destination3","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/test2\/Destination3","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination3","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 7495, MsgBus: 10449 2026-01-04T02:41:48.784590Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591331108997489319:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:48.785266Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:48.791539Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:41:48.899749Z node 5 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:48.901605Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:48.901669Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:48.901854Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591331108997489273:2081] 1767494508779931 != 1767494508779934 2026-01-04T02:41:48.917662Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:48.917958Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:48.996325Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:48.996356Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:48.996366Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:48.996455Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:49.213266Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:49.515931Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:49.788776Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:52.710310Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331126177359345:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:52.710398Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331126177359335:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:52.710574Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:52.710948Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331126177359350:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:52.711050Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:52.714388Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:52.727659Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331126177359349:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:41:52.815671Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331126177359404:2535] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:52.848540Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) PLAN::{"Plan":{"Plans":[{"Tables":["Destination"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/Destination","Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Destination","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]},{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} PLAN::{"Plan":{"Plans":[{"Tables":["test\/Destination2"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/Destination2","Name":"FillTable","Table":"test\/Destination2","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/Destination2","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination2","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} 
PLAN::{"Plan":{"Plans":[{"Tables":["test\/test2\/Destination3"],"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Source"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Source","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Source","ReadColumns":["Col1 (-∞, +∞)","Col2"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage"}],"Node Type":"Map","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Operators":[{"Inputs":[],"Path":"\/Root\/test\/test2\/Destination3","Name":"FillTable","Table":"test\/test2\/Destination3","SinkType":"KqpTableSink"}],"Node Type":"Sink"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Source","reads":[{"columns":["Col1","Col2"],"scan_by":["Col1 (-∞, +∞)"],"type":"FullScan"}]},{"name":"\/Root\/test\/test2\/Destination3","writes":[{"columns":["Col1","Col2"],"type":"MultiReplace"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"FillTable","Table":"Destination3","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} 2026-01-04T02:41:53.782841Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331108997489319:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:53.782924Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::ParametrizedDecimalColumns+EnableParameterizedDecimal+IsColumn [GOOD] Test command err: Trying to start YDB, gRPC: 10243, MsgBus: 31881 2026-01-04T02:41:26.738706Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331017017222354:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:26.738950Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:26.781068Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:41:27.059838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:27.059935Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:27.078348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:27.140768Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:27.206420Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-04T02:41:27.294433Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:27.294463Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:27.294485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:27.294573Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:27.336367Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:27.757538Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:27.825400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:27.834453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:29.889607Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331029902125095:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:29.889766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:29.890159Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331029902125105:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:29.890217Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:30.112699Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331034197092415:2503] txid# 281474976710658, issues: { message: "Type \'Decimal(15,0)\' specified for column \'Decimal_15_0PK\', but support for parametrized decimal is disabled (EnableParameterizedDecimal feature flag is off)" severity: 1 } 2026-01-04T02:41:30.149973Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331034197092423:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:30.150069Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:30.150491Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331034197092426:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:30.150579Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:30.168075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:30.305944Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331034197092519:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:30.306070Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:30.306637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331034197092522:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:30.306699Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:30.333089Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331034197092533:2574] txid# 281474976710660, issues: { message: "Type \'Decimal(15,0)\' specified for column \'Decimal_15_0\', but support for parametrized decimal is disabled (EnableParameterizedDecimal feature flag is off)" severity: 1 } Trying to start YDB, gRPC: 25173, MsgBus: 6026 2026-01-04T02:41:31.051174Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331038160310335:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:31.051230Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:31.069726Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:31.131159Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:31.134850Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591331038160310298:2081] 1767494491050419 != 1767494491050422 2026-01-04T02:41:31.140705Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:31.140802Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:31.206699Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:31.265705Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:31.306847Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:31.306870Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:31.306879Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:31.306960Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:31.601343Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:32.066223Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:34.173562Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331051045213064:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:34.173646Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:34.178028Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331051045213074:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:34.178107Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:34.196125Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part prop ... } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.896000Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037965;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.896828Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038003;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.896990Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037987;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.897613Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.898273Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037991;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.898505Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037993;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.899253Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.899496Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037979;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.900115Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.900485Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038007;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.900857Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.901487Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037989;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.901501Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038013;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } 
NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.902416Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.902449Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037995;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.903310Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038015;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.903495Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037981;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.904275Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038011;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.904583Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037997;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.905757Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037985;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; 2026-01-04T02:41:52.906615Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037983;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715667;fline=column_engine_logs.cpp:128;event=schema_will_be_ignored;last_version=3;to_version=4;diff=Version: 4 UpsertColumns { Id: 5 Name: "Decimal_35_10" Type: "Decimal(35,10)" TypeId: 4865 TypeInfo { DecimalPrecision: 35 DecimalScale: 10 } NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } DefaultCompression { } Options { SchemeNeedActualization: false } ; |91.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::OltpCreateAsSelect_Disable [GOOD] Test command err: Trying to start YDB, gRPC: 61373, MsgBus: 25982 2026-01-04T02:41:20.069975Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330988159779857:2195];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:20.070030Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:20.374204Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:20.389000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:20.389117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:20.520039Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:20.604194Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:20.613976Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:20.770290Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:20.770312Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:20.770316Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:20.770369Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:21.072807Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:21.440866Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:21.446623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:21.496713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.736736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.905761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.999477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.657236Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331001044683449:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.657392Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.657764Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331001044683459:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.657840Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.941942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.976698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.008937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.037146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.070313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.111067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.155787Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.196314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.272685Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331005339651624:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.272750Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.272809Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331005339651629:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.273008Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331005339651631:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.273047Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.276560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:24.287287Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331005339651632:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:24.376683Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331005339651684:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:25.073934Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330988159779857:2195];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:25.073987Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:26.037765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187 ... o unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:41:44.342196Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:41:44.563934Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:47.520532Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331107545732494:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:47.520603Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331107545732483:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:47.520763Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:47.522068Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331107545732498:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:47.522150Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:47.524641Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:47.574275Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591331107545732497:2332], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-04T02:41:47.638476Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591331107545732552:2620] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:47.663476Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:47.899420Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:48.145112Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:41:48.152272Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:41:48.559022Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591331090365862354:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:48.559119Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 62062, MsgBus: 4720 2026-01-04T02:41:49.344837Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591331112874833434:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:49.344931Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:49.383047Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:49.458157Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:49.477277Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:49.477489Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:49.480472Z node 5 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:49.577408Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:49.579657Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:49.579681Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:49.579690Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:49.579832Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:50.173498Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:50.183115Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:50.349985Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:53.365707Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331130054703442:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:53.365851Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:53.366440Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331130054703456:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:53.366530Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331130054703457:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:53.366603Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:53.372321Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:53.390183Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331130054703460:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:41:53.469398Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331130054703511:2533] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:53.504244Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:53.755218Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7591331130054703651:2350], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:5:49: Error: Creating table with data is not supported. 2026-01-04T02:41:53.757515Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=NDAxNjE1MDUtZWQ5M2QwZGMtNzU1NjA1OTEtOTI3MTM0NTE=, ActorId: [5:7591331130054703649:2349], ActorState: ExecuteState, LegacyTraceId: 01ke3e3x1z40xddm1rd04pzjpb, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Pre type annotation" issue_code: 1020 severity: 1 issues { position { row: 5 column: 49 } message: "Creating table with data is not supported." end_position { row: 5 column: 49 } severity: 1 } } tx_id# trace_id# |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test [GOOD] >> KqpStats::DataQueryMulti [GOOD] >> KqpStats::CreateTableAsStats+IsOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateAsSelectPath-UseTablePathPrefix [GOOD] Test command err: Trying to start YDB, gRPC: 22012, MsgBus: 14487 2026-01-04T02:41:23.341495Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331004022045755:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:23.341638Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:23.585166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:23.585334Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:23.598150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:23.654807Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:23.656957Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:23.658687Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331004022045650:2081] 1767494483335706 != 1767494483335709 2026-01-04T02:41:23.720281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:23.720304Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:23.720319Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:23.720402Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:23.907352Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:24.103579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:24.110087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:24.347277Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:26.311675Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331016906948412:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:26.311679Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331016906948423:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:26.311791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:26.312152Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331016906948434:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:26.312214Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:26.315432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:26.325186Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331016906948433:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:41:26.455126Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331016906948488:2533] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:26.694292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:27.415046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:27.694066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:41:27.710099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) Trying to start YDB, gRPC: 12645, MsgBus: 7074 2026-01-04T02:41:28.512752Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331022360107373:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:28.512814Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:28.532925Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:28.605322Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591331022360107334:2081] 1767494488511653 != 1767494488511656 2026-01-04T02:41:28.608232Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:28.662463Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:28.662542Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:28.666994Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:28.708049Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, 
will use file: (empty maybe) 2026-01-04T02:41:28.708073Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:28.708081Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:28.708158Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:28.788369Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:29.090535Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:29.104974Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:29.519185Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:31.759705Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331035245010102:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:31.759862Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:31.760297Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331035245010117:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:31.760320Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331035245010116:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:31.760380Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:31.764150Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 7205 ... chemaChangedResult TxId: 281474976710666 2026-01-04T02:41:48.383254Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037948 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-01-04T02:41:48.383348Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037944 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-01-04T02:41:48.383448Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037942 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-01-04T02:41:48.383470Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037951 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-01-04T02:41:48.383526Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037889 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-01-04T02:41:48.383589Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037906 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-01-04T02:41:48.383617Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037904 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-01-04T02:41:48.383669Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037902 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-01-04T02:41:48.383699Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037898 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-01-04T02:41:48.383726Z node 4 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037900 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710666 2026-01-04T02:41:48.390533Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:41:48.399254Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:41:48.405551Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710668, at schemeshard: 72057594046644480 Trying to start YDB, gRPC: 24958, MsgBus: 10262 2026-01-04T02:41:49.887527Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591331112273154425:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:49.887587Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:49.926685Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:50.005353Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:50.027878Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:50.027978Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:50.041363Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:50.117213Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:50.117249Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:50.117258Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:50.117361Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:50.203917Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:50.691178Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:50.700453Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:41:50.715135Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:41:50.794853Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 
281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:41:50.895009Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:53.880671Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331129453024519:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:53.880773Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331129453024552:2333], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:53.880965Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:53.881330Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331129453024558:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:53.881414Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:53.886202Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:53.904854Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331129453024557:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-04T02:41:53.991997Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331129453024612:2597] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:54.027125Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:54.434587Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:54.685218Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:41:54.693073Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:41:54.708766Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:41:54.889900Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331112273154425:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:54.890001Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryCancelWrite [GOOD] >> KqpQuery::QueryCancelWriteImmediate >> KqpStats::JoinStatsBasicYql-StreamLookupJoin [GOOD] >> KqpStats::JoinStatsBasicScan >> KqpExplain::Predicates [GOOD] >> KqpParams::EmptyListForListParameterExecuteDataQuery [GOOD] >> KqpParams::EmptyListForListParameterExecuteQuery >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey+UseSink [GOOD] >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPartitionChooserSuite::TPartitionChooserActor_SplitMergeEnabled_SourceId_OldPartitionExists_NotBoundary_Test [GOOD] Test command err: 2026-01-04T02:40:09.710504Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330685272989466:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:09.710604Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:09.742971Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:40:09.743768Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591330685729126204:2163];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:09.743962Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:09.751374Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:40:09.929179Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:09.929644Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:09.983375Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:09.983498Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:09.984929Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:09.985017Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:10.002198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:10.004969Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:40:10.006641Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:10.065858Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:10.122424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/00189d/r3tmp/yandexy1c9Jo.tmp 2026-01-04T02:40:10.122447Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/00189d/r3tmp/yandexy1c9Jo.tmp 2026-01-04T02:40:10.122647Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/00189d/r3tmp/yandexy1c9Jo.tmp 2026-01-04T02:40:10.122741Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:10.163751Z INFO: TTestServer started on Port 8524 GrpcPort 23894 2026-01-04T02:40:10.186795Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:10.197481Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:23894 2026-01-04T02:40:10.361350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:10.463922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:10.719964Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:10.748607Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:12.541049Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330698614028385:2305], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:12.541049Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330698614028395:2308], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:12.541142Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:12.541396Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591330698614028400:2310], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:12.541488Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:12.545777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:12.563799Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591330698614028399:2309], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-04T02:40:12.806270Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591330698614028428:2181] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:12.830108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:12.830339Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591330698157892812:2335], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:40:12.830822Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZmY4ZGQ5N2ItNTNmNDdmODctYTJjOGM4MWItZTUxYjI1NGQ=, ActorId: [1:7591330698157892783:2329], ActorState: ExecuteState, LegacyTraceId: 01ke3e0t8c6ed31gypvndjfekj, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:40:12.831237Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591330698614028444:2315], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:40:12.831579Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ODVmYjBjNmYtOGQ3OGYwNWUtNDQ1ZTBjOTUtMzZkM2RlYjE=, ActorId: [2:7591330698614028383:2304], ActorState: ExecuteState, LegacyTraceId: 01ke3e0t7v407xab6hkbaayghm, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:40:12.832718Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:40:12.832746Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' bec ... 
`//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES (3059208687299993691, "Root", "00595F536F757263655F37", 1767494513328, 1767494513328, 0, 13); 2026-01-04T02:41:53.516454Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:62: TTableHelper SelectQuery: --!syntax_v1 DECLARE $Hash AS Uint64; DECLARE $Topic AS Utf8; DECLARE $SourceId AS Utf8; SELECT Partition, CreateTime, AccessTime, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash == $Hash AND Topic == $Topic AND ProducerId == $SourceId; 2026-01-04T02:41:53.516482Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:63: TTableHelper UpdateQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; DECLARE $SeqNo AS Uint64; UPSERT INTO `//Root/.metadata/TopicPartitionsMapping` (Hash, Topic, ProducerId, CreateTime, AccessTime, Partition, SeqNo) VALUES ($Hash, $Topic, $SourceId, $CreateTime, $AccessTime, $Partition, $SeqNo); 2026-01-04T02:41:53.516494Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__table_helper.h:64: TTableHelper UpdateAccessTimeQuery: --!syntax_v1 DECLARE $SourceId AS Utf8; DECLARE $Topic AS Utf8; DECLARE $Hash AS Uint64; DECLARE $Partition AS Uint32; DECLARE $CreateTime AS Uint64; DECLARE $AccessTime AS Uint64; UPDATE `//Root/.metadata/TopicPartitionsMapping` SET AccessTime = $AccessTime WHERE Hash = $Hash AND Topic = $Topic AND ProducerId = $SourceId AND Partition = $Partition; 2026-01-04T02:41:53.516524Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__sm_chooser_actor.h:116: TPartitionChooser [11:7591331132047530010:4192] (SourceId=Y_Source_7, PreferedPartition=(NULL)) GetOwnershipFast Partition=1 TabletId=1001 2026-01-04T02:41:53.516683Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [11:7591331132047530011:4192], Recipient [11:7591331110572692519:3597]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1001 Status: OK ServerId: [11:7591331132047530010:4192] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2026-01-04T02:41:53.516785Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 271188557, Sender [11:7591331132047530010:4192], Recipient [11:7591331110572692519:3597]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 1 SourceId: "Y_Source_7" 2026-01-04T02:41:53.516855Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__sm_chooser_actor.h:139: StateOwnershipFast, received event# 271188558, Sender [11:7591331110572692519:3597], Recipient [11:7591331132047530010:4192]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2026-01-04T02:41:53.516896Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:88: TPartitionChooser [11:7591331132047530010:4192] (SourceId=Y_Source_7, PreferedPartition=(NULL)) InitTable: SourceId=Y_Source_7 TopicsAreFirstClassCitizen=1 UseSrcIdMetaMappingInFirstClass=1 2026-01-04T02:41:53.516969Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [11:7591331132047530010:4192], Recipient [11:7591331110572692519:3597]: NActors::TEvents::TEvPoison 2026-01-04T02:41:53.517303Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:101: StateInitTable, received event# 
277020685, Sender [11:7591331054738115349:2072], Recipient [11:7591331132047530010:4192]: NKikimr::NMetadata::NProvider::TEvManagerPrepared 2026-01-04T02:41:53.517337Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:111: TPartitionChooser [11:7591331132047530010:4192] (SourceId=Y_Source_7, PreferedPartition=(NULL)) StartKqpSession 2026-01-04T02:41:53.521267Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:132: StateCreateKqpSession, received event# 271646728, Sender [11:7591331054738115567:2269], Recipient [11:7591331132047530010:4192]: NKikimrKqp.TEvCreateSessionResponse Error: "" Response { SessionId: "ydb://session/3?node_id=11&id=NjVmZDY4ZTgtOTExY2I4YzMtYjg2OTJkYWUtM2M5MDMwNzQ=" NodeId: 11 } YdbStatus: SUCCESS ResourceExhausted: false 2026-01-04T02:41:53.521314Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:142: TPartitionChooser [11:7591331132047530010:4192] (SourceId=Y_Source_7, PreferedPartition=(NULL)) Select from the table 2026-01-04T02:41:53.738590Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:163: StateSelect, received event# 271646721, Sender [11:7591331054738115567:2269], Recipient [11:7591331132047530010:4192]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=11&id=NjVmZDY4ZTgtOTExY2I4YzMtYjg2OTJkYWUtM2M5MDMwNzQ=" PreparedQuery: "85ca2fa5-eb712422-8684cf2a-50ee132b" QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { id: "01ke3e3x229bb0h85k1y9qkksa" } YdbResults { columns { name: "Partition" type { optional_type { item { type_id: UINT32 } } } } columns { name: "CreateTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "AccessTime" type { optional_type { item { type_id: UINT64 } } } } columns { name: "SeqNo" type { optional_type { item { type_id: UINT64 } } } } rows { items { uint32_value: 0 } items { uint64_value: 1767494513328 } items { uint64_value: 1767494513328 } items { uint64_value: 13 } } format: FORMAT_VALUE } QueryDiagnostics: "" } YdbStatus: SUCCESS 2026-01-04T02:41:53.738780Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:151: TPartitionChooser [11:7591331132047530010:4192] (SourceId=Y_Source_7, PreferedPartition=(NULL)) Selected from table PartitionId=0 SeqNo=13 2026-01-04T02:41:53.738816Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__sm_chooser_actor.h:151: TPartitionChooser [11:7591331132047530010:4192] (SourceId=Y_Source_7, PreferedPartition=(NULL)) GetOldSeqNo 2026-01-04T02:41:53.739054Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [11:7591331132047530043:4192], Recipient [11:7591331110572692518:3596]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1000 Status: OK ServerId: [11:7591331132047530010:4192] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2026-01-04T02:41:53.739179Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 271187968, Sender [11:7591331132047530010:4192], Recipient [11:7591331110572692518:3596]: NKikimrClient.TPersQueueRequest PartitionRequest { Partition: 0 CmdGetMaxSeqNo { SourceId: "\000Y_Source_7" } PipeClient { RawX1: 7591331132047530043 RawX2: 47244644448 } } 
2026-01-04T02:41:53.739373Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__sm_chooser_actor.h:209: TPartitionChooser [11:7591331132047530010:4192] (SourceId=Y_Source_7, PreferedPartition=(NULL)) OnPartitionChosen 2026-01-04T02:41:53.739478Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [11:7591331132047530010:4192], Recipient [11:7591331110572692518:3596]: NActors::TEvents::TEvPoison 2026-01-04T02:41:53.739525Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 269877760, Sender [11:7591331132047530044:4192], Recipient [11:7591331110572692520:3598]: NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 1002 Status: OK ServerId: [11:7591331132047530010:4192] Leader: 1 Dead: 0 Generation: 1 VersionInfo: } 2026-01-04T02:41:53.739578Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 271188557, Sender [11:7591331132047530010:4192], Recipient [11:7591331110572692520:3598]: NKikimrPQ.TEvCheckPartitionStatusRequest Partition: 2 2026-01-04T02:41:53.739735Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:240: StateCheckPartition, received event# 271188558, Sender [11:7591331110572692520:3598], Recipient [11:7591331132047530010:4192]: NKikimrPQ.TEvCheckPartitionStatusResponse Status: Active 2026-01-04T02:41:53.739774Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:174: TPartitionChooser [11:7591331132047530010:4192] (SourceId=Y_Source_7, PreferedPartition=(NULL)) Update the table 2026-01-04T02:41:53.740065Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_ut.cpp:382: StateMockWork, received event# 65543, Sender [11:7591331132047530010:4192], Recipient [11:7591331110572692520:3598]: NActors::TEvents::TEvPoison Received TEvChooseResult: 2 2026-01-04T02:41:53.916028Z node 11 :PQ_PARTITION_CHOOSER TRACE: partition_chooser_impl__abstract_chooser_actor.h:212: StateUpdate, received event# 271646721, Sender [11:7591331054738115567:2269], Recipient [11:7591331132047530010:4192]: NKikimrKqp.TEvQueryResponse Response { SessionId: "ydb://session/3?node_id=11&id=NjVmZDY4ZTgtOTExY2I4YzMtYjg2OTJkYWUtM2M5MDMwNzQ=" PreparedQuery: "99c742e4-c5386c36-b68d052a-e33955ee" QueryParameters { Name: "$AccessTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$CreateTime" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Hash" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Partition" Type { Kind: Data Data { Scheme: 2 } } } QueryParameters { Name: "$SourceId" Type { Kind: Data Data { Scheme: 4608 } } } QueryParameters { Name: "$SeqNo" Type { Kind: Data Data { Scheme: 4 } } } QueryParameters { Name: "$Topic" Type { Kind: Data Data { Scheme: 4608 } } } TxMeta { } QueryDiagnostics: "" } YdbStatus: SUCCESS ConsumedRu: 108 2026-01-04T02:41:53.916073Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:183: TPartitionChooser [11:7591331132047530010:4192] (SourceId=Y_Source_7, PreferedPartition=(NULL)) HandleUpdate PartitionPersisted=0 Status=SUCCESS 2026-01-04T02:41:53.916113Z node 11 :PQ_PARTITION_CHOOSER DEBUG: partition_chooser_impl__abstract_chooser_actor.h:305: TPartitionChooser [11:7591331132047530010:4192] (SourceId=Y_Source_7, PreferedPartition=(NULL)) ReplyResult: Partition=2, SeqNo=157 2026-01-04T02:41:53.916139Z node 11 :PQ_PARTITION_CHOOSER DEBUG: 
partition_chooser_impl__abstract_chooser_actor.h:268: TPartitionChooser [11:7591331132047530010:4192] (SourceId=Y_Source_7, PreferedPartition=(NULL)) Start idle Run query: --!syntax_v1 SELECT Partition, SeqNo FROM `//Root/.metadata/TopicPartitionsMapping` WHERE Hash = 3059208687299993691 AND Topic = "Root" AND ProducerId = "00595F536F757263655F37" 2026-01-04T02:41:54.369130Z node 11 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1259} ActorId: [11:7591331136342497442:2673] TxId: 281474976715701. Ctx: { TraceId: 01ke3e3xg403wz153025rxnfpe, Database: /Root, SessionId: ydb://session/3?node_id=11&id=NTM3OTY1N2YtYWU1NDE1NTgtYjA4MjZiMWMtZDM0MDhkMzY=, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 12 trace_id# 2026-01-04T02:41:54.369295Z node 11 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [11:7591331136342497448:2673], TxId: 281474976715701, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3e3xg403wz153025rxnfpe. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=11&id=NTM3OTY1N2YtYWU1NDE1NTgtYjA4MjZiMWMtZDM0MDhkMzY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [11:7591331136342497442:2673], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> KqpStats::StatsProfile >> KqpQuery::UdfTerminate >> KqpParams::MissingParameter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::Predicates [GOOD] Test command err: Trying to start YDB, gRPC: 31436, MsgBus: 2875 2026-01-04T02:41:20.063607Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330987559218200:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:20.063679Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:20.434002Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:20.458906Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:20.459899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:20.539358Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:20.541705Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:20.629350Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:20.767091Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:20.767195Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:20.767203Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:20.767284Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:21.095162Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:21.451643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:21.530316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.712727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.890691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.963374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.615480Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331000444121928:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.615608Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.616074Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331000444121938:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.616172Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.918676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.948430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.979258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.014488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.042601Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.081939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.121904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.199679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.276746Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331004739090107:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.276829Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.276842Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331004739090112:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.277025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331004739090114:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.277062Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.279905Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:24.288618Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331004739090115:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:24.382849Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331004739090167:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:25.064089Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330987559218200:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:25.064150Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"TopSort","Limit":"4","TopSortBy":"row.Data"},{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"1","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"0"}],"Node Type":"TopSort-TableRangeScan"}],"Node Type":"Merge","SortColumns":["Data (Asc)"],"PlanNodeType":"Connectio ... alization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:48.927320Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:41:49.058772Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:49.058879Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:49.064214Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:49.078519Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:49.080957Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591331110875760144:2081] 1767494508896334 != 1767494508896337 2026-01-04T02:41:49.096954Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:49.164408Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:49.164438Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:49.164456Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:49.164551Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:49.289903Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-04T02:41:49.721160Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:49.727241Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:49.740508Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:49.856718Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:49.978528Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:50.042777Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:50.165468Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:53.100976Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331132350598504:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:53.101067Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:53.101358Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331132350598513:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:53.101403Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:53.188357Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:53.238589Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:53.284021Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:53.332442Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:53.376731Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:53.425281Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:53.499856Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:53.559138Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:53.671433Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331132350599388:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:53.671546Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:53.671962Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331132350599393:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:53.672012Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331132350599394:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:53.672367Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:53.676995Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:53.695337Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331132350599397:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:53.748364Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331132350599448:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:53.897872Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331110875760169:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:53.897954Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:55.740284Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpLimits::ManyPartitions [GOOD] >> KqpLimits::ManyPartitionsSorting |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpStats::MultiTxStatsFullExpYql [GOOD] >> KqpStats::MultiTxStatsFullExpScan >> KqpExplain::ExplainStream >> KqpQuery::RewriteIfPresentToMap [GOOD] >> KqpQuery::RowsLimit >> TPQTest::TestOwnership [GOOD] >> TPQTest::TestPQCacheSizeManagement >> KqpQuery::QueryResultsTruncated [GOOD] >> KqpQuery::QueryStats+UseSink >> KqpQuery::UpdateThenDelete-UseSink [GOOD] >> KqpTypes::UnsafeTimestampCastV0 >> KqpParams::CheckQueryCacheForUnpreparedQuery [GOOD] >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries >> KqpLimits::WaitCAsStateOnAbort ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateThenDelete-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18047, MsgBus: 24229 2026-01-04T02:41:32.773504Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331041014941857:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:32.773982Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:33.025933Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:33.053044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:33.053192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:33.113422Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331041014941831:2081] 1767494492772606 != 1767494492772609 2026-01-04T02:41:33.131421Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:33.139192Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2026-01-04T02:41:33.227516Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:33.264057Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:33.264079Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:33.264086Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:33.264178Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:33.726900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:33.783141Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:33.817892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:33.937711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:34.087837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:34.150114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:36.179719Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331058194812885:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:36.179887Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:36.180213Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331058194812895:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:36.180253Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:36.461427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:36.494698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:36.523850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:36.553464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:36.580002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:36.614108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:36.647395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:36.700289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:36.795609Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331058194813763:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:36.795686Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:36.796112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331058194813769:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:36.796149Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:36.796187Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331058194813768:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:36.800964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:36.813657Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331058194813772:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:36.885643Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331058194813823:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:37.773680Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331041014941857:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:37.773737Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:38.484836Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591331066784748746:2540], status: GENERIC_ERROR, issues:
:3:26: Error: mismatched input '[' expecting {'*', '(', '@', '$', ABORT, ACTION, ADD, AFTER, ALL, ALTER, ANALYZE, AND, ANSI, ANY, ARRAY, AS, ASC, ASSUME, ASYMMETRIC, ASYNC, AT, ... 841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591331100039544407:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:51.052347Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8943, MsgBus: 7379 2026-01-04T02:41:53.039683Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591331129216978977:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:53.044354Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:53.068574Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:53.141918Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:53.143489Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591331129216978932:2081] 1767494513037452 != 1767494513037455 2026-01-04T02:41:53.185305Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:53.185404Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:53.190545Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:53.287885Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:53.287909Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:53.287921Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:53.288017Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:53.348085Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:53.821418Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:53.838754Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:53.948430Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:54.067304Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:54.117500Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:54.217974Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.722195Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331142101882703:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:56.722302Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:56.722593Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331142101882712:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:56.722655Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:56.839566Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.871866Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.904464Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.942669Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.982668Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.022529Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.066381Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.118837Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.228970Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331146396850887:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.229041Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.229112Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331146396850892:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.229320Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331146396850894:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.229389Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.232208Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:57.241446Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591331146396850895:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:57.322976Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591331146396850947:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:58.039614Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591331129216978977:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:58.039655Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; [] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpQueryDiscard::DiscardSelectMultiLine [GOOD] >> KqpExplain::UpdateOn+UseSink [GOOD] >> KqpExplain::UpdateOn-UseSink >> KqpScripting::StreamExecuteYqlScriptWriteCancelAfterBruteForced [GOOD] >> KqpScripting::StreamOperationTimeout >> KqpParams::Decimal+QueryService+UseSink [GOOD] >> KqpBatchDelete::ManyPartitions_1 [GOOD] >> KqpQuery::QueryCancelWriteImmediate [GOOD] >> KqpQuery::QueryExplain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQueryDiscard::DiscardSelectMultiLine [GOOD] Test command err: Trying to start YDB, gRPC: 63283, MsgBus: 5549 2026-01-04T02:41:20.062496Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330989129098596:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:20.062553Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:20.380937Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:20.482040Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:20.482161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:20.485682Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:20.532076Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:20.623627Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:20.770414Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:20.770443Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:20.770448Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:20.770538Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-01-04T02:41:21.082036Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:21.517044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:21.530236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:41:21.594267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.745282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.893412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.979657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.514908Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331002014002324:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.515039Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.515297Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331002014002334:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.515337Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.918426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.954786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.984188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.012268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.039566Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.078351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.110704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.152654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.220261Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331006308970499:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.220351Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.220455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331006308970504:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.220540Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331006308970506:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.220580Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.223737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:24.233132Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331006308970508:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:41:24.301214Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331006308970559:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:25.062860Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330989129098596:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:25.062929Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:27.703530Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591331019193872984:2598], status: GENERIC_ERROR, issues:
:1:16: Warning: DISCARD can only be used at the top level, not inside subqueries, code: 4541
: Error: DISCARD can only be used at the top level, not inside subqueries, code: 2008 2026-0 ... pt_executions 2026-01-04T02:41:51.706782Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:51.816847Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:51.816941Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:51.819272Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:51.845922Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:51.881308Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:51.881333Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:51.881342Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:51.881426Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:52.338385Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:52.357369Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:52.433753Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:52.616815Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:52.658020Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:52.680505Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:55.447164Z node 4 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331140924279031:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:55.447263Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:55.447530Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331140924279040:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:55.447566Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:55.526568Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:55.564452Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:55.607277Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:55.650376Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:55.692547Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:55.772145Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:55.813359Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:55.867443Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:55.959393Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331140924279922:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:55.959503Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:55.959838Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331140924279928:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:55.959876Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:55.959875Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331140924279927:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:55.963870Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:55.977995Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591331140924279931:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:56.053416Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591331145219247278:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:56.617946Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591331123744408022:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:56.618040Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:02.028375Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:7591331170989051759:2652], status: GENERIC_ERROR, issues:
:1:0: Error: mismatched input 'sub' expecting {';', '(', '$', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, TRUNCATE, UPDATE, UPSERT, USE, VALUES} 2026-01-04T02:42:02.028913Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=OTU5MzA4MDUtYjA5N2I5YWItOTQ0YjAzODUtZDEzYWNjNDM=, ActorId: [4:7591331170989051757:2651], ActorState: ExecuteState, LegacyTraceId: 01ke3e454v111tdqzzdyzbvjct, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 1 } message: "mismatched input \'sub\' expecting {\';\', \'(\', \'$\', ALTER, ANALYZE, BACKUP, BATCH, COMMIT, CREATE, DECLARE, DEFINE, DELETE, DISCARD, DO, DROP, EVALUATE, EXPLAIN, EXPORT, FOR, FROM, GRANT, IF, IMPORT, INSERT, PARALLEL, PRAGMA, PROCESS, REDUCE, REPLACE, RESTORE, REVOKE, ROLLBACK, SELECT, SHOW, TRUNCATE, UPDATE, UPSERT, USE, VALUES}" end_position { row: 1 } severity: 1 } tx_id# trace_id# |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpExplain::SortStage >> KqpStats::StatsProfile [GOOD] >> KqpStats::StreamLookupStats+StreamLookupJoin >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown [GOOD] >> KqpLimits::ManyPartitionsSorting [GOOD] >> KqpParams::MissingParameter [GOOD] >> KqpParams::MissingOptionalParameter+UseSink >> KqpQuery::UdfTerminate [GOOD] >> KqpQuery::UdfMemoryLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::Decimal+QueryService+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 4314, MsgBus: 11383 2026-01-04T02:41:20.121299Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330989879413936:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:20.121386Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:20.465927Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:20.511222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:20.511315Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:20.636537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:20.661480Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:20.664749Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330989879413897:2081] 1767494480119235 != 1767494480119238 2026-01-04T02:41:20.732983Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:20.789112Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is 
empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:20.789143Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:20.789149Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:20.789232Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:21.139143Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:21.446294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:21.458631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:41:21.527769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.730728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.889640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.962203Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.574750Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331002764317664:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.574826Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.575068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331002764317674:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.575123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.918429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.946432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.975844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.002170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.032112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.064879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.099092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.158246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.246234Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331007059285846:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.246296Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.246305Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331007059285851:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.246433Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331007059285853:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.246469Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.249336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:24.257925Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331007059285854:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:41:24.341934Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331007059285906:3776] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:25.121944Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330989879413936:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:25.122016Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 62732, MsgBus: 7152 2026-01-04T02:41:26.811626Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor ... ble.cpp:689) 2026-01-04T02:41:58.540939Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:58.577398Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:58.621118Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:58.665856Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:58.706508Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:58.777490Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:58.883724Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331151563486447:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:58.883832Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:58.883865Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331151563486452:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:58.884017Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331151563486454:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:58.884083Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:58.888857Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:58.903080Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331151563486455:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:58.984554Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331151563486507:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:59.382483Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331134383614537:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:59.382683Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:01.082776Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.253449Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7591331168743356229:2582], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:4:17: Error: At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At tuple, At function: SqlProjectItem, At lambda
:3:25: Error: At function: Parameter, At function: DataType
:3:25: Error: Invalid decimal precision: 99 2026-01-04T02:42:02.254257Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=MjA5Y2U3ZmItZWEzZDg3OTctMTM4Y2NhM2YtNTk4ODBhZWU=, ActorId: [5:7591331168743356227:2581], ActorState: ExecuteState, LegacyTraceId: 01ke3e45br0htx9ddfd2f3mkz0, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 4 column: 17 } message: "At function: RemovePrefixMembers, At function: Unordered, At function: PersistableRepr, At function: OrderedSqlProject, At tuple, At function: SqlProjectItem, At lambda" end_position { row: 4 column: 17 } severity: 1 issues { position { row: 3 column: 25 } message: "At function: Parameter, At function: DataType" end_position { row: 3 column: 25 } severity: 1 issues { position { row: 3 column: 25 } message: "Invalid decimal precision: 99" end_position { row: 3 column: 25 } severity: 1 } } } } tx_id# trace_id# 2026-01-04T02:42:02.371240Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTVlZjZlODAtNmEwYWY4NDctYzU0YWY3OTAtNjdmYWI0NzA=, ActorId: [5:7591331168743356233:2584], ActorState: ExecuteState, LegacyTraceId: 01ke3e45csewqjabzzkaw5jsjt, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1555: ydb/core/kqp/query_data/kqp_query_data.cpp:266: Parameter $value22 type mismatch, expected: { Kind: Data Data { Scheme: 4865 DecimalParams { Precision: 22 Scale: 9 } } }, actual: Type (Data), schemeType: Decimal(35,10), schemeTypeId: 4865 status# BAD_REQUEST issues# trace_id# 2026-01-04T02:42:02.419345Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7591331168743356252:2592], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:7:29: Error: At function: KiWriteTable!
:7:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:4:25: Error: Implicit decimal cast would lose precision
:7:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:7:50: Error: Failed to convert input columns types to scheme types, code: 2031 2026-01-04T02:42:02.423169Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=Zjk5NzY5ODItMzA4YjgxNWYtNWE1ZjU5NjUtNjZlMTgxY2I=, ActorId: [5:7591331168743356250:2591], ActorState: ExecuteState, LegacyTraceId: 01ke3e45grbwe9y0ege25xfwjc, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 7 column: 29 } message: "At function: KiWriteTable!" end_position { row: 7 column: 29 } severity: 1 issues { position { row: 7 column: 50 } message: "Failed to convert type: Struct<\'Key\':Int32,\'Value22\':Decimal(35,10),\'Value35\':Decimal(35,10)> to Struct<\'Key\':Int32?,\'Value22\':Decimal(22,9)?,\'Value35\':Decimal(35,10)?>" end_position { row: 7 column: 50 } severity: 1 issues { position { row: 4 column: 25 } message: "Implicit decimal cast would lose precision" end_position { row: 4 column: 25 } severity: 1 } issues { position { row: 7 column: 50 } message: "Failed to convert \'Value22\': Decimal(35,10) to Optional" end_position { row: 7 column: 50 } severity: 1 } } issues { position { row: 7 column: 50 } message: "Failed to convert input columns types to scheme types" end_position { row: 7 column: 50 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:42:02.469502Z node 5 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [5:7591331168743356263:2597], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:3:29: Error: At function: KiWriteTable!
:3:50: Error: Failed to convert type: Struct<'Key':Int32,'Value22':Decimal(35,10),'Value35':Decimal(35,10)> to Struct<'Key':Int32?,'Value22':Decimal(22,9)?,'Value35':Decimal(35,10)?>
:0:14: Error: Implicit decimal cast would lose precision
:3:50: Error: Failed to convert 'Value22': Decimal(35,10) to Optional
:3:50: Error: Failed to convert input columns types to scheme types, code: 2031 2026-01-04T02:42:02.472056Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=5&id=NjEyZjc5MjctMjlkMzdlMzQtNjY5MTZkMjItYTBmNmMzNjg=, ActorId: [5:7591331168743356261:2596], ActorState: ExecuteState, LegacyTraceId: 01ke3e45jb4msj5h1g5n55s7dc, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 29 } message: "At function: KiWriteTable!" end_position { row: 3 column: 29 } severity: 1 issues { position { row: 3 column: 50 } message: "Failed to convert type: Struct<\'Key\':Int32,\'Value22\':Decimal(35,10),\'Value35\':Decimal(35,10)> to Struct<\'Key\':Int32?,\'Value22\':Decimal(22,9)?,\'Value35\':Decimal(35,10)?>" end_position { row: 3 column: 50 } severity: 1 issues { position { column: 14 } message: "Implicit decimal cast would lose precision" end_position { column: 14 } severity: 1 } issues { position { row: 3 column: 50 } message: "Failed to convert \'Value22\': Decimal(35,10) to Optional" end_position { row: 3 column: 50 } severity: 1 } } issues { position { row: 3 column: 50 } message: "Failed to convert input columns types to scheme types" end_position { row: 3 column: 50 } issue_code: 2031 severity: 1 } } } tx_id# trace_id# |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpScripting::StreamExecuteYqlScriptScanWriteCancelAfterBruteForced [GOOD] >> KqpScripting::StreamExecuteYqlScriptScanScalar >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchDelete::ManyPartitions_1 [GOOD] Test command err: Trying to start YDB, gRPC: 15753, MsgBus: 22053 2026-01-04T02:38:28.699398Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330251759807338:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:28.699488Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:28.917312Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:28.937081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:28.937176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:28.975044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:29.010305Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330251759807309:2081] 1767494308697730 != 1767494308697733 2026-01-04T02:38:29.012851Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:29.057647Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:29.057670Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize 
from file: (empty maybe) 2026-01-04T02:38:29.057676Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:29.057770Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:29.177788Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:29.389431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:29.446051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.582482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.696588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:29.731949Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:29.757415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:31.334254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330264644711074:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:31.334390Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:31.334723Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330264644711084:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:31.334770Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:31.611651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:31.640838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:31.669634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:31.697589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:31.729175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:31.763105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:31.822504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:31.866197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:31.926699Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330264644711959:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:31.926811Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:31.927019Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330264644711964:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:31.927047Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330264644711965:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:31.927060Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:31.930421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:31.940820Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330264644711968:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:38:31.998950Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330264644712019:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:33.704463Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330251759807338:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:33.705796Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:38:33.746386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... 2Z node 20 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7591331115358468781:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:49.311148Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:49.365967Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:49.390018Z node 20 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [20:7591331115358468745:2081] 1767494509308583 != 1767494509308586 2026-01-04T02:41:49.458995Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:49.462036Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:49.462210Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:49.469059Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:49.551620Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:49.611711Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:49.611754Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:49.611770Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:49.611910Z node 20 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:50.319998Z node 20 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:50.611881Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:50.638751Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:50.754898Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:51.111547Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:51.294201Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:54.313227Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[20:7591331115358468781:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:54.313345Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:56.352896Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7591331145423241725:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:56.353107Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:56.353962Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7591331145423241735:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:56.354065Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:56.494051Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.554174Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.616020Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.676722Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.816612Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.890286Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.950666Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.044174Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.189836Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7591331149718209922:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.190006Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.190043Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7591331149718209927:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.194032Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7591331149718209929:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.194234Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.198116Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:57.220623Z node 20 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [20:7591331149718209930:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:57.277291Z node 20 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [20:7591331149718209982:3794] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:00.747319Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown [GOOD] >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] >> KqpParams::EmptyListForListParameterExecuteQuery [GOOD] >> KqpStats::MultiTxStatsFullExpScan [GOOD] >> KqpStats::MultiTxStatsFullYql >> KqpQuery::MixedCreateAsSelect [GOOD] >> KqpQuery::RowsLimit [GOOD] >> KqpQuery::RowsLimitServiceOverride >> KqpExplain::LimitOffset ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopSaFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 6955, MsgBus: 5297 2026-01-04T02:41:13.496371Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330960779137795:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:13.496451Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:13.764295Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:13.769155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:13.769262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:13.796812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:13.891189Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:13.897902Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330960779137757:2081] 1767494473492204 != 1767494473492207 2026-01-04T02:41:14.024932Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:14.060549Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:14.060600Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
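Note on the GENERIC_ERROR compile failures logged above ("Invalid decimal precision: 99", "Failed to convert type: Struct<...Decimal(35,10)...> to Struct<...Decimal(22,9)?...>", "Parameter $value22 type mismatch"): they all follow from YQL's Decimal rules, where precision must be at most 35 and a wider Decimal(35,10) value is not implicitly narrowed into a Decimal(22,9) column. The following is only a minimal hypothetical YQL sketch of queries that would hit the same class of errors; the table path is an assumption (only the column names Key, Value22, Value35 appear in the log), and it is not the actual test source.

-- 1) Precision out of range: Decimal precision must be in 1..35, so this DECLARE
--    is rejected at type annotation ("Invalid decimal precision: 99").
DECLARE $bad AS Decimal(99, 9);

-- 2) Narrowing mismatch: a Decimal(35,10) parameter written into a Decimal(22,9)
--    column fails ("Implicit decimal cast would lose precision",
--    "Parameter $value22 type mismatch").
DECLARE $value22 AS Decimal(35, 10);
UPSERT INTO `/Root/DecimalTable`   -- hypothetical table path
    (Key, Value22, Value35)
VALUES (1, $value22, Decimal("1.5", 35, 10));

-- A variant that should compile declares the parameter with the column's exact
-- type, or narrows explicitly (an explicit CAST yields an Optional result):
-- DECLARE $value22 AS Decimal(22, 9);
-- ... VALUES (1, CAST($value22 AS Decimal(22, 9)), ...);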
2026-01-04T02:41:14.060609Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:14.060705Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:14.519847Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:14.623066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:41:14.629170Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:41:14.631255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:14.631998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:41:14.635018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767494474683, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:41:14.636444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-04T02:41:14.636501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2026-01-04T02:41:14.636804Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7591330960779138291:2252] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-04T02:41:14.637348Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330960779137725:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.637386Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330960779137728:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.637536Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330960779137731:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.637790Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330960779138199:2203][/Root] Path was updated to new version: owner# [1:7591330960779138047:2120], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 
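Further down in this GenericFederatedQuery::IcebergHadoopSaFilterPushdown output, the connector mock dumps the pushed-down predicate as protobuf (filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { ... type_id: DATE ... uint32_value: 20326 } } }). Roughly, that ListSplits request corresponds to a federated query of the shape sketched below; the external data source binding name is an assumption, only the table name "example_1" and the column names come from the dump, and 20326 appears to be the Date value encoded as days since the Unix epoch (about 2025-08-26).

SELECT colDate, colInt32, colString
FROM iceberg_source.`example_1`        -- hypothetical external data source binding
WHERE colDate = Date("2025-08-26");    -- pushed down as uint32_value: 20326 (days since epoch)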
2026-01-04T02:41:14.637847Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330960779138445:2289][/Root] Path was updated to new version: owner# [1:7591330960779138437:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:14.637870Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7591330960779138291:2252] Ack update: ack to# [1:7591330960779138115:2151], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-04T02:41:14.638123Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330960779138443:2288][/Root] Path was updated to new version: owner# [1:7591330960779138436:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:14.638174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-01-04T02:41:16.351687Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-04T02:41:16.352863Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/c5zo/002c8e/r3tmp/spilling-tmp-runner/node_1_79510bb8-5c570a0e-7c70096a-1dd88ebe, actor: [1:7591330973664040486:2305] 2026-01-04T02:41:16.353027Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/c5zo/002c8e/r3tmp/spilling-tmp-runner 2026-01-04T02:41:16.354876Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330973664040492:2493][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7591330960779138047:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:16.357633Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330973664040512:2495][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7591330960779138047:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:16.357633Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330973664040513:2496][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7591330960779138047:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:16.359641Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3e2px206652z5vksh86dbn", Request has 18444976579233.192022s seconds to be completed E0104 02:41:16.361576975 668928 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 
02:41:16.361729854 668928 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E0104 02:41:16.369522402 668928 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.369657984 668928 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:41:16.372143Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3e2px206652z5vksh86dbn", Created new session, sessionId: ydb://session/3?node_id=1&id=MWJjOTM4ZDUtMzdhZTNjOTUtZjExYzI1NzQtZTFjNWVkNzE=, workerId: [1:7591330973664040532:2327], database: /Root, longSession: 1, local sessions count: 1 2026-01-04T02:41:16.372389Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3e2px206652z5vksh86dbn 2026-01-04T02:41:16.372501Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:41:16.372533Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-04T02:41:16.372553Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-04T02:41:16.373544Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0104 02:41:16.382022959 668928 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.382186148 668928 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:41:16.383441Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0104 02:41:16.398112506 668931 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.398242640 668931 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:41:16.398535Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3e2rkb4xzbyt9sj9hdb6hy, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MWJjOTM4ZDUtMzdhZTNjOTUtZjExYzI1NzQtZTFjNWVkNzE=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7591330973664040532:2327] 2026-01-04T02:41:16.398592Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7591330973664040534:2498] 2026-01-04T02: ... { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. 
GRpcStatusCode: 0 E0104 02:42:04.398246390 690672 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:42:04.398464487 690672 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:42:04.401065Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: 2026-01-04T02:42:04.454101Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [9:7591331180614623673:2338] TxId: 281474976710662. Ctx: { TraceId: 01ke3e473tdgpavhcx0z35nja2, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZmIwY2UzODYtN2YyMDliZmItZTg2ZmJhMzYtYzI1MzJlOTg=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-04T02:42:04.460822Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. 
Ctx: { TraceId: 01ke3e473tdgpavhcx0z35nja2, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZmIwY2UzODYtN2YyMDliZmItZTg2ZmJhMzYtYzI1MzJlOTg=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7591331180614623677:2352] 2026-01-04T02:42:04.461238Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01ke3e473tdgpavhcx0z35nja2, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZmIwY2UzODYtN2YyMDliZmItZTg2ZmJhMzYtYzI1MzJlOTg=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7591331180614623678:2353] 2026-01-04T02:42:04.462776Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3e473tdgpavhcx0z35nja2", Forwarded response to sender actor, requestId: 4, sender: [9:7591331180614623570:2337], selfId: [9:7591331159139786161:2070], source: [9:7591331180614623571:2338] 2026-01-04T02:42:04.463396Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=ZmIwY2UzODYtN2YyMDliZmItZTg2ZmJhMzYtYzI1MzJlOTg=, workerId: [9:7591331180614623571:2338], local sessions count: 0 |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ManyPartitionsSorting [GOOD] Test command err: Trying to start YDB, gRPC: 10090, MsgBus: 19835 2026-01-04T02:41:20.062609Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330989057206574:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:20.062662Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:20.114323Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:41:20.455665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:20.455766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:20.472121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:20.561426Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:20.602947Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:20.768497Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:20.768522Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:20.768535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:20.768594Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:20.774555Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:21.076091Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:21.405892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:21.412752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:21.474829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.740450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.909112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.976523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.535844Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331001942110299:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.535962Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.536263Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331001942110309:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.536451Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.918527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.950495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.978990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.004637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.032051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.074925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.111679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.171092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.253169Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331006237078474:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.253247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.256527Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331006237078480:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.256527Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331006237078479:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.256636Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.260251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:24.270821Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331006237078483:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:24.349405Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331006237078534:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:25.063021Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330989057206574:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:25.063075Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:25.800655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itse ... dsTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"Data\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Table\":[{\"Path\":\"\\/Root\\/ManyShardsTable\",\"ReadRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"ReadBytes\":{\"Count\":4,\"Sum\":8800,\"Max\":2208,\"Min\":2192}}],\"OutputRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"PhysicalStageId\":0,\"FinishedTasks\":4,\"Introspections\":[\"4 tasks from DSScanMinimalThreads setting\"],\"IngressRows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"DurationUs\":{\"Count\":4,\"Sum\":293000,\"Max\":89000,\"Min\":64000},\"Mkql\":{},\"MaxMemoryUsage\":{\"Count\":4,\"Sum\":4194304,\"Max\":1048576,\"Min\":1048576,\"History\":[0,1048576,2,2097152,7,4194304,98,4194304]},\"BaseTimeMs\":1767494524147,\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":314,\"Max\":95,\"Min\":65},\"ActiveMessageMs\":{\"Count\":4,\"Max\":95,\"Min\":4},\"FirstMessageMs\":{\"Count\":4,\"Sum\":27,\"Max\":10,\"Min\":4},\"Bytes\":{\"Count\":4,\"Sum\":8168,\"Max\":2075,\"Min\":2004,\"History\":[22,664,23,1516,27,2097,44,3342,49,4087,68,5166,71,5526,78,6024,79,6342,98,8168]},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":287000,\"Max\":87000,\"Min\":61000}},\"Name\":\"4\",\"Push\":{\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":313,\"Max\":95,\"Min\":65},\"Chunks\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"ResumeMessageMs\":{\"Count\":4,\"Sum\":313,\"Max\":95,\"Min\":65},\"FirstMessageMs\":{\"Count\":4,\"Sum\":27,\"Max\":10,\"Min\":4},\"ActiveMessageMs\":{\"Count\":4,\"Max\":95,\"Min\":4},\"PauseMessageMs\":{\"Count\":4,\"Sum\":13,\"Max\":6,\"Min\":0},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":286000,\"Max\":87000,\"Min\":61000},\"WaitTimeUs\":{\"Count\":4,\"Sum\":296594,\"Max\":89453,\"Min\":63902,\"History\":[22,20054,23,41027,27,61011,44,104510,49,126700,68,169695,71,188062,78,270600,79,278766,98,296594]},\"WaitPeriods\":{\"Count\":4,\"Sum\":19,\"Max\":6,\"Min\":2}}}],\"CpuTimeUs\":{\"Count\":4,\"Sum\":12102,\"Max\":4862,\"Min\":2221,\"History\":[0,550,2,619,7,948,22,1577,23,2461,27,3060,44,4410,49,5176,68,6397,71,9364,78,9952,79,10276,98,12102]},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"Las
tMessageMs\":{\"Count\":4,\"Sum\":313,\"Max\":95,\"Min\":65},\"ActiveMessageMs\":{\"Count\":4,\"Max\":95,\"Min\":4},\"FirstMessageMs\":{\"Count\":4,\"Sum\":27,\"Max\":10,\"Min\":4},\"Bytes\":{\"Count\":4,\"Sum\":35200,\"Max\":8832,\"Min\":8768,\"History\":[22,2816,23,6720,27,9184,44,14464,49,17632,68,22208,71,23968,78,26080,79,27456,98,35200]},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":286000,\"Max\":87000,\"Min\":61000}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"Rows\":{\"Count\":4,\"Sum\":1100,\"Max\":276,\"Min\":274},\"LastMessageMs\":{\"Count\":4,\"Sum\":313,\"Max\":95,\"Min\":65},\"Chunks\":{\"Count\":4,\"Sum\":100,\"Max\":25,\"Min\":25},\"ResumeMessageMs\":{\"Count\":4,\"Sum\":313,\"Max\":95,\"Min\":65},\"FirstMessageMs\":{\"Count\":4,\"Sum\":27,\"Max\":10,\"Min\":4},\"ActiveMessageMs\":{\"Count\":4,\"Max\":95,\"Min\":4},\"Bytes\":{\"Count\":4,\"Sum\":35200,\"Max\":8832,\"Min\":8768,\"History\":[22,2816,23,6720,27,9184,44,14464,49,17632,68,22208,71,23968,78,26080,79,27456,98,35200]},\"PauseMessageMs\":{\"Count\":4,\"Sum\":12,\"Max\":6,\"Min\":0},\"ActiveTimeUs\":{\"Count\":4,\"Sum\":286000,\"Max\":87000,\"Min\":61000},\"WaitTimeUs\":{\"Count\":4,\"Sum\":296662,\"Max\":89456,\"Min\":63922,\"History\":[22,19965,23,40969,27,60963,44,104369,49,126466,68,169542,71,188044,78,270654,79,278816,98,296662]},\"WaitPeriods\":{\"Count\":4,\"Sum\":19,\"Max\":6,\"Min\":2}}}],\"StageDurationUs\":93000,\"WaitInputTimeUs\":{\"Count\":4,\"Sum\":267447,\"Max\":81960,\"Min\":56713,\"History\":[22,15335,23,31006,27,46091,44,87524,49,108177,68,149548,71,167299,78,246395,79,254027,98,267447]},\"OutputBytes\":{\"Count\":4,\"Sum\":8168,\"Max\":2075,\"Min\":2004},\"UpdateTimeMs\":97,\"Tasks\":4}}],\"Node Type\":\"Merge\",\"SortColumns\":[\"Key (Asc)\"],\"PlanNodeType\":\"Connection\"}],\"Node Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"OutputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"PhysicalStageId\":1,\"FinishedTasks\":1,\"InputBytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168},\"Introspections\":[\"1 minimum tasks for 
compute\"],\"DurationUs\":{\"Count\":1,\"Sum\":92000,\"Max\":92000,\"Min\":92000},\"Mkql\":{},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[7,1048576,99,1048576]},\"BaseTimeMs\":1767494524147,\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":47,\"Max\":47,\"Min\":47},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":97,\"Max\":97,\"Min\":97},\"ActiveMessageMs\":{\"Count\":1,\"Max\":97,\"Min\":11},\"FirstMessageMs\":{\"Count\":1,\"Sum\":11,\"Max\":11,\"Min\":11},\"Bytes\":{\"Count\":1,\"Sum\":7852,\"Max\":7852,\"Min\":7852,\"History\":[28,994,53,1575,72,3685,92,5665,99,7852]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":86000,\"Max\":86000,\"Min\":86000}},\"Name\":\"RESULT\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":97,\"Max\":97,\"Min\":97},\"Chunks\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":95,\"Max\":95,\"Min\":95},\"FirstMessageMs\":{\"Count\":1,\"Sum\":10,\"Max\":10,\"Min\":10},\"ActiveMessageMs\":{\"Count\":1,\"Max\":97,\"Min\":10},\"PauseMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":87000,\"Max\":87000,\"Min\":87000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":84315,\"Max\":84315,\"Min\":84315,\"History\":[28,20061,53,40713,72,61023,92,80491,99,84315]},\"WaitPeriods\":{\"Count\":1,\"Sum\":15,\"Max\":15,\"Min\":15},\"WaitMessageMs\":{\"Count\":1,\"Max\":95,\"Min\":8}}}],\"CpuTimeUs\":{\"Count\":1,\"Sum\":10988,\"Max\":10988,\"Min\":10988,\"History\":[7,572,28,2388,53,3389,72,6043,92,9050,99,10988]},\"StageDurationUs\":92000,\"ResultRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"ResultBytes\":{\"Count\":1,\"Sum\":7852,\"Max\":7852,\"Min\":7852},\"OutputBytes\":{\"Count\":1,\"Sum\":7852,\"Max\":7852,\"Min\":7852},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":54,\"Max\":54,\"Min\":54},\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":96,\"Max\":96,\"Min\":96},\"ActiveMessageMs\":{\"Count\":1,\"Max\":96,\"Min\":7},\"FirstMessageMs\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168,\"History\":[28,1267,53,1848,72,4006,92,5915,99,8168]},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":89000,\"Max\":89000,\"Min\":89000}},\"Name\":\"2\",\"Push\":{\"Rows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100},\"LastMessageMs\":{\"Count\":1,\"Sum\":95,\"Max\":95,\"Min\":95},\"Chunks\":{\"Count\":1,\"Sum\":100,\"Max\":100,\"Min\":100},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":95,\"Max\":95,\"Min\":95},\"FirstMessageMs\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7},\"ActiveMessageMs\":{\"Count\":1,\"Max\":95,\"Min\":7},\"Bytes\":{\"Count\":1,\"Sum\":8168,\"Max\":8168,\"Min\":8168,\"History\":[28,2512,53,4502,72,5692,92,7836,99,8168]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":8,\"Max\":8,\"Min\":8},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":88000,\"Max\":88000,\"Min\":88000},\"WaitTimeUs\":{\"Count\":1,\"Sum\":19677,\"Max\":19677,\"Min\":19677,\"History\":[28,4655,53,9651,72,14618,92,18834,99,19677]},\"WaitPeriods\":{\"Count\":1,\"Sum\":48,\"Max\":48,\"Min\":48},\"WaitMessageMs\":{\"Count\":1,\"Max\":95,\"Min\":8}}}],\"UpdateTimeMs\":99,\"Tasks\":1,\"InputRows\":{\"Count\":1,\"Sum\":1100,\"Max\":1100,\"Min\":1100}}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node 
Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":164104,\"CpuTimeUs\":156457},\"ProcessCpuTimeUs\":368,\"TotalDurationUs\":397470,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":119770},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"E-Size\":\"0\",\"Reverse\":false,\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"ManyShardsTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"Data\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/ManyShardsTable\" \'\"72057594046644480:38\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Data\" \'\"Key\") \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $3 (StructType \'(\'\"Data\" (OptionalType (DataType \'Int32))) \'(\'\"Key\" (OptionalType (DataType \'Uint32)))))\n(let $4 \'(\'(\'\"_logical_id\" \'368) \'(\'\"_id\" \'\"83365835-dc82b4ea-9e9f6246-82a60b18\") \'(\'\"_wide_channels\" $3)))\n(let $5 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($9) (block \'(\n (let $10 (lambda \'($11) (Member $11 \'\"Data\") (Member $11 \'\"Key\")))\n (return (FromFlow (ExpandMap (ToFlow $9) $10)))\n))) $4))\n(let $6 (DqCnMerge (TDqOutput $5 \'\"0\") \'(\'(\'1 \'\"Asc\"))))\n(let $7 (DqPhyStage \'($6) (lambda \'($12) (FromFlow (NarrowMap (ToFlow $12) (lambda \'($13 $14) (AsStruct \'(\'\"Data\" $13) \'(\'\"Key\" $14)))))) \'(\'(\'\"_logical_id\" \'380) \'(\'\"_id\" \'\"2d553e7b-73eac62e-9347ecaf-b34d9e1c\"))))\n(let $8 (DqCnResult (TDqOutput $7 \'\"0\") \'(\'\"Key\" \'\"Data\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($5 $7) \'($8) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType $3) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 397470 total_cpu_time_us: 288360 query_meta: "{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/ManyShardsTable\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":38},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Data\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Key\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Key\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1767494524\",\"query_type\":\"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"fb2d6d1b-b3de3967-daeb5ef1-688daba\",\"version\":\"1.0\"}" 2026-01-04T02:42:04.569913Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331159095676454:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:04.569986Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQuery::QueryStats+UseSink [GOOD] >> KqpQuery::QueryStats-UseSink >> KqpExplain::ExplainStream [GOOD] >> KqpExplain::ExplainScanQueryWithParams |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpTypes::UnsafeTimestampCastV0 [GOOD] >> KqpTypes::UnsafeTimestampCastV1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveBasicFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 28519, MsgBus: 28075 2026-01-04T02:41:13.496233Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330959840041542:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:13.496298Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:13.757549Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:13.757666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:13.805044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:13.857054Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:13.857338Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:13.859413Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330959840041506:2081] 1767494473491737 != 1767494473491740 2026-01-04T02:41:14.060606Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:14.060631Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:14.060635Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:14.060692Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:14.114121Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:14.513762Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:14.631912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:41:14.632329Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:41:14.636401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:14.637096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:41:14.644254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767494474690, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:41:14.648317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-04T02:41:14.648390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2026-01-04T02:41:14.648822Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7591330959840042039:2251] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-04T02:41:14.649315Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330959840041474:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.649456Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330959840041477:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.649524Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330959840041480:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.649835Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330959840041957:2208][/Root] Path was updated to new version: owner# [1:7591330959840041777:2108], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:14.650314Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7591330959840042039:2251] Ack update: ack to# [1:7591330959840041866:2150], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-04T02:41:14.650517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-01-04T02:41:14.654091Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330959840042212:2289][/Root] Path was updated to new version: owner# [1:7591330959840042184:2284], state# { Deleted: 0 
Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:14.654337Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330959840042211:2288][/Root] Path was updated to new version: owner# [1:7591330959840042183:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:16.407411Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-04T02:41:16.408439Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/c5zo/002c44/r3tmp/spilling-tmp-runner/node_1_ded08885-e9903aa-33d62971-8eca8b19, actor: [1:7591330972724944235:2305] 2026-01-04T02:41:16.408579Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/c5zo/002c44/r3tmp/spilling-tmp-runner 2026-01-04T02:41:16.410334Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330972724944249:2493][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7591330959840041777:2108], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:16.410457Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3e2pxpa4fyrn126qay9her", Request has 18444976579233.141178s seconds to be completed 2026-01-04T02:41:16.413939Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3e2pxpa4fyrn126qay9her", Created new session, sessionId: ydb://session/3?node_id=1&id=YzBlMTM0ZjItNTVjYjdmYWUtOTllN2VlMjUtNzkxOWQ0ZmU=, workerId: [1:7591330972724944274:2326], database: /Root, longSession: 1, local sessions count: 1 2026-01-04T02:41:16.414218Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3e2pxpa4fyrn126qay9her 2026-01-04T02:41:16.414306Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:41:16.414365Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 
2026-01-04T02:41:16.414399Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-04T02:41:16.414637Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330972724944265:2495][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7591330959840041777:2108], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:16.414753Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330972724944266:2496][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7591330959840041777:2108], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0104 02:41:16.414990730 668939 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.415165617 668939 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E0104 02:41:16.417567751 668939 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.417704721 668939 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:41:16.418841Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0104 02:41:16.424898343 668939 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.425050760 668939 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:41:16.426230Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-04T02:41:16.432348Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3e2rmf11nzrwmnz363v52e, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzBlMTM0ZjItNTVjYjdmYWUtOTllN2VlMjUtNzkxOWQ0ZmU=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7591330972724944274:2326] 2026-01-04T02:41:16.432421Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7591330972724944283:2498] 2026-01-04T02:41:16.433533Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330972724944285:2499][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7591330959840041777:2108], state# { Deleted: 1 Strong: 1 Version: : Error: GRpc error: (2): Failed to create secure client channel } 2026-01-04T02:42:04.897572Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [9:7591331177937596448:2338] TxId: 281474976710662. Ctx: { TraceId: 01ke3e47h4bhhavsapggnc7hzv, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Y2Y5Y2U2NmEtZWJhM2ZkNTQtOTcwYzdhMy0yYWEwMjQ4OQ==, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" 
credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-04T02:42:04.903012Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01ke3e47h4bhhavsapggnc7hzv, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Y2Y5Y2U2NmEtZWJhM2ZkNTQtOTcwYzdhMy0yYWEwMjQ4OQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7591331177937596452:2352] 2026-01-04T02:42:04.903587Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01ke3e47h4bhhavsapggnc7hzv, Database: /Root, SessionId: ydb://session/3?node_id=9&id=Y2Y5Y2U2NmEtZWJhM2ZkNTQtOTcwYzdhMy0yYWEwMjQ4OQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7591331177937596453:2353] 2026-01-04T02:42:04.904557Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3e47h4bhhavsapggnc7hzv", Forwarded response to sender actor, requestId: 4, sender: [9:7591331177937596345:2337], selfId: [9:7591331156462759165:2266], source: [9:7591331177937596346:2338] 2026-01-04T02:42:04.904989Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=Y2Y5Y2U2NmEtZWJhM2ZkNTQtOTcwYzdhMy0yYWEwMjQ4OQ==, workerId: [9:7591331177937596346:2338], local sessions count: 0 E0104 02:42:05.233570385 690982 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:42:05.233774557 690982 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopTokenFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 31600, MsgBus: 29358 2026-01-04T02:41:13.496851Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330959237096339:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:13.496901Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:13.753411Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:13.809440Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:13.809572Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:13.906696Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:13.916536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:14.037572Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:14.060595Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:14.060627Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:14.060637Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:14.060721Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:14.517260Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:14.623230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:41:14.630585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:41:14.633127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:14.633730Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:41:14.635929Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767494474683, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:41:14.636989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-04T02:41:14.637034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2026-01-04T02:41:14.637272Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7591330959237096825:2244] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-04T02:41:14.637567Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330959237096268:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.637664Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330959237096271:2052] Update description: path# /Root, pathId# [OwnerId: 
72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.637779Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330959237097051:2289][/Root] Path was updated to new version: owner# [1:7591330959237097045:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:14.638126Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330959237097050:2288][/Root] Path was updated to new version: owner# [1:7591330959237097044:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:14.638247Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330959237096274:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.638356Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7591330959237096825:2244] Ack update: ack to# [1:7591330959237096644:2140], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-04T02:41:14.638445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-01-04T02:41:14.638847Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330959237096743:2200][/Root] Path was updated to new version: owner# [1:7591330959237096572:2109], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:16.465074Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-04T02:41:16.466328Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/c5zo/002c4a/r3tmp/spilling-tmp-runner/node_1_9fae624d-ad5ecc41-db085fe9-774054ca, actor: [1:7591330972121999029:2305] 2026-01-04T02:41:16.466487Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/c5zo/002c4a/r3tmp/spilling-tmp-runner 2026-01-04T02:41:16.468059Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3e2px72n8jn5rwkvp611hf", Request has 18444976579233.083592s seconds to be completed E0104 02:41:16.471256867 668957 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.471389603 668957 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is 
not valid: dns:/// 2026-01-04T02:41:16.472523Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3e2px72n8jn5rwkvp611hf", Created new session, sessionId: ydb://session/3?node_id=1&id=NDkwNGNhZjQtZWIyNzliODAtM2Q2YjZmNjItY2Q0YzZkOA==, workerId: [1:7591330972121999049:2324], database: /Root, longSession: 1, local sessions count: 1 2026-01-04T02:41:16.472792Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3e2px72n8jn5rwkvp611hf 2026-01-04T02:41:16.472891Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:41:16.472965Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-04T02:41:16.473000Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 E0104 02:41:16.474073162 668957 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.474192904 668957 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:41:16.474389Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330972121999052:2490][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7591330959237096572:2109], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:16.474406Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330972121999053:2491][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7591330959237096572:2109], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:16.475486Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-04T02:41:16.476059Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330972121999068:2493][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7591330959237096572:2109], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0104 02:41:16.485453702 668957 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.485562478 668957 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:41:16.486859Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-04T02:41:16.494006Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3e2rpdbj8mw7waajdzcvac, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NDkwNGNhZjQtZWIyNzliODAtM2Q2YjZmNjItY2Q0YzZkOA==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7591330972121999049:2324] 2026-01-04T02:41:16.494069Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7591330972121999077:2495] 2026-01-04T02:41:16.495217Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330972121999079:2496][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7591330959237096572:2109], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:16.496353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId ... { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } Call ListSplits. selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } 
right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2026-01-04T02:42:05.254533Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [9:7591331184823910842:2338] TxId: 281474976710662. Ctx: { TraceId: 01ke3e47wcd6xvsfj9xtwekf53, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MTg4Yzc2Y2ItNWIwNDk3MjQtN2Y0NDE3NGYtMzZjMTBhNw==, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-04T02:42:05.261471Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. 
Ctx: { TraceId: 01ke3e47wcd6xvsfj9xtwekf53, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MTg4Yzc2Y2ItNWIwNDk3MjQtN2Y0NDE3NGYtMzZjMTBhNw==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7591331184823910847:2353] 2026-01-04T02:42:05.262399Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01ke3e47wcd6xvsfj9xtwekf53, Database: /Root, SessionId: ydb://session/3?node_id=9&id=MTg4Yzc2Y2ItNWIwNDk3MjQtN2Y0NDE3NGYtMzZjMTBhNw==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7591331184823910846:2352] 2026-01-04T02:42:05.263699Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3e47wcd6xvsfj9xtwekf53", Forwarded response to sender actor, requestId: 4, sender: [9:7591331180528943442:2337], selfId: [9:7591331159054106243:2244], source: [9:7591331180528943443:2338] 2026-01-04T02:42:05.264437Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=MTg4Yzc2Y2ItNWIwNDk3MjQtN2Y0NDE3NGYtMzZjMTBhNw==, workerId: [9:7591331180528943443:2338], local sessions count: 0 E0104 02:42:05.269109120 691133 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:42:05.269296053 691133 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:42:05.272412Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::EmptyListForListParameterExecuteQuery [GOOD] Test command err: Trying to start YDB, gRPC: 11213, MsgBus: 27871 2026-01-04T02:41:29.241666Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331027870984287:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:29.241970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:29.285602Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:41:29.519155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:29.519215Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:29.521782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:29.566318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:29.616914Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:29.619325Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331027870984181:2081] 1767494489237705 != 1767494489237708 2026-01-04T02:41:29.686185Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:29.686227Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:29.686234Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:29.686305Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:29.788080Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:30.146715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:30.229967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:30.251602Z node 1 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:30.396139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:30.557490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:30.619481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:32.467839Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331040755887944:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:32.467961Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:32.468459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331040755887954:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:32.468547Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:32.769567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:32.806459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:32.834180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:32.874265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:32.913429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:32.967336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:33.018251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:33.076220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:33.163284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331045050856124:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:33.163380Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:33.163689Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331045050856130:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:33.164024Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331045050856129:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:33.164061Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:33.168042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:33.179566Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331045050856133:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:33.248835Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331045050856184:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:34.241919Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331027870984287:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:34.241978Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10786, MsgBus: 29346 2026-01-04T02:41:36.124845Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fli ... ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591331115668126528:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:54.989983Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30171, MsgBus: 7754 2026-01-04T02:41:58.022094Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591331153142076968:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:58.022165Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:58.042411Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:58.120028Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:58.158474Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:58.158575Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:58.166602Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:58.215613Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:58.215636Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:58.215645Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:58.215726Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:58.302986Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:58.768236Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:58.775008Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:58.784539Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:58.865949Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:59.027448Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:59.028914Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:59.108713Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.393675Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331170321947975:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.393786Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.394132Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331170321947985:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.394184Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.486578Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.528040Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.564967Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.602715Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.635072Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.671635Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.724711Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.787883Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.868379Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331170321948856:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.868501Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.868524Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331170321948861:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.868664Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331170321948863:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.868719Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.872586Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:02.884493Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331170321948864:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:02.962518Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331170321948916:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:03.021990Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331153142076968:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:03.022065Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpQuery::QueryCache >> GenericFederatedQuery::YdbFilterPushdown [GOOD] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::PostgreSQLFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 18786, MsgBus: 28464 2026-01-04T02:41:13.497903Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330957426195962:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:13.497972Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:13.729949Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:13.777639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:13.777757Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:13.829931Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:13.884654Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:14.003532Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:14.060571Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:14.060606Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:14.060612Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:14.060683Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:14.507510Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 
2026-01-04T02:41:14.665791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:41:14.667090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:41:14.670099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:14.671053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:41:14.673737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767494474718, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:41:14.675191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-04T02:41:14.675280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 1 2026-01-04T02:41:14.675579Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7591330957426196444:2243] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-04T02:41:14.675974Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330957426195890:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.676135Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330957426195893:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.676387Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330957426195896:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.676654Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330957426196347:2190][/Root] Path was updated to new version: owner# [1:7591330957426196211:2120], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:14.676668Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330957426196565:2288][/Root] Path was updated to new version: owner# [1:7591330957426196559:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 
Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:14.676937Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7591330957426196444:2243] Ack update: ack to# [1:7591330957426196272:2144], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-04T02:41:14.677037Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330957426196566:2289][/Root] Path was updated to new version: owner# [1:7591330957426196560:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:14.677122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-01-04T02:41:16.404402Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-04T02:41:16.405394Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/c5zo/002c87/r3tmp/spilling-tmp-runner/node_1_2a6b894e-c071be77-25d222e4-c581da9d, actor: [1:7591330970311098652:2305] 2026-01-04T02:41:16.405612Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/c5zo/002c87/r3tmp/spilling-tmp-runner 2026-01-04T02:41:16.407481Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3e2py42vnrks8c91q3dbs1", Request has 18444976579233.144180s seconds to be completed E0104 02:41:16.411102832 668954 dns_resolver_ares.cc:452] no server name supplied in dns URI 2026-01-04T02:41:16.411063Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330970311098676:2495][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7591330957426196211:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:16.411115Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330970311098674:2493][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7591330957426196211:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:16.411197Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330970311098675:2494][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7591330957426196211:2120], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0104 02:41:16.411272437 668954 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:41:16.411889Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3e2py42vnrks8c91q3dbs1", Created new session, sessionId: ydb://session/3?node_id=1&id=YmQ0OWZhN2MtNDc1MjAyMy1jNzA1Y2UzMC1hNjE0YzlmNg==, workerId: [1:7591330970311098697:2326], database: /Root, longSession: 1, local 
sessions count: 1 2026-01-04T02:41:16.412140Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3e2py42vnrks8c91q3dbs1 2026-01-04T02:41:16.412224Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:41:16.412260Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-04T02:41:16.412280Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 E0104 02:41:16.413622432 668958 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.413826272 668958 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:41:16.415205Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0104 02:41:16.421470351 668958 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.421648627 668958 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:41:16.422724Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0104 02:41:16.439272650 668954 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.439391118 668954 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:41:16.440346Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-04T02:41:16.448387Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3e2rmzb8109qb2k4tz3js4, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YmQ0OWZhN2MtNDc1MjAyMy1jNzA1Y2UzMC1hNjE0YzlmNg==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7591330970311098697:2326] 2026-01-04T02:41:16.448432Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7591330970311098700:2498] 20 ... atusCode: 0 schema { columns { name: "colDate" type { optional_type { item { type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } E0104 02:42:05.353948957 691315 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:42:05.354130224 691315 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:42:05.358469Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } Call ListSplits. selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 E0104 02:42:05.577196918 691315 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:42:05.577361183 691315 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:42:05.582037Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } Call ReadSplits. 2026-01-04T02:42:05.612421Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [9:7591331184121242550:2338] TxId: 281474976710662. Ctx: { TraceId: 01ke3e48898ryk31grry5syzq8, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NjUwYWRlN2EtMjU3MzMxZWEtNmU0YWQwNzktM2U3YzJmNmM=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: POSTGRESQL endpoint { host: "localhost" port: 5432 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE pg_options { schema: "public" } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-04T02:42:05.618262Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01ke3e48898ryk31grry5syzq8, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NjUwYWRlN2EtMjU3MzMxZWEtNmU0YWQwNzktM2U3YzJmNmM=, PoolId: default, IsStreamingQuery: 0}. 
Compute actor has finished execution: [9:7591331184121242554:2352] 2026-01-04T02:42:05.618758Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01ke3e48898ryk31grry5syzq8, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NjUwYWRlN2EtMjU3MzMxZWEtNmU0YWQwNzktM2U3YzJmNmM=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7591331184121242555:2353] 2026-01-04T02:42:05.619839Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3e48898ryk31grry5syzq8", Forwarded response to sender actor, requestId: 4, sender: [9:7591331184121242447:2337], selfId: [9:7591331162646405269:2266], source: [9:7591331184121242448:2338] 2026-01-04T02:42:05.620506Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=NjUwYWRlN2EtMjU3MzMxZWEtNmU0YWQwNzktM2U3YzJmNmM=, workerId: [9:7591331184121242448:2338], local sessions count: 0 |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> KqpQuery::QueryTimeout ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::MixedCreateAsSelect [GOOD] Test command err: Trying to start YDB, gRPC: 63653, MsgBus: 8056 2026-01-04T02:41:20.065639Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330990230347451:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:20.065985Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:20.434117Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:20.477873Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:20.477974Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:20.510101Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:20.611740Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:20.641278Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:20.770405Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:20.770440Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:20.770448Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:20.770539Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:21.076416Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:21.437261Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:21.448423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:41:21.492740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.726172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.925098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:22.006374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.540763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331003115251167:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.540865Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.541132Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331003115251177:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.541174Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.918732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.951112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.988318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.018583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.060136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.102261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.142020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.189432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.272139Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331007410219344:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.272205Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.272254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331007410219349:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.272412Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331007410219351:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.272456Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.275320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:24.284998Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331007410219352:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:41:24.365135Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331007410219404:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:25.065201Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330990230347451:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:25.065269Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17392, MsgBus: 7010 2026-01-04T02:41:27.059291Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331017942777904:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:27.059353Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp ... Result TxId: 281474976710669 2026-01-04T02:42:02.731681Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037970 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.731718Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037985 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.731741Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037968 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.731792Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037979 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.731794Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037964 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.731850Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037981 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.731856Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037966 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.731891Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037973 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.731901Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037996 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: 
NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.731934Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037969 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.731947Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037994 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.731976Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038004 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.731991Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037992 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732019Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038006 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732033Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038013 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732060Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038002 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732076Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038015 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732104Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037998 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732122Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038011 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732144Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038000 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732168Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038009 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732181Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037959 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732210Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037956 unhandled event 
type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732218Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037990 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732254Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037954 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732256Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037988 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732295Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037986 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732297Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037974 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732333Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037980 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732341Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037971 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732373Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037982 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732385Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037965 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732414Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037984 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732425Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037967 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732453Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037976 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732471Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037961 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732492Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: 
TColumnShard.StateWork at 72075186224037978 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.732513Z node 5 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037963 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710669 2026-01-04T02:42:02.736919Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:42:02.746702Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:42:02.850254Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331169536123810:6073] txid# 281474976710672, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:02.863299Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:04.067005Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:42:04.083173Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> KqpStats::CreateTableAsStats+IsOlap [GOOD] >> KqpStats::CreateTableAsStats-IsOlap >> KqpParams::CheckQueryCacheForExecuteAndPreparedQueries [GOOD] >> KqpParams::CheckQueryLimitsWorksAsExpected >> KqpStats::JoinStatsBasicScan [GOOD] >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown [GOOD] >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown [GOOD] >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink [GOOD] >> KqpStats::OneShardLocalExec+UseSink >> KqpQuery::CreateTableAs_PragmaAndParamsAndNamedExprs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::YdbFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 24658, MsgBus: 11939 2026-01-04T02:41:14.078574Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330962999291500:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:14.078743Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:14.300969Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:14.312530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:14.312659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:14.401052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:14.401938Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330962999291461:2081] 1767494474076509 != 1767494474076512 2026-01-04T02:41:14.406653Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:14.513752Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:14.531167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:14.531199Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:14.531225Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:14.531334Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:14.944166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:41:14.944614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:41:14.946527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:14.947107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:41:14.948841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767494474998, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:41:14.949678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 
281474976710657:0 2026-01-04T02:41:14.949711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2026-01-04T02:41:14.949886Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7591330962999291987:2243] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-04T02:41:14.950117Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330962999291429:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.950244Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330962999291432:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.950340Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7591330962999291987:2243] Ack update: ack to# [1:7591330962999291816:2147], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-04T02:41:14.950395Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330962999291435:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.950503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2026-01-04T02:41:14.950671Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330962999291903:2199][/Root] Path was updated to new version: owner# [1:7591330962999291755:2122], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:14.951612Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330962999292144:2288][/Root] Path was updated to new version: owner# [1:7591330962999292138:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:14.951921Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330962999292145:2289][/Root] Path was updated to new version: owner# [1:7591330962999292139:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:14.952332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:15.084140Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330967294259592:2490][/Root/.metadata/initialization/migrations] Set up state: owner# [1:7591330962999291755:2122], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:15.086982Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:16.772698Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-04T02:41:16.774341Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/c5zo/0029ab/r3tmp/spilling-tmp-runner/node_1_d4313ef1-a4775ec3-b2ce0230-6a99370a, actor: [1:7591330971589226899:2305] 2026-01-04T02:41:16.774521Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/c5zo/0029ab/r3tmp/spilling-tmp-runner 2026-01-04T02:41:16.775429Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3e2q6m2v62vv0jwj6r0thx", Request has 18444976579232.776208s seconds to be completed 2026-01-04T02:41:16.775616Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330971589226915:2494][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7591330962999291755:2122], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:16.775804Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330971589226916:2495][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7591330962999291755:2122], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:16.778665Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3e2q6m2v62vv0jwj6r0thx", Created new session, sessionId: ydb://session/3?node_id=1&id=NGUzNGJmYTAtOTQ0MDBhYy1lYzBmZjU1NC0zYzc1ZjY1MQ==, workerId: [1:7591330971589226935:2325], database: /Root, longSession: 1, local sessions count: 1 2026-01-04T02:41:16.778864Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3e2q6m2v62vv0jwj6r0thx 2026-01-04T02:41:16.778926Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:41:16.779107Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 
2026-01-04T02:41:16.779135Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 E0104 02:41:16.779804922 669411 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.779951831 669411 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:41:16.780691Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330971589226937:2497][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7591330962999291755:2122], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0104 02:41:16.782186286 669411 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.782301512 669411 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:41:16.783513Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0104 02:41:16.793005044 669411 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.793137705 669411 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:41:16.794103Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-04T02:41:16.802177Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3e2s011q9qwtgcb40114sc, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NGUzNGJmYTAtOTQ0MDBhYy1lYzBmZjU1NC0zYzc1ZjY1MQ==, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Sen ... ional_type { item { type_id: DATE } } } } columns { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } columns { name: "colString" type { optional_type { item { type_id: STRING } } } } } error { status: SUCCESS } E0104 02:42:06.958705989 692071 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:42:06.958907051 692071 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:42:06.960765Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } Call ListSplits. selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 2026-01-04T02:42:07.180316Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[9:7591331170143788486:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:07.180423Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; E0104 02:42:07.192993053 692071 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:42:07.193203437 692071 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:42:07.195020Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-04T02:42:07.207143Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [9:7591331191618625999:2338] TxId: 281474976715662. Ctx: { TraceId: 01ke3e49sx1h18jdzrnfe45q16, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZGJjOTgwNDQtMWFiMDBhNDYtNmVhNTU4LWUzODUxMGE2, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: YDB endpoint { host: "localhost" port: 2136 } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: true protocol: NATIVE } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-04T02:42:07.212914Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01ke3e49sx1h18jdzrnfe45q16, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZGJjOTgwNDQtMWFiMDBhNDYtNmVhNTU4LWUzODUxMGE2, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7591331191618626003:2351] 2026-01-04T02:42:07.213363Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. 
Ctx: { TraceId: 01ke3e49sx1h18jdzrnfe45q16, Database: /Root, SessionId: ydb://session/3?node_id=9&id=ZGJjOTgwNDQtMWFiMDBhNDYtNmVhNTU4LWUzODUxMGE2, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7591331191618626004:2352] 2026-01-04T02:42:07.214359Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3e49sx1h18jdzrnfe45q16", Forwarded response to sender actor, requestId: 4, sender: [9:7591331187323658602:2337], selfId: [9:7591331170143788716:2266], source: [9:7591331187323658603:2338] 2026-01-04T02:42:07.215017Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=ZGJjOTgwNDQtMWFiMDBhNDYtNmVhNTU4LWUzODUxMGE2, workerId: [9:7591331187323658603:2338], local sessions count: 0 |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> KqpLimits::StreamWrite+Allowed [GOOD] >> KqpLimits::StreamWrite-Allowed >> KqpExplain::UpdateSecondaryConditional-UseSink >> KqpQuery::ExtendedTimeOutOfBounds+BulkUpsert >> TPQTest::TestPQCacheSizeManagement [GOOD] >> TPQTest::TestOffsetEstimation [GOOD] >> KqpStats::SysViewClientLost ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::JoinStatsBasicScan [GOOD] Test command err: Trying to start YDB, gRPC: 15893, MsgBus: 2135 2026-01-04T02:41:35.697557Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331052347226374:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:35.697594Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:35.760469Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:41:36.049893Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:36.124858Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:36.124957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:36.127148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:36.168620Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:36.177693Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331052347226346:2081] 1767494495695600 != 1767494495695603 2026-01-04T02:41:36.246336Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:36.246366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:36.246374Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:36.246477Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2026-01-04T02:41:36.249989Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:36.653719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:36.702474Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:36.708522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:36.849737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:37.006360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:37.085866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:38.807365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331065232130113:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:38.807491Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:38.808042Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331065232130123:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:38.808108Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:39.154765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:39.190635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:39.222417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:39.254228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:39.287996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:39.333711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:39.376370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:39.431022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:39.505469Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331069527098292:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:39.505529Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:39.505751Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331069527098298:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:39.505823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331069527098297:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:39.505877Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:39.509013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:39.521081Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331069527098301:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:39.586686Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331069527098352:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:40.698092Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331052347226374:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:40.698146Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16008, MsgBus: 16942 2026-01-04T02:41:42.592670Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: flin ... PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29025, MsgBus: 22869 2026-01-04T02:41:57.776764Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591331148605546125:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:57.776829Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:57.798618Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:57.869384Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:57.908925Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:57.909029Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:57.913180Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:57.985954Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:57.985977Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:57.985985Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:57.986074Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:58.067491Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:58.511804Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:58.521659Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:41:58.528703Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:58.596047Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:58.790559Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:58.802875Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:58.886856Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.144979Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331170080384447:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.145090Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.145457Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331170080384456:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.145544Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.230731Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.271830Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.317274Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.354893Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.387206Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.441253Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.484754Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.547285Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.631531Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331170080385327:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.631610Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.631800Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331170080385332:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.631851Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.631864Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331170080385333:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.636721Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:02.649592Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591331170080385336:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:42:02.726836Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591331170080385387:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:02.777055Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591331148605546125:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:02.777129Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:07.881733Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494525391, txId: 281474976715673] shutting down |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> ColumnShardTiers::DSConfigsStub [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHadoopBasicFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 7683, MsgBus: 21805 2026-01-04T02:41:13.496606Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330961545143403:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:13.496692Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:13.743579Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:13.784687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:13.784815Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:13.826900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:13.838282Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:13.881970Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330961545143365:2081] 1767494473491790 != 1767494473491793 2026-01-04T02:41:13.957692Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:14.060458Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:14.060541Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:14.060562Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-04T02:41:14.060672Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:14.506288Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:14.647008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T02:41:14.647508Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:41:14.652441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:14.653287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:41:14.655495Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767494474704, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:41:14.664645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710657:0 2026-01-04T02:41:14.664691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710657, publications: 1, subscribers: 0 2026-01-04T02:41:14.664819Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7591330961545143889:2242] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976710657, is deletion# false, version: 6 2026-01-04T02:41:14.665090Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330961545143333:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.665116Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330961545143339:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.665329Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:786: [1:7591330961545143889:2242] Ack update: ack to# [1:7591330961545143720:2150], cookie# 281474976710657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-04T02:41:14.665403Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330961545143813:2205][/Root] Path was updated to new version: owner# [1:7591330961545143653:2119], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:14.665586Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: 
[main][1:7591330961545144007:2288][/Root] Path was updated to new version: owner# [1:7591330961545143977:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:14.665884Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330961545144008:2289][/Root] Path was updated to new version: owner# [1:7591330961545143978:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:14.666094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 0 2026-01-04T02:41:14.666291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:14.666336Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330961545143336:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:16.437359Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-04T02:41:16.438656Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/c5zo/002c91/r3tmp/spilling-tmp-runner/node_1_681df992-49e1f73c-4ee4638e-26851846, actor: [1:7591330974430046100:2305] 2026-01-04T02:41:16.438847Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/c5zo/002c91/r3tmp/spilling-tmp-runner 2026-01-04T02:41:16.439602Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330974430046112:2496][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7591330961545143653:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:16.441141Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3e2pxzcqmyh7rzjtnp8ty2", Request has 18444976579233.110511s seconds to be completed 2026-01-04T02:41:16.443132Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330974430046130:2498][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7591330961545143653:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:16.443282Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330974430046131:2499][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7591330961545143653:2119], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0104 02:41:16.443330919 668956 
dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.443471533 668956 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// E0104 02:41:16.445233518 668956 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.445315436 668956 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:41:16.445574Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3e2pxzcqmyh7rzjtnp8ty2", Created new session, sessionId: ydb://session/3?node_id=1&id=ODVmNThiMzQtNDgwNDllZjUtZjQ3ZDI0ZTQtM2JiMTEzMjI=, workerId: [1:7591330974430046146:2327], database: /Root, longSession: 1, local sessions count: 1 2026-01-04T02:41:16.445838Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3e2pxzcqmyh7rzjtnp8ty2 2026-01-04T02:41:16.445937Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:41:16.445979Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 2026-01-04T02:41:16.446005Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-04T02:41:16.446625Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0104 02:41:16.456966740 668963 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.457145417 668963 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:41:16.458416Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-04T02:41:16.467922Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3e2rnk82yng4xmxw04921t, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODVmNThiMzQtNDgwNDllZjUtZjQ3ZDI0ZTQtM2JiMTEzMjI=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7591330974430046146:2327] 2026-01-04T02:41:16.467968Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7591330974430046148:2501] 2026-01-04T02:41:16.469090Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscribe ... username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } ListSplits result. GRpcStatusCode: 0 E0104 02:42:07.963249533 692150 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:42:07.963439460 692150 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:42:07.966488Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-04T02:42:08.041305Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [9:7591331195342465789:2338] TxId: 281474976710662. Ctx: { TraceId: 01ke3e4akg3tjg92f5ce8w4knn, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NGVlZWI0MjctMzAxMjJiNTYtODc0MzkwNWYtODliMTlmZQ==, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { basic { username: "crab" password: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hadoop { } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-04T02:42:08.047160Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. 
Ctx: { TraceId: 01ke3e4akg3tjg92f5ce8w4knn, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NGVlZWI0MjctMzAxMjJiNTYtODc0MzkwNWYtODliMTlmZQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7591331195342465793:2352] 2026-01-04T02:42:08.047240Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01ke3e4akg3tjg92f5ce8w4knn, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NGVlZWI0MjctMzAxMjJiNTYtODc0MzkwNWYtODliMTlmZQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7591331195342465795:2353] 2026-01-04T02:42:08.048218Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3e4akg3tjg92f5ce8w4knn", Forwarded response to sender actor, requestId: 4, sender: [9:7591331191047498390:2337], selfId: [9:7591331169572661193:2265], source: [9:7591331191047498391:2338] 2026-01-04T02:42:08.049255Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=NGVlZWI0MjctMzAxMjJiNTYtODc0MzkwNWYtODliMTlmZQ==, workerId: [9:7591331191047498391:2338], local sessions count: 0 E0104 02:42:08.311912029 692150 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:42:08.312120498 692150 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> GenericFederatedQuery::IcebergHiveTokenFilterPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 13720, MsgBus: 4414 2026-01-04T02:41:13.496214Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330959730870708:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:13.496309Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:13.759748Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:13.759880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:13.833516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:13.841586Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:13.859425Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:14.017683Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:14.060498Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:14.060528Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:14.060548Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:14.060636Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:14.505192Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:14.640825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-04T02:41:14.641233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:41:14.643471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:14.644243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:41:14.649950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767494474690, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:41:14.651036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:0 2026-01-04T02:41:14.651070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976715657, publications: 1, subscribers: 1 2026-01-04T02:41:14.651298Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: populator.cpp:693: [1:7591330959730871201:2250] Update description: owner# 72057594046644480, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], cookie# 281474976715657, is deletion# false, version: 6 2026-01-04T02:41:14.651628Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330959730870637:2049] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.651717Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330959730870640:2052] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.651776Z node 1 :SCHEME_BOARD_REPLICA NOTICE: replica.cpp:831: [1:7591330959730870643:2055] Update description: path# /Root, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], deletion# false 2026-01-04T02:41:14.652090Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330959730871124:2211][/Root] Path was updated to new version: owner# [1:7591330959730870940:2108], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:14.652433Z node 1 :SCHEME_BOARD_POPULATOR NOTICE: 
populator.cpp:786: [1:7591330959730871201:2250] Ack update: ack to# [1:7591330959730871022:2146], cookie# 281474976715657, pathId# [OwnerId: 72057594046644480, LocalPathId: 1], version# 6 2026-01-04T02:41:14.652551Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976715657, subscribers: 1 2026-01-04T02:41:14.653747Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330959730871312:2288][/Root] Path was updated to new version: owner# [1:7591330959730871305:2283], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:14.653994Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:855: [main][1:7591330959730871314:2289][/Root] Path was updated to new version: owner# [1:7591330959730871306:2284], state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 5) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements }, new state# { Deleted: 0 Strong: 1 Version: (PathId: [OwnerId: 72057594046644480, LocalPathId: 1], Version: 6) DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:16.473626Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 2026-01-04T02:41:16.474695Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:230: Init DQ local file spilling service at /home/runner/.ya/build/build_root/c5zo/002c95/r3tmp/spilling-tmp-runner/node_1_1796f46e-e7b5b31f-db87a504-cabcea16, actor: [1:7591330972615773397:2305] 2026-01-04T02:41:16.474874Z node 1 :KQP_COMPUTE INFO: spilling_file.cpp:781: [RemoveOldTmp] removing at root: /home/runner/.ya/build/build_root/c5zo/002c95/r3tmp/spilling-tmp-runner 2026-01-04T02:41:16.476087Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1366: TraceId: "01ke3e2px9fba79s0zk5z5j879", Request has 18444976579233.075551s seconds to be completed 2026-01-04T02:41:16.479119Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1441: TraceId: "01ke3e2px9fba79s0zk5z5j879", Created new session, sessionId: ydb://session/3?node_id=1&id=ZGFiZTk4NTctZWU5ZGI4NjUtMzg2NzAyOTItYWQ5OWQ2NGE=, workerId: [1:7591330972615773417:2324], database: /Root, longSession: 1, local sessions count: 1 2026-01-04T02:41:16.479288Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:617: Received create session request, trace_id: 01ke3e2px9fba79s0zk5z5j879 2026-01-04T02:41:16.479331Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:468: Subscribed for config changes. 2026-01-04T02:41:16.479349Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:475: Updated table service config. 
2026-01-04T02:41:16.479359Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1562: Updated YQL logs priority to current level: 5 E0104 02:41:16.480949980 668941 dns_resolver_ares.cc:452] no server name supplied in dns URI 2026-01-04T02:41:16.480915Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330972615773421:2492][/Root/.metadata/workload_manager/running_requests] Set up state: owner# [1:7591330959730870940:2108], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:16.480917Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330972615773419:2491][/Root/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7591330959730870940:2108], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0104 02:41:16.481110183 668941 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:41:16.481603Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330972615773425:2493][/Root/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7591330959730870940:2108], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } E0104 02:41:16.483348531 668941 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.483455527 668941 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:41:16.484597Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } E0104 02:41:16.490730996 668941 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:41:16.490846650 668941 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:41:16.491720Z node 1 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } 2026-01-04T02:41:16.504212Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:740: Ctx: { TraceId: 01ke3e2rpq3b1yxyqgm65hdy4v, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGFiZTk4NTctZWU5ZGI4NjUtMzg2NzAyOTItYWQ5OWQ2NGE=, PoolId: , DatabaseId: , IsStreamingQuery: 0}. TEvQueryRequest, set timer for: 600.000000s timeout: 0.000000s cancelAfter: 0.000000s. Send request to target, requestId: 3, targetId: [1:7591330972615773417:2324] 2026-01-04T02:41:16.504284Z node 1 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1193: Scheduled timeout timer for requestId: 3 timeout: 600.000000s actor id: [1:7591330972615773445:2496] 2026-01-04T02:41:16.505388Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591330972615773447:2497][/Root/.metadata/workload_manager/pools/default] Set up state: owner# [1:7591330959730870940:2108], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:41:16.506396Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: ... credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } 
ListSplits result. GRpcStatusCode: 0 Call ReadSplits. splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Expected: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL GENERIC-CONNECTOR-MOCK Actual: splits { select { data_source_instance { kind: ICEBERG endpoint { } database: "pgdb" credentials { token { type: "IAM" value: "qwerty12345" } } use_tls: false protocol: NATIVE iceberg_options { catalog { hive_metastore { uri: "hive_metastore_uri" } } warehouse { s3 { uri: "s3_uri" endpoint: "s3_endpoint" region: "s3_region" } } } } what { items { column { name: "colDate" type { optional_type { item { type_id: DATE } } } } } items { column { name: "colInt32" type { optional_type { item { type_id: INT32 } } } } } items { column { name: "colString" type { optional_type { item { type_id: STRING } } } } } } from { table: "example_1" } where { filter_typed { comparison { operation: EQ left_value { column: "colDate" } right_value { typed_value { type { type_id: DATE } value { uint32_value: 20326 } } } } } } } description: "some binary description" } format: ARROW_IPC_STREAMING filtering: FILTERING_OPTIONAL ReadSplits result. GRpcStatusCode: 0 2026-01-04T02:42:08.337235Z node 9 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [9:7591331194195553668:2338] TxId: 281474976710662. Ctx: { TraceId: 01ke3e4awbehek7zx2w8zegmvz, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NTUxZWRiNGMtYjdmNTEyMmUtNTQ4ODE2MTItNmRiYjkzODE=, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 2 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 2 use_followers# false trace_id# 2026-01-04T02:42:08.343053Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. 
Ctx: { TraceId: 01ke3e4awbehek7zx2w8zegmvz, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NTUxZWRiNGMtYjdmNTEyMmUtNTQ4ODE2MTItNmRiYjkzODE=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7591331194195553672:2352] 2026-01-04T02:42:08.343724Z node 9 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710662. Ctx: { TraceId: 01ke3e4awbehek7zx2w8zegmvz, Database: /Root, SessionId: ydb://session/3?node_id=9&id=NTUxZWRiNGMtYjdmNTEyMmUtNTQ4ODE2MTItNmRiYjkzODE=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [9:7591331194195553673:2353] 2026-01-04T02:42:08.344652Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: TraceId: "01ke3e4awbehek7zx2w8zegmvz", Forwarded response to sender actor, requestId: 4, sender: [9:7591331189900586269:2337], selfId: [9:7591331168425749091:2266], source: [9:7591331189900586270:2338] 2026-01-04T02:42:08.345092Z node 9 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=9&id=NTUxZWRiNGMtYjdmNTEyMmUtNTQ4ODE2MTItNmRiYjkzODE=, workerId: [9:7591331189900586270:2338], local sessions count: 0 E0104 02:42:08.366677799 692247 dns_resolver_ares.cc:452] no server name supplied in dns URI E0104 02:42:08.366842305 692247 channel.cc:120] channel stack builder failed: UNKNOWN: the target uri is not valid: dns:/// 2026-01-04T02:42:08.372422Z node 9 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "Root" error: CLIENT_INTERNAL_ERROR {
: Error: GRpc error: (2): Failed to create secure client channel } |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/federated_query/generic_ut/unittest >> KqpExplain::UpdateOn-UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateOnSecondaryWithoutSecondaryKey-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 63519, MsgBus: 27949 2026-01-04T02:41:20.155529Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330990173074266:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:20.155654Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:20.649340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:20.649448Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:20.675111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:20.699607Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:20.758932Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:20.851675Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:20.851711Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:20.851717Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:20.851854Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:20.913294Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:21.173925Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:21.447120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:21.454747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:21.512696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.733583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.895541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.975014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.793795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331003057977983:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.793922Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.794211Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331003057977993:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.794294Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.111612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.151323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.185394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.232646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.272490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.319401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.345546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.389005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.465940Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331007352946157:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.466028Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.466112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331007352946162:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.466161Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331007352946164:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.466200Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.469683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:24.483746Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331007352946166:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:24.566735Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331007352946217:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:25.155198Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330990173074266:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:25.160840Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:26.070552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... cpp:689) 2026-01-04T02:41:59.459336Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:59.569112Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:59.638418Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:59.718034Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.876192Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331168732985150:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.876295Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.876733Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331168732985160:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.876814Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.968309Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:03.019505Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:03.064164Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:03.108838Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:03.151039Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:03.195125Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:03.247983Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:03.306012Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:03.421745Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331173027953324:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:03.421849Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331173027953329:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:03.421854Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:03.422049Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331173027953331:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:03.422100Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:03.427325Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:03.439157Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331173027953332:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:03.505470Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331173027953384:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:03.513911Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331151553114133:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:03.513991Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:05.802845Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:05.856243Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:05.904668Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"Plan":{"Plans":[{"PlanNodeId":14,"Plans":[{"Tables":["SecondaryKeys"],"PlanNodeId":13,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys","Name":"Upsert","Table":"SecondaryKeys"},{"Inputs":[],"Iterator":"precompute_2_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_2_0"}],"Node Type":"Effect"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"Filter","Name":"Iterator"},{"E-Rows":"2","Inputs":[],"Predicate":"Contains","E-Cost":"0","E-Size":"10","Name":"Filter"}],"Node Type":"ConstantExpr-Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_2_0","Node Type":"Precompute_2","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"E-Size":"0","LookupKeyColumns":["Key"],"Node Type":"TableLookup","PlanNodeId":2,"Path":"\/Root\/SecondaryKeys","Columns":["Key"],"E-Rows":"2","Plans":[{"PlanNodeId":1,"Operators":[{"Inputs":[],"Iterator":"precompute_0_1","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_1"}],"Table":"SecondaryKeys","PlanNodeType":"Connection","E-Cost":"0"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1","Parent 
Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryKeys","reads":[{"lookup_by":["Key"],"columns":["Key"],"type":"Lookup"}],"writes":[{"columns":["Key","Value"],"type":"MultiUpsert"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Name":"Upsert","Table":"SecondaryKeys"}],"Plans":[{"PlanNodeId":8,"Operators":[{"E-Rows":"2","Predicate":"Contains","E-Cost":"0","E-Size":"10","Name":"Filter"}],"Node Type":"Filter"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestOffsetEstimation [GOOD] Test command err: 2026-01-04T02:40:05.782656Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-04T02:40:05.855230Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:05.855306Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:05.855362Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:05.855425Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-04T02:40:05.876025Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:05.896338Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "aaa" Generation: 1 Important: true } 2026-01-04T02:40:05.897408Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-04T02:40:05.900124Z node 1 :PERSQUEUE INFO: partition.cpp:712: 
[72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-04T02:40:05.903714Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-04T02:40:05.905676Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2026-01-04T02:40:05.917519Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:05.918012Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|f391a3bf-83ffe663-1e035d7b-7474b218_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [1:181:2194] 2026-01-04T02:40:06.389841Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:113:2057] recipient: [2:106:2138] 2026-01-04T02:40:06.422205Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:06.422258Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:06.422312Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:06.422358Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927938 is [2:158:2176] sender: [2:159:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:182:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:112:2142]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:184:2057] recipient: [2:104:2137] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:187:2057] recipient: [2:186:2195] Leader for TabletID 72057594037927937 is [2:188:2196] sender: [2:189:2057] recipient: [2:186:2195] 2026-01-04T02:40:06.463054Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:06.463122Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:06.463187Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:06.463254Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:112:2142]) rebooted! 
2026-01-04T02:40:06.473835Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 !Reboot 72057594037927937 (actor [2:112:2142]) tablet resolver refreshed! new actor is[2:188:2196] 2026-01-04T02:40:06.536156Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:06.567458Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:06.578159Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:06.609322Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:06.661031Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:06.692251Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:06.836695Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:06.847459Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:07.128186Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:07.200555Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:07.545349Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:07.712704Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:40:07.898012Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [2:188:2196] sender: [2:268:2057] recipient: [2:14:2061] 2026-01-04T02:40:08.028316Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:08.029035Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 2 actor [2:180:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: 
METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } Consumers { Name: "aaa" Generation: 2 Important: true } 2026-01-04T02:40:08.029935Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [2:274:2196] 2026-01-04T02:40:08.031682Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 3 [2:274:2196] 2026-01-04T02:40:08.033991Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [2:275:2196] 2026-01-04T02:40:08.035269Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 3 [2:275:2196] 2026-01-04T02:40:08.045382Z node 2 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:40:08.045758Z node 2 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|94c5fee3-1bd5505b-e7e53cc5-524858bd_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 0 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 1 } Cookie: 123 } via pipe: [2:180:2192] 2026-01-04T02:40:08.423337Z node 3 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 3 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:107:2057] recipient: [3:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:107:2057] recipient: [3:105:2138] Leader for TabletID 72057594037927937 is [3:111:2142] sender: [3:112:2057] recipient: [3:105:2138] 2026-01-04T02:40:08.474355Z node 3 :PQ_TX IN ... 4828Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:782:2555] 2026-01-04T02:42:09.217683Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:783:2555] 2026-01-04T02:42:09.228712Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:42:09.236479Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 8 [69:783:2555] 2026-01-04T02:42:09.258838Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:42:09.259920Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 8 [69:782:2555] 2026-01-04T02:42:09.282876Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [69:180:2192] 2026-01-04T02:42:09.325861Z node 69 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 69 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [69:723:2555] sender: [69:815:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [69:723:2555] sender: [69:818:2057] recipient: [69:104:2137] Leader for TabletID 72057594037927937 is [69:723:2555] sender: [69:821:2057] recipient: [69:820:2621] Leader for TabletID 72057594037927937 is [69:822:2622] sender: [69:823:2057] recipient: [69:820:2621] 2026-01-04T02:42:09.368487Z node 69 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:42:09.368561Z node 69 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:42:09.369231Z node 69 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:42:09.369274Z node 69 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:42:09.369847Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:883:2622] 2026-01-04T02:42:09.372005Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:884:2622] 2026-01-04T02:42:09.380277Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:42:09.387873Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 9 [69:884:2622] 2026-01-04T02:42:09.398682Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:42:09.406679Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 9 [69:883:2622] 2026-01-04T02:42:09.433605Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [69:180:2192] 2026-01-04T02:42:09.471880Z node 69 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 69 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [69:822:2622] sender: [69:918:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [69:822:2622] sender: [69:921:2057] recipient: [69:104:2137] Leader for TabletID 72057594037927937 is [69:822:2622] sender: [69:924:2057] recipient: [69:923:2692] Leader for TabletID 72057594037927937 is [69:925:2693] sender: [69:926:2057] recipient: [69:923:2692] 2026-01-04T02:42:09.521174Z node 69 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:42:09.521248Z node 69 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:42:09.521994Z node 69 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:42:09.522039Z node 69 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:42:09.522797Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:988:2693] 2026-01-04T02:42:09.525166Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:989:2693] 2026-01-04T02:42:09.535972Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:42:09.543568Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 10 [69:989:2693] 2026-01-04T02:42:09.552640Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:42:09.553885Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 10 [69:988:2693] 2026-01-04T02:42:09.590260Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [69:180:2192] 2026-01-04T02:42:09.622957Z node 69 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 69 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [69:925:2693] sender: [69:1023:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [69:925:2693] sender: [69:1026:2057] recipient: [69:104:2137] Leader for TabletID 72057594037927937 is [69:925:2693] sender: [69:1029:2057] recipient: [69:1028:2765] Leader for TabletID 72057594037927937 is [69:1030:2766] sender: [69:1031:2057] recipient: [69:1028:2765] 2026-01-04T02:42:09.677675Z node 69 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:42:09.677744Z node 69 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:42:09.678592Z node 69 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:42:09.678648Z node 69 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:42:09.679444Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:1095:2766] 2026-01-04T02:42:09.682249Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:1096:2766] 2026-01-04T02:42:09.693788Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:42:09.703434Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 11 [69:1096:2766] 2026-01-04T02:42:09.713576Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:42:09.714522Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 11 [69:1095:2766] 2026-01-04T02:42:09.737592Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 Send read request: PartitionRequest { Partition: 0 CmdRead { ClientId: "user" SessionId: "" Offset: 0 Count: 1 Bytes: 104857600 } Cookie: 123 } via pipe: [69:180:2192] 2026-01-04T02:42:09.784491Z node 69 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 69 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [69:1030:2766] sender: [69:1130:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [69:1030:2766] sender: [69:1133:2057] recipient: [69:104:2137] Leader for TabletID 72057594037927937 is [69:1030:2766] sender: [69:1136:2057] recipient: [69:1135:2840] Leader for TabletID 72057594037927937 is [69:1137:2841] sender: [69:1138:2057] recipient: [69:1135:2840] 2026-01-04T02:42:09.842838Z node 69 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:42:09.842898Z node 69 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:42:09.843651Z node 69 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:42:09.843702Z node 69 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:42:09.844465Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [69:1204:2841] 2026-01-04T02:42:09.847095Z node 69 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [69:1205:2841] 2026-01-04T02:42:09.857901Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:42:09.864595Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 12 [69:1205:2841] 2026-01-04T02:42:09.885747Z node 69 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--asdfgs--topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:42:09.887066Z node 69 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 12 [69:1204:2841] 2026-01-04T02:42:09.919314Z node 69 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. Tablet '72057594037927937' partition 0 offset 0 partno 0 count 7 parts 16 suffix '0' size 8364507 >> KqpExplain::SortStage [GOOD] >> KqpExplain::SelfJoin3xSameLabels |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> KqpParams::MissingOptionalParameter+UseSink [GOOD] >> KqpParams::MissingOptionalParameter-UseSink >> KqpScripting::StreamOperationTimeout [GOOD] >> KqpStats::JoinNoStatsScan |91.9%| [TA] $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpStats::StreamLookupStats+StreamLookupJoin [GOOD] >> KqpStats::StreamLookupStats-StreamLookupJoin |91.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |91.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/federated_query/generic_ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsStub [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:40:25.588157Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:40:25.718175Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:40:25.718685Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:40:25.718744Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:40:26.019342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:26.019479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:26.098307Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494422641932 != 1767494422641935 2026-01-04T02:40:26.115197Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:26.162719Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:26.252162Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:40:26.726965Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:26.727028Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:26.727064Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:26.730126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:26.730646Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:26.745348Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:26.875081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2026-01-04T02:40:26.978592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:40:26.978798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:40:26.979010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:40:26.979099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:40:26.979162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:40:26.979250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:40:26.979318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:40:26.979418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:40:26.979495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:40:26.979598Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:40:26.979706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:40:26.979822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:40:26.979995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:40:26.997176Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:140;event=start_subscribing_metadata; 2026-01-04T02:40:27.025143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:40:27.025232Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:40:27.025363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:40:27.025448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:40:27.025556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:40:27.025628Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:40:27.025686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:40:27.025760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:40:27.025872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:40:27.026019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:40:27.026097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:40:27.026197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:40:27.026261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:40:27.029116Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:140;event=start_subscribing_metadata; 2026-01-04T02:40:27.030580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:40:27.030644Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:40:27.030740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:40:27.030777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:40:27.030984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:40:27.031018Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranule ... 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:41:57.453929Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-04T02:41:57.453977Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037888;has_config=0; 2026-01-04T02:41:57.454014Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:41:57.454043Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier2' at tablet 72075186224037888 2026-01-04T02:41:57.454077Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037888 2026-01-04T02:41:57.454122Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier2' started at tablet 72075186224037888 2026-01-04T02:41:57.454172Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:41:57.454213Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-04T02:41:57.454243Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037889;has_config=0; 2026-01-04T02:41:57.454278Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:41:57.454307Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier2' at tablet 72075186224037889 2026-01-04T02:41:57.454334Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037889 2026-01-04T02:41:57.454371Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier2' started at tablet 72075186224037889 2026-01-04T02:41:57.454413Z node 1 :TX_TIERING DEBUG: log.cpp:841: 
fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:41:57.454575Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-04T02:41:57.454614Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037890;has_config=0; 2026-01-04T02:41:57.454649Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:41:57.454676Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier2' at tablet 72075186224037890 2026-01-04T02:41:57.454706Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037890 2026-01-04T02:41:57.454750Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier2' started at tablet 72075186224037890 2026-01-04T02:41:57.454796Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=1}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:41:57.456513Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-01-04T02:41:57.456627Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-01-04T02:41:57.456715Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:962:2815];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 2026-01-04T02:42:08.787337Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2026-01-04T02:42:08.787439Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2026-01-04T02:42:08.788032Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2026-01-04T02:42:08.788145Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-01-04T02:42:08.788195Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=72075186224037889;has_config=0; 2026-01-04T02:42:08.788253Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:08.788293Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:08.788360Z node 1 :TX_TIERING DEBUG: log.cpp:841: 
fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:42:08.788414Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-01-04T02:42:08.788444Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=72075186224037890;has_config=0; 2026-01-04T02:42:08.788476Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:08.788503Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:08.788546Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:42:08.788606Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2026-01-04T02:42:08.789287Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-01-04T02:42:08.789336Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2026-01-04T02:42:08.789374Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:08.789405Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:08.789451Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:42:08.789723Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-01-04T02:42:08.789756Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=72075186224037888;has_config=0; 2026-01-04T02:42:08.789788Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:08.789835Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:08.789878Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:42:08.790049Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-01-04T02:42:08.790148Z node 1 :TX_TIERING DEBUG: log.cpp:841: 
tablet_id=72075186224037890;self_id=[1:962:2815];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-01-04T02:42:08.790838Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 2026-01-04T02:42:09.661046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:42:09.661136Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037888 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037889 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037890 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> KqpQuery::QueryExplain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateOn-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 29159, MsgBus: 6033 2026-01-04T02:41:34.539716Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331051837736670:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:34.539760Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:34.782786Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:34.809581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:34.809696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:34.897494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:34.908099Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:34.911659Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331051837736450:2081] 1767494494480756 != 1767494494480759 2026-01-04T02:41:35.009911Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:35.019771Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:35.019799Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:35.019844Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:35.019914Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:35.490443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:35.497067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:41:35.536731Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:35.557086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:35.704776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:35.861124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:35.941053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:37.913529Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331064722640220:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:37.913667Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:37.913994Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331064722640230:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:37.914052Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:38.241283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:38.275482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:38.303914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:38.332831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:38.363718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:38.391680Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:38.423480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:38.469618Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:38.540343Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331069017608402:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:38.540430Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:38.540527Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331069017608407:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:38.540646Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331069017608409:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:38.540684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:38.543868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:38.553848Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331069017608410:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:41:38.621354Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331069017608464:3778] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:39.537451Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331051837736670:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:39.537509Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/EightShard","Name":" ... ) VolatileState: Connecting -> Connected 2026-01-04T02:42:03.461588Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:03.461616Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:03.461627Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:03.461726Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:03.511465Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:04.025126Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:04.043564Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:04.106224Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:04.259782Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:04.298154Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-04T02:42:04.369669Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:07.158995Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331190070895196:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:07.159109Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:07.159467Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331190070895206:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:07.159513Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:07.278902Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:07.314925Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:07.353064Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:07.391015Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:07.432437Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:07.475551Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:07.514153Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:07.566594Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:07.675611Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331190070896079:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:07.675720Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:07.675967Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331190070896084:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:07.676030Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331190070896085:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:07.676035Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:07.680538Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:07.695684Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331190070896088:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:07.768043Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331190070896139:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:08.237594Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331172891024167:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:08.237688Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"Tables":["EightShard"],"PlanNodeId":8,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/EightShard","Name":"Upsert","Table":"EightShard"},{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"Effect"},{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"E-Size":"0","LookupKeyColumns":["Key"],"Node Type":"TableLookup","PlanNodeId":2,"Path":"\/Root\/EightShard","Columns":["Key"],"E-Rows":"2","Plans":[{"PlanNodeId":1,"Operators":[{"Inputs":[],"Iterator":"precompute_0_1","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_1"}],"Table":"EightShard","PlanNodeType":"Connection","E-Cost":"0"}],"Node Type":"Stage"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"lookup_by":["Key"],"columns":["Key"],"type":"Lookup"}],"writes":[{"columns":["Data","Key"],"type":"MultiUpsert"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Name":"Upsert","Table":"EightShard"}],"Plans":[{"PlanNodeId":8,"Operators":[{"E-Rows":"2","Columns":["Key"],"Name":"TableLookup","E-Cost":"0","E-Size":"0","LookupKeyColumns":["Key"],"Table":"EightShard"}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TTabletPipeTest::TestShutdown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamOperationTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 14500, MsgBus: 3561 2026-01-04T02:40:48.748168Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330853358201879:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:48.748270Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:48.966369Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:48.998118Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:48.998246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:49.035009Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:49.081122Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:49.147524Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:49.147552Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:49.147565Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:49.147698Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:49.251353Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:49.537595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:49.591476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:49.725111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:49.819671Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:49.860716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:49.921993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:51.938169Z 
node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330866243105513:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.938287Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.938615Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330866243105523:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:51.938660Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.264515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.291655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.318284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.345370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.374394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.404075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.438127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.477909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.552030Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330870538073688:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.552128Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330870538073693:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.552159Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.552428Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330870538073695:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.552488Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:52.556315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:52.567076Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330870538073696:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:52.648477Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330870538073748:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:53.743171Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330853358201879:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:53.743252Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:54.546704Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494454579, txId: 281474976710674] shutting down 2026-01-04T02:40:54.832032Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494454859, txId: 281474976710677] shutting down 2026-01-04T02:40:55.089497Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: ... 71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:03.761363Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:03.856115Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:03.856213Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:03.858901Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:03.860533Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591331172835518751:2081] 1767494523724049 != 1767494523724052 2026-01-04T02:42:03.881485Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:03.950334Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:04.018010Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:04.018035Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:04.018041Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:04.018111Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:04.082376Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:04.536347Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:04.545355Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:42:04.552637Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:04.638831Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:04.838713Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:04.924891Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:05.008642Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:07.779220Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331190015389814:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:07.779329Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:07.779680Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331190015389823:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:07.779758Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:07.900082Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:07.938979Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:07.972242Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.001096Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.037476Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.076271Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.117402Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.173099Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.261255Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331194310357998:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.261342Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.261345Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331194310358003:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.261497Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331194310358005:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.261541Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.267557Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:08.278480Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591331194310358006:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:08.346463Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591331194310358058:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:08.726612Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591331172835518776:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:08.726698Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:10.359475Z node 2 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 1ms, session id ydb://session/3?node_id=2&id=NmMwMjc4NTctYjk2ZDFkZDQtZGMzNWU1ZDUtOTkyNjZkYjM= } |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest >> KqpLimits::WaitCAsStateOnAbort [GOOD] >> KqpLimits::WaitCAsTimeout >> TResourceBroker::TestCounters >> BootstrapperTest::LoneBootstrapper >> TTabletLabeledCountersAggregator::SimpleAggregation >> TPipeTrackerTest::TestAddSameTabletTwice [GOOD] >> TPipeTrackerTest::TestAddTwoTablets [GOOD] >> TTabletPipeTest::TestPipeConnectLoopUnknownTabletWithoutRetries >> TabletState::ExplicitUnsubscribe >> KqpExplain::ExplainScanQueryWithParams [GOOD] >> KqpExplain::FewEffects+UseSink >> KqpQuery::RowsLimitServiceOverride [GOOD] >> KqpQuery::SelectCountAsteriskFromVar >> TTabletLabeledCountersAggregator::SimpleAggregation [GOOD] >> TTabletLabeledCountersAggregator::HeavyAggregation >> KqpStats::MultiTxStatsFullYql [GOOD] >> KqpStats::MultiTxStatsFullScan >> KqpTypes::UnsafeTimestampCastV1 [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64-IsColumn |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestAddTwoTablets [GOOD] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> KqpExplain::LimitOffset [GOOD] >> KqpExplain::MultiUsedStage >> TResourceBroker::TestCounters [GOOD] >> TResourceBroker::TestChangeTaskType ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryExplain [GOOD] Test command err: Trying to start YDB, gRPC: 31566, MsgBus: 3513 2026-01-04T02:41:31.688427Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331036449509935:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:31.688564Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:31.913705Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:31.930016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:31.930150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:32.018515Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:32.021381Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:32.170130Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:32.170154Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:32.170161Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:32.170254Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:32.215898Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:32.653826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:32.659901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:32.696780Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:32.705495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:32.845046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:33.040514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:33.110684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:34.778262Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331049334413656:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:34.778408Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:34.778820Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331049334413666:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:34.778873Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:35.140726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:35.172845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:35.206001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:35.232994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:35.269506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:35.308354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:35.347701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:35.419982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:35.503446Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331053629381833:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:35.503531Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:35.503931Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331053629381839:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:35.504032Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331053629381838:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:35.504100Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:35.510133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:35.523836Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331053629381842:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:35.619258Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331053629381893:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:36.689922Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331036449509935:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:36.689987Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:37.243692Z node 1 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d07e9de0c80] received request Name# Ydb.Table.V1.TableService/ExecuteDataQuery ok# true data# session_id: "ydb://session/3?node_id=1&id=YjJkNmUyYjktNmI5YzYxOTItOTYyNDY1NTEtOWNmZGRhMWM=" tx_control { begin_tx { serializable_read_write { } } commit_tx: true } query { yql_text: "\n SELECT ... ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:05.622550Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:05.773379Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:05.814181Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:05.865380Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.736636Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331195641209630:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.736766Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.737143Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331195641209640:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.737214Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.809946Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.842205Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.874789Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.904074Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.931470Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:09.011088Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:09.052951Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:09.103571Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:09.186564Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331199936177809:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:09.186673Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:09.186938Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331199936177814:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:09.186976Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331199936177815:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:09.187035Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:09.190788Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:09.201653Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331199936177818:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:09.281267Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331199936177869:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:09.786985Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331178461338613:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:09.787071Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; AST: ( (let $1 (KqpTable '"/Root/Test" '"72057594046644480:45" '"" '1)) (let $2 '('"Amount" '"Comment" '"Group" '"Name")) (let $3 (Uint64 '"1001")) (let $4 '('('"ItemsLimit" $3) '('"Sequential" '1) '('"PointPrefixLen" '1))) (let $5 (Uint32 '1)) (let $6 (KqpRowsSourceSettings $1 $2 $4 '((KqlKeyExc $5 (String '"Name")) (KqlKeyInc $5)))) (let $7 (OptionalType (DataType 'String))) (let $8 (StructType '('"Amount" (OptionalType (DataType 'Uint64))) '('"Comment" $7) '('"Group" (OptionalType (DataType 'Uint32))) '('"Name" $7))) (let $9 '('('"_logical_id" '715) '('"_id" '"5d79c2d8-1ab7e014-323f52a4-6493c47") '('"_partition_mode" '"single") '('"_wide_channels" $8))) (let $10 (DqPhyStage '((DqSource (DataSource '"KqpReadRangesSource") $6)) (lambda '($14) (block '( (let $15 (lambda '($16) (Member $16 '"Amount") (Member $16 '"Comment") (Member $16 '"Group") (Member $16 '"Name"))) (return (FromFlow (ExpandMap (Take (ToFlow $14) $3) $15))) ))) $9)) (let $11 (DqCnUnionAll (TDqOutput $10 '"0"))) (let $12 (DqPhyStage '($11) (lambda '($17) (FromFlow (NarrowMap (Take (ToFlow $17) $3) (lambda '($18 $19 $20 $21) (AsStruct '('"Amount" $18) '('"Comment" $19) '('"Group" $20) '('"Name" $21)))))) '('('"_logical_id" '728) '('"_id" '"c76db983-d680b409-53a1d776-208dca5f")))) (let $13 (DqCnResult (TDqOutput $12 '"0") '())) (return (KqpPhysicalQuery '((KqpPhysicalTx '($10 $12) '($13) '() '('('"type" '"data")))) '((KqpTxResultBinding (ListType $8) '"0" '"0")) '('('"type" '"data_query")))) ) Plan: {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Sequential","ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"0","ReadLimit":"1001","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/Test","E-Rows":"1","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Test","reads":[{"lookup_by":["Group (1)"],"columns":["Amount","Comment","Group","Name"],"scan_by":["Name (Name, 
+∞)"],"limit":"1001","type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"ReadRange":["Group (1)","Name (Name, +∞)"],"E-Size":"0","ReadLimit":"1001","Name":"TableRangeScan","E-Rows":"1","Table":"Test","ReadColumns":["Amount","Comment","Group","Name"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TTabletPipeTest::TestShutdown [GOOD] >> TTabletPipeTest::TestTwoNodes >> BootstrapperTest::LoneBootstrapper [GOOD] >> BootstrapperTest::MultipleBootstrappers >> TResourceBrokerInstant::TestMerge >> KqpQuery::QueryStats-UseSink [GOOD] >> KqpQuery::QueryFromSqs >> TResourceBroker::TestChangeTaskType [GOOD] >> KqpQuery::QueryCache [GOOD] >> KqpQuery::QueryCacheTtl >> TTabletPipeTest::TestPipeConnectLoopUnknownTabletWithoutRetries [GOOD] >> TTabletPipeTest::TestPipeConnectLoopLeaderDownWithoutRetries >> TResourceBroker::TestResubmitTask >> KqpQuery::QueryTimeout [GOOD] >> KqpQuery::RandomNumber >> TResourceBrokerInstant::TestMerge [GOOD] >> TTabletCountersAggregator::ColumnShardCounters >> KqpQuery::ExtendedTimeOutOfBounds+BulkUpsert [GOOD] >> KqpQuery::ExtendedTimeOutOfBounds-BulkUpsert >> KqpQuery::CreateTableAs_PragmaAndParamsAndNamedExprs [GOOD] >> KqpQuery::CreateAsSelect_DisableDataShard >> TabletState::ExplicitUnsubscribe [GOOD] >> TabletState::ImplicitUnsubscribeOnDisconnect >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] >> TTabletPipeTest::TestTwoNodes [GOOD] >> TTabletCountersAggregator::ColumnShardCounters [GOOD] >> KqpStats::CreateTableAsStats-IsOlap [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestChangeTaskType [GOOD] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TResourceBroker::TestResubmitTask [GOOD] >> TResourceBroker::TestUpdateCookie >> TResourceBroker::TestRealUsage >> KqpQuery::UdfMemoryLimit [GOOD] >> KqpQuery::TryToUpdateNonExistentColumn >> TResourceBroker::TestUpdateCookie [GOOD] >> TResourceBroker::TestRealUsage [GOOD] >> TResourceBroker::TestRandomQueue |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestTwoNodes [GOOD] |91.9%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::ColumnShardCounters [GOOD] |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectLoopLeaderDownWithoutRetries [GOOD] >> TFlatMetrics::TimeSeriesKV2 [GOOD] >> TPipeCacheTest::TestAutoConnect >> TResourceBroker::TestRandomQueue [GOOD] >> TabletState::ImplicitUnsubscribeOnDisconnect [GOOD] |91.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestUpdateCookie [GOOD] |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpen >> KqpStats::OneShardLocalExec+UseSink [GOOD] >> KqpStats::OneShardLocalExec-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectLoopLeaderDownWithoutRetries [GOOD] Test command err: 
2026-01-04T02:42:13.510990Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.007942s ... waiting for connect1 ... waiting for connect2 ... waiting for connect3 ... waiting for connect4 ... waiting for connect5 ... waiting for connect6 ... waiting for connect7 ... waiting for connect8 ... waiting for connect9 ... waiting for connect10 ... waiting for boot1 ... waiting for connect1 ... waiting for client destroyed notification ... waiting for connect2 ... waiting for connect3 ... waiting for connect4 ... waiting for connect5 ... waiting for connect6 ... waiting for connect7 ... waiting for connect8 ... waiting for connect9 ... waiting for connect10 ... waiting for connect11 |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpen [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/yql/unittest >> KqpScripting::StreamExecuteYqlScriptScanScalar [GOOD] Test command err: Trying to start YDB, gRPC: 3422, MsgBus: 23386 2026-01-04T02:40:51.933236Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330866968285433:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:51.933958Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:40:52.151123Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:40:52.157942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:52.158062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:52.161204Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:52.227937Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:52.297621Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:52.297639Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:52.297662Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:52.297726Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:52.406611Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:40:52.633330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:52.671120Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.809978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:52.941714Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:52.960746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:53.039592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:54.624461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330879853189147:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:54.624595Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:54.625104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330879853189157:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:54.625141Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.032411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.069707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.102970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.137595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.170239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.206934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.252121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.299791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:55.377608Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330884148157323:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.377694Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.377761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330884148157328:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.377917Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330884148157330:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.377966Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:55.381433Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:55.390845Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330884148157332:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:40:55.477487Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330884148157383:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:56.933909Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330866968285433:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:40:56.934019Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:40:57.299915Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494457330, txId: 281474976710674] shutting down 2026-01-04T02:40:57.557649Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494457596, txId: 281474976710677] shutting down 2026-01-04T02:40:57.838735Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: ... : kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494524250, txId: 281474976711268] shutting down 2026-01-04T02:42:04.707782Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494524733, txId: 281474976711271] shutting down Trying to start YDB, gRPC: 14668, MsgBus: 12484 2026-01-04T02:42:05.934628Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331182683346185:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:05.935542Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:05.965915Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:06.023523Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:06.068244Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:06.068348Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:06.071197Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:06.154563Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:06.154592Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:06.154602Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:06.154694Z node 2 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:06.215009Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:06.620451Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:06.632049Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:06.695736Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:06.855857Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:07.024781Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:07.036087Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:09.967741Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331199863217180:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:09.967825Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:09.968076Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331199863217189:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:09.968133Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:10.040896Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:10.068039Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:10.097560Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:10.171154Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:10.206538Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:10.241715Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:10.274802Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:10.332338Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:10.412333Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331204158185360:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:10.412431Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:10.412531Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331204158185365:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:10.412593Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331204158185367:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:10.412626Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:10.416707Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:10.429009Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591331204158185369:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:10.508350Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591331204158185420:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:10.934581Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591331182683346185:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:10.934682Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:13.675590Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494533693, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestRandomQueue [GOOD] Test command err: 2026-01-04T02:42:15.866931Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-14 (14 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.867069Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-17 (17 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.867172Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-20 (20 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.867263Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-24 (24 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.867465Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-33 (33 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.867643Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-40 (40 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.867736Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-44 (44 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.867835Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-47 (47 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.867902Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-51 (51 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.867937Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-53 (53 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.867980Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-54 (54 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.868017Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-55 (55 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.868092Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-58 (58 by [2:103:2137])' of unknown type 'wrong' to default queue 
2026-01-04T02:42:15.868169Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-61 (61 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.868356Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-71 (71 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.868491Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-77 (77 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.868564Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-81 (81 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.868590Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-82 (82 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.868656Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-85 (85 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.868689Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-86 (86 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.868812Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-92 (92 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.868858Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-93 (93 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.868923Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-96 (96 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.868990Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-98 (98 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.869065Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-101 (101 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.869236Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-108 (108 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.869316Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-110 (110 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.869386Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-115 (115 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.869411Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-116 (116 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.869444Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-118 (118 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.869551Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-124 (124 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.869609Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-127 (127 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.869679Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-131 (131 by 
[2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.869859Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-144 (144 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.869933Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-148 (148 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.870014Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-154 (154 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.870092Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-160 (160 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.870158Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-164 (164 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.870263Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-172 (172 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.870317Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-176 (176 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.870359Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-179 (179 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.870428Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-183 (183 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.870516Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-189 (189 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.870570Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-193 (193 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.870603Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-194 (194 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.870662Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-198 (198 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.870692Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-199 (199 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.870781Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-203 (203 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.870844Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-205 (205 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.870975Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-210 (210 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.871043Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-213 (213 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.871069Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-214 (214 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.871215Z node 2 :RESOURCE_BROKER ERROR: 
resource_broker.cpp:675: Assigning waiting task 'task-226 (226 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.871271Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-229 (229 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.871303Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-230 (230 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.871328Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-231 (231 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.871381Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-235 (235 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.871403Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-236 (236 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.871430Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-237 (237 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.871535Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-246 (246 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.871568Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-247 (247 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.871660Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-253 (253 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.871685Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-254 (254 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.871771Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-261 (261 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.871795Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-262 (262 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.871861Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting task 'task-265 (265 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.871913Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning waiting ... 
ROR: resource_broker.cpp:675: Assigning in-fly task 'task-619 (619 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.910752Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-621 (621 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.910810Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-671 (671 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.910852Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-700 (700 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.910946Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-767 (767 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.911055Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-787 (787 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.911203Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-826 (826 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.911246Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-827 (827 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.911268Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-855 (855 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.911332Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-899 (899 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.911376Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-907 (907 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.911415Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-910 (910 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.911515Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-964 (964 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.911594Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-14 (14 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.911632Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-55 (55 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.911657Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-82 (82 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.911690Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-101 (101 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.911725Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-116 (116 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.911749Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-154 (154 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.911784Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-164 (164 by [2:103:2137])' of unknown type 'wrong' to default queue 
2026-01-04T02:42:15.911842Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-176 (176 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.911901Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-183 (183 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.911942Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-205 (205 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.911995Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-226 (226 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912030Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-229 (229 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912055Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-230 (230 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912087Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-235 (235 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912168Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-261 (261 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912211Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-265 (265 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912234Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-281 (281 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912266Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-296 (296 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912333Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-305 (305 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912350Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-308 (308 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912381Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-330 (330 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912416Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-357 (357 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912447Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-365 (365 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912492Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-366 (366 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912533Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-377 (377 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912552Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-385 (385 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912612Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-406 (406 by 
[2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912645Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-425 (425 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912714Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-458 (458 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912742Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-477 (477 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912785Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-484 (484 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912815Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-485 (485 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912875Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-565 (565 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912901Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-566 (566 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912929Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-578 (578 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912953Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-591 (591 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.912997Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-609 (609 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.913023Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-613 (613 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.913109Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-636 (636 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.913212Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-720 (720 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.913287Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-747 (747 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.913330Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-751 (751 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.913391Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-761 (761 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.913526Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-813 (813 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.913557Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-814 (814 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.913609Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-835 (835 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.913664Z node 2 :RESOURCE_BROKER ERROR: 
resource_broker.cpp:675: Assigning in-fly task 'task-886 (886 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.913733Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-937 (937 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.913758Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-940 (940 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.913787Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-957 (957 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.913852Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-963 (963 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.913882Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-973 (973 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.913939Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-994 (994 by [2:103:2137])' of unknown type 'wrong' to default queue 2026-01-04T02:42:15.913976Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:675: Assigning in-fly task 'task-999 (999 by [2:103:2137])' of unknown type 'wrong' to default queue |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/yql/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TabletState::ImplicitUnsubscribeOnDisconnect [GOOD] |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::CreateTableAsStats-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 22739, MsgBus: 6586 2026-01-04T02:41:43.906493Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331089927235039:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:43.906622Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:44.115442Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:44.123940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:44.124023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:44.141752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:44.174013Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:44.175299Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331089927234996:2081] 1767494503896529 != 1767494503896532 2026-01-04T02:41:44.265273Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:44.265317Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:44.265332Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:44.265448Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:44.363545Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:44.688749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:44.751280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:44.906591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:44.913128Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:45.077266Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:45.147970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:47.062603Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331107107106053:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:47.063147Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:47.063553Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331107107106063:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:47.063629Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:47.402837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:47.440507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:47.470522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:47.497148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:47.531397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:47.568160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:47.635613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:47.676917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:47.742693Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331107107106932:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:47.742818Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:47.743130Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331107107106938:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:47.743134Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331107107106937:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:47.743180Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:47.745742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:47.754639Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331107107106941:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:47.830510Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331107107106992:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:48.906431Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331089927235039:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:48.906493Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 20995, MsgBus: 32028 2026-01-04T02:41:50.403873Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331119631486422:2196];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:50.403986Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_sn ... rators":[{"Name":"FillTable","Table":"Destination","SinkType":"KqpTableSink"}],"Node Type":"FillTable"}],"Node Type":"Query","PlanNodeType":"Query"}} 2026-01-04T02:42:13.739157Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:42:13.750564Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) query_phases { duration_us: 10758 table_access { name: "/Root/.tmp/sessions/e284a967-4f79-9623-27a8-b29eac0e9386/Root/Destination_2cc3104e-4571-8b79-2ef5-a294d047f711" updates { rows: 2 bytes: 24 } partitions_count: 1 } table_access { name: "/Root/Source" reads { rows: 2 bytes: 24 } partitions_count: 1 } cpu_time_us: 5772 affected_shards: 1 } compilation { duration_us: 14687 cpu_time_us: 8595 } process_cpu_time_us: 1109 query_plan: "{\"Plan\":{\"Plans\":[{\"Tables\":[\"Destination\"],\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"Source\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/Source\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"Source\",\"ReadColumns\":[\"Col1 (-\342\210\236, +\342\210\236)\",\"Col2\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node 
Type\":\"Stage\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7,\"History\":[3,7]}},\"Name\":\"4\",\"Push\":{\"WaitTimeUs\":{\"Count\":1,\"Sum\":1444,\"Max\":1444,\"Min\":1444,\"History\":[3,1444]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[3,1048576]},\"Introspections\":[\"1 tasks for a single\\/sequential source scan\"],\"Tasks\":1,\"OutputRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FinishedTasks\":1,\"IngressRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"PhysicalStageId\":0,\"Mkql\":{},\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/Source\",\"ReadRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"ReadBytes\":{\"Count\":1,\"Sum\":24,\"Max\":24,\"Min\":24}}],\"BaseTimeMs\":1767494533635,\"OutputBytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7},\"CpuTimeUs\":{\"Count\":1,\"Sum\":728,\"Max\":728,\"Min\":728,\"History\":[3,728]},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64,\"History\":[3,64]}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64,\"History\":[3,64]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1472,\"Max\":1472,\"Min\":1472,\"History\":[3,1472]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"UpdateTimeMs\":3}}],\"Node Type\":\"Map\",\"PlanNodeType\":\"Connection\"}],\"Node 
Type\":\"Stage\",\"Stats\":{\"Egress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":28,\"Max\":28,\"Min\":28,\"History\":[7,28]}},\"Name\":\"KqpTableSink\",\"Egress\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Splits\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7},\"ActiveMessageMs\":{\"Count\":1,\"Max\":7,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64},\"ActiveTimeUs\":{\"Count\":1,\"Sum\":5000,\"Max\":5000,\"Min\":5000}},\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":28,\"Max\":28,\"Min\":28,\"History\":[7,28]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1252,\"Max\":1252,\"Min\":1252,\"History\":[7,1252]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":2,\"Min\":1}}}],\"UseLlvm\":\"undefined\",\"Table\":[{\"Path\":\"\\/Root\\/.tmp\\/sessions\\/e284a967-4f79-9623-27a8-b29eac0e9386\\/Root\\/Destination_2cc3104e-4571-8b79-2ef5-a294d047f711\"}],\"PhysicalStageId\":1,\"FinishedTasks\":1,\"InputBytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7},\"Introspections\":[\"1 tasks same as previous stage\"],\"EgressBytes\":{\"Count\":1,\"Sum\":64,\"Max\":64,\"Min\":64},\"DurationUs\":{\"Count\":1,\"Sum\":5000,\"Max\":5000,\"Min\":5000},\"Mkql\":{},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[7,1048576]},\"BaseTimeMs\":1767494533635,\"CpuTimeUs\":{\"Count\":1,\"Sum\":730,\"Max\":730,\"Min\":730,\"History\":[7,730]},\"EgressRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"StageDurationUs\":5000,\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7,\"History\":[7,7]}},\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":7,\"Max\":7,\"Min\":7,\"History\":[7,7]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1765,\"Max\":1765,\"Min\":1765,\"History\":[7,1765]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":2,\"Min\":1}}}],\"UpdateTimeMs\":7,\"InputRows\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Tasks\":1}}],\"Operators\":[{\"Inputs\":[],\"Path\":\"\\/Root\\/Destination\",\"Name\":\"FillTable\",\"Table\":\"Destination\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Sink\"}],\"Node 
Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":14687,\"CpuTimeUs\":8595},\"ProcessCpuTimeUs\":1109,\"TotalDurationUs\":375995,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":0},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Name\":\"FillTable\",\"Table\":\"Destination\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"FillTable\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/Source\" \'\"72057594046644480:42\" \'\"\" \'1))\n(let $2 (KqpRowsSourceSettings $1 \'(\'\"Col1\" \'\"Col2\") \'() (Void) \'()))\n(let $3 \'(\'(\'\"_logical_id\" \'474) \'(\'\"_id\" \'\"212503-aaba943d-51b30f30-e98ae2f2\") \'(\'\"_partition_mode\" \'\"single\") \'(\'\"_wide_channels\" (StructType \'(\'\"Col1\" (DataType \'Uint64)) \'(\'\"Col2\" (OptionalType (DataType \'Int32)))))))\n(let $4 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $2)) (lambda \'($10) (block \'(\n (let $11 (lambda \'($12) (Member $12 \'\"Col1\") (Member $12 \'\"Col2\")))\n (return (FromFlow (ExpandMap (ToFlow $10) $11)))\n))) $3))\n(let $5 (DqCnMap (TDqOutput $4 \'\"0\")))\n(let $6 \'\"/Root/.tmp/sessions/e284a967-4f79-9623-27a8-b29eac0e9386/Root/Destination_2cc3104e-4571-8b79-2ef5-a294d047f711\")\n(let $7 (KqpTable $6 \'\"\" \'\"\" \'\"\"))\n(let $8 (KqpTableSinkSettings $7 \'\"true\" \'\"fill_table\" \'\"0\" \'\"true\" \'\"false\" \'\"false\" \'(\'(\'\"OriginalPath\" \'\"/Root/Destination\"))))\n(let $9 (DqPhyStage \'($5) (lambda \'($13) (FromFlow (NarrowMap (ToFlow $13) (lambda \'($14 $15) (AsStruct \'(\'\"Col1\" $14) \'(\'\"Col2\" $15)))))) \'(\'(\'\"_logical_id\" \'539) \'(\'\"_id\" \'\"aec94c86-764a992f-50c44db6-9895695\")) \'((DqSink \'\"0\" (DataSink \'\"KqpTableSink\" \'\"db\") $8))))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($4 $9) \'() \'() \'(\'(\'\"type\" \'\"generic\") \'(\'\"with_effects\")))) \'() \'(\'(\'\"type\" \'\"query\"))))\n)\n" total_duration_us: 375995 total_cpu_time_us: 15476 query_meta: 
"{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/.tmp/sessions/e284a967-4f79-9623-27a8-b29eac0e9386/Root/Destination_2cc3104e-4571-8b79-2ef5-a294d047f711\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":47},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Col1\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":true,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Col2\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"Int32\\\",\\\"TypeId\\\":1,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Col1\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1767494533\",\"query_type\":\"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"a0ead8a4-1b1314a-d9f77883-f9baeb43\",\"version\":\"1.0\"}" 2026-01-04T02:42:13.888524Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591331197207521652:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:13.888596Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TPipeTrackerTest::TestShareTablet [GOOD] >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] >> TPipeCacheTest::TestAutoConnect [GOOD] >> TTabletLabeledCountersAggregator::Version3Aggregation >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] |92.0%| [TA] $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} |92.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... results_accumulator.log} |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendWithoutWaitOpenToWrongTablet [GOOD] |92.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/yql/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestIdempotentAttachDetach [GOOD] |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAvg16 [GOOD] >> TFlatMetrics::TimeSeriesAVG [GOOD] >> TTabletLabeledCountersAggregator::Version3Aggregation [GOOD] >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen >> KqpTypes::Time64Columns-EnableTableDatetime64-IsColumn [GOOD] >> KqpTypes::Time64Columns-EnableTableDatetime64+IsColumn >> KqpParams::MissingOptionalParameter-UseSink [GOOD] >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestAutoConnect [GOOD] |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> KqpExplain::SelfJoin3xSameLabels [GOOD] >> KqpExplain::SqlIn >> TPipeCacheTest::TestIdleRefresh |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAVG [GOOD] |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterReboot >> KqpStats::StreamLookupStats-StreamLookupJoin [GOOD] >> KqpStats::SelfJoin >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] >> BootstrapperTest::KeepExistingTablet >> TPipeCacheTest::TestIdleRefresh [GOOD] >> TPipeCacheTest::TestTabletNode >> TFlatMetrics::TimeSeriesAvg4 [GOOD] >> TFlatMetrics::TimeSeriesKV [GOOD] >> KqpExplain::UpdateSecondaryConditional-UseSink [GOOD] >> KqpExplain::UpdateSecondaryConditionalPrimaryKey+UseSink >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket >> TabletState::NormalLifecycle >> BootstrapperTest::MultipleBootstrappers [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationHistNamedSingleBucket [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegular >> TTabletPipeTest::TestSendAfterReboot [GOOD] >> TTabletPipeTest::TestSendBeforeBootTarget >> TPipeCacheTest::TestTabletNode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestClientDisconnectAfterPipeOpen [GOOD] Test command err: { LabeledCountersByGroup { Group: "aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } LabeledCountersByGroup { Group: "cons/aaa|1|aba/caba/daba|man" LabeledCounter { Value: 13 AggregateFunc: EAF_SUM Type: CT_SIMPLE NameId: 0 } Delimiter: "|" } CounterNames: "value1" } 2026-01-04T02:42:18.117613Z node 3 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437185] NodeDisconnected NodeId# 2 |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesKV [GOOD] >> TabletState::NormalLifecycle [GOOD] |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::MultipleBootstrappers [GOOD] Test command err: ... waiting for pipe to connect ... stopping current instance ... waiting for pipe to disconnect ... waiting for pipe to connect ... 
sleeping for 2 seconds 2026-01-04T02:42:14.524484Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:14.524559Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:14.524621Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:14.525536Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-04T02:42:14.525604Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 838756400823690829 2026-01-04T02:42:14.525898Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-04T02:42:14.525961Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 2303809724928703835 2026-01-04T02:42:14.526126Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-04T02:42:14.526159Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 13151740404452589043 2026-01-04T02:42:14.527097Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2026-01-04T02:42:14.527256Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2026-01-04T02:42:14.527342Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2026-01-04T02:42:14.527364Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 9437184, type: Dummy, boot 2026-01-04T02:42:14.527524Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2026-01-04T02:42:14.527623Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2026-01-04T02:42:14.527653Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.190190s 2026-01-04T02:42:14.527857Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: FREE 2026-01-04T02:42:14.527890Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.153108s 2026-01-04T02:42:14.767047Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:14.767826Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:285:2098] 2026-01-04T02:42:14.768332Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-04T02:42:14.768379Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting 2026-01-04T02:42:14.822094Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:14.822784Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:285:2098] 2026-01-04T02:42:14.823213Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-04T02:42:14.823249Z node 3 :BOOTSTRAPPER INFO: 
bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting ... waiting for pipe to connect ... tablet initially started on node 4 (idx 2) in gen 2 ... disconnecting other nodes ... sleeping for 2 seconds (tablet expected to survive) 2026-01-04T02:42:15.721537Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 5 2026-01-04T02:42:15.721613Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 3 2026-01-04T02:42:15.721756Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2026-01-04T02:42:15.721833Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:15.721939Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2026-01-04T02:42:15.721966Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:15.722759Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:285:2098] 2026-01-04T02:42:15.722884Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:285:2098] 2026-01-04T02:42:15.723685Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-04T02:42:15.723726Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting 2026-01-04T02:42:15.723845Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-04T02:42:15.723869Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting ... disconnecting other nodes (new tablet connections fail) ... sleeping for 2 seconds (tablet expected to survive) 2026-01-04T02:42:16.511136Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 3 2026-01-04T02:42:16.511207Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [9437184] NodeDisconnected NodeId# 5 2026-01-04T02:42:16.511303Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2026-01-04T02:42:16.511346Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:16.511392Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:321: tablet: 9437184, type: Dummy, disconnected 2026-01-04T02:42:16.511416Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:16.512011Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:285:2098] 2026-01-04T02:42:16.512196Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:285:2098] ... disconnecting nodes 2 <-> 1 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 ... disconnecting nodes 2 <-> 3 (tablet connect attempt) ... 
blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 2026-01-04T02:42:16.512570Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2026-01-04T02:42:16.512601Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 10053858333920509680 2026-01-04T02:42:16.512656Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2026-01-04T02:42:16.512675Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 11851482555838222794 2026-01-04T02:42:16.513060Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2026-01-04T02:42:16.513124Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2026-01-04T02:42:16.513144Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:589: tablet: 9437184, type: Dummy, become watch on node 4 (owner) 2026-01-04T02:42:16.513249Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2026-01-04T02:42:16.513265Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:589: tablet: 9437184, type: Dummy, become watch on node 4 (owner) ... disconnect other nodes (new owner expected) ... sleeping for 2 seconds (new tablet expected to start once) 2026-01-04T02:42:17.299543Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:661: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335029 2026-01-04T02:42:17.299631Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:17.299725Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:661: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335029 2026-01-04T02:42:17.299760Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:17.300753Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:285:2098] 2026-01-04T02:42:17.300975Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [4:285:2098] ... disconnecting nodes 2 <-> 1 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 ... disconnecting nodes 2 <-> 3 (tablet connect attempt) ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 2026-01-04T02:42:17.301658Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2026-01-04T02:42:17.301703Z node 3 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 6622044195218853944 2026-01-04T02:42:17.301940Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: ERROR 2026-01-04T02:42:17.301992Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 16651091084497209256 ... disconnecting nodes 2 <-> 1 (bootstrap watch attempt) ... blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER cookie 16045690984833335031 ... disconnecting nodes 2 <-> 3 (bootstrap watch attempt) ... 
blocking NKikimr::TEvBootstrapper::TEvWatch from TABLET_BOOTSTRAPPER to TABLET_BOOTSTRAPPER cookie 16045690984833335031 2026-01-04T02:42:17.302610Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:421: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335031 2026-01-04T02:42:17.302659Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: DISCONNECTED 2026-01-04T02:42:17.302745Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 3 state: FREE 2026-01-04T02:42:17.302783Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: FREE 2026-01-04T02:42:17.302806Z node 3 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 9437184, type: Dummy, boot 2026-01-04T02:42:17.302981Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:421: tablet: 9437184, type: Dummy, disconnected from 4, round 16045690984833335031 2026-01-04T02:42:17.303008Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: DISCONNECTED 2026-01-04T02:42:17.303038Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:517: tablet: 9437184, type: Dummy, lost round, wait for 0.173014s 2026-01-04T02:42:17.305754Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:733: tablet: 9437184, type: Dummy, tablet dead 2026-01-04T02:42:17.305839Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:17.309688Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:440:2098] 2026-01-04T02:42:17.327666Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-04T02:42:17.327732Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting 2026-01-04T02:42:17.403296Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:17.404073Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: OK, leader: [3:440:2098] 2026-01-04T02:42:17.404913Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 9437184, type: Dummy, connect: OK 2026-01-04T02:42:17.404955Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:295: tablet: 9437184, type: Dummy, connected to leader, waiting ... 
waiting for pipe to connect >> KqpParams::CheckQueryLimitsWorksAsExpected [GOOD] >> KqpParams::CheckQueryLimitsWorksAsExpectedQueryService >> KqpQuery::ExtendedTimeOutOfBounds-BulkUpsert [GOOD] >> KqpQuery::ExecuteDataQueryCollectMeta |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor >> TResourceBroker::TestOverusage |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TPipeCacheTest::TestTabletNode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_ttl/unittest >> TSchemeShardTTLTestsWithReboots::CopyTable [GOOD] Test command err: ==== RunWithTabletReboots =========== RUN: Trace =========== Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] IGNORE Leader for TabletID 72057594046316545 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:120:2058] recipient: [1:115:2144] Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:123:2058] recipient: [1:116:2145] IGNORE Leader for TabletID 72057594046447617 is [0:0:0] sender: [1:123:2058] recipient: [1:116:2145] Leader for TabletID 72057594046316545 is [1:126:2150] sender: [1:131:2058] recipient: [1:113:2143] Leader for TabletID 72057594046678944 is [1:134:2157] sender: [1:136:2058] recipient: [1:115:2144] Leader for TabletID 72057594046447617 is [1:139:2160] sender: [1:141:2058] recipient: [1:116:2145] 2026-01-04T02:39:41.669276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:39:41.669361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:41.669400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:39:41.669501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:39:41.669741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:39:41.669798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:39:41.669888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:39:41.669957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:39:41.670793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:39:41.671102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:39:41.799416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:39:41.799487Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:41.800454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# Leader for TabletID 72057594046447617 is [1:139:2160] sender: [1:187:2058] recipient: [1:15:2062] 2026-01-04T02:39:41.815542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:39:41.815657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:39:41.815813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:39:41.820291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:39:41.820751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:39:41.821416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:39:41.821793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:39:41.824852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:41.825009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:39:41.826144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:39:41.826198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:39:41.826314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:39:41.826355Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:39:41.826406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:39:41.826646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:39:41.957638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.958639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.958756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.958898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.958994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.959097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.959202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.959273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.959406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.959473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.959560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.959656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:39:41.959743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:218:2218] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "m ... 
in: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1167 } } CommitVersion { Step: 5000040 TxId: 1003 } 2026-01-04T02:42:17.107055Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2026-01-04T02:42:17.107214Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1167 } } CommitVersion { Step: 5000040 TxId: 1003 } 2026-01-04T02:42:17.107345Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409547 Status: COMPLETE TxId: 1003 Step: 5000040 OrderId: 1003 ExecLatency: 3 ProposeLatency: 5 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186233409547 CpuTimeUsec: 1167 } } CommitVersion { Step: 5000040 TxId: 1003 } debug: NTableState::TProposedWaitParts operationId# 1003:0 2026-01-04T02:42:17.108185Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 838 RawX2: 420906797815 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-01-04T02:42:17.108231Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 1003, tablet: 72075186233409547, partId: 0 2026-01-04T02:42:17.108336Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 1003:0, at schemeshard: 72057594046678944, message: Source { RawX1: 838 RawX2: 420906797815 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-01-04T02:42:17.108383Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:42:17.108456Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 1003:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 838 RawX2: 420906797815 } Origin: 72075186233409547 State: 2 TxId: 1003 Step: 0 Generation: 2 2026-01-04T02:42:17.108522Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 1003:0, shardIdx: 72057594046678944:2, shard: 72075186233409547, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:17.108564Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-04T02:42:17.108601Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 
1003:0, datashard: 72075186233409546, at schemeshard: 72057594046678944 2026-01-04T02:42:17.108648Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 1003:0, datashard: 72075186233409547, at schemeshard: 72057594046678944 2026-01-04T02:42:17.108683Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1003:0 129 -> 240 2026-01-04T02:42:17.111332Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-04T02:42:17.111770Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-04T02:42:17.112225Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-04T02:42:17.112279Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 1003:0ProgressState, operation type TxCopyTable 2026-01-04T02:42:17.112337Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 1003:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2026-01-04T02:42:17.112396Z node 98 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 1003, done: 0, blocked: 1 2026-01-04T02:42:17.112455Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 1003:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 1003 Name: CopyTableBarrier }, at tablet# 72057594046678944 2026-01-04T02:42:17.112487Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1003:0 240 -> 240 2026-01-04T02:42:17.115649Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1003:0, at schemeshard: 72057594046678944 2026-01-04T02:42:17.115702Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1003:0 ProgressState 2026-01-04T02:42:17.115798Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-01-04T02:42:17.115842Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-04T02:42:17.115887Z node 98 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1003:0 progress is 1/1 2026-01-04T02:42:17.115913Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-04T02:42:17.115945Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1003, ready parts: 1/1, is published: true 2026-01-04T02:42:17.115980Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1003 ready parts: 1/1 2026-01-04T02:42:17.116015Z node 98 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1003:0 2026-01-04T02:42:17.116044Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1003:0 
2026-01-04T02:42:17.116174Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-04T02:42:17.116209Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 TestModificationResult got TxId: 1003, wait until txId: 1003 TestWaitNotification wait txId: 1003 2026-01-04T02:42:17.119153Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 1003: send EvNotifyTxCompletion 2026-01-04T02:42:17.119203Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 1003 2026-01-04T02:42:17.119553Z node 98 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1003, at schemeshard: 72057594046678944 2026-01-04T02:42:17.119614Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 1003: got EvNotifyTxCompletionResult 2026-01-04T02:42:17.119642Z node 98 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 1003: satisfy waiter [98:937:2897] TestWaitNotification: OK eventTxId 1003 2026-01-04T02:42:17.120045Z node 98 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TTLEnabledTableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:42:17.120224Z node 98 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TTLEnabledTableCopy" took 209us result status StatusSuccess 2026-01-04T02:42:17.120683Z node 98 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TTLEnabledTableCopy" PathDescription { Self { Name: "TTLEnabledTableCopy" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1003 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TTLEnabledTableCopy" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "modified_at" Type: "Timestamp" TypeId: 50 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 TTLSettings { Enabled { ColumnName: "modified_at" ExpireAfterSeconds: 3600 Tiers { ApplyAfterSeconds: 3600 Delete { } } } } IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 39 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegular [GOOD] >> BootstrapperTest::KeepExistingTablet [GOOD] >> BootstrapperTest::DuplicateNodes |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TabletState::NormalLifecycle [GOOD] |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_ttl/unittest >> TResourceBroker::TestOverusage [GOOD] >> TResourceBroker::TestNotifyActorDied >> TTabletLabeledCountersAggregator::HeavyAggregation [GOOD] >> ConfiguredTabletBootstrapperTest::ConfigChangeHandling >> KqpExplain::MultiUsedStage [GOOD] >> KqpExplain::MergeConnection >> TTabletPipeTest::TestRebootUsingTabletWithoutAcceptor [GOOD] >> KqpExplain::FewEffects+UseSink [GOOD] >> TTabletPipeTest::TestRewriteSameNode >> KqpExplain::FewEffects-UseSink >> KqpStats::JoinNoStatsScan [GOOD] >> KqpStats::DeferredEffects+UseSink >> TTabletPipeTest::TestKillClientBeforServerIdKnown >> TResourceBroker::TestNotifyActorDied [GOOD] >> KqpStats::MultiTxStatsFullScan [GOOD] >> KqpLimits::StreamWrite-Allowed [GOOD] >> KqpLimits::TooBigColumn+useSink >> TTabletPipeTest::TestKillClientBeforServerIdKnown [GOOD] >> TTabletPipeTest::TestInterconnectSession >> TTabletPipeTest::TestRewriteSameNode [GOOD] >> KqpQuery::RandomNumber [GOOD] >> KqpQuery::RandomUuid >> KqpQuery::SelectCountAsteriskFromVar [GOOD] >> ConfiguredTabletBootstrapperTest::ConfigChangeHandling [GOOD] >> ConfiguredTabletBootstrapperTest::NodeListChange |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestNotifyActorDied [GOOD] |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TPipeTrackerTest::TestSimpleAdd [GOOD] >> TResourceBroker::TestAutoTaskId >> TTabletPipeTest::TestPipeConnectToHint >> BootstrapperTest::DuplicateNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::HeavyAggregation [GOOD] Test command err: 2026-01-04T02:42:13.560735Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2083: aggregator new request V2 [2:8:2055] 2026-01-04T02:42:13.560993Z node 2 
:TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:9:2056] worker 0 2026-01-04T02:42:13.561028Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:10:2057] worker 1 2026-01-04T02:42:13.561046Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:11:2058] worker 2 2026-01-04T02:42:13.561069Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:12:2059] worker 3 2026-01-04T02:42:13.561099Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:13:2060] worker 4 2026-01-04T02:42:13.561121Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:14:2061] worker 5 2026-01-04T02:42:13.561144Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:15:2062] worker 6 2026-01-04T02:42:13.561172Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:16:2063] worker 7 2026-01-04T02:42:13.561201Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:17:2064] worker 8 2026-01-04T02:42:13.561218Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [2:8:2055] self [2:18:2065] worker 9 Sending message to [2:10:2057] from [2:8:2055] id 1 Sending message to [2:11:2058] from [2:8:2055] id 2 Sending message to [2:12:2059] from [2:8:2055] id 3 Sending message to [2:13:2060] from [2:8:2055] id 4 Sending message to [2:14:2061] from [2:8:2055] id 5 Sending message to [2:15:2062] from [2:8:2055] id 6 Sending message to [2:16:2063] from [2:8:2055] id 7 Sending message to [2:17:2064] from [2:8:2055] id 8 Sending message to [2:18:2065] from [2:8:2055] id 9 Sending message to [2:9:2056] from [2:8:2055] id 10 2026-01-04T02:42:14.399090Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 3 [2:12:2059] 2026-01-04T02:42:14.399175Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 4 [2:13:2060] 2026-01-04T02:42:14.399279Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 5 [2:14:2061] 2026-01-04T02:42:14.399323Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 6 [2:15:2062] 2026-01-04T02:42:14.399374Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 7 [2:16:2063] 2026-01-04T02:42:14.399438Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 8 [2:17:2064] 2026-01-04T02:42:14.399500Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 9 [2:18:2065] 2026-01-04T02:42:14.399868Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 10 [2:9:2056] 2026-01-04T02:42:14.399914Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 1 [2:10:2057] 2026-01-04T02:42:14.399953Z node 2 :TABLET_AGGREGATOR INFO: 
tablet_counters_aggregator.cpp:2064: aggregator actor request to node 2 [2:11:2058] 2026-01-04T02:42:14.400000Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 10 [2:9:2056] 2026-01-04T02:42:14.401349Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 10 [2:9:2056] 2026-01-04T02:42:14.427358Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:9:2056] Initiator [2:8:2055] 2026-01-04T02:42:14.449869Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [2:10:2057] 2026-01-04T02:42:14.451581Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [2:10:2057] 2026-01-04T02:42:14.484655Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:10:2057] Initiator [2:8:2055] 2026-01-04T02:42:14.504633Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [2:11:2058] 2026-01-04T02:42:14.505702Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 2 [2:11:2058] 2026-01-04T02:42:14.528945Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:11:2058] Initiator [2:8:2055] 2026-01-04T02:42:14.549495Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [2:12:2059] 2026-01-04T02:42:14.551006Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [2:12:2059] 2026-01-04T02:42:14.579267Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:12:2059] Initiator [2:8:2055] 2026-01-04T02:42:14.602804Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [2:13:2060] 2026-01-04T02:42:14.604198Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [2:13:2060] 2026-01-04T02:42:14.636538Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:13:2060] Initiator [2:8:2055] 2026-01-04T02:42:14.662415Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [2:14:2061] 2026-01-04T02:42:14.664101Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [2:14:2061] 2026-01-04T02:42:14.698559Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:14:2061] Initiator [2:8:2055] 2026-01-04T02:42:14.717115Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [2:15:2062] 2026-01-04T02:42:14.718704Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [2:15:2062] 2026-01-04T02:42:14.749216Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:15:2062] Initiator [2:8:2055] 2026-01-04T02:42:14.778410Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [2:16:2063] 2026-01-04T02:42:14.780173Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 
[2:16:2063] 2026-01-04T02:42:14.814840Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:16:2063] Initiator [2:8:2055] 2026-01-04T02:42:14.838345Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 8 [2:17:2064] 2026-01-04T02:42:14.840131Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [2:17:2064] 2026-01-04T02:42:14.872045Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:17:2064] Initiator [2:8:2055] 2026-01-04T02:42:14.896887Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [2:18:2065] 2026-01-04T02:42:14.898699Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [2:18:2065] 2026-01-04T02:42:14.922315Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:18:2065] Initiator [2:8:2055] 2026-01-04T02:42:14.938513Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 0 [2:8:2055] 2026-01-04T02:42:14.938657Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 0 [2:8:2055] 2026-01-04T02:42:14.942999Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [2:8:2055] 2026-01-04T02:42:14.943159Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [2:8:2055] 2026-01-04T02:42:14.948393Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [2:8:2055] 2026-01-04T02:42:14.948588Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 2 [2:8:2055] 2026-01-04T02:42:14.954774Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [2:8:2055] 2026-01-04T02:42:14.954953Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [2:8:2055] 2026-01-04T02:42:14.961991Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [2:8:2055] 2026-01-04T02:42:14.962171Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [2:8:2055] 2026-01-04T02:42:14.968498Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [2:8:2055] 2026-01-04T02:42:14.968672Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [2:8:2055] 2026-01-04T02:42:14.974830Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [2:8:2055] 2026-01-04T02:42:14.974999Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [2:8:2055] 2026-01-04T02:42:14.983445Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [2:8:2055] 2026-01-04T02:42:14.983627Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [2:8:2055] 2026-01-04T02:42:14.989567Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 
8 [2:8:2055] 2026-01-04T02:42:14.989711Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [2:8:2055] 2026-01-04T02:42:14.995086Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [2:8:2055] 2026-01-04T02:42:14.995223Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [2:8:2055] 2026-01-04T02:42:15.000430Z node 2 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [2:8:2055] Initiator [2:7:2054] TEST 2 10 duration 1.577824s 2026-01-04T02:42:15.257590Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2083: aggregator new request V2 [3:8:2055] 2026-01-04T02:42:15.258135Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [3:8:2055] self [3:9:2056] worker 0 2026-01-04T02:42:15.258202Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [3:8:2055] self [3:10:2057] worker 1 2026-01-04T02:42:15.258237Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_ag ... ctor got response node 5 [3:8:2055] 2026-01-04T02:42:16.568149Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [3:8:2055] 2026-01-04T02:42:16.572594Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [3:8:2055] 2026-01-04T02:42:16.572722Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [3:8:2055] 2026-01-04T02:42:16.577872Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [3:8:2055] 2026-01-04T02:42:16.577991Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [3:8:2055] 2026-01-04T02:42:16.582007Z node 3 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [3:8:2055] Initiator [3:7:2054] TEST 2 20 duration 1.480853s 2026-01-04T02:42:16.967445Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2083: aggregator new request V2 [4:8:2055] 2026-01-04T02:42:16.967563Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [4:8:2055] self [4:9:2056] worker 0 Sending message to [4:9:2056] from [4:8:2055] id 1 Sending message to [4:9:2056] from [4:8:2055] id 2 Sending message to [4:9:2056] from [4:8:2055] id 3 Sending message to [4:9:2056] from [4:8:2055] id 4 Sending message to [4:9:2056] from [4:8:2055] id 5 Sending message to [4:9:2056] from [4:8:2055] id 6 Sending message to [4:9:2056] from [4:8:2055] id 7 Sending message to [4:9:2056] from [4:8:2055] id 8 Sending message to [4:9:2056] from [4:8:2055] id 9 Sending message to [4:9:2056] from [4:8:2055] id 10 2026-01-04T02:42:17.726017Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 1 [4:9:2056] 2026-01-04T02:42:17.726075Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 2 [4:9:2056] 2026-01-04T02:42:17.726093Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 3 [4:9:2056] 2026-01-04T02:42:17.726115Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 4 [4:9:2056] 
2026-01-04T02:42:17.726179Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 5 [4:9:2056] 2026-01-04T02:42:17.726204Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 6 [4:9:2056] 2026-01-04T02:42:17.726228Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 7 [4:9:2056] 2026-01-04T02:42:17.726249Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 8 [4:9:2056] 2026-01-04T02:42:17.726274Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 9 [4:9:2056] 2026-01-04T02:42:17.726298Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 10 [4:9:2056] 2026-01-04T02:42:17.726533Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [4:9:2056] 2026-01-04T02:42:17.727682Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [4:9:2056] 2026-01-04T02:42:17.750543Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [4:9:2056] 2026-01-04T02:42:17.751624Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 2 [4:9:2056] 2026-01-04T02:42:17.779164Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [4:9:2056] 2026-01-04T02:42:17.780763Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [4:9:2056] 2026-01-04T02:42:17.807612Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [4:9:2056] 2026-01-04T02:42:17.808724Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [4:9:2056] 2026-01-04T02:42:17.831942Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [4:9:2056] 2026-01-04T02:42:17.833292Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [4:9:2056] 2026-01-04T02:42:17.869160Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [4:9:2056] 2026-01-04T02:42:17.870678Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [4:9:2056] 2026-01-04T02:42:17.898949Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [4:9:2056] 2026-01-04T02:42:17.900130Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [4:9:2056] 2026-01-04T02:42:17.930460Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 8 [4:9:2056] 2026-01-04T02:42:17.932182Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [4:9:2056] 2026-01-04T02:42:17.958306Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [4:9:2056] 2026-01-04T02:42:17.959382Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [4:9:2056] 2026-01-04T02:42:17.990173Z node 4 :TABLET_AGGREGATOR 
INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 10 [4:9:2056] 2026-01-04T02:42:17.991781Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 10 [4:9:2056] 2026-01-04T02:42:18.033978Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [4:9:2056] Initiator [4:8:2055] 2026-01-04T02:42:18.265169Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 0 [4:8:2055] 2026-01-04T02:42:18.265712Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 0 [4:8:2055] 2026-01-04T02:42:18.311815Z node 4 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [4:8:2055] Initiator [4:7:2054] TEST 2 1 duration 1.545384s 2026-01-04T02:42:18.647822Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2081: aggregator new request V2 Initiator [5:7:2054] self [5:8:2055] worker 0 Sending message to [5:8:2055] from [5:8:2055] id 1 Sending message to [5:8:2055] from [5:8:2055] id 2 Sending message to [5:8:2055] from [5:8:2055] id 3 Sending message to [5:8:2055] from [5:8:2055] id 4 Sending message to [5:8:2055] from [5:8:2055] id 5 Sending message to [5:8:2055] from [5:8:2055] id 6 Sending message to [5:8:2055] from [5:8:2055] id 7 Sending message to [5:8:2055] from [5:8:2055] id 8 Sending message to [5:8:2055] from [5:8:2055] id 9 Sending message to [5:8:2055] from [5:8:2055] id 10 2026-01-04T02:42:19.427351Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 1 [5:8:2055] 2026-01-04T02:42:19.427411Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 2 [5:8:2055] 2026-01-04T02:42:19.427439Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 3 [5:8:2055] 2026-01-04T02:42:19.427467Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 4 [5:8:2055] 2026-01-04T02:42:19.427492Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 5 [5:8:2055] 2026-01-04T02:42:19.427562Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 6 [5:8:2055] 2026-01-04T02:42:19.427603Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 7 [5:8:2055] 2026-01-04T02:42:19.427640Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 8 [5:8:2055] 2026-01-04T02:42:19.427676Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 9 [5:8:2055] 2026-01-04T02:42:19.427714Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2064: aggregator actor request to node 10 [5:8:2055] 2026-01-04T02:42:19.428011Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 1 [5:8:2055] 2026-01-04T02:42:19.429766Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 1 [5:8:2055] 2026-01-04T02:42:19.462613Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 2 [5:8:2055] 2026-01-04T02:42:19.464288Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged 
response node 2 [5:8:2055] 2026-01-04T02:42:19.499733Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 3 [5:8:2055] 2026-01-04T02:42:19.501620Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 3 [5:8:2055] 2026-01-04T02:42:19.536952Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 4 [5:8:2055] 2026-01-04T02:42:19.538561Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 4 [5:8:2055] 2026-01-04T02:42:19.567865Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 5 [5:8:2055] 2026-01-04T02:42:19.568947Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 5 [5:8:2055] 2026-01-04T02:42:19.599429Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 6 [5:8:2055] 2026-01-04T02:42:19.600637Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 6 [5:8:2055] 2026-01-04T02:42:19.624088Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 7 [5:8:2055] 2026-01-04T02:42:19.625600Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 7 [5:8:2055] 2026-01-04T02:42:19.659433Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 8 [5:8:2055] 2026-01-04T02:42:19.660970Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 8 [5:8:2055] 2026-01-04T02:42:19.692299Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 9 [5:8:2055] 2026-01-04T02:42:19.694053Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 9 [5:8:2055] 2026-01-04T02:42:19.727433Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2145: aggregator actor got response node 10 [5:8:2055] 2026-01-04T02:42:19.729126Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2150: aggregator actor merged response node 10 [5:8:2055] 2026-01-04T02:42:19.781219Z node 5 :TABLET_AGGREGATOR INFO: tablet_counters_aggregator.cpp:2172: aggregator request processed [5:8:2055] Initiator [5:7:2054] TEST 2 1 duration 1.487781s |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TResourceBroker::TestErrors >> TTabletPipeTest::TestInterconnectSession [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestRewriteSameNode [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:108:2140] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:108:2140] Leader for TabletID 9437185 is [0:0:0] sender: [1:113:2057] recipient: [1:110:2141] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:113:2057] recipient: [1:110:2141] Leader for TabletID 9437184 is [1:120:2148] sender: [1:121:2057] recipient: [1:108:2140] Leader for TabletID 9437185 is [1:122:2149] sender: [1:123:2057] recipient: [1:110:2141] Leader for TabletID 9437184 is [1:120:2148] sender: [1:160:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:122:2149] sender: [1:162:2057] recipient: [1:14:2061] Leader 
for TabletID 9437185 is [1:122:2149] sender: [1:164:2057] recipient: [1:105:2139] Leader for TabletID 9437185 is [1:122:2149] sender: [1:167:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:122:2149] sender: [1:169:2057] recipient: [1:168:2179] Leader for TabletID 9437185 is [1:170:2180] sender: [1:171:2057] recipient: [1:168:2179] Leader for TabletID 9437185 is [1:170:2180] sender: [1:200:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:120:2148] sender: [1:203:2057] recipient: [1:104:2138] Leader for TabletID 9437184 is [1:120:2148] sender: [1:206:2057] recipient: [1:205:2203] Leader for TabletID 9437184 is [1:207:2204] sender: [1:208:2057] recipient: [1:205:2203] Leader for TabletID 9437184 is [1:207:2204] sender: [1:236:2057] recipient: [1:14:2061] |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TResourceBroker::TestAutoTaskId [GOOD] >> TTabletPipeTest::TestPipeConnectToHint [GOOD] >> TTabletPipeTest::TestPipeReconnectAfterKillWithoutRetries >> TTabletPipeTest::TestConsumerSidePipeReset >> ColumnShardTiers::DSConfigs [GOOD] >> TFlatMetrics::MaximumValue1 [GOOD] >> TFlatMetrics::MaximumValue2 [GOOD] >> TResourceBroker::TestErrors [GOOD] >> TResourceBroker::TestExecutionStat >> ConfiguredTabletBootstrapperTest::BasicInitialization >> TFlatMetrics::TimeSeriesAvg16x60 [GOOD] >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] >> KqpQuery::TryToUpdateNonExistentColumn [GOOD] >> KqpQuery::UpdateThenDelete+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestInterconnectSession [GOOD] Test command err: 2026-01-04T02:42:20.752278Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:315: [9437185] Detach 2026-01-04T02:42:20.776836Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:338: [9437185] Activate 2026-01-04T02:42:20.786469Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:338: [9437185] Activate 2026-01-04T02:42:20.790084Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[9437185] ::Bootstrap [1:132:2157] 2026-01-04T02:42:20.790148Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[9437185] lookup [1:132:2157] 2026-01-04T02:42:20.790472Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[9437185] forward result local node, try to connect [1:132:2157] 2026-01-04T02:42:20.790523Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[9437185]::SendEvent [1:132:2157] 2026-01-04T02:42:20.790611Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:396: TClient[9437185] poison pill while connecting [1:132:2157] 2026-01-04T02:42:20.790654Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[9437185] connect failed [1:132:2157] 2026-01-04T02:42:20.790726Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [9437185] Accept Connect Originator# [1:132:2157] 2026-01-04T02:42:20.790900Z node 1 :PIPE_SERVER INFO: tablet_pipe_server.cpp:236: [9437185] Undelivered Target# [1:132:2157] Type# 269877249 Reason# ActorUnknown 2026-01-04T02:42:20.791034Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[9437185] ::Bootstrap [1:135:2159] 2026-01-04T02:42:20.791071Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[9437185] lookup [1:135:2159] 2026-01-04T02:42:20.791121Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[9437185] forward result local node, try to connect [1:135:2159] 2026-01-04T02:42:20.791152Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[9437185]::SendEvent [1:135:2159] 
2026-01-04T02:42:20.791208Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:396: TClient[9437185] poison pill while connecting [1:135:2159] 2026-01-04T02:42:20.791237Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[9437185] connect failed [1:135:2159] 2026-01-04T02:42:20.791281Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [9437185] Accept Connect Originator# [1:135:2159] 2026-01-04T02:42:20.791393Z node 1 :PIPE_SERVER INFO: tablet_pipe_server.cpp:236: [9437185] Undelivered Target# [1:135:2159] Type# 269877249 Reason# ActorUnknown 2026-01-04T02:42:20.791550Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:49: TClient[9437185] ::Bootstrap [1:137:2161] 2026-01-04T02:42:20.791573Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:542: TClient[9437185] lookup [1:137:2161] 2026-01-04T02:42:20.791636Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:188: TClient[9437185] forward result local node, try to connect [1:137:2161] 2026-01-04T02:42:20.791668Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:687: TClient[9437185]::SendEvent [1:137:2161] 2026-01-04T02:42:20.791701Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:396: TClient[9437185] poison pill while connecting [1:137:2161] 2026-01-04T02:42:20.791737Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:507: TClient[9437185] connect failed [1:137:2161] 2026-01-04T02:42:20.791817Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:291: [9437185] Accept Connect Originator# [1:137:2161] 2026-01-04T02:42:20.791918Z node 1 :PIPE_SERVER INFO: tablet_pipe_server.cpp:236: [9437185] Undelivered Target# [1:137:2161] Type# 269877249 Reason# ActorUnknown ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::DuplicateNodes [GOOD] Test command err: ... waiting for pipe to connect ... sleeping (original instance should be preserved) ... waiting for original instance to stop ... waiting for original instance to stop (done) ... 
waiting for pipe to connect 2026-01-04T02:42:20.142127Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:20.142217Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:20.142889Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-04T02:42:20.142941Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 838756400823690829 2026-01-04T02:42:20.143149Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-04T02:42:20.143180Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:9437184, type: Dummy, begin new round, seed: 2303809724928703835 2026-01-04T02:42:20.143946Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 5 state: UNKNOWN 2026-01-04T02:42:20.144003Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 9437184, type: Dummy, boot 2026-01-04T02:42:20.144422Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 9437184, type: Dummy, apply alien 4 state: OWNER 2026-01-04T02:42:20.144460Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:589: tablet: 9437184, type: Dummy, become watch on node 4 (owner) |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::MultiTxStatsFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 15826, MsgBus: 2315 2026-01-04T02:41:53.065875Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331133375275851:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:53.065949Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:53.099360Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:41:53.387136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:53.387242Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:53.394010Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:53.429190Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:53.485388Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:53.760120Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:53.760149Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:53.760156Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:53.760250Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:54.078504Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:54.218550Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:54.227331Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:54.284970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:54.464774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:54.621102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:54.694435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.444863Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331146260179580:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:56.444980Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:56.445302Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331146260179590:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:56.445376Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:56.804455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.838302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.874895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.909460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.942156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.987222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.030316Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.081320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.156823Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331150555147756:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.156914Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.157012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331150555147761:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.157972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331150555147763:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.158037Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.161412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:57.177474Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331150555147764:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:57.282998Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331150555147816:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:58.066743Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331133375275851:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:58.066804Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:58.994366Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494518986, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 15689, MsgBus: 19952 2026-01-04T02:41:59.993829Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undeliver ... at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:17.377435Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:17.408255Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:17.439293Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:17.480295Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:17.549988Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:17.604326Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:17.682195Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331234341213148:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:17.682276Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:17.682311Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331234341213153:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:17.682445Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331234341213155:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:17.682510Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:17.685611Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:17.697152Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591331234341213157:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:17.788038Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591331234341213208:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:18.649162Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591331217161341251:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:18.649227Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:19.734322Z node 4 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494539741, txId: 281474976710673] shutting down {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Operators":[{"Inputs":[{"ExternalPlanNodeId":3}],"Limit":"4","Name":"Limit"}],"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Operators":[{"Inputs":[{"ExternalPlanNodeId":1}],"Limit":"4","Name":"TopSort","TopSortBy":"row.Data"}],"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","ReadRange":["Key [150, 266]"],"Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"1","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"TopSort","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":36,"Sum":36,"History":[2,36],"Min":36},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2}},"Name":"4","Push":{"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Chunks":{"Count":1,"Max":3,"Sum":3,"Min":3},"ResumeMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"PauseMessageMs":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitTimeUs":{"Count":1,"Max":1211,"Sum":1211,"History":[2,1211],"Min":1211},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":2,"Min":1}}}],"MaxMemoryUsage":{"Count":1,"Max":1048576,"Sum":1048576,"History":[1,1048576,2,1048576],"Min":1048576},"Introspections":["1 tasks for a single\/sequential source 
scan"],"Tasks":1,"FinishedTasks":1,"OutputRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"IngressRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Mkql":{},"PhysicalStageId":0,"StageDurationUs":0,"BaseTimeMs":1767494539704,"Table":[{"Path":"\/Root\/EightShard","ReadBytes":{"Count":1,"Max":54,"Sum":54,"Min":54},"ReadRows":{"Count":1,"Max":3,"Sum":3,"Min":3}}],"CpuTimeUs":{"Count":1,"Max":903,"Sum":903,"History":[1,761,2,903],"Min":903},"OutputBytes":{"Count":1,"Max":36,"Sum":36,"Min":36},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":144,"Sum":144,"History":[2,144],"Min":144},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2}},"External":{},"Ingress":{},"Name":"KqpReadRangesSource","Push":{"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"ResumeMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Bytes":{"Count":1,"Max":144,"Sum":144,"History":[2,144],"Min":144},"PauseMessageMs":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitTimeUs":{"Count":1,"Max":1204,"Sum":1204,"History":[2,1204],"Min":1204},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":2,"Min":1}}}],"UpdateTimeMs":2}}],"Node Type":"Merge","SortColumns":["Data (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"Limit","Stats":{"UseLlvm":"undefined","OutputRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"PhysicalStageId":1,"FinishedTasks":1,"InputBytes":{"Count":1,"Max":36,"Sum":36,"Min":36},"Introspections":["1 minimum tasks for compute"],"DurationUs":{"Count":1,"Max":1000,"Sum":1000,"Min":1000},"Mkql":{},"MaxMemoryUsage":{"Count":1,"Max":1048576,"Sum":1048576,"History":[1,1048576,3,1048576],"Min":1048576},"BaseTimeMs":1767494539704,"Output":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":36,"Sum":36,"History":[3,36],"Min":36},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Chunks":{"Count":1,"Max":3,"Sum":3,"Min":3},"ResumeMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"PauseMessageMs":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitTimeUs":{"Count":1,"Max":1812,"Sum":1812,"History":[3,1812],"Min":1812},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":2,"Min":1}}}],"CpuTimeUs":{"Count":1,"Max":740,"Sum":740,"History":[1,608,3,740],"Min":740},"StageDurationUs":1000,"ResultRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"ResultBytes":{"Count":1,"Max":36,"Sum":36,"Min":36},"OutputBytes":{"Count":1,"Max":36,"Sum":36,"Min":36},"Input":[{"Pop":{"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Bytes":{"Count":1,"Max":36,"Sum":36,"History":[3,36],"Min":36},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2}},"Name":"2","Push":{"LastMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Rows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Chunks":{"Count":1,"Max":1,"Sum":1,"Min":1},"ResumeMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"FirstMessageMs":{"Count":1,"Max":2,"Sum":2,"Min":2},"Bytes":{"Count":1,"Max":36,"Sum":36,"History":[3,36],"Min":36},"PauseMessageMs":
{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitTimeUs":{"Count":1,"Max":1740,"Sum":1740,"History":[3,1740],"Min":1740},"WaitPeriods":{"Count":1,"Max":1,"Sum":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":2,"Min":1}}}],"InputRows":{"Count":1,"Max":3,"Sum":3,"Min":3},"Tasks":1,"UpdateTimeMs":3}}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"TotalDurationUs":187803,"ProcessCpuTimeUs":338,"Compilation":{"FromCache":false,"CpuTimeUs":162141,"DurationUs":167005}}},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"A-Rows":3,"A-SelfCpu":0.74,"A-Size":36,"A-Cpu":1.643,"Limit":"4","Name":"Limit"}],"Plans":[{"PlanNodeId":4,"Operators":[{"A-Rows":3,"A-SelfCpu":0.903,"A-Size":36,"A-Cpu":0.903,"Limit":"4","Name":"TopSort","TopSortBy":"row.Data"}],"Plans":[{"PlanNodeId":5,"Operators":[{"E-Rows":"1","ReadRange":["Key [150, 266]"],"E-Cost":"0","ReadColumns":["Data","Key","Text"],"Name":"TableRangeScan","E-Size":"0","Table":"EightShard"}],"Node Type":"TableRangeScan"}],"Node Type":"TopSort"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} >> TTabletPipeTest::TestConsumerSidePipeReset [GOOD] >> TTabletPipeTest::TestConnectReject |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestAutoTaskId [GOOD] |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TResourceBroker::TestExecutionStat [GOOD] >> ConfiguredTabletBootstrapperTest::NodeListChange [GOOD] >> KqpQuery::QueryFromSqs [GOOD] |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::TimeSeriesAvg16Signed [GOOD] |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TFlatMetrics::MaximumValue2 [GOOD] |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeReconnectAfterKillWithoutRetries [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::SelectCountAsteriskFromVar [GOOD] Test command err: Trying to start YDB, gRPC: 21395, MsgBus: 26469 2026-01-04T02:41:53.608052Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331129487602147:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:53.612574Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:53.869924Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:53.877557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:53.877679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:53.882033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:53.978990Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:54.070975Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:54.071021Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:54.071037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:54.071136Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:54.121982Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:54.507212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:54.565697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:54.614108Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:54.680906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:54.825884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:54.888117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.783053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331142372505661:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:56.783166Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:56.783488Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331142372505671:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:56.783550Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.104962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.147974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.180843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.212714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.244511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.274590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.306793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.377056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.449841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331146667473836:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.449909Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.450206Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331146667473842:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.450240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331146667473841:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.450296Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.453693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:57.463265Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331146667473845:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:57.535199Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331146667473896:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:58.609911Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331129487602147:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:58.609981Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17780, MsgBus: 61811 2026-01-04T02:42:00.301620Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331160076254937:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:00.302984Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:00.314933Z node 2 :KQP_PROXY WARN: kqp_fin ... p:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591331186521402121:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:11.870833Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 26166, MsgBus: 6060 2026-01-04T02:42:13.495055Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591331216471344037:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:13.495104Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:13.513406Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:13.587787Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:13.598693Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591331216471344010:2081] 1767494533494459 != 1767494533494462 2026-01-04T02:42:13.627541Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:13.627639Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:13.634708Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:13.721169Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-04T02:42:13.734482Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:13.734505Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:13.734513Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:13.734580Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:14.254240Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:14.271209Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:14.341161Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:14.504167Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:14.542245Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:14.609047Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:17.546027Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331233651215064:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:17.546124Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:17.546690Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331233651215073:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:17.546759Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:17.631704Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:17.667453Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:17.700367Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:17.732819Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:17.763999Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:17.798295Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:17.833120Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:17.886460Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:17.962512Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331233651215944:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:17.962592Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331233651215949:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:17.962602Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:17.962751Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331233651215951:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:17.962799Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:17.966241Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:17.976445Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591331233651215952:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:18.064375Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591331237946183300:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:18.495605Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591331216471344037:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:18.495692Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] >> TTabletPipeTest::TestConnectReject [GOOD] >> ConfiguredTabletBootstrapperTest::BasicInitialization [GOOD] >> ConfiguredTabletBootstrapperTest::ComplexConfigChanges |92.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBroker::TestExecutionStat [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> ConfiguredTabletBootstrapperTest::NodeListChange [GOOD] Test command err: 2026-01-04T02:42:20.542926Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 1 2026-01-04T02:42:20.543092Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723360 bootstrapper on node 1 2026-01-04T02:42:20.547573Z node 1 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723360, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:20.548503Z node 1 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723360, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-04T02:42:20.548579Z node 1 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723360, type: Dummy, boot 2026-01-04T02:42:20.694949Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 1 2026-01-04T02:42:20.697916Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 1 2026-01-04T02:42:20.698055Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:73: Stopping tablet 72075186232723360 bootstrapper on node 1 2026-01-04T02:42:20.698204Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723360 bootstrapper on node 1 2026-01-04T02:42:21.383521Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 4 2026-01-04T02:42:21.383626Z node 3 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 3 2026-01-04T02:42:21.383714Z node 3 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723360 bootstrapper on node 3 2026-01-04T02:42:21.383796Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723360, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:21.384054Z node 3 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 
72075186232723360, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-04T02:42:21.384089Z node 3 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723360, type: Dummy, boot 2026-01-04T02:42:21.592509Z node 3 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 3 2026-01-04T02:42:21.592592Z node 3 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 3 2026-01-04T02:42:21.592626Z node 3 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:73: Stopping tablet 72075186232723360 bootstrapper on node 3 2026-01-04T02:42:21.593472Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 4 2026-01-04T02:42:21.593511Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 4 2026-01-04T02:42:21.593586Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723360 bootstrapper on node 4 2026-01-04T02:42:21.593672Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723360, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:21.594312Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723360, type: Dummy, lookup: OK, leader: [3:220:2142] 2026-01-04T02:42:21.594678Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 72075186232723360, type: Dummy, connect: ERROR 2026-01-04T02:42:21.594707Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723360, type: Dummy, boot |92.0%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeReconnectAfterKillWithoutRetries [GOOD] Test command err: ... waiting for boot1 ... waiting for connect1 ... waiting for boot2 ... waiting for client destroyed notification ... waiting for connect2 >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestConnectReject [GOOD] |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigs [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:40:22.526805Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:40:22.643381Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:40:22.643943Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:40:22.644013Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:40:22.890934Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:22.891074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:22.957141Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494420048612 != 1767494420048615 2026-01-04T02:40:22.970468Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:23.016276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:23.108735Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:40:23.585279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:23.585348Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:23.585378Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:23.588190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:23.588720Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:23.603135Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2026-01-04T02:40:35.786086Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:953:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:35.786481Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:35.786953Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:980:2826], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:35.787052Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:35.792177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:35.981439Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1072:2899], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:35.981600Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:35.982033Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1077:2904], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:35.982178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1078:2905], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:35.982454Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:35.988153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:36.120245Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1081:2908], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:40:36.741448Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1175:2973] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:37.395445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:40:37.738766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.375895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:39.083721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:40:39.595766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:40:40.913536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.277594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 2026-01-04T02:40:45.924487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:40:45.924563Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = 
`secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2026-01-04T02:40:56.528941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ... ated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:42:09.065149Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-01-04T02:42:09.065179Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2026-01-04T02:42:09.065199Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier1' at tablet 0 2026-01-04T02:42:09.065220Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 2026-01-04T02:42:09.065249Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier1' started at tablet 0 2026-01-04T02:42:09.065270Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:09.065297Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:42:09.065662Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:3208:4473];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-01-04T02:42:09.065737Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3211:4475];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-01-04T02:42:09.065789Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3218:4482];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2026-01-04T02:42:20.161613Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-04T02:42:20.161740Z node 1 :TX_TIERING DEBUG: log.cpp:841: 
fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-04T02:42:20.161778Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-04T02:42:20.161835Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-04T02:42:20.161882Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-04T02:42:20.162008Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-04T02:42:20.162181Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-04T02:42:20.162233Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-01-04T02:42:20.162281Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:20.162348Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:42:20.162408Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-04T02:42:20.162435Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-01-04T02:42:20.162463Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:20.162488Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:20.162524Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:42:20.162588Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-04T02:42:20.162617Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2026-01-04T02:42:20.162644Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:20.162671Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:20.162709Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:42:20.162740Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-04T02:42:20.162765Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2026-01-04T02:42:20.162790Z node 1 :TX_TIERING DEBUG: log.cpp:841: 
fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:20.162813Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:20.162848Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:42:20.162882Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-04T02:42:20.162904Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2026-01-04T02:42:20.162927Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:20.162951Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:20.162985Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:42:20.163028Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-04T02:42:20.163051Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-01-04T02:42:20.163078Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:20.163101Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:20.163132Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:42:20.163370Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:3208:4473];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-01-04T02:42:20.163466Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3211:4475];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-01-04T02:42:20.163531Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3218:4482];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 E0104 02:42:21.973579109 645087 backup_poller.cc:113] run_poller: UNKNOWN:Timer list shutdown {created_time:"2026-01-04T02:42:21.973365448+00:00"} |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendBeforeBootTarget [GOOD] Test command err: Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:108:2140] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [1:112:2057] recipient: [1:108:2140] Leader for TabletID 9437185 is [0:0:0] sender: [1:113:2057] recipient: [1:110:2141] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [1:113:2057] recipient: [1:110:2141] Leader for TabletID 9437184 is [1:120:2148] sender: [1:121:2057] recipient: [1:108:2140] Leader for TabletID 9437185 is [1:122:2149] sender: [1:123:2057] recipient: [1:110:2141] Leader for TabletID 9437184 is [1:120:2148] sender: [1:160:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:122:2149] sender: [1:162:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:122:2149] sender: [1:165:2057] recipient: [1:105:2139] Leader for TabletID 9437185 is 
[1:122:2149] sender: [1:166:2057] recipient: [1:14:2061] Leader for TabletID 9437185 is [1:122:2149] sender: [1:169:2057] recipient: [1:168:2179] Leader for TabletID 9437185 is [1:170:2180] sender: [1:171:2057] recipient: [1:168:2179] Leader for TabletID 9437185 is [1:170:2180] sender: [1:200:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [1:120:2148] sender: [1:203:2057] recipient: [1:104:2138] Leader for TabletID 9437184 is [1:120:2148] sender: [1:206:2057] recipient: [1:205:2203] Leader for TabletID 9437184 is [1:207:2204] sender: [1:208:2057] recipient: [1:205:2203] Leader for TabletID 9437184 is [1:207:2204] sender: [1:236:2057] recipient: [1:14:2061] Leader for TabletID 9437184 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2139] IGNORE Leader for TabletID 9437184 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2139] Leader for TabletID 9437184 is [2:112:2143] sender: [2:113:2057] recipient: [2:106:2139] Leader for TabletID 9437184 is [2:112:2143] sender: [2:132:2057] recipient: [2:14:2061] Leader for TabletID 9437185 is [0:0:0] sender: [2:168:2057] recipient: [2:166:2171] IGNORE Leader for TabletID 9437185 is [0:0:0] sender: [2:168:2057] recipient: [2:166:2171] Leader for TabletID 9437185 is [2:172:2175] sender: [2:173:2057] recipient: [2:166:2171] Leader for TabletID 9437185 is [2:172:2175] sender: [2:208:2057] recipient: [2:14:2061] |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> KqpStats::OneShardLocalExec-UseSink [GOOD] >> KqpStats::OneShardNonLocalExec+UseSink >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegularCheckSingleTablet [GOOD] >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck >> TTabletPipeTest::TestOpen >> TResourceBrokerInstant::Test >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] >> TTabletPipeTest::TestOpen [GOOD] >> TTabletPipeTest::TestPipeConnectAfterKillWithoutRetries >> TResourceBrokerInstant::Test [GOOD] >> TResourceBrokerInstant::TestErrors ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::QueryFromSqs [GOOD] Test command err: Trying to start YDB, gRPC: 23562, MsgBus: 9794 2026-01-04T02:41:53.178131Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331130810276576:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:53.178230Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:53.415950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:53.416131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:53.472784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:53.483618Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:53.489298Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:53.493911Z node 1 
:CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331130810276532:2081] 1767494513176316 != 1767494513176319 2026-01-04T02:41:53.602469Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:53.602488Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:53.602504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:53.602589Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:53.737516Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:54.046443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:54.063075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:54.127223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:54.194059Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:54.272192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:54.441969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:54.508588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.538798Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331143695180293:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:56.538929Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:56.539274Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331143695180303:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:56.539314Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:56.873405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.911214Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.946703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.983327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.016779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.076185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.103744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.141971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:57.212034Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331147990148476:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.212117Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.212418Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331147990148481:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.212438Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331147990148482:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.212475Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:57.216262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:57.226799Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331147990148485:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:57.306220Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331147990148536:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:58.177964Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331130810276576:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:58.179213Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:58.807030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... } cpu_time_us: 2310 affected_shards: 2 } compilation { duration_us: 225079 cpu_time_us: 219719 } process_cpu_time_us: 716 total_duration_us: 242655 total_cpu_time_us: 225385 Trying to start YDB, gRPC: 32746, MsgBus: 10421 2026-01-04T02:42:14.417939Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591331219634279338:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:14.417988Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:14.459973Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:14.546387Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:14.586458Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:14.586562Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:14.596016Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:14.677414Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:14.684779Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:14.684803Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:14.684812Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:14.684907Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:15.211447Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:15.228070Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:15.301767Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:15.439917Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:15.457228Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:15.529508Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:18.339055Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331236814150362:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.339173Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.339966Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331236814150372:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.340037Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.422210Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:18.463612Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:18.502948Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:18.537954Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:18.573135Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:18.609311Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:18.648065Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:18.700321Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:18.784977Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331236814151247:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.785075Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.785194Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331236814151252:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.785238Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331236814151254:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.785298Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.788803Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:18.799086Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591331236814151256:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:42:18.886909Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591331236814151307:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:19.418374Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591331219634279338:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:19.418451Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:20.696349Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TTabletPipeTest::TestSendAfterOpen >> TTabletResolver::TabletResolvePriority [GOOD] >> TTabletResolver::NodeProblem >> TabletState::SeqNoSubscriptionReplace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/service/ut/unittest >> ColumnStatistics::CountMinSketchServerlessStatistics 2026-01-04 02:42:17,453 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-04 02:42:17,662 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 513201 58.7M 58.2M 32.6M test_tool run_ut @/home/runner/.ya/build/build_root/c5zo/002d5f/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk15/testing_out_stuff/test_tool.args 513501 2.1G 2.1G 1.6G └─ ydb-core-statistics-service-ut --trace-path-append /home/runner/.ya/build/build_root/c5zo/002d5f/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/c Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:32:22.958406Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:23.035942Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:23.042747Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:614:2408], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:32:23.043076Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:32:23.043215Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:32:23.438477Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:23.548620Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:23.548766Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:23.611078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:23.732039Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:32:24.685294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:32:24.685993Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:32:24.686041Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:32:24.686074Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:32:24.686651Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:32:24.785897Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:32:25.465831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:32:28.797252Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:32:28.802928Z node 4 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 4 2026-01-04T02:32:28.806690Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:32:28.838441Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:32:28.838598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:28.878460Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-04T02:32:28.881197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:29.061182Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:32:29.061309Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:32:29.062942Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:29.063614Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:29.064171Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:29.065348Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:29.065696Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:29.065892Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:29.066108Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:29.066347Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:29.066496Z node 4 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:32:29.082299Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:32:29.350705Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:32:29.375688Z node 4 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:32:29.375783Z node 4 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:32:29.421394Z node 4 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:32:29.423489Z node 4 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:32:29.423742Z node 4 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:32:29.423827Z node 4 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:32:29.423892Z node 4 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:32:29.423952Z node 4 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:32:29.424007Z node 4 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:32:29.424060Z node 4 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:32:29.424660Z node 4 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:32:29.427043Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [4:2079:2456] 2026-01-04T02:32:29.432656Z node 4 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-04T02:32:29.440944Z node 4 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [4:2291:2587] Owner: [4:2290:2586]. Describe result: PathErrorUnknown 2026-01-04T02:32:29.441014Z node 4 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [4:2291:2587] Owner: [4:2290:2586]. Creating table 2026-01-04T02:32:29.441094Z node 4 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [4:2291:2587] Owner: [4:2290:2586]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:32:29.446951Z node 4 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:29.447058Z node 4 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [4:2317:2600], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:32:29.476772Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [4:2392:2654] 2026-01-04T02:32:29.477464Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [4:2392:2654], schemeshard id = 72075186224037897 2026-01-04T02:32:29.492308Z node 4 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [4:2412:2663], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:32:29.520524Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:32:29.536502Z node 4 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [4:2291:2587] Owner: [4:2290:2586]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:32:29.536661Z node 4 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [4:2291:2587] Owner: [4:2290:2586]. Subscribe on create table tx: 281474976720657 2026-01-04T02:32:29.547295Z node 4 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [4:2291:2587] Owner: [4:2290:2586]. 
Subscribe on tx: 281474976720657 registered 2026-01-04T02:32:29.740358Z node 4 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:32:29.791753Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-04T02:32:30.251185Z node 4 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [4:2291:2587] Owner: [4:2290:2586]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-04T02:32:30.351382Z node 4 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [4:2291:2587] Owner: [4:2290:2586]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:32:30.351470Z node 4 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [4:2291:2587] Owner: [4:2290:2586]. Column diff is empty, finishing 2026-01-04T02:32:31.164175Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... ActorId: [4:8141:5640], RunDataQuery with SessionId: ydb://session/3?node_id=4&id=OGYwNmFmYjAtYzE1MGQ3OTMtYzdmMzQwNzctMWY5NzEzZDI=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:41:46.022543Z node 4 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [4:8157:5654]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:41:46.022960Z node 4 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:41:46.023176Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 4, schemeshard count = 1, urgent = 0 2026-01-04T02:41:46.023242Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 4, schemeshard count = 1 2026-01-04T02:41:46.023511Z node 4 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 4 cookie: 18446744073709551615 2026-01-04T02:41:46.023601Z node 4 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [4:8157:5654], StatRequests.size() = 1 2026-01-04T02:41:46.023706Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:41:46.282248Z node 4 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [4:8140:5639], ActorId: [4:8141:5640], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=4&id=OGYwNmFmYjAtYzE1MGQ3OTMtYzdmMzQwNzctMWY5NzEzZDI=, TxId: 2026-01-04T02:41:46.282373Z node 4 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [4:8140:5639], ActorId: [4:8141:5640], Finish with SUCCESS, SessionId: ydb://session/3?node_id=4&id=OGYwNmFmYjAtYzE1MGQ3OTMtYzdmMzQwNzctMWY5NzEzZDI=, TxId: 2026-01-04T02:41:46.282985Z node 4 
:STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [4:8139:5638], ActorId: [4:8140:5639], Got response [4:8141:5640] SUCCESS 2026-01-04T02:41:46.283630Z node 4 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:41:46.313423Z node 4 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037899, LocalPathId: 32] 2026-01-04T02:41:46.313504Z node 4 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=opId1, ActorId=[1:6304:3982] 2026-01-04T02:41:46.317057Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [4:8179:5665] 2026-01-04T02:41:46.318509Z node 4 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:8177:4492] , Record { OperationId: "opId1" Tables { PathId { OwnerId: 72075186224037905 LocalId: 32 } } Types: TYPE_COUNT_MIN_SKETCH Database: "/Root/Serverless2" } 2026-01-04T02:41:46.318582Z node 4 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: `opId1', DatabaseName: `/Root/Serverless2', Types: 1 2026-01-04T02:41:46.318631Z node 4 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId: `opId1', PathId: [OwnerId: 72075186224037905, LocalPathId: 32], ColumnTags: 2026-01-04T02:41:46.363004Z node 4 :STATISTICS DEBUG: tx_analyze.cpp:111: [72075186224037894] TTxAnalyze::Complete 2026-01-04T02:41:46.363370Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-04T02:41:46.363451Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:656: [72075186224037894] ScheduleNextAnalyze. Don't start analyze for table [OwnerId: 72075186224037905, LocalPathId: 32] as there is still no info from its SchemeShard 2026-01-04T02:41:46.363512Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:687: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=opId1 2026-01-04T02:41:46.363558Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:691: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2026-01-04T02:41:46.427717Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-04T02:41:46.427841Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:41:46.455168Z node 4 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [4:7736:5393], schemeshard count = 1 2026-01-04T02:41:46.466901Z node 4 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [4:7736:5393], schemeshard count = 1 2026-01-04T02:41:46.840641Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-04T02:41:46.840719Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:656: [72075186224037894] ScheduleNextAnalyze. Don't start analyze for table [OwnerId: 72075186224037905, LocalPathId: 32] as there is still no info from its SchemeShard 2026-01-04T02:41:46.840752Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:687: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. 
OperationId=opId1 2026-01-04T02:41:46.840779Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:691: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2026-01-04T02:41:47.309167Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-04T02:41:47.309246Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:656: [72075186224037894] ScheduleNextAnalyze. Don't start analyze for table [OwnerId: 72075186224037905, LocalPathId: 32] as there is still no info from its SchemeShard 2026-01-04T02:41:47.309287Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:687: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=opId1 2026-01-04T02:41:47.309321Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:691: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2026-01-04T02:41:47.321294Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-04T02:41:47.321381Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:656: [72075186224037894] ScheduleNextAnalyze. Don't start analyze for table [OwnerId: 72075186224037905, LocalPathId: 32] as there is still no info from its SchemeShard 2026-01-04T02:41:47.321420Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:687: [72075186224037894] ScheduleNextAnalyze. All the force traversal tables sent the requests. OperationId=opId1 2026-01-04T02:41:47.321452Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:691: [72075186224037894] ScheduleNextAnalyze. All the force traversal operations sent the requests. 2026-01-04T02:41:47.774988Z node 4 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 1, at schemeshard: 72075186224037905 2026-01-04T02:41:47.775102Z node 4 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.972000s, at schemeshard: 72075186224037905 2026-01-04T02:41:47.775572Z node 4 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037905, stats byte size: 25, entries count: 1, are all stats full: 0 2026-01-04T02:41:47.794098Z node 4 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:41:47.970030Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-04T02:41:47.977592Z node 4 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [4:8266:2465], ActorId: [4:8276:5714], Bootstrap. 
Database: /Root/Serverless2, IsSystemUser: 0, run create session 2026-01-04T02:41:47.982722Z node 4 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [4:8266:2465], ActorId: [4:8276:5714], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(LowCardinalityString),HLL(LowCardinalityInt),min(LowCardinalityInt),max(LowCardinalityInt),HLL(Float),min(Float),max(Float),HLL(Date),min(Date),max(Date),HLL(NearNumericLimits),min(NearNumericLimits),max(NearNumericLimits) FROM `/Root/Serverless2/Table2` 2026-01-04T02:41:47.982894Z node 4 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [4:8266:2465], ActorId: [4:8276:5714], Start read next stream part 2026-01-04T02:41:48.110422Z node 4 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [4:8302:5727]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:41:48.110811Z node 4 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-04T02:41:48.111027Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 4, schemeshard count = 1, urgent = 0 2026-01-04T02:41:48.111100Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 4, schemeshard count = 1 2026-01-04T02:41:48.111455Z node 4 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 4 cookie: 18446744073709551615 2026-01-04T02:41:48.111583Z node 4 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [4:8302:5727], StatRequests.size() = 1 2026-01-04T02:41:48.111907Z node 4 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/c5zo/002d5f/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk15/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/c5zo/002d5f/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/chunk15/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestWriteOffsetWithBigMessage [GOOD] Test command err: 2026-01-04T02:39:51.418636Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 
72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-04T02:39:51.501296Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:51.501403Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:51.501461Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:51.501530Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-04T02:39:51.521433Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2026-01-04T02:39:51.521578Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:39:51.544491Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "test" Generation: 1 Important: false } 2026-01-04T02:39:51.545417Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:51.548153Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 PartitionIds: 3 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } Partitions { PartitionId: 3 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } AllPartitions { PartitionId: 3 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "test" Generation: 1 Important: false } 
2026-01-04T02:39:51.548345Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:39:51.548447Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:39:51.548497Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:2:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:39:51.548539Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:3:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:39:51.549427Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:39:51.549893Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-04T02:39:51.553471Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:39:51.553546Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 2026-01-04T02:39:51.553600Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-04T02:39:51.553669Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:39:51.553747Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:39:51.556625Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-04T02:39:51.556710Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:39:51.556752Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:51.556810Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:51.556845Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-04T02:39:51.556887Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:51.556935Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:51.557012Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-01-04T02:39:51.557058Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-01-04T02:39:51.557100Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:51.557164Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit request with generation 1 2026-01-04T02:39:51.557194Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test reinit with generation 1 done 2026-01-04T02:39:51.557221Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-04T02:39:51.557257Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-04T02:39:51.557337Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:39:51.557624Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:39:51.557667Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user test readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:39:51.557730Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. 
Count 0 2026-01-04T02:39:51.558044Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:39:51.558328Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-04T02:39:51.560348Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:1:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:39:51.560404Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:1:Initializer] Initializing completed. 2026-01-04T02:39:51.560440Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2026-01-04T02:39:51.560509Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][1][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 1 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:39:51.560569Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][1][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:39:51.562524Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][1][StateIdle] Process pending events. Count 0 2026-01-04T02:39:51.562570Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:39:51.562609Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:51.562672Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:51.562708Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][1][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-04T02:39:51.562739Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:39:51.562779Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:51.562835Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit request with generation 1 2026-01-04T02:39:51.562868Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][1][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 1 user user reinit with generation 1 done 2026-01-04T02:39:51.562897Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][1][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:39:51.562967Z node 1 :PERSQUEUE DEBUG: p ... 
p request needed for owner 2026-01-04T02:42:18.307984Z node 74 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|d4026c78-e6266c6-b15403ed-9ac95c2_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:42:18.532891Z node 74 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:42:18.533443Z node 74 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|710b1662-6c9d2d2d-6b6f5c61-e243518b_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:42:19.151344Z node 75 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 75 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [75:107:2057] recipient: [75:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [75:107:2057] recipient: [75:105:2138] Leader for TabletID 72057594037927937 is [75:111:2142] sender: [75:112:2057] recipient: [75:105:2138] 2026-01-04T02:42:19.226410Z node 75 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:42:19.226478Z node 75 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:42:19.226530Z node 75 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:42:19.226585Z node 75 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [75:153:2057] recipient: [75:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [75:153:2057] recipient: [75:151:2172] Leader for TabletID 72057594037927938 is [75:157:2176] sender: [75:158:2057] recipient: [75:151:2172] Leader for TabletID 72057594037927937 is [75:111:2142] sender: [75:181:2057] recipient: [75:14:2061] 2026-01-04T02:42:19.249975Z node 75 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:42:19.251330Z node 75 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 73 actor [75:179:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 73 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 73 } Consumers { Name: "user" Generation: 73 Important: true } 2026-01-04T02:42:19.252699Z node 75 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [75:187:2142] 2026-01-04T02:42:19.255822Z node 75 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [75:187:2142] 2026-01-04T02:42:19.258030Z node 75 :PERSQUEUE INFO: partition_init.cpp:1155: 
[72057594037927937][Partition][1][StateInit] bootstrapping 1 [75:188:2142] 2026-01-04T02:42:19.260349Z node 75 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [75:188:2142] 2026-01-04T02:42:19.262215Z node 75 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [75:189:2142] 2026-01-04T02:42:19.264492Z node 75 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [75:189:2142] 2026-01-04T02:42:19.441685Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:42:19.442268Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|32cef9e7-a66bf552-44556d66-578868b0_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:42:19.881334Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:42:19.881927Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|d20f04df-389e0af7-8bc955f8-96cca773_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:42:20.266851Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:42:20.267370Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|f9518454-2c94747f-ad59ff7-cdf8eba5_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 80000 2026-01-04T02:42:20.619584Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:42:20.620207Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|2ccc2f8c-922725d4-2f6d4a53-3e2055b9_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:42:20.838302Z node 75 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:42:20.838811Z node 75 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|1eb4d839-e6df8d73-ebf0a404-9a341ab6_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:42:21.497460Z node 76 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 76 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [76:108:2057] recipient: [76:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [76:108:2057] recipient: [76:106:2138] Leader for TabletID 72057594037927937 is [76:112:2142] sender: [76:113:2057] recipient: [76:106:2138] 2026-01-04T02:42:21.562700Z node 76 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:42:21.562771Z node 76 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:42:21.562826Z node 76 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in 
BillingMeteringConfig 2026-01-04T02:42:21.562884Z node 76 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [76:154:2057] recipient: [76:152:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [76:154:2057] recipient: [76:152:2172] Leader for TabletID 72057594037927938 is [76:158:2176] sender: [76:159:2057] recipient: [76:152:2172] Leader for TabletID 72057594037927937 is [76:112:2142] sender: [76:184:2057] recipient: [76:14:2061] 2026-01-04T02:42:21.589283Z node 76 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:42:21.590682Z node 76 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 74 actor [76:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 PartitionIds: 2 TopicName: "rt3.dc1--asdfgs--topic" Version: 74 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } Partitions { PartitionId: 2 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } AllPartitions { PartitionId: 2 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 74 } Consumers { Name: "user" Generation: 74 Important: true } 2026-01-04T02:42:21.592368Z node 76 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [76:190:2142] 2026-01-04T02:42:21.595732Z node 76 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [76:190:2142] 2026-01-04T02:42:21.598172Z node 76 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [76:191:2142] 2026-01-04T02:42:21.600722Z node 76 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [76:191:2142] 2026-01-04T02:42:21.602753Z node 76 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][2][StateInit] bootstrapping 2 [76:192:2142] 2026-01-04T02:42:21.605291Z node 76 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][2][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 2 generation 2 [76:192:2142] 2026-01-04T02:42:21.768537Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:42:21.769127Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|df58838-f06de035-5b413793-8fd1d043_0 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:42:22.208124Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 1 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:42:22.208701Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|49267a6a-366687a5-e897c081-9192a51b_1 generated for partition 1 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:42:22.578112Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:203: 
Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:42:22.578648Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|e0e9226f-ec19579d-6c835f45-ac791c94_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 80000 2026-01-04T02:42:22.943608Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:42:22.944038Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|af474f8e-cc12e8e4-9928188e-ba43107c_0 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:42:23.149816Z node 76 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 2 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:42:23.150387Z node 76 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|bec2153f-2adfa031-d47679b7-dfc75438_1 generated for partition 2 topic 'rt3.dc1--asdfgs--topic' owner default >> BootstrapperTest::RestartUnavailableTablet |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/service/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletCountersAggregator::IntegralPercentileAggregationRegularNoOverflowCheck [GOOD] |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::TestErrors [GOOD] >> ColumnShardTiers::TTLUsage [GOOD] >> TTabletPipeTest::TestPipeWithVersionInfo >> TTabletPipeTest::TestSendAfterOpen [GOOD] >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor >> TabletState::SeqNoSubscriptionReplace [GOOD] >> KqpLimits::WaitCAsTimeout [GOOD] >> KqpParams::BadParameterType >> ConfiguredTabletBootstrapperTest::ComplexConfigChanges [GOOD] >> TTabletPipeTest::TestPipeConnectAfterKillWithoutRetries [GOOD] >> KqpQuery::QueryCacheTtl [GOOD] >> KqpQuery::QueryCacheInvalidate >> TTabletResolver::NodeProblem [GOOD] >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] >> BootstrapperTest::RestartUnavailableTablet [GOOD] >> BootstrapperTest::UnavailableStateStorage >> KqpTypes::Time64Columns-EnableTableDatetime64+IsColumn [GOOD] >> TTabletPipeTest::TestPipeWithVersionInfo [GOOD] >> TTabletPipeTest::TestPipeReconnectAfterRestartWithoutRetries >> KqpExplain::SqlIn [GOOD] >> KqpExplain::SsaProgramInJsonPlan ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TResourceBrokerInstant::TestErrors [GOOD] Test command err: 2026-01-04T02:42:24.358378Z node 2 :RESOURCE_BROKER ERROR: resource_broker.cpp:1080: FinishTaskInstant failed for task 2: cannot finish unknown task |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TabletState::SeqNoSubscriptionReplace [GOOD] >> KqpParams::ImplicitSameParameterTypesQueryCacheCheck [GOOD] >> KqpParams::InvalidJson |92.1%| [TA] $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |92.1%| [TA] {RESULT} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> ConfiguredTabletBootstrapperTest::ComplexConfigChanges [GOOD] Test command err: 2026-01-04T02:42:22.313548Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723360 config changed, recreating bootstrapper on node 1 2026-01-04T02:42:22.313711Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723360 bootstrapper on node 1 2026-01-04T02:42:22.313776Z node 1 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 1 2026-01-04T02:42:22.317491Z node 1 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723360, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:22.318504Z node 1 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723360, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-04T02:42:22.318558Z node 1 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723360, type: Dummy, boot 2026-01-04T02:42:23.310637Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 5 2026-01-04T02:42:23.310743Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 4 2026-01-04T02:42:23.310855Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723361 bootstrapper on node 4 2026-01-04T02:42:23.310944Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:23.311004Z node 6 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 6 2026-01-04T02:42:23.311052Z node 7 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 7 2026-01-04T02:42:23.311355Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: NODATA, leader: [0:0:0] 2026-01-04T02:42:23.311407Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723361, type: Dummy, boot 2026-01-04T02:42:23.518881Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 4 2026-01-04T02:42:23.520075Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 4 2026-01-04T02:42:23.520143Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:73: Stopping tablet 72075186232723361 bootstrapper on node 4 2026-01-04T02:42:23.520261Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723361 bootstrapper on node 4 2026-01-04T02:42:23.521012Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:23.521495Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 5 2026-01-04T02:42:23.521546Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 5 2026-01-04T02:42:23.521665Z node 5 :BOOTSTRAPPER DEBUG: 
configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723361 bootstrapper on node 5 2026-01-04T02:42:23.521785Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:23.522376Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: OK, leader: [4:284:2143] 2026-01-04T02:42:23.522977Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 72075186232723361, type: Dummy, connect: ERROR 2026-01-04T02:42:23.523034Z node 4 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:72075186232723361, type: Dummy, begin new round, seed: 11436890671405667697 2026-01-04T02:42:23.523403Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: OK, leader: [4:284:2143] 2026-01-04T02:42:23.524217Z node 4 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 72075186232723361, type: Dummy, apply alien 5 state: UNKNOWN 2026-01-04T02:42:23.524267Z node 4 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723361, type: Dummy, boot 2026-01-04T02:42:23.525090Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 72075186232723361, type: Dummy, connect: ERROR 2026-01-04T02:42:23.525128Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:348: tablet:72075186232723361, type: Dummy, begin new round, seed: 5380068269072606170 2026-01-04T02:42:23.526009Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:446: tablet: 72075186232723361, type: Dummy, apply alien 4 state: OWNER 2026-01-04T02:42:23.526046Z node 5 :BOOTSTRAPPER INFO: bootstrapper.cpp:589: tablet: 72075186232723361, type: Dummy, become watch on node 4 (owner) 2026-01-04T02:42:23.654395Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 4 2026-01-04T02:42:23.654509Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 4 2026-01-04T02:42:23.654552Z node 4 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:73: Stopping tablet 72075186232723361 bootstrapper on node 4 2026-01-04T02:42:23.655767Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 5 2026-01-04T02:42:23.655844Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 5 2026-01-04T02:42:23.655883Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:73: Stopping tablet 72075186232723361 bootstrapper on node 5 2026-01-04T02:42:23.655974Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723361 bootstrapper on node 5 2026-01-04T02:42:23.656080Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:23.656157Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:23.657170Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: OK, leader: [4:338:2166] 2026-01-04T02:42:23.657588Z node 5 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 72075186232723361, type: Dummy, connect: ERROR 2026-01-04T02:42:23.657629Z node 5 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723361, type: Dummy, boot 2026-01-04T02:42:23.939900Z node 5 
:BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 5 2026-01-04T02:42:23.940024Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 5 2026-01-04T02:42:23.940072Z node 5 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:73: Stopping tablet 72075186232723361 bootstrapper on node 5 2026-01-04T02:42:23.941539Z node 7 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:85: Received bootstrap config update on node 7 2026-01-04T02:42:23.941585Z node 7 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:101: Tablet 72075186232723361 config changed, recreating bootstrapper on node 7 2026-01-04T02:42:23.941673Z node 7 :BOOTSTRAPPER DEBUG: configured_tablet_bootstrapper.cpp:146: Started tablet 72075186232723361 bootstrapper on node 7 2026-01-04T02:42:23.941768Z node 7 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 72075186232723361, type: Dummy, begin new cycle (lookup in state storage) 2026-01-04T02:42:23.942543Z node 7 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 72075186232723361, type: Dummy, lookup: OK, leader: [5:382:2102] 2026-01-04T02:42:23.942919Z node 7 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:284: tablet: 72075186232723361, type: Dummy, connect: ERROR 2026-01-04T02:42:23.942958Z node 7 :BOOTSTRAPPER NOTICE: bootstrapper.cpp:698: tablet: 72075186232723361, type: Dummy, boot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeConnectAfterKillWithoutRetries [GOOD] Test command err: ... waiting for boot1 ... waiting for connect1 ... waiting for client destroyed notification ... waiting for boot2 ... waiting for connect2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletResolver::NodeProblem [GOOD] Test command err: 2026-01-04T02:42:24.568219Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:42:24.568470Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 123 leader: [1:218:2139] followers: 0 2026-01-04T02:42:24.568543Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [1:218:2139] 2026-01-04T02:42:24.568843Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StResolve leader: [0:0:0] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:42:24.569079Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 234 leader: [1:224:2143] followers: 0 2026-01-04T02:42:24.569135Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:224:2143] 2026-01-04T02:42:24.570755Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [1:218:2139] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:42:24.570822Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 123 followers: 0 
countLeader 1 allowFollowers 0 winner: [1:218:2139] 2026-01-04T02:42:24.571012Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [1:224:2143] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:42:24.571066Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 1 1:2:0 local 1 localDc 1 other 0 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [1:224:2143] 2026-01-04T02:42:24.571231Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 1 max(problemEpoch): 5 2026-01-04T02:42:24.571283Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 123 leader: [1:218:2139] by nodeId 2026-01-04T02:42:24.571351Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [1:218:2139] (known problem) followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:42:24.571400Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 1 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:42:24.571648Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 123 leader: [2:234:2096] followers: 0 2026-01-04T02:42:24.571707Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:234:2096] 2026-01-04T02:42:24.572158Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 234 leader: [1:224:2143] by nodeId 2026-01-04T02:42:24.572219Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [1:224:2143] (known problem) followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:42:24.572259Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 1 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:42:24.572462Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 234 leader: [2:240:2098] followers: 0 2026-01-04T02:42:24.572537Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:240:2098] 2026-01-04T02:42:24.574872Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 5 2026-01-04T02:42:24.574977Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [2:234:2096] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:42:24.575024Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [2:234:2096] 2026-01-04T02:42:24.575274Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:240:2098] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:42:24.575336Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward 
node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:240:2098] 2026-01-04T02:42:24.575571Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 7 2026-01-04T02:42:24.575637Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 123 leader: [2:234:2096] by nodeId 2026-01-04T02:42:24.575697Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [2:234:2096] (known problem) followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:42:24.575744Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:42:24.576007Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 123 leader: [3:252:2096] followers: 0 2026-01-04T02:42:24.576105Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:252:2096] 2026-01-04T02:42:24.576754Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:240:2098] followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:42:24.576811Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [2:240:2098] 2026-01-04T02:42:24.577086Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:936: Handle TEvNodeProblem nodeId: 2 max(problemEpoch): 8 2026-01-04T02:42:24.577162Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 123 entry.State: StNormal leader: [3:252:2096] followers: 0 ev: {EvForward TabletID: 123 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:42:24.577215Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 123 followers: 0 countLeader 1 allowFollowers 0 winner: [3:252:2096] 2026-01-04T02:42:24.577467Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:839: Delayed invalidation of tabletId: 234 leader: [2:240:2098] by nodeId 2026-01-04T02:42:24.577519Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:882: Handle TEvForward tabletId: 234 entry.State: StNormal leader: [2:240:2098] (known problem) followers: 0 ev: {EvForward TabletID: 234 Ev: nullptr Flags: 1:2:0} 2026-01-04T02:42:24.577578Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC 1 leaderDC 2 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:42:24.577861Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:781: ApplyEntry tabletId: 234 leader: [3:258:2098] followers: 0 2026-01-04T02:42:24.577958Z node 1 :TABLET_RESOLVER DEBUG: tablet_resolver.cpp:667: SelectForward node 1 selfDC 1 leaderDC 3 1:2:0 local 0 localDc 0 other 1 disallowed 0 tabletId: 234 followers: 0 countLeader 1 allowFollowers 0 winner: [3:258:2098] |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |92.1%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestSendAfterOpenUsingTabletWithoutAcceptor [GOOD] |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> TTabletCountersPercentile::WithoutZero [GOOD] >> TTabletLabeledCountersAggregator::DbAggregation >> TTabletPipeTest::TestPipeReconnectAfterRestartWithoutRetries [GOOD] >> IndexBuildTest::CheckLimitWithDroppedIndexUniq >> KqpStats::SelfJoin [GOOD] >> IndexBuildTest::ShadowDataNotAllowedByDefault >> VectorIndexBuildTest::Metering_Documentation_Formula [GOOD] >> VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-false >> VectorIndexBuildTest::TTxReply_DoExecute_Throws >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] >> BootstrapperTest::UnavailableStateStorage [GOOD] >> KqpQuery::ExecuteDataQueryCollectMeta [GOOD] >> KqpQuery::GenericQueryNoRowsLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TTLUsage [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:40:20.266702Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:40:20.433542Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:40:20.434194Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:40:20.434255Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:40:20.856038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:20.856191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:20.944546Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494417452756 != 1767494417452759 2026-01-04T02:40:20.964155Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:21.012735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:21.114111Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:40:21.907776Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:21.907912Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:21.907946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:21.911639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:21.912241Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:21.926727Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:40:22.082171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnStore, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_store.cpp:461) 2026-01-04T02:40:22.178124Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:908:2775], Recipient [1:951:2806]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:40:22.179261Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:908:2775], Recipient [1:951:2806]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:40:22.180470Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:951:2806];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:40:22.209292Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:40:22.209603Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 72075186224037888 2026-01-04T02:40:22.218146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:40:22.218521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:40:22.218798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:40:22.218930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:40:22.219043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:40:22.219174Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:40:22.219314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:40:22.219437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:40:22.219555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:40:22.219673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:40:22.219787Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:40:22.219931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:40:22.220086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:40:22.243778Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828684, Sender [1:908:2775], Recipient [1:951:2806]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:40:22.244462Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:910:2776], Recipient [1:953:2808]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:40:22.245890Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828672, Sender [1:912:2778], Recipient [1:958:2812]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:40:22.246824Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:403: StateInit, received event# 268828673, Sender [1:910:2776], Recipient [1:953:2808]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:40:22.247090Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:40:22.279167Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:40:22.279416Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 72075186224037889 2026-01-04T02:40:22.285142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:40:22.285259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:40:22.285484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:40:22.285621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:40:22.285751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:40:22.285899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:40:22.286014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:40:22.286127Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:40:22.286237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:40:22.286343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:40:22.286478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];tablet_id=72075186224037889;process=TTx ... 2812]: NActors::TEvents::TEvWakeup 2026-01-04T02:42:23.700178Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:958:2812];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-01-04T02:42:23.700248Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:962:2815]: NActors::TEvents::TEvWakeup 2026-01-04T02:42:23.700279Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:962:2815];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; FINISHED_SLEEP START_SLEEP column0/uint64_value: 0 TEvBlobStorage::TEvPut tId=72057594046316545;c=1;:66/0:size=1466;count=9;size=5027;count=35;size=14676;count=211;size=1506;count=21;size=778;count=4;;1:size=15433;count=1;size=19818;count=212;size=3749;count=21;size=6541;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut 
tId=72057594046316545;c=0;:66/0:size=1466;count=9;size=5027;count=35;size=14746;count=212;size=1506;count=21;size=778;count=4;;1:size=15433;count=1;size=19818;count=212;size=3749;count=21;size=6541;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2026-01-04T02:42:23.873794Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:951:2806]: NActors::TEvents::TEvWakeup 2026-01-04T02:42:23.873905Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-01-04T02:42:23.874011Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:953:2808]: NActors::TEvents::TEvWakeup 2026-01-04T02:42:23.874052Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-01-04T02:42:23.874133Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:958:2812]: NActors::TEvents::TEvWakeup 2026-01-04T02:42:23.874170Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:958:2812];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-01-04T02:42:23.874242Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:962:2815]: NActors::TEvents::TEvWakeup 2026-01-04T02:42:23.874278Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:962:2815];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; FINISHED_SLEEP START_SLEEP TEvBlobStorage::TEvPut 
tId=72057594037968897;c=0;:66/0:size=1466;count=9;size=5310;count=36;size=14746;count=212;size=1506;count=21;size=778;count=4;;1:size=15433;count=1;size=19818;count=212;size=3749;count=21;size=6541;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut tId=72057594046316545;c=1;:66/0:size=1466;count=9;size=5310;count=36;size=14746;count=212;size=1506;count=21;size=778;count=4;;1:size=15433;count=1;size=19913;count=213;size=3749;count=21;size=6541;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; TEvBlobStorage::TEvPut 
tId=72057594046316545;c=0;:66/0:size=1466;count=9;size=5310;count=36;size=14816;count=213;size=1506;count=21;size=778;count=4;;1:size=15433;count=1;size=19913;count=213;size=3749;count=21;size=6541;count=2;;2:size=0;count=0;;3:size=1662088;count=1;;4:size=1674816;count=1;;5:size=1655040;count=1;;6:size=1641248;count=1;;7:size=1641000;count=1;;8:size=1641128;count=1;;9:size=1641424;count=1;;10:size=1641128;count=1;;11:size=1640944;count=1;;12:size=1641040;count=1;;13:size=1641024;count=1;;14:size=1641496;count=1;;15:size=1641184;count=1;;16:size=1640968;count=1;;17:size=1081816;count=1;;18:size=0;count=0;;19:size=0;count=0;;20:size=0;count=0;;21:size=0;count=0;;22:size=0;count=0;;23:size=0;count=0;;24:size=0;count=0;;25:size=0;count=0;;26:size=0;count=0;;27:size=0;count=0;;28:size=0;count=0;;29:size=0;count=0;;30:size=0;count=0;;31:size=0;count=0;;32:size=0;count=0;;33:size=0;count=0;;34:size=0;count=0;;35:size=0;count=0;;36:size=0;count=0;;37:size=0;count=0;;38:size=0;count=0;;39:size=0;count=0;;40:size=0;count=0;;41:size=0;count=0;;42:size=0;count=0;;43:size=0;count=0;;44:size=0;count=0;;45:size=0;count=0;;46:size=0;count=0;;47:size=0;count=0;;48:size=0;count=0;;49:size=0;count=0;;50:size=0;count=0;;51:size=0;count=0;;52:size=0;count=0;;53:size=0;count=0;;54:size=0;count=0;;55:size=0;count=0;;56:size=0;count=0;;57:size=0;count=0;;58:size=0;count=0;;59:size=0;count=0;;60:size=0;count=0;;61:size=0;count=0;;62:size=0;count=0;;63:size=0;count=0;;64:size=0;count=0;;65:size=0;count=0;; 2026-01-04T02:42:24.028840Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:951:2806]: NActors::TEvents::TEvWakeup 2026-01-04T02:42:24.028918Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037888;self_id=[1:951:2806];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037888; 2026-01-04T02:42:24.029004Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:953:2808]: NActors::TEvents::TEvWakeup 2026-01-04T02:42:24.029036Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037889;self_id=[1:953:2808];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037889; 2026-01-04T02:42:24.029101Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:958:2812]: NActors::TEvents::TEvWakeup 2026-01-04T02:42:24.029133Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037891;self_id=[1:958:2812];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037891; 2026-01-04T02:42:24.029201Z node 1 :TX_COLUMNSHARD TRACE: columnshard_impl.h:422: StateWork, received event# 65538, Sender [0:0:0], Recipient [1:962:2815]: NActors::TEvents::TEvWakeup 2026-01-04T02:42:24.029233Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037890;self_id=[1:962:2815];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037890; FINISHED_SLEEP REQUEST=SELECT COUNT(*) FROM `/Root/olapStore/olapTable`;EXPECTATION=1 |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |92.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpExplain::MergeConnection [GOOD] >> KqpExplain::IdxFullscan >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletLabeledCountersAggregator::DbAggregation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> TTabletPipeTest::TestPipeReconnectAfterRestartWithoutRetries [GOOD] Test command err: ... waiting for boot1 ... waiting for connect1 ... waiting for boot2 ... received OnTabletStop ... received OnTabletStop ... received OnTabletStop ... waiting for client shutting down notification ... waiting for connect2 >> IndexBuildTest::Metering_Documentation_Formula [GOOD] >> IndexBuildTest::LockUniq ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet/ut/unittest >> BootstrapperTest::UnavailableStateStorage [GOOD] Test command err: ... waiting for pipe to connect ... waiting for blocked connect attempt ... blocking NKikimr::TEvTabletPipe::TEvConnect from TABLET_PIPE_CLIENT to TABLET_ACTOR cookie 1 ... waiting for blocked connect attempt (done) ... disconnecting nodes 2 <-> 1 ... waiting for pipe to disconnect ... waiting for pipe to connect ... waiting for pipe to connect ... waiting for multiple state storage lookup attempts 2026-01-04T02:42:25.590439Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 0 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 1 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 2 2026-01-04T02:42:25.591123Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:233: tablet: 9437184, type: Dummy, lookup: ERROR, leader: [0:0:0] 2026-01-04T02:42:25.591176Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:260: tablet: 9437184, type: Dummy, state storage unavailable, sleeping for 0.101463s 2026-01-04T02:42:25.745319Z node 6 :BOOTSTRAPPER DEBUG: bootstrapper.cpp:194: tablet: 9437184, type: Dummy, begin new cycle (lookup in state storage) ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 0} for [4:3:2050]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 0 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 1} for [4:6:2053]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 1 ... disconnecting nodes 2 <-> 0 ({EvReplicaLookup TabletID: 9437184 Cookie: 2} for [4:9:2056]) ... blocking NKikimr::TEvStateStorage::TEvReplicaLookup from SS_PROXY_REQUEST to SS_REPLICA cookie 2 ... 
waiting for multiple state storage lookup attempts (done) >> KqpExplain::UpdateSecondaryConditionalPrimaryKey+UseSink [GOOD] >> IndexBuildTest::ShadowDataNotAllowedByDefault [GOOD] >> KqpQuery::RandomUuid [GOOD] >> KqpQuery::CreateAsSelect_DisableDataShard [GOOD] >> KqpStats::DeferredEffects+UseSink [GOOD] >> KqpStats::DeferredEffects-UseSink >> KqpExplain::UpdateSecondaryConditionalPrimaryKey-UseSink >> IndexBuildTest::ShadowDataEdgeCases >> KqpQuery::ReadOverloaded+StreamLookup >> KqpQuery::CreateAsSelect_BadCases ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::SelfJoin [GOOD] Test command err: Trying to start YDB, gRPC: 14476, MsgBus: 7812 2026-01-04T02:41:58.581851Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331153097221083:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:58.585620Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:58.837964Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:58.854738Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:58.854846Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:58.901472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:58.984316Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:58.989298Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331153097221060:2081] 1767494518580949 != 1767494518580952 2026-01-04T02:41:59.035701Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:59.084579Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:59.084619Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:59.084627Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:59.084715Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:59.534740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:59.542893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:59.595482Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:59.598139Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:59.750340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:59.927982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:00.003607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:01.811182Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331165982124817:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:01.811304Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:01.811644Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331165982124827:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:01.811699Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.137497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.170287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.205383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.243603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.281068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.312907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.351919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.424093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.515080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331170277092994:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.515153Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.515178Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331170277092999:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.515336Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331170277093001:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.515383Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.518772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:02.532372Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331170277093003:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:02.607897Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331170277093056:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:03.582220Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331153097221083:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:03.582320Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoShard"],"PlanNodeId":1,"Operators":[ ... OVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591331238372321682:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:23.555930Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"E-Size":"0","PlanNodeId":3,"LookupKeyColumns":["Key"],"Node Type":"TableLookupJoin","Path":"\/Root\/TwoShard","Columns":["Key"],"E-Rows":"0","Table":"TwoShard","Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["TwoShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/TwoShard","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"TwoShard","ReadColumns":["Key (-∞, +∞)"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Stage","Stats":{"UseLlvm":"undefined","Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"ReadBytes":{"Count":1,"Sum":48,"Max":48,"Min":48}}],"OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":0,"FinishedTasks":1,"Introspections":["1 tasks for a single\/sequential source 
scan"],"IngressRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"Mkql":{},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[4,1048576]},"BaseTimeMs":1767494544964,"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Bytes":{"Count":1,"Sum":42,"Max":42,"Min":42,"History":[4,42]}},"Name":"RESULT","Push":{"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitTimeUs":{"Count":1,"Sum":2293,"Max":2293,"Min":2293,"History":[4,2293]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":3,"Min":1}}}],"CpuTimeUs":{"Count":1,"Sum":458,"Max":458,"Min":458,"History":[4,458]},"Ingress":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":96,"Max":96,"Min":96,"History":[4,96]}},"External":{},"Name":"KqpReadRangesSource","Ingress":{},"Push":{"LastMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"FirstMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"Bytes":{"Count":1,"Sum":96,"Max":96,"Min":96,"History":[4,96]},"PauseMessageMs":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitTimeUs":{"Count":1,"Sum":2305,"Max":2305,"Min":2305,"History":[4,2305]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":3,"Min":1}}}],"StageDurationUs":1000,"ResultRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResultBytes":{"Count":1,"Sum":42,"Max":42,"Min":42},"OutputBytes":{"Count":1,"Sum":42,"Max":42,"Min":42},"UpdateTimeMs":4,"Tasks":1}}],"PlanNodeType":"Connection","E-Cost":"0"}],"Node Type":"Collect","Stats":{"UseLlvm":"undefined","Table":[{"Path":"\/Root\/TwoShard","ReadRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"ReadBytes":{"Count":1,"Sum":24,"Max":24,"Min":24}}],"OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":1,"FinishedTasks":1,"InputBytes":{"Count":1,"Sum":42,"Max":42,"Min":42},"Introspections":["1 tasks same as previous 
stage"],"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"Mkql":{},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[6,1048576]},"BaseTimeMs":1767494544964,"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":102,"Max":102,"Min":102,"History":[6,102]}},"Name":"6","Push":{"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResumeMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"PauseMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"WaitTimeUs":{"Count":1,"Sum":2872,"Max":2872,"Min":2872,"History":[6,2872]},"WaitPeriods":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitMessageMs":{"Count":1,"Max":5,"Min":4}}}],"CpuTimeUs":{"Count":1,"Sum":432,"Max":432,"Min":432,"History":[6,432]},"StageDurationUs":1000,"WaitInputTimeUs":{"Count":1,"Sum":1534,"Max":1534,"Min":1534,"History":[6,1534]},"OutputBytes":{"Count":1,"Sum":102,"Max":102,"Min":102},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Bytes":{"Count":1,"Sum":42,"Max":42,"Min":42,"History":[6,42]}},"Name":"2","Push":{"LastMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"ResumeMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"FirstMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"Bytes":{"Count":1,"Sum":42,"Max":42,"Min":42,"History":[6,42]},"PauseMessageMs":{"Count":1,"Sum":3,"Max":3,"Min":3},"WaitTimeUs":{"Count":1,"Sum":1633,"Max":1633,"Min":1633,"History":[6,1633]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":4,"Min":3}}}],"UpdateTimeMs":5,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Tasks":1}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":5}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit","Stats":{"UseLlvm":"undefined","Output":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":27,"Max":27,"Min":27,"History":[6,27]}},"Name":"8","Push":{"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResumeMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"PauseMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"WaitTimeUs":{"Count":1,"Sum":1661,"Max":1661,"Min":1661,"History":[6,1661]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"WaitMessageMs":{"Count":1,"Max":5,"Min":4}}}],"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[6,1048576]},"Introspections":["1 minimum tasks for 
compute"],"InputBytes":{"Count":1,"Sum":102,"Max":102,"Min":102},"Tasks":1,"OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"FinishedTasks":1,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":2,"Mkql":{},"StageDurationUs":0,"BaseTimeMs":1767494544964,"OutputBytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"CpuTimeUs":{"Count":1,"Sum":837,"Max":837,"Min":837,"History":[6,837]},"UpdateTimeMs":6,"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":102,"Max":102,"Min":102,"History":[6,102]}},"Name":"4","Push":{"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":102,"Max":102,"Min":102,"History":[6,102]},"PauseMessageMs":{"Count":1,"Sum":4,"Max":4,"Min":4},"WaitTimeUs":{"Count":1,"Sum":1482,"Max":1482,"Min":1482,"History":[6,1482]},"WaitPeriods":{"Count":1,"Sum":2,"Max":2,"Min":2},"WaitMessageMs":{"Count":1,"Max":5,"Min":4}}}]}}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":7}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit","Stats":{"UseLlvm":"undefined","OutputRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"PhysicalStageId":3,"FinishedTasks":1,"InputBytes":{"Count":1,"Sum":27,"Max":27,"Min":27},"Introspections":["1 minimum tasks for compute"],"DurationUs":{"Count":1,"Sum":1000,"Max":1000,"Min":1000},"Mkql":{},"MaxMemoryUsage":{"Count":1,"Sum":1048576,"Max":1048576,"Min":1048576,"History":[6,1048576]},"BaseTimeMs":1767494544964,"Output":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":24,"Max":24,"Min":24,"History":[6,24]}},"Name":"RESULT","Push":{"WaitTimeUs":{"Count":1,"Sum":5017,"Max":5017,"Min":5017,"History":[6,5017]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1},"Chunks":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResumeMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5}}}],"CpuTimeUs":{"Count":1,"Sum":693,"Max":693,"Min":693,"History":[6,693]},"StageDurationUs":1000,"ResultRows":{"Count":1,"Sum":6,"Max":6,"Min":6},"ResultBytes":{"Count":1,"Sum":24,"Max":24,"Min":24},"OutputBytes":{"Count":1,"Sum":24,"Max":24,"Min":24},"Input":[{"Pop":{"Chunks":{"Count":1,"Sum":1,"Max":1,"Min":1},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":27,"Max":27,"Min":27,"History":[6,27]}},"Name":"6","Push":{"LastMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Rows":{"Count":1,"Sum":6,"Max":6,"Min":6},"Chunks":{"Count":1,"Sum":2,"Max":2,"Min":2},"ResumeMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"FirstMessageMs":{"Count":1,"Sum":5,"Max":5,"Min":5},"Bytes":{"Count":1,"Sum":27,"Max":27,"Min":27,"History":[6,27]},"WaitTimeUs":{"Count":1,"Sum":4960,"Max":4960,"Min":4960,"History":[6,4960]},"WaitPeriods":{"Count":1,"Sum":1,"Max":1,"Min":1}}}],"UpdateTimeMs":6,"Tasks":1,"InputRows":{"Count":1,"Sum":6,"Max":6,"Min":6}}}],"
Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"Compilation":{"FromCache":false,"DurationUs":311342,"CpuTimeUs":305160},"ProcessCpuTimeUs":452,"TotalDurationUs":333357,"ResourcePoolId":"default","QueuedTimeUs":531},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":7,"Plans":[{"PlanNodeId":9,"Operators":[{"E-Size":"0","Name":"TableFullScan","E-Rows":"0","Table":"TwoShard","ReadColumns":["Key (-∞, +∞)"],"E-Cost":"0"}],"Node Type":"TableFullScan"},{"Operators":[{"E-Rows":"0","Columns":["Key"],"E-Size":"0","E-Cost":"0","Name":"TableLookup","Table":"TwoShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"TableLookup"}],"Operators":[{"Name":"LookupJoin","LookupKeyColumns":["Key"]}],"Node Type":"LookupJoin","PlanNodeType":"Connection"}],"Operators":[{"A-Rows":6,"A-SelfCpu":0.837,"A-Cpu":0.837,"A-Size":27,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Operators":[{"A-Rows":6,"A-SelfCpu":0.693,"A-Cpu":1.53,"A-Size":24,"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","PlanNodeType":"Query"}} |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet/ut/unittest >> KqpExplain::FewEffects-UseSink [GOOD] >> KqpExplain::FullOuterJoin >> KqpLimits::TooBigColumn+useSink [GOOD] >> KqpLimits::ReadsetCountLimit |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::DSConfigsWithQueryServiceDdl [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:40:28.507739Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:40:28.596540Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:40:28.596876Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:40:28.596918Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:40:28.821536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:28.821655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:28.880247Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494426336630 != 1767494426336633 2026-01-04T02:40:28.891538Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:28.936173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:29.035017Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:40:29.530519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:29.530569Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:29.530593Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:29.541295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:29.541922Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:29.556213Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2026-01-04T02:40:41.332911Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:956:2823], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.333053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:967:2828], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.333566Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.334419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:972:2833], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.334757Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:41.339735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:41.364558Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:970:2831], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-04T02:40:41.404583Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1023:2865] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:41.621205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.464692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:40:42.891503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:43.563741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:44.131195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:40:44.503364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:40:45.518847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:45.830108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` 
(TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 2026-01-04T02:40:49.961003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:40:49.961073Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2026-01-04T02:41:00.538681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715702:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/abc1", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2026-01-04T02:41:01.623481Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:227;event=skip_tier_manager_start;tier=/Root/tier1;has_secrets=1;tier_config=0; 2026-01-04T02:41:01.623569Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:41:01.623620Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={}; 2026-01-04T02:41:01.623723Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:140;event=start_subscribing_metadata; 2026-01-04T02:41:01.623948Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:154;event=watch_scheme_objects;names= ... 
line=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:42:13.606830Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier2; 2026-01-04T02:42:13.607479Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier2; 2026-01-04T02:42:13.607520Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier2;tablet=0;has_config=0; 2026-01-04T02:42:13.607551Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:154 :Restarting tier '/Root/tier1' at tablet 0 2026-01-04T02:42:13.607582Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 2026-01-04T02:42:13.607621Z node 1 :TX_TIERING DEBUG: log.h:466: manager.cpp:174 :Tier '/Root/tier1' started at tablet 0 2026-01-04T02:42:13.607649Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:13.607688Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=1};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:42:13.608063Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037892;self_id=[1:3243:4504];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-01-04T02:42:13.608188Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3247:4506];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; 2026-01-04T02:42:13.608344Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3255:4512];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=1; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier2`;EXPECTATION=1;WAITING=1 REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 2026-01-04T02:42:24.793204Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-04T02:42:24.793284Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-04T02:42:24.793327Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-04T02:42:24.793486Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-04T02:42:24.793548Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037892;has_config=0; 2026-01-04T02:42:24.793603Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:24.793647Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:24.793710Z node 1 :TX_TIERING DEBUG: log.cpp:841: 
fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:42:24.793753Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-04T02:42:24.793783Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037893;has_config=0; 2026-01-04T02:42:24.793827Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:24.793854Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:24.793893Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:42:24.793926Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-04T02:42:24.793953Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=72075186224037894;has_config=0; 2026-01-04T02:42:24.793980Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:24.794003Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:24.794040Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:42:24.794093Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-04T02:42:24.794262Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-04T02:42:24.794377Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=fetcher.h:149;event=object_deleted;path=/Root/tier1; 2026-01-04T02:42:24.794515Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-04T02:42:24.794548Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-01-04T02:42:24.794580Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:24.794607Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:24.794662Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:42:24.794817Z node 1 :TX_TIERING DEBUG: log.cpp:841: 
tablet_id=72075186224037892;self_id=[1:3243:4504];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-01-04T02:42:24.794910Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3247:4506];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-01-04T02:42:24.794983Z node 1 :TX_TIERING DEBUG: log.cpp:841: tablet_id=72075186224037894;self_id=[1:3255:4512];ev=NKikimr::NColumnShard::TEvPrivate::TEvTieringModified;fline=tiering.cpp:244;event=refresh_tiering;has_tiering=0;tiers=0;had_tiering_before=0; 2026-01-04T02:42:24.795035Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-04T02:42:24.795063Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-01-04T02:42:24.795090Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:24.795117Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier2;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:24.795156Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier2;has_config=0}{id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; 2026-01-04T02:42:24.795261Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:101;component=tiering_manager;event=object_deleted;name=/Root/tier1; 2026-01-04T02:42:24.795288Z node 1 :TX_TIERING INFO: log.cpp:841: fline=manager.cpp:295;event=update_tier_config;name=/Root/tier1;tablet=0;has_config=0; 2026-01-04T02:42:24.795315Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:208;event=skip_tier_manager_reloading;tier=/Root/tier1;has_secrets=1;found_tier_config=1; 2026-01-04T02:42:24.795352Z node 1 :TX_TIERING DEBUG: log.cpp:841: fline=manager.cpp:217;event=configs_updated;configs=TIERS={id=/Root/tier1;has_config=0};SECRETS={USId:root@builtin:accessKey;USId:root@builtin:secretKey;}; REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=DROP EXTERNAL DATA SOURCE `/Root/tier1`;EXPECTATION=1;WAITING=1 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037894 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::Time64Columns-EnableTableDatetime64+IsColumn [GOOD] Test command err: Trying to start YDB, gRPC: 18357, MsgBus: 11742 2026-01-04T02:42:01.136683Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331167000926607:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:01.136779Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:01.383311Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:01.397673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:01.397893Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:01.465941Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331167000926578:2081] 1767494521135431 != 1767494521135434 2026-01-04T02:42:01.477959Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:01.484163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:01.582752Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:01.582785Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:01.582792Z 
node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:01.582904Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:01.654713Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:02.000096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:02.060271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.154807Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:02.239335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.417355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.494260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:04.114509Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331179885830332:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:04.114622Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:04.114942Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331179885830342:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:04.115021Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:04.445670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:04.481239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:04.514581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:04.544385Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:04.575917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:04.612107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:04.661451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:04.707222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:04.777702Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331179885831214:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:04.777786Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:04.777903Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331179885831219:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:04.777971Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331179885831221:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:04.778018Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:04.782535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:04.795157Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331179885831223:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:04.888158Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331179885831274:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:06.136959Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331167000926607:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:06.137024Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:06.464750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... 1-04T02:42:23.924616Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.924632Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.928274Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.928355Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.928375Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037901;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.933387Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.933464Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.933482Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.937550Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.937618Z 
node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.937634Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037905;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.941775Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.942096Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.942139Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037903;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.946715Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.946800Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.946818Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037938;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.950513Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.950580Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.950596Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037914;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.956278Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.956344Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.956361Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037940;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 
2026-01-04T02:42:23.959203Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.959296Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.959312Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.965763Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.965891Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.965909Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.967993Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.968080Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.968096Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.975301Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.975371Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:23.975387Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:24.027547Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:24.027616Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:24.027630Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037906;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:24.028470Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:24.028527Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:24.028541Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=38;result=not_found; 2026-01-04T02:42:24.060038Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331265569079520:2791], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:24.060130Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:24.060412Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331265569079522:2792], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:24.060462Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:24.076334Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591331265569079531:3874] txid# 281474976710660, issues: { message: "Type \'Datetime64\' specified for column \'Datetime\', but support for new date/time 64 types is disabled (EnableTableDatetime64 feature flag is off)" severity: 1 } >> DataShardVolatile::DistributedWrite >> KqpLimits::QueryExecTimeoutCancel [GOOD] >> KqpLimits::QueryExecTimeout >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false >> KqpParams::CheckQueryLimitsWorksAsExpectedQueryService [GOOD] >> TSchemeShardLoginFinalize::NoPublicKeys >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false >> TWebLoginService::AuditLogLoginSuccess >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true >> VectorIndexBuildTest::TTxReply_DoExecute_Throws [GOOD] >> IndexBuildTest::CheckLimitWithDroppedIndexUniq [GOOD] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadata-default.txt] >> IndexBuildTest::LockUniq [GOOD] >> test_sql_streaming.py::test[pq-ReadWriteTopicWithSchema-default.txt] >> test_sql_negative.py::test[watermarks-bad_column-default.txt] >> IndexBuildTest::Lock >> VectorIndexBuildTest::TTxProgress_Throws >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady >> KqpQuery::UpdateThenDelete+UseSink [GOOD] >> TWebLoginService::AuditLogLoginSuccess [GOOD] >> IndexBuildTest::ShadowDataEdgeCases [GOOD] >> IndexBuildTest::WithFollowers >> TWebLoginService::AuditLogLoginBadPassword |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest |92.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true >> TSchemeShardLoginFinalize::NoPublicKeys [GOOD] >> TSchemeShardLoginFinalize::InvalidPassword >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckQueryLimitsWorksAsExpectedQueryService [GOOD] Test command err: Trying to start YDB, gRPC: 8602, MsgBus: 18994 2026-01-04T02:41:47.508989Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331105174833006:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:47.509561Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:47.749930Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:47.828656Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:47.828780Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:47.840766Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:47.844266Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331105174832975:2081] 1767494507507457 != 1767494507507460 2026-01-04T02:41:47.856033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:47.951168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:47.951197Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:47.951205Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:47.951306Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:47.985545Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:48.391895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:48.467563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:48.521931Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:48.595212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:48.738253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:48.813834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:50.735580Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331118059736731:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:50.735735Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:50.736203Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331118059736741:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:50.736255Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:51.193731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:51.228708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:51.263470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:51.295280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:51.333773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:51.370473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:51.438377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:51.490857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:51.584768Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331122354704911:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:51.584867Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:51.585091Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331122354704916:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:51.585103Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331122354704917:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:51.585142Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:51.588888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:51.601364Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331122354704920:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:41:51.711760Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331122354704973:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:52.509296Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331105174833006:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:52.509373Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 16426, MsgBus: 19557 2026-01-04T02:41:54.587699Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331136943038910:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:54.587738Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_sn ... lterResource ok# false data# peer# 2026-01-04T02:42:27.641418Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8a58880] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DropResource ok# false data# peer# 2026-01-04T02:42:27.641552Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f88a2980] received request Name# Ydb.RateLimiter.V1.RateLimiterService/ListResources ok# false data# peer# 2026-01-04T02:42:27.641585Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8a55080] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DescribeResource ok# false data# peer# 2026-01-04T02:42:27.641753Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8749680] received request Name# Ydb.RateLimiter.V1.RateLimiterService/AcquireResource ok# false data# peer# 2026-01-04T02:42:27.641770Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f88c9180] received request Name# Ydb.DataStreams.V1.DataStreamsService/CreateStream ok# false data# peer# 2026-01-04T02:42:27.641961Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f88c7580] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreams ok# false data# peer# 2026-01-04T02:42:27.641973Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f88c7c80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeleteStream ok# false data# peer# 2026-01-04T02:42:27.642152Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f880f280] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStream ok# false data# peer# 2026-01-04T02:42:27.642185Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f89d4d80] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListShards ok# false data# peer# 2026-01-04T02:42:27.642351Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8b5b180] received request Name# Ydb.DataStreams.V1.DataStreamsService/SetWriteQuota ok# false data# peer# 2026-01-04T02:42:27.642379Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8b58780] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStream ok# false data# peer# 2026-01-04T02:42:27.642543Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8a41c80] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecord ok# false data# peer# 2026-01-04T02:42:27.642570Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: 
[0x7da5f8a58180] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecords ok# false data# peer# 2026-01-04T02:42:27.642755Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8897a80] received request Name# Ydb.DataStreams.V1.DataStreamsService/GetRecords ok# false data# peer# 2026-01-04T02:42:27.642780Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f89d2a80] received request Name# Ydb.DataStreams.V1.DataStreamsService/GetShardIterator ok# false data# peer# 2026-01-04T02:42:27.642943Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f87eb480] received request Name# Ydb.DataStreams.V1.DataStreamsService/SubscribeToShard ok# false data# peer# 2026-01-04T02:42:27.642972Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f87ec980] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeLimits ok# false data# peer# 2026-01-04T02:42:27.643111Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f87ede80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamSummary ok# false data# peer# 2026-01-04T02:42:27.643174Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8895780] received request Name# Ydb.DataStreams.V1.DataStreamsService/DecreaseStreamRetentionPeriod ok# false data# peer# 2026-01-04T02:42:27.643268Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8a53480] received request Name# Ydb.DataStreams.V1.DataStreamsService/IncreaseStreamRetentionPeriod ok# false data# peer# 2026-01-04T02:42:27.643374Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8a53b80] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateShardCount ok# false data# peer# 2026-01-04T02:42:27.643448Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f87ebb80] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStreamMode ok# false data# peer# 2026-01-04T02:42:27.643574Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f87ed080] received request Name# Ydb.DataStreams.V1.DataStreamsService/RegisterStreamConsumer ok# false data# peer# 2026-01-04T02:42:27.643601Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f87ed780] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeregisterStreamConsumer ok# false data# peer# 2026-01-04T02:42:27.643771Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8a42a80] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreamConsumers ok# false data# peer# 2026-01-04T02:42:27.643774Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f88a2280] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamConsumer ok# false data# peer# 2026-01-04T02:42:27.643953Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8a43f80] received request Name# Ydb.DataStreams.V1.DataStreamsService/AddTagsToStream ok# false data# peer# 2026-01-04T02:42:27.644001Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f87e6780] received request Name# Ydb.DataStreams.V1.DataStreamsService/DisableEnhancedMonitoring ok# false data# peer# 2026-01-04T02:42:27.644081Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8b55680] received request Name# Ydb.DataStreams.V1.DataStreamsService/EnableEnhancedMonitoring ok# false data# peer# 2026-01-04T02:42:27.644201Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8b54f80] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListTagsForStream ok# false data# peer# 2026-01-04T02:42:27.644269Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8b55d80] received request Name# Ydb.DataStreams.V1.DataStreamsService/MergeShards ok# false data# peer# 
2026-01-04T02:42:27.644402Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8b51780] received request Name# Ydb.DataStreams.V1.DataStreamsService/RemoveTagsFromStream ok# false data# peer# 2026-01-04T02:42:27.644446Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8b53a80] received request Name# Ydb.DataStreams.V1.DataStreamsService/SplitShard ok# false data# peer# 2026-01-04T02:42:27.644585Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8b56b80] received request Name# Ydb.DataStreams.V1.DataStreamsService/StartStreamEncryption ok# false data# peer# 2026-01-04T02:42:27.644631Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8b5a380] received request Name# Ydb.DataStreams.V1.DataStreamsService/StopStreamEncryption ok# false data# peer# 2026-01-04T02:42:27.644753Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8b5cd80] received request Name# Ydb.Monitoring.V1.MonitoringService/SelfCheck ok# false data# peer# 2026-01-04T02:42:27.644850Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8b4e680] received request Name# Ydb.Monitoring.V1.MonitoringService/NodeCheck ok# false data# peer# 2026-01-04T02:42:27.644930Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8b5db80] received request Name# Ydb.Monitoring.V1.MonitoringService/ClusterState ok# false data# peer# 2026-01-04T02:42:27.645055Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8b61380] received request Name# Ydb.Query.V1.QueryService/CreateSession ok# false data# peer# 2026-01-04T02:42:27.645116Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8983080] received request Name# Ydb.Query.V1.QueryService/DeleteSession ok# false data# peer# 2026-01-04T02:42:27.645252Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8946a80] received request Name# Ydb.Query.V1.QueryService/AttachSession ok# false data# peer# 2026-01-04T02:42:27.645299Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f89ff480] received request Name# Ydb.Query.V1.QueryService/BeginTransaction ok# false data# peer# 2026-01-04T02:42:27.645448Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f89fe680] received request Name# Ydb.Query.V1.QueryService/CommitTransaction ok# false data# peer# 2026-01-04T02:42:27.645502Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8a00980] received request Name# Ydb.Query.V1.QueryService/RollbackTransaction ok# false data# peer# 2026-01-04T02:42:27.645648Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8748880] received request Name# Ydb.Query.V1.QueryService/ExecuteQuery ok# false data# peer# 2026-01-04T02:42:27.645707Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8b5e980] received request Name# Ydb.Query.V1.QueryService/ExecuteScript ok# false data# peer# 2026-01-04T02:42:27.645749Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8b60580] received request Name# Ydb.Query.V1.QueryService/FetchScriptResults ok# false data# peer# 2026-01-04T02:42:27.645914Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8a00280] received request Name# Ydb.Tablet.V1.TabletService/ExecuteTabletMiniKQL ok# false data# peer# 2026-01-04T02:42:27.645983Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8a01780] received request Name# Ydb.Tablet.V1.TabletService/ChangeTabletSchema ok# false data# peer# 2026-01-04T02:42:27.646156Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8a02580] received request Name# Ydb.Tablet.V1.TabletService/RestartTablet ok# false data# peer# 2026-01-04T02:42:27.646191Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8a03380] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogStore ok# 
false data# peer# 2026-01-04T02:42:27.646377Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8a02c80] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogStore ok# false data# peer# 2026-01-04T02:42:27.646377Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8a04180] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogStore ok# false data# peer# 2026-01-04T02:42:27.646574Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8a03a80] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogStore ok# false data# peer# 2026-01-04T02:42:27.646580Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8a04f80] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogTable ok# false data# peer# 2026-01-04T02:42:27.646779Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8812380] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogTable ok# false data# peer# 2026-01-04T02:42:27.646785Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f861bf80] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogTable ok# false data# peer# 2026-01-04T02:42:27.646979Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8a05680] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogTable ok# false data# peer# 2026-01-04T02:42:27.647011Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8a04880] received request Name# Ydb.Auth.V1.AuthService/Login ok# false data# peer# 2026-01-04T02:42:27.647200Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f89fed80] received request Name# Ydb.Replication.V1.ReplicationService/DescribeReplication ok# false data# peer# 2026-01-04T02:42:27.647275Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f89f4c80] received request Name# Ydb.Replication.V1.ReplicationService/DescribeTransfer ok# false data# peer# 2026-01-04T02:42:27.647404Z node 5 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7da5f8a01e80] received request Name# Ydb.View.V1.ViewService/DescribeView ok# false data# peer# |92.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TSchemeShardLoginTest::RemoveUser-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false >> TWebLoginService::AuditLogLoginBadPassword [GOOD] >> TWebLoginService::AuditLogLdapLoginSuccess |92.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_ttl/test-results/unittest/{meta.json ... results_accumulator.log} |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test |92.1%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> IndexBuildTest::Lock [GOOD] >> IndexBuildTest::IndexPartitioningIsPersisted |92.1%| [TA] $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.2%| [TA] {RESULT} $(B)/ydb/core/tablet/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> VectorIndexBuildTest::TTxProgress_Throws [GOOD] >> VectorIndexBuildTest::TTxInit_Throws >> TWebLoginService::AuditLogLdapLoginSuccess [GOOD] >> TWebLoginService::AuditLogLdapLoginBadUser >> TSchemeShardLoginFinalize::InvalidPassword [GOOD] >> TSchemeShardLoginFinalize::Success >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveUser_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindow-default.txt] >> IndexBuildTest::WithFollowers [GOOD] >> IndexBuildTest::WithFollowersUniq ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::UpdateThenDelete+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 25725, MsgBus: 3517 2026-01-04T02:41:58.846232Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331152544323013:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:58.846313Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:59.108306Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:59.111257Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:59.111374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:59.114214Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:59.191851Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:59.194732Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331152544322974:2081] 1767494518844745 != 1767494518844748 2026-01-04T02:41:59.277887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:59.294342Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:59.294366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:59.294376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:59.294523Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:59.776522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:59.784984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:59.854023Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:59.855004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:00.004424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:00.148842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:00.227254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:01.935693Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331165429226736:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:01.935826Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:01.936287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331165429226746:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:01.936340Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.240931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.274063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.307527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.343728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.387545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.430483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.472518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.529381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.611459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331169724194915:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.611529Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.611838Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331169724194920:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.611868Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331169724194921:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.611903Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.615443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:02.625277Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331169724194924:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:02.714318Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331169724194975:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:03.845778Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331152544323013:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:03.845861Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:04.500005Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7591331178314129893:2539], TxId: 281474976710673, task: 1. Ctx: { TraceId ... ation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 84 } message: "At function: KiUpdateTable!" end_position { row: 3 column: 84 } severity: 1 issues { position { row: 3 column: 84 } message: "Column \'NonExistentColumn\' does not exist in table \'/Root/KeyValue\'." end_position { row: 3 column: 84 } issue_code: 2017 severity: 1 } } } tx_id# trace_id# Trying to start YDB, gRPC: 14942, MsgBus: 22260 2026-01-04T02:42:22.289640Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591331256220808258:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:22.289689Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:22.299193Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:22.387346Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:22.387446Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:22.408829Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:22.421249Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591331256220808230:2081] 1767494542288390 != 1767494542288393 2026-01-04T02:42:22.427121Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:22.435951Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:22.493602Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:22.493628Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:22.493637Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2026-01-04T02:42:22.493740Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:22.976733Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:22.986784Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:23.040540Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:23.178514Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:23.254404Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:23.299764Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:26.072587Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331273400679282:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:26.072681Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:26.072925Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331273400679291:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:26.072979Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:26.151911Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:26.187971Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:26.223494Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:26.254846Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:26.290559Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:26.330384Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:26.362502Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:26.410534Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:26.478867Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331273400680165:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:26.478962Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:26.479038Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331273400680170:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:26.479122Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331273400680172:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:26.479155Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:26.482653Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:26.492459Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591331273400680174:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:26.572085Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591331273400680225:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:27.289844Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591331256220808258:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:27.289928Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; [] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TSchemeShardLoginTest::RemoveGroup-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true >> KqpParams::BadParameterType [GOOD] >> KqpParams::CheckCacheByAst >> TWebLoginService::AuditLogLdapLoginBadUser [GOOD] >> TWebLoginService::AuditLogLogout >> KqpQuery::QueryCacheInvalidate [GOOD] >> KqpQuery::QueryCachePermissionsLoss >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true >> TSchemeShardLoginTest::AddAccess_NonYdb-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::BanUnbanUser >> TWebLoginService::AuditLogLogout [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true >> TSchemeShardLoginTest::RemoveGroup_NonExisting-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false >> IndexBuildTest::IndexPartitioningIsPersisted [GOOD] >> IndexBuildTest::IndexPartitioningIsPersistedUniq >> TSchemeShardLoginTest::RemoveUser_Groups-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false >> TSchemeShardLoginTest::UserLogin >> IndexBuildTest::WithFollowersUniq [GOOD] >> IndexBuildTest::RejectsOnDuplicatesUniq |92.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TWebLoginService::AuditLogLogout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:42:29.014955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:42:29.015032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:29.015060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:42:29.015089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:42:29.015118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:42:29.015145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:42:29.015204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:29.015275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:42:29.016116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:42:29.017505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:42:29.097267Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:42:29.097317Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:29.111936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:42:29.112472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:42:29.113532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:42:29.123438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:42:29.123838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:42:29.126150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:29.128592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:42:29.136629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:29.137944Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:42:29.153498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:29.153567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:29.153614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2026-01-04T02:42:29.153654Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:29.153691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:42:29.154212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:42:29.164114Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:42:29.305076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:42:29.306262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:42:29.307478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:42:29.307544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:42:29.308727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:42:29.308826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:29.312285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:29.313405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:42:29.313777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:42:29.313987Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:42:29.314057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:42:29.314089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 
2026-01-04T02:42:29.316276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:42:29.316338Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:42:29.316434Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:42:29.318276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:42:29.318330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:42:29.318375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:42:29.318429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:42:29.325747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:42:29.328052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:42:29.329359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:42:29.330637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:29.330774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:42:29.330821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:42:29.331133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:42:29.331183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:42:29.331381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:42:29.332390Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:42:29.335350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:29.335400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... done id#101:0 progress is 1/1 2026-01-04T02:42:32.051929Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:42:32.051977Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:42:32.052019Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:42:32.052081Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:42:32.052144Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-04T02:42:32.052185Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:42:32.052222Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:42:32.052263Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 1, subscribers: 0 2026-01-04T02:42:32.052303Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 4 2026-01-04T02:42:32.052529Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [5:273:2262] Bootstrap 2026-01-04T02:42:32.053663Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [5:273:2262] Become StateWork (SchemeCache [5:279:2268]) 2026-01-04T02:42:32.054467Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [5:273:2262] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:42:32.056845Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:42:32.056939Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2026-01-04T02:42:32.057114Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:32.057152Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:32.057257Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2026-01-04T02:42:32.057288Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:210:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-04T02:42:32.057852Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:42:32.057939Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:42:32.057976Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:42:32.058016Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 4 2026-01-04T02:42:32.058067Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:42:32.058141Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:42:32.058343Z node 5 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 2026-01-04T02:42:32.059512Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2026-01-04T02:42:32.059921Z node 5 :HTTP WARN: login_page.cpp:102: 127.0.0.1:0 POST /login 2026-01-04T02:42:32.061462Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:32.061514Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-01-04T02:42:32.210634Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:42:32.216794Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:42:32.216961Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:32.217005Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:32.217236Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2026-01-04T02:42:32.217282Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:32.217329Z node 5 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:210:2210], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-04T02:42:32.217961Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 0 2026-01-04T02:42:32.218656Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:42:32.218834Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 203us result status StatusSuccess 2026-01-04T02:42:32.219300Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsu9ItF9agFrrV40shuZp\n91ObdN/VFOG0vkfmYaCFqZbsbTQhVeHheFjPEFK9h8onBvoeSIOIxoa2Y+0RiF2T\ndpyPh8ExiJGwRWuPQkzvVqEOVC359qeDBpzKdnJCQDfKeA5lKDPVOig+rhzWQqcQ\nzBo0EhhZvJ2vJvBXmbzdcKQc0QNJlG43yEC5IIfI1fX8LGImcj1K854P4lVOlBFf\nvOK0LBP4uWr/9kgn+l2NXV3x7KQAHo7gom3twSGwOYQy4Aj3FZ6Tfqj8yytCfOqn\nHny2JweQSngiDjunp8ZRiogQ50PpKvQsRLN9i5gVK2O0QEnC0+1jlwLoUFe9r+GE\nQQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1767580952205 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:42:32.221798Z node 5 :HTTP WARN: login_page.cpp:248: 127.0.0.1:0 POST /logout 2026-01-04T02:42:32.221883Z node 5 :HTTP 
ERROR: login_page.cpp:326: Logout: No ydb_session_id cookie 2026-01-04T02:42:32.222263Z node 5 :HTTP WARN: login_page.cpp:248: 127.0.0.1:0 POST /logout 2026-01-04T02:42:32.231206Z node 5 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket **** (589A015B): Token is not in correct format 2026-01-04T02:42:32.231337Z node 5 :HTTP ERROR: login_page.cpp:326: Logout: Token is not in correct format 2026-01-04T02:42:32.231794Z node 5 :HTTP WARN: login_page.cpp:248: 127.0.0.1:0 POST /logout AUDIT LOG buffer(4): 2026-01-04T02:42:32.030680Z: component=schemeshard, tx_id=1, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//MyRoot], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T02:42:32.051646Z: component=schemeshard, tx_id=101, remote_address={none}, subject={none}, sanitized_token={none}, database=/MyRoot, operation=CREATE USER, paths=[/MyRoot], status=SUCCESS, detailed_status=StatusSuccess, login_user_level=admin, login_user=user1 2026-01-04T02:42:32.213509Z: component=grpc-login, remote_address=localhost, database=/MyRoot, operation=LOGIN, status=SUCCESS, login_user=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzY3NTM3NzUyLCJpYXQiOjE3Njc0OTQ1NTIsInN1YiI6InVzZXIxIn0.**, login_user_level=admin 2026-01-04T02:42:32.232971Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzY3NTM3NzUyLCJpYXQiOjE3Njc0OTQ1NTIsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS AUDIT LOG checked line: 2026-01-04T02:42:32.232971Z: component=web-login, remote_address=127.0.0.1, subject=user1, sanitized_token=eyJhbGciOiJQUzI1NiIsImtpZCI6IjEifQ.eyJhdWQiOlsiXC9NeVJvb3QiXSwiZXhwIjoxNzY3NTM3NzUyLCJpYXQiOjE3Njc0OTQ1NTIsInN1YiI6InVzZXIxIn0.**, operation=LOGOUT, status=SUCCESS >> KqpParams::InvalidJson [GOOD] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> LocalTableWriter::StringEscaping >> KqpQuery::GenericQueryNoRowsLimit [GOOD] >> KqpQuery::ExecuteWriteQuery >> LocalTableWriter::DataAlongWithHeartbeat >> LocalTableWriter::SupportedTypes >> LocalTableWriter::ApplyInCorrectOrder >> LocalTableWriter::WaitTxIds >> VectorIndexBuildTest::TTxInit_Throws [GOOD] >> VectorIndexBuildTest::TTxInit_Checks_EnableVectorIndex >> TSchemeShardLoginTest::RemoveGroup_Acl-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::FailedLoginWithInvalidUser >> TSchemeShardLoginTest::RemoveUser_Owner-StrictAclCheck-true [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCount >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true >> TSchemeShardLoginTest::BanUnbanUser [GOOD] >> TSchemeShardLoginTest::BanUserWithWaiting >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true >> KqpExplain::IdxFullscan [GOOD] >> KqpExplain::MultiJoinCteLinks >> IndexBuildTest::IndexPartitioningIsPersistedUniq [GOOD] >> IndexBuildTest::DropIndex >> TSchemeShardLoginTest::UserLogin [GOOD] >> TSchemeShardLoginTest::TestExternalLogin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::InvalidJson [GOOD] Test command err: Trying to start YDB, gRPC: 17805, MsgBus: 61807 2026-01-04T02:41:58.945283Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331154048245070:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:58.945443Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:59.184923Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:59.211151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:59.211281Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:59.266113Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:59.269793Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:59.277080Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331154048244960:2081] 1767494518920261 != 1767494518920264 2026-01-04T02:41:59.388702Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:59.388744Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:59.388752Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:59.388834Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:59.395275Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:59.913559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:59.935803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:41:59.957046Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:00.001074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:00.153648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:00.325493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:00.419459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.236669Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331171228116022:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.236791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.237058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331171228116031:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.237108Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.543903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.575229Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.605460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.638184Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.714377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.750643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.789195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.851970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:02.915265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331171228116900:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.915332Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.915494Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331171228116905:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.915558Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331171228116907:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.915608Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:02.919289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:02.929746Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331171228116909:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:42:03.032607Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331175523084258:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:03.935471Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331154048245070:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:03.935534Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:04.767359Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NmM2NDNjNzItYmVlMzM4MTEtOTI4MGU2ODItNDE ... _finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:25.606133Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591331270485539475:2081] 1767494545473232 != 1767494545473235 2026-01-04T02:42:25.619255Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:25.632087Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:25.632206Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:25.689109Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:25.736647Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:25.736678Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:25.736685Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:25.736773Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:25.814297Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:26.225356Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:26.235334Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-04T02:42:26.292381Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:26.455048Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:26.498231Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:26.528313Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:29.416455Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331287665410537:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:29.416542Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:29.416934Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331287665410546:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:29.416968Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:29.508314Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:29.551970Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:29.587911Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:29.626931Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:29.662492Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:29.704025Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:29.744480Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:29.795014Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:29.877266Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331287665411422:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:29.877357Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:29.877401Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331287665411427:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:29.877692Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331287665411429:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:29.877765Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:29.880884Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:29.894440Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331287665411430:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:29.996814Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331287665411482:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:30.481137Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331270485539616:2168];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:30.481228Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:31.833063Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:31.972177Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NzMwOGM5MmMtMmFjNTgxNDMtODE1MjBkZTgtMTQ4ZDRkN2U=, ActorId: [5:7591331296255346397:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3e52a544jf51axtjerf7vg, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1295: Invalid Json value status# BAD_REQUEST issues# trace_id#
: Error: ydb/core/kqp/session_actor/kqp_session_actor.cpp:1295: Invalid Json value |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD] >> TSchemeShardLoginFinalize::Success [GOOD] >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false >> test_sql_streaming.py::test[hop-GroupByHop-default.txt] >> LocalTableWriter::DecimalKeys >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] >> KqpBatchUpdate::ManyPartitions_1 [GOOD] >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::FailedLoginWithInvalidUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:42:29.015555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:42:29.015665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:29.015708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:42:29.015752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:42:29.015838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:42:29.015868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:42:29.015941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:29.016021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:42:29.016852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:42:29.017454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:42:29.117120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:42:29.117169Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:29.131720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:42:29.131908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:42:29.132047Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:42:29.145088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:42:29.145330Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:42:29.146028Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:29.146283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:42:29.149102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:29.149315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:42:29.152784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:29.152848Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:29.152970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:42:29.153030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:29.153075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:42:29.154214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:42:29.325484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.326472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.326619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:42:29.326734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.326816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.326888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.326952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.327036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.327134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.327203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.327273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.327358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.327442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.327581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.327660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
02:42:34.300324Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:42:34.300364Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:42:34.300407Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:42:34.300448Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:42:34.300525Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:42:34.302536Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:42:34.302954Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:34.303310Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [5:675:2664] Bootstrap 2026-01-04T02:42:34.304508Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [5:675:2664] Become StateWork (SchemeCache [5:680:2669]) 2026-01-04T02:42:34.304787Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:42:34.304942Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 176us result status StatusSuccess 2026-01-04T02:42:34.305376Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 
200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:42:34.305875Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [5:675:2664] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:42:34.307412Z node 5 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944 2026-01-04T02:42:34.308168Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:34.308213Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-01-04T02:42:34.369178Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: 
TTxLogin Complete, with error: Cannot find user: user1, at schemeshard: 72057594046678944 2026-01-04T02:42:34.369303Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:34.369359Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:34.369542Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:34.369590Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:210:2210], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-04T02:42:34.370110Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 0 2026-01-04T02:42:34.370462Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:42:34.370637Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 198us result status StatusSuccess 2026-01-04T02:42:34.371122Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 1 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC 
KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsiNTNa6oGt45BAI7ex9k\nc+ygl7J1tD9lH53CDsCK0jSLfZOZkUc3X6rHvuMGf5CscJuhFoTiNFNdBAmY3nss\nZXQ1tJMrqQgq9lzCoiTxjqcmzAEEEbIO1Ujo9ZbTOj5GQ0vRTMgcyaSR/E6yvJ2p\nCBPcMlgClj8sPS/7CfNy1SWnv/MEflWWbVGidbFOzKroEZpZIYxfP9GBKnt6EbCz\nijq0RSFaDU+pJQIFYEKJG6nlpahO8zSarwW0uw56baXn0jvdG2iedxX159SyNi1y\nO65XxCEhk4u9zuhb8Qy/boWwrETNQygTrIu/3G5T4c4R6tB3LjrE2DYwIQq7VQFb\niQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1767580954366 } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::TestExternalLogin [GOOD] >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain >> VectorIndexBuildTest::TTxInit_Checks_EnableVectorIndex [GOOD] >> VectorIndexBuildTest::UnknownState >> KqpLimits::ReadsetCountLimit [GOOD] >> KqpLimits::ReplySizeExceeded ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveGroup_Owner-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:42:29.016578Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:42:29.016672Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:29.016720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:42:29.016759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:42:29.016802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:42:29.016834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:42:29.016912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:29.017002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-04T02:42:29.017962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:42:29.018246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:42:29.108772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:42:29.108837Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:29.123697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:42:29.124347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:42:29.124586Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:42:29.132499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:42:29.132782Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:42:29.133533Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:29.133785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:42:29.137367Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:29.137957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:42:29.152956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:29.153044Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:29.153102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:42:29.153191Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:29.153257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:42:29.154199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:42:29.327245Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.328269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.328400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.328488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.328559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.328651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.328722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.328786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.328876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.328949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.329028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.329113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.329202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.329317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.329390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ne_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-04T02:42:34.591740Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 14] 2026-01-04T02:42:34.591765Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 28] name: top_queries_by_cpu_time_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-04T02:42:34.591784Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 28] 2026-01-04T02:42:34.591819Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 37] name: top_queries_by_duration_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-04T02:42:34.591833Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 37] 2026-01-04T02:42:34.591851Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 25] name: top_queries_by_duration_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-04T02:42:34.591878Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 25] 2026-01-04T02:42:34.591903Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 15] name: top_queries_by_read_bytes_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-04T02:42:34.591939Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 15] 2026-01-04T02:42:34.591960Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 16] name: top_queries_by_read_bytes_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-04T02:42:34.591973Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 16] 2026-01-04T02:42:34.591991Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 10] name: top_queries_by_request_units_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-04T02:42:34.592004Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2026-01-04T02:42:34.592030Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 29] name: top_queries_by_request_units_one_minute type: 
EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-04T02:42:34.592047Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 29] 2026-01-04T02:42:34.592067Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 38] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:34.592081Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:42:34.592351Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 106:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-01-04T02:42:34.592450Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-04T02:42:34.592490Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-04T02:42:34.592522Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-04T02:42:34.592550Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-04T02:42:34.592593Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:42:34.592644Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: false 2026-01-04T02:42:34.592674Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-04T02:42:34.592706Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-01-04T02:42:34.592734Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 106, publications: 1, subscribers: 0 2026-01-04T02:42:34.592763Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 106, [OwnerId: 72057594046678944, LocalPathId: 1], 12 2026-01-04T02:42:34.599079Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 106, response: Status: StatusSuccess TxId: 106 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:42:34.599207Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 106, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE GROUP, path: /MyRoot 2026-01-04T02:42:34.599421Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:34.599470Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 106, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:34.599654Z node 5 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:34.599700Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:210:2210], at schemeshard: 72057594046678944, txId: 106, path id: 1 2026-01-04T02:42:34.600271Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 106 2026-01-04T02:42:34.600385Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 106 2026-01-04T02:42:34.600434Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 106 2026-01-04T02:42:34.600475Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 106, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-01-04T02:42:34.600523Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:42:34.600617Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 106, subscribers: 0 2026-01-04T02:42:34.602810Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 TestModificationResult got TxId: 106, wait until txId: 106 2026-01-04T02:42:34.603550Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:42:34.603792Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 274us result status StatusSuccess 2026-01-04T02:42:34.604365Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 12 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 12 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Dir1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000038 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> DataShardVolatile::DistributedWrite [GOOD] >> DataShardVolatile::DistributedWriteBrokenLock >> KqpStats::DeferredEffects-UseSink [GOOD] >> KqpStats::DataQueryWithEffects-UseSink >> TSchemeShardLoginTest::AddAccess_NonExisting-StrictAclCheck-false [GOOD] >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock >> KqpExplain::SsaProgramInJsonPlan [GOOD] >> KqpExplain::UpdateConditional+UseSink >> KqpExplain::FullOuterJoin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::RemoveUser_Acl-StrictAclCheck-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:42:29.014995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:42:29.015099Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:29.015140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:42:29.015174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:42:29.015211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:42:29.015238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:42:29.015328Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:29.015394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:42:29.016259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:42:29.017444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:42:29.111134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:42:29.111193Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:29.125730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:42:29.126180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:42:29.126317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:42:29.133572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:42:29.133819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:42:29.134380Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:29.134601Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:42:29.137101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:29.137960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:42:29.152984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:29.153053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:29.153102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:42:29.153187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:29.153244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:42:29.157386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:42:29.319074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.319840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.319941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.319992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.320045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.320102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.320141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.320175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 
281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.320246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.320288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.320340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.320404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.320471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.320543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.320588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
LocalPathId: 2] 2026-01-04T02:42:35.306243Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 16] 2026-01-04T02:42:35.306274Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 10] name: top_queries_by_request_units_one_hour type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-04T02:42:35.306296Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 10] 2026-01-04T02:42:35.306324Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 29] name: top_queries_by_request_units_one_minute type: EPathTypeSysView state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-04T02:42:35.306346Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 29] 2026-01-04T02:42:35.306379Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 38] name: Dir1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:35.306407Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:42:35.306442Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 39] name: DirSub1 type: EPathTypeDir state: EPathStateNoChanges stepDropped: 0 droppedTxId: 0 parent: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:42:35.306466Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:42:35.306946Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 107:1, propose status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-01-04T02:42:35.307087Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-04T02:42:35.307135Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-04T02:42:35.307181Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-04T02:42:35.307221Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-04T02:42:35.307282Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:42:35.307352Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: false 2026-01-04T02:42:35.307396Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-04T02:42:35.307439Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-01-04T02:42:35.307487Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 107, publications: 1, subscribers: 0 2026-01-04T02:42:35.307526Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 107, [OwnerId: 72057594046678944, LocalPathId: 1], 15 2026-01-04T02:42:35.310039Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 107, response: Status: StatusSuccess TxId: 107 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:42:35.310160Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 107, database: /MyRoot, subject: , status: StatusSuccess, operation: REMOVE USER, path: /MyRoot 2026-01-04T02:42:35.310439Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:35.310491Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:35.310693Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:35.310744Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:210:2210], at schemeshard: 72057594046678944, txId: 107, path id: 1 2026-01-04T02:42:35.311291Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 107 2026-01-04T02:42:35.311407Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72057594046678944, cookie: 107 2026-01-04T02:42:35.311495Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 107 2026-01-04T02:42:35.311548Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 15 2026-01-04T02:42:35.311599Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:42:35.311702Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 107, subscribers: 0 2026-01-04T02:42:35.314376Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 TestModificationResult got TxId: 107, wait until txId: 107 2026-01-04T02:42:35.315068Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1" Options { 
ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:42:35.315256Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Dir1" took 232us result status StatusSuccess 2026-01-04T02:42:35.315682Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1" PathDescription { Self { Name: "Dir1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 2 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "DirSub1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000038 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:42:35.316460Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Dir1/DirSub1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:42:35.316633Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Dir1/DirSub1" took 187us result status StatusSuccess 2026-01-04T02:42:35.316968Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Dir1/DirSub1" PathDescription { Self { Name: "DirSub1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000038 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 2 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:42:35.317692Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:35.317790Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: Cannot find user: user1, at schemeshard: 72057594046678944 |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::TestExternalLoginWithIncorrectLdapDomain [GOOD] >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword >> IndexBuildTest::DropIndex [GOOD] >> IndexBuildTest::DropIndexUniq >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] >> LocalTableWriter::WaitTxIds [GOOD] >> LocalTableWriter::SupportedTypes [GOOD] >> LocalTableWriter::ApplyInCorrectOrder [GOOD] >> LocalTableWriter::StringEscaping [GOOD] >> KqpExplain::UpdateSecondaryConditionalPrimaryKey-UseSink [GOOD] >> KqpExplain::UpdateSecondaryConditionalSecondaryKey+UseSink >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] >> IndexBuildTest::RejectsOnDuplicatesUniq [GOOD] >> VectorIndexBuildTest::BuildTableWithEmptyShard |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/batch_operations/unittest >> KqpBatchUpdate::ManyPartitions_1 [GOOD] Test command err: Trying to start YDB, gRPC: 14613, MsgBus: 61377 2026-01-04T02:38:33.776731Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330270766131313:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:38:33.776897Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:38:33.956012Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:38:33.966252Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:38:33.966362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:38:34.031297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:38:34.038777Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:38:34.062234Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330270766131289:2081] 1767494313775712 != 1767494313775715 2026-01-04T02:38:34.107064Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:38:34.107093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:38:34.107105Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:38:34.107217Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:38:34.116188Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:38:34.474029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:38:34.481645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:38:34.536416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.685600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.791539Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:38:34.829870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:34.911107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-04T02:38:36.448758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330283651035058:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:36.448900Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:36.449271Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330283651035068:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:36.449320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:36.722818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:36.753278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:36.781339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:36.811917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:36.842390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:36.877570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:36.936312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:36.977222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:37.042458Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330287946003241:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.042550Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.042566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330287946003246:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.042856Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591330287946003248:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.042902Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:38:37.046706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:38:37.057067Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591330287946003249:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:38:37.160132Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591330287946003301:3775] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:38:38.330270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:38:38.776777Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330270766131313:2064];send_to=[0:7307199536658146131:7762515]; 20 ... 1Z node 20 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[20:7591331247691976193:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:20.018489Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:20.106154Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:20.193329Z node 20 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [20:7591331247691976166:2081] 1767494540017491 != 1767494540017494 2026-01-04T02:42:20.193476Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:20.201400Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:20.201558Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:20.218982Z node 20 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(20, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:20.329173Z node 20 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:20.330844Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:20.330880Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:20.330897Z node 20 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:20.331103Z node 20 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:21.033922Z node 20 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:21.463454Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:21.487687Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:21.588569Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:21.905287Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:22.101218Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:25.018401Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[20:7591331247691976193:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:25.018527Z node 20 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:26.847828Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7591331273461781839:2403], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:26.847990Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:26.848382Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7591331273461781849:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:26.848458Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:26.985633Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:27.039258Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:27.095699Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:27.149788Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:27.200705Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:27.254658Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:27.307542Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:27.390184Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:27.517434Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7591331277756750034:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:27.517555Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:27.517711Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7591331277756750039:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:27.517851Z node 20 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [20:7591331277756750040:2492], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:27.517949Z node 20 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:27.523978Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:27.538124Z node 20 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [20:7591331277756750043:2493], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:27.608147Z node 20 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [20:7591331277756750094:3790] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:30.689993Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/batch_operations/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeAll [GOOD] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplit [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSplitWithPartialMergeOne [GOOD] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DataAlongWithHeartbeat [GOOD] Test command err: 2026-01-04T02:42:33.468980Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331303499752927:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:33.470073Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:33.728125Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:33.772376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:33.772474Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:33.824390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:33.829272Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:33.888178Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:34.205055Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:34.205084Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:34.205093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:34.205165Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:34.477006Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:34.611198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:34.624527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:34.763411Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331307794721089:2548] Handshake: worker# [1:7591331307794721090:2549] 2026-01-04T02:42:34.763735Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331307794721089:2548] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:42:34.764002Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331307794721089:2548] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-04T02:42:34.764033Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331307794721089:2548] Send handshake: worker# [1:7591331307794721090:2549] 2026-01-04T02:42:34.765242Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 
38][1:7591331307794721089:2548] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-04T02:42:34.772013Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331307794721089:2548] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2026-01-04T02:42:34.772171Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331307794721089:2548] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2026-01-04T02:42:34.772324Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331307794721093:2548] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-04T02:42:34.772371Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331307794721089:2548] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:42:34.772443Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331307794721093:2548] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-01-04T02:42:34.774454Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331307794721093:2548] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-04T02:42:34.774533Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331307794721089:2548] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:42:34.774597Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331307794721089:2548] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::FullOuterJoin [GOOD] Test command err: Trying to start YDB, gRPC: 26007, MsgBus: 11693 2026-01-04T02:42:00.077783Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331162609346866:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:00.078230Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:00.428617Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:00.453874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:00.453977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:00.455398Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331162609346636:2081] 1767494520060359 != 1767494520060362 2026-01-04T02:42:00.458179Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:00.477329Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:00.584701Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:00.584728Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:00.584734Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:00.584815Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:00.590460Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:01.035653Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:01.072802Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:01.098388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:01.243832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:01.387332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:01.459916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:03.414606Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331175494250390:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:03.414702Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:03.416026Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331175494250400:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:03.416123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:03.715913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:03.745447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:03.773827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:03.801959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:03.831211Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:03.868305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:03.904909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:03.949961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:04.035174Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331179789218567:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:04.035288Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:04.035376Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331179789218572:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:04.035506Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331179789218574:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:04.035546Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:04.038776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:04.048533Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331179789218576:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:42:04.136632Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331179789218629:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:05.073327Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331162609346866:2256];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:05.073398Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Operators":[{"Inputs":[],"Iterator":"precompute_0_0","Name":"Iterator"}],"Node Type":"ConstantExpr","CTE Name":"precompute_0_0"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":6,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{" ... :42:28.478784Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:28.480438Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591331282115684496:2081] 1767494548379086 != 1767494548379089 2026-01-04T02:42:28.510622Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:28.510735Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:28.542492Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:28.597267Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:28.597286Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:28.597293Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:28.597357Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:28.758814Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:29.117339Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:29.132701Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:29.194844Z node 
5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:29.396040Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:29.405916Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:29.482688Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:31.901583Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331295000588254:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:31.901680Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:31.901938Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331295000588263:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:31.901994Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:31.977844Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:32.010421Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:32.039881Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:32.073250Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:32.110509Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:32.143293Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:32.216266Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:32.264271Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:32.347334Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331299295556428:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:32.347431Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:32.347437Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331299295556433:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:32.347611Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331299295556435:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:32.347661Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:32.351527Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:32.362899Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331299295556436:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:32.443473Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331299295556490:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:33.380479Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331282115684524:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:33.380563Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:34.331687Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:34.677469Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:34.716831Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TSchemeShardLoginTest::UserStayLockedOutIfEnterValidPassword [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::SupportedTypes [GOOD] Test command err: 2026-01-04T02:42:33.452519Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331305237738480:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:33.452606Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:33.721268Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:33.759917Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:33.760003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:33.820481Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:33.826292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:33.827408Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331305237738441:2081] 1767494553447675 != 1767494553447678 2026-01-04T02:42:33.909866Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:34.206561Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:34.206592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:34.206604Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:34.206671Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:34.461307Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:34.612534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:34.629343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:34.774241Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331309532706665:2552] Handshake: worker# [1:7591331309532706573:2491] 2026-01-04T02:42:34.774591Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331309532706665:2552] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:42:34.774978Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331309532706665:2552] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, 
LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-04T02:42:34.775031Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331309532706665:2552] Send handshake: worker# [1:7591331309532706573:2491] 2026-01-04T02:42:34.775913Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331309532706665:2552] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 45b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 5 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 41b Offset: 6 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 7 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 44b Offset: 8 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 66b Offset: 9 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 71b Offset: 10 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 11 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 12 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 13 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 14 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 58b Offset: 15 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 16 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 17 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 18 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: 
ProducerId: },{ Codec: RAW Data: 76b Offset: 19 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 20 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 54b Offset: 21 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 61b Offset: 22 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 51b Offset: 23 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 45b Offset: 24 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 46b Offset: 25 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 47b Offset: 26 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 50b Offset: 27 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 28 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 72b Offset: 29 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 30 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 64b Offset: 31 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-04T02:42:34.776692Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331309532706665:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 45 },{ Order: 2 BodySize: 45 },{ Order: 3 BodySize: 45 },{ Order: 4 BodySize: 45 },{ Order: 5 BodySize: 41 },{ Order: 6 BodySize: 41 },{ Order: 7 BodySize: 45 },{ Order: 8 BodySize: 44 },{ Order: 9 BodySize: 66 },{ Order: 10 BodySize: 71 },{ Order: 11 BodySize: 72 },{ Order: 12 BodySize: 49 },{ Order: 13 BodySize: 48 },{ Order: 14 BodySize: 51 },{ Order: 15 BodySize: 58 },{ Order: 16 BodySize: 51 },{ Order: 17 BodySize: 54 },{ Order: 18 BodySize: 57 },{ Order: 19 BodySize: 76 },{ Order: 20 BodySize: 45 },{ Order: 21 BodySize: 54 },{ Order: 22 BodySize: 61 },{ Order: 23 BodySize: 51 },{ Order: 24 BodySize: 45 },{ Order: 25 BodySize: 46 },{ Order: 26 BodySize: 47 },{ Order: 27 BodySize: 50 },{ Order: 28 BodySize: 49 },{ Order: 29 BodySize: 72 },{ Order: 30 BodySize: 57 },{ Order: 31 BodySize: 64 }] } 2026-01-04T02:42:34.776986Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331309532706668:2552] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-04T02:42:34.777031Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331309532706665:2552] Handle 
NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:42:34.777300Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331309532706668:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 4 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 5 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 6 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 41b },{ Order: 7 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 8 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 44b },{ Order: 9 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 66b },{ Order: 10 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 71b },{ Order: 11 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 12 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 13 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b },{ Order: 14 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 15 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 58b },{ Order: 16 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 17 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 18 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 19 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 76b },{ Order: 20 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 21 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 54b },{ Order: 22 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 61b },{ Order: 23 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 51b },{ Order: 24 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 45b },{ Order: 25 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 46b },{ Order: 26 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 47b },{ Order: 27 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 50b },{ Order: 28 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 29 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 72b },{ Order: 30 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 31 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 64b }] } 2026-01-04T02:42:34.842712Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331309532706668:2552] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-04T02:42:34.842824Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331309532706665:2552] 
Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:42:34.842904Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331309532706665:2552] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::StringEscaping [GOOD] Test command err: 2026-01-04T02:42:33.452668Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331301469980269:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:33.452730Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:33.736859Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:33.741731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:33.741872Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:33.767341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:33.875588Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:33.923220Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:34.206596Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:34.206633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:34.206657Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:34.206774Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:34.470084Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:34.625250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:34.636829Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:34.764105Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305764948451:2551] Handshake: worker# [1:7591331305764948360:2490] 2026-01-04T02:42:34.764335Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305764948451:2551] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:42:34.764552Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305764948451:2551] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-04T02:42:34.764576Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305764948451:2551] Send handshake: worker# [1:7591331305764948360:2490] 2026-01-04T02:42:34.765036Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305764948451:2551] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-04T02:42:34.765180Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305764948451:2551] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2026-01-04T02:42:34.767257Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331305764948454:2551] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-04T02:42:34.767318Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305764948451:2551] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:42:34.767402Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: 
[TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331305764948454:2551] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-01-04T02:42:34.770040Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331305764948454:2551] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-04T02:42:34.770096Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305764948451:2551] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:42:34.770131Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305764948451:2551] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::ApplyInCorrectOrder [GOOD] Test command err: 2026-01-04T02:42:33.453795Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331301598062180:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:33.462147Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:33.731880Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:33.797996Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:33.798107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:33.828783Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:33.877956Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331301598062142:2081] 1767494553447468 != 1767494553447471 2026-01-04T02:42:33.884662Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:33.942650Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:34.204896Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:34.204933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:34.204939Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:34.205013Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:34.464871Z node 1 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:34.614421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:34.619275Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:42:34.624444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:34.794215Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305893030361:2548] Handshake: worker# [1:7591331305893030269:2486] 2026-01-04T02:42:34.794578Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305893030361:2548] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:42:34.794924Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305893030361:2548] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-04T02:42:34.794974Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305893030361:2548] Send handshake: worker# [1:7591331305893030269:2486] 2026-01-04T02:42:34.795477Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305893030361:2548] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z 
WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-04T02:42:34.800808Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305893030361:2548] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2026-01-04T02:42:34.800962Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305893030361:2548] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2026-01-04T02:42:34.801125Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331305893030364:2548] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-04T02:42:34.801180Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305893030361:2548] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:42:34.801290Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331305893030364:2548] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-01-04T02:42:34.803431Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331305893030364:2548] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-04T02:42:34.803497Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305893030361:2548] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:42:34.803553Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305893030361:2548] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2026-01-04T02:42:34.804158Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305893030361:2548] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 48b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 19b Offset: 4 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-04T02:42:34.804636Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305893030361:2548] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2026-01-04T02:42:34.804749Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305893030361:2548] Handle 
NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 },{ Order: 3 BodySize: 48 }] } 2026-01-04T02:42:34.804917Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331305893030364:2548] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b },{ Order: 3 Group: 0 Step: 2 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-01-04T02:42:34.806681Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331305893030364:2548] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-04T02:42:34.806762Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305893030361:2548] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:42:34.806797Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331305893030361:2548] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2,3] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::WaitTxIds [GOOD] Test command err: 2026-01-04T02:42:33.470943Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331302366566764:2236];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:33.472514Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:33.733146Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:33.792368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:33.792503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:33.825901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:33.826179Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:33.831012Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331302366566548:2081] 1767494553449256 != 1767494553449259 2026-01-04T02:42:33.910950Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:34.204701Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:34.204841Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-04T02:42:34.204851Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:34.204965Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:34.477229Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:34.671073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:34.678026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:42:34.684609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:34.805683Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331306661534769:2550] Handshake: worker# [1:7591331306661534770:2551] 2026-01-04T02:42:34.806117Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331306661534769:2550] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:42:34.806440Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331306661534769:2550] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint32 : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-04T02:42:34.806486Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331306661534769:2550] Send handshake: worker# [1:7591331306661534770:2551] 2026-01-04T02:42:34.807065Z node 1 
:REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331306661534769:2550] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 48b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 49b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-04T02:42:34.821252Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331306661534769:2550] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 10 TxId: 0 } TxId: 1 } 2026-01-04T02:42:34.821456Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331306661534769:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 48 }] } 2026-01-04T02:42:34.821596Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331306661534773:2550] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-04T02:42:34.821659Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331306661534769:2550] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:42:34.821744Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331306661534773:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 1 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 48b }] } 2026-01-04T02:42:34.824163Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331306661534773:2550] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-04T02:42:34.824220Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331306661534769:2550] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:42:34.824259Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331306661534769:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1] } 2026-01-04T02:42:35.810653Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:502: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331306661534769:2550] Handle NKikimrReplication.TEvTxIdResult VersionTxIds { Version { Step: 20 TxId: 0 } TxId: 2 } 2026-01-04T02:42:35.810817Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331306661534769:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 2 BodySize: 49 }] } 2026-01-04T02:42:35.810941Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331306661534773:2550] 
Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 2 Group: 0 Step: 11 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 49b }] } 2026-01-04T02:42:35.822460Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331306661534773:2550] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-04T02:42:35.822533Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331306661534769:2550] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:42:35.822567Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331306661534769:2550] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [2] } |92.2%| [TA] $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... results_accumulator.log} |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest |92.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... results_accumulator.log} >> LocalTableWriter::DecimalKeys [GOOD] >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] |92.2%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/batch_operations/test-results/unittest/{meta.json ... results_accumulator.log} |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestSimpleSplit [GOOD] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest >> VectorIndexBuildTest::UnknownState [GOOD] >> KqpLimits::ComputeNodeMemoryLimit [GOOD] >> KqpLimits::DataShardReplySizeExceeded >> IndexBuildTest::DropIndexUniq [GOOD] >> TSchemeShardLoginTest::ResetFailedAttemptCount [GOOD] |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestZeroTracks [GOOD] |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest >> TSchemeShardLoginTest::BanUserWithWaiting [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/service/ut_table_writer/unittest >> LocalTableWriter::DecimalKeys [GOOD] Test command err: 2026-01-04T02:42:34.841056Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331305647826856:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:34.841133Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:35.105994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:35.106109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:35.171407Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-04T02:42:35.176181Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:35.193099Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:35.197989Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331305647826815:2081] 1767494554838583 != 1767494554838586 2026-01-04T02:42:35.329737Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:35.405604Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:35.405625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:35.405633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:35.405710Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:35.640370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:35.650210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:35.774468Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:295: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331309942795027:2548] Handshake: worker# [1:7591331309942794937:2488] 2026-01-04T02:42:35.774824Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:314: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331309942795027:2548] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/Table TableId: [72057594046644480:38:1] RequestType: ByTableId Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:42:35.775083Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:390: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331309942795027:2548] Handle TEvTxProxySchemeCache::TEvResolveKeySetResult: result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 ResultSet [{ TableId: [OwnerId: 72057594046644480, LocalPathId: 38] 
Access: 0 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 1 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Decimal(1,0) : NULL) IncFrom: 1 To: () IncTo: 0 }] } 2026-01-04T02:42:35.775134Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:420: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331309942795027:2548] Send handshake: worker# [1:7591331309942794937:2488] 2026-01-04T02:42:35.775449Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:434: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331309942795027:2548] Handle NKikimr::NReplication::NService::TEvWorker::TEvData { Source: TestSource Records [{ Codec: RAW Data: 57b Offset: 1 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 2 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: },{ Codec: RAW Data: 57b Offset: 3 SeqNo: 0 CreateTime: 1970-01-01T00:00:00.000000Z WriteTime: 1970-01-01T00:00:00.000000Z MessageGroupId: ProducerId: }] } 2026-01-04T02:42:35.775696Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:568: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331309942795027:2548] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRequestRecords { Records [{ Order: 1 BodySize: 57 },{ Order: 2 BodySize: 57 },{ Order: 3 BodySize: 57 }] } 2026-01-04T02:42:35.775919Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:54: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331309942795030:2548] Handle NKikimr::TEvTxUserProxy::TEvGetProxyServicesResponse 2026-01-04T02:42:35.775959Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331309942795027:2548] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:42:35.776048Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:74: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331309942795030:2548] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRecords { Records [{ Order: 1 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 2 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b },{ Order: 3 Group: 0 Step: 0 TxId: 0 Kind: CdcDataChange Source: Unspecified Body: 57b }] } 2026-01-04T02:42:35.781425Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:111: [TablePartitionWriter][72057594046644480:38:1][72075186224037888][1:7591331309942795030:2548] Handle NKikimrTxDataShard.TEvApplyReplicationChangesResult Status: STATUS_OK 2026-01-04T02:42:35.781515Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:599: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 38][1:7591331309942795027:2548] Handle NKikimr::NChangeExchange::TEvChangeExchangePrivate::TEvReady { PartitionId: 72075186224037888 } 2026-01-04T02:42:35.781563Z node 1 :REPLICATION_SERVICE DEBUG: base_table_writer.cpp:582: [LocalTableWriter][OwnerId: 72057594046644480, LocalPathId: 
38][1:7591331309942795027:2548] Handle NKikimr::NChangeExchange::TEvChangeExchange::TEvRemoveRecords { Records [1,2,3] } 2026-01-04T02:42:35.854030Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::UnknownState [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:42:26.377478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:42:26.377562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:26.377612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:42:26.377648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:42:26.377682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:42:26.377711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:42:26.377768Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:26.377882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:42:26.378685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:42:26.378987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:42:26.458434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:42:26.458484Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:26.472505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:42:26.472723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:42:26.472912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:42:26.481041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:42:26.481252Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:42:26.482041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:26.482284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:42:26.485140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:26.485318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:42:26.486507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:26.486567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:26.486672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:42:26.486724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:26.486828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:42:26.487022Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:42:26.639331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.640358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.640491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.640586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.640658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.640743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.640851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.640933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.641008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.641071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.641143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.641207Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.641285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.641395Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.641493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Id: 18] was 0 2026-01-04T02:42:37.977744Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2026-01-04T02:42:37.977786Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2026-01-04T02:42:37.977836Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 21] was 0 2026-01-04T02:42:37.977862Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-01-04T02:42:37.977888Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-01-04T02:42:37.977932Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-01-04T02:42:37.977986Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-01-04T02:42:37.978016Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-01-04T02:42:37.978040Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2026-01-04T02:42:37.978063Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-01-04T02:42:37.978089Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-01-04T02:42:37.978123Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-01-04T02:42:37.978147Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-01-04T02:42:37.978166Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-04T02:42:37.978181Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-04T02:42:37.978196Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-04T02:42:37.978212Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-04T02:42:37.978227Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-04T02:42:37.978242Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-04T02:42:37.978446Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:2093: TTxInit for Columns, read records: 10, at schemeshard: 72057594046678944 2026-01-04T02:42:37.978599Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:37.978728Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 4, at schemeshard: 72057594046678944 2026-01-04T02:42:37.978771Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:42:37.978806Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:42:37.978822Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 1 2026-01-04T02:42:37.978837Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 1 2026-01-04T02:42:37.979010Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 4, at schemeshard: 72057594046678944 2026-01-04T02:42:37.979290Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:37.979492Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 12, at schemeshard: 72057594046678944 2026-01-04T02:42:37.979825Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:37.979879Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:42:37.980033Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:37.980377Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:37.980442Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:37.980679Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:37.980819Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:37.980867Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:37.980940Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:37.981101Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:37.981168Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at 
schemeshard: 72057594046678944 2026-01-04T02:42:37.981553Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:37.981972Z node 5 :BUILD_INDEX DEBUG: schemeshard_index_build_info.h:626: Restored index build id# 102: TBuildInfo{ IndexBuildId: 102, Uid: , DomainPathId: [OwnerId: 72057594046678944, LocalPathId: 1], TablePathId: [OwnerId: 72057594046678944, LocalPathId: 38], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, State: Filling, SubState: None, IsBroken: 1, IsCancellationRequested: 0, Issue: Unknown build kind: 999999, SubscribersCount: 0, CreateSender: [0:0:0], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976710757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976710758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 0, UnlockTxStatus: StatusSuccess, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:42:37.982102Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:37.982232Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:37.982310Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:37.982573Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 1 tables: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:37.982658Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:37.982750Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:37.987771Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:42:37.993986Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:37.994051Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:37.994410Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:42:37.994482Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:37.994537Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:42:37.997323Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:1269:3168] 
sender: [5:1331:2058] recipient: [5:15:2062] 2026-01-04T02:42:38.059871Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot" IndexBuildId: 102 2026-01-04T02:42:38.060155Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 102 Issues { message: "Unknown build kind: 999999" severity: 1 } State: STATE_TRANSFERING_DATA Settings { source_path: "/MyRoot/vectors" max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 102 Issues { message: "Unknown build kind: 999999" severity: 1 } State: STATE_TRANSFERING_DATA Settings { source_path: "/MyRoot/vectors" max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 } 2026-01-04T02:42:38.061314Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:1703: Handle TEvRemoteHttpInfo: BuildIndexId=102&Page=BuildIndexInfo 2026-01-04T02:42:38.061416Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:441: TTxMonitoring.Execute: BuildIndexId=102&Page=BuildIndexInfo |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/service/ut_table_writer/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::ResetFailedAttemptCount [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:42:29.015006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:42:29.015109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:29.015154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:42:29.015191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:42:29.015237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:42:29.015285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:42:29.015364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:29.015457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 
600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:42:29.016379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:42:29.017460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:42:29.115369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:42:29.115434Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:29.130733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:42:29.130919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:42:29.131094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:42:29.145787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:42:29.146048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:42:29.146723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:29.146945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:42:29.149921Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:29.150087Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:42:29.152813Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:29.152880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:29.152992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:42:29.153041Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:29.153087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:42:29.154173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:42:29.314549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.315657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.315827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.315914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.316015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.316100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.316171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.316237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.316354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.316433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.316500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.316562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.316681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.316804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.316861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
d__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-01-04T02:42:34.522526Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:42:34.525657Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:42:34.525787Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:34.525854Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:34.526444Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-04T02:42:34.526528Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:34.526572Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:210:2210], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-04T02:42:34.527149Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 0 2026-01-04T02:42:34.527436Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:34.527521Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:42:34.532095Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:42:34.536100Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-04T02:42:34.536401Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:34.536480Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:42:34.540988Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:42:34.542766Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-04T02:42:34.543224Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:42:34.543427Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 244us result status StatusSuccess 2026-01-04T02:42:34.544007Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3ERIfbmf5Najy4adNzi0\nGJBoeiTVC+C9QA6Lq/7NC5rHjMrxBx7FEvR80iTcSF5uT057OrrZZAEhUHNlfb3+\nlv7AhHw2iOXP6BQymRt6rKHQXpvmqbOwd60m2eN4qpFTxRxm/GGUF/BsDZ1RYbB5\nU4BHkf1Ifq5S+HqlaD1Bu3IdYhrq8AU9kOS7kcbj46zBKXzNMep4NT2OKynFaOwG\ng/j/DK0a49ddgexBotP++zOnbszymYBvspw8Qx8D3O91q4DY3692JK1mg06kvDXP\nQdmdwSlQFFDjnHeGv0YUu7Vv8IrRlTDbaAXQVuYLzA1z7fo4bbhLle5TkUT5ALGx\nHQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1767580954517 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:42:38.544807Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:38.553579Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:42:38.556266Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:42:38.557571Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-04T02:42:38.558046Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:38.558164Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin 
Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:42:38.562527Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:42:38.564852Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-04T02:42:38.565335Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:38.565435Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:42:38.571088Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:42:38.573516Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-04T02:42:38.574093Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:38.574223Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:42:38.580364Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:42:38.585795Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2026-01-04T02:42:38.586373Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:42:38.586609Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 257us result status StatusSuccess 2026-01-04T02:42:38.587067Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 
ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3ERIfbmf5Najy4adNzi0\nGJBoeiTVC+C9QA6Lq/7NC5rHjMrxBx7FEvR80iTcSF5uT057OrrZZAEhUHNlfb3+\nlv7AhHw2iOXP6BQymRt6rKHQXpvmqbOwd60m2eN4qpFTxRxm/GGUF/BsDZ1RYbB5\nU4BHkf1Ifq5S+HqlaD1Bu3IdYhrq8AU9kOS7kcbj46zBKXzNMep4NT2OKynFaOwG\ng/j/DK0a49ddgexBotP++zOnbszymYBvspw8Qx8D3O91q4DY3692JK1mg06kvDXP\nQdmdwSlQFFDjnHeGv0YUu7Vv8IrRlTDbaAXQVuYLzA1z7fo4bbhLle5TkUT5ALGx\nHQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1767580954517 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::DropIndexUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:42:26.380503Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:42:26.380596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:26.380626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:42:26.380656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:42:26.380686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:42:26.380712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:42:26.380766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:26.380860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:42:26.381546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:42:26.381837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:42:26.454598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:42:26.454650Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:26.469062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:42:26.469264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:42:26.469448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:42:26.477074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:42:26.477317Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:42:26.477999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:26.478252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:42:26.480684Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:26.480866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:42:26.481992Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:26.482042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:26.482144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:42:26.482188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:26.482278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:42:26.482454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:42:26.599573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.600679Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.600837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.600912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.600966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.601026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.601092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.601148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.601212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.601255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.601321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.601421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.601480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.601570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.601626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
594046678944, cookie: 105 2026-01-04T02:42:38.444097Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:42:38.444131Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 43], version: 18446744073709551615 2026-01-04T02:42:38.444167Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 5 2026-01-04T02:42:38.444581Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:42:38.444659Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:42:38.444690Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:42:38.444725Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 44], version: 18446744073709551615 2026-01-04T02:42:38.444762Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 3 2026-01-04T02:42:38.446868Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:42:38.446946Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 18 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:42:38.446977Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:42:38.447010Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 18 2026-01-04T02:42:38.447045Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:42:38.447468Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:42:38.447591Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 43 
Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:42:38.447623Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:42:38.448703Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:42:38.448783Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 44 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:42:38.448811Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:42:38.450073Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:0, at schemeshard: 72057594046678944 2026-01-04T02:42:38.450140Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 105:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:42:38.450366Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 4 2026-01-04T02:42:38.450472Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 2/3 2026-01-04T02:42:38.450506Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2026-01-04T02:42:38.450547Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:0 progress is 2/3 2026-01-04T02:42:38.450579Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 2/3 2026-01-04T02:42:38.450614Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: false 2026-01-04T02:42:38.451024Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 45 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:42:38.451097Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 45 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:42:38.451126Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:42:38.451159Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 45], version: 18446744073709551615 2026-01-04T02:42:38.451191Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 45] was 4 2026-01-04T02:42:38.451263Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 2/3, is published: true 2026-01-04T02:42:38.452272Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 105:2, at schemeshard: 72057594046678944 2026-01-04T02:42:38.452323Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 105:2 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:42:38.452513Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 3 2026-01-04T02:42:38.452612Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:2 progress is 3/3 2026-01-04T02:42:38.452642Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2026-01-04T02:42:38.452678Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#105:2 progress is 3/3 2026-01-04T02:42:38.452706Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2026-01-04T02:42:38.452738Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 105, ready parts: 3/3, is published: true 2026-01-04T02:42:38.452792Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:817:2773] message: TxId: 105 2026-01-04T02:42:38.452834Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 105 ready parts: 3/3 2026-01-04T02:42:38.452875Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:0 2026-01-04T02:42:38.452907Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-04T02:42:38.453002Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 43] was 3 2026-01-04T02:42:38.453047Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:1 2026-01-04T02:42:38.453072Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:1 2026-01-04T02:42:38.453105Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 44] was 2 2026-01-04T02:42:38.453131Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 105:2 2026-01-04T02:42:38.453154Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:2 2026-01-04T02:42:38.453199Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 45] was 2 2026-01-04T02:42:38.453998Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 105 2026-01-04T02:42:38.454389Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:42:38.455971Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:42:38.456024Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:42:38.456062Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:42:38.458096Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:42:38.458244Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:42:38.458283Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [6:1349:3271] TestWaitNotification: OK eventTxId 105 |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::BanUserWithWaiting [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:42:29.014984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:42:29.015080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:29.015152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:42:29.015191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:42:29.015230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:42:29.015265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:42:29.015344Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:29.015422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:42:29.016360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:42:29.017504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:42:29.118188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:42:29.118242Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:29.133487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:42:29.134002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:42:29.134223Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:42:29.144708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:42:29.144997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:42:29.145741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:29.146061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:42:29.149776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:29.149968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:42:29.152962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:29.153030Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:29.153083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:42:29.153148Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:29.153197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:42:29.154199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:42:29.328945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.329927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.330058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.330139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.330210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.330287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.330352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.330421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.330558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.330632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.330694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.330786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.330873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.330975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.331058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
rationPropose Complete, txId: 101, response: Status: StatusSuccess TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:42:34.810762Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusSuccess, operation: CREATE USER, path: /MyRoot 2026-01-04T02:42:34.810978Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:34.811033Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:34.811245Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:34.811306Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:210:2210], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-04T02:42:34.811904Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:42:34.812028Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:42:34.812083Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:42:34.812142Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-04T02:42:34.812197Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:42:34.812312Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:42:34.814168Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 2026-01-04T02:42:34.814543Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:34.814600Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-01-04T02:42:34.863104Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:42:34.867196Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:42:34.867370Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at 
schemeshard: 72057594046678944 2026-01-04T02:42:34.867425Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:34.868790Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-04T02:42:34.868874Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:34.868928Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:210:2210], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-04T02:42:34.869599Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 0 2026-01-04T02:42:34.869954Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:34.870054Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:42:34.874874Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:42:34.878634Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-04T02:42:34.878992Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:34.879089Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:42:34.884030Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:42:34.886054Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-04T02:42:34.886418Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:34.886517Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:42:34.894831Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:42:34.896697Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-04T02:42:34.899745Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterLogin AlterLogin { ModifyUser { User: "user1" CanLogin: false } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:42:34.900240Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose 
status:StatusSuccess, reason: , at schemeshard: 72057594046678944 2026-01-04T02:42:34.900337Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:42:34.900372Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:42:34.900413Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:42:34.900450Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:42:34.900511Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:42:34.900572Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-04T02:42:34.900609Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:42:34.900646Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:42:34.900678Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 1, subscribers: 0 2026-01-04T02:42:34.900715Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-04T02:42:34.904845Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusSuccess TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:42:34.904988Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusSuccess, operation: MODIFY USER, path: /MyRoot 2026-01-04T02:42:34.905203Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:34.905268Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:34.905502Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:34.905559Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:210:2210], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-04T02:42:34.906216Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:42:34.906363Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 
2026-01-04T02:42:34.906422Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:42:34.906478Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-04T02:42:34.906537Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:42:34.906704Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:42:34.908543Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:42:38.909500Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:38.909653Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: User user1 login denied: account is blocked, at schemeshard: 72057594046678944 |92.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest |92.2%| [TA] $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |92.2%| [TA] {RESULT} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/service/ut_table_writer/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::ResetFailedAttemptCountAfterModifyUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:42:33.027696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:42:33.027779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:33.027831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:42:33.027867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:42:33.027911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:42:33.027945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:42:33.028027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:33.028120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:42:33.028926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:42:33.029205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:42:33.118097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:42:33.118154Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:33.132435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:42:33.132624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:42:33.132811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:42:33.143518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:42:33.143751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:42:33.144462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2026-01-04T02:42:33.144714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:42:33.147753Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:33.147959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:42:33.149156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:33.149221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:33.149329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:42:33.149370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:33.149410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:42:33.149592Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:42:33.308003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:33.308959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:33.309078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:33.309147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:33.309217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:33.309277Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:33.309353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:33.309422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:33.309521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:33.309596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:33.309657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:33.309714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:33.309791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:33.309925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:33.310008Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 944, LocalPathId: 10] was 0 2026-01-04T02:42:39.443073Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 11] was 0 2026-01-04T02:42:39.443094Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 12] was 0 2026-01-04T02:42:39.443116Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 13] was 0 2026-01-04T02:42:39.443138Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 14] was 0 2026-01-04T02:42:39.443170Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 15] was 0 2026-01-04T02:42:39.443202Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 16] was 0 2026-01-04T02:42:39.443224Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 17] was 0 2026-01-04T02:42:39.443249Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 18] was 0 2026-01-04T02:42:39.443273Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 19] was 0 2026-01-04T02:42:39.443298Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 20] was 0 2026-01-04T02:42:39.443319Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId 
[OwnerId: 72057594046678944, LocalPathId: 21] was 0 2026-01-04T02:42:39.443341Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 22] was 0 2026-01-04T02:42:39.443366Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 23] was 0 2026-01-04T02:42:39.443406Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 24] was 0 2026-01-04T02:42:39.443431Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 25] was 0 2026-01-04T02:42:39.443465Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 26] was 0 2026-01-04T02:42:39.443497Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 27] was 0 2026-01-04T02:42:39.443520Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 28] was 0 2026-01-04T02:42:39.443543Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 29] was 0 2026-01-04T02:42:39.443599Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 30] was 0 2026-01-04T02:42:39.443627Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 31] was 0 2026-01-04T02:42:39.443665Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 32] was 0 2026-01-04T02:42:39.443691Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 33] was 0 2026-01-04T02:42:39.446423Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 34] was 0 2026-01-04T02:42:39.446468Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 35] was 0 2026-01-04T02:42:39.446495Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 36] was 0 2026-01-04T02:42:39.446540Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 37] was 0 2026-01-04T02:42:39.446727Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.446837Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.446938Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.447061Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.447170Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.447326Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.447707Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.447861Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.448287Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.448382Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.448614Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.448777Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.448837Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.448931Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.449236Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.449335Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.449732Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.450043Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.450146Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.450219Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.450408Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.450464Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.450529Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-04T02:42:39.456420Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:42:39.461513Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72057594046678944 2026-01-04T02:42:39.461595Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:39.461697Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:42:39.461753Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:39.461830Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:42:39.464361Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [5:815:2783] sender: [5:873:2058] recipient: [5:15:2062] 2026-01-04T02:42:39.502642Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:39.502707Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:101: TTxLogin RotateKeys at schemeshard: 72057594046678944 2026-01-04T02:42:39.538488Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:42:39.544585Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:42:39.544723Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:39.544784Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:39.545232Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2026-01-04T02:42:39.545307Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:39.545349Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [5:867:2824], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-04T02:42:39.546010Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 0 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] >> KqpQuery::CreateAsSelect_BadCases [GOOD] >> KqpQuery::CreateAsSelectView |92.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/balance_coverage/ut/unittest >> TBalanceCoverageBuilderTest::TestComplexSplitWithDuplicates [GOOD] |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/balance_coverage/ut/unittest >> KqpQuery::QueryCachePermissionsLoss [GOOD] >> KqpQuery::Pure >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD] |92.3%| [TA] $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |92.3%| [TA] {RESULT} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/balance_coverage/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpQuery::ReadOverloaded+StreamLookup [GOOD] >> KqpQuery::ReadOverloaded-StreamLookup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_login/unittest >> TSchemeShardLoginTest::AccountLockoutAndAutomaticallyUnlock [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:42:29.014984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:42:29.015075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:29.015114Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:42:29.015148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:42:29.015184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:42:29.015229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:42:29.015318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:29.015390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:42:29.016282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:42:29.018119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:42:29.114754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:42:29.114812Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:29.129320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:42:29.129917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:42:29.130100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:42:29.138364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: 
TTxUpgradeSchema.Complete 2026-01-04T02:42:29.138648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:42:29.139304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:29.139582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:42:29.143942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:29.144126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:42:29.152942Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:29.153017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:29.153082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:42:29.153133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:29.153176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:42:29.154185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:42:29.304661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.306555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.306673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.306728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.306774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.306820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.306888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.306941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.307003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.307045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.307083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:42:29.307146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.307205Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.307287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:29.307349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
6678944 2026-01-04T02:42:37.048396Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:42:37.052287Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:42:37.052408Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:37.052446Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:37.052933Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-04T02:42:37.052994Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:37.053031Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:210:2210], at schemeshard: 72057594046678944, txId: 0, path id: 1 2026-01-04T02:42:37.053484Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 0 2026-01-04T02:42:37.053734Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:37.053932Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:42:37.057849Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:42:37.059688Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-04T02:42:37.059966Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:37.060027Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:42:37.063923Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:42:37.066581Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-04T02:42:37.066949Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:37.067037Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:42:37.075696Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:42:37.077883Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 
2026-01-04T02:42:37.078314Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:37.078431Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: User user1 login denied: too many failed password attempts, at schemeshard: 72057594046678944 2026-01-04T02:42:37.078839Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:37.078931Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with error: User user1 login denied: too many failed password attempts, at schemeshard: 72057594046678944 2026-01-04T02:42:37.079437Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:42:37.079665Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 291us result status StatusSuccess 2026-01-04T02:42:37.080245Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA8MhG58P2QGk4ml+bdAE/\nUGGsCHTn4iqUW+4w+VjcozZ5wLydi5vQXIk05CZaWq3z2kccl96d6LX7N5APCvQI\n5haj5Zb5pUKpzllBZiF/+xIdIfJePkI8TeMUNMdCtrT60Od4T+xbKcCCJhP//JaG\nic04nrQm3+gz3TtwvI1FpPYzOA2ASe6sYfJxIdxIBY5w78wcfrG3l1SFRr2TVY7J\n90I4e9vDV4O7dbudwiz3srZuobwCBkqtS4v6cuJF+NNOBVvSuPW0P/B2bIIL8JCi\no5TDeIgZQOq70YyWkuCZpy9zaI7+/cyRnmFOnbWL+6/2hRYNKKaCcz3V54VOdAFn\nkQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1767580957043 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 
255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:42:41.081251Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:41.086262Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:42:41.090195Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:42:41.090476Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with error: Invalid password at schemeshard: 72057594046678944 2026-01-04T02:42:41.090885Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:44: TTxLogin Execute at schemeshard: 72057594046678944 2026-01-04T02:42:41.091002Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login.cpp:96: TTxLogin Complete, with no errors, at schemeshard: 72057594046678944 2026-01-04T02:42:41.095234Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:28: TTxLoginFinalize Execute at schemeshard: 72057594046678944 2026-01-04T02:42:41.100325Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__login_finalize.cpp:57: TTxLoginFinalize Completed, with no errors at schemeshard: 72057594046678944 2026-01-04T02:42:41.100883Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:42:41.101081Z node 7 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 220us result status StatusSuccess 2026-01-04T02:42:41.101539Z node 7 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 2 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 36 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { 
SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { PublicKeys { KeyId: 1 KeyDataPEM: "-----BEGIN PUBLIC KEY-----\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA8MhG58P2QGk4ml+bdAE/\nUGGsCHTn4iqUW+4w+VjcozZ5wLydi5vQXIk05CZaWq3z2kccl96d6LX7N5APCvQI\n5haj5Zb5pUKpzllBZiF/+xIdIfJePkI8TeMUNMdCtrT60Od4T+xbKcCCJhP//JaG\nic04nrQm3+gz3TtwvI1FpPYzOA2ASe6sYfJxIdxIBY5w78wcfrG3l1SFRr2TVY7J\n90I4e9vDV4O7dbudwiz3srZuobwCBkqtS4v6cuJF+NNOBVvSuPW0P/B2bIIL8JCi\no5TDeIgZQOq70YyWkuCZpy9zaI7+/cyRnmFOnbWL+6/2hRYNKKaCcz3V54VOdAFn\nkQIDAQAB\n-----END PUBLIC KEY-----\n" ExpiresAt: 1767580957043 } Sids { Name: "user1" Type: USER } Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_login/unittest >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus >> THealthCheckTest::Basic >> DataShardVolatile::DistributedWriteBrokenLock [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink >> THealthCheckTest::SpecificServerless >> THealthCheckTest::Issues100GroupsListing >> THealthCheckTest::StaticGroupIssue >> THealthCheckTest::DatabaseDoesNotExist >> THealthCheckTest::TestNoSchemeShardResponse >> THealthCheckTest::Issues100Groups100VCardListing >> KqpExplain::MultiJoinCteLinks [GOOD] >> THealthCheckTest::OneIssueListing >> KqpStats::OneShardNonLocalExec+UseSink [GOOD] >> KqpStats::OneShardNonLocalExec-UseSink >> VectorIndexBuildTest::BuildTableWithEmptyShard [GOOD] |92.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpStats::DataQueryWithEffects-UseSink [GOOD] |92.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... results_accumulator.log} |92.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_login/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpExplain::UpdateConditional+UseSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::MultiJoinCteLinks [GOOD] Test command err: Trying to start YDB, gRPC: 3514, MsgBus: 61612 2026-01-04T02:42:07.179950Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331193562072682:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:07.180007Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:07.455235Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:07.455363Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:07.462636Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:07.494826Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:07.510757Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:07.531399Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331193562072654:2081] 1767494527178337 != 1767494527178340 2026-01-04T02:42:07.621568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:07.621601Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:07.621614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:07.621724Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:07.684625Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:08.027045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:08.038176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:42:08.091028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.204936Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:08.242591Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.394632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.459598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:10.562572Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331206446976421:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:10.562713Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:10.563191Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331206446976431:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:10.563286Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:10.875819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:10.910012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:10.940270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:10.970803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:10.998627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:11.040854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:11.081906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:11.133080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:11.206290Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331210741944597:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:11.206383Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:11.206787Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331210741944602:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:11.206832Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331210741944603:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:11.206960Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:11.210776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:11.223888Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331210741944606:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:11.307852Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331210741944657:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:12.180230Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331193562072682:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:12.180287Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"]," ... schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:35.338024Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:35.395132Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:35.547209Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:35.603023Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:35.680836Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:38.588473Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331324038663237:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:38.588595Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:38.588944Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331324038663247:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:38.589018Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:38.650281Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:38.687342Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:38.723569Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:38.758523Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:38.793826Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:38.833064Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:38.870337Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:38.922176Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:39.001302Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331328333631415:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:39.001403Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:39.001406Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331328333631420:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:39.001592Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331328333631422:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:39.001643Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:39.004994Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:39.019337Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331328333631423:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:42:39.102892Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331328333631475:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } {"Plan":{"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"PlanNodeId":7,"Plans":[{"E-Size":"0","PlanNodeId":6,"LookupKeyColumns":["Key"],"Node Type":"TableLookup","Path":"\/Root\/EightShard","Columns":["Data","Key","Text"],"E-Rows":"0","Table":"EightShard","Plans":[{"PlanNodeId":5,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Iterator":"PartitionByKey","Name":"Iterator"},{"Inputs":[],"Name":"PartitionByKey","Input":"precompute_0_0"}],"Node Type":"ConstantExpr-Aggregate","CTE Name":"precompute_0_0"}],"PlanNodeType":"Connection","E-Cost":"0"}],"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"1001"},{"Inputs":[{"InternalOperatorId":3},{"InternalOperatorId":2}],"E-Rows":"0","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"0"},{"Inputs":[],"ToFlow":"precompute_0_0","Name":"ToFlow"},{"Inputs":[{"ExternalPlanNodeId":6}],"E-Rows":"0","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Limit-InnerJoin (MapJoin)-ConstantExpr-Filter","CTE Name":"precompute_0_0"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":8}],"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"},{"PlanNodeId":3,"Subplan Name":"CTE precompute_0_0","Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["KeyValue"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/KeyValue","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key (-∞, +∞)","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Collect"}],"Node Type":"Precompute_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"lookup_by":["Key"],"columns":["Data","Key","Text"],"type":"Lookup"}]},{"name":"\/Root\/KeyValue","reads":[{"columns":["Key","Value"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":6,"Plans":[{"PlanNodeId":7,"Operators":[{"E-Rows":"0","Columns":["Data","Key","Text"],"E-Size":"0","E-Cost":"0","Name":"TableLookup","Table":"EightShard","LookupKeyColumns":["Key"]}],"Node Type":"TableLookup","PlanNodeType":"Connection"}],"Operators":[{"E-Rows":"0","Predicate":"Exist(item.Key)","Name":"Filter","E-Size":"0","E-Cost":"0"}],"Node Type":"Filter"},{"PlanNodeId":11,"Operators":[{"E-Size":"0","Name":"TableFullScan","E-Rows":"0","Table":"KeyValue","ReadColumns":["Key (-∞, +∞)","Value"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Operators":[{"E-Rows":"0","Condition":"es.Key = kv.Key","Name":"InnerJoin (MapJoin)","E-Size":"0","E-Cost":"0"}],"Node Type":"InnerJoin (MapJoin)"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node 
Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"1001"}],"Node Type":"Limit"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::BuildTableWithEmptyShard [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:42:26.370069Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:42:26.370146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:26.370175Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:42:26.370204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:42:26.370235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:42:26.370257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:42:26.371067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:26.371161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:42:26.371961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:42:26.372250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:42:26.448824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:42:26.448902Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:26.460210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:42:26.460439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:42:26.460594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:42:26.468069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:42:26.468308Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:42:26.468976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:26.469276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:42:26.471967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:26.472190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:42:26.473533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:26.473601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:26.473706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:42:26.473745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:26.473863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:42:26.474084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:42:26.642544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.643563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.643701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.643769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: 
true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.643883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.643986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.644065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.644146Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.644229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.644299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.644361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.644426Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.644518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.644644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.644743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [6:1553:3424], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725768, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725769, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 42 UploadBytes: 812 ReadRows: 80 ReadBytes: 1270 CpuTimeUs: 10708, Billed: UploadRows: 42 UploadBytes: 812 ReadRows: 80 ReadBytes: 1270 CpuTimeUs: 10708} 2026-01-04T02:42:42.798996Z node 6 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 107, subscribers count# 1 2026-01-04T02:42:42.799136Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-04T02:42:42.799199Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [6:2323:4100] TestWaitNotification: OK eventTxId 107 2026-01-04T02:42:42.799957Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-04T02:42:42.800275Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 342us result status StatusSuccess 2026-01-04T02:42:42.800754Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "embedding" Type: "String" TypeId: 4097 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "prefix" Type: "Uint32" TypeId: 2 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "String" TypeId: 4097 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "index1" LocalPathId: 33 Type: EIndexTypeGlobalVectorKmeansTree State: EIndexStateReady KeyColumnNames: "embedding" SchemaVersion: 2 PathOwnerId: 72075186233409549 DataSize: 0 IndexImplTableDescriptions { } IndexImplTableDescriptions { } VectorIndexKmeansTreeDescription { Settings { settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_UINT8 vector_dimension: 4 } clusters: 20 levels: 2 overlap_clusters: 2 } } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 
0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 34 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2026-01-04T02:42:42.801560Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table/index1/indexImplPostingTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72075186233409549 2026-01-04T02:42:42.801884Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table/index1/indexImplPostingTable" took 333us result status StatusSuccess 2026-01-04T02:42:42.802694Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table/index1/indexImplPostingTable" PathDescription { Self { Name: "indexImplPostingTable" PathId: 35 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976725758 CreateStep: 400 ParentPathId: 33 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeVectorKmeansTreeIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplPostingTable" Columns { Name: "__ydb_parent" Type: "Uint64" TypeId: 4 Id: 1 NotNull: true IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "__ydb_parent" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 
InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 KeepEraseMarkers: false MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "pool-kind-1" } Log { PreferredPoolKind: "pool-kind-1" } Data { PreferredPoolKind: "pool-kind-1" } } } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409553 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 34 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { 
TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 35 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 2026-01-04T02:42:42.807632Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268830210, Sender [6:3429:5153], Recipient [6:1738:3602]: NKikimrTabletTxBase.TEvLocalMKQL Program { Program { Text: "(\n (let range \'(\'(\'__ydb_parent (Null) (Void)) \'(\'key (Null) (Void)) ))\n (let columns \'(\'__ydb_parent))\n (let result (SelectRange \'__user__indexImplPostingTable range columns \'()))\n (return (AsList (SetResult \'Result result) ))\n )" } } ... posting table contains 10 rows |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::DataQueryWithEffects-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18207, MsgBus: 27815 2026-01-04T02:42:11.759011Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331209240660603:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:11.759091Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:11.966589Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:11.966732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:11.985929Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:12.045651Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:12.053915Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331209240660575:2081] 1767494531757165 != 1767494531757168 2026-01-04T02:42:12.060163Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:12.214366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:12.214388Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:12.214395Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:12.214506Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-01-04T02:42:12.304666Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:12.614435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:12.661946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:12.770099Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:12.802473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:12.951597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:13.017333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:15.070596Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331226420531630:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:15.070725Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:15.071269Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331226420531640:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:15.071357Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:15.390192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:15.425196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:15.458398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:15.489947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:15.520729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:15.559176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:15.593529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:15.647401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:15.721498Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331226420532512:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:15.721592Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:15.722450Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331226420532517:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:15.722521Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331226420532518:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:15.722838Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:15.726468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:15.736212Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331226420532521:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:15.801246Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331226420532574:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:16.759033Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331209240660603:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:16.759100Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:19.782527Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494537872, txId: 281474976710673] shutting down Trying to start YDB, gRPC: 30178, MsgBus: 31924 2026-01-04T02:42:20.520950Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;act ...
:3:58: Warning: IN may produce unexpected result when used with nullable arguments. Consider adding 'PRAGMA AnsiInForEmptyOrNullableItemsCollections;', code: 1108 Trying to start YDB, gRPC: 62467, MsgBus: 3039 2026-01-04T02:42:36.006325Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591331316126803369:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:36.006412Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:36.025264Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:36.105832Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:36.107154Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591331311831836025:2081] 1767494555998798 != 1767494555998801 2026-01-04T02:42:36.121021Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:36.121121Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:36.145718Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:36.232487Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:36.232513Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:36.232521Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:36.232619Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:36.256449Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:36.728267Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:36.732676Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:42:36.742927Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:36.812139Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:36.982417Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:37.031356Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:37.070660Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:39.986477Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331329011707079:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:39.986581Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:39.986890Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331329011707088:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:39.986936Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.064641Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.097114Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.126938Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.158216Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.188708Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.225321Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.287026Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.344024Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.420421Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331333306675255:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.420531Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.420641Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331333306675260:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.420771Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331333306675262:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.420825Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.424629Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:40.436087Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591331333306675264:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:40.530265Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591331333306675315:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:41.006297Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591331316126803369:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:41.006368Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateConditional+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12560, MsgBus: 62476 2026-01-04T02:42:04.914209Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331180706444808:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:04.914276Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:05.129822Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:05.156686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:05.156808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:05.203263Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:05.239965Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:05.324428Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:05.324455Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:05.324462Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:05.324562Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:05.368232Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:05.761085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:05.816274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:05.922208Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:05.940494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:06.111533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:06.183276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.044608Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331197886315818:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.044746Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.045100Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331197886315828:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.045148Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.333349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.368047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.402428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.432023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.461897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.495549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.552397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.590660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.655956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331197886316696:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.656016Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331197886316701:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.656045Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.656197Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331197886316703:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.656238Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.659458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:08.669892Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331197886316704:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:42:08.726682Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331197886316756:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:09.914504Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331180706444808:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:09.914578Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"SortBy":"row.Text","Name":"Sort"},{"Scan":"Parallel","ReadRange":["Key [150, 266]"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/EightShard","E-Rows":"1","Table":"EightShard","ReadColumns":["Data","Key","Text"],"E-Cost":"0"}],"Node Type":"Sort-TableRangeScan"}],"Node Type":"Merge","SortColumns":["Text (Asc)"],"PlanNodeType":"Connection"}],"Node Type":"St ... 01-04T02:42:36.236800Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:36.264253Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:36.324947Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:36.324971Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:36.324981Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:36.325086Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:36.409157Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:36.883218Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:36.889127Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:42:36.900449Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:36.991314Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:37.124131Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:37.177030Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:37.255472Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.253900Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331335060347820:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.253999Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.254282Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331335060347829:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.254345Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.345397Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.378497Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.410566Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.444079Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.474998Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.508985Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.543589Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.650852Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.720893Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331335060348699:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.720969Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.721035Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331335060348704:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.721226Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331335060348706:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.721311Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.724570Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:40.734337Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331335060348707:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:42:40.804769Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331335060348759:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:41.061511Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331317880476774:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:41.061593Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"Tables":["EightShard"],"PlanNodeId":5,"Operators":[{"Inputs":[],"Path":"\/Root\/EightShard","Name":"Upsert","SinkType":"KqpTableSink","Table":"EightShard"}],"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Operators":[{"E-Rows":"0","Inputs":[{"ExternalPlanNodeId":1}],"Predicate":"item.Data \u003E 0","E-Cost":"0","E-Size":"0","Name":"Filter"}],"Plans":[{"Tables":["EightShard"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableFullScan","Inputs":[],"Path":"\/Root\/EightShard","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"EightShard","ReadColumns":["Key (-∞, +∞)","Data"],"E-Cost":"0"}],"Node Type":"TableFullScan"}],"Node Type":"Filter"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Stage"}],"Node Type":"Sink"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/EightShard","reads":[{"columns":["Data","Key"],"scan_by":["Key (-∞, +∞)"],"type":"FullScan"}],"writes":[{"columns":["Data","Key"],"type":"MultiUpsert"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Operators":[{"Name":"Upsert","SinkType":"KqpTableSink","Table":"EightShard"}],"Node Type":"Upsert"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> THealthCheckTest::Basic [GOOD] >> THealthCheckTest::BasicNodeCheckRequest >> THealthCheckTest::DatabaseDoesNotExist [GOOD] >> THealthCheckTest::BridgeGroupNoIssues >> KqpLimits::ReplySizeExceeded [GOOD] >> KqpExplain::UpdateSecondaryConditionalSecondaryKey+UseSink [GOOD] >> KqpExplain::UpdateSecondaryConditionalSecondaryKey-UseSink >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 >> TColumnShardTestReadWrite::WriteReadDuplicate >> EvWrite::WriteInTransaction >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 >> TColumnShardTestReadWrite::ReadGroupBy+SimpleReader >> TColumnShardTestReadWrite::CompactionGC >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 >> TColumnShardTestReadWrite::ReadWithProgramLike >> TColumnShardTestReadWrite::WriteReadModifications >> TColumnShardTestReadWrite::WriteReadNoCompression >> KqpQuery::CreateAsSelectView [GOOD] >> KqpQuery::CreateTableAs_MkDir >> THealthCheckTest::SpecificServerless [GOOD] >> THealthCheckTest::SpecificServerlessWithExclusiveNodes >> KqpQuery::Pure [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::ReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 15908, MsgBus: 9225 
2026-01-04T02:41:51.494642Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331121357013444:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:51.497130Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:51.783756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:51.783937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:51.787596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:51.850300Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:51.893899Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331121357013401:2081] 1767494511490736 != 1767494511490739 2026-01-04T02:41:51.911454Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:52.007443Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:52.007465Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:52.007470Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:52.007526Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:52.060085Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:52.439076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:52.495652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:52.515740Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:56.493443Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331121357013444:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:56.500005Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:01.150541Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331164306688159:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:01.150624Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:01.150903Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331164306688171:2446], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:01.150932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331164306688172:2447], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:01.151012Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:01.154422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:01.165220Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331164306688175:2448], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:42:01.244448Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331164306688226:3173] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:01.572955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:06.790802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:42:06.790841Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 19062, MsgBus: 27564 2026-01-04T02:42:10.125149Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331202292925018:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:10.125284Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:10.145188Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:10.232435Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:10.235238Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591331202292924997:2081] 1767494530124828 != 1767494530124831 2026-01-04T02:42:10.278497Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:10.278585Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:10.286061Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:10.317482Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:10.343928Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:10.343958Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:10.343973Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:10.344085Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:10.728666Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:10.748133Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:11.176425Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:15.125620Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591331202292925018:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:15.125698Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:18.710709Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331236652665175:2443], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.710787Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331236652665164:2440], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.710902Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.711256Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331236652665179:2445], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.711326Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND ... e 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:35.969317Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:36.061602Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:36.066667Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:36.066764Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:36.070190Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:36.173773Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:36.173820Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:36.173832Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:36.173932Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:36.197788Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:36.770132Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:36.782899Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:36.848097Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:36.957889Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:37.039991Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:37.118388Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:39.899590Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331330191371707:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:39.899661Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:39.899863Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331330191371717:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:39.899891Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:39.968691Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.002537Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.031789Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.060970Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.094047Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.126548Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.160117Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.210229Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:40.282439Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331334486339882:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.282517Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.282701Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331334486339887:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.282733Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331334486339888:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.282759Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:40.286211Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:40.296293Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331334486339891:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:40.359542Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331334486339942:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:40.951406Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331313011500921:2269];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:40.951480Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:42.203411Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:45.570000Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZGIzNzU4OTAtMzQ5MTc3ZDEtN2E5MTU4NzItYmJmOTlkNDI=, ActorId: [5:7591331343076274857:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3e5fa41bg9qw4hbgxakke0, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Query result size limit exceeded. (51202587 > 50331648)" issue_code: 2013 severity: 1 } trace_id# |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TColumnShardTestReadWrite::WriteReadStandalone >> EvWrite::WriteInTransaction [GOOD] >> EvWrite::WriteWithLock >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] >> KqpLimits::DataShardReplySizeExceeded [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] >> THealthCheckTest::BasicNodeCheckRequest [GOOD] >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::Pure [GOOD] Test command err: Trying to start YDB, gRPC: 9140, MsgBus: 9846 2026-01-04T02:42:08.195386Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331195386764825:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:08.195677Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:08.387449Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:08.401270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:08.401399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:08.450387Z node 1 :HIVE 
WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:08.474155Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331195386764725:2081] 1767494528161468 != 1767494528161471 2026-01-04T02:42:08.484290Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:08.554476Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:08.554493Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:08.554497Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:08.554563Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:08.668440Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:08.948124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:09.001301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:09.134616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:09.231560Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:09.268034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:09.338486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:11.311701Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331208271668489:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:11.311827Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:11.312343Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331208271668499:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:11.312380Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:11.642498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:11.681777Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:11.715104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:11.744065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:11.772297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:11.804309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:11.859193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:11.904593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:11.996769Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331208271669369:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:11.996868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:11.997237Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331208271669374:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:11.997273Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331208271669375:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:11.997299Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:12.000783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:12.010581Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331208271669378:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:42:12.092627Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331212566636725:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:13.193608Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331195386764825:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:13.193674Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 4757, MsgBus: 16733 2026-01-04T02:42:14.668540Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331222433414306:2171];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:14.669695Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snap ... !" end_position { row: 12 column: 30 } severity: 1 issues { position { row: 12 column: 30 } message: "Cannot find table \'db.[/Root/Test]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 12 column: 30 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# Trying to start YDB, gRPC: 17019, MsgBus: 12733 2026-01-04T02:42:41.120457Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591331336562101884:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:41.120521Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:41.136195Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:41.236815Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591331336562101855:2081] 1767494561119368 != 1767494561119371 2026-01-04T02:42:41.247810Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:41.250535Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:41.250601Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:41.267451Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:41.322541Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:41.322567Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:41.322575Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:41.322663Z node 5 
:NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:41.325879Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:41.840589Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:41.849651Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:41.899784Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:42.069918Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:42.131705Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:42.137679Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:44.736935Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331349447005612:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:44.737047Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:44.737311Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331349447005621:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:44.737370Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:44.841483Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:44.873631Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:44.907418Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:44.940556Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:44.971839Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:45.004995Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:45.037284Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:45.085049Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:45.166252Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331353741973790:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:45.166339Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331353741973795:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:45.166353Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:45.166601Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331353741973797:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:45.166654Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:45.169542Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:45.180367Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331353741973798:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:45.237547Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331353741973850:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:46.120811Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331336562101884:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:46.120908Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> EvWrite::WriteWithLock [GOOD] >> THealthCheckTest::Issues100Groups100VCardListing [GOOD] >> THealthCheckTest::Issues100Groups100VCardMerging >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp >> THealthCheckTest::Issues100GroupsListing [GOOD] >> THealthCheckTest::Issues100VCardListing >> THealthCheckTest::StaticGroupIssue [GOOD] >> THealthCheckTest::StorageLimit95 >> THealthCheckTest::OneIssueListing [GOOD] >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus >> THealthCheckTest::RedGroupIssueWhenDisintegratedGroupStatus [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues >> TColumnShardTestReadWrite::RebootWriteRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramLike [GOOD] Test command err: 2026-01-04T02:42:47.249857Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:42:47.272241Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:42:47.273091Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:42:47.285909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:42:47.286113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:42:47.286305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:42:47.286378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:42:47.286462Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:42:47.286536Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:42:47.286599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:42:47.286681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:42:47.286754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:42:47.286835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.286962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:42:47.287041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:42:47.287128Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:42:47.312213Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:42:47.312337Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:42:47.312385Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:42:47.312546Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.314196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:42:47.314320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:42:47.314365Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:42:47.314511Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 
2026-01-04T02:42:47.314579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:42:47.314632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:42:47.314667Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:42:47.314860Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.314926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:42:47.314967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:42:47.315011Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:42:47.315173Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:42:47.315249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:42:47.315293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:42:47.315324Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:42:47.315371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:42:47.315405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:42:47.315453Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:42:47.315495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:42:47.315540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:42:47.315572Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:42:47.315812Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:42:47.315871Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:42:47.315904Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:42:47.316036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:42:47.316077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.316122Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.316171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:42:47.316215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:42:47.316258Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:42:47.316307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:42:47.316344Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:42:47.316373Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:42:47.316491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:42:47.316537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
nstructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-04T02:42:48.268692Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=10;finished=1; 2026-01-04T02:42:48.268743Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:42:48.268779Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:42:48.268953Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:42:48.269102Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:10;schema=message: string;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:48.269155Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:42:48.269309Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=10; 2026-01-04T02:42:48.269364Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=10;batch_columns=message; 2026-01-04T02:42:48.269581Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:369:2381];bytes=61;rows=10;faults=0;finished=0;fault=0;schema=message: string; 2026-01-04T02:42:48.269712Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:48.269871Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:48.269986Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:48.270127Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:42:48.270269Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:48.270402Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:48.270618Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:370:2382] finished for tablet 9437184 2026-01-04T02:42:48.271042Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:369:2381];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.005},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.007}],"full":{"a":1571599,"name":"_full_task","f":1571599,"d_finished":0,"c":0,"l":1579206,"d":7607},"events":[{"name":"bootstrap","f":1571779,"d_finished":1062,"c":1,"l":1572841,"d":1062},{"a":1578648,"name":"ack","f":1577479,"d_finished":1075,"c":1,"l":1578554,"d":1633},{"a":1578637,"name":"processing","f":1572949,"d_finished":2937,"c":3,"l":1578557,"d":3506},{"name":"ProduceResults","f":1572444,"d_finished":1985,"c":6,"l":1578966,"d":1985},{"a":1578970,"name":"Finish","f":1578970,"d_finished":0,"c":0,"l":1579206,"d":236},{"name":"task_result","f":1572961,"d_finished":1815,"c":2,"l":1577340,"d":1815}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:48.271116Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:369:2381];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:42:48.271482Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:369:2381];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.005},{"events":["l_ProduceResults","f_Finish"],"t":0.007},{"events":["l_ack","l_processing","l_Finish"],"t":0.008}],"full":{"a":1571599,"name":"_full_task","f":1571599,"d_finished":0,"c":0,"l":1579692,"d":8093},"events":[{"name":"bootstrap","f":1571779,"d_finished":1062,"c":1,"l":1572841,"d":1062},{"a":1578648,"name":"ack","f":1577479,"d_finished":1075,"c":1,"l":1578554,"d":2119},{"a":1578637,"name":"processing","f":1572949,"d_finished":2937,"c":3,"l":1578557,"d":3992},{"name":"ProduceResults","f":1572444,"d_finished":1985,"c":6,"l":1578966,"d":1985},{"a":1578970,"name":"Finish","f":1578970,"d_finished":0,"c":0,"l":1579692,"d":722},{"name":"task_result","f":1572961,"d_finished":1815,"c":2,"l":1577340,"d":1815}],"id":"9437184::6"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:48.271563Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:42:48.261613Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-04T02:42:48.271599Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:42:48.271708Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:370:2382];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=6;column_names=message;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=6;column_names=message;);;program_input=(column_ids=6;column_names=message;);;; |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadModifications [GOOD] Test command err: 2026-01-04T02:42:47.249932Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:42:47.284695Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:42:47.284934Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 
2026-01-04T02:42:47.292774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:42:47.293080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:42:47.293340Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:42:47.293463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:42:47.293577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:42:47.293693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:42:47.293855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:42:47.294007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:42:47.294133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:42:47.294273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.294396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:42:47.294524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:42:47.294660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:42:47.324559Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:42:47.324703Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:42:47.324775Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:42:47.324966Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.325126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:42:47.325223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:42:47.325271Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:42:47.325392Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:42:47.325480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:42:47.325551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:42:47.325589Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:42:47.325776Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.325890Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:42:47.325942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:42:47.325974Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:42:47.326102Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:42:47.326164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:42:47.326215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-04T02:42:47.326255Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:42:47.326314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:42:47.326355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:42:47.326396Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:42:47.326442Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:42:47.326483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:42:47.326541Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:42:47.326774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:42:47.326833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:42:47.326872Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:42:47.327013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:42:47.327062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.327102Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.327184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:42:47.327228Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:42:47.327272Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-04T02:42:47.327331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:42:47.327373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:42:47.327411Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:42:47.327538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:42:47.327579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 1: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:100;event=TEvTaskProcessedResult; 2026-01-04T02:42:48.762186Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2026-01-04T02:42:48.762233Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=2;fline=abstract.cpp:22;event=OnSourcePrepared;source_idx=2;prepared=1; 2026-01-04T02:42:48.762360Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=3;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=0;SRCS:[{1,13},{2,14},{3,15},];}};]};SF:0;PR:0;); 2026-01-04T02:42:48.762441Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=0; 2026-01-04T02:42:48.762479Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:42:48.762513Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:42:48.762598Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:100;event=TEvTaskProcessedResult; 2026-01-04T02:42:48.762627Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2026-01-04T02:42:48.762686Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=1;fline=abstract.cpp:22;event=OnSourcePrepared;source_idx=1;prepared=1; 2026-01-04T02:42:48.762725Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=1;fline=abstract.cpp:30;event=finish_source;source_idx=1; 2026-01-04T02:42:48.762765Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=1;fline=abstract.cpp:30;event=finish_source;source_idx=2; 2026-01-04T02:42:48.762812Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=1;fline=abstract.cpp:30;event=finish_source;source_idx=3; 2026-01-04T02:42:48.762927Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:48.763147Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:48.763382Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:42:48.763606Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:48.763828Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:48.764276Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:456:2468] finished for tablet 9437184 2026-01-04T02:42:48.764818Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:452:2464];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.018},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.019}],"full":{"a":2013336,"name":"_full_task","f":2013336,"d_finished":0,"c":0,"l":2033249,"d":19913},"events":[{"name":"bootstrap","f":2013623,"d_finished":1552,"c":1,"l":2015175,"d":1552},{"a":2032266,"name":"ack","f":2032266,"d_finished":0,"c":0,"l":2033249,"d":983},{"a":2032248,"name":"processing","f":2015487,"d_finished":7558,"c":5,"l":2032112,"d":8559},{"name":"ProduceResults","f":2014611,"d_finished":2220,"c":7,"l":2032792,"d":2220},{"a":2032799,"name":"Finish","f":2032799,"d_finished":0,"c":0,"l":2033249,"d":450},{"name":"task_result","f":2015519,"d_finished":7446,"c":5,"l":2032109,"d":7446}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:48.764937Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:452:2464];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:42:48.765463Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:452:2464];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.018},{"events":["l_ProduceResults","f_Finish"],"t":0.019},{"events":["l_ack","l_processing","l_Finish"],"t":0.02}],"full":{"a":2013336,"name":"_full_task","f":2013336,"d_finished":0,"c":0,"l":2033901,"d":20565},"events":[{"name":"bootstrap","f":2013623,"d_finished":1552,"c":1,"l":2015175,"d":1552},{"a":2032266,"name":"ack","f":2032266,"d_finished":0,"c":0,"l":2033901,"d":1635},{"a":2032248,"name":"processing","f":2015487,"d_finished":7558,"c":5,"l":2032112,"d":9211},{"name":"ProduceResults","f":2014611,"d_finished":2220,"c":7,"l":2032792,"d":2220},{"a":2032799,"name":"Finish","f":2032799,"d_finished":0,"c":0,"l":2033901,"d":1102},{"name":"task_result","f":2015519,"d_finished":7446,"c":5,"l":2032109,"d":7446}],"id":"9437184::9"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:48.765600Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:42:48.741955Z;index_granules=0;index_portions=4;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=9344;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=9344;selected_rows=0; 2026-01-04T02:42:48.765659Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:42:48.765907Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:456:2468];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] >> KqpParams::CheckCacheByAst [GOOD] >> KqpParams::CheckCacheWithRecompilationQuery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> EvWrite::WriteWithLock [GOOD] Test command err: 2026-01-04T02:42:47.249935Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:42:47.284746Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:42:47.285002Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:42:47.292816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:42:47.293089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:42:47.293336Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:42:47.293471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:42:47.293581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:42:47.293685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:42:47.293847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:42:47.293978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:42:47.294095Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:42:47.294224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.294448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:42:47.294565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:42:47.294704Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:42:47.326773Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 
9437184 2026-01-04T02:42:47.326946Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:42:47.327015Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:42:47.327214Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.327444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:42:47.327532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:42:47.327590Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:42:47.327717Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:42:47.327807Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:42:47.327862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:42:47.327899Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:42:47.328095Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.328165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:42:47.328210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:42:47.328246Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:42:47.328355Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:42:47.328412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:42:47.328465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:42:47.328498Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:42:47.328550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:42:47.328591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:42:47.328622Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:42:47.328678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:42:47.328758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:42:47.328804Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:42:47.329053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:42:47.329367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:42:47.329410Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:42:47.329551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:42:47.329635Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.329670Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.329724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:42:47.329769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:42:47.329821Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:42:47.329874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:42:47.329914Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:42:47.329950Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:42:47.330153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:42:47.330254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... lts:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-04T02:42:49.546674Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=54;finished=1; 2026-01-04T02:42:49.546706Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:42:49.546740Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:42:49.546959Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:42:49.547119Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:54;schema=key: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:49.547158Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 
2026-01-04T02:42:49.547279Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=2;rows=54; 2026-01-04T02:42:49.547337Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=54;batch_columns=key,field; 2026-01-04T02:42:49.547466Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:206:2218];bytes=458752;rows=4096;faults=0;finished=0;fault=0;schema=key: uint64 field: string; 2026-01-04T02:42:49.547576Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:49.547681Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:49.547886Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:49.548038Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:42:49.548156Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:49.548247Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:49.548557Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:207:2219] finished for tablet 9437184 2026-01-04T02:42:49.549125Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:206:2218];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.027},{"events":["l_task_result"],"t":0.175},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.176}],"full":{"a":2662773,"name":"_full_task","f":2662773,"d_finished":0,"c":0,"l":2839723,"d":176950},"events":[{"name":"bootstrap","f":2663040,"d_finished":1299,"c":1,"l":2664339,"d":1299},{"a":2839121,"name":"ack","f":2690530,"d_finished":64881,"c":86,"l":2839040,"d":65483},{"a":2839111,"name":"processing","f":2664483,"d_finished":139116,"c":173,"l":2839044,"d":139728},{"name":"ProduceResults","f":2663853,"d_finished":110799,"c":261,"l":2839368,"d":110799},{"a":2839374,"name":"Finish","f":2839374,"d_finished":0,"c":0,"l":2839723,"d":349},{"name":"task_result","f":2664503,"d_finished":71950,"c":87,"l":2837859,"d":71950}],"id":"9437184::5"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:49.549217Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:206:2218];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:42:49.549742Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:206:2218];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.027},{"events":["l_task_result"],"t":0.175},{"events":["l_ProduceResults","f_Finish"],"t":0.176},{"events":["l_ack","l_processing","l_Finish"],"t":0.177}],"full":{"a":2662773,"name":"_full_task","f":2662773,"d_finished":0,"c":0,"l":2840366,"d":177593},"events":[{"name":"bootstrap","f":2663040,"d_finished":1299,"c":1,"l":2664339,"d":1299},{"a":2839121,"name":"ack","f":2690530,"d_finished":64881,"c":86,"l":2839040,"d":66126},{"a":2839111,"name":"processing","f":2664483,"d_finished":139116,"c":173,"l":2839044,"d":140371},{"name":"ProduceResults","f":2663853,"d_finished":110799,"c":261,"l":2839368,"d":110799},{"a":2839374,"name":"Finish","f":2839374,"d_finished":0,"c":0,"l":2840366,"d":992},{"name":"task_result","f":2664503,"d_finished":71950,"c":87,"l":2837859,"d":71950}],"id":"9437184::5"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:49.549868Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:42:49.370163Z;index_granules=0;index_portions=2;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=474480;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=474480;selected_rows=0; 2026-01-04T02:42:49.549924Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:42:49.550086Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:207:2219];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=key;);;ff=(column_ids=1,2;column_names=field,key;);;program_input=(column_ids=1,2;column_names=field,key;);;; |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> KqpQuery::CreateTableAs_MkDir [GOOD] >> THealthCheckTest::SpecificServerlessWithExclusiveNodes [GOOD] >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] >> Normalizers::CleanEmptyPortionsNormalizer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::DataShardReplySizeExceeded [GOOD] Test command err: Trying to start YDB, gRPC: 22646, MsgBus: 22420 2026-01-04T02:41:29.341965Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331029872435881:2074];send_to=[0:7307199536658146131:7762515]; 
2026-01-04T02:41:29.350720Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:29.384331Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:41:29.605640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:29.605758Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:29.691082Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:29.693881Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:29.697145Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:29.794505Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:29.794531Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:29.794561Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:29.794674Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:30.220688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:30.231120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:30.293431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:30.435509Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:32.752891Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331042757339041:2359], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:32.752905Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331042757339033:2356], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:32.753007Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:32.753321Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331042757339048:2361], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:32.753375Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:32.756474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:32.766192Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331042757339047:2360], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:41:32.844212Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331042757339100:2810] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:33.259912Z node 1 :KQP_COMPUTE WARN: log.cpp:841: fline=kqp_compute_actor_factory.cpp:34;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=1;memory=1048576; 2026-01-04T02:41:33.259960Z node 1 :KQP_COMPUTE WARN: dq_compute_memory_quota.h:152: TxId: 281474976710661, task: 1. [Mem] memory 1048576 NOT granted 2026-01-04T02:41:33.270423Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7591331047052306429:2355], TxId: 281474976710661, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3e37ngee825v0c31eedce6. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=MWQxMTgxYmUtNDlhN2E2MTItNzdmZTcyYjEtYzg0M2RmNDY=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 1: 1048586, host: ghrun-46vtxmsa7i, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 10B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 1, started at: 2026-01-04T02:41:33.221702Z }, code: 2029 }. 2026-01-04T02:41:33.277518Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037889 Cancelled read: {[1:7591331047052306431:2355], 1} 2026-01-04T02:41:33.277583Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037888 Cancelled read: {[1:7591331047052306431:2355], 0} 2026-01-04T02:41:33.277609Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037890 Cancelled read: {[1:7591331047052306431:2355], 2} 2026-01-04T02:41:33.277632Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037891 Cancelled read: {[1:7591331047052306431:2355], 3} 2026-01-04T02:41:33.277655Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037892 Cancelled read: {[1:7591331047052306431:2355], 4} 2026-01-04T02:41:33.277686Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037893 Cancelled read: {[1:7591331047052306431:2355], 5} 2026-01-04T02:41:33.277718Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037894 Cancelled read: {[1:7591331047052306431:2355], 6} 2026-01-04T02:41:33.277762Z node 1 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037895 Cancelled read: {[1:7591331047052306431:2355], 7} 2026-01-04T02:41:33.306015Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MWQxMTgxYmUtNDlhN2E2MTItNzdmZTcyYjEtYzg0M2RmNDY=, ActorId: [1:7591331042757339031:2355], ActorState: ExecuteState, LegacyTraceId: 01ke3e37ngee825v0c31eedce6, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Mkql memory limit exceeded, allocated by task 1: 1048586, host: ghrun-46vtxmsa7i, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 10B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 1, started at: 2026-01-04T02:41:33.221702Z }\n" issue_code: 2029 severity: 1 } trace_id#
: Error: Mkql memory limit exceeded, allocated by task 1: 1048586, host: ghrun-46vtxmsa7i, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 10B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 1, started at: 2026-01-04T02:41:33.221702Z } , code: 2029 query_phases { duration_us: 58357 table_access { name: "/Root/LargeTable" partitions_count: 8 } cpu_time_us: 17134 affected_shards: 8 } compilation { duration_us: 368810 cpu_time_us: 360992 } process_cpu_time_us: 769 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"LargeTable\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/LargeTable\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"LargeTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"KeyText (-\342\210\236, +\342\210\236)\",\"Data\",\"DataText\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Type\":\"Collect\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{},\"Name\":\"RESULT\",\"Push\":{}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048586,\"Max\":1048586,\"Min\":1048586,\"History\":[5,1048586]},\"Introspections\":[\"1 tasks for a single\\/sequential source scan\"],\"Tasks\":1,\"FinishedTasks\":0,\"PhysicalStageId\":0,\"Mkql\":{},\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/LargeTable\"}],\"BaseTimeMs\":1767494493270,\"CpuTimeUs\":{\"Count\":1,\"Sum\":1564,\"Max\":1564,\"Min\":1564,\"History\":[5,1564]},\"Ingress\":[{\"Pop\":{},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{}}]}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":368810,\"CpuTimeUs\":360992},\"ProcessCpuTimeUs\":769,\"TotalDurationUs\":554794,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":93749},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"LargeTable\",\"ReadColumns\":[\"Key (-\342\210\236, +\342\210\236)\",\"KeyText (-\342\210\236, +\342\210\236)\",\"Data\",\"DataText\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Node Typ ... 
2026-01-04T02:42:38.889915Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591331325188837834:2081] 1767494558813073 != 1767494558813076 2026-01-04T02:42:38.952744Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:38.957306Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:38.957430Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:38.958165Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:38.959344Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:39.046939Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:39.046960Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:39.046966Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:39.047057Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:39.171284Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:39.521325Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:39.539954Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:39.610526Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:39.785929Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:39.820635Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:39.864380Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:42.433703Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331342368708884:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:42.433797Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:42.434098Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331342368708894:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:42.434151Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:42.501767Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:42.530458Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:42.565123Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:42.598329Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:42.636098Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:42.682999Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:42.721949Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:42.798444Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:42.905588Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331342368709772:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:42.905682Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:42.905734Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331342368709777:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:42.905943Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331342368709779:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:42.905995Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:42.910242Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:42.923084Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331342368709781:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:42.982586Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331342368709832:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:43.814700Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331325188837861:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:43.814794Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:44.894837Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:47.991867Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NWI5ZjdiYWMtYTE2OThkMDItYmFjMGMzNTUtOGYyZDE0MGY=, ActorId: [5:7591331350958644746:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3e5hmt8d7va3tgmh1tzqt1, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Query result size limit exceeded. (51202593 > 50331648)" issue_code: 2013 severity: 1 } trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::CreateTableAs_MkDir [GOOD] Test command err: Trying to start YDB, gRPC: 6302, MsgBus: 18516 2026-01-04T02:42:09.874380Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331201208490323:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:09.874836Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:10.096210Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:10.096321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:10.146869Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:10.159775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:10.203998Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331201208490219:2081] 1767494529869377 != 1767494529869380 2026-01-04T02:42:10.211144Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:10.272444Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-01-04T02:42:10.272470Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:10.272477Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:10.272578Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:10.391420Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:10.688967Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:10.882263Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:12.887213Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331214093393008:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:12.887232Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331214093392996:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:12.887332Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:12.887651Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331214093393013:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:12.887712Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:12.891111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:12.901642Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331214093393012:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:42:13.003562Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331214093393065:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:13.277058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:13.683917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:13.883174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:42:13.890161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:42:13.900608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) Trying to start YDB, gRPC: 29507, MsgBus: 17142 2026-01-04T02:42:14.919891Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:14.932060Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:14.941916Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591331219469051819:2081] 1767494534842642 != 1767494534842645 2026-01-04T02:42:14.975011Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:14.979326Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:14.979396Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:14.993734Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2026-01-04T02:42:15.057224Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:15.057244Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:15.057253Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:15.057323Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:15.112777Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:15.514758Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:15.522960Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:42:15.874718Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:18.111241Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331236648921894:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.111247Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331236648921881:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.111323Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.114677Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:18.116401Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331236648921901:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.116555Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, is ... 01-04T02:42:44.456613Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:44.458964Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:44.471495Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591331350882359570:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:42:44.532465Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591331350882359623:2535] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:44.576108Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:44.613677Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:45.005715Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:45.201643Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:42:45.210110Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:42:45.483630Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591331355177327594:2935] txid# 281474976710673, issues: { message: "Check failed: path: \'/Root/.tmp/sessions\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 47], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:45.495950Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:45.621041Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591331333702489518:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:45.621119Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:46.234338Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:42:46.240460Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) Trying to start YDB, gRPC: 14139, MsgBus: 8175 2026-01-04T02:42:47.412592Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591331363344054280:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:47.412645Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:47.426830Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:47.554200Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:47.555308Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591331363344054253:2081] 1767494567411440 != 1767494567411443 2026-01-04T02:42:47.566805Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:47.566882Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:47.583011Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:47.632728Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:47.632746Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:47.632753Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:47.632820Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:47.641012Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:48.159470Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:48.417578Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:50.941059Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] 
[TPoolFetcherActor] ActorId: [5:7591331376228957023:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:50.941059Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331376228957033:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:50.941172Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:50.941398Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331376228957038:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:50.941465Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:50.944778Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:50.955146Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331376228957037:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:42:51.037900Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331380523924386:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:51.122185Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:51.343072Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:42:51.352437Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:42:51.364744Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandalone [GOOD] Test command err: 2026-01-04T02:42:48.455888Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:42:48.480805Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:42:48.480995Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:42:48.486961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:42:48.487242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:42:48.487449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:42:48.487616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:42:48.487677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:42:48.487766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:42:48.487886Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:42:48.487974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:42:48.488067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:42:48.488172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:42:48.488272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:42:48.488379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:42:48.488484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:42:48.510567Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:42:48.510690Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:42:48.510792Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:42:48.510966Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:48.511117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:42:48.511205Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:42:48.511261Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:42:48.511373Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:42:48.511430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:42:48.511479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:42:48.511510Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:42:48.511673Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:48.511753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:42:48.511814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:42:48.511843Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:42:48.511952Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:42:48.512006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:42:48.512046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:42:48.512073Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:42:48.512114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:42:48.512166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:42:48.512193Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-04T02:42:48.512231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:42:48.512280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:42:48.512306Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:42:48.512465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:42:48.512504Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:42:48.512525Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:42:48.512616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:42:48.512646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:42:48.512668Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:42:48.512725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:42:48.512761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:42:48.512795Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:42:48.512842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:42:48.512870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:42:48.512889Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:42:48.513003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-01-04T02:42:48.513049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-04T02:42:51.571709Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-04T02:42:51.571930Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:474:2485];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-04T02:42:51.572061Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:51.572167Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:51.572278Z node 1 :TX_COLUMNSHARD_SCAN 
DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:51.572425Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:42:51.572540Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:51.572653Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:51.572911Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:475:2486] finished for tablet 9437184 2026-01-04T02:42:51.573246Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:474:2485];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.011}],"full":{"a":3526426,"name":"_full_task","f":3526426,"d_finished":0,"c":0,"l":3538080,"d":11654},"events":[{"name":"bootstrap","f":3526681,"d_finished":1469,"c":1,"l":3528150,"d":1469},{"a":3537529,"name":"ack","f":3536398,"d_finished":1017,"c":1,"l":3537415,"d":1568},{"a":3537521,"name":"processing","f":3528288,"d_finished":3401,"c":3,"l":3537420,"d":3960},{"name":"ProduceResults","f":3527770,"d_finished":1855,"c":6,"l":3537793,"d":1855},{"a":3537796,"name":"Finish","f":3537796,"d_finished":0,"c":0,"l":3538080,"d":284},{"name":"task_result","f":3528307,"d_finished":2335,"c":2,"l":3536271,"d":2335}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:51.573295Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:474:2485];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:42:51.573582Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:474:2485];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ProduceResults","f_Finish"],"t":0.011},{"events":["l_ack","l_processing","l_Finish"],"t":0.012}],"full":{"a":3526426,"name":"_full_task","f":3526426,"d_finished":0,"c":0,"l":3538445,"d":12019},"events":[{"name":"bootstrap","f":3526681,"d_finished":1469,"c":1,"l":3528150,"d":1469},{"a":3537529,"name":"ack","f":3536398,"d_finished":1017,"c":1,"l":3537415,"d":1933},{"a":3537521,"name":"processing","f":3528288,"d_finished":3401,"c":3,"l":3537420,"d":4325},{"name":"ProduceResults","f":3527770,"d_finished":1855,"c":6,"l":3537793,"d":1855},{"a":3537796,"name":"Finish","f":3537796,"d_finished":0,"c":0,"l":3538445,"d":649},{"name":"task_result","f":3528307,"d_finished":2335,"c":2,"l":3536271,"d":2335}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:51.573632Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:42:51.558174Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2026-01-04T02:42:51.573661Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:42:51.573860Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> THealthCheckTest::BridgeGroupNoIssues [GOOD] >> THealthCheckTest::BridgeTwoGroups >> DataShardVolatile::DistributedWriteShardRestartBeforePlan-UseSink [GOOD] >> 
DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink >> KqpStats::SysViewClientLost [GOOD] >> KqpStats::SysViewCancelled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadNoCompression [GOOD] Test command err: 2026-01-04T02:42:47.384990Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:42:47.411030Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:42:47.411333Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:42:47.417954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:42:47.418157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:42:47.418325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:42:47.418400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:42:47.418466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:42:47.418563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:42:47.418647Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:42:47.418753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:42:47.418837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:42:47.418936Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.419054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:42:47.419181Z node 1 :TX_COLUMNSHARD WARN: 
log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:42:47.419286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:42:47.440536Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:42:47.440668Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:42:47.440721Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:42:47.440885Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.441029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:42:47.441118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:42:47.441171Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:42:47.441255Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:42:47.441308Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:42:47.441363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:42:47.441396Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:42:47.441541Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.441591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:42:47.441621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:42:47.441657Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:42:47.441738Z node 1 :TX_COLUMNSHARD 
INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:42:47.441779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:42:47.441834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:42:47.441856Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:42:47.441893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:42:47.441923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:42:47.441978Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:42:47.442016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:42:47.442047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:42:47.442069Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:42:47.442233Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:42:47.442278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:42:47.442304Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:42:47.442394Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:42:47.442424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.442445Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.442477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:42:47.442509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:42:47.442540Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:42:47.442572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:42:47.442613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:42:47.442639Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:42:47.442727Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:42:47.442763Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... d,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-04T02:42:52.528820Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-04T02:42:52.529162Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1056:2923];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-04T02:42:52.529389Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:52.529555Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:52.529732Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:52.530020Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:42:52.530226Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:52.530443Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:52.530846Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1057:2924] finished for tablet 9437184 2026-01-04T02:42:52.531401Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1056:2923];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.013}],"full":{"a":5569698,"name":"_full_task","f":5569698,"d_finished":0,"c":0,"l":5583420,"d":13722},"events":[{"name":"bootstrap","f":5569990,"d_finished":1664,"c":1,"l":5571654,"d":1664},{"a":5582495,"name":"ack","f":5580624,"d_finished":1647,"c":1,"l":5582271,"d":2572},{"a":5582477,"name":"processing","f":5571816,"d_finished":4337,"c":3,"l":5582275,"d":5280},{"name":"ProduceResults","f":5571202,"d_finished":2795,"c":6,"l":5582979,"d":2795},{"a":5582984,"name":"Finish","f":5582984,"d_finished":0,"c":0,"l":5583420,"d":436},{"name":"task_result","f":5571828,"d_finished":2626,"c":2,"l":5580380,"d":2626}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:52.531485Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1056:2923];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:42:52.532043Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1056:2923];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.013},{"events":["l_ack","l_processing","l_Finish"],"t":0.014}],"full":{"a":5569698,"name":"_full_task","f":5569698,"d_finished":0,"c":0,"l":5584040,"d":14342},"events":[{"name":"bootstrap","f":5569990,"d_finished":1664,"c":1,"l":5571654,"d":1664},{"a":5582495,"name":"ack","f":5580624,"d_finished":1647,"c":1,"l":5582271,"d":3192},{"a":5582477,"name":"processing","f":5571816,"d_finished":4337,"c":3,"l":5582275,"d":5900},{"name":"ProduceResults","f":5571202,"d_finished":2795,"c":6,"l":5582979,"d":2795},{"a":5582984,"name":"Finish","f":5582984,"d_finished":0,"c":0,"l":5584040,"d":1056},{"name":"task_result","f":5571828,"d_finished":2626,"c":2,"l":5580380,"d":2626}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:52.532138Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:42:52.513482Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-04T02:42:52.532188Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:42:52.532465Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] >> 
TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] >> THealthCheckTest::BlueGroupIssueWhenPartialGroupStatusAndReplicationDisks [GOOD] >> THealthCheckTest::BridgeGroupDegradedInBothPiles >> KqpExplain::UpdateSecondaryConditionalSecondaryKey-UseSink [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 >> THealthCheckTest::Issues100VCardListing [GOOD] >> THealthCheckTest::Issues100GroupsMerging >> THealthCheckTest::StorageLimit95 [GOOD] >> THealthCheckTest::StorageLimit87 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::PortionInfoSize [GOOD] Test command err: 192 112 28 48 32 24 16 24 56 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> THealthCheckTest::Issues100Groups100VCardMerging [GOOD] >> THealthCheckTest::GreenStatusWhenCreatingGroup ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::ReadOverloaded-StreamLookup [GOOD] Test command err: Trying to start YDB, gRPC: 10239, MsgBus: 26435 2026-01-04T02:42:08.623217Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331195902709343:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:08.624327Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:08.842686Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:08.842809Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:08.866378Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:08.889461Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:08.894475Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331195902709314:2081] 1767494528621548 != 1767494528621551 2026-01-04T02:42:08.928861Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:09.013179Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:09.013206Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:09.013213Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:09.013342Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:09.056486Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:09.425125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:09.489052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:09.598033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:09.681607Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:09.724222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:09.791491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:11.733778Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331208787613073:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:11.733913Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:11.734312Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331208787613083:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:11.734377Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:12.072474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:12.107649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:12.138834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:12.170225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:12.200541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:12.242233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:12.279088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:12.328047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:12.394457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331213082581249:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:12.394533Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:12.394591Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331213082581254:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:12.394759Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331213082581256:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:12.394809Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:12.398080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:12.408632Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331213082581257:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:12.488408Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331213082581309:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:13.623628Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331195902709343:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:13.623703Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Request timeout 50ms exceeded
: Error: Cancelling after 50ms during compilation 2026-01-04T02:42:13.915289Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MzVjNmE3NjItNGFkOTQwZjItMTkyYjhkMDEtZDAxMTE4OTk=, ActorId: [1:7591331217377548912:2531], ActorState: ExecuteState, LegacyTraceI ... ion 2026-01-04T02:42:46.619307Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:47.054887Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:47.120636Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:47.352427Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:47.724725Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:48.026460Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:48.595220Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1904:3512], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:48.595684Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:48.596613Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:1977:3531], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:48.596733Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:48.668467Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:48.877718Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:49.170644Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:49.404570Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:49.658595Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:49.922201Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:50.192137Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:50.496530Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:50.856471Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:2789:4172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:50.856564Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:50.856907Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:2793:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:50.856981Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:50.857042Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:2796:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:50.863105Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:51.020653Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:2798:4181], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:42:51.090381Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:2858:4222] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:53.297276Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:53.619660Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:53.935065Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:55.858548Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [5:3572:4765], TxId: 281474976715676, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3e5r718f186waanypk65d7. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=OWIwZThkNjYtYjBkNGE5MGMtZmEwYzZjOGEtOGU5ZGE3YjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Source[0] fatal error: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. } 2026-01-04T02:42:55.858726Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [5:3572:4765], TxId: 281474976715676, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3e5r718f186waanypk65d7. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=OWIwZThkNjYtYjBkNGE5MGMtZmEwYzZjOGEtOGU5ZGE3YjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: OVERLOADED DEFAULT_ERROR: {
: Error: Table '/Root/SecondaryKeys' retry limit exceeded. }. 2026-01-04T02:42:55.859911Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:3573:4766], TxId: 281474976715676, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3e5r718f186waanypk65d7. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=OWIwZThkNjYtYjBkNGE5MGMtZmEwYzZjOGEtOGU5ZGE3YjI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:3566:4429], status: OVERLOADED, reason: {
: Error: Terminate execution } 2026-01-04T02:42:55.860952Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=OWIwZThkNjYtYjBkNGE5MGMtZmEwYzZjOGEtOGU5ZGE3YjI=, ActorId: [5:3127:4429], ActorState: ExecuteState, LegacyTraceId: 01ke3e5r718f186waanypk65d7, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Table \'/Root/SecondaryKeys\' retry limit exceeded." severity: 1 } trace_id# >> THealthCheckTest::SharedWhenTroublesWithExclusiveNodes [GOOD] >> THealthCheckTest::ShardsLimit999 |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TColumnShardTestReadWrite::ReadWithProgram >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReady [GOOD] >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReadyUniq >> THealthCheckTest::OrangeGroupIssueWhenDegradedGroupStatus [GOOD] >> THealthCheckTest::OnlyDiskIssueOnSpaceIssues ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::UpdateSecondaryConditionalSecondaryKey-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 9264, MsgBus: 21953 2026-01-04T02:42:10.171439Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331204475756351:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:10.171572Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:10.394231Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:10.405631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:10.405770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:10.412874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:10.498029Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:10.521926Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331204475756223:2081] 1767494530161486 != 1767494530161489 2026-01-04T02:42:10.578393Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:10.607032Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:10.607054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:10.607062Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:10.607132Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:11.015683Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:11.068887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:11.176315Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:11.203762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:11.350875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:11.421340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:13.430771Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331217360659990:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:13.430893Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:13.431120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331217360660000:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:13.431178Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:13.823616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:13.855846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:13.883224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:13.911159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:13.938108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:13.980518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:14.038128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:14.085351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:14.152862Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331221655628170:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:14.152919Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:14.152976Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331221655628175:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:14.153236Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331221655628177:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:14.153276Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:14.156480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:14.170641Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331221655628178:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:14.243264Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331221655628232:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:15.170390Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331204475756351:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:15.170445Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:15.949925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_creat ... ons } 2026-01-04T02:42:50.654092Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:50.705456Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:50.739534Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:50.768091Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:50.800606Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:50.831343Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:50.866271Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:50.903252Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:50.951950Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:51.046224Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331378794392288:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:51.046325Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:51.046330Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331378794392293:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:51.046566Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331378794392295:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:51.046621Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:51.049996Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:51.060124Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331378794392296:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:42:51.133444Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331378794392348:3776] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:51.708264Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331357319553080:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:51.708336Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:53.266146Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:53.304805Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:53.344480Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"Plan":{"Plans":[{"PlanNodeId":18,"Plans":[{"Tables":["SecondaryKeys"],"PlanNodeId":17,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys","Name":"Upsert","Table":"SecondaryKeys"},{"Inputs":[],"Iterator":"precompute_1_2","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_1_2"}],"Node Type":"Effect"},{"PlanNodeId":16,"Plans":[{"Tables":["SecondaryKeys\/Index\/indexImplTable"],"PlanNodeId":15,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys\/Index\/indexImplTable","Name":"Delete","Table":"SecondaryKeys\/Index\/indexImplTable"},{"Inputs":[],"Iterator":"precompute_1_0","Name":"Iterator"}],"Node Type":"Delete-ConstantExpr","CTE Name":"precompute_1_0"}],"Node Type":"Effect"},{"PlanNodeId":14,"Plans":[{"Tables":["SecondaryKeys\/Index\/indexImplTable"],"PlanNodeId":13,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Path":"\/Root\/SecondaryKeys\/Index\/indexImplTable","Name":"Upsert","Table":"SecondaryKeys\/Index\/indexImplTable"},{"Inputs":[],"Iterator":"precompute_1_1","Name":"Iterator"}],"Node Type":"Upsert-ConstantExpr","CTE Name":"precompute_1_1"}],"Node Type":"Effect"},{"PlanNodeId":11,"Plans":[{"PlanNodeId":10,"Plans":[{"PlanNodeId":9,"Plans":[{"PlanNodeId":8,"Plans":[{"Tables":["SecondaryKeys\/Index\/indexImplTable"],"PlanNodeId":7,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/SecondaryKeys\/Index\/indexImplTable","ReadRangesPointPrefixLen":"1","E-Rows":"0","IndexSelectionInfo":"index:Index: 
(0,0,1,0,1,0,0)","ReadRangesKeys":["Fk"],"Table":"SecondaryKeys\/Index\/indexImplTable","ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Subplan Name":"CTE Stage_5","Node Type":"Stage","Parent Relationship":"InitPlan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_1_0","Node Type":"Precompute_1_0","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":6,"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Node Type":"UnionAll","CTE Name":"Stage_5","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_1_1","Node Type":"Precompute_1_1","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"},{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":1,"Node Type":"UnionAll","CTE Name":"Stage_5","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Subplan Name":"CTE precompute_1_2","Node Type":"Precompute_1_2","Parent Relationship":"InitPlan","PlanNodeType":"Materialize"}],"Node Type":"Query","PlanNodeType":"Query","Stats":{"ResourcePoolId":"default"}},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/SecondaryKeys","writes":[{"columns":["Key","Fk"],"type":"MultiUpsert"}]},{"name":"\/Root\/SecondaryKeys\/Index\/indexImplTable","reads":[{"columns":["Fk","Key"],"scan_by":["Fk [1, 4)"],"type":"Scan"}],"writes":[{"columns":["Key","Fk"],"type":"MultiUpsert"},{"type":"MultiErase"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Operators":[{"Name":"Upsert","Table":"SecondaryKeys"}],"Plans":[{"PlanNodeId":7,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","IndexSelectionInfo":"index:Index: (0,0,1,0,1,0,0)","Table":"indexImplTable","ReadRangesKeys":["Fk"],"ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"},{"PlanNodeId":8,"Plans":[{"PlanNodeId":9,"Operators":[{"Name":"Delete","Table":"indexImplTable"}],"Plans":[{"PlanNodeId":15,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","IndexSelectionInfo":"index:Index: (0,0,1,0,1,0,0)","Table":"indexImplTable","ReadRangesKeys":["Fk"],"ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Delete"}],"Node Type":"Effect"},{"PlanNodeId":16,"Plans":[{"PlanNodeId":17,"Operators":[{"Name":"Upsert","Table":"indexImplTable"}],"Plans":[{"PlanNodeId":22,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","IndexSelectionInfo":"index:Index: (0,0,1,0,1,0,0)","Table":"indexImplTable","ReadRangesKeys":["Fk"],"ReadColumns":["Fk [1, 4)","Key"],"E-Cost":"0","ReadRangesExpectedSize":"3"}],"Node Type":"TableRangeScan"}],"Node Type":"Upsert"}],"Node Type":"Effect"}],"Node Type":"Query","PlanNodeType":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0}}} |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> test_sql_negative.py::test[watermarks-bad_column-default.txt] [FAIL] >> THealthCheckTest::ProtobufUnderLimitFor70LargeVdisksIssues [GOOD] >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues >> test_sql_streaming.py::test[pq-ReadWriteTopicWithSchema-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-WriteTwoTopics-default.txt] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindow-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowByStringKey-default.txt] 
>> test_sql_streaming.py::test[hop-GroupByHop-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopByStringKey-default.txt] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadata-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataInsideFilter-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteRead [GOOD] Test command err: 2026-01-04T02:42:51.168961Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:42:51.203128Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:42:51.203380Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:42:51.210827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:42:51.211090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:42:51.211294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:42:51.211444Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:42:51.211539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:42:51.211657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:42:51.211760Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:42:51.211879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:42:51.211974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:42:51.212102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:42:51.212225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:42:51.212362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:42:51.212471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:42:51.241430Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:42:51.241569Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:42:51.241615Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:42:51.241830Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:51.242015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:42:51.242098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:42:51.242149Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:42:51.242258Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:42:51.242321Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:42:51.242365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:42:51.242402Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:42:51.242589Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:51.242658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:42:51.242698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-01-04T02:42:51.242747Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:42:51.242842Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:42:51.242891Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:42:51.242929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:42:51.242956Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:42:51.243000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:42:51.243035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:42:51.243068Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:42:51.243115Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:42:51.243171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:42:51.243207Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:42:51.243441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:42:51.243494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:42:51.243533Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:42:51.243646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:42:51.243689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:42:51.243723Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:42:51.243767Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:42:51.243819Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:42:51.243898Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:42:51.243950Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:42:51.243986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:42:51.244016Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:42:51.244196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:42:51.244252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
d,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-04T02:42:56.542616Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-04T02:42:56.542975Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1056:2923];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-04T02:42:56.543207Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:56.543398Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:56.543608Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:56.543918Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:42:56.544146Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:56.544357Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:56.544829Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1057:2924] finished for tablet 9437184 2026-01-04T02:42:56.545475Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1056:2923];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.037},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.04}],"full":{"a":5803204,"name":"_full_task","f":5803204,"d_finished":0,"c":0,"l":5843785,"d":40581},"events":[{"name":"bootstrap","f":5803523,"d_finished":2053,"c":1,"l":5805576,"d":2053},{"a":5842755,"name":"ack","f":5840757,"d_finished":1763,"c":1,"l":5842520,"d":2793},{"a":5842737,"name":"processing","f":5805790,"d_finished":4995,"c":3,"l":5842524,"d":6043},{"name":"ProduceResults","f":5804963,"d_finished":3157,"c":6,"l":5843264,"d":3157},{"a":5843271,"name":"Finish","f":5843271,"d_finished":0,"c":0,"l":5843785,"d":514},{"name":"task_result","f":5805811,"d_finished":3160,"c":2,"l":5840499,"d":3160}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:56.545570Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1056:2923];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:42:56.546226Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1056:2923];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.037},{"events":["l_ProduceResults","f_Finish"],"t":0.04},{"events":["l_ack","l_processing","l_Finish"],"t":0.041}],"full":{"a":5803204,"name":"_full_task","f":5803204,"d_finished":0,"c":0,"l":5844509,"d":41305},"events":[{"name":"bootstrap","f":5803523,"d_finished":2053,"c":1,"l":5805576,"d":2053},{"a":5842755,"name":"ack","f":5840757,"d_finished":1763,"c":1,"l":5842520,"d":3517},{"a":5842737,"name":"processing","f":5805790,"d_finished":4995,"c":3,"l":5842524,"d":6767},{"name":"ProduceResults","f":5804963,"d_finished":3157,"c":6,"l":5843264,"d":3157},{"a":5843271,"name":"Finish","f":5843271,"d_finished":0,"c":0,"l":5844509,"d":1238},{"name":"task_result","f":5805811,"d_finished":3160,"c":2,"l":5840499,"d":3160}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:56.546329Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:42:56.499578Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2026-01-04T02:42:56.546412Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:42:56.546722Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] >> TColumnShardTestReadWrite::RebootWriteReadStandalone |92.3%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/fq/streaming_optimize/py3test >> test_sql_negative.py::test[watermarks-bad_column-default.txt] [FAIL] |92.3%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> TColumnShardTestReadWrite::WriteOverload+InStore >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 >> KqpLimits::QueryExecTimeout [GOOD] >> THealthCheckTest::TestNoSchemeShardResponse [GOOD] >> THealthCheckTest::TestReBootingTabletIsDead >> Normalizers::SubColumnsPortionsCleanerNormalizer+useSubcolumns >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation+UseSink [GOOD] >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgram [GOOD] Test command err: 2026-01-04T02:42:58.802320Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:42:58.826426Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:42:58.826630Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:42:58.832665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:42:58.832870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:42:58.833060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:42:58.833132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:42:58.833213Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:42:58.833293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:42:58.833351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:42:58.833428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:42:58.833495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2026-01-04T02:42:58.833586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:42:58.833660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:42:58.833758Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:42:58.833860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:42:58.853983Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:42:58.854122Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:42:58.854179Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:42:58.854356Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:58.854480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:42:58.854566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:42:58.854616Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:42:58.854728Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:42:58.854811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:42:58.854874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:42:58.854909Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:42:58.855092Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:58.855170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:42:58.855222Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:42:58.855254Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:42:58.855333Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:42:58.855375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:42:58.855400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:42:58.855419Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:42:58.855451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:42:58.855481Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:42:58.855505Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:42:58.855539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:42:58.855572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:42:58.855600Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:42:58.855750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:42:58.855850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:42:58.855891Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:42:58.855981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:42:58.856012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:42:58.856040Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:42:58.856092Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:42:58.856123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:42:58.856165Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:42:58.856201Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:42:58.856225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:42:58.856245Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:42:58.856337Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:42:58.856366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
urce_idx=0; 2026-01-04T02:42:59.696496Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_idx=0;tablet_id=9437184;source_idx=0;tablet_id=9437184;fline=source.cpp:346;source_idx=0; 2026-01-04T02:42:59.696663Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_idx=0;tablet_id=9437184;source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:47;scan_step=name=PROGRAM_EXECUTION;details={};;scan_step_idx=3; 2026-01-04T02:42:59.696722Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:47;scan_step=name=PROGRAM_EXECUTION;details={};;scan_step_idx=3; 2026-01-04T02:42:59.696844Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: external_task_id=;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2026-01-04T02:42:59.697041Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=; 2026-01-04T02:42:59.697123Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:33;scan_step=name=PROGRAM_EXECUTION;details={};;scan_step_idx=3; 2026-01-04T02:42:59.697167Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=abstract.cpp:30;execute=AssembleOriginalData; 2026-01-04T02:42:59.697359Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=abstract.cpp:30;execute=AssembleOriginalData; 2026-01-04T02:42:59.697442Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=abstract.cpp:30;execute=Calculation; 2026-01-04T02:42:59.698772Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=abstract.cpp:30;execute=Filter; 2026-01-04T02:42:59.698922Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:33;scan_step=name=BUILD_STAGE_RESULT;details={};;scan_step_idx=4; 2026-01-04T02:42:59.699046Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;source_idx=0;tablet_id=9437184;fline=script_cursor.cpp:25;event=empty_result;scan_step_idx=5; 2026-01-04T02:42:59.699199Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:350:2362];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:100;event=TEvTaskProcessedResult; 2026-01-04T02:42:59.699232Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:350:2362];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=fetching.cpp:17;event=apply; 2026-01-04T02:42:59.699300Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:350:2362];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=0;fline=abstract.cpp:22;event=OnSourcePrepared;source_idx=0;prepared=1; 2026-01-04T02:42:59.699336Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:350:2362];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;sync_point=RESULT;aborted=0;tablet_id=9437184;prepared_source_idx=0;fline=abstract.cpp:30;event=finish_source;source_idx=0; 2026-01-04T02:42:59.699518Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:350:2362];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:59.699669Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:350:2362];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:59.699899Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:350:2362];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:42:59.700082Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:350:2362];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:59.700337Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:350:2362];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:59.700588Z node 1 :TX_COLUMNSHARD_SCAN 
DEBUG: actor.cpp:457: Scan [1:350:2362] finished for tablet 9437184 2026-01-04T02:42:59.700979Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:350:2362];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:349:2361];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.007}],"full":{"a":1444082,"name":"_full_task","f":1444082,"d_finished":0,"c":0,"l":1451642,"d":7560},"events":[{"name":"bootstrap","f":1444294,"d_finished":1193,"c":1,"l":1445487,"d":1193},{"a":1450879,"name":"ack","f":1450879,"d_finished":0,"c":0,"l":1451642,"d":763},{"a":1450865,"name":"processing","f":1445639,"d_finished":2055,"c":2,"l":1450721,"d":2832},{"name":"ProduceResults","f":1445074,"d_finished":1231,"c":4,"l":1451364,"d":1231},{"a":1451370,"name":"Finish","f":1451370,"d_finished":0,"c":0,"l":1451642,"d":272},{"name":"task_result","f":1445652,"d_finished":2023,"c":2,"l":1450719,"d":2023}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:59.701049Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:350:2362];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:349:2361];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:42:59.701536Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:350:2362];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:349:2361];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ProduceResults","f_Finish"],"t":0.007},{"events":["l_ack","l_processing","l_Finish"],"t":0.008}],"full":{"a":1444082,"name":"_full_task","f":1444082,"d_finished":0,"c":0,"l":1452082,"d":8000},"events":[{"name":"bootstrap","f":1444294,"d_finished":1193,"c":1,"l":1445487,"d":1193},{"a":1450879,"name":"ack","f":1450879,"d_finished":0,"c":0,"l":1452082,"d":1203},{"a":1450865,"name":"processing","f":1445639,"d_finished":2055,"c":2,"l":1450721,"d":3272},{"name":"ProduceResults","f":1445074,"d_finished":1231,"c":4,"l":1451364,"d":1231},{"a":1451370,"name":"Finish","f":1451370,"d_finished":0,"c":0,"l":1452082,"d":712},{"name":"task_result","f":1445652,"d_finished":2023,"c":2,"l":1450719,"d":2023}],"id":"9437184::2"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:42:59.701652Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:350:2362];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:42:59.691010Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-04T02:42:59.701718Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:350:2362];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:42:59.701938Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:350:2362];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpParams::CheckCacheWithRecompilationQuery [GOOD] Test command err: Trying to start YDB, gRPC: 62282, MsgBus: 8073 ... 
waiting for SysViewsRoster update finished 2026-01-04T02:42:04.794394Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:04.936683Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:04.937174Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:04.937283Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:05.208008Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:05.208138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:05.270732Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494521985128 != 1767494521985131 2026-01-04T02:42:05.287148Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:05.333228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:05.459380Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:42:05.801454Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:05.801518Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:05.801560Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:05.801764Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:05.814132Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:06.157728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:06.267739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:06.509249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:06.859335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:07.158572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.008964Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1903:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.009141Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.010399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1976:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.010522Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:08.045677Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.240758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.473652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.755365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:08.989528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:09.251766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:09.527055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:09.833673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:10.179570Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2788:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:10.179689Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:10.180189Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2792:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:10.180270Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:10.180345Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2795:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:10.190268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:10.357664Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2797:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:42:10.422335Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2857:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:12.356372Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:3169:4452] TxId: 281474976715673. Ctx: { TraceId: 01ke3e4f7yb8vv05n7rc7fj9v1, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NzBjMGE5My04ZjcwOGIyNy00MjYwZWUxYy02OTBmODlkOQ==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# STATUS_CODE_UNSPECIFIED Issues# trace_id# 2026-01-04T02:42:12.357702Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:3177:4474], TxId: 28147497671567 ... lterResource ok# false data# peer# 2026-01-04T02:42:59.797006Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259394d880] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DropResource ok# false data# peer# 2026-01-04T02:42:59.797185Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259394ca80] received request Name# Ydb.RateLimiter.V1.RateLimiterService/ListResources ok# false data# peer# 2026-01-04T02:42:59.797238Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259394d180] received request Name# Ydb.RateLimiter.V1.RateLimiterService/DescribeResource ok# false data# peer# 2026-01-04T02:42:59.797438Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259394c380] received request Name# Ydb.RateLimiter.V1.RateLimiterService/AcquireResource ok# false data# peer# 2026-01-04T02:42:59.797489Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593939d80] received request Name# Ydb.DataStreams.V1.DataStreamsService/CreateStream ok# false data# peer# 2026-01-04T02:42:59.797675Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593959c80] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreams ok# false data# peer# 2026-01-04T02:42:59.797739Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593958e80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeleteStream ok# false data# peer# 2026-01-04T02:42:59.797938Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259393d580] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStream ok# false data# peer# 2026-01-04T02:42:59.797975Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259392b680] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListShards ok# false data# peer# 2026-01-04T02:42:59.798185Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259392a880] received request Name# Ydb.DataStreams.V1.DataStreamsService/SetWriteQuota ok# false data# peer# 2026-01-04T02:42:59.798208Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593940d80] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStream ok# false data# peer# 2026-01-04T02:42:59.798437Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259393f180] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecords ok# false data# peer# 2026-01-04T02:42:59.798478Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593929a80] received request Name# Ydb.DataStreams.V1.DataStreamsService/PutRecord ok# false data# peer# 2026-01-04T02:42:59.798669Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593958080] received request Name# Ydb.DataStreams.V1.DataStreamsService/GetRecords ok# false data# peer# 
2026-01-04T02:42:59.798723Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259394df80] received request Name# Ydb.DataStreams.V1.DataStreamsService/GetShardIterator ok# false data# peer# 2026-01-04T02:42:59.798951Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259394ed80] received request Name# Ydb.DataStreams.V1.DataStreamsService/SubscribeToShard ok# false data# peer# 2026-01-04T02:42:59.798970Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259394fb80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeLimits ok# false data# peer# 2026-01-04T02:42:59.799226Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593955680] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamSummary ok# false data# peer# 2026-01-04T02:42:59.799263Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593957980] received request Name# Ydb.DataStreams.V1.DataStreamsService/DecreaseStreamRetentionPeriod ok# false data# peer# 2026-01-04T02:42:59.799490Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259389b080] received request Name# Ydb.DataStreams.V1.DataStreamsService/IncreaseStreamRetentionPeriod ok# false data# peer# 2026-01-04T02:42:59.799514Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259395c680] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateShardCount ok# false data# peer# 2026-01-04T02:42:59.799757Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259392ee80] received request Name# Ydb.DataStreams.V1.DataStreamsService/UpdateStreamMode ok# false data# peer# 2026-01-04T02:42:59.799802Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593933b80] received request Name# Ydb.DataStreams.V1.DataStreamsService/RegisterStreamConsumer ok# false data# peer# 2026-01-04T02:42:59.800032Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593930a80] received request Name# Ydb.DataStreams.V1.DataStreamsService/DeregisterStreamConsumer ok# false data# peer# 2026-01-04T02:42:59.800062Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593938180] received request Name# Ydb.DataStreams.V1.DataStreamsService/DescribeStreamConsumer ok# false data# peer# 2026-01-04T02:42:59.800285Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593936c80] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListStreamConsumers ok# false data# peer# 2026-01-04T02:42:59.800336Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593935080] received request Name# Ydb.DataStreams.V1.DataStreamsService/AddTagsToStream ok# false data# peer# 2026-01-04T02:42:59.800529Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593935780] received request Name# Ydb.DataStreams.V1.DataStreamsService/DisableEnhancedMonitoring ok# false data# peer# 2026-01-04T02:42:59.800587Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593933480] received request Name# Ydb.DataStreams.V1.DataStreamsService/EnableEnhancedMonitoring ok# false data# peer# 2026-01-04T02:42:59.800768Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593931f80] received request Name# Ydb.DataStreams.V1.DataStreamsService/MergeShards ok# false data# peer# 2026-01-04T02:42:59.800769Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593932680] received request Name# Ydb.DataStreams.V1.DataStreamsService/ListTagsForStream ok# false data# peer# 2026-01-04T02:42:59.800951Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593931880] received request Name# Ydb.DataStreams.V1.DataStreamsService/RemoveTagsFromStream ok# false data# peer# 2026-01-04T02:42:59.801012Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259392c480] received request Name# 
Ydb.DataStreams.V1.DataStreamsService/SplitShard ok# false data# peer# 2026-01-04T02:42:59.801120Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259392f580] received request Name# Ydb.DataStreams.V1.DataStreamsService/StartStreamEncryption ok# false data# peer# 2026-01-04T02:42:59.801276Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259392af80] received request Name# Ydb.DataStreams.V1.DataStreamsService/StopStreamEncryption ok# false data# peer# 2026-01-04T02:42:59.801288Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593942980] received request Name# Ydb.Monitoring.V1.MonitoringService/SelfCheck ok# false data# peer# 2026-01-04T02:42:59.801462Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593944580] received request Name# Ydb.Monitoring.V1.MonitoringService/NodeCheck ok# false data# peer# 2026-01-04T02:42:59.801526Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259393a480] received request Name# Ydb.Monitoring.V1.MonitoringService/ClusterState ok# false data# peer# 2026-01-04T02:42:59.801633Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593949280] received request Name# Ydb.Query.V1.QueryService/CreateSession ok# false data# peer# 2026-01-04T02:42:59.801772Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259394a780] received request Name# Ydb.Query.V1.QueryService/DeleteSession ok# false data# peer# 2026-01-04T02:42:59.801845Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593948b80] received request Name# Ydb.Query.V1.QueryService/AttachSession ok# false data# peer# 2026-01-04T02:42:59.802018Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593946f80] received request Name# Ydb.Query.V1.QueryService/BeginTransaction ok# false data# peer# 2026-01-04T02:42:59.802048Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593945a80] received request Name# Ydb.Query.V1.QueryService/CommitTransaction ok# false data# peer# 2026-01-04T02:42:59.802197Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593945380] received request Name# Ydb.Query.V1.QueryService/RollbackTransaction ok# false data# peer# 2026-01-04T02:42:59.802293Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593947680] received request Name# Ydb.Query.V1.QueryService/ExecuteQuery ok# false data# peer# 2026-01-04T02:42:59.802375Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259394a080] received request Name# Ydb.Query.V1.QueryService/ExecuteScript ok# false data# peer# 2026-01-04T02:42:59.802552Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259394ae80] received request Name# Ydb.Query.V1.QueryService/FetchScriptResults ok# false data# peer# 2026-01-04T02:42:59.802556Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593946180] received request Name# Ydb.Tablet.V1.TabletService/ExecuteTabletMiniKQL ok# false data# peer# 2026-01-04T02:42:59.802744Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593944c80] received request Name# Ydb.Tablet.V1.TabletService/ChangeTabletSchema ok# false data# peer# 2026-01-04T02:42:59.802812Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593943780] received request Name# Ydb.Tablet.V1.TabletService/RestartTablet ok# false data# peer# 2026-01-04T02:42:59.802922Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593943080] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogStore ok# false data# peer# 2026-01-04T02:42:59.803078Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593943e80] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogStore ok# false data# peer# 2026-01-04T02:42:59.803088Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593942280] received request 
Name# Ydb.LogStore.V1.LogStoreService/DescribeLogStore ok# false data# peer# 2026-01-04T02:42:59.803251Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593937a80] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogStore ok# false data# peer# 2026-01-04T02:42:59.803331Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593948480] received request Name# Ydb.LogStore.V1.LogStoreService/CreateLogTable ok# false data# peer# 2026-01-04T02:42:59.803446Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593949980] received request Name# Ydb.LogStore.V1.LogStoreService/DescribeLogTable ok# false data# peer# 2026-01-04T02:42:59.803596Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259394b580] received request Name# Ydb.LogStore.V1.LogStoreService/DropLogTable ok# false data# peer# 2026-01-04T02:42:59.803621Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593946880] received request Name# Ydb.LogStore.V1.LogStoreService/AlterLogTable ok# false data# peer# 2026-01-04T02:42:59.803828Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259392a180] received request Name# Ydb.Auth.V1.AuthService/Login ok# false data# peer# 2026-01-04T02:42:59.803865Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259392cb80] received request Name# Ydb.Replication.V1.ReplicationService/DescribeReplication ok# false data# peer# 2026-01-04T02:42:59.803994Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d259392bd80] received request Name# Ydb.Replication.V1.ReplicationService/DescribeTransfer ok# false data# peer# 2026-01-04T02:42:59.804112Z node 7 :GRPC_SERVER DEBUG: logger.cpp:36: [0x7d2593926980] received request Name# Ydb.View.V1.ViewService/DescribeView ok# false data# peer# |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QueryExecTimeout [GOOD] Test command err: Trying to start YDB, gRPC: 17407, MsgBus: 9670 2026-01-04T02:41:22.347792Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330999479893816:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:22.347850Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:22.529330Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:22.568280Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:22.568390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:22.571592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:22.647774Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:22.742643Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:22.742672Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:22.742680Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: 
failed to initialize from file: (empty maybe) 2026-01-04T02:41:22.742789Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:22.789360Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:23.136603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:23.186954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.299851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.382870Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:23.434333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.501559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:25.507000Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331012364797536:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:25.507160Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:25.507499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331012364797546:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:25.507548Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:25.848876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:25.879596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:25.907213Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:25.931218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:25.961100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:25.993195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:26.045681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:26.086531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:26.160798Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331016659765711:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:26.160895Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:26.161079Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331016659765717:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:26.161120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331016659765716:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:26.161168Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:26.164657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:26.174429Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331016659765720:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:26.244603Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331016659765771:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:27.349942Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330999479893816:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:27.350020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:27.708750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:29.113132Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7591331025249701104:2567], SessionActorId: [1:7591331025249701076:2567], statusCode=PRECONDITI ... xists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591331280604129904:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:28.502679Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:28.524071Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:28.621605Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:28.635160Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:28.635231Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:28.638822Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:28.686513Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:28.686538Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:28.686548Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:28.686634Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:28.764686Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:29.283493Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:29.302394Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:29.375942Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:29.518863Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:29.579116Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:29.672826Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:32.897455Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331297784000893:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:32.897539Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:32.897875Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331297784000902:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:32.897925Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:32.977095Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:33.010072Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:33.050794Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:33.082163Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:33.119724Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:33.197052Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:33.240450Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:33.300572Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:33.383942Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331302078969070:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:33.384080Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:33.384174Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331302078969075:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:33.384293Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331302078969077:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:33.384354Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:33.388430Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:33.398393Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331302078969079:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:33.456105Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331302078969130:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:33.502125Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331280604129904:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:33.503616Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:43.572086Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:42:43.572122Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:49.101317Z node 5 :KQP_PROXY WARN: kqp_proxy_service.cpp:1330: Reply process error for request 271646720, status: TIMEOUT, issues: {
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=5&id=MWYzM2I5NjItNjU4ZDE2YjUtMTgzNzg0MzEtMjQ0Y2M1ZjI= }
: Error: Query did not complete within specified timeout 500ms, session id ydb://session/3?node_id=5&id=MWYzM2I5NjItNjU4ZDE2YjUtMTgzNzg0MzEtMjQ0Y2M1ZjI= |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> THealthCheckTest::BridgeTwoGroups [GOOD] >> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific >> KqpStats::OneShardNonLocalExec-UseSink [GOOD] >> Normalizers::CleanEmptyPortionsNormalizer [GOOD] >> Normalizers::CleanUnusedTablesNormalizer >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot >> TColumnShardTestReadWrite::WriteReadZSTD ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpStats::OneShardNonLocalExec-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 26070, MsgBus: 19792 2026-01-04T02:42:09.733270Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331200844225405:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:09.733484Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:09.952566Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:09.985520Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:09.985678Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:10.035641Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:10.048098Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:10.163614Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:10.163637Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:10.163645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:10.163721Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:10.244926Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:10.561964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:10.570690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:42:10.634166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:10.742155Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:10.750721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:10.897297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:10.963298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:13.041749Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331218024096424:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:13.041906Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:13.042222Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331218024096434:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:13.042310Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:13.374245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:13.411822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:13.442460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:13.472849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:13.502695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:13.538077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:13.572753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:13.619050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:13.713265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331218024097304:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:13.713346Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:13.713583Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331218024097310:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:13.713593Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331218024097309:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:13.713628Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:13.720115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:13.738530Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331218024097313:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:13.819091Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331218024097366:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:14.732300Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331200844225405:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:14.732369Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17813, MsgBus: 28060 2026-01-04T02:42:16.625918Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331229422019891:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:16.625975Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.c ... (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:43.308677Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:43.308756Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:43.338359Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:43.340160Z node 5 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2026-01-04T02:42:43.342772Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:43.419904Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:43.419929Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:43.419939Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:43.420033Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:43.440401Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:43.441378Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:43.931974Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:43.952482Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:44.035890Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:44.130538Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:44.153492Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:44.246479Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:44.357763Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:47.138872Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331364279041574:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:47.138985Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:47.139261Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331364279041583:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:47.139339Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:47.207832Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:47.315879Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:47.369242Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:47.415956Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:47.466719Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:47.518448Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:47.570045Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:47.630698Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:47.715926Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331364279042751:2428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:47.715992Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:47.716096Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331364279042756:2431], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:47.716160Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331364279042758:2432], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:47.716207Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:47.719338Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:47.737577Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331364279042760:2433], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:42:47.799730Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331364279042837:4897] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:48.114710Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331347099169971:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:48.114777Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:48.139326Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[6:7591331346898896774:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:48.139423Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:58.255383Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:42:58.255407Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> Normalizers::SchemaVersionsNormalizer >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 >> THealthCheckTest::BridgeGroupDegradedInBothPiles [GOOD] >> THealthCheckTest::BridgeGroupDegradedInOnePile >> THealthCheckTest::StorageLimit87 [GOOD] >> THealthCheckTest::StorageLimit80 >> THealthCheckTest::Issues100GroupsMerging [GOOD] >> THealthCheckTest::Issues100VCardMerging >> TColumnShardTestReadWrite::ReadAggregate+SimpleReader >> THealthCheckTest::GreenStatusWhenCreatingGroup [GOOD] >> THealthCheckTest::GreenStatusWhenInitPending >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] >> TColumnShardTestReadWrite::WriteOverload+InStore [GOOD] >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] >> THealthCheckTest::ShardsLimit999 [GOOD] >> THealthCheckTest::ShardsLimit995 >> THealthCheckTest::ProtobufUnderLimitFor100LargeVdisksIssues [GOOD] >> THealthCheckTest::ServerlessBadTablets >> Normalizers::CleanUnusedTablesNormalizer [GOOD] >> THealthCheckTest::OnlyDiskIssueOnSpaceIssues [GOOD] >> THealthCheckTest::OnlyDiskIssueOnInitialPDisks >> DataShardVolatile::DistributedWriteShardRestartAfterExpectation-UseSink [GOOD] >> DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked >> TColumnShardTestReadWrite::ReadGroupBy-SimpleReader >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot >> Normalizers::SchemaVersionsNormalizer [GOOD] >> Normalizers::RemoveWriteIdNormalizer ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteOverload+InStore [GOOD] Test command err: 2026-01-04T02:43:00.679979Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:00.712541Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 
2026-01-04T02:43:00.712838Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:00.720482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:00.720755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:00.720985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:00.721094Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:00.721219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:00.721330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:00.721448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:00.721571Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:00.721682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:00.721788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:00.721949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:00.722066Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:00.722170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:00.751270Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:00.751406Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:00.751456Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:00.751649Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:00.751862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:00.751948Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:00.751995Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:00.752096Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:00.752159Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:00.752214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:00.752242Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:00.752467Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:00.752542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:00.752584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:00.752616Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:00.752703Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:00.752774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:00.752821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-04T02:43:00.752852Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:00.752896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:00.752930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:00.752966Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:00.753006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:00.753046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:00.753087Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:00.753310Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:00.753367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:00.753403Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:00.753520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:00.753562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:00.753594Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:00.753639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:00.753680Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:00.753719Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-04T02:43:00.753774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:00.753830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:00.753863Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:00.753999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:00.754043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... G: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2026-01-04T02:43:05.782108Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:05.796474Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=173f49ae-e91711f0-8264b25b-b3d01dbe; 2026-01-04T02:43:05.796697Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=188;count=3; 2026-01-04T02:43:05.796740Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=380;count=4;size_of_meta=112; 2026-01-04T02:43:05.796780Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=540;count=2;size_of_portion=192; 2026-01-04T02:43:05.797153Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2026-01-04T02:43:05.797232Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=3;operation_id=2; 2026-01-04T02:43:05.809036Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2026-01-04T02:43:05.809226Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=5;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:05.810954Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=17bab56c-e91711f0-a8229c80-7fa9da3a; 2026-01-04T02:43:05.811180Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=282;count=5; 2026-01-04T02:43:05.811238Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=570;count=6;size_of_meta=112; 2026-01-04T02:43:05.811293Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=810;count=3;size_of_portion=192; 2026-01-04T02:43:05.811772Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2026-01-04T02:43:05.811861Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=4;operation_id=3; 2026-01-04T02:43:05.823367Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2026-01-04T02:43:05.823575Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=6;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:05.824932Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=183202de-e91711f0-9aa31305-39162992; 2026-01-04T02:43:05.825153Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=376;count=7; 2026-01-04T02:43:05.825195Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=760;count=8;size_of_meta=112; 2026-01-04T02:43:05.825234Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=1080;count=4;size_of_portion=192; 2026-01-04T02:43:05.825606Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2026-01-04T02:43:05.825678Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=5;operation_id=4; 2026-01-04T02:43:05.837134Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2026-01-04T02:43:05.837286Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:05.855987Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:227;event=register_operation;operation_id=5;last=5; 2026-01-04T02:43:05.856090Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:27;writing_size=6330728;operation_id=18d41c86-e91711f0-a3fed255-177a0b64;in_flight=1;size_in_flight=6330728; 2026-01-04T02:43:06.436698Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=5;path_id={internal: 1000000185, ss: 
1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=8246112;count=1;actions=__DEFAULT,;waiting=1;; 2026-01-04T02:43:06.476575Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=18d41c86-e91711f0-a3fed255-177a0b64; 2026-01-04T02:43:06.476802Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=470;count=9; 2026-01-04T02:43:06.476863Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=950;count=10;size_of_meta=112; 2026-01-04T02:43:06.476912Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=1350;count=5;size_of_portion=192; 2026-01-04T02:43:06.477440Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2026-01-04T02:43:06.477546Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=6;operation_id=5; 2026-01-04T02:43:06.489638Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2026-01-04T02:43:06.489908Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=8;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; |92.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::RebootWriteReadStandalone [GOOD] Test command err: 2026-01-04T02:43:00.619322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:00.652922Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:00.653202Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute 
at tablet 9437184 2026-01-04T02:43:00.661261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:00.661547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:00.661795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:00.661957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:00.662060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:00.662196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:00.662318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:00.662429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:00.662548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:00.662669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:00.662817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:00.662937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:00.663053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:00.693040Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:00.693197Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:00.693252Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:00.693440Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:00.693612Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:00.693768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:00.693854Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:00.693975Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:00.694046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:00.694102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:00.694137Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:00.694331Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:00.694408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:00.694467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:00.694499Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:00.694612Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:00.694672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:00.694754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-04T02:43:00.694787Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:00.694847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:00.694888Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:00.694922Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:00.694989Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:00.695042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:00.695078Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:00.695369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:00.695427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:00.695467Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:00.695610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:00.695656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:00.695709Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:00.695759Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:00.695821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:00.695863Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-04T02:43:00.695919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:00.695960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:00.696006Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:00.696173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:00.696247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... s=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-04T02:43:06.340476Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-04T02:43:06.340792Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1056:2923];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-04T02:43:06.340997Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:06.341160Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:06.341336Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:06.341571Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:06.341769Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:06.341968Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:06.342383Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1057:2924] finished for tablet 9437184 2026-01-04T02:43:06.342923Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1056:2923];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.029},{"events":["f_ack"],"t":0.03},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.032}],"full":{"a":6175110,"name":"_full_task","f":6175110,"d_finished":0,"c":0,"l":6207880,"d":32770},"events":[{"name":"bootstrap","f":6175373,"d_finished":1574,"c":1,"l":6176947,"d":1574},{"a":6206969,"name":"ack","f":6205235,"d_finished":1551,"c":1,"l":6206786,"d":2462},{"a":6206950,"name":"processing","f":6177149,"d_finished":4495,"c":3,"l":6206789,"d":5425},{"name":"ProduceResults","f":6176465,"d_finished":2749,"c":6,"l":6207423,"d":2749},{"a":6207429,"name":"Finish","f":6207429,"d_finished":0,"c":0,"l":6207880,"d":451},{"name":"task_result","f":6177169,"d_finished":2867,"c":2,"l":6205003,"d":2867}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:06.343005Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1056:2923];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:43:06.343553Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1056:2923];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["l_task_result"],"t":0.029},{"events":["f_ack"],"t":0.03},{"events":["l_ProduceResults","f_Finish"],"t":0.032},{"events":["l_ack","l_processing","l_Finish"],"t":0.033}],"full":{"a":6175110,"name":"_full_task","f":6175110,"d_finished":0,"c":0,"l":6208477,"d":33367},"events":[{"name":"bootstrap","f":6175373,"d_finished":1574,"c":1,"l":6176947,"d":1574},{"a":6206969,"name":"ack","f":6205235,"d_finished":1551,"c":1,"l":6206786,"d":3059},{"a":6206950,"name":"processing","f":6177149,"d_finished":4495,"c":3,"l":6206789,"d":6022},{"name":"ProduceResults","f":6176465,"d_finished":2749,"c":6,"l":6207423,"d":2749},{"a":6207429,"name":"Finish","f":6207429,"d_finished":0,"c":0,"l":6208477,"d":1048},{"name":"task_result","f":6177169,"d_finished":2867,"c":2,"l":6205003,"d":2867}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:06.343635Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:43:06.305836Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2026-01-04T02:43:06.343671Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:43:06.343905Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::SubColumnsPortionsCleanerNormalizer+useSubcolumns [GOOD] >> Normalizers::SubColumnsPortionsCleanerNormalizer-useSubcolumns 
>> THealthCheckTest::DontIgnoreServerlessWithExclusiveNodesWhenNotSpecific [GOOD] >> THealthCheckTest::BridgeNoBscResponse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadDuplicate [GOOD] Test command err: 2026-01-04T02:42:47.249923Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:42:47.282350Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:42:47.282657Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:42:47.290142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:42:47.290406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:42:47.290611Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:42:47.290724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:42:47.290851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:42:47.290952Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:42:47.291082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:42:47.291194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:42:47.291293Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:42:47.291416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.291534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:42:47.291650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:42:47.291777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:42:47.321221Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:42:47.321364Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:42:47.321433Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:42:47.321644Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.321797Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:42:47.321900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:42:47.321942Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:42:47.322043Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:42:47.322117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:42:47.322161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:42:47.322191Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:42:47.322407Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.322467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:42:47.322503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:42:47.322530Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:42:47.322618Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:42:47.322697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:42:47.322740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:42:47.322767Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:42:47.322814Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:42:47.322847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:42:47.322876Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:42:47.322921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:42:47.322953Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:42:47.323004Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:42:47.323208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:42:47.323261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:42:47.323292Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:42:47.323416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:42:47.323460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.323487Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.323528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:42:47.323563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:42:47.323605Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:42:47.323658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:42:47.323693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:42:47.323722Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:42:47.323855Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:42:47.323892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... [1:3594:5600];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:07.220952Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3594:5600];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:10;schema=timestamp: timestamp[us];);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:07.220994Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3594:5600];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:43:07.221133Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3594:5600];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=1;rows=10; 2026-01-04T02:43:07.221186Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3594:5600];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=10;batch_columns=timestamp; 2026-01-04T02:43:07.221451Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3594:5600];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:3587:5593];bytes=80;rows=10;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us]; 2026-01-04T02:43:07.221612Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3594:5600];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:07.221752Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3594:5600];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:07.221893Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3594:5600];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:07.222434Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3594:5600];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 
2026-01-04T02:43:07.222580Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3594:5600];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:07.222709Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3594:5600];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:07.222955Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:3594:5600] finished for tablet 9437184 2026-01-04T02:43:07.223491Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:3594:5600];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:3587:5593];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.076},{"events":["f_ack"],"t":0.077},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.079}],"full":{"a":20512397,"name":"_full_task","f":20512397,"d_finished":0,"c":0,"l":20592248,"d":79851},"events":[{"name":"bootstrap","f":20513058,"d_finished":1968,"c":1,"l":20515026,"d":1968},{"a":20591648,"name":"ack","f":20589968,"d_finished":1191,"c":1,"l":20591159,"d":1791},{"a":20591631,"name":"processing","f":20516015,"d_finished":15759,"c":14,"l":20591162,"d":16376},{"name":"ProduceResults","f":20514294,"d_finished":4288,"c":17,"l":20591972,"d":4288},{"a":20591976,"name":"Finish","f":20591976,"d_finished":0,"c":0,"l":20592248,"d":272},{"name":"task_result","f":20516036,"d_finished":14315,"c":13,"l":20588755,"d":14315}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:07.223582Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:3594:5600];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:3587:5593];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:43:07.224054Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:3594:5600];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:3587:5593];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.076},{"events":["f_ack"],"t":0.077},{"events":["l_ProduceResults","f_Finish"],"t":0.079},{"events":["l_ack","l_processing","l_Finish"],"t":0.08}],"full":{"a":20512397,"name":"_full_task","f":20512397,"d_finished":0,"c":0,"l":20592866,"d":80469},"events":[{"name":"bootstrap","f":20513058,"d_finished":1968,"c":1,"l":20515026,"d":1968},{"a":20591648,"name":"ack","f":20589968,"d_finished":1191,"c":1,"l":20591159,"d":2409},{"a":20591631,"name":"processing","f":20516015,"d_finished":15759,"c":14,"l":20591162,"d":16994},{"name":"ProduceResults","f":20514294,"d_finished":4288,"c":17,"l":20591972,"d":4288},{"a":20591976,"name":"Finish","f":20591976,"d_finished":0,"c":0,"l":20592866,"d":890},{"name":"task_result","f":20516036,"d_finished":14315,"c":13,"l":20588755,"d":14315}],"id":"9437184::49"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:07.224125Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3594:5600];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:43:07.137677Z;index_granules=0;index_portions=12;index_batches=0;schema_columns=1;filter_columns=0;additional_columns=0;compacted_portions_bytes=16464;inserted_portions_bytes=14016;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=30480;selected_rows=0; 2026-01-04T02:43:07.224166Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:3594:5600];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:43:07.224291Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:3594:5600];TabletId=9437184;ScanId=0;TxId=18446744073709551615;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1;column_names=timestamp;);;program_input=(column_ids=1;column_names=timestamp;);;; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:106;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:13;event=new_stage;stage=Aborted;task_id=1986225a-e91711f0-ae2b9c9e-7d1c3250; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> KqpQuery::ExecuteWriteQuery [GOOD] >> TColumnShardTestReadWrite::ReadAggregate+SimpleReader [GOOD] >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::CleanUnusedTablesNormalizer [GOOD] Test command err: 2026-01-04T02:42:54.289417Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:42:54.311123Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:42:54.311380Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:42:54.317146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2026-01-04T02:42:54.317334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2026-01-04T02:42:54.317479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:42:54.317597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:42:54.317665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:42:54.317744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:42:54.317845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:42:54.317919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:42:54.317982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:42:54.318078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:42:54.318176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:42:54.318281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:42:54.318369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:42:54.318474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:42:54.340136Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:42:54.340266Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2026-01-04T02:42:54.340320Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2026-01-04T02:42:54.340598Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:323;tasks_for_remove=0;distribution=; 2026-01-04T02:42:54.340712Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2026-01-04T02:42:54.340778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2026-01-04T02:42:54.340813Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2026-01-04T02:42:54.340908Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:54.340966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:42:54.340994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:42:54.341025Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2026-01-04T02:42:54.341084Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:42:54.341118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:42:54.341142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:42:54.341161Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2026-01-04T02:42:54.341280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:54.341320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:42:54.341349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:42:54.341378Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2026-01-04T02:42:54.341448Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:42:54.341484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:42:54.341519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:42:54.341555Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:42:54.341596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:42:54.341631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:42:54.341651Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:42:54.341679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:42:54.341733Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:42:54.341789Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:42:54.341971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:42:54.342000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:42:54.342018Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:42:54.342103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:42:54.342133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:42:54.342158Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:42:54.342207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:42:54.342234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksM ... 
};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-04T02:43:07.659458Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2026-01-04T02:43:07.659493Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:43:07.659529Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:43:07.659987Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:07.660109Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:07.660140Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:43:07.660254Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2026-01-04T02:43:07.660305Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2026-01-04T02:43:07.660421Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:328:2329];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2026-01-04T02:43:07.660500Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:07.660580Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:07.660707Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:07.660806Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:07.660910Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:07.661011Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:07.661354Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:330:2330] finished for tablet 9437184 2026-01-04T02:43:07.661820Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:328:2329];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.055},{"events":["l_task_result"],"t":0.722},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.723}],"full":{"a":13117830,"name":"_full_task","f":13117830,"d_finished":0,"c":0,"l":13841778,"d":723948},"events":[{"name":"bootstrap","f":13118108,"d_finished":1720,"c":1,"l":13119828,"d":1720},{"a":13841163,"name":"ack","f":13173554,"d_finished":285677,"c":421,"l":13841115,"d":286292},{"a":13841156,"name":"processing","f":13120089,"d_finished":607512,"c":843,"l":13841118,"d":608134},{"name":"ProduceResults","f":13119253,"d_finished":494751,"c":1266,"l":13841407,"d":494751},{"a":13841413,"name":"Finish","f":13841413,"d_finished":0,"c":0,"l":13841778,"d":365},{"name":"task_result","f":13120113,"d_finished":312443,"c":422,"l":13839915,"d":312443}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:07.661906Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:328:2329];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:43:07.662409Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:328:2329];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.001},{"events":["f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.055},{"events":["l_task_result"],"t":0.722},{"events":["l_ProduceResults","f_Finish"],"t":0.723},{"events":["l_ack","l_processing","l_Finish"],"t":0.724}],"full":{"a":13117830,"name":"_full_task","f":13117830,"d_finished":0,"c":0,"l":13842323,"d":724493},"events":[{"name":"bootstrap","f":13118108,"d_finished":1720,"c":1,"l":13119828,"d":1720},{"a":13841163,"name":"ack","f":13173554,"d_finished":285677,"c":421,"l":13841115,"d":286837},{"a":13841156,"name":"processing","f":13120089,"d_finished":607512,"c":843,"l":13841118,"d":608679},{"name":"ProduceResults","f":13119253,"d_finished":494751,"c":1266,"l":13841407,"d":494751},{"a":13841413,"name":"Finish","f":13841413,"d_finished":0,"c":0,"l":13842323,"d":910},{"name":"task_result","f":13120113,"d_finished":312443,"c":422,"l":13839915,"d":312443}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:07.662472Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:43:06.935782Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2026-01-04T02:43:07.662511Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:43:07.662667Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 >> Normalizers::SubColumnsPortionsCleanerNormalizer-useSubcolumns [GOOD] >> ColumnShardTiers::TieringUsage [GOOD] >> Normalizers::RemoveWriteIdNormalizer [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpQuery::ExecuteWriteQuery [GOOD] Test command err: Trying to start YDB, gRPC: 31684, MsgBus: 27522 2026-01-04T02:42:10.298096Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331203381417391:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:10.298152Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:10.528873Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:10.534191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:10.534296Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:10.585578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:10.629129Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:10.630757Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331203381417361:2081] 1767494530296359 != 1767494530296362 2026-01-04T02:42:10.682335Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:10.682356Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:10.682361Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:10.682449Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:10.814410Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:11.111376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:11.309932Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:13.220840Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331216266320140:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:13.220852Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331216266320131:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:13.220961Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:13.221191Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331216266320147:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:13.221242Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:13.224308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:13.234570Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331216266320146:2326], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:42:13.348208Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331216266320199:2532] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:13.582906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 22761, MsgBus: 19579 2026-01-04T02:42:14.846545Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331221591943731:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:14.846605Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:14.875966Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:14.968590Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:14.968670Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:14.982436Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591331221591943700:2081] 1767494534846017 != 1767494534846020 2026-01-04T02:42:15.001440Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:15.008154Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:15.099900Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:15.101296Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:15.101312Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:15.101320Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:15.101388Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:15.493520Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:15.500290Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:42:15.863800Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:17.984492Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331234476846469:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:17.984493Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331234476846479:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:17.984573Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:17.984756Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331234476846485:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:17.984819Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:17.987440Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:17.995869Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591331234476846484:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:42:18.083690Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591331238771813833:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:18.116460Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:18.370704Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionI ... t called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:37.159095Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331322314664927:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:37.159191Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:37.159545Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331322314664937:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:37.159586Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:37.256418Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:37.296275Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:37.343839Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:37.394708Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:37.431146Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:37.475782Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:37.522058Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:37.583377Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:37.677832Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331322314665808:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:37.677940Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:37.678029Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331322314665813:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:37.678305Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591331322314665815:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:37.678358Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:37.682076Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:37.693860Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591331322314665817:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:42:37.794265Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591331322314665868:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:38.341489Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591331305134793902:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:38.341571Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:42:39.794134Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:48.048463Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-01-04T02:42:48.075420Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-01-04T02:42:48.430891Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:42:48.430927Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:48.888918Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-01-04T02:42:48.982638Z node 5 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=Rejecting data TxId 0 because datashard 72075186224037927: decided to reject due to given RejectProbability;tx_id=0; 2026-01-04T02:42:52.908840Z node 5 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3e59xd602wvzjhjtedh5xt", SessionId: ydb://session/3?node_id=5&id=MWFhYjAzYzAtNDE0Y2EyMTYtNDU5NWUzNTItZjgzMGM4NGQ=, Slow query, duration: 13.243160s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE test_table (\n PRIMARY KEY (id)\n ) AS SELECT\n ROW_NUMBER() OVER w AS id, data\n FROM\n AS_TABLE(ListReplicate(<|data: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'|>, 500000))\n WINDOW\n w AS (ORDER BY data)", parameters: 0b 2026-01-04T02:42:56.801439Z node 5 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3e59xd602wvzjhjtedh5xt", SessionId: ydb://session/3?node_id=5&id=MWFhYjAzYzAtNDE0Y2EyMTYtNDU5NWUzNTItZjgzMGM4NGQ=, Slow query, duration: 17.135731s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE test_table (\n PRIMARY KEY (id)\n ) AS SELECT\n ROW_NUMBER() OVER w AS id, 
data\n FROM\n AS_TABLE(ListReplicate(<|data: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'|>, 500000))\n WINDOW\n w AS (ORDER BY data)", parameters: 0b 2026-01-04T02:42:56.805896Z --------------- Start update --------------- 2026-01-04T02:42:56.806673Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:42:56.813785Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:42:58.537852Z node 5 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 74.5389 is higher than threshold of 60 in-flight Tx: 0 immediate Tx: 0 readIterators: 1 at datashard: 72075186224037927 table: [/Root/test_table] 2026-01-04T02:43:08.482044Z node 5 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3e5tnc6fbr5029nxyvz1bg", SessionId: ydb://session/3?node_id=5&id=NTFhMjg2NjEtMmI2NjcwYzMtZjBmNGM2NmItN2Q0ZTljYTI=, Slow query, duration: 11.665686s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n UPDATE test_table SET data = \"a\"\n ", parameters: 0b |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate+SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; 2026-01-04T02:43:06.890903Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:06.925588Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:06.925897Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:06.934082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:06.934353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:06.934616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:06.934741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:06.934896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:06.935008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:06.935141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:06.935266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:06.935389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:06.935510Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:06.935605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:06.935723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:06.935881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:06.965085Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:06.965272Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:06.965328Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:06.965504Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:06.965696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:06.965780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:06.965851Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:06.965980Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:06.966061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:06.966107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:06.966142Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:06.966301Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:06.966390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:06.966452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:06.966489Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:06.966608Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:06.966683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:06.966729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:06.966761Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:06.966811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:06.966850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:06.966888Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:06.966934Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:06.966973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:06.967005Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:06.967299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:06.967363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:06.967393Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:06.967535Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:06.967599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:06.967636Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:06.967683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:06.967737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:06.967776Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:43:06.967841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:06.967884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:06.967914Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:06.968065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:06.968131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tab ... 
t=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-04T02:43:10.682403Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=1;finished=1; 2026-01-04T02:43:10.682444Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:43:10.682483Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:43:10.682961Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:10.683135Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:10.683179Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:43:10.683307Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2026-01-04T02:43:10.683364Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2026-01-04T02:43:10.683617Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:463:2475];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2026-01-04T02:43:10.683771Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:10.683916Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:10.684055Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:10.684336Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:10.684473Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:10.684596Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:10.684801Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:464:2476] finished for tablet 9437184 2026-01-04T02:43:10.685269Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:463:2475];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.009}],"full":{"a":4259754,"name":"_full_task","f":4259754,"d_finished":0,"c":0,"l":4269589,"d":9835},"events":[{"name":"bootstrap","f":4260015,"d_finished":1105,"c":1,"l":4261120,"d":1105},{"a":4269043,"name":"ack","f":4267665,"d_finished":1144,"c":1,"l":4268809,"d":1690},{"a":4269029,"name":"processing","f":4261252,"d_finished":2887,"c":3,"l":4268812,"d":3447},{"name":"ProduceResults","f":4260755,"d_finished":1973,"c":6,"l":4269345,"d":1973},{"a":4269349,"name":"Finish","f":4269349,"d_finished":0,"c":0,"l":4269589,"d":240},{"name":"task_result","f":4261266,"d_finished":1690,"c":2,"l":4267233,"d":1690}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:10.685343Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:463:2475];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:43:10.685755Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:463:2475];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.009},{"events":["l_ack","l_processing","l_Finish"],"t":0.01}],"full":{"a":4259754,"name":"_full_task","f":4259754,"d_finished":0,"c":0,"l":4270123,"d":10369},"events":[{"name":"bootstrap","f":4260015,"d_finished":1105,"c":1,"l":4261120,"d":1105},{"a":4269043,"name":"ack","f":4267665,"d_finished":1144,"c":1,"l":4268809,"d":2224},{"a":4269029,"name":"processing","f":4261252,"d_finished":2887,"c":3,"l":4268812,"d":3981},{"name":"ProduceResults","f":4260755,"d_finished":1973,"c":6,"l":4269345,"d":1973},{"a":4269349,"name":"Finish","f":4269349,"d_finished":0,"c":0,"l":4270123,"d":774},{"name":"task_result","f":4261266,"d_finished":1690,"c":2,"l":4267233,"d":1690}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:10.685890Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:43:10.672841Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2026-01-04T02:43:10.685931Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:43:10.686056Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> THealthCheckTest::ServerlessBadTablets [GOOD] >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadZSTD [GOOD] Test command err: 2026-01-04T02:43:05.054788Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:05.084920Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:05.085126Z node 1 :TX_COLUMNSHARD DEBUG: 
ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:05.091482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:05.091679Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:05.091872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:05.091945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:05.092016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:05.092100Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:05.092165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:05.092232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:05.092311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:05.092384Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:05.092476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:05.092554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:05.092626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:05.114614Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:05.114794Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:05.114879Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:05.115079Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:05.115259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:05.115372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:05.115436Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:05.115562Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:05.115662Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:05.115732Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:05.115774Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:05.116024Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:05.116102Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:05.116149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:05.116183Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:05.116288Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:05.116358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:05.116408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-04T02:43:05.116443Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:05.116502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:05.116564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:05.116611Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:05.116689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:05.116734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:05.116773Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:05.117047Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:05.117164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:05.117211Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:05.117367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:05.117447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:05.117486Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:05.117541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:05.117595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:05.117644Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-04T02:43:05.117706Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:05.117769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:05.117830Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:05.118008Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:05.118075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... k=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-04T02:43:10.695153Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-04T02:43:10.695359Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1056:2923];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-04T02:43:10.695531Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:10.695639Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:10.695774Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:10.695966Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:10.696134Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:10.696279Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:10.696570Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:1057:2924] finished for tablet 9437184 2026-01-04T02:43:10.696963Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:1056:2923];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.011}],"full":{"a":6080577,"name":"_full_task","f":6080577,"d_finished":0,"c":0,"l":6092364,"d":11787},"events":[{"name":"bootstrap","f":6080764,"d_finished":1303,"c":1,"l":6082067,"d":1303},{"a":6091694,"name":"ack","f":6090384,"d_finished":1173,"c":1,"l":6091557,"d":1843},{"a":6091682,"name":"processing","f":6082235,"d_finished":3393,"c":3,"l":6091559,"d":4075},{"name":"ProduceResults","f":6081651,"d_finished":2074,"c":6,"l":6092052,"d":2074},{"a":6092057,"name":"Finish","f":6092057,"d_finished":0,"c":0,"l":6092364,"d":307},{"name":"task_result","f":6082248,"d_finished":2177,"c":2,"l":6090178,"d":2177}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:10.697025Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:1056:2923];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:43:10.697380Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:1056:2923];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ProduceResults","f_Finish"],"t":0.011},{"events":["l_ack","l_processing","l_Finish"],"t":0.012}],"full":{"a":6080577,"name":"_full_task","f":6080577,"d_finished":0,"c":0,"l":6092804,"d":12227},"events":[{"name":"bootstrap","f":6080764,"d_finished":1303,"c":1,"l":6082067,"d":1303},{"a":6091694,"name":"ack","f":6090384,"d_finished":1173,"c":1,"l":6091557,"d":2283},{"a":6091682,"name":"processing","f":6082235,"d_finished":3393,"c":3,"l":6091559,"d":4515},{"name":"ProduceResults","f":6081651,"d_finished":2074,"c":6,"l":6092052,"d":2074},{"a":6092057,"name":"Finish","f":6092057,"d_finished":0,"c":0,"l":6092804,"d":747},{"name":"task_result","f":6082248,"d_finished":2177,"c":2,"l":6090178,"d":2177}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:10.697437Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:43:10.682175Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=4512;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=4512;selected_rows=0; 2026-01-04T02:43:10.697470Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:43:10.697665Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:1057:2924];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tiering/ut/unittest >> ColumnShardTiers::TieringUsage [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:40:26.498633Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:40:26.617404Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:40:26.617878Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:40:26.617933Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:40:26.891897Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:40:26.892020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:40:26.952787Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494424008246 != 1767494424008249 2026-01-04T02:40:26.963929Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:40:27.005795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:40:27.095722Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:40:27.557500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:40:27.557552Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:40:27.557583Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:40:27.560285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:40:27.560821Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:40:27.574637Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 2026-01-04T02:40:37.964520Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:898:2764], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:37.964779Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:37.965482Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:925:2771], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:37.965588Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:37.969336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:38.285883Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1018:2845], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:38.285993Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:38.286428Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1022:2849], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:38.286507Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1025:2852], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:38.286557Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:40:38.290864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:40:38.655593Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1027:2854], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:40:38.728438Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1120:2918] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:40:39.305376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:40:39.696420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:40.350575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:41.207772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:40:41.640711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:40:42.171386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:40:42.985498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= UPSERT OBJECT `accessKey` (TYPE SECRET) WITH (value = `secretAccessKey`); UPSERT OBJECT `secretKey` (TYPE SECRET) WITH (value = `fakeSecret`); ;EXPECTATION=1;WAITING=1 REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", 
AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;EXPECTATION=1;WAITING=1 2026-01-04T02:40:57.888984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExternalDataSource, opId: 281474976715704:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_external_data_source.cpp:268) REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AWS", AWS_ACCESS_KEY_ID_SECRET_NAME="accessKey", AWS_SECRET_ACCESS_KEY_SECRET_NAME="secretKey", AWS_REGION="ru-central1" ); ;RESULT=;EXPECTATION=1 FINISHED_REQUEST= CREATE EXTERNAL DATA SOURCE `/Root/tier1` WITH ( SOURCE_TYPE="ObjectStorage", LOCATION="http://fake.fake/fake", AUTH_METHOD="AW ... ine=abstract.cpp:13;event=new_stage;stage=Written;task_id=1c4a1a64-e91711f0-93f50683-f3e475d9; 2026-01-04T02:43:11.667435Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=1328;raw_bytes=1089;count=1;records=1} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=130808;raw_bytes=3714405;count=3;records=3074} inactive {blob_bytes=3040;raw_bytes=2194;count=2;records=2} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037893 2026-01-04T02:43:11.667646Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;fline=abstract.cpp:13;event=new_stage;stage=ReadBlobs;task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587; 2026-01-04T02:43:11.667990Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: external_task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587;fline=actor.cpp:48;task=agents_waiting=1;additional_info=();; 2026-01-04T02:43:11.668311Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event_type=NKikimr::NBlobCache::TEvBlobCache::TEvReadBlobRangeResult;fline=task.cpp:110;event=OnDataReady;task=agents_waiting=0;additional_info=();;external_task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587; 2026-01-04T02:43:11.668431Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;fline=abstract.cpp:13;event=new_stage;stage=AskDataResources;task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587; 2026-01-04T02:43:11.668581Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;fline=abstract.cpp:13;event=new_stage;stage=ReadyForConstruct;task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587; 2026-01-04T02:43:11.672888Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
event=on_execution;consumer=GENERAL_COMPACTION;task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=72075186224037893;parent_id=[1:3075:4354];task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587;task_class=CS::GENERAL;fline=general_compaction.cpp:138;event=blobs_created_diff;appended=0;;column_id:1;chunk_idx:0;blob_range:[NO_BLOB:0:192];;column_id:2;chunk_idx:0;blob_range:[NO_BLOB:192:232];;column_id:3;chunk_idx:0;blob_range:[NO_BLOB:424:256];;column_id:4;chunk_idx:0;blob_range:[NO_BLOB:680:192];;column_id:5;chunk_idx:0;blob_range:[NO_BLOB:872:264];;column_id:6;chunk_idx:0;blob_range:[NO_BLOB:1136:192];;column_id:4294967040;chunk_idx:0;blob_range:[NO_BLOB:1328:192];;column_id:4294967041;chunk_idx:0;blob_range:[NO_BLOB:1520:192];;;;switched=(portion_id:20;path_id:1000000895;records_count:1;schema_version:2;level:0;cs:plan_step=1758856320500;tx_id=18446744073709551615;;wi:10;;column_size:1328;index_size:0;meta:(()););(portion_id:19;path_id:1000000895;records_count:1;schema_version:2;level:0;;column_size:1712;index_size:0;meta:(()););; 2026-01-04T02:43:11.672976Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=72075186224037893;parent_id=[1:3075:4354];task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587;task_class=CS::GENERAL;fline=general_compaction.cpp:140;event=blobs_created;appended=1;switched=2; 2026-01-04T02:43:11.673085Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=GENERAL_COMPACTION;task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;event=on_finished;consumer=GENERAL_COMPACTION;task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587;script=FULL_PORTIONS_FETCHING::GENERAL_COMPACTION;tablet_id=72075186224037893;parent_id=[1:3075:4354];task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587;task_class=CS::GENERAL;fline=abstract.cpp:13;event=new_stage;stage=Constructed;task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587; 2026-01-04T02:43:11.673250Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3075:4354];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:52;event=TEvWriteIndex;count=1; 2026-01-04T02:43:11.673348Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3075:4354];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=columnshard__write_index.cpp:63;event=Limiter; 2026-01-04T02:43:11.673382Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3075:4354];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=AskDiskQuota;task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587; 2026-01-04T02:43:11.673422Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3075:4354];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Writing;task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587; 2026-01-04T02:43:11.673678Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 72075186224037893 2026-01-04T02:43:11.673761Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=72075186224037893;self_id=[1:3075:4354];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:48;memory_size=94;data_size=70;sum=2726;count=57; 2026-01-04T02:43:11.673791Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3075:4354];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_meta.cpp:66;memory_size=190;data_size=182;sum=5510;count=58;size_of_meta=112; 2026-01-04T02:43:11.673846Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3075:4354];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=constructor_portion.cpp:44;memory_size=270;data_size=262;sum=7830;count=29;size_of_portion=192; 2026-01-04T02:43:11.673884Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3075:4354];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587; 2026-01-04T02:43:11.673985Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[49] (CS::GENERAL) apply at tablet 72075186224037893 2026-01-04T02:43:11.674658Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[1:3075:4354];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=72075186224037893;external_task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587; 2026-01-04T02:43:11.674702Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 72075186224037893 Save Batch GenStep: 1:17 Blob count: 1 2026-01-04T02:43:11.674777Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=1328;raw_bytes=1089;count=1;records=1} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=130808;raw_bytes=3714405;count=3;records=3074} inactive {blob_bytes=3040;raw_bytes=2194;count=2;records=2} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 72075186224037893 Cleaning waiting... 
Fake storage clean FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=51;stage=COMPACTION; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=53;stage=COMPACTION; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=54;stage=COMPACTION; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=50;stage=CLEANUP_PORTIONS; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=52;stage=CLEANUP_PORTIONS; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 0 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037892 FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:106;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:13;event=new_stage;stage=Aborted;task_id=1c4a1a64-e91711f0-93f50683-f3e475d9; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=CLEANUP_PORTIONS;event=free;usage=560;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=6240;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=CLEANUP_PORTIONS;event=free;usage=0;delta=560; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=5680;delta=560; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=abstract.cpp:106;event=AbortEmergency;reason=TTxWriteIndex destructor withno CompleteReady flag;prev_reason=; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=abstract.cpp:13;event=new_stage;stage=Aborted;task_id=1c4a5a7e-e91711f0-bdaf5bf0-c31eb587; FALLBACK_ACTOR_LOGGING;priority=WARN;component=332;fline=manager.cpp:51;message=aborted data locks manager; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=COMPACTION;event=free;usage=3600;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=3600;delta=2080; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=COMPACTION;event=free;usage=560;delta=3040; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=560;delta=3040; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=COMPACTION;event=free;usage=0;delta=560; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=560; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier2' stopped at tablet 72075186224037893 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=1600;manager.cpp:162 :Tier '/Root/tier1' stopped at tablet 72075186224037893 |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tiering/ut/unittest >> KqpLimits::CancelAfterRoTx [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup |92.4%| [TA] $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.4%| [TA] {RESULT} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tiering/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> THealthCheckTest::StorageLimit80 [GOOD] >> THealthCheckTest::StorageLimit50 >> THealthCheckTest::Issues100VCardMerging [GOOD] >> THealthCheckTest::NoBscResponse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::SubColumnsPortionsCleanerNormalizer-useSubcolumns [GOOD] Test command err: 2026-01-04T02:43:02.084520Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:02.120099Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:02.120397Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:02.128357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanSubColumnsPortions; 2026-01-04T02:43:02.128644Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:02.128867Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:02.129043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:02.129180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:02.129292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:02.129419Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:02.129550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:02.129689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:02.129842Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:02.129980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:02.130108Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:02.130225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:02.130357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:02.165592Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:02.165772Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=CleanSubColumnsPortions; 2026-01-04T02:43:02.165853Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2026-01-04T02:43:02.166186Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=62; 2026-01-04T02:43:02.166310Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=50; 2026-01-04T02:43:02.166422Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=47; 2026-01-04T02:43:02.166530Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=56; 2026-01-04T02:43:02.166731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanSubColumnsPortions;id=NO_VALUE_OPTIONAL; 
2026-01-04T02:43:02.166823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2026-01-04T02:43:02.166873Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2026-01-04T02:43:02.167052Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:02.167156Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:02.167215Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:02.167258Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2026-01-04T02:43:02.167348Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:02.167407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:02.167447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:02.167525Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:02.167720Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:02.167785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:02.167847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:02.167884Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:02.168005Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:02.168078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:02.168136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:02.168168Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:02.168239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:02.168283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:02.168314Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:02.168373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:02.168487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:02.168526Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:02.168747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:02.168809Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:02.168845Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:02.168998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:02.169046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:02.169079Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstr ... 
olumn_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-04T02:43:11.850847Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=138;finished=1; 2026-01-04T02:43:11.850880Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:43:11.850934Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:43:11.851289Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:11.851429Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:138;schema=id: uint64 json_payload: binary;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:11.851472Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:43:11.851605Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=2;rows=138; 2026-01-04T02:43:11.851659Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=138;batch_columns=id,json_payload; 2026-01-04T02:43:11.851834Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:290:2291];bytes=283016;rows=4162;faults=0;finished=0;fault=0;schema=id: uint64 json_payload: binary; 2026-01-04T02:43:11.851969Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:11.852134Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:11.852299Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:11.852428Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:11.852546Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:11.852650Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 
2026-01-04T02:43:11.852974Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:292:2292] finished for tablet 9437184 2026-01-04T02:43:11.853540Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:290:2291];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.032},{"events":["l_task_result"],"t":0.421},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.423}],"full":{"a":9834982,"name":"_full_task","f":9834982,"d_finished":0,"c":0,"l":10258610,"d":423628},"events":[{"name":"bootstrap","f":9835225,"d_finished":1208,"c":1,"l":9836433,"d":1208},{"a":10257969,"name":"ack","f":9867839,"d_finished":169702,"c":251,"l":10257886,"d":170343},{"a":10257957,"name":"processing","f":9836640,"d_finished":364651,"c":503,"l":10257890,"d":365304},{"name":"ProduceResults","f":9836049,"d_finished":293592,"c":756,"l":10258226,"d":293592},{"a":10258232,"name":"Finish","f":10258232,"d_finished":0,"c":0,"l":10258610,"d":378},{"name":"task_result","f":9836672,"d_finished":189186,"c":252,"l":10256512,"d":189186}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:11.853661Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:290:2291];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:43:11.854243Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:290:2291];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.032},{"events":["l_task_result"],"t":0.421},{"events":["l_ProduceResults","f_Finish"],"t":0.423},{"events":["l_ack","l_processing","l_Finish"],"t":0.424}],"full":{"a":9834982,"name":"_full_task","f":9834982,"d_finished":0,"c":0,"l":10259271,"d":424289},"events":[{"name":"bootstrap","f":9835225,"d_finished":1208,"c":1,"l":9836433,"d":1208},{"a":10257969,"name":"ack","f":9867839,"d_finished":169702,"c":251,"l":10257886,"d":171004},{"a":10257957,"name":"processing","f":9836640,"d_finished":364651,"c":503,"l":10257890,"d":365965},{"name":"ProduceResults","f":9836049,"d_finished":293592,"c":756,"l":10258226,"d":293592},{"a":10258232,"name":"Finish","f":10258232,"d_finished":0,"c":0,"l":10259271,"d":1039},{"name":"task_result","f":9836672,"d_finished":189186,"c":252,"l":10256512,"d":189186}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:11.854345Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:43:11.428248Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=1409432;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=1409432;selected_rows=0; 2026-01-04T02:43:11.854398Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:43:11.854573Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:292:2292];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=id;);;ff=(column_ids=1,2;column_names=id,json_payload;);;program_input=(column_ids=1,2;column_names=id,json_payload;);;; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::RemoveWriteIdNormalizer [GOOD] Test command err: 2026-01-04T02:43:05.778927Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:05.802018Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:05.802297Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 
2026-01-04T02:43:05.808145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SchemaVersionCleaner; 2026-01-04T02:43:05.808363Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:05.808537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:05.808674Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:05.808740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:05.808808Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:05.808889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:05.808960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:05.809029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:05.809089Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:05.809175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:05.809257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:05.809392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:05.809505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:05.831508Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: 
TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:05.831648Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=SchemaVersionCleaner; 2026-01-04T02:43:05.831701Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2026-01-04T02:43:05.832055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SchemaVersionCleaner;id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:05.832135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2026-01-04T02:43:05.832215Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2026-01-04T02:43:05.832384Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:05.832473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:05.832527Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:05.832577Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2026-01-04T02:43:05.832673Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:05.832731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:05.832774Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:05.832809Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:05.833007Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:05.833093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:05.833141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:05.833173Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:05.833299Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:05.833362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:05.833406Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:05.833449Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:05.833517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:05.833568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:05.833613Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:05.833660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:05.833710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:05.833740Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:05.834023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:05.834071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:05.834101Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:05.834280Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:05.834348Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:05.834380Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:05.834449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:05.834496Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:05.834532Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=n ... l={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-04T02:43:12.237476Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2026-01-04T02:43:12.237507Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:43:12.237541Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:43:12.238180Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:12.238364Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:12.238401Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:43:12.238523Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2026-01-04T02:43:12.238577Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2026-01-04T02:43:12.238744Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:328:2329];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2026-01-04T02:43:12.238869Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:12.239004Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:12.239188Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:12.239331Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:12.239453Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:12.239580Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:12.239936Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:330:2330] finished for tablet 9437184 2026-01-04T02:43:12.240508Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:328:2329];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.075},{"events":["l_task_result"],"t":0.754},{"events":["l_ProduceResults","f_Finish"],"t":0.756},{"events":["l_ack","l_processing","l_Finish"],"t":0.757}],"full":{"a":6181467,"name":"_full_task","f":6181467,"d_finished":0,"c":0,"l":6938482,"d":757015},"events":[{"name":"bootstrap","f":6181820,"d_finished":1720,"c":1,"l":6183540,"d":1720},{"a":6937795,"name":"ack","f":6256603,"d_finished":292470,"c":421,"l":6937707,"d":293157},{"a":6937772,"name":"processing","f":6183733,"d_finished":620605,"c":843,"l":6937710,"d":621315},{"name":"ProduceResults","f":6182986,"d_finished":501201,"c":1266,"l":6938084,"d":501201},{"a":6938092,"name":"Finish","f":6938092,"d_finished":0,"c":0,"l":6938482,"d":390},{"name":"task_result","f":6183759,"d_finished":317979,"c":422,"l":6936041,"d":317979}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:12.240598Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:328:2329];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:43:12.241172Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:328:2329];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.075},{"events":["l_task_result"],"t":0.754},{"events":["l_ProduceResults","f_Finish"],"t":0.756},{"events":["l_ack","l_processing","l_Finish"],"t":0.757}],"full":{"a":6181467,"name":"_full_task","f":6181467,"d_finished":0,"c":0,"l":6939129,"d":757662},"events":[{"name":"bootstrap","f":6181820,"d_finished":1720,"c":1,"l":6183540,"d":1720},{"a":6937795,"name":"ack","f":6256603,"d_finished":292470,"c":421,"l":6937707,"d":293804},{"a":6937772,"name":"processing","f":6183733,"d_finished":620605,"c":843,"l":6937710,"d":621962},{"name":"ProduceResults","f":6182986,"d_finished":501201,"c":1266,"l":6938084,"d":501201},{"a":6938092,"name":"Finish","f":6938092,"d_finished":0,"c":0,"l":6939129,"d":1037},{"name":"task_result","f":6183759,"d_finished":317979,"c":422,"l":6936041,"d":317979}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:12.241281Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:43:11.481379Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2026-01-04T02:43:12.241336Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:43:12.241505Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; >> THealthCheckTest::GreenStatusWhenInitPending [GOOD] >> THealthCheckTest::IgnoreOtherGenerations |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadSomePrograms >> EvWrite::WriteWithSplit >> THealthCheckTest::TestReBootingTabletIsDead [GOOD] >> THealthCheckTest::TestOnlyRequestNeededTablets >> THealthCheckTest::BridgeGroupDegradedInOnePile [GOOD] >> THealthCheckTest::BridgeGroupDeadInOnePile >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] >> THealthCheckTest::ShardsLimit995 [GOOD] >> THealthCheckTest::ShardsLimit905 >> 
DataShardVolatile::DistributedWriteEarlierSnapshotNotBlocked [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink >> TColumnShardTestReadWrite::WriteExoticTypes >> test_sql_streaming.py::test[pq-WriteTwoTopics-default.txt] [FAIL] >> test_sql_streaming.py::test[solomon-ReadTopicGroupWriteToSolomon-default.txt] >> THealthCheckTest::OnlyDiskIssueOnInitialPDisks [GOOD] >> THealthCheckTest::OnlyDiskIssueOnFaultyPDisks >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataInsideFilter-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataNestedDeep-default.txt] >> THealthCheckTest::ServerlessWhenTroublesWithSharedNodes [GOOD] >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowExprKey-default.txt] >> test_sql_streaming.py::test[hop-GroupByHopByStringKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopExprKey-default.txt] >> TColumnShardTestReadWrite::WriteRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadSomePrograms [GOOD] Test command err: 2026-01-04T02:43:15.939240Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:15.968612Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:15.968878Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:15.976913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:15.977172Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:15.977412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:15.977560Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:15.977689Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:15.977792Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:15.977907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:15.978032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:15.978140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:15.978285Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:15.978417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:15.978545Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:15.978666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:16.001159Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:16.001322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:16.001376Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:16.001601Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:16.001778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:16.001894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:16.001945Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:16.002052Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:16.002117Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:16.002168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:16.002224Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:16.002424Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:16.002506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:16.002552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:16.002599Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:16.002706Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:16.002761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:16.002810Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:16.002841Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:16.002893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:16.002931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:16.002965Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:16.003013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:16.003055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:16.003111Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:16.003352Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:16.003414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:16.003452Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:16.003584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:16.003631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:16.003670Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:16.003731Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:16.003778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:16.003840Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:43:16.003896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:16.003940Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:16.003972Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:16.004176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:16.004224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
5152;fline=columnshard__propose_transaction.cpp:105;event=actual tx operator; 2026-01-04T02:43:16.595206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767494596914;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;this=136802078476448;op_tx=10:TX_KIND_SCHEMA;min=1767494596914;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767494596914;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_this=137008239585152;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:11;event=scheme_shard_tablet_not_initialized;source=[1:103:2137]; 2026-01-04T02:43:16.595279Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;request_tx=10:TX_KIND_SCHEMA;min=1767494596914;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;this=136802078476448;op_tx=10:TX_KIND_SCHEMA;min=1767494596914;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_op_tx=10:TX_KIND_SCHEMA;min=1767494596914;max=18446744073709551615;plan=0;src=[1:103:2137];cookie=00:0;;int_this=137008239585152;method=TTxController::FinishProposeOnComplete;tx_id=10;fline=propose_tx.cpp:32;message=;tablet_id=9437184;tx_id=10; 2026-01-04T02:43:16.595629Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxNotifyTxCompletion.Execute at tablet 9437184 2026-01-04T02:43:16.595804Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767494596914 at tablet 9437184, mediator 0 2026-01-04T02:43:16.595871Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] execute at tablet 9437184 2026-01-04T02:43:16.596250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:43:16.596356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:43:16.596398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:43:16.596487Z node 1 :TX_COLUMNSHARD INFO: ctor_logger.h:56: EnsureTable for pathId: {internal: 1000000185, ss: 1} ttl settings: { Version: 1 } at tablet 9437184 2026-01-04T02:43:16.606747Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:83;event=new_schema;snapshot=plan_step=1767494596914;tx_id=10;;switch_optimizer=0;switch_accessors=0; 2026-01-04T02:43:16.606849Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:43:16.607008Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tables_manager.cpp:367;method=RegisterTable;path_id=1000000185; 2026-01-04T02:43:16.607096Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine.h:158;event=RegisterTable;path_id=1000000185; 2026-01-04T02:43:16.607436Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=tiling.cpp:809;message=creating tiling compaction optimizer; 2026-01-04T02:43:16.625909Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=10;fline=column_engine_logs.cpp:535;event=OnTieringModified;path_id=1000000185; 2026-01-04T02:43:16.650980Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[2] complete at tablet 9437184 FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=3200;columns=5; 2026-01-04T02:43:16.654354Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:227;event=register_operation;operation_id=1;last=1; 2026-01-04T02:43:16.654440Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:27;writing_size=3200;operation_id=1f43d0e8-e91711f0-a2487d50-fd1e7198;in_flight=1;size_in_flight=3200; 2026-01-04T02:43:16.667024Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=1;path_id={internal: 1000000185, ss: 1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=3768;count=1;actions=__DEFAULT,;waiting=1;; 2026-01-04T02:43:16.669020Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=3200;event=data_write_finished;writing_id=1f43d0e8-e91711f0-a2487d50-fd1e7198; 2026-01-04T02:43:16.669382Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=60;data_size=20;sum=60;count=1; 2026-01-04T02:43:16.669458Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=156;data_size=132;sum=156;count=2;size_of_meta=112; 2026-01-04T02:43:16.669518Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=236;data_size=212;sum=236;count=1;size_of_portion=192; 2026-01-04T02:43:16.670020Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2026-01-04T02:43:16.670164Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=4;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=2;operation_id=1; 2026-01-04T02:43:16.682395Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2026-01-04T02:43:16.682617Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:16.696407Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: PlanStep 1767494596920 at tablet 9437184, mediator 0 2026-01-04T02:43:16.696500Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[6] execute at tablet 9437184 2026-01-04T02:43:16.696878Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=100;fline=abstract.h:88;progress_tx_id=100;lock_id=1;broken=0; 2026-01-04T02:43:16.711050Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxPlanStep[6] complete at tablet 9437184 2026-01-04T02:43:16.711239Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;fline=abstract.h:101;progress_tx_id=100;lock_id=1;broken=0; 2026-01-04T02:43:16.711497Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:194;event=remove_write_id_to_operation_id;write_id=2;operation_id=1; 2026-01-04T02:43:16.711548Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;tx_state=TTxProgressTx::Complete;commit_tx_id=100;commit_lock_id=1;fline=manager.cpp:197;event=remove_operation;operation_id=1; 2026-01-04T02:43:16.711998Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:257;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:43:16.712060Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:16.712142Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:43:16.712203Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:16.712276Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:43:16.729194Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:16.729316Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:16.729394Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:16.729556Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:16.730171Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 100 scanId: 0 version: {1767494596920:100} readable: {1767494596920:max} at tablet 9437184 2026-01-04T02:43:16.742495Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 100 scanId: 0 at tablet 9437184 2026-01-04T02:43:16.744858Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tx_id=100;scan_id=0;gen=0;table=;snapshot={1767494596920:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot parse program;details=Can't parse SsaProgram: Can't parse TOlapProgram protobuf; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::Write >> TColumnShardTestReadWrite::CompactionGCFailingBs >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString >> EvWrite::WriteWithSplit [GOOD] >> Normalizers::ChunksV0MetaNormalizer >> THealthCheckTest::BridgeNoBscResponse [GOOD] >> THealthCheckTest::CLusterNotBootstrapped >> TColumnShardTestReadWrite::WriteRead [GOOD] >> THealthCheckTest::StorageLimit50 [GOOD] >> THealthCheckTest::StorageNoQuota ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32 [GOOD] Test command err: 2026-01-04T02:42:47.249919Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:42:47.284144Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:42:47.284403Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:42:47.292396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:42:47.292663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:42:47.292895Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:42:47.293019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:42:47.293121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:42:47.293271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:42:47.293383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:42:47.293515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:42:47.293638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:42:47.293829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.293972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:42:47.294084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:42:47.294203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:42:47.324798Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:42:47.324940Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:42:47.325023Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:42:47.325238Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.325400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:42:47.325502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:42:47.325560Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:42:47.325662Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:42:47.325723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:42:47.325771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:42:47.325826Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:42:47.326031Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.326118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:42:47.326168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:42:47.326208Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:42:47.326307Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:42:47.326361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:42:47.326420Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:42:47.326462Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:42:47.326524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:42:47.326578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2026-01-04T02:42:47.326619Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:42:47.326669Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:42:47.326703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:42:47.326740Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:42:47.326975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:42:47.327035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:42:47.327075Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:42:47.327245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:42:47.327297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.327346Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.327399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:42:47.327454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:42:47.327509Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:42:47.327563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:42:47.327614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:42:47.327656Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 
2026-01-04T02:42:47.327791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:42:47.327856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ge_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=14; 2026-01-04T02:43:19.820687Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=981; 2026-01-04T02:43:19.820740Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=78502; 2026-01-04T02:43:19.820787Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=78622; 2026-01-04T02:43:19.820858Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=14; 2026-01-04T02:43:19.821239Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=333; 2026-01-04T02:43:19.821286Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=79601; 2026-01-04T02:43:19.821449Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=107; 2026-01-04T02:43:19.821570Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=71; 2026-01-04T02:43:19.822017Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=400; 2026-01-04T02:43:19.822388Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=322; 2026-01-04T02:43:19.837655Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=15192; 2026-01-04T02:43:19.849949Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=12177; 2026-01-04T02:43:19.850040Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=13; 2026-01-04T02:43:19.850092Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=14; 2026-01-04T02:43:19.850124Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-01-04T02:43:19.850177Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=30; 2026-01-04T02:43:19.850208Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-04T02:43:19.850273Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=40; 2026-01-04T02:43:19.850309Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2026-01-04T02:43:19.850358Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=25; 2026-01-04T02:43:19.850423Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=41; 2026-01-04T02:43:19.850488Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=39; 2026-01-04T02:43:19.850514Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=117705; 2026-01-04T02:43:19.850619Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:43:19.850710Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:43:19.850753Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:43:19.850805Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:19.850838Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:43:19.850976Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:19.851019Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:43:19.851047Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:19.851077Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:19.851128Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494269613;tx_id=18446744073709551615;;current_snapshot_ts=1767494568679; 2026-01-04T02:43:19.851159Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:19.851193Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:19.851220Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:19.851284Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:19.851433Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.008000s; 2026-01-04T02:43:19.856000Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:43:19.856297Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:43:19.856361Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:19.856443Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:19.856512Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:19.856587Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494269613;tx_id=18446744073709551615;;current_snapshot_ts=1767494568679; 2026-01-04T02:43:19.856634Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:19.856687Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:19.856734Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:19.856817Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-04T02:43:19.856871Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:19.857699Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.033000s; 2026-01-04T02:43:19.857738Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit+UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink >> THealthCheckTest::IgnoreOtherGenerations [GOOD] >> THealthCheckTest::IgnoreServerlessWhenNotSpecific >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] >> THealthCheckTest::ServerlessWithExclusiveNodesWhenTroublesWithSharedNodes [GOOD] >> THealthCheckTest::ShardsLimit800 >> VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-false [GOOD] >> VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-true >> TColumnShardTestReadWrite::WriteOverload-InStore >> Normalizers::ChunksV0MetaNormalizer [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] >> TColumnShardTestReadWrite::Write [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteRead [GOOD] Test command err: 2026-01-04T02:43:19.352047Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:19.375584Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:19.375799Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 
2026-01-04T02:43:19.381474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:19.381672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:19.381883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:19.381954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:19.382016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:19.382082Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:19.382167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:19.382237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:19.382298Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:19.382376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:19.382456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:19.382551Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:19.382619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:19.404573Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:19.404750Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:19.404815Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:19.404994Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:19.405161Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:19.405251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:19.405294Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:19.405387Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:19.405454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:19.405514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:19.405545Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:19.405747Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:19.405849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:19.405899Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:19.405927Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:19.406035Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:19.406088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:19.406129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-04T02:43:19.406157Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:19.406207Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:19.406244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:19.406276Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:19.406317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:19.406359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:19.406399Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:19.406638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:19.406715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:19.406753Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:19.406892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:19.406943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:19.406979Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:19.407029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:19.407071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:19.407113Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-04T02:43:19.407165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:19.407200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:19.407234Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:19.407365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:19.407412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... n_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-04T02:43:23.158888Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-04T02:43:23.159168Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:474:2485];bytes=2020;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: string json_payload: string ingested_at: timestamp[us] saved_at: timestamp[us] request_id: string; 2026-01-04T02:43:23.159392Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:23.159573Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:23.159767Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:23.160048Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:23.160258Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:23.160434Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:23.160768Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:475:2486] finished for tablet 9437184 2026-01-04T02:43:23.161248Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:474:2485];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.015}],"full":{"a":4360496,"name":"_full_task","f":4360496,"d_finished":0,"c":0,"l":4376002,"d":15506},"events":[{"name":"bootstrap","f":4360835,"d_finished":1938,"c":1,"l":4362773,"d":1938},{"a":4375194,"name":"ack","f":4373412,"d_finished":1579,"c":1,"l":4374991,"d":2387},{"a":4375177,"name":"processing","f":4362948,"d_finished":4625,"c":3,"l":4374998,"d":5450},{"name":"ProduceResults","f":4362234,"d_finished":2770,"c":6,"l":4375627,"d":2770},{"a":4375631,"name":"Finish","f":4375631,"d_finished":0,"c":0,"l":4376002,"d":371},{"name":"task_result","f":4362984,"d_finished":2958,"c":2,"l":4373196,"d":2958}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:23.161336Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:474:2485];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:43:23.161742Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:474:2485];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.012},{"events":["l_ProduceResults","f_Finish"],"t":0.015},{"events":["l_ack","l_processing","l_Finish"],"t":0.016}],"full":{"a":4360496,"name":"_full_task","f":4360496,"d_finished":0,"c":0,"l":4376553,"d":16057},"events":[{"name":"bootstrap","f":4360835,"d_finished":1938,"c":1,"l":4362773,"d":1938},{"a":4375194,"name":"ack","f":4373412,"d_finished":1579,"c":1,"l":4374991,"d":2938},{"a":4375177,"name":"processing","f":4362948,"d_finished":4625,"c":3,"l":4374998,"d":6001},{"name":"ProduceResults","f":4362234,"d_finished":2770,"c":6,"l":4375627,"d":2770},{"a":4375631,"name":"Finish","f":4375631,"d_finished":0,"c":0,"l":4376553,"d":922},{"name":"task_result","f":4362984,"d_finished":2958,"c":2,"l":4373196,"d":2958}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:23.161846Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:43:23.141325Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7600;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7600;selected_rows=0; 2026-01-04T02:43:23.161887Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:43:23.162109Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] >> THealthCheckTest::ShardsLimit905 [GOOD] >> THealthCheckTest::ShardsNoLimit ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteExoticTypes [GOOD] Test command err: 2026-01-04T02:43:17.717174Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:17.752017Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:17.752223Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:17.759211Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:17.759487Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:17.759728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:17.759881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:17.759991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:17.760091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:17.760216Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:17.760375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:17.760483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:17.760586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:17.760707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:17.760851Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:17.760977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:17.792417Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:17.792620Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:17.792682Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:17.792904Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:17.793135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:17.793237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:17.793292Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:17.793417Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:17.793493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:17.793568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:17.793656Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:17.793924Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:17.794003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:17.794053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:17.794090Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:17.794196Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:17.794265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:17.794332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:17.794375Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:17.794434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:17.794477Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:17.794532Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:17.794581Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:17.794649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:17.794691Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:17.794943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:17.795022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:17.795068Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:17.795273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:17.795345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:17.795380Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:17.795438Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:17.795483Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:17.795534Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:43:17.795596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:17.795646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:17.795682Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:17.795866Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:17.795919Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... lude":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"
i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk
":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]}
,"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; 2026-01-04T02:43:23.792117Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64 [GOOD] Test command err: 2026-01-04T02:42:47.249934Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:42:47.285069Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:42:47.285323Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:42:47.293865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:42:47.294137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:42:47.294364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:42:47.294509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:42:47.294654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:42:47.294771Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:42:47.294895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:42:47.295025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:42:47.295141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:42:47.295283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.295439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:42:47.295556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:42:47.295664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:42:47.324450Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 
2026-01-04T02:42:47.324617Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:42:47.324683Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:42:47.324884Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.325062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:42:47.325168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:42:47.325219Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:42:47.325322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:42:47.325393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:42:47.325443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:42:47.325475Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:42:47.325683Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.325776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:42:47.325849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:42:47.325886Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:42:47.326000Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:42:47.326063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:42:47.326112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:42:47.326165Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:42:47.326251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:42:47.326294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:42:47.326331Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:42:47.326380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:42:47.326421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:42:47.326459Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:42:47.326785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:42:47.326844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:42:47.326882Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:42:47.327059Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:42:47.327119Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.327156Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.327221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:42:47.327277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:42:47.327318Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:42:47.327382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:42:47.327435Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:42:47.327469Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:42:47.327604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:42:47.327648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=9; 2026-01-04T02:43:23.284390Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=816; 2026-01-04T02:43:23.284437Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=53581; 2026-01-04T02:43:23.284479Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=53696; 2026-01-04T02:43:23.284533Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=9; 2026-01-04T02:43:23.284817Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=244; 2026-01-04T02:43:23.284856Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=54471; 2026-01-04T02:43:23.285007Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=105; 2026-01-04T02:43:23.285112Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=64; 2026-01-04T02:43:23.285524Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=368; 2026-01-04T02:43:23.285868Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=292; 2026-01-04T02:43:23.301566Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=15635; 2026-01-04T02:43:23.317222Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=15554; 2026-01-04T02:43:23.317322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2026-01-04T02:43:23.317376Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2026-01-04T02:43:23.317417Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-04T02:43:23.317484Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=35; 2026-01-04T02:43:23.317523Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-04T02:43:23.317654Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=74; 2026-01-04T02:43:23.317703Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-04T02:43:23.317767Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2026-01-04T02:43:23.317901Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=93; 2026-01-04T02:43:23.317981Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=42; 2026-01-04T02:43:23.318018Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=95516; 2026-01-04T02:43:23.318143Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 
2026-01-04T02:43:23.318248Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:43:23.318296Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:43:23.318380Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:23.318426Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:43:23.318570Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:23.318623Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:43:23.318658Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:23.318716Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:23.318772Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494269655;tx_id=18446744073709551615;;current_snapshot_ts=1767494568679; 2026-01-04T02:43:23.318811Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:23.318851Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:23.318884Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:23.318963Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:23.319131Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.012000s; 2026-01-04T02:43:23.322683Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:43:23.323029Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:43:23.323076Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:23.323150Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:23.323201Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:23.323273Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494269655;tx_id=18446744073709551615;;current_snapshot_ts=1767494568679; 2026-01-04T02:43:23.323313Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:23.323355Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:23.323388Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:23.323454Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-04T02:43:23.323497Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:23.324353Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.120000s; 2026-01-04T02:43:23.324394Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> THealthCheckTest::BridgeGroupDeadInOnePile [GOOD] >> THealthCheckTest::BridgeGroupDeadInBothPiles ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadStandaloneExoticTypes [GOOD] Test command err: 2026-01-04T02:43:20.520443Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:20.554147Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:20.554485Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute 
at tablet 9437184 2026-01-04T02:43:20.562178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:20.562458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:20.562696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:20.562813Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:20.562916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:20.563055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:20.563186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:20.563301Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:20.563409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:20.563530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:20.563671Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:20.563802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:20.563932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:20.595217Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:20.595387Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:20.595449Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:20.595649Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:20.595877Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:20.595971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:20.596032Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:20.596138Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:20.596200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:20.596249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:20.596285Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:20.596493Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:20.596555Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:20.596599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:20.596629Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:20.596753Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:20.596816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:20.596883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-04T02:43:20.596916Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:20.596969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:20.597011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:20.597044Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:20.597105Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:20.597166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:20.597203Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:20.597421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:20.597476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:20.597517Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:20.597660Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:20.597710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:20.597742Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:20.597788Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:20.597849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:20.597896Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-04T02:43:20.597955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:20.598017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:20.598055Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:20.598188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:20.598252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... n_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-04T02:43:24.667409Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-04T02:43:24.667687Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:474:2485];bytes=2791;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2026-01-04T02:43:24.667887Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:24.668020Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:24.668155Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:24.668340Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:24.668500Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:24.668635Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:24.668945Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:475:2486] finished for tablet 9437184 2026-01-04T02:43:24.669332Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:474:2485];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.013},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.015}],"full":{"a":4620935,"name":"_full_task","f":4620935,"d_finished":0,"c":0,"l":4636626,"d":15691},"events":[{"name":"bootstrap","f":4621254,"d_finished":1712,"c":1,"l":4622966,"d":1712},{"a":4635957,"name":"ack","f":4634328,"d_finished":1490,"c":1,"l":4635818,"d":2159},{"a":4635946,"name":"processing","f":4623111,"d_finished":4300,"c":3,"l":4635822,"d":4980},{"name":"ProduceResults","f":4622512,"d_finished":2534,"c":6,"l":4636294,"d":2534},{"a":4636299,"name":"Finish","f":4636299,"d_finished":0,"c":0,"l":4636626,"d":327},{"name":"task_result","f":4623131,"d_finished":2744,"c":2,"l":4634137,"d":2744}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:24.669405Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:474:2485];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:43:24.669763Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:474:2485];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack","l_task_result"],"t":0.013},{"events":["l_ProduceResults","f_Finish"],"t":0.015},{"events":["l_ack","l_processing","l_Finish"],"t":0.016}],"full":{"a":4620935,"name":"_full_task","f":4620935,"d_finished":0,"c":0,"l":4637079,"d":16144},"events":[{"name":"bootstrap","f":4621254,"d_finished":1712,"c":1,"l":4622966,"d":1712},{"a":4635957,"name":"ack","f":4634328,"d_finished":1490,"c":1,"l":4635818,"d":2612},{"a":4635946,"name":"processing","f":4623111,"d_finished":4300,"c":3,"l":4635822,"d":5433},{"name":"ProduceResults","f":4622512,"d_finished":2534,"c":6,"l":4636294,"d":2534},{"a":4636299,"name":"Finish","f":4636299,"d_finished":0,"c":0,"l":4637079,"d":780},{"name":"task_result","f":4623131,"d_finished":2744,"c":2,"l":4634137,"d":2744}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:24.669836Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:43:24.649496Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7928;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7928;selected_rows=0; 2026-01-04T02:43:24.669875Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:43:24.670094Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> THealthCheckTest::NoBscResponse [GOOD] >> THealthCheckTest::LayoutIncorrect >> KqpAnalyze::AnalyzeTable+ColumnStore [GOOD] >> KqpAnalyze::AnalyzeTable-ColumnStore 
>> THealthCheckTest::OnlyDiskIssueOnFaultyPDisks [GOOD] >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::Write [GOOD] Test command err: 2026-01-04T02:43:20.397375Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:20.435151Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:20.435381Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:20.443390Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:20.443664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:20.443928Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:20.444091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:20.444237Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:20.444355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:20.444495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:20.444715Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:20.444845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:20.444968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:20.445100Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:20.445264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:20.445411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:20.477917Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:20.478079Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:20.478138Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:20.478396Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:20.478543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:20.478627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:20.478686Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:20.478783Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:20.478859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:20.478909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:20.478941Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:20.479144Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:20.479217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:20.479258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:20.479289Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:20.479375Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:20.479422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:20.479467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:20.479493Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:20.479540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:20.479579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:20.479609Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:20.479650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:20.479687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:20.479718Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:20.479992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:20.480118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:20.480170Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:20.480322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:20.480373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:20.480404Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:20.480459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:20.480500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:20.480550Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:43:20.480605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:20.480646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:20.480679Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:20.480816Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:20.480858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... lude":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"
i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk
":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]}
,"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; 2026-01-04T02:43:25.931307Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::ChunksV0MetaNormalizer [GOOD] Test command err: 2026-01-04T02:43:16.287854Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:16.320589Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:16.320838Z node 
1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:16.328344Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:16.328610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:16.328852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:16.328983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:16.329084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:16.329193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:16.329320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:16.329450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:16.329559Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:16.329656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:16.329777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:16.329932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:16.330054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:16.360253Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:16.360404Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:16.360467Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:16.360664Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:16.360836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:16.360908Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:16.360952Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:16.361060Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:16.361146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:16.361191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:16.361220Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:16.361397Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:16.361454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:16.361492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:16.361536Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:16.361619Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:16.361663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:16.361698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-04T02:43:16.361722Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:16.361765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:16.361821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:16.361850Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:16.361898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:16.361967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:16.362008Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:16.362242Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:16.362356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:16.362388Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:16.362530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:16.362576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:16.362602Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:16.362645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:16.362683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:16.362713Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-04T02:43:16.362750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:16.362783Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:16.362810Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:16.362920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:16.362971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... :{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-04T02:43:25.775163Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2026-01-04T02:43:25.775182Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:43:25.775202Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:43:25.775482Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:25.775578Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:25.775599Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:43:25.775663Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2026-01-04T02:43:25.775691Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2026-01-04T02:43:25.775775Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:320:2321];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2026-01-04T02:43:25.775852Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:25.775936Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:25.776051Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:25.776125Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:25.776184Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:25.776234Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:25.776472Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:322:2322] finished for tablet 9437184 2026-01-04T02:43:25.776877Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:320:2321];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.044},{"events":["l_task_result"],"t":0.619},{"events":["l_ProduceResults","f_Finish"],"t":0.62},{"events":["l_ack","l_processing","l_Finish"],"t":0.621}],"full":{"a":9331093,"name":"_full_task","f":9331093,"d_finished":0,"c":0,"l":9952286,"d":621193},"events":[{"name":"bootstrap","f":9331333,"d_finished":1280,"c":1,"l":9332613,"d":1280},{"a":9951873,"name":"ack","f":9375618,"d_finished":253785,"c":421,"l":9951828,"d":254198},{"a":9951867,"name":"processing","f":9332793,"d_finished":530301,"c":843,"l":9951830,"d":530720},{"name":"ProduceResults","f":9332178,"d_finished":429278,"c":1266,"l":9952002,"d":429278},{"a":9952006,"name":"Finish","f":9952006,"d_finished":0,"c":0,"l":9952286,"d":280},{"name":"task_result","f":9332814,"d_finished":268095,"c":422,"l":9950972,"d":268095}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:25.776954Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:320:2321];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:43:25.777297Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:320:2321];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack"],"t":0.044},{"events":["l_task_result"],"t":0.619},{"events":["l_ProduceResults","f_Finish"],"t":0.62},{"events":["l_ack","l_processing","l_Finish"],"t":0.621}],"full":{"a":9331093,"name":"_full_task","f":9331093,"d_finished":0,"c":0,"l":9952745,"d":621652},"events":[{"name":"bootstrap","f":9331333,"d_finished":1280,"c":1,"l":9332613,"d":1280},{"a":9951873,"name":"ack","f":9375618,"d_finished":253785,"c":421,"l":9951828,"d":254657},{"a":9951867,"name":"processing","f":9332793,"d_finished":530301,"c":843,"l":9951830,"d":531179},{"name":"ProduceResults","f":9332178,"d_finished":429278,"c":1266,"l":9952002,"d":429278},{"a":9952006,"name":"Finish","f":9952006,"d_finished":0,"c":0,"l":9952745,"d":739},{"name":"task_result","f":9332814,"d_finished":268095,"c":422,"l":9950972,"d":268095}],"id":"9437184::3"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:25.777364Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:43:25.153941Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2026-01-04T02:43:25.777402Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:43:25.777530Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:322:2322];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64 [GOOD] Test command err: 2026-01-04T02:42:50.410727Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:42:50.442188Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:42:50.442399Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at 
tablet 9437184 2026-01-04T02:42:50.449466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:42:50.449697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:42:50.449920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:42:50.450034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:42:50.450158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:42:50.450266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:42:50.450376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:42:50.450491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:42:50.450586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:42:50.450721Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:42:50.450843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:42:50.450961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:42:50.451054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:42:50.479706Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:42:50.479847Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:42:50.479923Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:42:50.480092Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:50.480252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:42:50.480331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:42:50.480372Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:42:50.480479Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:42:50.480539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:42:50.480585Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:42:50.480615Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:42:50.480790Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:50.480845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:42:50.480882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:42:50.480910Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:42:50.480996Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:42:50.481042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:42:50.481099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-04T02:42:50.481152Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:42:50.481206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:42:50.481244Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:42:50.481276Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:42:50.481314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:42:50.481349Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:42:50.481378Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:42:50.481593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:42:50.481653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:42:50.481692Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:42:50.481830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:42:50.481873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:42:50.481902Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:42:50.481966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:42:50.482016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:42:50.482055Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-04T02:42:50.482116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:42:50.482155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:42:50.482183Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:42:50.482304Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:42:50.482342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ge_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2026-01-04T02:43:25.539767Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=898; 2026-01-04T02:43:25.539828Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=61280; 2026-01-04T02:43:25.539875Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=61395; 2026-01-04T02:43:25.539943Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=12; 2026-01-04T02:43:25.540297Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=305; 2026-01-04T02:43:25.540344Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=62309; 2026-01-04T02:43:25.540506Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=108; 2026-01-04T02:43:25.540624Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=71; 2026-01-04T02:43:25.541033Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=365; 2026-01-04T02:43:25.541361Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=282; 2026-01-04T02:43:25.557163Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=15733; 2026-01-04T02:43:25.573139Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=15858; 2026-01-04T02:43:25.573259Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=21; 2026-01-04T02:43:25.573315Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=13; 2026-01-04T02:43:25.573357Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-04T02:43:25.573451Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=56; 2026-01-04T02:43:25.573500Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-04T02:43:25.573602Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=68; 2026-01-04T02:43:25.573642Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-04T02:43:25.573715Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=42; 2026-01-04T02:43:25.573825Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=52; 2026-01-04T02:43:25.573922Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=50; 2026-01-04T02:43:25.573963Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=104873; 2026-01-04T02:43:25.574098Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:43:25.574208Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:43:25.574262Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:43:25.574350Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:25.574401Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:43:25.574556Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:25.574616Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:43:25.574669Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:25.574713Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:25.574789Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494272861;tx_id=18446744073709551615;;current_snapshot_ts=1767494571884; 2026-01-04T02:43:25.574832Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:25.574877Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:25.574910Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:25.575000Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:25.575177Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.134000s; 2026-01-04T02:43:25.580325Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:43:25.581070Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:43:25.581132Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:25.581233Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:25.581293Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:25.581369Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494272861;tx_id=18446744073709551615;;current_snapshot_ts=1767494571884; 2026-01-04T02:43:25.581416Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:25.581466Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:25.581503Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:25.581579Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-04T02:43:25.581624Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:25.582518Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.025000s; 2026-01-04T02:43:25.582572Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> IndexBuildTest::MergeIndexTableShardsOnlyWhenReadyUniq [GOOD] >> IndexBuildTest::RejectsCancel >> THealthCheckTest::CLusterNotBootstrapped [GOOD] >> THealthCheckTest::BridgeTimeDifference ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKTimestamp [GOOD] Test command err: 2026-01-04T02:42:50.448362Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:42:50.479868Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:42:50.480182Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:42:50.487266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:42:50.487523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:42:50.487747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:42:50.487884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:42:50.488014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:42:50.488151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:42:50.488265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:42:50.488388Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:42:50.488511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:42:50.488658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:42:50.488802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:42:50.488923Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:42:50.489033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:42:50.514872Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:42:50.514964Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:42:50.515000Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:42:50.515148Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:50.515268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:42:50.515330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:42:50.515359Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:42:50.515422Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:42:50.515464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:42:50.515502Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:42:50.515532Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:42:50.515715Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:50.515776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:42:50.515827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:42:50.515861Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:42:50.515946Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:42:50.516012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:42:50.516057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:42:50.516086Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:42:50.516130Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:42:50.516186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:42:50.516218Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:42:50.516257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:42:50.516292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:42:50.516322Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:42:50.516531Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:42:50.516584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:42:50.516619Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:42:50.516723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:42:50.516759Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:42:50.516785Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:42:50.516870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:42:50.516918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:42:50.516970Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:42:50.517019Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:42:50.517055Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:42:50.517101Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:42:50.517231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:42:50.517272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... _stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=8; 2026-01-04T02:43:25.836509Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=821; 2026-01-04T02:43:25.836569Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=38942; 2026-01-04T02:43:25.836615Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=39040; 2026-01-04T02:43:25.836680Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=13; 2026-01-04T02:43:25.837065Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=332; 2026-01-04T02:43:25.837116Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=39863; 2026-01-04T02:43:25.837282Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=106; 2026-01-04T02:43:25.837447Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=79; 2026-01-04T02:43:25.837852Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=347; 2026-01-04T02:43:25.838127Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=232; 2026-01-04T02:43:25.847061Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8871; 2026-01-04T02:43:25.857013Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=9834; 2026-01-04T02:43:25.857113Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2026-01-04T02:43:25.857160Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2026-01-04T02:43:25.857190Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2026-01-04T02:43:25.857252Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=37; 2026-01-04T02:43:25.857286Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2026-01-04T02:43:25.857369Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=51; 2026-01-04T02:43:25.857409Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2026-01-04T02:43:25.857485Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=42; 2026-01-04T02:43:25.857557Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=41; 2026-01-04T02:43:25.857623Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=40; 2026-01-04T02:43:25.857653Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=67197; 2026-01-04T02:43:25.857755Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:43:25.857863Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:43:25.857908Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:43:25.857975Z node 1 :TX_COLUMNSHARD DEBUG: 
log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:25.858025Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:43:25.858136Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:25.858179Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:43:25.858208Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:25.858239Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:25.858289Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494272895;tx_id=18446744073709551615;;current_snapshot_ts=1767494571919; 2026-01-04T02:43:25.858324Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:25.858371Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:25.858412Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:25.858497Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:25.858699Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.142000s; 2026-01-04T02:43:25.861876Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:43:25.862232Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:43:25.862284Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:25.862357Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:25.862399Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:25.862449Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494272895;tx_id=18446744073709551615;;current_snapshot_ts=1767494571919; 2026-01-04T02:43:25.862486Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:25.862523Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:25.862558Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:25.862615Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-04T02:43:25.862669Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:25.863523Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.006000s; 2026-01-04T02:43:25.863570Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3184:5178];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::InsertedPortionsCleanerNormalizer >> THealthCheckTest::TestOnlyRequestNeededTablets [GOOD] >> THealthCheckTest::TestNodeDisconnected >> KqpLimits::CancelAfterRwTx+useSink [GOOD] >> KqpLimits::CancelAfterRwTx-useSink >> THealthCheckTest::IgnoreServerlessWhenNotSpecific [GOOD] >> THealthCheckTest::HealthCheckConfigUpdate >> TColumnShardTestReadWrite::WriteOverload-InStore [GOOD] >> TColumnShardTestReadWrite::ReadAggregate-SimpleReader >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenCommit-UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort+UseSink >> Normalizers::InsertedPortionsCleanerNormalizer [GOOD] >> Normalizers::EmptyTablesNormalizer >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot >> THealthCheckTest::StorageNoQuota [GOOD] >> THealthCheckTest::TestBootingTabletIsNotDead >> TColumnShardTestReadWrite::WriteReadExoticTypes >> TCmsTenatsTest::TestTenantLimit >> KqpStats::SysViewCancelled [GOOD] >> KqpTypes::DyNumberCompare >> THealthCheckTest::LayoutIncorrect [GOOD] >> THealthCheckTest::LayoutCorrect ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> 
TColumnShardTestReadWrite::WriteOverload-InStore [GOOD] Test command err: 2026-01-04T02:43:26.293597Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:26.316785Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:26.317023Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:26.325197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:26.325476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:26.325696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:26.325773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:26.325859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:26.325917Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:26.325996Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:26.326079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:26.326146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:26.326208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:26.326302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:26.326415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:26.326493Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:26.354349Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:26.354503Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:26.354554Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:26.354793Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:26.355013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:26.355114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:26.355172Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:26.355321Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:26.355409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:26.355495Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:26.355535Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:26.355753Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:26.355872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:26.355927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:26.355959Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:26.356076Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:26.356139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:26.356186Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:26.356214Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:26.356265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:26.356300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:26.356330Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:26.356375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:26.356418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:26.356468Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:26.356751Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:26.356823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:26.356873Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:26.357022Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:26.357065Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:26.357101Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:26.357149Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:26.357188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:26.357231Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:43:26.357292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:26.357343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:26.357380Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:26.357564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:26.357597Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... G: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:1 Blob count: 1 2026-01-04T02:43:31.205472Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=4;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:31.223979Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=26899ac2-e91711f0-8144223f-5b6f3ade; 2026-01-04T02:43:31.224271Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=188;count=3; 2026-01-04T02:43:31.224329Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=380;count=4;size_of_meta=112; 2026-01-04T02:43:31.224384Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=540;count=2;size_of_portion=192; 2026-01-04T02:43:31.225017Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at 
tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2026-01-04T02:43:31.225157Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=5;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=3;operation_id=2; 2026-01-04T02:43:31.237320Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:2 Blob count: 1 2026-01-04T02:43:31.237550Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=5;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:31.239429Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=27021006-e91711f0-ac360c7a-ffa64cc0; 2026-01-04T02:43:31.239670Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=282;count=5; 2026-01-04T02:43:31.239720Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=570;count=6;size_of_meta=112; 2026-01-04T02:43:31.239771Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=810;count=3;size_of_portion=192; 2026-01-04T02:43:31.240298Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2026-01-04T02:43:31.240411Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=6;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=4;operation_id=3; 2026-01-04T02:43:31.252513Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:3 Blob count: 1 2026-01-04T02:43:31.252735Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=6;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 
2026-01-04T02:43:31.254904Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=2769aeb4-e91711f0-852cf9f6-bd124943; 2026-01-04T02:43:31.255163Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=376;count=7; 2026-01-04T02:43:31.255216Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=760;count=8;size_of_meta=112; 2026-01-04T02:43:31.255269Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=1080;count=4;size_of_portion=192; 2026-01-04T02:43:31.255751Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2026-01-04T02:43:31.255865Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=7;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=5;operation_id=4; 2026-01-04T02:43:31.267762Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:4 Blob count: 1 2026-01-04T02:43:31.267979Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:31.287436Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=manager.cpp:227;event=register_operation;operation_id=5;last=5; 2026-01-04T02:43:31.287543Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;fline=write_queue.cpp:27;writing_size=6330728;operation_id=27fca49e-e91711f0-90f61713-6f3f5e9b;in_flight=1;size_in_flight=6330728; 2026-01-04T02:43:31.954200Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=9437184;event=TEvWrite;scope=TBuildBatchesTask::DoExecute;tablet_id=9437184;parent_id=[1:128:2159];write_id=5;path_id={internal: 1000000185, ss: 
1};fline=write_actor.cpp:24;event=actor_created;tablet_id=9437184;debug=size=8246112;count=1;actions=__DEFAULT,;waiting=1;; 2026-01-04T02:43:32.012394Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;fline=columnshard__write.cpp:88;writing_size=6330728;event=data_write_finished;writing_id=27fca49e-e91711f0-90f61713-6f3f5e9b; 2026-01-04T02:43:32.012634Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:48;memory_size=94;data_size=68;sum=470;count=9; 2026-01-04T02:43:32.012693Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_meta.cpp:66;memory_size=190;data_size=180;sum=950;count=10;size_of_meta=112; 2026-01-04T02:43:32.012754Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=constructor_portion.cpp:44;memory_size=270;data_size=260;sum=1350;count=5;size_of_portion=192; 2026-01-04T02:43:32.013229Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager on execute at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2026-01-04T02:43:32.013318Z node 1 :TX_COLUMNSHARD_WRITE DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::NPrivateEvents::NWrite::TEvWritePortionResult;tablet_id=9437184;event=TEvWritePortionResult;tablet_id=9437184;local_tx_no=8;method=execute;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=execute;fline=manager.h:213;event=add_write_id_to_operation_id;write_id=6;operation_id=5; 2026-01-04T02:43:32.025222Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Save Batch GenStep: 2:5 Blob count: 1 2026-01-04T02:43:32.025428Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=8;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramNoProjection >> IndexBuildTest::RejectsCancel [GOOD] >> IndexBuildTest::NullsAreUniq >> TColumnShardTestReadWrite::WriteStandalone >> Normalizers::EmptyTablesNormalizer [GOOD] >> THealthCheckTest::TestNodeDisconnected [GOOD] >> THealthCheckTest::TestStateStorageOk >> THealthCheckTest::ShardsLimit800 [GOOD] >> Normalizers::PortionsNormalizer >> TColumnShardTestReadWrite::ReadAggregate-SimpleReader [GOOD] >> THealthCheckTest::ShardsNoLimit [GOOD] >> TColumnShardTestReadWrite::ReadWithProgramNoProjection [GOOD] >> TCmsTest::TestKeepAvailableMode >> test_sql_streaming.py::test[solomon-ReadTopicGroupWriteToSolomon-default.txt] [FAIL] >> 
test_sql_streaming.py::test[watermarks-watermarks-default.txt] >> TCmsTenatsTest::TestTenantLimit [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunning [GOOD] >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowListKey-default.txt] >> THealthCheckTest::BridgeGroupDeadInBothPiles [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsLimit800 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:42:48.012779Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:48.013799Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:48.017847Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:685:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:42:48.113385Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:48.120166Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:48.121309Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2225], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:48.121631Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:48.121903Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:48.123983Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:48.124036Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:48.513693Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:48.612272Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:48.612414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:48.612826Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:48.612909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:48.673558Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:42:48.674080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:48.674393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:48.782209Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:48.808840Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:42:49.611826Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:49.611884Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:49.611940Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:49.612129Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-04T02:42:56.536921Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:56.538055Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:56.538507Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:680:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:42:56.548970Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:56.549593Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:56.552081Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:685:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:56.552444Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:56.552592Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:56.554482Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:56.554634Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:56.884500Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:56.986669Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:56.986779Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:56.987346Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:56.987412Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:57.031983Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-04T02:42:57.032521Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:57.032841Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:57.091169Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:57.116940Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:42:57.965057Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:57.965129Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:57.965167Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:57.965596Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-04T02:43:05.333688Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:05.334747Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:05.336967Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:43:05.346450Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:05.347686Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:05.349110Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:05.349461Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:05.349561Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:05.351358Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:05.351447Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:05.697911Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:05.789150Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:05.789272Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:05.790003Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:05.790064Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 N ... esActor] ActorId: [7:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:11.372336Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:11.372521Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:11.732903Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:11.810913Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:11.811068Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:11.858616Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:11.915855Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:12.789164Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:12.789235Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:12.789283Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:12.789719Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-04T02:43:17.105718Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:17.112974Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:17.118117Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:114:2161], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:17.118467Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:17.118746Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:17.377188Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:17.377348Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:17.419648Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:34:2081] 1767494593779774 != 1767494593779778 2026-01-04T02:43:17.425983Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:17.474364Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:17.544440Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:18.140821Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:18.140897Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:18.140944Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:18.141584Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-04T02:43:23.020478Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:23.026492Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:23.029286Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:23.029574Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:23.029746Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:23.446856Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:23.543477Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:23.543657Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:23.595917Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:23.679060Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:24.621676Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:24.621769Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:24.621848Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:24.622374Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-04T02:43:33.028230Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:33.029211Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:33.030517Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:687:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:43:33.041547Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:33.043734Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:33.045174Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:306:2229], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:33.045405Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:33.045631Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:33.047664Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:33.047770Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:33.413326Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:33.520322Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:33.520532Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:33.521108Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:33.521200Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:33.591750Z node 12 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 13 Cookie 13 2026-01-04T02:43:33.592740Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:33.593260Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:33.703777Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:33.718400Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:34.675326Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:34.675407Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:34.675454Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:34.676101Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataNestedDeep-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataWithFilter-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::ShardsNoLimit [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:42:45.542913Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:45.664867Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:45.674123Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:45.674620Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:45.674774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:46.114391Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:46.219269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:46.219401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:46.267503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:46.341036Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:42:47.130035Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:47.130090Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:47.130133Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:47.130647Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-04T02:42:50.589545Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:50.599050Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:530:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:50.599322Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:50.599456Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:50.899560Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:50.987465Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:50.987577Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:51.021695Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:51.088648Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:42:51.869556Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:51.869616Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:51.869653Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:51.870190Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-04T02:42:55.952423Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:55.956797Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:55.958512Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:444:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:55.958725Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:55.958893Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:56.324863Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:56.421219Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:56.421366Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:56.483472Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:56.521849Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:42:57.426940Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:57.427004Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:57.427043Z node 6 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:57.427494Z node 6 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-04T02:43:05.177260Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:05.177580Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:05.186728Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:05.187821Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:05.189502Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [8:498:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:05.189861Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:05.190015Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:05.190661Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:491:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:05.190931Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:05.191106Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:05.524628Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:05.623902Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:05.624056Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:05.624797Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:05.624880Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:05.669461Z node 8 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 9 Cookie 9 2026-01-04T02:43:05.670648Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:05.671084Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:05.732504Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:05.745629Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:06.603076Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:06.603150Z node 8 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:06.603193Z node 8 :NET_CLASSIFI ... 
_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:14.164665Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:14.164849Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:14.519290Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:14.620730Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:14.620872Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:14.621370Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:14.621454Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:14.692056Z node 10 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 11 Cookie 11 2026-01-04T02:43:14.692529Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:14.693007Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:14.816983Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:14.841069Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:15.641268Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:15.641326Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:15.641354Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:15.641864Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-04T02:43:23.888546Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:23.889688Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:23.891481Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:687:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:43:23.903164Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:23.905672Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:23.907252Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:306:2229], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:23.907452Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:23.907633Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:23.909601Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:23.909706Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:24.264853Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:24.369320Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:24.369463Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:24.370058Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:24.370150Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:24.439515Z node 12 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 13 Cookie 13 2026-01-04T02:43:24.440409Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:24.440734Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:24.540934Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:24.593692Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:25.326664Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:25.326731Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:25.326761Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:25.327167Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-04T02:43:34.103608Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:34.104559Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:34.104964Z node 15 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [15:680:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:43:34.114138Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:34.117107Z node 15 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:34.118725Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:685:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:34.119167Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:34.119416Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:34.121496Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:34.121603Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:34.487398Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:34.601416Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:34.601542Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:34.602577Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:34.602683Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:34.647660Z node 14 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2026-01-04T02:43:34.648472Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:34.648833Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:34.737042Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:34.751538Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:35.544981Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:35.545042Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:35.545070Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:35.545456Z node 14 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::EmptyTablesNormalizer [GOOD] Test command err: 2026-01-04T02:43:30.393967Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:30.426463Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:30.426700Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:30.433661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanInsertedPortions; 2026-01-04T02:43:30.433955Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:30.434197Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:30.434393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:30.434534Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:30.434651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:30.434785Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:30.434909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:30.435035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:30.435141Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:30.435299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:30.435429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:30.435565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:30.435686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:30.458419Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:30.458577Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=13;current_normalizer=CLASS_NAME=CleanInsertedPortions; 2026-01-04T02:43:30.458625Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2026-01-04T02:43:30.458910Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2026-01-04T02:43:30.459036Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=72; 2026-01-04T02:43:30.459133Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=42; 2026-01-04T02:43:30.459239Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=53; 2026-01-04T02:43:30.459472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanInsertedPortions;id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:30.459562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2026-01-04T02:43:30.459612Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2026-01-04T02:43:30.459754Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:30.459857Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:30.459900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:30.459923Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2026-01-04T02:43:30.460006Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:30.460054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:30.460086Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:30.460124Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:30.460267Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:30.460325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:30.460361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:30.460385Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:30.460483Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:30.460542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:30.460580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:30.460612Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:30.460655Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:30.460682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:30.460709Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:30.460739Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:30.460856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:30.460891Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:30.461071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:30.461107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:30.461141Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:30.461259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:30.461297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:30.461318Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cp ... 
ommon_data.cpp:29;PRECHARGE:column_enginesLoadingTime=18; 2026-01-04T02:43:35.068422Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:countersLoadingTime=90; 2026-01-04T02:43:35.068555Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:countersLoadingTime=68; 2026-01-04T02:43:35.068640Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:sharding_infoLoadingTime=32; 2026-01-04T02:43:35.068706Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:sharding_infoLoadingTime=27; 2026-01-04T02:43:35.068756Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=8; 2026-01-04T02:43:35.068819Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=22; 2026-01-04T02:43:35.068871Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=570; 2026-01-04T02:43:35.068992Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=62; 2026-01-04T02:43:35.069103Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=64; 2026-01-04T02:43:35.069250Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=104; 2026-01-04T02:43:35.069369Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=73; 2026-01-04T02:43:35.069556Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=143; 2026-01-04T02:43:35.080883Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=11264; 2026-01-04T02:43:35.080993Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2026-01-04T02:43:35.081054Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-04T02:43:35.081097Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-04T02:43:35.081193Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=56; 2026-01-04T02:43:35.081239Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-04T02:43:35.081351Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=74; 2026-01-04T02:43:35.081404Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-04T02:43:35.081498Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=55; 2026-01-04T02:43:35.081605Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=61; 2026-01-04T02:43:35.081716Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=58; 2026-01-04T02:43:35.081782Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=23306; 2026-01-04T02:43:35.081949Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 0 inserted {blob_bytes=0;raw_bytes=0;count=0;records=0} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:43:35.082082Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:43:35.082154Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:43:35.082236Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:35.082292Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:43:35.082355Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:35.082441Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:43:35.082524Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:35.082580Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:35.082630Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:35.082749Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:35.108611Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.157000s; 2026-01-04T02:43:35.109198Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:43:35.109306Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:43:35.109379Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:35.109470Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:43:35.109552Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:35.109624Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:35.109680Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:35.109801Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:35.110032Z node 2 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.195000s; 2026-01-04T02:43:35.110096Z node 2 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; 
2026-01-04T02:43:35.193075Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 111 scanId: 0 version: {1767494614273:111} readable: {1767494614273:max} at tablet 9437184 2026-01-04T02:43:35.193258Z node 2 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 111 scanId: 0 at tablet 9437184 2026-01-04T02:43:35.193329Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1767494614273:111};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=1;result=not_found; 2026-01-04T02:43:35.193450Z node 2 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[2:270:2279];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=111;scan_id=0;gen=0;table=;snapshot={1767494614273:111};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot build table metadata accessor for request: incorrect table name and table id for scan start: undefined::1;details=; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=uint64;records=0;count=0; FALLBACK_ACTOR_LOGGING;priority=INFO;component=2100;fline=simple_arrays_cache.h:65;event=slice_from_cache;key=string;records=0;count=0; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadAggregate-SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; 2026-01-04T02:43:33.057149Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:33.086541Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:33.086750Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:33.093001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:33.093257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:33.093497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:33.093640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:33.093768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:33.093904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:33.094015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:33.094129Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:33.094250Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:33.094355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:33.094453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:33.094572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:33.094734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:33.123324Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:33.123486Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:33.123556Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:33.123739Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:33.123932Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:33.124016Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:33.124059Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:33.124178Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:33.124252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:33.124292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:33.124324Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:33.124498Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:33.124583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:33.124639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:33.124688Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:33.124796Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:33.124848Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:33.124887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:33.124915Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:33.124958Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:33.124993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:33.125025Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:33.125062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:33.125096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:33.125129Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:33.125418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:33.125474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:33.125509Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:33.125658Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:33.125719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:33.125761Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:33.125837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:33.125897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:33.125944Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:43:33.125998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:33.126033Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:33.126079Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:33.126221Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:33.126273Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tab ... 
s=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-04T02:43:36.262810Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=1;finished=1; 2026-01-04T02:43:36.262844Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:43:36.262876Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:43:36.263254Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:36.263393Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:36.263433Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:43:36.263532Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2026-01-04T02:43:36.263581Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2026-01-04T02:43:36.263811Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:463:2475];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2026-01-04T02:43:36.263943Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:36.264052Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:36.264167Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:36.264401Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:36.264514Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:36.264620Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:36.264788Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:464:2476] finished for tablet 9437184 2026-01-04T02:43:36.265161Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:463:2475];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.008}],"full":{"a":3708922,"name":"_full_task","f":3708922,"d_finished":0,"c":0,"l":3717214,"d":8292},"events":[{"name":"bootstrap","f":3709166,"d_finished":908,"c":1,"l":3710074,"d":908},{"a":3716767,"name":"ack","f":3715618,"d_finished":954,"c":1,"l":3716572,"d":1401},{"a":3716756,"name":"processing","f":3710244,"d_finished":2417,"c":3,"l":3716574,"d":2875},{"name":"ProduceResults","f":3709769,"d_finished":1641,"c":6,"l":3717021,"d":1641},{"a":3717024,"name":"Finish","f":3717024,"d_finished":0,"c":0,"l":3717214,"d":190},{"name":"task_result","f":3710257,"d_finished":1419,"c":2,"l":3715276,"d":1419}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:36.265222Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:463:2475];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:43:36.265566Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:463:2475];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.006},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.008}],"full":{"a":3708922,"name":"_full_task","f":3708922,"d_finished":0,"c":0,"l":3717647,"d":8725},"events":[{"name":"bootstrap","f":3709166,"d_finished":908,"c":1,"l":3710074,"d":908},{"a":3716767,"name":"ack","f":3715618,"d_finished":954,"c":1,"l":3716572,"d":1834},{"a":3716756,"name":"processing","f":3710244,"d_finished":2417,"c":3,"l":3716574,"d":3308},{"name":"ProduceResults","f":3709769,"d_finished":1641,"c":6,"l":3717021,"d":1641},{"a":3717024,"name":"Finish","f":3717024,"d_finished":0,"c":0,"l":3717647,"d":623},{"name":"task_result","f":3710257,"d_finished":1419,"c":2,"l":3715276,"d":1419}],"id":"9437184::76"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:36.265642Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:43:36.254605Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=2;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2026-01-04T02:43:36.265676Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:43:36.265780Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:464:2476];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,19;column_names=i32,jsondoc;);;program_input=(column_ids=4,19;column_names=i32,jsondoc;);;; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::PortionsNormalizer [GOOD] >> Normalizers::RemoveDeleteFlagNormalizer >> test_sql_streaming.py::test[hop-GroupByHopExprKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopListKey-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::BridgeGroupDeadInBothPiles [GOOD] Test command err: 2026-01-04T02:42:42.541537Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331341536424212:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:42.541617Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot 
detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:42.788660Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:42.823480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:42.823626Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:42.842169Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:42.930769Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:42.934086Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331341536424099:2081] 1767494562534121 != 1767494562534124 2026-01-04T02:42:42.999078Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:43.126661Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:43.126687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:43.126704Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:43.126812Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:43.499080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:43.545615Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:45.703914Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331353241025330:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:45.703974Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:45.718697Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:45.802260Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591331353241025303:2081] 1767494565702967 != 1767494565702970 2026-01-04T02:42:45.810991Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:45.818202Z node 2 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:45.818253Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:45.838963Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:45.879563Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:45.879603Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:45.879612Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:45.879705Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:45.984326Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:46.081608Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) ... waiting for SysViewsRoster update finished 2026-01-04T02:42:54.513069Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:54.513998Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:54.514728Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:680:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:42:54.523277Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:54.523767Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:54.525912Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:685:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:54.526271Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:54.526393Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:54.528261Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:54.528428Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:54.852903Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:54.955581Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:54.955713Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:54.956760Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:54.956861Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:55.001977Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-04T02:42:55.002825Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:55.003215Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:55.087757Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:55.101182Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:42:55.983196Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:55.983272Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:55.983318Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:55.983838Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-04T02:43:03.480862Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:03.481666Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:03.483658Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:43:03.492058Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:03.493699Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:03.495318Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:03.495672Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:03.495799Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:03.497581Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026 ... orkload_manager/delayed_requests } 2026-01-04T02:43:24.363318Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:24.363706Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:24.366467Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:24.366614Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:24.746091Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:24.860471Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:24.860597Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:24.861616Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:24.861682Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:24.908893Z node 9 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2026-01-04T02:43:24.909907Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:24.910376Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:24.975108Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:24.988590Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:25.974293Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:25.974371Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:25.974415Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:25.974979Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: MAINTENANCE_REQUIRED issue_log { id: "ORANGE-af30-1231c6b1" status: ORANGE message: "Storage has no redundancy" location { database { name: "/Root" } } reason: "ORANGE-cf29-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "ORANGE-a3e2-1231c6b1-2147483648" status: ORANGE message: "Group dead in some piles" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "RED-1a83-1231c6b1-2147483649" type: "STORAGE_GROUP" level: 4 } issue_log { id: "ORANGE-cf29-1231c6b1-f7549920" status: ORANGE message: "Pool has no redundancy" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "ORANGE-a3e2-1231c6b1-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-1a83-1231c6b1-2147483649" status: RED message: "Groups failed" location { storage { pool { name: "/Root:test" group { id: "2147483649" pile { name: "1" } } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-9-2147483649-3-55-0-55" type: "BRIDGE_GROUP" level: 5 listed: 1 count: 1 } issue_log { id: "RED-9f89-1231c6b1-9-2147483649-3-55-0-55" status: RED message: "VDisks are not available" location { storage { node { id: 9 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483649-3-55-0-55" id: "2147483649-3-56-0-56" id: "2147483649-3-57-0-57" } pile { name: "1" } } } } database { name: "/Root" } } type: "VDISK" level: 6 listed: 3 count: 3 } issue_log { id: "ORANGE-5cc9-1231c6b1" status: ORANGE message: "Database has storage issues" location { database { name: "/Root" } } reason: "ORANGE-af30-1231c6b1" type: "DATABASE" level: 1 } location { id: 9 host: "::1" port: 12001 pile { name: "pile0" } } ... waiting for SysViewsRoster update finished 2026-01-04T02:43:34.535216Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:34.536342Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:34.538651Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:43:34.550178Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:34.551664Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:34.554269Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:34.554753Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:34.555019Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:34.557531Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:34.557611Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:34.929311Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:35.049044Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:35.049241Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:35.050834Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:35.050960Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:35.096487Z node 11 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2026-01-04T02:43:35.097553Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:35.098089Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:35.185744Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:35.200807Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:36.162536Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:36.162616Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:36.162656Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:36.163178Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: EMERGENCY issue_log { id: "RED-2f2e-1231c6b1" status: RED message: "Storage failed" location { database { name: "/Root" } } reason: "RED-40f1-1231c6b1-f7549920" type: "STORAGE" level: 2 } issue_log { id: "RED-6100-1231c6b1-2147483648" status: RED message: "Group dead in all piles" location { storage { pool { name: "/Root:test" group { id: "2147483648" } } } database { name: "/Root" } } reason: "RED-1a83-1231c6b1-2147483649" reason: "RED-1a83-1231c6b1-2147483650" type: "STORAGE_GROUP" level: 4 } issue_log { id: "RED-40f1-1231c6b1-f7549920" status: RED message: "Pool failed" location { storage { pool { name: "/Root:test" } } database { name: "/Root" } } reason: "RED-6100-1231c6b1-2147483648" type: "STORAGE_POOL" level: 3 } issue_log { id: "RED-1a83-1231c6b1-2147483650" status: RED message: "Groups failed" location { storage { pool { name: "/Root:test" group { id: "2147483650" pile { name: "2" } } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-11-2147483650-3-58-0-58" type: "BRIDGE_GROUP" level: 5 listed: 1 count: 1 } issue_log { id: "RED-1a83-1231c6b1-2147483649" status: RED message: "Groups failed" location { storage { pool { name: "/Root:test" group { id: "2147483649" pile { name: "1" } } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-11-2147483649-3-55-0-55" type: "BRIDGE_GROUP" level: 5 listed: 1 count: 1 } issue_log { id: "RED-9f89-1231c6b1-11-2147483650-3-58-0-58" status: RED message: "VDisks are not available" location { storage { node { id: 11 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483650-3-58-0-58" id: "2147483650-3-59-0-59" id: "2147483650-3-60-0-60" } pile { name: "2" } } } } database { name: "/Root" } } type: "VDISK" level: 6 listed: 3 count: 3 } issue_log { id: "RED-9f89-1231c6b1-11-2147483649-3-55-0-55" status: RED message: "VDisks are not available" location { storage { node { id: 11 host: "::1" port: 12001 } pool { name: "/Root:test" group { vdisk { id: "2147483649-3-55-0-55" id: "2147483649-3-56-0-56" id: "2147483649-3-57-0-57" } pile { name: "1" } } } } database { name: "/Root" } } type: "VDISK" level: 6 listed: 3 count: 3 } issue_log { id: "RED-5cc9-1231c6b1" status: RED message: "Database has storage issues" location { database { name: "/Root" } } reason: "RED-2f2e-1231c6b1" type: "DATABASE" level: 1 } location { id: 11 host: "::1" port: 12001 pile { name: "pile0" } } |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> THealthCheckTest::ProtobufBelowLimitFor10VdisksIssues [GOOD] >> THealthCheckTest::NoStoragePools ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadWithProgramNoProjection [GOOD] Test command err: 2026-01-04T02:43:35.670246Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:35.702430Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:35.702662Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:35.710081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:35.710319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:35.710562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:35.710699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:35.710815Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:35.710926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:35.711046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:35.711165Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:35.711268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:35.711399Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:35.711537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:35.711640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:35.711753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:35.740765Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: 
TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:35.740903Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:35.740951Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:35.741138Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:35.741324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:35.741428Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:35.741475Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:35.741582Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:35.741646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:35.741687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:35.741716Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:35.741921Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:35.741986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:35.742046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:35.742094Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:35.742200Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:35.742254Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:35.742292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:35.742324Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:35.742367Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:35.742400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:35.742432Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:35.742476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:35.742515Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:35.742563Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:35.742786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:35.742835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:35.742867Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:35.742981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:35.743023Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:35.743082Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:35.743131Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:35.743168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:35.743207Z node 1 :TX_COLUMNSHARD 
NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:43:35.743258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:35.743296Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:35.743327Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:35.743519Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:35.743562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... (column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=2;rows=100; 2026-01-04T02:43:36.585938Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=100;batch_columns=level,timestamp; 2026-01-04T02:43:36.586194Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:327:2339];bytes=1200;rows=100;faults=0;finished=0;fault=0;schema=level: int32 timestamp: timestamp[us]; 2026-01-04T02:43:36.586369Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:36.586502Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:36.586649Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:36.586762Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:36.586926Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:36.587109Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:36.587338Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:335:2347] finished for tablet 9437184 2026-01-04T02:43:36.587811Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:327:2339];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.012},{"events":["f_ack"],"t":0.013},{"events":["l_ProduceResults","f_Finish"],"t":0.014},{"events":["l_ack","l_processing","l_Finish"],"t":0.015}],"full":{"a":1374023,"name":"_full_task","f":1374023,"d_finished":0,"c":0,"l":1389119,"d":15096},"events":[{"name":"bootstrap","f":1374308,"d_finished":2493,"c":1,"l":1376801,"d":2493},{"a":1388450,"name":"ack","f":1387114,"d_finished":1269,"c":1,"l":1388383,"d":1938},{"a":1388439,"name":"processing","f":1377094,"d_finished":4686,"c":3,"l":1388389,"d":5366},{"name":"ProduceResults","f":1376079,"d_finished":2272,"c":6,"l":1388843,"d":2272},{"a":1388847,"name":"Finish","f":1388847,"d_finished":0,"c":0,"l":1389119,"d":272},{"name":"task_result","f":1377111,"d_finished":3357,"c":2,"l":1387012,"d":3357}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:36.587885Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:327:2339];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:43:36.588304Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:327:2339];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_ProduceResults"],"t":0.002},{"events":["f_processing","f_task_result"],"t":0.003},{"events":["l_task_result"],"t":0.012},{"events":["f_ack"],"t":0.013},{"events":["l_ProduceResults","f_Finish"],"t":0.014},{"events":["l_ack","l_processing","l_Finish"],"t":0.015}],"full":{"a":1374023,"name":"_full_task","f":1374023,"d_finished":0,"c":0,"l":1389645,"d":15622},"events":[{"name":"bootstrap","f":1374308,"d_finished":2493,"c":1,"l":1376801,"d":2493},{"a":1388450,"name":"ack","f":1387114,"d_finished":1269,"c":1,"l":1388383,"d":2464},{"a":1388439,"name":"processing","f":1377094,"d_finished":4686,"c":3,"l":1388389,"d":5892},{"name":"ProduceResults","f":1376079,"d_finished":2272,"c":6,"l":1388843,"d":2272},{"a":1388847,"name":"Finish","f":1388847,"d_finished":0,"c":0,"l":1389645,"d":798},{"name":"task_result","f":1377111,"d_finished":3357,"c":2,"l":1387012,"d":3357}],"id":"9437184::1"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:36.588373Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:43:36.497558Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=8392;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=8392;selected_rows=0; 2026-01-04T02:43:36.588419Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:43:36.588585Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[1:335:2347];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=1,9;column_names=saved_at,timestamp;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;program_input=(column_ids=1,5,9;column_names=level,saved_at,timestamp;);;; 2026-01-04T02:43:36.589079Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Finished read cookie: 1 at tablet 9437184 2026-01-04T02:43:36.589370Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: EvScan txId: 100 scanId: 0 version: {1767494616645:100} readable: {1767494616645:max} at tablet 9437184 2026-01-04T02:43:36.589486Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TTxScan prepare txId: 100 scanId: 0 at tablet 9437184 2026-01-04T02:43:36.589720Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1767494616645:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 100 } Function { Id: 1 Arguments { Id: 1 } Arguments { Id: 9 } } } } Command { Filter { Predicate { Id: 100 } } } ; 2026-01-04T02:43:36.589838Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1767494616645:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 100 } Function { Id: 1 Arguments { Id: 1 } Arguments { Id: 9 } } } } Command { Filter { Predicate { Id: 100 } } } ; 2026-01-04T02:43:36.589935Z node 1 :TX_COLUMNSHARD_SCAN WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::TEvDataShard::TEvKqpScan;tx_id=100;scan_id=0;gen=0;table=;snapshot={1767494616645:100};tablet=9437184;timeout=0.000000s;cpu_limits=Disabled;;fline=tx_scan.cpp:12;event=TTxScan failed;problem=cannot parse program;details=Can't parse SsaProgram: program has no projections; |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> IndexBuildTest::NullsAreUniq [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithNonePolicy [GOOD] >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy >> TCmsTest::TestKeepAvailableMode [GOOD] >> TCmsTest::TestForceRestartMode >> TCmsTest::PermissionDuration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::HealthCheckConfigUpdate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:42:47.595570Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:47.596245Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:47.599389Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:685:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:42:47.690493Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:47.695241Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:47.696075Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2225], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:47.696317Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:47.696520Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:47.697937Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:47.697987Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:48.052089Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:48.161298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:48.161405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:48.161831Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:48.161889Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:48.219474Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:42:48.220168Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:48.220540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:48.333865Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:48.346638Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:42:49.161157Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:49.161226Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:49.161274Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:49.161661Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-04T02:42:55.529440Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:55.529612Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:55.542518Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:55.543035Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:55.543122Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:55.544536Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:681:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:55.544744Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:55.544921Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:55.853838Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:55.954695Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:55.954822Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:55.955250Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:55.955319Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:56.024569Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-04T02:42:56.025155Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:56.025575Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:56.116663Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:56.130421Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:42:56.946474Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:56.946541Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:56.946580Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:56.947014Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-04T02:43:03.948187Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:03.948969Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:03.951163Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:43:03.961135Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:03.962946Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:03.964564Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:03.964886Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:03.964982Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:03.966455Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:03.966537Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:04.257705Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:04.349649Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:04.349761Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:04.350392Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:04.350469Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:04.394888Z node 5 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 6 Cookie 6 2026-01-04T02:43:04.395464Z node 5 :HIVE WARN: nod ... , broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:13.980009Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:13.980059Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:13.980488Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-7" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 7 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-8" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 8 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-7" reason: "YELLOW-7932-1231c6b1-8" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 7 host: "::1" port: 12001 } ... 
waiting for SysViewsRoster update finished 2026-01-04T02:43:21.848848Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:21.850015Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:21.850596Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:680:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:43:21.861736Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:21.863991Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:21.865434Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:685:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:21.866107Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:21.866393Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:21.868328Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:21.868451Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:22.237639Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:22.373088Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:22.373290Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:22.374417Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:22.374529Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:22.421038Z node 9 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2026-01-04T02:43:22.422084Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:22.422530Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:22.483737Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:22.524954Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:23.538997Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:23.539086Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:23.539148Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:23.539897Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-10" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 10 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-10" reason: "YELLOW-7932-1231c6b1-9" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 9 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2026-01-04T02:43:28.975011Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:28.982405Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:29.048902Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:29.049496Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:29.049729Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:29.483750Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:29.581120Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:29.581296Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:29.641018Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:29.739023Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:30.697949Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:30.698035Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:30.698122Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:30.698774Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-04T02:43:35.782717Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:35.789950Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:35.792806Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:35.793175Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:35.793372Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:36.203950Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:36.285829Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:36.285969Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:36.321301Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:36.380112Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:37.373270Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:37.373347Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:37.373399Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:37.375457Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> TCmsTest::StateStorageTwoRings >> TCmsTest::WalleRebootDownNode >> TPQTest::TestSourceIdDropBySourceIdCount [GOOD] >> TPQTest::TestTimeRetention ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> IndexBuildTest::NullsAreUniq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:42:27.094357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:42:27.094468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:27.094511Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:42:27.094555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:42:27.094592Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:42:27.094626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:42:27.094689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:27.094781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:42:27.095671Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:42:27.095979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:42:27.186917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:42:27.186990Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:27.201782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:42:27.202015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:42:27.202205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:42:27.211019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:42:27.211268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:42:27.212049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:27.212327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:42:27.215513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:27.215711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:42:27.217090Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:27.217155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:27.217271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:42:27.217322Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:27.217374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:42:27.217518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:42:27.352572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:27.353668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:27.353781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:27.353870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:27.353949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:27.354051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:27.354141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:27.354211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:27.354286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:27.354356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:27.354433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:27.354513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:27.354603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:27.354718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:27.354806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 2/2 2026-01-04T02:43:39.035372Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 2/2, is published: true 2026-01-04T02:43:39.035439Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [5:1014:2953] message: TxId: 281474976725761 2026-01-04T02:43:39.035499Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 2/2 2026-01-04T02:43:39.035540Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976725761:0 2026-01-04T02:43:39.035567Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976725761:0 2026-01-04T02:43:39.035627Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 4 2026-01-04T02:43:39.035657Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976725761:1 2026-01-04T02:43:39.035677Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976725761:1 2026-01-04T02:43:39.035705Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 34] was 3 2026-01-04T02:43:39.039907Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2026-01-04T02:43:39.039988Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976725761 2026-01-04T02:43:39.040054Z node 5 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 107, txId# 281474976725761 2026-01-04T02:43:39.040172Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1672:3542], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 
108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0}, txId# 281474976725761 2026-01-04T02:43:39.041847Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Unlocking 2026-01-04T02:43:39.041972Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Unlocking TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Unlocking, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1672:3542], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0} 2026-01-04T02:43:39.042035Z node 5 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-04T02:43:39.043592Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Done 2026-01-04T02:43:39.043705Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 107 Done TBuildInfo{ IndexBuildId: 107, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalUnique, IndexName: test_index, IndexColumn: index1, IndexColumn: index2, State: Done, SubState: UniqIndexValidation, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1672:3542], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 400, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0, Billed: UploadRows: 108 UploadBytes: 2768 ReadRows: 216 ReadBytes: 4848 CpuTimeUs: 0} 2026-01-04T02:43:39.043749Z node 5 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 107, subscribers count# 1 2026-01-04T02:43:39.043895Z node 5 
:FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-04T02:43:39.043960Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [5:1687:3557] TestWaitNotification: OK eventTxId 107 2026-01-04T02:43:39.044698Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 107 2026-01-04T02:43:39.044978Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 107 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 107 State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" index { name: "test_index" index_columns: "index1" index_columns: "index2" global_index { } } max_shards_in_flight: 2 ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } 2026-01-04T02:43:39.045733Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-04T02:43:39.046044Z node 5 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 328us result status StatusSuccess 2026-01-04T02:43:39.046615Z node 5 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableSchemaVersion: 3 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index1" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "index2" Type: "Uint32" TypeId: 2 Id: 3 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 4 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableIndexes { Name: "test_index" LocalPathId: 33 Type: EIndexTypeGlobalUnique State: EIndexStateReady KeyColumnNames: "index1" KeyColumnNames: "index2" SchemaVersion: 2 PathOwnerId: 72075186233409549 DataSize: 0 IndexImplTableDescriptions { } } TableSchemaVersion: 3 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 
RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 33 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |92.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> KqpLimits::OutOfSpaceBulkUpsertFail [GOOD] >> KqpLimits::OutOfSpaceYQLUpsertFail ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32 [GOOD] Test command err: 2026-01-04T02:43:01.297299Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:01.335476Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:01.335732Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:01.343836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:01.344118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:01.344340Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:01.344468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:01.344606Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:01.344744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:01.344859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:01.345003Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:01.345118Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:01.345238Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:01.345402Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:01.345509Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:01.345626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:01.377045Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:01.377186Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:01.377242Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:01.377469Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:01.377654Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:01.377749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:01.377796Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:01.377920Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:01.377985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:01.378036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:01.378068Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:01.378270Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:01.378360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:01.378410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:01.378443Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:01.378551Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:01.378599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:01.378684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:01.378714Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:01.378800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:01.378844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:01.378882Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:01.378925Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:01.378965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:01.379008Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:01.379260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:01.379322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:01.379361Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:01.379552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:01.379625Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:01.379660Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:01.379714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:01.379779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:01.379855Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:43:01.379910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:01.379969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:01.380008Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:01.380151Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:01.380198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... age_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=14; 2026-01-04T02:43:37.038032Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=967; 2026-01-04T02:43:37.038084Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=78735; 2026-01-04T02:43:37.038133Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=78864; 2026-01-04T02:43:37.038202Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=13; 2026-01-04T02:43:37.038577Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=327; 2026-01-04T02:43:37.038620Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=79829; 2026-01-04T02:43:37.038762Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=86; 2026-01-04T02:43:37.038883Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=70; 2026-01-04T02:43:37.039286Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=354; 2026-01-04T02:43:37.039637Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=304; 2026-01-04T02:43:37.054832Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=15127; 2026-01-04T02:43:37.069933Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=14987; 2026-01-04T02:43:37.070044Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=14; 2026-01-04T02:43:37.070108Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=14; 2026-01-04T02:43:37.070158Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2026-01-04T02:43:37.070234Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=41; 2026-01-04T02:43:37.070280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=8; 2026-01-04T02:43:37.070371Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=52; 2026-01-04T02:43:37.070414Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-04T02:43:37.070483Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=32; 2026-01-04T02:43:37.070569Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=47; 2026-01-04T02:43:37.070648Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=46; 2026-01-04T02:43:37.070686Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=121056; 2026-01-04T02:43:37.070820Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:43:37.070923Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:43:37.070976Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:43:37.071043Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:37.071087Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:43:37.071298Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:37.071354Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:43:37.071392Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:37.071435Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:37.071496Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494283696;tx_id=18446744073709551615;;current_snapshot_ts=1767494582762; 2026-01-04T02:43:37.071539Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:37.071582Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:37.071622Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:37.071711Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:37.071903Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.072000s; 2026-01-04T02:43:37.078957Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:43:37.079242Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:43:37.079299Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:37.079380Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:37.079434Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:37.079509Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494283696;tx_id=18446744073709551615;;current_snapshot_ts=1767494582762; 2026-01-04T02:43:37.079560Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:37.079628Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:37.079675Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:37.079751Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-04T02:43:37.079818Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:37.080615Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.011000s; 2026-01-04T02:43:37.080664Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3095:5089];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TCmsTest::TestKeepAvailableModeScheduled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteReadExoticTypes [GOOD] Test command err: 2026-01-04T02:43:34.409216Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:34.443521Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:34.443777Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:34.451283Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:34.451541Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:34.451750Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:34.451911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:34.452014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:34.452135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:34.452259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:34.452387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:34.452500Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:34.452616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:34.452746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:34.452880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:34.453000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:34.482856Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:34.482998Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:34.483052Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:34.483253Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:34.483414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:34.483498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:34.483555Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:34.483666Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:34.483729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:34.483804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:34.483834Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:34.484016Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:34.484083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:34.484125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:34.484160Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:34.484253Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:34.484305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:34.484345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:34.484374Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:34.484422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:34.484463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:34.484494Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-04T02:43:34.484547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:34.484610Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:34.484643Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:34.484862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:34.484913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:34.484956Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:34.485067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:34.485106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:34.485138Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:34.485179Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:34.485217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:34.485258Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:43:34.485309Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:34.485345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:34.485393Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:34.485570Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-01-04T02:43:34.485645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... (column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=10;rows=31; 2026-01-04T02:43:37.980073Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=31;batch_columns=timestamp,resource_type,resource_id,uid,level,message,json_payload,ingested_at,saved_at,request_id; 2026-01-04T02:43:37.980372Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:474:2485];bytes=2791;rows=31;faults=0;finished=0;fault=0;schema=timestamp: timestamp[us] resource_type: string resource_id: string uid: string level: int32 message: binary json_payload: binary ingested_at: timestamp[us] saved_at: timestamp[us] request_id: binary; 2026-01-04T02:43:37.980602Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:37.980774Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:37.980940Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:37.981159Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:37.981344Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:37.981517Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:37.981784Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [1:475:2486] finished for tablet 9437184 2026-01-04T02:43:37.982144Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[1:474:2485];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.012},{"events":["l_ack","l_processing","l_Finish"],"t":0.013}],"full":{"a":4051968,"name":"_full_task","f":4051968,"d_finished":0,"c":0,"l":4065255,"d":13287},"events":[{"name":"bootstrap","f":4052177,"d_finished":1320,"c":1,"l":4053497,"d":1320},{"a":4064552,"name":"ack","f":4062770,"d_finished":1613,"c":1,"l":4064383,"d":2316},{"a":4064534,"name":"processing","f":4053603,"d_finished":3930,"c":3,"l":4064386,"d":4651},{"name":"ProduceResults","f":4053123,"d_finished":2586,"c":6,"l":4064958,"d":2586},{"a":4064962,"name":"Finish","f":4064962,"d_finished":0,"c":0,"l":4065255,"d":293},{"name":"task_result","f":4053614,"d_finished":2268,"c":2,"l":4062601,"d":2268}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:37.982210Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[1:474:2485];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:43:37.982555Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[1:474:2485];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.01},{"events":["l_ProduceResults","f_Finish"],"t":0.012},{"events":["l_ack","l_processing","l_Finish"],"t":0.013}],"full":{"a":4051968,"name":"_full_task","f":4051968,"d_finished":0,"c":0,"l":4065662,"d":13694},"events":[{"name":"bootstrap","f":4052177,"d_finished":1320,"c":1,"l":4053497,"d":1320},{"a":4064552,"name":"ack","f":4062770,"d_finished":1613,"c":1,"l":4064383,"d":2723},{"a":4064534,"name":"processing","f":4053603,"d_finished":3930,"c":3,"l":4064386,"d":5058},{"name":"ProduceResults","f":4053123,"d_finished":2586,"c":6,"l":4064958,"d":2586},{"a":4064962,"name":"Finish","f":4064962,"d_finished":0,"c":0,"l":4065662,"d":700},{"name":"task_result","f":4053614,"d_finished":2268,"c":2,"l":4062601,"d":2268}],"id":"9437184::12"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:37.982609Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:43:37.966063Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=10;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=7928;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=7928;selected_rows=0; 2026-01-04T02:43:37.982641Z node 1 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:43:37.982822Z node 1 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: TEST_STEP=11;SelfId=[1:475:2486];TabletId=9437184;ScanId=0;TxId=103;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=resource_id,resource_type,timestamp,uid;);;ff=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;program_input=(column_ids=1,2,3,4,5,6,7,8,9,10;column_names=ingested_at,json_payload,level,message,request_id,resource_id,resource_type,saved_at,timestamp,uid;);;; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandalone [GOOD] >> TCmsTenatsTest::TestNoneTenantPolicy >> THealthCheckTest::LayoutCorrect [GOOD] >> TClusterInfoTest::DeviceId [GOOD] >> 
TClusterInfoTest::FillInfo [GOOD] >> TCmsTenatsTest::CollectInfo >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort+UseSink [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort-UseSink >> TCmsTenatsTest::TestClusterLimit >> TCmsTest::StateStorageTwoRings [GOOD] >> TCmsTest::StateStorageTwoBrokenRings ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt32_Reboot [GOOD] Test command err: 2026-01-04T02:42:51.874260Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:42:51.899028Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:42:51.899239Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:42:51.906424Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:42:51.906676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:42:51.906900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:42:51.907037Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:42:51.907173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:42:51.907294Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:42:51.907409Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:42:51.907522Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:42:51.907648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:42:51.907784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:42:51.907943Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:42:51.908057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:42:51.908167Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:42:51.932028Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:42:51.932158Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:42:51.932200Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:42:51.932341Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:51.932459Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:42:51.932538Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:42:51.932574Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:42:51.932645Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:42:51.932687Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:42:51.932719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:42:51.932747Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:42:51.932909Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:51.932969Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:42:51.933001Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 
2026-01-04T02:42:51.933031Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:42:51.933101Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:42:51.933140Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:42:51.933183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:42:51.933217Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:42:51.933249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:42:51.933284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:42:51.933311Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:42:51.933359Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:42:51.933385Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:42:51.933406Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:42:51.933577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:42:51.933616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:42:51.933648Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:42:51.933746Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:42:51.933781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:42:51.933823Z 
node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:42:51.933878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:42:51.933906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:42:51.933940Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:42:51.933975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:42:51.934000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:42:51.934021Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:42:51.934114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:42:51.934145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
d_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2026-01-04T02:43:37.769349Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=879; 2026-01-04T02:43:37.769402Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=53798; 2026-01-04T02:43:37.769446Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=53915; 2026-01-04T02:43:37.769507Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=12; 2026-01-04T02:43:37.769850Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=294; 2026-01-04T02:43:37.769894Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=54765; 2026-01-04T02:43:37.770041Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=95; 2026-01-04T02:43:37.770153Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=67; 2026-01-04T02:43:37.770552Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=353; 2026-01-04T02:43:37.770898Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=298; 2026-01-04T02:43:37.780717Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=9758; 2026-01-04T02:43:37.789391Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=8592; 2026-01-04T02:43:37.789471Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2026-01-04T02:43:37.789531Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2026-01-04T02:43:37.789563Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-04T02:43:37.789629Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=35; 2026-01-04T02:43:37.789660Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2026-01-04T02:43:37.789731Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=42; 2026-01-04T02:43:37.789760Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=3; 2026-01-04T02:43:37.789821Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=20; 2026-01-04T02:43:37.789897Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=41; 2026-01-04T02:43:37.789954Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=32; 2026-01-04T02:43:37.789981Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=83237; 2026-01-04T02:43:37.790098Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:43:37.790183Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:43:37.790227Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:43:37.790275Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:37.790307Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:43:37.790436Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:37.790482Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:43:37.790510Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2026-01-04T02:43:37.790542Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:37.790588Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494275944;tx_id=18446744073709551615;;current_snapshot_ts=1767494573401; 2026-01-04T02:43:37.790618Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:37.790664Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:37.790694Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:37.790770Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:37.790916Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.043000s; 2026-01-04T02:43:37.792952Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:43:37.793104Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:43:37.793142Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:37.793190Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:37.793227Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:37.793283Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494275944;tx_id=18446744073709551615;;current_snapshot_ts=1767494573401; 2026-01-04T02:43:37.793321Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:37.793356Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:37.793384Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:37.793435Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-04T02:43:37.793470Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:37.794036Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.193000s; 2026-01-04T02:43:37.794068Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TCmsTest::TestForceRestartMode [GOOD] >> TCmsTest::TestKeepAvailableModeDisconnects >> KqpTypes::DyNumberCompare [GOOD] >> KqpTypes::MultipleCurrentUtcTimestamp >> TCmsTenatsTest::TestScheduledPermissionWithDefaultPolicy [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartMode >> TCmsTest::PermissionDuration [GOOD] >> TCmsTest::RacyStartCollecting >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc >> TPQTest::TestTimeRetention [GOOD] >> TPQTest::TestStorageRetention >> TCmsTest::StateRequestUnknownNode >> Normalizers::RemoveDeleteFlagNormalizer [GOOD] >> TCmsTest::ManagePermissions >> TCmsTest::WalleRebootDownNode [GOOD] >> TCmsTest::WalleCleanupTest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::LayoutCorrect [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:42:47.791086Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:47.791715Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:47.794590Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:685:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:42:47.871821Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:47.876064Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:47.876767Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2225], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:47.876955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:47.877128Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:47.878385Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:47.878428Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:48.284317Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:48.388096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:48.388244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:48.388598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:48.388659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:48.445917Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:42:48.446527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:48.446899Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:48.561550Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:48.597956Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:42:49.377261Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:49.377330Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:49.377398Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:49.377624Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-04T02:42:55.394217Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:55.394888Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:55.395177Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:680:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:42:55.404771Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:55.405377Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:55.407000Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:685:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:55.407331Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:55.407428Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:55.408606Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:55.408702Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:55.724473Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:55.830132Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:55.830278Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:55.831071Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:55.831156Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:55.879897Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-04T02:42:55.880592Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:55.880949Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:55.949193Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:55.972314Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:42:56.766542Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:56.766607Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:56.766639Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:56.767068Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-04T02:43:03.971745Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:03.972768Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:03.974912Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:43:03.984603Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:03.986374Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:03.987907Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:03.988217Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:03.988309Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:03.989826Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:03.989910Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:04.274721Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:04.370146Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:04.370281Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:04.371087Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:04.371170Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 N ... eScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:12.368985Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:12.370207Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:687:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:12.370570Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:12.370802Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:12.372410Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [8:682:2349], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:12.372800Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:12.372942Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:12.752723Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:12.855877Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:12.856042Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:12.856565Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:12.856641Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:12.901956Z node 7 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 8 Cookie 8 2026-01-04T02:43:12.902869Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:12.903290Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:12.963180Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:12.998422Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:13.877139Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:13.877196Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:13.877226Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:13.877892Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-04T02:43:19.036057Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:19.042857Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:19.098045Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:450:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:19.098574Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:19.098811Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:19.476615Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:19.578816Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:19.578943Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:19.638702Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:19.756420Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:20.682927Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:20.682999Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:20.683067Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:20.683684Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:43:20.748720Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-04T02:43:32.134427Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:32.143094Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:32.146409Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:266:2224], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:32.146916Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:32.147132Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:32.549052Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:32.655614Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:32.655822Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:32.697692Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:32.761736Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:33.674015Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:33.674089Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:33.674134Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:33.674672Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-04T02:43:38.879700Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:38.886537Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:38.889058Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:38.889404Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:38.889606Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:39.321989Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:39.401245Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:39.401396Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:39.435555Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:39.505224Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:40.305887Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:40.305944Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:40.305977Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:40.306450Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> TCmsTest::TestKeepAvailableModeScheduled [GOOD] >> TCmsTest::TestKeepAvailableModeScheduledDisconnects >> TMaintenanceApiTest::SingleCompositeActionGroup >> TCmsTenatsTest::CollectInfo [GOOD] >> TCmsTenatsTest::RequestShutdownHost ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::WriteStandalone [GOOD] Test command err: 2026-01-04T02:43:35.694656Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:35.718286Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:35.718639Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:35.724896Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:35.725143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:35.725374Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:35.725492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:35.725584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:35.725678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:35.725777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:35.725918Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:35.726039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:35.726146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:35.726272Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:35.726398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:35.726520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:35.756307Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:35.756451Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:35.756507Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:35.756697Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:35.756873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:35.756976Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:35.757032Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:35.757132Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:35.757209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:35.757259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:35.757319Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:35.757535Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:35.757605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:35.757648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:35.757680Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:35.757771Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:35.757858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:35.757905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:35.757939Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:35.758000Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:35.758040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 
2026-01-04T02:43:35.758075Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:35.758125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:35.758164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:35.758198Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:35.758497Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:35.758580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:35.758649Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:35.758781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:35.758834Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:35.758866Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:35.758925Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:35.758966Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:35.759009Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:43:35.759052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:35.759078Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:35.759099Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 
2026-01-04T02:43:35.759193Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:35.759226Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... lude":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"21,21,21,21,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"22,22,22,22,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"23,23,23,23,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"24,24,24,24,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"25,25,25,25,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"26,26,26,26,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"27,27,27,27,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"28,28,28,28,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"29,29,29,29,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"30,30,30,30,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"31,31,31,31,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"32,32,32,32,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"33,33,33,33,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"34,34,34,34,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"35,35,35,35,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"36,36,36,36,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"37,37,37,37,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"38,38,38,38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"39,39,39,39,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"40,40,40,40,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"41,41,41,41,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"fi
nishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"42,42,42,42,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"43,43,43,43,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"44,44,44,44,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"45,45,45,45,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"46,46,46,46,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"47,47,47,47,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"48,48,48,48,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"49,49,49,49,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"50,50,50,50,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"51,51,51,51,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"52,52,52,52,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"53,53,53,53,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"54,54,54,54,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"55,55,55,55,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"56,56,56,56,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"57,57,57,57,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"58,58,58,58,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"59,59,59,59,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"60,60,60,60,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"61,61,61,61,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"62,62,62,62,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"63,63,63,63,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"64,64,64,64,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"65,65,65,65,"}},{"i":{"txs":[],"starts":[{"inc":{"count_inc
lude":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"66,66,66,66,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"67,67,67,67,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"68,68,68,68,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"69,69,69,69,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"70,70,70,70,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"71,71,71,71,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"72,72,72,72,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"73,73,73,73,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"74,74,74,74,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"75,75,75,75,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"76,76,76,76,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"77,77,77,77,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"78,78,78,78,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"79,79,79,79,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"80,80,80,80,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"81,81,81,81,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"82,82,82,82,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"83,83,83,83,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"84,84,84,84,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"85,85,85,85,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"86,86,86,86,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"87,87,87,87,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"88,88,88,88,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"89,89,89,89,"}},{"i":{"txs":[],"starts
":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"90,90,90,90,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"91,91,91,91,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"92,92,92,92,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"93,93,93,93,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"94,94,94,94,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"95,95,95,95,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"96,96,96,96,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"97,97,97,97,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"98,98,98,98,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":1}],"finishes":[{"inc":{"count_include":1},"id":1}]},"p":{"include":0,"pk":"99,99,99,99,"}}]}; 2026-01-04T02:43:40.794865Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;local_tx_no=7;method=complete;tx_info=TTxBlobsWritingFinished;tablet_id=9437184;tx_state=complete;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TCmsTenatsTest::TestNoneTenantPolicy [GOOD] >> TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost >> TCmsTest::TestKeepAvailableModeDisconnects [GOOD] >> TCmsTest::TestForceRestartModeDisconnects >> TPQTest::TestStorageRetention [GOOD] >> TPQTest::TestStatusWithMultipleConsumers >> TCmsTenatsTest::TestClusterLimit [GOOD] >> TCmsTest::RacyStartCollecting [GOOD] >> TCmsTest::PriorityRange >> TCmsTenatsTest::TestClusterRatioLimit >> TCmsTest::StateStorageTwoBrokenRings [GOOD] >> TCmsTest::StateStorageRollingRestart >> TCmsTenatsTest::TestTenantLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled >> TCmsTest::ActionWithZeroDuration >> TCmsTest::StateRequestUnknownNode [GOOD] >> TCmsTest::StateStorageNodesFromOneRing >> TCmsTest::ManagePermissions [GOOD] >> TCmsTest::ManagePermissionWrongRequest >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestMirror3dc [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction >> TPQTest::TestStatusWithMultipleConsumers [GOOD] >> TPQTest::TestTabletRestoreEventsOrder >> TCmsTest::WalleCleanupTest [GOOD] >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> Normalizers::RemoveDeleteFlagNormalizer [GOOD] Test command err: 2026-01-04T02:43:36.553626Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:36.585897Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:36.586131Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:36.593583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=EmptyPortionsCleaner; 2026-01-04T02:43:36.593880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=LeakedBlobsNormalizer; 2026-01-04T02:43:36.594014Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:36.594217Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:36.594376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:36.594485Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:36.594596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:36.594729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:36.594841Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:36.594946Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:36.595081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:36.595345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:36.595474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:36.595583Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 
2026-01-04T02:43:36.595690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:127:2158];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:36.626116Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:36.626378Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=14;current_normalizer=CLASS_NAME=EmptyPortionsCleaner; 2026-01-04T02:43:36.626446Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2026-01-04T02:43:36.626777Z node 1 :TX_COLUMNSHARD CRIT: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_empty.cpp:323;tasks_for_remove=0;distribution=; 2026-01-04T02:43:36.626930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=EmptyPortionsCleaner;id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:36.627009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:36.627064Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=NO_VALUE_OPTIONAL;type=NO_VALUE_OPTIONAL; 2026-01-04T02:43:36.627382Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TablesLoadingTime=46; 2026-01-04T02:43:36.627466Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetLoadingTime=43; 2026-01-04T02:43:36.627563Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;TableVersionssLoadingTime=47; 2026-01-04T02:43:36.627649Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=common_data.cpp:29;SchemaPresetVersionsLoadingTime=37; 2026-01-04T02:43:36.627735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=LeakedBlobsNormalizer;id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:36.627804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Granules;id=Granules; 2026-01-04T02:43:36.627845Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=1;type=Granules; 2026-01-04T02:43:36.627982Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:36.628046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 
2026-01-04T02:43:36.628084Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:36.628125Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=2;type=Chunks; 2026-01-04T02:43:36.628244Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:36.628302Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:36.628362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:36.628402Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:36.628596Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:36.628673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:36.628722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:36.628753Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:36.628895Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:36.628951Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:36.628990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:36.629086Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:36.629139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:36.629181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:36.629231Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:36.629364Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:36.629422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:36.629457Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=NO_VALUE_OPTIONAL;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:36.629672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline ... ernal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:1;); 2026-01-04T02:43:42.188789Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=1;count=77;finished=1; 2026-01-04T02:43:42.188817Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:43:42.188857Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:43:42.189411Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:42.189544Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:77;schema=key1: uint64 key2: uint64 field: string;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:42.189576Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:43:42.189683Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready 
result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=3;rows=77; 2026-01-04T02:43:42.189730Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=77;batch_columns=key1,key2,field; 2026-01-04T02:43:42.189944Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:328:2329];bytes=130200;rows=1085;faults=0;finished=0;fault=0;schema=key1: uint64 key2: uint64 field: string; 2026-01-04T02:43:42.190069Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:42.190202Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:42.190389Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:42.190541Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:43:42.190633Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:42.190729Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:42.191062Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [2:330:2330] finished for tablet 9437184 2026-01-04T02:43:42.191643Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[2:328:2329];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.08},{"events":["l_task_result"],"t":0.767},{"events":["l_ProduceResults","f_Finish"],"t":0.769},{"events":["l_ack","l_processing","l_Finish"],"t":0.77}],"full":{"a":5316320,"name":"_full_task","f":5316320,"d_finished":0,"c":0,"l":6086546,"d":770226},"events":[{"name":"bootstrap","f":5316577,"d_finished":1829,"c":1,"l":5318406,"d":1829},{"a":6085930,"name":"ack","f":5396857,"d_finished":300719,"c":421,"l":6085837,"d":301335},{"a":6085911,"name":"processing","f":5318655,"d_finished":636458,"c":843,"l":6085840,"d":637093},{"name":"ProduceResults","f":5317880,"d_finished":519919,"c":1266,"l":6086157,"d":519919},{"a":6086162,"name":"Finish","f":6086162,"d_finished":0,"c":0,"l":6086546,"d":384},{"name":"task_result","f":5318679,"d_finished":326021,"c":422,"l":6084285,"d":326021}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:42.191722Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[2:328:2329];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:43:42.192267Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[2:328:2329];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["f_ProduceResults"],"t":0.001},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.002},{"events":["f_ack"],"t":0.08},{"events":["l_task_result"],"t":0.767},{"events":["l_ProduceResults","f_Finish"],"t":0.769},{"events":["l_ack","l_processing","l_Finish"],"t":0.77}],"full":{"a":5316320,"name":"_full_task","f":5316320,"d_finished":0,"c":0,"l":6087178,"d":770858},"events":[{"name":"bootstrap","f":5316577,"d_finished":1829,"c":1,"l":5318406,"d":1829},{"a":6085930,"name":"ack","f":5396857,"d_finished":300719,"c":421,"l":6085837,"d":301967},{"a":6085911,"name":"processing","f":5318655,"d_finished":636458,"c":843,"l":6085840,"d":637725},{"name":"ProduceResults","f":5317880,"d_finished":519919,"c":1266,"l":6086157,"d":519919},{"a":6086162,"name":"Finish","f":6086162,"d_finished":0,"c":0,"l":6087178,"d":1016},{"name":"task_result","f":5318679,"d_finished":326021,"c":422,"l":6084285,"d":326021}],"id":"9437184::4"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:43:42.192361Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:43:41.419118Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=3;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=2488696;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=2488696;selected_rows=0; 2026-01-04T02:43:42.192429Z node 2 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:43:42.192602Z node 2 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[2:330:2330];TabletId=9437184;ScanId=0;TxId=111;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2;column_names=key1,key2;);;ff=(column_ids=1,2,3;column_names=field,key1,key2;);;program_input=(column_ids=1,2,3;column_names=field,key1,key2;);;; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TDowntimeTest::AddDowntime [GOOD] >> TDowntimeTest::HasUpcomingDowntime [GOOD] >> TDowntimeTest::SetIgnoredDowntimeGap [GOOD] >> TDowntimeTest::CleanupOldSegments [GOOD] >> TPQTest::TestTabletRestoreEventsOrder [GOOD] >> TCmsTest::TestKeepAvailableModeScheduledDisconnects [GOOD] >> TCmsTest::TestLoadLog >> THealthCheckTest::NoStoragePools [GOOD] |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TDowntimeTest::CleanupOldSegments [GOOD] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageNodesFromOneRing [GOOD] >> TCmsTest::StateStorageAvailabilityMode >> 
TCmsTenatsTest::TestDefaultTenantPolicyWithSingleTenantHost [GOOD] >> TCmsTenatsTest::TestLimitsWithDownNode >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] >> TCmsTest::TestForceRestartModeDisconnects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::TestTabletRestoreEventsOrder [GOOD] Test command err: 2026-01-04T02:39:53.917013Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-04T02:39:53.973951Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:53.974011Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:53.974050Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:53.974107Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-04T02:39:53.989425Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:54.005120Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } 2026-01-04T02:39:54.006104Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-04T02:39:54.008596Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-04T02:39:54.010869Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [1:190:2142] 2026-01-04T02:39:54.012734Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 1 generation 2 [1:190:2142] 2026-01-04T02:39:54.020091Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 
'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:39:54.020523Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|fded3029-4d1dc2dd-d8053901-1ab0c554_0 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 100 2026-01-04T02:39:54.033495Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:39:54.033953Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|cb471b3-bdc64cde-6e555f40-6a7d5522_1 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:39:54.044515Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:39:54.044812Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|bb833803-d88717e2-4b81d4ee-53289f2a_2 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 2000 2026-01-04T02:39:54.058545Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:39:54.058856Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|b9edeb9c-6535e14f-68e4c88a-f49691ff_3 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 2000 2026-01-04T02:39:54.060018Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:39:54.062084Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|a4288b89-e61c4e1a-d92a6599-4fda2413_4 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default 2026-01-04T02:39:54.086697Z node 1 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 1 requestId: error: new GetOwnership request needed for owner 2026-01-04T02:39:54.087014Z node 1 :PERSQUEUE INFO: ownerinfo.cpp:30: new Cookie default|ba01ffc8-732b8502-d60642a-217f8356_5 generated for partition 0 topic 'rt3.dc1--asdfgs--topic' owner default Got start offset = 2000 2026-01-04T02:39:54.472697Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:108:2057] recipient: [2:106:2138] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:113:2057] recipient: [2:106:2138] 2026-01-04T02:39:54.509438Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:54.509500Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:54.509562Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:54.509614Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [2:154:2057] recipient: [2:152:2172] IGNORE Leader for TabletID 72057594037927938 is 
[0:0:0] sender: [2:154:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927938 is [2:158:2176] sender: [2:159:2057] recipient: [2:152:2172] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:182:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:112:2142]) on event NKikimr::TEvPersQueue::TEvUpdateConfigBuilder ! Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:184:2057] recipient: [2:104:2137] Leader for TabletID 72057594037927937 is [2:112:2142] sender: [2:187:2057] recipient: [2:186:2195] Leader for TabletID 72057594037927937 is [2:188:2196] sender: [2:189:2057] recipient: [2:186:2195] 2026-01-04T02:39:54.551408Z node 2 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:39:54.551486Z node 2 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:39:54.551559Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:54.551623Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info !Reboot 72057594037927937 (actor [2:112:2142]) rebooted! 2026-01-04T02:39:54.562191Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 !Reboot 72057594037927937 (actor [2:112:2142]) tablet resolver refreshed! new actor is[2:188:2196] 2026-01-04T02:39:54.624285Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:54.655390Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:54.665932Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:54.697015Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:54.748545Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:54.779662Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:54.923705Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:54.934335Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:55.214669Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:55.286850Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:55.637841Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 
2026-01-04T02:39:55.804634Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:39:55.990107Z node 2 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 2 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [2:188:2196] sender: [2:268:2057] recipient: [2:14:2061] 2026-01-04T02:39:56.120413Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:39:56.121583Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 2 actor [2:180:2192] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "rt3.dc1--asdfgs--topic" Version: 2 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 2 } 2026-01-04T02:39:56.122914Z node 2 :PERSQUEUE INFO: partition_ini ... 2026-01-04T02:43:44.760180Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:590:2578] connected; active server actors: 1 2026-01-04T02:43:44.762225Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:595:2583] connected; active server actors: 1 2026-01-04T02:43:44.764233Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:600:2588] connected; active server actors: 1 2026-01-04T02:43:44.766115Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:605:2593] connected; active server actors: 1 2026-01-04T02:43:44.768136Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:610:2598] connected; active server actors: 1 2026-01-04T02:43:44.770095Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:615:2603] connected; active server actors: 1 2026-01-04T02:43:44.772056Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:620:2608] connected; active server actors: 1 2026-01-04T02:43:44.774030Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:625:2613] connected; active server actors: 1 2026-01-04T02:43:44.775894Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:630:2618] connected; active server actors: 1 2026-01-04T02:43:44.777940Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:635:2623] connected; active server actors: 1 2026-01-04T02:43:44.780148Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:640:2628] 
connected; active server actors: 1 2026-01-04T02:43:44.782582Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:645:2633] connected; active server actors: 1 2026-01-04T02:43:44.784857Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:650:2638] connected; active server actors: 1 2026-01-04T02:43:44.786864Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:655:2643] connected; active server actors: 1 2026-01-04T02:43:44.788740Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:660:2648] connected; active server actors: 1 2026-01-04T02:43:44.790536Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:665:2653] connected; active server actors: 1 2026-01-04T02:43:44.792262Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:670:2658] connected; active server actors: 1 2026-01-04T02:43:44.794155Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:675:2663] connected; active server actors: 1 2026-01-04T02:43:44.796319Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:680:2668] connected; active server actors: 1 2026-01-04T02:43:44.798244Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:685:2673] connected; active server actors: 1 2026-01-04T02:43:44.800262Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:690:2678] connected; active server actors: 1 2026-01-04T02:43:44.802030Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:695:2683] connected; active server actors: 1 2026-01-04T02:43:44.803733Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:700:2688] connected; active server actors: 1 2026-01-04T02:43:44.806110Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:705:2693] connected; active server actors: 1 2026-01-04T02:43:44.808069Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:710:2698] connected; active server actors: 1 2026-01-04T02:43:44.809983Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:715:2703] connected; active server actors: 1 2026-01-04T02:43:44.811682Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:720:2708] connected; active server actors: 1 2026-01-04T02:43:44.813479Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:725:2713] connected; active server actors: 1 2026-01-04T02:43:44.815809Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:730:2718] connected; active server actors: 1 2026-01-04T02:43:44.818070Z 
node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:735:2723] connected; active server actors: 1 2026-01-04T02:43:44.819897Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:740:2728] connected; active server actors: 1 2026-01-04T02:43:44.821678Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:745:2733] connected; active server actors: 1 2026-01-04T02:43:44.823966Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:750:2738] connected; active server actors: 1 2026-01-04T02:43:44.826282Z node 156 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [156:755:2743], now have 1 active actors on pipe 2026-01-04T02:43:44.827414Z node 156 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [156:758:2746], now have 1 active actors on pipe 2026-01-04T02:43:44.828362Z node 156 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [156:761:2749], now have 1 active actors on pipe 2026-01-04T02:43:44.829235Z node 156 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72057594037927938][rt3.dc1--topic] pipe [156:764:2752] connected; active server actors: 1 2026-01-04T02:43:45.359278Z node 157 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 157 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:43:45.402082Z node 157 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:43:45.402145Z node 157 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:43:45.402217Z node 157 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:43:45.402278Z node 157 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:43:45.436744Z node 157 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:43:45.436812Z node 157 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:43:45.436878Z node 157 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:43:45.436940Z node 157 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:43:45.437215Z node 157 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 157 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:43:45.439752Z node 157 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:43:45.440824Z node 157 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 163 actor [157:182:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 PartitionIds: 1 TopicName: "topic" Version: 163 LocalDC: true Topic: "topic" TopicPath: 
"/topic" YcCloudId: "somecloud" YcFolderId: "somefolder" YdbDatabaseId: "PQ" YdbDatabasePath: "/Root/PQ" Partitions { PartitionId: 0 } Partitions { PartitionId: 1 } FederationAccount: "federationAccount" MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } AllPartitions { PartitionId: 1 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 163 } Consumers { Name: "aaa" Generation: 163 Important: true } 2026-01-04T02:43:45.441612Z node 157 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [157:249:2196] 2026-01-04T02:43:45.442598Z node 157 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 3 [157:249:2196] 2026-01-04T02:43:45.443794Z node 157 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [157:251:2196] 2026-01-04T02:43:45.444412Z node 157 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 3 [157:251:2196] 2026-01-04T02:43:45.472316Z node 157 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:43:45.472387Z node 157 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:43:45.473654Z node 157 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:43:45.473712Z node 157 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:43:45.474424Z node 157 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [157:327:2254] 2026-01-04T02:43:45.475412Z node 157 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][1][StateInit] bootstrapping 1 [157:329:2254] 2026-01-04T02:43:45.479381Z node 157 :PERSQUEUE INFO: partition_init.cpp:1016: [topic:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:43:45.479616Z node 157 :PERSQUEUE INFO: partition_init.cpp:1016: [topic:1:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-01-04T02:43:45.479753Z node 157 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'topic' partition 0 generation 4 [157:327:2254] 2026-01-04T02:43:45.480237Z node 157 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][1][StateInit] init complete for topic 'topic' partition 1 generation 4 [157:329:2254] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest >> TCmsTest::PriorityRange [GOOD] >> TCmsTest::PriorityLocks >> TCmsTest::TestLoadLog [GOOD] >> TCmsTest::TestLogOperationsRollback >> TCmsTenatsTest::TestClusterRatioLimit [GOOD] >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy >> THealthCheckTest::BridgeTimeDifference [GOOD] >> TCmsTest::RequestRestartServicesOk >> TCmsTest::ActionWithZeroDuration [GOOD] >> TCmsTest::AllVDisksEvictionInRack |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestForceRestartModeDisconnects [GOOD] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnUnsupportedAction [GOOD] >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::NoStoragePools [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:42:47.984633Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:47.985606Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:47.989985Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:685:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:42:48.116549Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:48.125378Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:48.126509Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2225], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:48.126803Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:48.127041Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:48.128963Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:48.129041Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:48.568216Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:48.679702Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:48.679836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:48.680235Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:48.680294Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:48.737292Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:42:48.737994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:48.738387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:48.775678Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:48.852114Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:42:49.649675Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:49.649726Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:49.649757Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:49.650152Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-04T02:42:56.268575Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:56.269684Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:56.270143Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:680:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:42:56.280207Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:56.280764Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:56.283351Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:685:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:56.283744Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:56.283911Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:56.285462Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:56.285596Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:56.559689Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:56.660017Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:56.660141Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:56.660829Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:56.660901Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:56.705307Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-04T02:42:56.706009Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:56.706376Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:56.761490Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:56.786316Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:42:57.615524Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:57.615579Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:57.615607Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:57.615945Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-04T02:43:05.292071Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:05.293940Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:05.305037Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:05.305397Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:05.307429Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:498:2403], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:05.307726Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:05.307846Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:05.309408Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:491:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:05.309626Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:05.309908Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:05.634005Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:05.730634Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:05.730769Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:05.731240Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:05.731310Z node 5 :HIVE W ... 01-04T02:43:25.636791Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:25.638591Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:25.638680Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:26.009038Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:26.116412Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:26.116577Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:26.117468Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:26.117556Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:26.162729Z node 9 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 10 Cookie 10 2026-01-04T02:43:26.163494Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:26.163920Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:26.222902Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:26.264436Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:27.223308Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:27.223380Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:27.223430Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:27.223949Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-9" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 9 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-10" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 10 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-10" reason: "YELLOW-7932-1231c6b1-9" type: "COMPUTE" level: 2 } issue_log { id: "RED-a838-9-9-42" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 9 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "9-42" path: "/home/runner/.ya/build/build_root/c5zo/002a42/r3tmp/tmpUH5W2E/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a838-9-9-43" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 9 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "9-43" path: "/home/runner/.ya/build/build_root/c5zo/002a42/r3tmp/tmpUH5W2E/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "RED-a838-9-9-44" status: RED message: "PDisk state is FAULTY" location { storage { node { id: 9 host: "::1" port: 12001 } pool { group { vdisk { pdisk { id: "9-44" path: "/home/runner/.ya/build/build_root/c5zo/002a42/r3tmp/tmpUH5W2E/pdisk_1.dat" } } } } } } type: "PDISK" level: 6 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 9 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2026-01-04T02:43:36.220125Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:36.221230Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:36.223498Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:43:36.234975Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:36.236524Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:36.238981Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:36.239418Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:36.239674Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:36.242292Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:36.242401Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:36.631674Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:36.745069Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:36.745234Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:36.746406Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:36.746499Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:36.791564Z node 11 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2026-01-04T02:43:36.792571Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:36.793051Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:36.854469Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:36.905309Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:37.848072Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:37.848143Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:37.848178Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:37.848563Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-04T02:43:43.414808Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:43.420126Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:43.453715Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:43.454081Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:43.454281Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:43.849637Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:43.933598Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:43.933853Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:43.968999Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:44.028044Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:44.935437Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:44.935526Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:44.935584Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:44.936317Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> TCmsTest::ManagePermissionWrongRequest [GOOD] >> TCmsTest::ManageRequests >> TCmsTest::VDisksEvictionShouldFailWhileSentinelIsDisabled [GOOD] >> TCmsTest::WalleDisableMaintenance >> TMaintenanceApiTest::SingleCompositeActionGroup [GOOD] >> TMaintenanceApiTest::SimplifiedMirror3DC >> TCmsTest::CollectInfo ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestBootingTabletIsNotDead [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:42:48.001603Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:48.002339Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:48.005251Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:685:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:42:48.088985Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:48.093101Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:48.093846Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2225], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:48.094068Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:48.094235Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:48.095577Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:48.095610Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:48.453410Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:48.548376Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:48.548515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:48.549064Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:48.549158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:48.606939Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:42:48.607535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:48.607916Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:48.712038Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:48.725186Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:42:49.497795Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:49.497903Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:49.497955Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:49.498311Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: DEGRADED issue_log { id: "YELLOW-7932-1231c6b1-1" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 1 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-2" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 2 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-1" reason: "YELLOW-7932-1231c6b1-2" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-a8c7-1231c6b1" status: YELLOW message: "Storage degraded" location { database { name: "/Root" } } reason: "YELLOW-f700-1231c6b1-80c02825" type: "STORAGE" level: 2 } issue_log { id: "YELLOW-f700-1231c6b1-80c02825" status: YELLOW message: "Pool degraded" location { storage { pool { name: "static" } } database { name: "/Root" } } reason: "YELLOW-5d3e-1231c6b1-0" type: "STORAGE_POOL" level: 3 } issue_log { id: "YELLOW-5d3e-1231c6b1-0" status: YELLOW message: "Group degraded" location { storage { pool { name: "static" group { id: "0" } } } database { name: "/Root" } } reason: "RED-9f89-1231c6b1-1-0-3-55-0-55" type: "STORAGE_GROUP" level: 4 } issue_log { id: "RED-9f89-1231c6b1-1-0-3-55-0-55" status: RED message: "VDisk is not available" location { storage { node { id: 1 host: "::1" port: 12001 } pool { name: "static" group { vdisk { id: "0-3-55-0-55" } } } } database { name: "/Root" } } type: "VDISK" level: 5 } issue_log { id: "YELLOW-edf5-1231c6b1" status: YELLOW message: "Database has multiple issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" reason: "YELLOW-a8c7-1231c6b1" type: "DATABASE" level: 1 } location { id: 1 host: "::1" port: 12001 } ... waiting for SysViewsRoster update finished 2026-01-04T02:42:55.830901Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:55.831944Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:55.832370Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:680:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:42:55.841636Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:55.842160Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:55.844230Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:685:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:55.844547Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:55.844664Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:55.846345Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:55.846488Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:56.135028Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:56.233759Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:56.233922Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:56.234767Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:56.234848Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:56.279632Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-04T02:42:56.280413Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:56.280811Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:56.338596Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:56.374840Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:42:57.146089Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:57.146152Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:57.146186Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:57.146573Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... 
waiting for SysViewsRoster update finished 2026-01-04T02:43:04.046864Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:04.047546Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:04.049017Z node 6 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [6:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:43:04.055519Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:04.056691Z node 6 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02: ... cting -> Connected 2026-01-04T02:43:21.592730Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:21.617066Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:22.472697Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:22.472762Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:22.472794Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:22.473310Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-04T02:43:31.025965Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:31.026101Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:31.041705Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:31.042712Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:692:2408], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:31.043001Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:31.043061Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:31.045475Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:687:2352], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:31.045641Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:31.045763Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:31.349703Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:31.446790Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:31.446933Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:31.447402Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:31.447485Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:31.517943Z node 11 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 12 Cookie 12 2026-01-04T02:43:31.518634Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:31.519046Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:31.620310Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:31.649948Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:32.428976Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:32.429033Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:32.429059Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:32.429430Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... waiting for SysViewsRoster update finished 2026-01-04T02:43:39.813637Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:762:2348], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:43:39.814972Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:39.816012Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:39.829081Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:39.829662Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:767:2407], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:39.830130Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:39.830335Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:39.831969Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:39.832066Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:40.140797Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:40.243507Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:40.243670Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:40.244215Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:40.244309Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:40.302670Z node 13 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 14 Cookie 14 2026-01-04T02:43:40.303228Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:40.303592Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:40.390228Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:40.417222Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:44.470948Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:44.478075Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:44.478141Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:44.478177Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:44.478848Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:43:44.495830Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:44.496013Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:44.538644Z node 13 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 15 Cookie 15 2026-01-04T02:43:44.539500Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected self_check_result: GOOD issue_log { id: "YELLOW-7932-1231c6b1-13" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 13 host: "::1" port: 12001 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-14" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 14 host: "::1" port: 12002 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-7932-1231c6b1-15" status: YELLOW message: "LoadAverage above 100%" location { compute { node { id: 15 host: "::1" port: 12003 } } database { name: "/Root" } } type: "LOAD_AVERAGE" level: 4 } issue_log { id: "YELLOW-197d-1231c6b1" status: YELLOW message: "Compute is overloaded" location { database { name: "/Root" } } reason: "YELLOW-7932-1231c6b1-13" reason: "YELLOW-7932-1231c6b1-14" reason: "YELLOW-7932-1231c6b1-15" type: "COMPUTE" level: 2 } issue_log { id: "YELLOW-5995-1231c6b1" status: YELLOW message: "Database has compute issues" location { database { name: "/Root" } } reason: "YELLOW-197d-1231c6b1" type: "DATABASE" level: 1 } location { id: 13 host: "::1" port: 12001 } |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestTenantLimitForceRestartModeScheduled [GOOD] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTenatsTest::RequestShutdownHost [GOOD] >> TCmsTenatsTest::RequestRestartServices >> TCmsTest::TestOutdatedState ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::BridgeTimeDifference [GOOD] Test command err: 2026-01-04T02:42:42.560256Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331339688172754:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:42:42.560324Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:42:42.587320Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:42.816147Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:42:42.864199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:42.864282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:42.902055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:42.952811Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:42.954049Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331339688172725:2081] 1767494562557283 != 1767494562557286 2026-01-04T02:42:43.077344Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:43.125314Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:43.125358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:43.125444Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:43.125541Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:43.499212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:43.506379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:42:43.570665Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; issue_log { id: "0" status: GREY message: "Database does not exist" } location { id: 1 host: "::1" port: 12001 } ... 
waiting for SysViewsRoster update finished 2026-01-04T02:42:51.742544Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:51.742816Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:51.754674Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:51.756134Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:51.758595Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:500:2404], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:51.758946Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:51.759131Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:51.762097Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:493:2168], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:51.762562Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:51.762613Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:52.129469Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:52.235840Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:52.236011Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:52.236982Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:52.237086Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:52.293033Z node 2 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 3 Cookie 3 2026-01-04T02:42:52.293697Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:52.294121Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:52.386297Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:52.411017Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:42:53.188432Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:53.188512Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:53.188549Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:53.189005Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration self_check_result: GOOD location { id: 2 host: "::1" port: 12001 pile { name: "pile0" } } ... 
waiting for SysViewsRoster update finished 2026-01-04T02:43:00.462785Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:00.463927Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:00.465439Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:681:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:43:00.475749Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:00.477789Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:00.479463Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:686:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:00.479830Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:00.480035Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:00.481655Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:00.481792Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:00.837140Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:00.963611Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:00.963772Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:00.964290Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:00.964372Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:01.021066Z node 4 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 5 Cookie 5 2026-01-04T02:43:01.021884Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:01.022351Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:01.061435Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:01.161693Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finish ... 
T WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:14.482284Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:14.482476Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:14.544615Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:14.648425Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:15.640345Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:15.640431Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:15.640492Z node 9 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:15.641025Z node 9 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:43:15.704505Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-04T02:43:27.147010Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:27.154959Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:27.158022Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [11:266:2224], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:27.158515Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:27.158722Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:27.568015Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:27.658579Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:27.658764Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:27.693430Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:27.751892Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:28.635949Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:28.636021Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:28.636067Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:28.636543Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:43:45.017590Z node 13 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:45.018192Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:45.018779Z node 15 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:45.019007Z node 16 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:45.049445Z node 13 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-04T02:43:45.050815Z node 13 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-04T02:43:45.093500Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:45.093589Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:45.093648Z node 13 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2026-01-04T02:43:45.097928Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [13:1181:2354], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:45.099484Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:45.099579Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:45.103427Z node 16 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [16:1196:2341], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:45.104186Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:45.104388Z node 16 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:45.105172Z node 15 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [15:1191:2342], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:45.105455Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [14:1185:2330], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:45.105775Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:45.105861Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:45.105966Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:45.106013Z node 15 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:45.453962Z node 13 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:43:45.462463Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:45.462704Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:45.464207Z node 13 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [13:37:2084] 1767494609995454 != 1767494609995459 2026-01-04T02:43:45.475616Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:45.485569Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:45.485718Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:45.489356Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:45.489454Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:45.493219Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:45.493293Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:45.503037Z node 16 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [16:157:2075] 1767494610020439 != 1767494610020443 2026-01-04T02:43:45.505836Z node 15 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [15:117:2075] 1767494610013717 != 1767494610013721 2026-01-04T02:43:45.506687Z node 14 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [14:77:2075] 1767494610006826 != 1767494610006829 2026-01-04T02:43:45.539766Z node 13 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 14 Cookie 14 2026-01-04T02:43:45.540730Z node 13 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, 
NodeId 15 Cookie 15 2026-01-04T02:43:45.541166Z node 13 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 16 Cookie 16 2026-01-04T02:43:45.541580Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(13, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:45.542300Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:45.542465Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(15, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:45.542589Z node 13 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(16, (0,0,0,0)) VolatileState: Connecting -> Connected |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> TCmsTenatsTest::TestLimitsWithDownNode [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] >> TMaintenanceApiTest::SimplifiedMirror3DC [GOOD] >> TMaintenanceApiTest::TestDrainAction >> TCmsTest::StateRequest >> TCmsTest::RestartNodeInDownState >> TCmsTest::StateStorageAvailabilityMode [GOOD] >> TCmsTest::StateStorageLockedNodes >> TCmsTest::PriorityLocks [GOOD] >> DataShardVolatile::DistributedWriteLaterSnapshotBlockedThenAbort-UseSink [GOOD] >> DataShardVolatile::DistributedWriteAsymmetricExecute >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction >> TCmsTest::VDisksEvictionShouldFailOnMultipleActions [GOOD] >> TCmsTest::VDisksEviction >> TCmsTest::RequestRestartServicesOk [GOOD] >> TCmsTest::RequestRestartServicesMultipleNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUtf8 [GOOD] Test command err: 2026-01-04T02:43:11.594200Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:11.625591Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:11.625875Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:11.633836Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:11.634132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:11.634373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:11.634501Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:11.634608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:11.634749Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:11.634873Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:11.634988Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:11.635113Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:11.635230Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:11.635382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:11.635498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:11.635608Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:11.660477Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:11.660592Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:11.660630Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:11.660806Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:11.660983Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:11.661055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:11.661096Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:11.661175Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:11.661227Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:11.661261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:11.661280Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:11.661425Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:11.661474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:11.661499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:11.661518Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:11.661576Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:11.661609Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:11.661638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:11.661660Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:11.661691Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:11.661724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:11.661755Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:11.661786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:11.661837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:11.661887Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:11.662063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:11.662112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:11.662140Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:11.662219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:11.662251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:11.662272Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:11.662312Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:11.662360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:11.662407Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:43:11.662465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:11.662508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:11.662547Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:11.662664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:11.662694Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
e_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2026-01-04T02:43:45.511855Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1033; 2026-01-04T02:43:45.511908Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=80921; 2026-01-04T02:43:45.511955Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=81042; 2026-01-04T02:43:45.512019Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=12; 2026-01-04T02:43:45.512376Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=302; 2026-01-04T02:43:45.512424Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=81992; 2026-01-04T02:43:45.512584Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=108; 2026-01-04T02:43:45.512705Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=74; 2026-01-04T02:43:45.513136Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=372; 2026-01-04T02:43:45.513534Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=326; 2026-01-04T02:43:45.532934Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=19322; 2026-01-04T02:43:45.552027Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=18987; 2026-01-04T02:43:45.552123Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=13; 2026-01-04T02:43:45.552179Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-04T02:43:45.552221Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-04T02:43:45.552306Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=42; 2026-01-04T02:43:45.552352Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-04T02:43:45.552466Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=76; 2026-01-04T02:43:45.552518Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2026-01-04T02:43:45.552598Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=44; 2026-01-04T02:43:45.552692Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=52; 2026-01-04T02:43:45.552781Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=49; 2026-01-04T02:43:45.552825Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=131812; 2026-01-04T02:43:45.552961Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=58229640;raw_bytes=56100060;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23768;raw_bytes=20000;count=1;records=200} inactive {blob_bytes=348800;raw_bytes=16800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:43:45.553062Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:43:45.553113Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:43:45.553191Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:45.553245Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:43:45.553403Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:45.553463Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:43:45.553498Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2026-01-04T02:43:45.553540Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:45.553601Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494294054;tx_id=18446744073709551615;;current_snapshot_ts=1767494593067; 2026-01-04T02:43:45.553640Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:45.553686Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:45.553720Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:45.553859Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:45.554050Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.127000s; 2026-01-04T02:43:45.558061Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:43:45.558315Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:43:45.558368Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:45.558460Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:45.558522Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:45.558595Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494294054;tx_id=18446744073709551615;;current_snapshot_ts=1767494593067; 2026-01-04T02:43:45.558641Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:45.558693Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:45.558735Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:45.558812Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-04T02:43:45.558873Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:45.559584Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.012000s; 2026-01-04T02:43:45.559631Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:3207:5201];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TCmsTest::WalleTasks >> TCmsTest::CollectInfo [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::PriorityLocks [GOOD] Test command err: 2026-01-04T02:43:42.859304Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 9:9 2026-01-04T02:43:42.859399Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 10:10 2026-01-04T02:43:42.859429Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 11:11 2026-01-04T02:43:42.859456Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 12:12 2026-01-04T02:43:42.859484Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 13:13 2026-01-04T02:43:42.859512Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 14:14 2026-01-04T02:43:42.859537Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 15:15 2026-01-04T02:43:42.859563Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 16:16 2026-01-04T02:43:42.866447Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 9:9 2026-01-04T02:43:42.866515Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 10:10 2026-01-04T02:43:42.866542Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 11:11 2026-01-04T02:43:42.866567Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 12:12 2026-01-04T02:43:42.866593Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 13:13 2026-01-04T02:43:42.866619Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 14:14 2026-01-04T02:43:42.866644Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 15:15 2026-01-04T02:43:42.866671Z node 9 :CMS 
ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 16:16 2026-01-04T02:43:42.894145Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 9:9 2026-01-04T02:43:42.894195Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 10:10 2026-01-04T02:43:42.894211Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 11:11 2026-01-04T02:43:42.894231Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 12:12 2026-01-04T02:43:42.894255Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 13:13 2026-01-04T02:43:42.894275Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 14:14 2026-01-04T02:43:42.894292Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 15:15 2026-01-04T02:43:42.894306Z node 9 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 16:16 |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageRollingRestart [GOOD] >> TCmsTest::SysTabletsNode >> TCmsTest::ManageRequests [GOOD] >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag >> TCmsTest::ManualRequestApprovalLockingAllNodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpTypes::MultipleCurrentUtcTimestamp [GOOD] Test command err: Trying to start YDB, gRPC: 10493, MsgBus: 15763 ... waiting for SysViewsRoster update finished 2026-01-04T02:42:13.296724Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:13.423356Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:13.423841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:13.423900Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:13.720551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:13.720688Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:13.810313Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494530532415 != 1767494530532418 2026-01-04T02:42:13.823427Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:13.870794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:14.010706Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:42:14.367586Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:14.367654Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:14.367708Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:14.367940Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:42:14.380359Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:14.698298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:14.778303Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:15.069476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:15.457753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:15.748588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:16.505459Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1903:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:16.505597Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:16.506532Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1976:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:16.506648Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:16.532117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:16.737311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:16.984375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:17.262721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:17.507572Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:17.772669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:18.048988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:18.337425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:18.689376Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2789:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.689489Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.689862Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.689911Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.689952Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2796:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:42:18.694805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:42:18.863480Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2798:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:42:18.913118Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2858:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:42:20.398370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:24.569882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:42:24.569953Z ... unction: >
:9:22: Error: Uncompatible types in compare: Optional '>' Optional
:6:13: Error: At function: SqlProjectItem, At lambda
:10:22: Error: At function: <=
:10:22: Error: Uncompatible types in compare: Optional '<=' Optional Trying to start YDB, gRPC: 10328, MsgBus: 27606 2026-01-04T02:43:41.909991Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591331597054806128:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:43:41.910042Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:43:41.925082Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:43:42.000551Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:42.001425Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591331597054806100:2081] 1767494621909249 != 1767494621909252 2026-01-04T02:43:42.015427Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:42.015508Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:42.029238Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:42.087948Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:42.087972Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:42.087980Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:42.088070Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:43:42.101988Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:42.560351Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:43:42.568402Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:43:42.615280Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:43:42.755872Z node 4 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:43:42.859590Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:43:42.962832Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:43:45.104715Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331614234677146:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:43:45.104817Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:43:45.105017Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331614234677155:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:43:45.105071Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:43:45.184764Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:43:45.209070Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:43:45.233907Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:43:45.258749Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:43:45.282780Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:43:45.311571Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:43:45.343117Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:43:45.385759Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:43:45.450257Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331614234678023:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:43:45.450341Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:43:45.450355Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331614234678028:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:43:45.450540Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591331614234678030:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:43:45.450576Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:43:45.453128Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:43:45.463263Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591331614234678031:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:43:45.546299Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591331614234678083:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:43:46.910132Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591331597054806128:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:43:46.910219Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TCmsTest::TestOutdatedState [GOOD] >> TCmsTest::TestSetResetMarkers >> TMaintenanceApiTest::TestDrainAction [GOOD] >> TMaintenanceApiTest::TestCordonAction >> TCmsTest::StateRequest [GOOD] >> TCmsTest::StateRequestNode >> TCmsTest::ManualRequestApproval >> TCmsTenatsTest::TestClusterRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled >> TCmsTenatsTest::RequestShutdownHostWithTenantPolicy [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartMode >> TCmsTest::RestartNodeInDownState [GOOD] >> TCmsTest::ScheduledEmergencyDuringRollingRestart >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] >> TCmsTest::RequestRestartServicesMultipleNodes [GOOD] >> TCmsTest::RequestRestartServicesNoUser >> TMaintenanceApiTest::ManyActionGroupsWithSingleAction [GOOD] >> TMaintenanceApiTest::LastRefreshTime >> TCmsTest::StateStorageLockedNodes [GOOD] >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] >> TCmsTest::VDisksEviction [GOOD] >> TCmsTest::WalleDisableMaintenance [GOOD] >> TCmsTest::CheckUnreplicatedDiskPreventsRestart [GOOD] >> TCmsTest::BridgeModeStateStorage >> TCmsTenatsTest::RequestRestartServices [GOOD] >> TCmsTest::SysTabletsNode [GOOD] >> TCmsTest::TestSetResetMarkers [GOOD] >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 >> TCmsTenatsTest::TestTenantRatioLimit |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::StateStorageLockedNodes [GOOD] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::VDisksEviction [GOOD] Test command err: 2026-01-04T02:43:49.941463Z node 26 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-01-04T02:43:49.941554Z node 26 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-01-04T02:43:49.941685Z node 26 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:02:00Z 2026-01-04T02:43:49.943421Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: 
"vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 26 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 27 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 28 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 29 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 30 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 31 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 
120110512 } Timestamp: 120110512 NodeId: 32 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-33-33" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 33 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120110512 } } 2026-01-04T02:43:49.944035Z node 26 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 26 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 27 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 28 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 29 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: 
"vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 30 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 31 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 32 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-33-33" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 33 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120110512 } 2026-01-04T02:43:49.944288Z node 26 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2026-01-04T02:43:49.944349Z node 26 :CMS DEBUG: cms.cpp:397: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2026-01-04T02:43:49.944396Z node 26 :CMS DEBUG: cms.cpp:416: Result: DISALLOW_TEMP (reason: VDisks eviction from host 26 has not yet been completed) 2026-01-04T02:43:49.944528Z node 26 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2026-01-04T02:43:49.944742Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-04T02:43:49.944803Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Add host marker: host# 26, marker# MARKER_DISK_FAULTY 2026-01-04T02:43:49.945021Z node 26 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 120.083512s 2026-01-04T02:43:49.945064Z node 26 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-04T02:43:49.945144Z node 
26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 26, wbId# [26:8388350642965737326:1634689637] 2026-01-04T02:43:49.945184Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 27, wbId# [27:8388350642965737326:1634689637] 2026-01-04T02:43:49.945209Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326: ... torage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-26-26" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 26 InterconnectPort: 12001 Location { DataCenter: "1" Module: "1" Rack: "1" Unit: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-27-27" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 27 InterconnectPort: 12002 Location { DataCenter: "1" Module: "2" Rack: "2" Unit: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-28-28" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 28 InterconnectPort: 12003 Location { DataCenter: "1" Module: "3" Rack: "3" Unit: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-29-29" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 29 InterconnectPort: 12004 Location { DataCenter: "1" Module: "4" Rack: "4" Unit: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-30-30" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 30 InterconnectPort: 12005 Location { DataCenter: "1" Module: "5" Rack: "5" Unit: "5" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: 
"vdisk-2-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-31-31" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 31 InterconnectPort: 12006 Location { DataCenter: "1" Module: "6" Rack: "6" Unit: "6" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-32-32" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 32 InterconnectPort: 12007 Location { DataCenter: "1" Module: "7" Rack: "7" Unit: "7" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120622560 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120622560 } Devices { Name: "pdisk-33-33" State: UP Timestamp: 120622560 } Timestamp: 120622560 NodeId: 33 InterconnectPort: 12008 Location { DataCenter: "1" Module: "8" Rack: "8" Unit: "8" } StartTimeSeconds: 0 } Timestamp: 120622560 } 2026-01-04T02:43:50.368072Z node 26 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2026-01-04T02:43:50.368170Z node 26 :CMS DEBUG: cms.cpp:397: Checking action: Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 2026-01-04T02:43:50.368247Z node 26 :CMS DEBUG: cms.cpp:416: Result: DISALLOW_TEMP (reason: VDisks eviction from host 26 has not yet been completed) 2026-01-04T02:43:50.368414Z node 26 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2026-01-04T02:43:50.368653Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-3, owner# user, order# 3, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 26 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-04T02:43:50.368709Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Add host marker: host# 26, marker# MARKER_DISK_FAULTY 2026-01-04T02:43:50.368973Z node 26 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 0.100000s 2026-01-04T02:43:50.369028Z node 26 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-04T02:43:50.369126Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 26, wbId# [26:8388350642965737326:1634689637] 2026-01-04T02:43:50.369207Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 27, wbId# [27:8388350642965737326:1634689637] 2026-01-04T02:43:50.369258Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 28, wbId# [28:8388350642965737326:1634689637] 2026-01-04T02:43:50.369328Z node 26 
:CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 29, wbId# [29:8388350642965737326:1634689637] 2026-01-04T02:43:50.369380Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 30, wbId# [30:8388350642965737326:1634689637] 2026-01-04T02:43:50.369416Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 31, wbId# [31:8388350642965737326:1634689637] 2026-01-04T02:43:50.369460Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 32, wbId# [32:8388350642965737326:1634689637] 2026-01-04T02:43:50.369497Z node 26 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 33, wbId# [33:8388350642965737326:1634689637] 2026-01-04T02:43:50.369835Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 26, response# PDiskStateInfo { PDiskId: 26 CreateTime: 120524072 ChangeTime: 120524072 Path: "/26/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-04T02:43:50.370491Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 33, response# PDiskStateInfo { PDiskId: 33 CreateTime: 120524072 ChangeTime: 120524072 Path: "/33/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-04T02:43:50.370760Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 31, response# PDiskStateInfo { PDiskId: 31 CreateTime: 120524072 ChangeTime: 120524072 Path: "/31/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-04T02:43:50.370974Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 32, response# PDiskStateInfo { PDiskId: 32 CreateTime: 120524072 ChangeTime: 120524072 Path: "/32/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-04T02:43:50.371074Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 30, response# PDiskStateInfo { PDiskId: 30 CreateTime: 120524072 ChangeTime: 120524072 Path: "/30/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-04T02:43:50.371150Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 27, response# PDiskStateInfo { PDiskId: 27 CreateTime: 120524072 ChangeTime: 120524072 Path: "/27/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-04T02:43:50.371217Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 28, response# PDiskStateInfo { PDiskId: 28 CreateTime: 120524072 ChangeTime: 120524072 Path: "/28/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-04T02:43:50.371284Z node 26 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 29, response# PDiskStateInfo { PDiskId: 29 CreateTime: 120524072 ChangeTime: 120524072 Path: "/29/pdisk-29.data" Guid: 1 
AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 120622 2026-01-04T02:43:50.371336Z node 26 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-04T02:43:50.383848Z node 26 :CMS DEBUG: cms_tx_store_permissions.cpp:141: TTxStorePermissions complete 2026-01-04T02:43:50.384124Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvPermissionRequest { User: "user" Actions { Type: RESTART_SERVICES Host: "26" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: DISALLOW_TEMP Reason: "VDisks eviction from host 26 has not yet been completed" } RequestId: "user-r-3" Deadline: 0 } 2026-01-04T02:43:50.384765Z node 26 :CMS INFO: cms.cpp:1441: User user removes request user-r-3 2026-01-04T02:43:50.384822Z node 26 :CMS DEBUG: cms.cpp:1464: Resulting status: OK 2026-01-04T02:43:50.384907Z node 26 :CMS DEBUG: cms_tx_remove_request.cpp:21: TTxRemoveRequest Execute 2026-01-04T02:43:50.384963Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reset host markers: host# 26 2026-01-04T02:43:50.385088Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-3, reason# explicit remove 2026-01-04T02:43:50.397366Z node 26 :CMS DEBUG: cms_tx_remove_request.cpp:45: TTxRemoveRequest Complete 2026-01-04T02:43:50.397567Z node 26 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManageRequestRequest { User: "user" Command: REJECT RequestId: "user-r-3" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManageRequestResponse { Status { Code: OK } } |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SysTabletsNode [GOOD] >> TCmsTest::TestLogOperationsRollback [GOOD] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::EnableCMSRequestPrioritiesFeatureFlag [GOOD] |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::RequestRestartServices [GOOD] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleDisableMaintenance [GOOD] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::TestCordonAction [GOOD] >> TCmsTest::ManualRequestApprovalLockingAllNodes [GOOD] >> TCmsTest::ManualRequestApprovalWithPartialAlreadyApproved |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestLogOperationsRollback [GOOD] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::StateRequestNode [GOOD] >> TCmsTest::StateRequestUnknownMultipleNodes >> TCmsTest::RequestRestartServicesNoUser [GOOD] >> TCmsTest::RequestRestartServicesDryRun ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::TestCordonAction [GOOD] Test command err: 2026-01-04T02:43:49.142059Z node 20 :CMS ERROR: cluster_info.cpp:747: FindLockedItems: action DRAIN_NODE is not supported 2026-01-04T02:43:49.142172Z node 20 :CMS ERROR: cluster_info.cpp:747: FindLockedItems: action DRAIN_NODE is not supported 2026-01-04T02:43:51.242145Z node 28 :CMS ERROR: 
cluster_info.cpp:747: FindLockedItems: action CORDON_NODE is not supported 2026-01-04T02:43:51.242258Z node 28 :CMS ERROR: cluster_info.cpp:747: FindLockedItems: action CORDON_NODE is not supported |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> test_sql_streaming.py::test[watermarks-watermarks-default.txt] [FAIL] >> test_sql_streaming.py::test[watermarks-watermarks_adjust-default.txt] >> TCmsTest::ScheduledEmergencyDuringRollingRestart [GOOD] >> TCmsTest::SamePriorityRequest >> TCmsTest::ManualRequestApproval [GOOD] >> TCmsTest::ManualRequestApprovalAlreadyLockedNode >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] >> TMaintenanceApiTest::LastRefreshTime [GOOD] >> TMaintenanceApiTest::RequestReplaceDevicePDisk >> TCmsTenatsTest::TestClusterLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled >> TMaintenanceApiTest::CompositeActionGroupSameStorageGroup [GOOD] >> TMaintenanceApiTest::ActionReason >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] >> TCmsTest::AllVDisksEvictionInRack [GOOD] >> TCmsTest::BridgeModeCollectInfo |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterRatioLimitForceRestartModeScheduled [GOOD] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKString_Reboot [GOOD] Test command err: 2026-01-04T02:43:05.031629Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:05.063923Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:05.064151Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:05.069832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:05.070072Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:05.070284Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:05.070369Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:05.070465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:05.070567Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:05.070643Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:05.070705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:05.070765Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:05.070854Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:05.070931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:05.070993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:05.071054Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:05.092953Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:05.093088Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:05.093127Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:05.093261Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:05.093393Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:05.093458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:05.093488Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:05.093548Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:05.093588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:05.093618Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:05.093640Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:05.093777Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:05.093847Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:05.093885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:05.093916Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:05.094011Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:05.094056Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:05.094104Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:05.094124Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:05.094154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:05.094176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:05.094202Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:05.094252Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:05.094303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:05.094344Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:05.094593Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:05.094667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:05.094717Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:05.094856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:05.094903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:05.094950Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:05.095017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:05.095070Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:05.095119Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:43:05.095171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:05.095208Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:05.095240Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:05.095357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:05.095396Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=10; 2026-01-04T02:43:50.673117Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1017; 2026-01-04T02:43:50.673172Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=48018; 2026-01-04T02:43:50.673211Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=48102; 2026-01-04T02:43:50.673270Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=18; 2026-01-04T02:43:50.673665Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=350; 2026-01-04T02:43:50.673716Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=48903; 2026-01-04T02:43:50.673867Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=101; 2026-01-04T02:43:50.673970Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=60; 2026-01-04T02:43:50.674382Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=338; 2026-01-04T02:43:50.674590Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=170; 2026-01-04T02:43:50.683322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8690; 2026-01-04T02:43:50.692211Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=8813; 2026-01-04T02:43:50.692301Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=26; 2026-01-04T02:43:50.692345Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2026-01-04T02:43:50.692376Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2026-01-04T02:43:50.692432Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=33; 2026-01-04T02:43:50.692461Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=3; 2026-01-04T02:43:50.692515Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=32; 2026-01-04T02:43:50.692545Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2026-01-04T02:43:50.692612Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=30; 2026-01-04T02:43:50.692671Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=34; 2026-01-04T02:43:50.692723Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=30; 2026-01-04T02:43:50.692748Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=73841; 2026-01-04T02:43:50.692854Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=58229640;raw_bytes=56100060;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23768;raw_bytes=20000;count=1;records=200} inactive {blob_bytes=348800;raw_bytes=16800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:43:50.692933Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:43:50.692971Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:43:50.693014Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:50.693045Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:43:50.693174Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:50.693212Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:43:50.693238Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric 
zero; 2026-01-04T02:43:50.693267Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:50.693314Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494289172;tx_id=18446744073709551615;;current_snapshot_ts=1767494586562; 2026-01-04T02:43:50.693343Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:50.693379Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:50.693403Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:50.693461Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:50.693580Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.062000s; 2026-01-04T02:43:50.696072Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:43:50.696429Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:43:50.696499Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:50.696577Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:50.696625Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:50.696697Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494289172;tx_id=18446744073709551615;;current_snapshot_ts=1767494586562; 2026-01-04T02:43:50.696747Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:50.696791Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:50.696828Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:50.696910Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-04T02:43:50.696974Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:50.697553Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.168000s; 2026-01-04T02:43:50.697597Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5565:7196];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TCmsTest::TestTwoOrMoreDisksFromGroupAtTheSameRequestBlock42 [GOOD] >> TCmsTest::TestProcessingQueue >> test_sql_streaming.py::test[pq-ReadTopicWithMetadataWithFilter-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTopicWithSchema-default.txt] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowListKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowNoKey-default.txt] >> TCmsTenatsTest::TestTenantRatioLimit [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode >> TCmsTest::ManualRequestApprovalWithPartialAlreadyApproved [GOOD] >> TCmsTest::Notifications >> TCmsTest::StateRequestUnknownMultipleNodes [GOOD] >> TCmsTest::ScheduledWalleRequestDuringRollingRestart >> TCmsTest::RequestRestartServicesDryRun [GOOD] >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] >> DataShardVolatile::DistributedWriteAsymmetricExecute [GOOD] >> DataShardVolatile::DistributedWriteThenCopyTable >> TMaintenanceApiTest::ActionReason [GOOD] >> TMaintenanceApiTest::CreateTime >> TCmsTest::SamePriorityRequest [GOOD] >> TCmsTest::SamePriorityRequest2 >> TCmsTest::ManualRequestApprovalAlreadyLockedNode [GOOD] >> TCmsTest::ManageRequestsWrong >> test_sql_streaming.py::test[hop-GroupByHopListKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopNoKey-default.txt] |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::RequestRestartServicesDryRun [GOOD] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD] >> TCmsTest::BridgeModeStateStorage [GOOD] >> TCmsTest::BridgeModeSysTablets >> TCmsTest::TestProcessingQueue [GOOD] |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTenatsTest::TestClusterLimitForceRestartModeScheduled [GOOD] |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, 
release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt32_Reboot [GOOD] Test command err: 2026-01-04T02:43:09.759414Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:09.794467Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:09.794741Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:09.802264Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:09.802471Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:09.802653Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:09.802738Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:09.802821Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:09.802897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:09.802975Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:09.803051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:09.803112Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:09.803206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:09.803290Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:09.803358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:09.803417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:09.823067Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:09.823204Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:09.823243Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:09.823398Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:09.823533Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:09.823599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:09.823637Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:09.823701Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:09.823741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:09.823770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:09.823809Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:09.823953Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:09.824012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:09.824041Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:09.824066Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:09.824172Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:09.824224Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:09.824256Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:09.824276Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:09.824306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:09.824331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:09.824354Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:09.824398Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:09.824427Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:09.824470Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:09.824645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:09.824682Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:09.824709Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:09.824805Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:09.824843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:09.824864Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:09.824905Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:09.824947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:09.824990Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:43:09.825031Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:09.825057Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:09.825077Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:09.825225Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:09.825257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ad_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=9; 2026-01-04T02:43:53.073452Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=642; 2026-01-04T02:43:53.073489Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=36513; 2026-01-04T02:43:53.073526Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=36608; 2026-01-04T02:43:53.073575Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=9; 2026-01-04T02:43:53.073837Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=225; 2026-01-04T02:43:53.073866Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=37238; 2026-01-04T02:43:53.073991Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=79; 2026-01-04T02:43:53.074097Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=60; 2026-01-04T02:43:53.074321Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=192; 2026-01-04T02:43:53.074557Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=203; 2026-01-04T02:43:53.083566Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=8960; 2026-01-04T02:43:53.092969Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=9317; 2026-01-04T02:43:53.093049Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2026-01-04T02:43:53.093093Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=10; 2026-01-04T02:43:53.093124Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2026-01-04T02:43:53.093175Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=29; 2026-01-04T02:43:53.093209Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-04T02:43:53.093264Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=34; 2026-01-04T02:43:53.093294Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2026-01-04T02:43:53.093340Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=22; 2026-01-04T02:43:53.093402Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=34; 2026-01-04T02:43:53.093474Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=40; 2026-01-04T02:43:53.093503Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=62847; 
2026-01-04T02:43:53.093610Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:43:53.093690Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:43:53.093732Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:43:53.093778Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:53.093824Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:43:53.093956Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:53.093998Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:43:53.094025Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:53.094055Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:53.094103Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494293839;tx_id=18446744073709551615;;current_snapshot_ts=1767494591296; 2026-01-04T02:43:53.094133Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:53.094164Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:53.094191Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:53.094273Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:53.094418Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.119000s; 2026-01-04T02:43:53.096424Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:43:53.096729Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:43:53.096770Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:53.096825Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:53.096859Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:53.096907Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494293839;tx_id=18446744073709551615;;current_snapshot_ts=1767494591296; 2026-01-04T02:43:53.096941Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:53.096977Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:53.097018Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:53.097086Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-04T02:43:53.097121Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:53.097543Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.032000s; 2026-01-04T02:43:53.097576Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TSchemeShardUserAttrsTest::Boot >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps >> Other::TraceNoValidGroupForbidden >> ActorPage::NoValidGroupForbidden >> Other::TraceHttpOk >> TSchemeShardUserAttrsTest::SetAttrs >> 
TSchemeShardUserAttrsTest::MkDir >> Other::UnknownPathNotFound >> TSchemeShardUserAttrsTest::VariousUse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::TestProcessingQueue [GOOD] Test command err: 2026-01-04T02:43:48.638316Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2026-01-04T02:43:48.757336Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2026-01-04T02:43:48.773450Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2026-01-04T02:43:48.883561Z node 1 :CMS ERROR: info_collector.cpp:300: [InfoCollector] Couldn't get base config 2026-01-04T02:43:55.277937Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 26:26 2026-01-04T02:43:55.278003Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 27:27 2026-01-04T02:43:55.278038Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 28:28 2026-01-04T02:43:55.278058Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 29:29 2026-01-04T02:43:55.278075Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 30:30 2026-01-04T02:43:55.278092Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 31:31 2026-01-04T02:43:55.278108Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 32:32 2026-01-04T02:43:55.278124Z node 25 :CMS ERROR: cluster_info.cpp:490: Cannot update state for unknown PDisk 25:25 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKUInt64 [GOOD] Test command err: 2026-01-04T02:42:47.249876Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:42:47.272247Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:42:47.273112Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:42:47.285907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:42:47.286098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:42:47.286303Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:42:47.286392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:42:47.286457Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:42:47.286514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:42:47.286605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:42:47.286683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:42:47.286756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:42:47.286846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.286942Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:42:47.287015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:42:47.287101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:42:47.311676Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:42:47.311831Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:42:47.311901Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:42:47.312153Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.314198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:42:47.314320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:42:47.314370Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:42:47.314477Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:42:47.314640Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:42:47.314725Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:42:47.314761Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:42:47.314940Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.315002Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:42:47.315043Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:42:47.315076Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:42:47.315175Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:42:47.315259Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:42:47.315306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:42:47.315403Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:42:47.315456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:42:47.315498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:42:47.315545Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:42:47.315588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:42:47.315624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:42:47.315658Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:42:47.315907Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:42:47.315961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:42:47.316006Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:42:47.316123Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:42:47.316163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.316190Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.316239Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:42:47.316276Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:42:47.316322Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:42:47.316371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:42:47.316407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:42:47.316435Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:42:47.316562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:42:47.316617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
olumn_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=5792; 2026-01-04T02:43:53.795489Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=8; 2026-01-04T02:43:53.796212Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=674; 2026-01-04T02:43:53.796268Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=6753; 2026-01-04T02:43:53.796306Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=6862; 2026-01-04T02:43:53.796352Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=9; 2026-01-04T02:43:53.796431Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=47; 2026-01-04T02:43:53.796468Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=7378; 2026-01-04T02:43:53.796606Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=94; 2026-01-04T02:43:53.796705Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=68; 2026-01-04T02:43:53.796862Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=114; 2026-01-04T02:43:53.796980Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=89; 2026-01-04T02:43:53.798604Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1581; 2026-01-04T02:43:53.800048Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1381; 2026-01-04T02:43:53.800098Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2026-01-04T02:43:53.800134Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=7; 2026-01-04T02:43:53.800163Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=5; 2026-01-04T02:43:53.800218Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=32; 2026-01-04T02:43:53.800248Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2026-01-04T02:43:53.800313Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=43; 2026-01-04T02:43:53.800342Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2026-01-04T02:43:53.800420Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=46; 2026-01-04T02:43:53.800511Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=57; 2026-01-04T02:43:53.800575Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=41; 2026-01-04T02:43:53.800606Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=19500; 2026-01-04T02:43:53.800731Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110437896;raw_bytes=171489958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:43:53.800824Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:43:53.800877Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:43:53.800970Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:53.801029Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:43:53.801175Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:53.801229Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:43:53.801275Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:53.801315Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:43:53.801369Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:53.801411Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:53.801453Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:53.801550Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:53.801717Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.194000s; 2026-01-04T02:43:53.804022Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:43:53.804217Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:43:53.804263Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:53.804324Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:43:53.804370Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:53.804417Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:43:53.804476Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:53.804520Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:53.804569Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:53.804639Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-04T02:43:53.804693Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:53.805283Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.119000s; 2026-01-04T02:43:53.805325Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |92.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> Other::TraceInvalidTokenForbidden >> TCmsTest::Notifications [GOOD] >> TCmsTest::Mirror3dcPermissions |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TCmsTest::WalleTasks [GOOD] >> TCmsTest::WalleTasksWithNodeLimit >> TCmsTenatsTest::TestTenantRatioLimitForceRestartMode [GOOD] >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled >> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD] >> TColumnShardTestReadWrite::CompactionGC [GOOD] |92.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TMaintenanceApiTest::CreateTime [GOOD] >> TMaintenanceApiTest::DisableMaintenance |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TCmsTest::BridgeModeCollectInfo [GOOD] >> TCmsTest::BridgeModeGroups >> TMaintenanceApiTest::RequestReplaceDevicePDisk [GOOD] >> TMaintenanceApiTest::ForceAvailabilityMode >> TCmsTest::ManageRequestsWrong [GOOD] >> TCmsTest::ManageRequestsDry |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ScheduledWalleRequestDuringRollingRestart [GOOD] |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest2 [GOOD] >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] >> TSchemeShardUserAttrsTest::Boot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKInt64_Reboot [GOOD] Test command err: 2026-01-04T02:43:09.157998Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:09.181119Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:09.181345Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 
9437184 2026-01-04T02:43:09.187333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:09.187513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:09.187681Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:09.187777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:09.187900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:09.187993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:09.188067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:09.188138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:09.188220Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:09.188320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:09.188401Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:09.188473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:09.188550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:09.208308Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:09.208441Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:09.208477Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:09.208613Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:09.208755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:09.208856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:09.208892Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:09.208959Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:09.208999Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:09.209032Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:09.209055Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:09.209187Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:09.209229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:09.209258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:09.209285Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:09.209356Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:09.209389Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:09.209413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-04T02:43:09.209435Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:09.209479Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:09.209506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:09.209529Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:09.209556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:09.209578Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:09.209597Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:09.209752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:09.209784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:09.209842Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:09.209962Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:09.209994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:09.210024Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:09.210062Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:09.210097Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:09.210146Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-04T02:43:09.210198Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:09.210235Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:09.210264Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:09.210391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:09.210421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... ad_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=11; 2026-01-04T02:43:54.811025Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=588; 2026-01-04T02:43:54.811060Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=43076; 2026-01-04T02:43:54.811092Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=43156; 2026-01-04T02:43:54.811136Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=8; 2026-01-04T02:43:54.811371Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=205; 2026-01-04T02:43:54.811399Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=43766; 2026-01-04T02:43:54.811509Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=76; 2026-01-04T02:43:54.811587Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=47; 2026-01-04T02:43:54.811866Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=249; 2026-01-04T02:43:54.812064Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=164; 2026-01-04T02:43:54.821580Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=9456; 2026-01-04T02:43:54.831018Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=9346; 2026-01-04T02:43:54.831096Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=10; 2026-01-04T02:43:54.831138Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2026-01-04T02:43:54.831172Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-04T02:43:54.831242Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=38; 2026-01-04T02:43:54.831275Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-04T02:43:54.831338Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=38; 2026-01-04T02:43:54.831368Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=4; 2026-01-04T02:43:54.831415Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=21; 2026-01-04T02:43:54.831479Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=37; 2026-01-04T02:43:54.831537Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=32; 2026-01-04T02:43:54.831561Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=69549; 2026-01-04T02:43:54.831658Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:43:54.831738Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:43:54.831793Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:43:54.831848Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:43:54.831892Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:43:54.832075Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:54.832134Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:43:54.832165Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:54.832196Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:54.832241Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494293284;tx_id=18446744073709551615;;current_snapshot_ts=1767494590685; 2026-01-04T02:43:54.832277Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:54.832308Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:54.832334Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:54.832400Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:54.832565Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.009000s; 2026-01-04T02:43:54.834554Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:43:54.834843Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:43:54.834879Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:54.834929Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:43:54.834972Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:43:54.835016Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494293284;tx_id=18446744073709551615;;current_snapshot_ts=1767494590685; 2026-01-04T02:43:54.835051Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:54.835084Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:54.835110Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:54.835155Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-04T02:43:54.835191Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:54.835685Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.139000s; 2026-01-04T02:43:54.835716Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] >> TSchemeShardUserAttrsTest::MkDir [GOOD] >> TSchemeShardUserAttrsTest::VariousUse [GOOD] >> TSchemeShardUserAttrsTest::SpecialAttributes |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::SamePriorityRequest2 [GOOD] |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtAlter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SetAttrs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:43:57.540935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue 
configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:43:57.541059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:43:57.541107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:43:57.541149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:43:57.541220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:43:57.541260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:43:57.541339Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:43:57.541435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:43:57.542439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:43:57.546097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:43:57.641528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:43:57.641604Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:57.659987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:43:57.660230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:43:57.661303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:43:57.690602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:43:57.690862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:43:57.691692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:43:57.692002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:43:57.697265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:43:57.697467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:43:57.698659Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:43:57.698733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:43:57.698875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:43:57.698983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:43:57.699049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:43:57.699234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:43:57.870798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.871898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 
281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.873028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... eTx for txid 103:0 type: TxAlterUserAttributes target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:43:58.517663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:43:58.517727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:43:58.523308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:43:58.523461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: MyRoot 2026-01-04T02:43:58.523611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:43:58.523646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:97: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:43:58.523685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2026-01-04T02:43:58.523767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:43:58.525583Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-01-04T02:43:58.525749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 
State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 2026-01-04T02:43:58.526108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:43:58.526211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:43:58.526258Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:114: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000040, at schemeshard: 72057594046678944 2026-01-04T02:43:58.526419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:43:58.526451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:43:58.526487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:43:58.526526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:43:58.526596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:43:58.526659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-04T02:43:58.526711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:43:58.526745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:43:58.526796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:43:58.526831Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:43:58.526884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:43:58.526917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2026-01-04T02:43:58.526951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-04T02:43:58.528810Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:43:58.528860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:43:58.529092Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:43:58.529145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 1 FAKE_COORDINATOR: Erasing txId 103 2026-01-04T02:43:58.529599Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:43:58.529700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:43:58.529754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:43:58.529791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-04T02:43:58.529849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:43:58.529927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-04T02:43:58.531195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-04T02:43:58.531355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:43:58.531381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:43:58.531644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:43:58.531705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:43:58.531729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:747:2737] TestWaitNotification: OK eventTxId 103 2026-01-04T02:43:58.532064Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:43:58.532210Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 155us result status StatusSuccess 2026-01-04T02:43:58.532593Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, 
result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrRoot" Value: "ValRoot" } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::Boot [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:43:57.542714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:43:57.542817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:43:57.542858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:43:57.542897Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:43:57.542948Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:43:57.542990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:43:57.543054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:43:57.543135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:43:57.544013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:43:57.544446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:43:57.646879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:43:57.646958Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:57.664004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:43:57.664572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:43:57.664740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:43:57.675532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:43:57.675843Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:43:57.677326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:43:57.680519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:43:57.694178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:43:57.694401Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:43:57.697392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:43:57.697480Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:43:57.697562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:43:57.697616Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: 
[OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:43:57.697670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:43:57.698518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:43:57.876898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.877952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.878085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.878198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.878287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.878361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.878426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.878496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.878582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.878682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.878756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.878873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.878974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.879048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.879142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... meshard: 72057594046678944, cookie: 281474976710672 2026-01-04T02:43:58.458958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 281474976710672 2026-01-04T02:43:58.459022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Handle TEvNotifyTxCompletionResult, create sys view '/MyRoot/.sys/streaming_queries' 2026-01-04T02:43:58.459049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:221: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvRosterUpdateFinished 2026-01-04T02:43:58.465414Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 2026-01-04T02:43:58.477566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:43:58.477754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:43:58.477950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:43:58.477994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:43:58.478185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:43:58.478253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:43:58.480071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:43:58.480212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:43:58.480408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:43:58.480474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 
72057594046678944 2026-01-04T02:43:58.480515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:43:58.480567Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:43:58.482114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:43:58.482169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:43:58.482214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:43:58.483729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:43:58.483770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:43:58.483841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:43:58.483884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:43:58.483999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:43:58.485619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:43:58.485936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 2026-01-04T02:43:58.486239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000037, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:43:58.486463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000037 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:43:58.486514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:43:58.486776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:43:58.486875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: 
NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:43:58.487020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:43:58.487092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:43:58.489529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:43:58.489574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 1, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:43:58.489718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:43:58.489753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 1, path id: 1 2026-01-04T02:43:58.490089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:43:58.490130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 1:0 ProgressState 2026-01-04T02:43:58.490215Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:43:58.490247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:43:58.490308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#1:0 progress is 1/1 2026-01-04T02:43:58.490355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:43:58.490390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 1/1, is published: false 2026-01-04T02:43:58.490423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 1 ready parts: 1/1 2026-01-04T02:43:58.490453Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 1:0 2026-01-04T02:43:58.490483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 1:0 2026-01-04T02:43:58.490539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:43:58.490575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 1, publications: 1, subscribers: 0 2026-01-04T02:43:58.490675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 1, [OwnerId: 72057594046678944, LocalPathId: 1], 6 2026-01-04T02:43:58.491138Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:43:58.491250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046678944, cookie: 1 2026-01-04T02:43:58.491292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 1 2026-01-04T02:43:58.491327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 6 2026-01-04T02:43:58.491407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:43:58.491475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:43:58.493941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:43:58.494333Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::MkDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:43:57.540947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:43:57.541049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:43:57.541108Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:43:57.541140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:43:57.541174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:43:57.541216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:43:57.541290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:43:57.541366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:43:57.542266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:43:57.544473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:43:57.648852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:43:57.648942Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:57.662536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:43:57.663164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:43:57.663352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:43:57.673552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:43:57.674512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:43:57.677305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:43:57.680418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:43:57.690651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:43:57.690838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:43:57.698250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:43:57.698325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:43:57.698396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:43:57.698450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:43:57.698529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:43:57.698680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:43:57.867555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" 
UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.872936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.873030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.873108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.873183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.873309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.873383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.873469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
StateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:43:58.605444Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:43:58.605624Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 193us result status StatusSuccess 2026-01-04T02:43:58.606023Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 } ChildrenExist: true } Children { Name: "SubDirA" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 
PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrA1" Value: "ValA1" } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:43:58.606598Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:43:58.606741Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 157us result status StatusSuccess 2026-01-04T02:43:58.607063Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrB1" Value: "ValB1" } UserAttributes { Key: "AttrB2" Value: "ValB2" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:43:58.607569Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:43:58.607768Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA" took 188us result status StatusSuccess 2026-01-04T02:43:58.608157Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA" PathDescription { Self { Name: "SubDirA" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 5000040 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "DirB" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrAA1" Value: "ValAA1" } UserAttributes { Key: "AttrAA2" Value: "ValAA2" } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:43:58.608744Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA/SubDirA/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:43:58.608904Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA/SubDirA/DirB" took 167us result status StatusSuccess 2026-01-04T02:43:58.609239Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA/SubDirA/DirB" PathDescription { Self { Name: "DirB" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 103 CreateStep: 5000041 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrAB1" Value: "ValAB1" } UserAttributes { Key: "AttrAB2" Value: "ValAB2" } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtCreateDropOps [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:43:57.540932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:43:57.541033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:43:57.541080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:43:57.541140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:43:57.541185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:43:57.541215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:43:57.541278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:43:57.541355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-04T02:43:57.542246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:43:57.546152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:43:57.647890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:43:57.647950Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:57.664104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:43:57.664651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:43:57.664836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:43:57.673011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:43:57.674447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:43:57.677567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:43:57.680597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:43:57.689491Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:43:57.690833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:43:57.697356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:43:57.697433Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:43:57.697492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:43:57.697548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:43:57.697668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:43:57.699206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:43:57.886466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.887594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.887733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.887854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.887948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.888022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.888093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.888178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.888273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.888393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.888477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.888580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.888680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.888761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.888863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
Operation and all the parts is done, operation id: 105:0 2026-01-04T02:43:58.638142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 105:0 2026-01-04T02:43:58.638257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-04T02:43:58.638326Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 105, publications: 2, subscribers: 0 2026-01-04T02:43:58.638364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-04T02:43:58.638398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 105, [OwnerId: 72057594046678944, LocalPathId: 40], 18446744073709551615 2026-01-04T02:43:58.642847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:43:58.643010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:43:58.644837Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:43:58.644883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:43:58.645081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 105, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-04T02:43:58.645237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:43:58.645271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 105, path id: 1 2026-01-04T02:43:58.645322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 105, path id: 40 FAKE_COORDINATOR: Erasing txId 105 2026-01-04T02:43:58.646087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:43:58.646181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:43:58.646216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:43:58.646268Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 
72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 14 2026-01-04T02:43:58.646314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-04T02:43:58.646746Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:43:58.646816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 105 2026-01-04T02:43:58.646854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 105 2026-01-04T02:43:58.646895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 105, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-01-04T02:43:58.646925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:43:58.647005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 105, subscribers: 0 2026-01-04T02:43:58.647238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:43:58.647286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-04T02:43:58.647370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:43:58.650093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:43:58.651238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:43:58.651351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-04T02:43:58.651725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-04T02:43:58.651770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-04T02:43:58.652278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 
2026-01-04T02:43:58.652412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:43:58.652451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [1:803:2793] TestWaitNotification: OK eventTxId 105 2026-01-04T02:43:58.653140Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirC" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:43:58.653351Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirC" took 242us result status StatusPathDoesNotExist 2026-01-04T02:43:58.653605Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/DirC\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/DirC" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:43:58.654261Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:43:58.654445Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 202us result status StatusSuccess 2026-01-04T02:43:58.654941Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 14 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 14 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 12 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 101 CreateStep: 
5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::VariousUse [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:43:57.591623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:43:57.591716Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:43:57.591766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:43:57.591841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:43:57.591884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:43:57.591937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:43:57.591995Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:43:57.592103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, 
ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:43:57.592836Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:43:57.593092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:43:57.690287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:43:57.690355Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:57.706594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:43:57.706823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:43:57.707027Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:43:57.715962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:43:57.716237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:43:57.717034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:43:57.717328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:43:57.720352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:43:57.720618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:43:57.721907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:43:57.721975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:43:57.722139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:43:57.722213Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:43:57.722258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:43:57.722404Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:43:57.887147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.888187Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.888326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.888427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.888514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.888586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.888657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.888727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.888796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.888963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.889047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.889136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.889214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.889292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:57.889404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
at schemeshard: 72057594046678944, cookie: 112 2026-01-04T02:43:58.763076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:43:58.763121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:43:58.763309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:43:58.763383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 112, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-04T02:43:58.763525Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:43:58.763560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 112, path id: 1 2026-01-04T02:43:58.763600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 112, path id: 39 2026-01-04T02:43:58.763643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 112, path id: 40 FAKE_COORDINATOR: Erasing txId 112 2026-01-04T02:43:58.764492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 112 2026-01-04T02:43:58.764586Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046678944, cookie: 112 2026-01-04T02:43:58.764624Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 112 2026-01-04T02:43:58.764670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 12 2026-01-04T02:43:58.764713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:43:58.765167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 72057594046678944, cookie: 112 2026-01-04T02:43:58.765299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 7 PathOwnerId: 
72057594046678944, cookie: 112 2026-01-04T02:43:58.765340Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 112 2026-01-04T02:43:58.765370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 7 2026-01-04T02:43:58.765402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:43:58.766318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-01-04T02:43:58.766420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 112 2026-01-04T02:43:58.766455Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 112 2026-01-04T02:43:58.766497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 112, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-01-04T02:43:58.766546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 1 2026-01-04T02:43:58.766631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 112, subscribers: 0 2026-01-04T02:43:58.766862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:43:58.766912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-04T02:43:58.766981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:43:58.768597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-04T02:43:58.770033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-04T02:43:58.773024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 112 2026-01-04T02:43:58.773141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 112, 
wait until txId: 112 TestWaitNotification wait txId: 112 2026-01-04T02:43:58.773608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 112: send EvNotifyTxCompletion 2026-01-04T02:43:58.773669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 112 2026-01-04T02:43:58.774340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 112, at schemeshard: 72057594046678944 2026-01-04T02:43:58.774441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 112: got EvNotifyTxCompletionResult 2026-01-04T02:43:58.774486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 112: satisfy waiter [1:903:2893] TestWaitNotification: OK eventTxId 112 2026-01-04T02:43:58.775360Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirB" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:43:58.775575Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirB" took 240us result status StatusSuccess 2026-01-04T02:43:58.776002Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirB" PathDescription { Self { Name: "DirB" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 109 CreateStep: 5000044 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrB1" Value: "ValB1" } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 113 2026-01-04T02:43:58.780610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "DirB" } ApplyIf { PathId: 38 PathVersion: 8 } ApplyIf { PathId: 39 PathVersion: 7 } ApplyIf { PathId: 40 
PathVersion: 3 } } TxId: 113 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:43:58.780817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/DirB, pathId: 0, opId: 113:0, at schemeshard: 72057594046678944 2026-01-04T02:43:58.780955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 113:1, propose status:StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 40], at schemeshard: 72057594046678944 2026-01-04T02:43:58.783613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 113, response: Status: StatusPreconditionFailed Reason: "fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 40]" TxId: 113 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:43:58.783863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 113, database: /MyRoot, subject: , status: StatusPreconditionFailed, reason: fail user constraint: ApplyIf section: no path with id [OwnerId: 72057594046678944, LocalPathId: 40], operation: DROP DIRECTORY, path: /MyRoot/DirB TestModificationResult got TxId: 113, wait until txId: 113 |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TCmsTenatsTest::TestTenantRatioLimitForceRestartModeScheduled [GOOD] >> TCmsTest::ActionIssue >> TCmsTest::BridgeModeSysTablets [GOOD] >> TCmsTest::CheckSysTabletsOnNodesWithPDisks |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGC [GOOD] Test command err: 2026-01-04T02:42:47.249906Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:42:47.284511Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:42:47.284730Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:42:47.292577Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:42:47.292852Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:42:47.293063Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:42:47.293203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:42:47.293331Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:42:47.293446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:42:47.293556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:42:47.293664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:42:47.293789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:42:47.293941Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.294087Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:42:47.294206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:42:47.294326Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:42:47.324785Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:42:47.324920Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:42:47.324990Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:42:47.325155Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.325319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:42:47.325416Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:42:47.325459Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:42:47.325574Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:42:47.325657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:42:47.325709Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:42:47.325742Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:42:47.325957Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.326025Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:42:47.326071Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:42:47.326100Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:42:47.326194Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:42:47.326247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:42:47.326291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:42:47.326338Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:42:47.326397Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:42:47.326434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:42:47.326477Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:42:47.326525Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:42:47.326575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:42:47.326618Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:42:47.326840Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:42:47.326893Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:42:47.326928Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:42:47.327075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:42:47.327121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.327150Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.327200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:42:47.327240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:42:47.327286Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:42:47.327335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:42:47.327378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:42:47.327409Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:42:47.327547Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:42:47.327588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... icWakeup;fline=columnshard.cpp:257;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:43:36.873339Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:36.873431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:488;event=skip_compaction;reason=disabled; 2026-01-04T02:43:36.873501Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-04T02:43:36.873664Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=20;drop=0;skip=0;portions_counter=20;chunks=2240;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:36.873875Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=manager.cpp:10;event=lock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::2b5106f8-e91711f0-bb08408f-258313b3; 2026-01-04T02:43:36.873956Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=ro_controller.cpp:41;event=CS::CLEANUP::PORTIONS;tablet_id=9437184; 2026-01-04T02:43:36.874050Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=abstract.cpp:13;event=new_stage;stage=Started;task_id=2b5106f8-e91711f0-bb08408f-258313b3; 2026-01-04T02:43:36.874825Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=2b5106f8-e91711f0-bb08408f-258313b3;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskAccessorResources;task_id=2b5106f8-e91711f0-bb08408f-258313b3; 2026-01-04T02:43:36.874978Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=2b5106f8-e91711f0-bb08408f-258313b3;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=2b5106f8-e91711f0-bb08408f-258313b3;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskAccessors;task_id=2b5106f8-e91711f0-bb08408f-258313b3; 2026-01-04T02:43:36.875155Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:36.875232Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.631500s; 2026-01-04T02:43:36.875286Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:36.875826Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=CLEANUP_PORTIONS;task_id=2b5106f8-e91711f0-bb08408f-258313b3;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskDataResources;task_id=2b5106f8-e91711f0-bb08408f-258313b3; 2026-01-04T02:43:36.876211Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=CLEANUP_PORTIONS;task_id=2b5106f8-e91711f0-bb08408f-258313b3;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=2b5106f8-e91711f0-bb08408f-258313b3;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_finished;consumer=CLEANUP_PORTIONS;task_id=2b5106f8-e91711f0-bb08408f-258313b3;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=ReadyForConstruct;task_id=2b5106f8-e91711f0-bb08408f-258313b3; Cleanup old portions: 2 4 9 8 1 6 3 7 10 5 19 14 18 13 21 11 15 22 20 17 2026-01-04T02:43:36.876541Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 9437184 2026-01-04T02:43:36.876606Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=2b5106f8-e91711f0-bb08408f-258313b3; 2026-01-04T02:43:36.876861Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[190] (CS::CLEANUP::PORTIONS) apply at tablet 9437184 2026-01-04T02:43:36.877705Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=2b5106f8-e91711f0-bb08408f-258313b3;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=2b5106f8-e91711f0-bb08408f-258313b3; 2026-01-04T02:43:36.878311Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=148108480;raw_bytes=150092142;count=25;records=1800002} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=12353440;raw_bytes=14738900;count=2;records=150000} inactive {blob_bytes=123422240;raw_bytes=125076680;count=20;records=1500000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:43:36.890992Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=2b5106f8-e91711f0-bb08408f-258313b3;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=2b5106f8-e91711f0-bb08408f-258313b3; 2026-01-04T02:43:36.891092Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=2b5106f8-e91711f0-bb08408f-258313b3;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::CLEANUP::PORTIONS;success=1; 2026-01-04T02:43:36.891513Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=2b5106f8-e91711f0-bb08408f-258313b3;fline=manager.cpp:15;event=unlock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::2b5106f8-e91711f0-bb08408f-258313b3; 2026-01-04T02:43:36.891596Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;task_id=2b5106f8-e91711f0-bb08408f-258313b3;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:43:36.891673Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;task_id=2b5106f8-e91711f0-bb08408f-258313b3;tablet_id=9437184;fline=columnshard_impl.cpp:488;event=skip_compaction;reason=disabled; 2026-01-04T02:43:36.891741Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=2b5106f8-e91711f0-bb08408f-258313b3;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:43:36.891824Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=2b5106f8-e91711f0-bb08408f-258313b3;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:43:36.891895Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=2b5106f8-e91711f0-bb08408f-258313b3;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:36.891945Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=2b5106f8-e91711f0-bb08408f-258313b3;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:43:36.892039Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=2b5106f8-e91711f0-bb08408f-258313b3;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.630000s; 2026-01-04T02:43:36.892102Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=2b5106f8-e91711f0-bb08408f-258313b3;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:43:36.892208Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:7:3:0:6171112:0] 2026-01-04T02:43:36.892280Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:19:3:0:6171112:0] 2026-01-04T02:43:36.892324Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:17:4:0:6171112:0] 2026-01-04T02:43:36.892365Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:22:3:0:6171112:0] 2026-01-04T02:43:36.892407Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:3:2:0:6171112:0] 2026-01-04T02:43:36.892448Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:21:2:0:6171112:0] 2026-01-04T02:43:36.892489Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:14:4:0:6171112:0] 2026-01-04T02:43:36.892529Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:8:4:0:6171112:0] 2026-01-04T02:43:36.892562Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:1:3:0:6171112:0] 2026-01-04T02:43:36.892619Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:9:2:0:6171112:0] 2026-01-04T02:43:36.892680Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:10:3:0:6171112:0] 2026-01-04T02:43:36.892711Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:13:3:0:6171112:0] 2026-01-04T02:43:36.892739Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:15:2:0:6171112:0] 2026-01-04T02:43:36.892766Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:11:4:0:6171112:0] 2026-01-04T02:43:36.892801Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:5:4:0:6171112:0] 2026-01-04T02:43:36.892841Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:6:2:0:6171112:0] 2026-01-04T02:43:36.892890Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:4:3:0:6171112:0] 2026-01-04T02:43:36.892931Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:18:2:0:6171112:0] 2026-01-04T02:43:36.892994Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:2:4:0:6171112:0] 2026-01-04T02:43:36.893038Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:20:4:0:6171112:0] GC for channel 4 deletes blobs: Compactions happened: 2 Cleanups happened: 1 Old portions: 1 2 3 4 5 6 7 8 9 10 11 13 14 15 17 18 19 20 21 22 Cleaned up portions: 1 2 3 4 5 6 7 8 9 10 11 13 14 15 17 18 19 20 21 22 >> TMaintenanceApiTest::ForceAvailabilityMode [GOOD] >> TCmsTest::ManageRequestsDry [GOOD] |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::SpecialAttributes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:43:59.401422Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:43:59.401501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:43:59.401560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:43:59.401598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:43:59.401633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:43:59.401665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:43:59.401729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:43:59.401822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:43:59.402698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:43:59.403000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:43:59.493095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:43:59.493157Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:59.507451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:43:59.507642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:43:59.507842Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:43:59.515631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:43:59.515874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:43:59.516540Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:43:59.516793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:43:59.519497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:43:59.519685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:43:59.520804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:43:59.520878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:43:59.520994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:43:59.521035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:43:59.521074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:43:59.521239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:43:59.681986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.682971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.683101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.683179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.683251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.683335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.683398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.683460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.683549Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.683665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.683749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.683827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.683897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.683965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.684071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
-04T02:44:00.251605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:44:00.251663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:44:00.251687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:44:00.251714Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 2 2026-01-04T02:44:00.251755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:44:00.251817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 0/1, is published: true 2026-01-04T02:44:00.253377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2026-01-04T02:44:00.253504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 102 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000038 2026-01-04T02:44:00.253873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:00.253966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:00.254006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 102:0 HandleReply TEvPrivate::TEvOperationPlan, step: 5000038, at schemeshard: 72057594046678944 2026-01-04T02:44:00.254122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 128 -> 240 2026-01-04T02:44:00.254254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:44:00.254314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:44:00.255100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:44:00.256541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T02:44:00.257009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:00.257035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:00.257128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:44:00.257202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:00.257236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-04T02:44:00.257261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-04T02:44:00.257299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:44:00.257331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:44:00.257417Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:44:00.257445Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:44:00.257487Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:44:00.257510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:44:00.257542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-04T02:44:00.257566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:44:00.257590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:44:00.257630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:44:00.257682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:44:00.257709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-04T02:44:00.257734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 
72057594046678944, LocalPathId: 1], 8 2026-01-04T02:44:00.257754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-04T02:44:00.258515Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:44:00.258610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:44:00.258642Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:44:00.258675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-04T02:44:00.258702Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:44:00.259665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:44:00.259730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:44:00.259752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:44:00.259770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:44:00.259802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:44:00.259856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:44:00.261488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:44:00.262147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 2026-01-04T02:44:00.264246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "DirD" } AlterUserAttributes { UserAttributes { Key: 
"__extra_path_symbols_allowed" Value: "./_" } } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:00.264431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/DirD, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:44:00.264517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, at schemeshard: 72057594046678944 2026-01-04T02:44:00.266063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusInvalidParameter Reason: "UserAttributes: attribute \'__extra_path_symbols_allowed\' has invalid value \'./_\', forbidden symbols are found" TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:00.266213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: UserAttributes: attribute '__extra_path_symbols_allowed' has invalid value './_', forbidden symbols are found, operation: CREATE DIRECTORY, path: /MyRoot/DirD TestModificationResult got TxId: 103, wait until txId: 103 >> Cache::Test4 [GOOD] >> Cache::Test5 >> EscapingBasics::EncloseSecretShouldWork [GOOD] >> EscapingBasics::HideSecretsOverEncloseSecretShouldWork [GOOD] >> EscapingBasics::EscapeStringShouldWork [GOOD] >> IcebergClusterProcessor::ValidateDdlCreationForHiveWithS3 [GOOD] >> IcebergClusterProcessor::ValidateRiseErrors [GOOD] >> EntityId::Distinct [GOOD] >> EntityId::MaxId [GOOD] >> EntityId::CheckId [GOOD] |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest >> SplitterBasic::EqualSplitByMaxRowsLimitPerChunk [GOOD] >> SplitterBasic::LimitExceed [GOOD] >> EntityId::Order >> IssuesTextFiltering::ShouldRemoveDatabasePath [GOOD] >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::ForceAvailabilityMode [GOOD] |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ManageRequestsDry [GOOD] Test command err: 2026-01-04T02:43:51.687198Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 1, reason# Disconnected 2026-01-04T02:43:51.687681Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 2, reason# Disconnected 2026-01-04T02:43:51.687797Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 3, reason# Disconnected 2026-01-04T02:43:52.030374Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 1, reason# Disconnected 2026-01-04T02:43:52.030978Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 2, reason# Disconnected 2026-01-04T02:43:52.031080Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 3, reason# Disconnected >> EntityId::Order [GOOD] >> EntityId::MinId [GOOD] >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_user_attributes/unittest >> TSchemeShardUserAttrsTest::UserConditionsAtAlter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:43:59.548337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:43:59.548420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:43:59.548471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:43:59.548526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:43:59.548565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:43:59.548597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:43:59.548649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:43:59.548735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:43:59.549579Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:43:59.549864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:43:59.640834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:43:59.640915Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:59.655590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:43:59.655828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:43:59.655998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:43:59.664321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:43:59.664557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:43:59.665210Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:43:59.665462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:43:59.668259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:43:59.668437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:43:59.669502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:43:59.669554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:43:59.669676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:43:59.669721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:43:59.669760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:43:59.669961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:43:59.826328Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.827310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.827454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.827539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.827608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.827678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.827738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.827818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.827879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.827993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.828078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.828163Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 
281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.828241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.828307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:43:59.828406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... alue: "ValA2" } } ApplyIf { PathId: 38 PathVersion: 4 } } TxId: 103 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:00.445963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_user_attrs.cpp:26: TAlterUserAttrs Propose, path: /MyRoot/DirA, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:44:00.446124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 0 2026-01-04T02:44:00.446160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 103:0 type: TxAlterUserAttributes target path: [OwnerId: 72057594046678944, LocalPathId: 38] source path: 2026-01-04T02:44:00.446262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:44:00.446305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 103:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:44:00.448287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:00.448473Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: ALTER USER ATTRIBUTES, path: /MyRoot/DirA 2026-01-04T02:44:00.448640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:44:00.448679Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_alter_user_attrs.cpp:97: TAlterUserAttrs ProgressState, opId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:44:00.448725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2026-01-04T02:44:00.448832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:00.450535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-01-04T02:44:00.450669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 2026-01-04T02:44:00.450980Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:00.451086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:00.451123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_user_attrs.cpp:114: TAlterUserAttrs HandleReply TEvOperationPlan, opId: 103:0, stepId:5000040, at schemeshard: 72057594046678944 2026-01-04T02:44:00.451262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:44:00.451300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:44:00.451331Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:44:00.451363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:44:00.451414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:44:00.451454Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-04T02:44:00.451491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:44:00.451528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:44:00.451554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is 
done, operation id: 103:0 2026-01-04T02:44:00.451574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:44:00.451612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:44:00.451637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 1, subscribers: 0 2026-01-04T02:44:00.451667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 5 2026-01-04T02:44:00.453405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:00.453442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:44:00.453560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:00.453593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 FAKE_COORDINATOR: Erasing txId 103 2026-01-04T02:44:00.454027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:44:00.454097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:44:00.454135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:44:00.454183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-04T02:44:00.454217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:44:00.454288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-04T02:44:00.455895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-04T02:44:00.456118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:44:00.456166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 
2026-01-04T02:44:00.456498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:44:00.456557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:44:00.456602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:755:2745] TestWaitNotification: OK eventTxId 103 2026-01-04T02:44:00.456975Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/DirA" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:00.457135Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/DirA" took 149us result status StatusSuccess 2026-01-04T02:44:00.457430Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/DirA" PathDescription { Self { Name: "DirA" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 3 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "AttrA2" Value: "ValA2" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_user_attributes/unittest |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EntityId::CheckId [GOOD] |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EscapeStringShouldWork [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateRiseErrors [GOOD] Test command err: test case: 1 test case: 2 test case: 3 test case: 4 test case: 5 test case: 6 test case: 7 test case: 8 test case: 9 |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |92.6%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::LimitExceed [GOOD] |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> EscapingBasics::EncloseAndEscapeStringShouldWork [GOOD] |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> SplitterBasic::EqualSplitByMaxBytesLimitPerChunk [GOOD] |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest >> Cache::Test5 [GOOD] >> Cache::Test6 |92.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} |92.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_user_attributes/test-results/unittest/{meta.json ... results_accumulator.log} >> TMaintenanceApiTest::DisableMaintenance [GOOD] >> TCmsTest::CheckSysTabletsOnNodesWithPDisks [GOOD] >> EscapingBasics::HideSecretsShouldWork [GOOD] >> IcebergClusterProcessor::ValidateConfigurationWithoutCatalog [GOOD] >> TCmsTest::Mirror3dcPermissions [GOOD] >> Other::TraceNoValidGroupForbidden [GOOD] >> Other::TraceInvalidTokenForbidden [GOOD] >> ActorPage::NoValidGroupForbidden [GOOD] >> ActorPage::OptionsNoContent >> TCmsTest::BridgeModeGroups [GOOD] >> TCmsTest::BridgeModeNodeLimit >> Other::UnknownPathNotFound [GOOD] |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> IcebergClusterProcessor::ValidateConfigurationWithoutCatalog [GOOD] |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest >> Other::TraceHttpOk [GOOD] |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TMaintenanceApiTest::DisableMaintenance [GOOD] |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::CheckSysTabletsOnNodesWithPDisks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::Mirror3dcPermissions [GOOD] Test command err: 2026-01-04T02:43:50.758981Z node 1 :CMS ERROR: sentinel.cpp:854: [Sentinel] [StateUpdater] Cannot get pdisks state: nodeId# 1, reason# Disconnected |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] >> TCmsTest::WalleTasksWithNodeLimit [GOOD] >> TCmsTest::WalleRequestDuringRollingRestart >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] >> 
TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesTimeout [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnFailedGetAllTopicsRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst >> TConsoleConfigTests::TestModifyConfigItem >> Cache::Test6 [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNotOkStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnZeroBalancerTabletIdInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> Other::TraceNoValidGroupForbidden [GOOD] Test command err: 2026-01-04T02:43:57.334773Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331662979757097:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:43:57.334844Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:43:57.621745Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:57.621895Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:57.661156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:57.692998Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:43:57.694901Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:57.949723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:57.949753Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:57.949758Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:57.949861Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:43:57.952955Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:58.341163Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:43:58.366468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:43:58.425477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:43:58.437483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:43:58.445022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> Other::TraceInvalidTokenForbidden [GOOD] Test command err: 2026-01-04T02:43:57.510914Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331664624757952:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:43:57.511627Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:43:57.766116Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:43:57.821114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:57.821273Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:57.887693Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:57.889238Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:57.893885Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331664624757909:2081] 1767494637509316 != 1767494637509319 2026-01-04T02:43:58.015629Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:58.015678Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:58.015693Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:58.015829Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:43:58.046292Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:58.359461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:43:58.367212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:43:58.421212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:43:58.437685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:43:58.520698Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TCmsTest::ActionIssue [GOOD] >> TCmsTest::ActionIssuePartialPermissions >> TCacheTest::Navigate >> TCacheTest::RacyRecreateAndSync >> TCacheTest::SystemViews >> TCacheTestWithoutRealSystemViewPaths::SystemViews >> TCacheTest::WatchRoot >> DataShardVolatile::DistributedWriteThenCopyTable [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsert |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> Other::UnknownPathNotFound [GOOD] Test command err: 2026-01-04T02:43:57.441874Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331665668032275:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:43:57.441929Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:43:57.730098Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:43:57.764426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:57.764586Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:57.807832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:57.836240Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:57.837962Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331665668032246:2081] 1767494637440276 != 1767494637440279 2026-01-04T02:43:57.953095Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:57.953126Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:57.953136Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:57.953216Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:43:57.954483Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:58.331651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:43:58.338305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:43:58.427356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:43:58.453420Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:43:58.454995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:43:58.461932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::SuccessfullyPassesResponsesFromTablets [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond >> TCacheTest::List >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> 
TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnEmptyTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName >> TCacheTest::Attributes >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] |92.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/common/ut/unittest >> Cache::Test6 [GOOD] |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest |92.6%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/common/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> Other::TraceHttpOk [GOOD] Test command err: 2026-01-04T02:43:57.334044Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331665971642827:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:43:57.334631Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:43:57.678946Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:43:57.680982Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331665971642788:2081] 1767494637331054 != 1767494637331057 2026-01-04T02:43:57.692269Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:57.692403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:57.693693Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:57.704505Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:57.896371Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:57.949031Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:57.949515Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:57.949529Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:57.949628Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:43:58.333436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:43:58.342003Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:43:58.345626Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:43:58.423032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:43:58.447300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:43:58.453509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 >> TCacheTest::MigrationCommon |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesFirst [GOOD] >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TConsoleConfigTests::TestModifyConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItem ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::FailsOnNoBalancerInGetNodeRequest [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } |92.7%| [TA] $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TCacheTest::Navigate [GOOD] >> TCacheTest::PathBelongsToDomain >> TCacheTest::SystemViews [GOOD] >> TCacheTest::TableSchemaVersion >> TCacheTestWithoutRealSystemViewPaths::SystemViews [GOOD] >> TCacheTestWithoutRealSystemViewPaths::CheckSystemViewAccess >> TCacheTest::List [GOOD] >> TCacheTest::MigrationCommit >> TConsoleConfigHelpersTests::TestConfigCourier >> TCacheTest::Attributes [GOOD] >> TCacheTest::CheckAccess |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest |92.7%| [TA] {RESULT} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/common/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConsoleTests::TestRestartConsoleAndPools >> TCacheTest::RacyRecreateAndSync [GOOD] >> TCacheTest::RacyCreateAndSync >> TCacheTest::WatchRoot [GOOD] >> TCacheTestWithDrops::LookupErrorUponEviction >> TConsoleTests::TestSetDefaultStorageUnitsQuota >> TCacheTestWithoutRealSystemViewPaths::CheckSystemViewAccess [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::FailsOnDuplicatedTopicName [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not created: rt3.dc1--topic2, rt3.dc1--topic1, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' has no balancer, Marker# PQ193" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "TopicRequest must have Topic field." ErrorCode: BAD_REQUEST } Assert failed: Check response: { Status: 128 ErrorReason: "multiple TopicRequest for topic \'rt3.dc1--topic1\'" ErrorCode: BAD_REQUEST } |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] >> TCacheTest::PathBelongsToDomain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_DisconnectionComesSecond [GOOD] Test command err: Assert failed: Check response: { Status: 130 ErrorReason: "Timeout while waiting for response, may be just slow, Marker# PQ16" ErrorCode: ERROR } 2026-01-04T02:44:04.072207Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:44:04.075742Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:44:04.076063Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-04T02:44:04.076133Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:04.076206Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-04T02:44:04.076963Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:260:2254], now have 1 active actors on pipe 2026-01-04T02:44:04.077080Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:44:04.105301Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:44:04.105515Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:04.106594Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:44:04.106815Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:44:04.107304Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:44:04.107673Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:268:2225] 2026-01-04T02:44:04.109300Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:44:04.109353Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-04T02:44:04.109392Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2225] 2026-01-04T02:44:04.109435Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:44:04.109486Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:44:04.109519Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-04T02:44:04.109551Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:44:04.109590Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:04.109625Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:44:04.109663Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:04.109697Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-04T02:44:04.109784Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:44:04.109994Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:44:04.110387Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:271:2259], now have 1 active actors on pipe 2026-01-04T02:44:04.161070Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:44:04.164818Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:44:04.165178Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-04T02:44:04.165236Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:04.165295Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-04T02:44:04.166152Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:399:2355], now have 1 active actors on pipe 2026-01-04T02:44:04.166252Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:44:04.168954Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-04T02:44:04.169078Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:04.170101Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 2 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-04T02:44:04.170261Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 
2026-01-04T02:44:04.170628Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:44:04.170870Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [2:407:2326] 2026-01-04T02:44:04.173096Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:44:04.173159Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-04T02:44:04.173207Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [2:407:2326] 2026-01-04T02:44:04.173259Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:44:04.173323Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:44:04.173375Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-04T02:44:04.173414Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-04T02:44:04.173455Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:04.173515Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T02:44:04.173566Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:04.173606Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-04T02:44:04.173697Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-01-04T02:44:04.173940Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-04T02:44:04.174496Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:410:2360], now have 1 active actors on pipe 2026-01-04T02:44:04.175883Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:416:2363], now have 1 active actors on pipe 2026-01-04T02:44:04.176135Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [2:418:2364], now have 1 active actors on pipe 2026-01-04T02:44:04.176414Z node 2 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928037][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:44:04.178829Z node 2 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:44:04.179123Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [2:416:2363] destroyed 2026-01-04T02:44:04.179415Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928139] server disconnected, pipe [2:418:2364] destroyed 2026-01-04T02:44:04.680152Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:44:04.683971Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:44:04.684293Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-04T02:44:04.684359Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:04.684435Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01 ... titionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:44:04.822356Z node 3 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:44:04.833541Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:545:2452], now have 1 active actors on pipe 2026-01-04T02:44:04.861765Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:44:04.864085Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:44:04.865362Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:04.865426Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-04T02:44:04.865555Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:44:04.865953Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:44:04.866217Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:601:2455] 2026-01-04T02:44:04.868419Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-01-04T02:44:04.869916Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-01-04T02:44:04.870250Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-01-04T02:44:04.870387Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000002 to m0000000003 2026-01-04T02:44:04.870748Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-01-04T02:44:04.870845Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2026-01-04T02:44:04.871065Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-01-04T02:44:04.871121Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-04T02:44:04.871168Z node 3 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:44:04.871240Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-04T02:44:04.871360Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-01-04T02:44:04.871633Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-01-04T02:44:04.871689Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:44:04.871736Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-01-04T02:44:04.871800Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:601:2455] 2026-01-04T02:44:04.871864Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:44:04.871927Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:44:04.871976Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-04T02:44:04.872019Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-04T02:44:04.872070Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:04.872115Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T02:44:04.872162Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:04.872204Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-04T02:44:04.872370Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:44:04.872596Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-04T02:44:04.873285Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [3:536:2445] destroyed 2026-01-04T02:44:04.873355Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [3:535:2444] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 78 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 78 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 
0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 92 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 92 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 38 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 38 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionStatusMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2026-01-04T02:44:04.150421Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:44:04.154546Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:44:04.154870Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-04T02:44:04.154937Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:04.155020Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-04T02:44:04.155801Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:260:2254], now have 1 active actors on pipe 2026-01-04T02:44:04.155917Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:44:04.180240Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:44:04.180434Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:04.181278Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:44:04.181449Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:44:04.181919Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:44:04.182384Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [2:268:2225] 2026-01-04T02:44:04.185005Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:44:04.185072Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-04T02:44:04.185147Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [2:268:2225] 2026-01-04T02:44:04.185208Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:44:04.185274Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:44:04.185318Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-04T02:44:04.185357Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:44:04.185405Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:04.185446Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:44:04.185497Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:04.185536Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-04T02:44:04.185649Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:44:04.185933Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:44:04.186437Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [2:271:2259], now have 1 active actors on pipe 2026-01-04T02:44:04.238762Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:44:04.242750Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:44:04.243108Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928137] no config, start with empty partitions and default config 2026-01-04T02:44:04.243172Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:04.243246Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928137] doesn't have tx writes info 2026-01-04T02:44:04.243975Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [2:399:2355], now have 1 active actors on pipe 2026-01-04T02:44:04.244066Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:44:04.246151Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:44:04.246274Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:04.246858Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928137] Config applied version 2 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:44:04.246966Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 
2026-01-04T02:44:04.247212Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:44:04.247382Z node 2 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [2:407:2326] 2026-01-04T02:44:04.248777Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:44:04.248821Z node 2 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2026-01-04T02:44:04.248851Z node 2 :PERSQUEUE INFO: partition.cpp:712: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [2:407:2326] 2026-01-04T02:44:04.248886Z node 2 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:44:04.248936Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:44:04.248969Z node 2 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928137][Partition][0][StateIdle] Process pending events. Count 0 2026-01-04T02:44:04.248994Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:44:04.249021Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:04.249047Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:44:04.249089Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:04.249118Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928137][Partition][0][StateIdle] Try persist 2026-01-04T02:44:04.249190Z node 2 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:44:04.249358Z node 2 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:44:04.249734Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [2:410:2360], now have 1 active actors on pipe 2026-01-04T02:44:04.263646Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:44:04.266042Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:44:04.266241Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-01-04T02:44:04.266281Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:04.266321Z node 2 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928138] doesn't have tx writes info 2026-01-04T02:44:04.266935Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [2:459:2396], now have 1 active actors on pipe 2026-01-04T02:44:04.266993Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:44:04.268675Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-04T02:44:04.268760Z node 2 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:04.269311Z node 2 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928138] Config applied version 3 actor [2:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { ... ][StateIdle] Process user action and tx pending commits 2026-01-04T02:44:04.808326Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:04.808354Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-04T02:44:04.808451Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-01-04T02:44:04.808633Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-04T02:44:04.809004Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:528:2440], now have 1 active actors on pipe 2026-01-04T02:44:04.809881Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [3:534:2443], now have 1 active actors on pipe 2026-01-04T02:44:04.810067Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [3:535:2444], now have 1 active actors on pipe 2026-01-04T02:44:04.810205Z node 3 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928037][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:44:04.810382Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:536:2444], now have 1 active actors on pipe 2026-01-04T02:44:04.810441Z node 3 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928138][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:44:04.810647Z node 3 :PERSQUEUE DEBUG: partition.cpp:1025: [72057594037928139][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 0 PartitionConfig{ MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } 2026-01-04T02:44:04.821596Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [3:544:2451], now have 1 active actors on pipe 2026-01-04T02:44:04.845959Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:44:04.848283Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:44:04.849452Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:04.849512Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-04T02:44:04.849638Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:44:04.849989Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:44:04.850230Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [3:600:2454] 2026-01-04T02:44:04.852278Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-01-04T02:44:04.853482Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-01-04T02:44:04.853782Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-01-04T02:44:04.853922Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From m0000000002 to m0000000003 2026-01-04T02:44:04.854221Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-01-04T02:44:04.854312Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2026-01-04T02:44:04.854533Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-01-04T02:44:04.854584Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-04T02:44:04.854625Z node 3 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:44:04.854669Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-04T02:44:04.854777Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-01-04T02:44:04.855005Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-01-04T02:44:04.855053Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:44:04.855106Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 
2026-01-04T02:44:04.855162Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [3:600:2454] 2026-01-04T02:44:04.855221Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:44:04.855278Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:44:04.855322Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-04T02:44:04.855365Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-04T02:44:04.855404Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:04.855443Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T02:44:04.855484Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:04.855519Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-04T02:44:04.855605Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:44:04.855797Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-04T02:44:04.856495Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [3:535:2444] destroyed 2026-01-04T02:44:04.856555Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [3:534:2443] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionStatusResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionResult { Partition: 0 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 38 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 38 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionResult { Partition: 1 Status: STATUS_OK LastInitDurationSeconds: 0 CreationTimestamp: 0 GapCount: 0 
GapSize: 0 AvgWriteSpeedPerSec: 0 AvgWriteSpeedPerMin: 0 AvgWriteSpeedPerHour: 0 AvgWriteSpeedPerDay: 0 AvgReadSpeedPerSec: 0 AvgReadSpeedPerMin: 0 AvgReadSpeedPerHour: 0 AvgReadSpeedPerDay: 0 ReadBytesQuota: 0 WriteBytesQuota: 50000000 PartitionSize: 0 StartOffset: 0 EndOffset: 0 LastWriteTimestampMs: 78 WriteLagMs: 0 AvgQuotaSpeedPerSec: 0 AvgQuotaSpeedPerMin: 0 AvgQuotaSpeedPerHour: 0 AvgQuotaSpeedPerDay: 0 SourceIdCount: 0 SourceIdRetentionPeriodSec: 0 UsedReserveSize: 0 AggregatedCounters { Values: 78 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 50000000 Values: 0 Values: 9223372036854775807 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 1 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 Values: 0 ExtendedCounters { Values: 0 Values: 0 Values: 0 } } Generation: 2 Cookie: 1 ScaleStatus: NORMAL } PartitionResult { Partition: 2 Status: STATUS_UNKNOWN } ErrorCode: OK } } } >> TConsoleConfigHelpersTests::TestConfigCourier [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriber >> TCmsTest::WalleRequestDuringRollingRestart [GOOD] >> TCmsTest::WalleTasksDifferentPriorities >> TCacheTest::CheckAccess [GOOD] >> TCacheTest::CheckSystemViewAccess |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TCacheTest::RacyCreateAndSync [GOOD] >> TCacheTest::TableSchemaVersion [GOOD] >> TCacheTest::MigrationCommon [GOOD] >> TCacheTest::MigrationDeletedPathNavigate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetPartitionLocationsMetaRequestTest::HandlesPipeDisconnection_AnswerDoesNotArrive [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "no path \'Root/PQ\', Marker# PQ150" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'rt3.dc1--topic1\' is not created, Marker# PQ94" ErrorCode: UNKNOWN_TOPIC } 2026-01-04T02:44:04.504215Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928037] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:44:04.508581Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928037] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:44:04.508952Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928037] no config, start with empty partitions and default config 2026-01-04T02:44:04.509048Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:04.509130Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928037] doesn't have tx writes info 2026-01-04T02:44:04.509919Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [3:258:2252], now have 1 active actors on pipe 2026-01-04T02:44:04.510069Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928037] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:44:04.539123Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928037] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:44:04.539334Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928037] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:04.540346Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928037] Config applied version 1 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic1" Version: 1 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:44:04.540557Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:44:04.541035Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:44:04.541553Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928037][Partition][0][StateInit] bootstrapping 0 [3:266:2223] 2026-01-04T02:44:04.544406Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic1:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:44:04.544480Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic1:0:Initializer] Initializing completed. 2026-01-04T02:44:04.544538Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928037][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic1' partition 0 generation 2 [3:266:2223] 2026-01-04T02:44:04.544596Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928037][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic1 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:44:04.544670Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928037][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:44:04.544714Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928037][Partition][0][StateIdle] Process pending events. 
Count 0 2026-01-04T02:44:04.544759Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928037][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:44:04.544816Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:04.544859Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928037][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:44:04.545043Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928037][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:04.545091Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928037][Partition][0][StateIdle] Try persist 2026-01-04T02:44:04.545221Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928037][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:44:04.545556Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928037][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:44:04.546082Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [3:269:2257], now have 1 active actors on pipe 2026-01-04T02:44:04.601725Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928137] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:44:04.605641Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928137] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:44:04.606094Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928137] no config, start with empty partitions and default config 2026-01-04T02:44:04.606182Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:04.606250Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928137] doesn't have tx writes info 2026-01-04T02:44:04.607022Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [3:399:2355], now have 1 active actors on pipe 2026-01-04T02:44:04.607171Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928137] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:44:04.610017Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928137] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:44:04.610150Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928137] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:04.610999Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928137] Config applied version 2 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--topic2" Version: 2 Partitions { PartitionId: 0 } AllPartitions { PartitionId: 0 } 2026-01-04T02:44:04.611151Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitConfigStep 
2026-01-04T02:44:04.611531Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:44:04.611796Z node 3 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928137][Partition][0][StateInit] bootstrapping 0 [3:407:2326] 2026-01-04T02:44:04.614246Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:44:04.614312Z node 3 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:0:Initializer] Initializing completed. 2026-01-04T02:44:04.614367Z node 3 :PERSQUEUE INFO: partition.cpp:712: [72057594037928137][Partition][0][StateInit] init complete for topic 'rt3.dc1--topic2' partition 0 generation 2 [3:407:2326] 2026-01-04T02:44:04.614421Z node 3 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928137][Partition][0][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:44:04.614495Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928137][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:44:04.614541Z node 3 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928137][Partition][0][StateIdle] Process pending events. Count 0 2026-01-04T02:44:04.614584Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928137][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:44:04.614626Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:04.614666Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928137][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:44:04.614712Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928137][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:04.614752Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928137][Partition][0][StateIdle] Try persist 2026-01-04T02:44:04.614846Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928137][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:44:04.615079Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928137][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:44:04.615597Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928137] server connected, pipe [3:410:2360], now have 1 active actors on pipe 2026-01-04T02:44:04.633114Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928138] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:44:04.636427Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928138] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:44:04.636777Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928138] no config, start with empty partitions and default config 2026-01-04T02:44:04.636845Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:04.636908Z node 3 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928138] doesn't have tx writes info 2026-01-04T02:44:04.637625Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [3:459:2396], now have 1 active actors on pipe 2026-01-04T02:44:04.637769Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928138] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:44:04.640066Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928138] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 1 TopicName: "rt3.dc1--topic2" Version: 3 Partitions { PartitionId: 1 } AllPartitions { PartitionId: 1 } 2026-01-04T02:44:04.640219Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928138] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:04.641037Z node 3 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928138] Config applied version 3 actor [3:103:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInfligh ... 5.243569Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928138][Partition][1][StateIdle] Try persist 2026-01-04T02:44:05.243646Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928138][Partition][1][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:44:05.243866Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928138][Partition][1][StateIdle] No data for blobs compaction 2026-01-04T02:44:05.244364Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [4:471:2401], now have 1 active actors on pipe 2026-01-04T02:44:05.261757Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:44:05.264520Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. 
From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:44:05.264754Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037928139] no config, start with empty partitions and default config 2026-01-04T02:44:05.264808Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:05.264857Z node 4 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-04T02:44:05.265475Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:520:2437], now have 1 active actors on pipe 2026-01-04T02:44:05.265612Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037928139] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:44:05.267387Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037928139] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-04T02:44:05.267491Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:05.268193Z node 4 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037928139] Config applied version 8 actor [4:104:2137] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 MaxWriteInflightSize: 90000000 } PartitionIds: 2 TopicName: "rt3.dc1--topic2" Version: 8 Partitions { PartitionId: 2 } AllPartitions { PartitionId: 2 } 2026-01-04T02:44:05.268318Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:44:05.268561Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:44:05.268754Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:528:2408] 2026-01-04T02:44:05.270614Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:44:05.270666Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-04T02:44:05.270708Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 2 [4:528:2408] 2026-01-04T02:44:05.270758Z node 4 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:44:05.270815Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:44:05.270852Z node 4 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. 
Count 0 2026-01-04T02:44:05.270884Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-04T02:44:05.270919Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:05.270964Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T02:44:05.271001Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:05.271029Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-04T02:44:05.271108Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:44:05.271282Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-04T02:44:05.271717Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:531:2442], now have 1 active actors on pipe 2026-01-04T02:44:05.272606Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928037] server connected, pipe [4:537:2445], now have 1 active actors on pipe 2026-01-04T02:44:05.272734Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928138] server connected, pipe [4:538:2446], now have 1 active actors on pipe 2026-01-04T02:44:05.272777Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:539:2446], now have 1 active actors on pipe 2026-01-04T02:44:05.283822Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037928139] server connected, pipe [4:544:2450], now have 1 active actors on pipe 2026-01-04T02:44:05.309724Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037928139] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:44:05.311852Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037928139] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:44:05.313029Z node 4 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037928139] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:05.313113Z node 4 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037928139] doesn't have tx writes info 2026-01-04T02:44:05.313244Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:44:05.313563Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:44:05.313917Z node 4 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037928139][Partition][2][StateInit] bootstrapping 2 [4:600:2453] 2026-01-04T02:44:05.316015Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDiskStatusStep 2026-01-04T02:44:05.317227Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMetaStep 2026-01-04T02:44:05.317543Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitInfoRangeStep 2026-01-04T02:44:05.317670Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. 
From m0000000002 to m0000000003 2026-01-04T02:44:05.317937Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataRangeStep 2026-01-04T02:44:05.318018Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From d0000000002 to d0000000003 2026-01-04T02:44:05.318305Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitDataStep 2026-01-04T02:44:05.318367Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitEndWriteTimestampStep 2026-01-04T02:44:05.318417Z node 4 :PERSQUEUE INFO: partition_init.cpp:1016: [rt3.dc1--topic2:2:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 2026-01-04T02:44:05.318462Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitMessageDeduplicatorStep 2026-01-04T02:44:05.318574Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:1498: Read range request. From e0000000002|0000000000000000 to e0000000003 2026-01-04T02:44:05.318776Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TDeleteKeysStep 2026-01-04T02:44:05.318829Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--topic2:2:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:44:05.318872Z node 4 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--topic2:2:Initializer] Initializing completed. 2026-01-04T02:44:05.318925Z node 4 :PERSQUEUE INFO: partition.cpp:712: [72057594037928139][Partition][2][StateInit] init complete for topic 'rt3.dc1--topic2' partition 2 generation 3 [4:600:2453] 2026-01-04T02:44:05.318983Z node 4 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037928139][Partition][2][StateInit] SYNC INIT topic rt3.dc1--topic2 partitition 2 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:44:05.319046Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037928139][Partition][2][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:44:05.319090Z node 4 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037928139][Partition][2][StateIdle] Process pending events. Count 0 2026-01-04T02:44:05.319132Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037928139][Partition][2][StateIdle] Process user action and tx events 2026-01-04T02:44:05.319175Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:05.319214Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037928139][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T02:44:05.319254Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037928139][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:44:05.319312Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037928139][Partition][2][StateIdle] Try persist 2026-01-04T02:44:05.319408Z node 4 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037928139][Partition][2][StateIdle] Process MLP pending events. 
Count 0 2026-01-04T02:44:05.319597Z node 4 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037928139][Partition][2][StateIdle] No data for blobs compaction 2026-01-04T02:44:05.320259Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928037] server disconnected, pipe [4:537:2445] destroyed 2026-01-04T02:44:05.320482Z node 4 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72057594037928138] server disconnected, pipe [4:538:2446] destroyed RESPONSE Status: 1 ErrorCode: OK MetaResponse { CmdGetPartitionLocationsResult { TopicResult { Topic: "rt3.dc1--topic1" PartitionLocation { Partition: 0 Host: "::1" HostId: 4 ErrorCode: OK } ErrorCode: OK } TopicResult { Topic: "rt3.dc1--topic2" PartitionLocation { Partition: 1 Host: "::1" HostId: 4 ErrorCode: OK } PartitionLocation { Partition: 2 ErrorCode: INITIALIZING ErrorReason: "Tablet for that partition is not running" } ErrorCode: OK } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithoutRealSystemViewPaths::CheckSystemViewAccess [GOOD] Test command err: 2026-01-04T02:44:04.664465Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:04.664544Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TestModificationResults wait txId: 1 2026-01-04T02:44:04.725661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 2026-01-04T02:44:05.266107Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:05.266182Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded TestModificationResults wait txId: 1 2026-01-04T02:44:05.303724Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2026-01-04T02:44:05.309568Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 
FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:44:05.315587Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-04T02:44:05.316320Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TestModificationResult got TxId: 102, wait until txId: 102 2026-01-04T02:44:05.321104Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [2:214:2199], for# user1@builtin, access# DescribeSchema 2026-01-04T02:44:05.322026Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [2:220:2205], for# user1@builtin, access# |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> KqpAnalyze::AnalyzeTable-ColumnStore [GOOD] >> KqpExplain::AggGroupLimit |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> TConsoleConfigTests::TestRemoveConfigItem [GOOD] >> TConsoleConfigTests::TestRemoveConfigItems >> TCacheTest::CheckSystemViewAccess [GOOD] >> TCacheTest::MigrationCommit [GOOD] >> TCacheTest::CookiesArePreserved ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::PathBelongsToDomain [GOOD] Test command err: 2026-01-04T02:44:04.651344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:04.652195Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... Step5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... 
waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-04T02:44:05.469892Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:44:05.477724Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-04T02:44:05.478494Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 TestModificationResults wait txId: 103 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 FAKE_COORDINATOR: Erasing txId 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 102 TestWaitNotification wait txId: 103 2026-01-04T02:44:05.487569Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:44:05.487697Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 TestWaitNotification: OK eventTxId 103 TestModificationResults wait txId: 104 2026-01-04T02:44:05.488557Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 104:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 104 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000041 FAKE_COORDINATOR: Erasing txId 
104 TestModificationResult got TxId: 104, wait until txId: 104 TestModificationResults wait txId: 105 FAKE_COORDINATOR: Add transaction: 105 at step: 5000042 FAKE_COORDINATOR: advance: minStep5000042 State->FrontStep: 5000041 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 105 at step: 5000042 FAKE_COORDINATOR: Erasing txId 105 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 104 TestWaitNotification wait txId: 105 2026-01-04T02:44:05.497614Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-04T02:44:05.497706Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 TestWaitNotification: OK eventTxId 105 2026-01-04T02:44:05.502576Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:505:2473], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:44:05.502817Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:507:2475], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:05.503112Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:509:2477], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:05.504438Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:522:2484], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-04T02:44:05.505330Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:531:2487], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-04T02:44:05.506137Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:539:2495], domain# [OwnerId: 72057594046678944, LocalPathId: 1], path's domain# [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:44:05.506389Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:541:2497], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:05.506716Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:543:2499], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:05.507490Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:549:2505], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-04T02:44:05.507739Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:551:2507], domain# [OwnerId: 72057594046678944, LocalPathId: 39], path's domain# [OwnerId: 72057594046678944, LocalPathId: 41] |92.7%| [TM] {BAZEL_UPLOAD} 
ydb/core/tx/scheme_board/ut_cache/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKTimestamp [GOOD] Test command err: 2026-01-04T02:42:57.174423Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:42:57.208924Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:42:57.209192Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:42:57.217067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:42:57.217343Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:42:57.217576Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:42:57.217756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:42:57.217887Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:42:57.218006Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:42:57.218158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:42:57.218305Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:42:57.218461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:42:57.218603Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:42:57.218753Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:42:57.218882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:42:57.219010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:42:57.242921Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:42:57.243035Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:42:57.243075Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:42:57.243216Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:57.243339Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:42:57.243412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:42:57.243445Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:42:57.243517Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:42:57.243563Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:42:57.243594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:42:57.243616Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:42:57.243781Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:57.243843Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:42:57.243872Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:42:57.243894Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:42:57.243961Z node 1 :TX_COLUMNSHARD INFO: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:42:57.243998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:42:57.244026Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:42:57.244045Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:42:57.244076Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:42:57.244124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:42:57.244151Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:42:57.244184Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:42:57.244214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:42:57.244237Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:42:57.244391Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:42:57.244476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:42:57.244516Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:42:57.244604Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:42:57.244631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:42:57.244651Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:42:57.244692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:42:57.244720Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:42:57.244751Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:42:57.244786Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:42:57.244811Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:42:57.244836Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:42:57.244930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:42:57.244974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... nes;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=9216; 2026-01-04T02:44:02.462867Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=11; 2026-01-04T02:44:02.463858Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=915; 2026-01-04T02:44:02.463924Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=10537; 2026-01-04T02:44:02.463976Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=10710; 2026-01-04T02:44:02.464048Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=15; 2026-01-04T02:44:02.464143Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=48; 
2026-01-04T02:44:02.464190Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=11466; 2026-01-04T02:44:02.464390Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=131; 2026-01-04T02:44:02.464585Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=144; 2026-01-04T02:44:02.464801Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=162; 2026-01-04T02:44:02.465018Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=143; 2026-01-04T02:44:02.467940Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2863; 2026-01-04T02:44:02.470651Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2622; 2026-01-04T02:44:02.470736Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=12; 2026-01-04T02:44:02.470816Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=35; 2026-01-04T02:44:02.470861Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-04T02:44:02.470946Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=48; 2026-01-04T02:44:02.470995Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-04T02:44:02.471105Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=71; 2026-01-04T02:44:02.471150Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-04T02:44:02.471236Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=37; 2026-01-04T02:44:02.471337Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=59; 2026-01-04T02:44:02.471446Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=56; 2026-01-04T02:44:02.471501Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=30045; 2026-01-04T02:44:02.471686Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110437896;raw_bytes=171489958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:44:02.471824Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:44:02.471887Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:44:02.471970Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:44:02.472030Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:44:02.472197Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:44:02.472275Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:44:02.472327Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:44:02.472383Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:44:02.472466Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:44:02.472521Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:02.472613Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:02.472731Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:44:02.472979Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.020000s; 2026-01-04T02:44:02.476374Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:44:02.476626Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:44:02.476708Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:44:02.476799Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:44:02.476863Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:44:02.476952Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:44:02.477031Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:44:02.477095Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:02.477144Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:02.477247Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-04T02:44:02.477319Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:44:02.478069Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.027000s; 2026-01-04T02:44:02.478154Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::RacyCreateAndSync [GOOD] Test command err: 2026-01-04T02:44:04.651343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:04.652200Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... 2 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 
State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan 
to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 
FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-04T02:44:05.717306Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:44:05.730362Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::TableSchemaVersion [GOOD] Test command err: 2026-01-04T02:44:04.652603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:04.652666Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... _COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 
281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 
State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... 
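The FAKE_COORDINATOR replay above follows a fixed pattern: each txId is first recorded with "Add transaction: <txId> at step: <step>", later gets a "Send Plan to tablet 72057594046678944 for txId: <txId>", and is finally removed with "Erasing txId <txId>". A minimal sketch of cross-checking that pairing from a captured log is below; it assumes the output has been saved to a plain-text file (the file name and helper are illustrative, not part of the run), and because the framework elides part of the replay ("..."), missing matches may simply reflect truncation rather than a real problem.

```python
#!/usr/bin/env python3
"""Minimal sketch: cross-check FAKE_COORDINATOR records in a captured test log.

Assumptions (illustrative, not from the original run): the captured output is
available as a plain-text file, here called 'ya_test_err.log'; the record
formats are taken verbatim from the messages above.
"""
import re
import sys
from pathlib import Path


def check(path: Path) -> int:
    # Records can be wrapped mid-token in the captured log, so join everything
    # into one whitespace-normalized string before matching.
    text = re.sub(r"\s+", " ", path.read_text(errors="replace"))

    added = set(re.findall(r"FAKE_COORDINATOR: Add transaction: (\d+) at step: \d+", text))
    planned = set(re.findall(r"FAKE_COORDINATOR: Send Plan to tablet \d+ for txId: (\d+)", text))
    erased = set(re.findall(r"FAKE_COORDINATOR: Erasing txId (\d+)", text))

    never_planned = sorted(added - planned, key=int)
    never_erased = sorted(added - erased, key=int)

    print(f"added={len(added)} planned={len(planned)} erased={len(erased)}")
    if never_planned:
        print("txIds added but never sent a plan:", ", ".join(never_planned))
    if never_erased:
        print("txIds added but never erased:", ", ".join(never_erased))
    # Non-zero exit when something looks inconsistent (may also be log truncation).
    return 1 if (never_planned or never_erased) else 0


if __name__ == "__main__":
    sys.exit(check(Path(sys.argv[1] if len(sys.argv) > 1 else "ya_test_err.log")))
```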
waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-04T02:44:05.493467Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2026-01-04T02:44:05.532643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 1 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 38 BindedChannels { StoragePoolName: "pool-1" StoragePoolKind: "pool-kind-1" } BindedChannels { StoragePoolName: "pool-1" StoragePoolKind: "pool-kind-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000038 FAKE_COORDINATOR: Erasing txId 101 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-04T02:44:05.652326Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72075186233409546 for txId: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000039 FAKE_COORDINATOR: Erasing txId 102 TestWaitNotification: OK eventTxId 102 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> TCmsTest::ActionIssuePartialPermissions [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeThrottler >> TCacheTest::CookiesArePreserved [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CheckSystemViewAccess [GOOD] Test command err: 2026-01-04T02:44:04.724700Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:04.724768Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 
FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... ate->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 
281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 
281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-04T02:44:06.074509Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2026-01-04T02:44:06.078117Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:44:06.080293Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 2026-01-04T02:44:06.082563Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:430:2410], for# user1@builtin, access# DescribeSchema 2026-01-04T02:44:06.083051Z node 3 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [3:436:2416], for# user1@builtin, access# |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> TConsoleConfigTests::TestRemoveConfigItems [GOOD] >> TConsoleConfigTests::TestValidation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt32 [GOOD] Test command err: 2026-01-04T02:42:57.820868Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:42:57.855666Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:42:57.855921Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:42:57.863413Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:42:57.863668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:42:57.863927Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:42:57.864083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:42:57.864194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:42:57.864317Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:42:57.864461Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:42:57.864573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:42:57.864699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:42:57.864830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:42:57.864974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:42:57.865103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:42:57.865232Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:42:57.895162Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:42:57.895302Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:42:57.895352Z node 1 :TX_COLUMNSHARD NOTICE: 
log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:42:57.895550Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:57.895728Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:42:57.895828Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:42:57.895884Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:42:57.895985Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:42:57.896046Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:42:57.896090Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:42:57.896131Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:42:57.896331Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:57.896408Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:42:57.896456Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:42:57.896489Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:42:57.896584Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:42:57.896637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:42:57.896676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:42:57.896704Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:42:57.896750Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:42:57.896806Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:42:57.896849Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:42:57.896892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:42:57.896930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:42:57.896964Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:42:57.897202Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:42:57.897261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:42:57.897308Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:42:57.897422Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:42:57.897465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:42:57.897492Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:42:57.897536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:42:57.897579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:42:57.897620Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:42:57.897676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:42:57.897716Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:42:57.897749Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:42:57.897916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:42:57.897977Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... mn_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=5239; 2026-01-04T02:44:02.764912Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=7; 2026-01-04T02:44:02.765533Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=581; 2026-01-04T02:44:02.765570Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=6083; 2026-01-04T02:44:02.765605Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=6186; 2026-01-04T02:44:02.765650Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=9; 2026-01-04T02:44:02.765710Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=32; 2026-01-04T02:44:02.765741Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=6707; 2026-01-04T02:44:02.765956Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=148; 2026-01-04T02:44:02.766149Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=132; 2026-01-04T02:44:02.766371Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=164; 
2026-01-04T02:44:02.766522Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=103; 2026-01-04T02:44:02.768066Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=1504; 2026-01-04T02:44:02.769473Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1360; 2026-01-04T02:44:02.769519Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=7; 2026-01-04T02:44:02.769548Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=5; 2026-01-04T02:44:02.769572Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=3; 2026-01-04T02:44:02.769630Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=34; 2026-01-04T02:44:02.769682Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=4; 2026-01-04T02:44:02.769758Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=54; 2026-01-04T02:44:02.769787Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=3; 2026-01-04T02:44:02.769858Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=33; 2026-01-04T02:44:02.769917Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=37; 2026-01-04T02:44:02.769987Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=48; 2026-01-04T02:44:02.770023Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=19253; 2026-01-04T02:44:02.770146Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110400112;raw_bytes=155849958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:44:02.770234Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;self_id=[1:2345:4325];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:44:02.770288Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:44:02.770342Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:44:02.770381Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:44:02.770515Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:44:02.770589Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:44:02.770628Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:44:02.770695Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:44:02.770807Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:44:02.770854Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:02.770887Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:02.770974Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:44:02.771156Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.169000s; 2026-01-04T02:44:02.772974Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:44:02.773796Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:44:02.773871Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:44:02.773931Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:44:02.773965Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:44:02.774010Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:44:02.774082Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:44:02.774126Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:02.774160Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:02.774224Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-04T02:44:02.774272Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:44:02.774689Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.082000s; 2026-01-04T02:44:02.774736Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TCmsTest::BridgeModeNodeLimit [GOOD] |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::ActionIssuePartialPermissions [GOOD] |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::CookiesArePreserved [GOOD] Test command err: 2026-01-04T02:44:04.675357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:04.675428Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... RDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add 
transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 
FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-04T02:44:06.727921Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 TestModificationResults wait txId: 101 2026-01-04T02:44:06.733147Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_subdomain.cpp:259) FAKE_COORDINATOR: Add transaction: 101 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000038 FAKE_COORDINATOR: Erasing txId 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:44:06.739467Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 FAKE_COORDINATOR: Erasing txId 102 TestModificationResult got TxId: 102, wait until txId: 102 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets >> TConsoleConfigHelpersTests::TestConfigSubscriber [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-EnablePersistentPartitionStats >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets >> TJaegerTracingConfiguratorTests::RequestTypeThrottler [GOOD] >> TJaegerTracingConfiguratorTests::RequestTypeSampler >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable >> TStoragePoolsQuotasTest::DifferentQuotasInteraction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::BridgeModeNodeLimit [GOOD] Test command err: 2026-01-04T02:43:47.507259Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-01-04T02:43:47.507365Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-01-04T02:43:47.507513Z node 9 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 
1970-01-01T00:02:00Z 2026-01-04T02:43:47.509417Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvClusterStateRequest { }, response# NKikimr::NCms::TEvCms::TEvClusterStateResponse { Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-9-9" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 9 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 10 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 11 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 12 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 13 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 14 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: 
"-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 15 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 16 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120110512 } } 2026-01-04T02:43:47.510195Z node 9 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-0-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-9-9" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 9 InterconnectPort: 12001 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-1-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-10-10" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 10 InterconnectPort: 12002 Location { Rack: "1" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-2-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-11-11" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 11 InterconnectPort: 12003 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-3-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-12-12" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 12 InterconnectPort: 12004 Location { Rack: "2" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-4-0" State: UP 
Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-4-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-13-13" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 13 InterconnectPort: 12005 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-5-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-14-14" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 14 InterconnectPort: 12006 Location { Rack: "3" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-6-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-15-15" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 15 InterconnectPort: 12007 Location { Rack: "4" } StartTimeSeconds: 0 } Hosts { Name: "::1" State: UP Services { Name: "storage" State: UP Version: "-1" Timestamp: 120110512 } Devices { Name: "vdisk-0-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-1-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-2-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "vdisk-3-1-0-7-0" State: UP Timestamp: 120110512 } Devices { Name: "pdisk-16-16" State: UP Timestamp: 120110512 } Timestamp: 120110512 NodeId: 16 InterconnectPort: 12008 Location { Rack: "4" } StartTimeSeconds: 0 } Timestamp: 120110512 } 2026-01-04T02:43:47.510492Z node 9 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 } PartialPermissionAllowed: false Schedule: false DryRun: false EvictVDisks: true 2026-01-04T02:43:47.510571Z node 9 :CMS DEBUG: cms.cpp:397: Checking action: Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 2026-01-04T02:43:47.510626Z node 9 :CMS DEBUG: cms.cpp:416: Result: DISALLOW_TEMP (reason: VDisks eviction from host 9 has not yet been completed) 2026-01-04T02:43:47.510778Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2026-01-04T02:43:47.511007Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 9 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-04T02:43:47.511064Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Add host marker: host# 9, marker# MARKER_DISK_FAULTY 2026-01-04T02:43:47.511254Z node 9 :CMS DEBUG: sentinel.cpp:1052: [Sentinel] [Main] Config was updated in 120.083512s 2026-01-04T02:43:47.511283Z node 9 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-04T02:43:47.511348Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] 
Request pdisks state: nodeId# 9, wbId# [9:8388350642965737326:1634689637] 2026-01-04T02:43:47.511381Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 10, wbId# [10:8388350642965737326:1634689637] 2026-01-04T02:43:47.511422Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 11, wbId# [11:8388350642965737326:1634689637] 2026-01-04T02:43:47.511449Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 12, wbId# [12:8388350642965737326:1634689637] 2026-01-04T02:43:47.511478Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 13, wbId# [13:8388350642965737326:1634689637] 2026-01-04T02:43:47.511530Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 14, wbId# [14:8388350642965737326:1634689637] 2026-01-04T02:43:47.511557Z node 9 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 15, ... : sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-04T02:43:52.578752Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 10:10, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2026-01-04T02:43:52.578816Z node 9 :CMS NOTICE: sentinel.cpp:1147: [Sentinel] [Main] PDisk status changed: pdiskId# 9:9, status# ACTIVE, required status# FAULTY, reason# Forced status, dry run# 0 2026-01-04T02:43:52.578864Z node 9 :CMS DEBUG: sentinel.cpp:1212: [Sentinel] [Main] Change pdisk status: requestsSize# 2 2026-01-04T02:43:52.579102Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2026-01-04T02:43:52.579297Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:19: TTxLogAndSend Execute 2026-01-04T02:43:52.579397Z node 9 :CMS DEBUG: sentinel.cpp:1343: [Sentinel] [Main] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true } Status { Success: true } Success: true, cookie# 1 2026-01-04T02:43:52.579435Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 9:9 2026-01-04T02:43:52.579465Z node 9 :CMS NOTICE: sentinel.cpp:1367: [Sentinel] [Main] PDisk status has been changed: pdiskId# 10:10 2026-01-04T02:43:52.592109Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:27: TTxLogAndSend Complete 2026-01-04T02:43:52.592196Z node 9 :CMS DEBUG: cms_tx_log_and_send.cpp:27: TTxLogAndSend Complete 2026-01-04T02:43:52.606962Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-01-04T02:43:52.607053Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-01-04T02:43:52.607105Z node 9 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:03:00Z 2026-01-04T02:43:52.607711Z node 9 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 9 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-04T02:43:52.607809Z node 9 :CMS DEBUG: cms.cpp:397: Checking action: Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 9 has not yet been completed" } 2026-01-04T02:43:52.607876Z node 9 :CMS DEBUG: node_checkers.cpp:115: [Nodes Counter] Checking Node: 9, 
with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 0, down nodes: 0 2026-01-04T02:43:52.607925Z node 9 :CMS DEBUG: cms.cpp:760: Ring: 0; State: Ok 2026-01-04T02:43:52.607946Z node 9 :CMS DEBUG: cms.cpp:760: Ring: 1; State: Ok 2026-01-04T02:43:52.607960Z node 9 :CMS DEBUG: cms.cpp:760: Ring: 2; State: Ok 2026-01-04T02:43:52.607983Z node 9 :CMS DEBUG: cms.cpp:405: Result: ALLOW 2026-01-04T02:43:52.608108Z node 9 :CMS DEBUG: cms.cpp:1066: Accepting permission: id# user-p-1, requestId# user-r-1, owner# user, priority# 0 2026-01-04T02:43:52.608156Z node 9 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12001 (9) (permission user-p-1 until 1970-01-01T00:13:00Z) 2026-01-04T02:43:52.608228Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2026-01-04T02:43:52.608379Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-1, validity# 1970-01-01T00:13:00.210512Z, action# Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 2026-01-04T02:43:52.608479Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-1, owner# user, order# 1, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-04T02:43:52.620535Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:141: TTxStorePermissions complete 2026-01-04T02:43:52.620772Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-1" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-1" Permissions { Id: "user-p-1" Action { Type: RESTART_SERVICES Host: "9" Services: "storage" Duration: 600000000 } Deadline: 780210512 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 9 InterconnectPort: 12001 } } } } 2026-01-04T02:43:52.620838Z node 9 :CMS DEBUG: cms.cpp:1094: Schedule cleanup at 1970-01-01T00:33:00.210512Z 2026-01-04T02:43:52.657958Z node 9 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12001 (9) (permission user-p-1 until 1970-01-01T00:13:00Z) 2026-01-04T02:43:52.658267Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:17: TTxUpdateDowntimes Execute 2026-01-04T02:43:52.658329Z node 9 :CMS DEBUG: cms_tx_update_downtimes.cpp:26: TTxUpdateDowntimes Complete 2026-01-04T02:43:52.658410Z node 9 :CMS DEBUG: cluster_info.cpp:991: Timestamp: 1970-01-01T00:03:00Z 2026-01-04T02:43:52.658988Z node 9 :CMS INFO: cms.cpp:364: Check request: User: "user" Actions { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } } PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-04T02:43:52.659071Z node 9 :CMS DEBUG: cms.cpp:397: Checking action: Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 Issue { Type: GENERIC Message: "VDisks eviction from host 10 has not yet been completed" } 2026-01-04T02:43:52.659121Z node 9 :CMS DEBUG: node_checkers.cpp:115: [Nodes Counter] Checking Node: 10, with state: Up, with limit: 0, with ratio limit: 0, locked nodes: 1, down nodes: 0 2026-01-04T02:43:52.659162Z node 9 :CMS DEBUG: cms.cpp:405: Result: ALLOW 2026-01-04T02:43:52.659316Z node 9 
:CMS DEBUG: cms.cpp:1066: Accepting permission: id# user-p-2, requestId# user-r-2, owner# user, priority# 0 2026-01-04T02:43:52.659387Z node 9 :CMS INFO: cluster_info.cpp:782: Adding lock for Host ::1:12002 (10) (permission user-p-2 until 1970-01-01T00:13:00Z) 2026-01-04T02:43:52.659481Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:26: TTxStorePermissions Execute 2026-01-04T02:43:52.659656Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store permission: id# user-p-2, validity# 1970-01-01T00:13:00.312024Z, action# Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 2026-01-04T02:43:52.659752Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Store request: id# user-r-2, owner# user, order# 2, priority# 0, body# User: "user" PartialPermissionAllowed: false Schedule: false Reason: "" TenantPolicy: DEFAULT AvailabilityMode: MODE_MAX_AVAILABILITY EvictVDisks: true 2026-01-04T02:43:52.671982Z node 9 :CMS DEBUG: cms_tx_store_permissions.cpp:141: TTxStorePermissions complete 2026-01-04T02:43:52.672297Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvCheckRequest { User: "user" RequestId: "user-r-2" DryRun: false AvailabilityMode: MODE_MAX_AVAILABILITY }, response# NKikimr::NCms::TEvCms::TEvPermissionResponse { Status { Code: ALLOW } RequestId: "user-r-2" Permissions { Id: "user-p-2" Action { Type: RESTART_SERVICES Host: "10" Services: "storage" Duration: 600000000 } Deadline: 780312024 Extentions { Type: HostInfo Hosts { Name: "::1" State: UP NodeId: 10 InterconnectPort: 12002 } } } } 2026-01-04T02:43:52.672937Z node 9 :CMS INFO: cms.cpp:1363: User user is done with permissions user-p-1 2026-01-04T02:43:52.673007Z node 9 :CMS DEBUG: cms.cpp:1386: Resulting status: OK 2026-01-04T02:43:52.673074Z node 9 :CMS DEBUG: cms_tx_remove_permissions.cpp:28: TTxRemovePermissions Execute 2026-01-04T02:43:52.673164Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reset host markers: host# 9 2026-01-04T02:43:52.673256Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-1, reason# permission user-p-1 was removed 2026-01-04T02:43:52.673293Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove permission: id# user-p-1, reason# explicit remove 2026-01-04T02:43:52.685426Z node 9 :CMS DEBUG: cms_tx_remove_permissions.cpp:80: TTxRemovePermissions Complete 2026-01-04T02:43:52.685619Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-1" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2026-01-04T02:43:52.686114Z node 9 :CMS INFO: cms.cpp:1363: User user is done with permissions user-p-2 2026-01-04T02:43:52.686157Z node 9 :CMS DEBUG: cms.cpp:1386: Resulting status: OK 2026-01-04T02:43:52.686215Z node 9 :CMS DEBUG: cms_tx_remove_permissions.cpp:28: TTxRemovePermissions Execute 2026-01-04T02:43:52.686289Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reset host markers: host# 10 2026-01-04T02:43:52.686373Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove request: id# user-r-2, reason# permission user-p-2 was removed 2026-01-04T02:43:52.686409Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Remove permission: id# user-p-2, reason# explicit remove 2026-01-04T02:43:52.698784Z node 9 :CMS DEBUG: 
cms_tx_remove_permissions.cpp:80: TTxRemovePermissions Complete 2026-01-04T02:43:52.698978Z node 9 :CMS NOTICE: audit_log.cpp:12: [AuditLog] [CMS tablet] Reply: request# NKikimr::NCms::TEvCms::TEvManagePermissionRequest { User: "user" Command: DONE Permissions: "user-p-2" DryRun: false }, response# NKikimr::NCms::TEvCms::TEvManagePermissionResponse { Status { Code: OK } } 2026-01-04T02:44:03.996658Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 49 2026-01-04T02:44:03.997730Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 50 2026-01-04T02:44:03.997825Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 51 2026-01-04T02:44:03.997858Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 52 2026-01-04T02:44:03.997882Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 53 2026-01-04T02:44:03.997914Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 54 2026-01-04T02:44:03.997938Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 55 2026-01-04T02:44:03.997958Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 56 2026-01-04T02:44:03.997985Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 57 2026-01-04T02:44:03.998008Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 58 2026-01-04T02:44:03.998035Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 59 2026-01-04T02:44:03.998064Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 60 2026-01-04T02:44:03.998090Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 61 2026-01-04T02:44:03.998112Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 63 2026-01-04T02:44:03.998138Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 64 2026-01-04T02:44:03.998163Z node 49 :CMS ERROR: sentinel.cpp:808: [Sentinel] [StateUpdater] There is no pdisk info: nodeId# 62 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TConsoleConfigTests::TestValidation [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching-EnablePersistentPartitionStats >> TCmsTest::WalleTasksDifferentPriorities [GOOD] >> TSchemeShardSubDomainTest::RmDir >> ActorPage::OptionsNoContent [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTablets [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets >> TJaegerTracingConfiguratorTests::RequestTypeSampler [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantTenant [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants >> TSchemeShardSubDomainTest::CreateWithoutTimeCastBuckets [GOOD] >> TSchemeShardSubDomainTest::DeclareAndDelete >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionCreate [GOOD] >> 
TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut/unittest >> TCmsTest::WalleTasksDifferentPriorities [GOOD] |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::CreateOpsAreCovered [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutSomeTablets [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:08.377857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:08.377956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:08.377999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:08.378035Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:08.378068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:08.378104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:08.378148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:08.378204Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:08.378908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:08.380108Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:08.452226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:08.452285Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:08.465486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:08.466011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:08.467577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:08.475971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:08.477197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:08.478711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:08.481866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:08.489199Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:08.490692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:08.497396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:08.497478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:08.497541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:08.497590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:08.497710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:08.497970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:08.505075Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:44:08.634588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:08.636031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 
72057594046678944 2026-01-04T02:44:08.637921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:44:08.637989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:44:08.639259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:44:08.639358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:08.642535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:08.643692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:44:08.644091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:08.644235Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:44:08.644284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:44:08.644315Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:44:08.646568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:08.646632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:44:08.646699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:44:08.648698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:08.648751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:08.648804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:08.648859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:44:08.653353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { 
AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:08.655426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:44:08.656606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:44:08.659065Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:08.659200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:08.659250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:08.659630Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:44:08.659691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:08.659913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:08.660016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:44:08.662397Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:08.662442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 1, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 3 2026-01-04T02:44:09.237260Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:09.237324Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 1, subscribers: 0 2026-01-04T02:44:09.239314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 1 2026-01-04T02:44:09.239625Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 1, at schemeshard: 72057594046678944 TestModificationResults wait txId: 100 2026-01-04T02:44:09.240062Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:433: actor# [2:274:2263] Bootstrap 2026-01-04T02:44:09.240827Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:452: actor# [2:274:2263] Become StateWork (SchemeCache [2:279:2268]) 2026-01-04T02:44:09.242816Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Name: "USER_1" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 100 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:09.242999Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_subdomain.cpp:92: TCreateSubDomain Propose, path: /MyRoot/USER_1, opId: 100:0, at schemeshard: 72057594046678944 2026-01-04T02:44:09.243069Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 100:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, at schemeshard: 72057594046678944 2026-01-04T02:44:09.243915Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:212: actor# [2:274:2263] HANDLE TEvClientConnected success connect from tablet# 72057594046447617 2026-01-04T02:44:09.245728Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 100, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with coordinators, but no mediators" TxId: 100 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:09.245945Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 100, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with coordinators, but no mediators, operation: CREATE DATABASE, path: /MyRoot/USER_1 2026-01-04T02:44:09.246211Z node 2 :TX_PROXY DEBUG: client.cpp:89: Handle TEvAllocateResult ACCEPTED RangeBegin# 281474976715656 RangeEnd# 281474976720656 txAllocator# 72057594046447617 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 2026-01-04T02:44:09.248241Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpCreateSubDomain SubDomain { PlanResolution: 50 Mediators: 1 Name: "USER_2" TimeCastBucketsPerMediator: 2 StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:09.248411Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_subdomain.cpp:92: TCreateSubDomain Propose, path: /MyRoot/USER_2, opId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:44:09.248478Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, at schemeshard: 72057594046678944 2026-01-04T02:44:09.249899Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cant create subdomain with mediators, but no coordinators" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:09.250044Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cant create subdomain with mediators, but no coordinators, operation: CREATE DATABASE, path: /MyRoot/USER_2 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2026-01-04T02:44:09.250246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-04T02:44:09.250280Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-01-04T02:44:09.250334Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:44:09.250349Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:44:09.250593Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-04T02:44:09.250669Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-04T02:44:09.250700Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [2:293:2282] 2026-01-04T02:44:09.250805Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:44:09.250847Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:44:09.250878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:293:2282] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-01-04T02:44:09.251240Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:09.251395Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 175us result status StatusPathDoesNotExist 2026-01-04T02:44:09.251555Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_1\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_1" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:44:09.251863Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:09.251962Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 103us result status StatusPathDoesNotExist 2026-01-04T02:44:09.252035Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_2\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_2" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:44:09.252313Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:09.252425Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 127us result status StatusSuccess 2026-01-04T02:44:09.252679Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:08.377855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:08.377946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:08.377982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:08.378014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:08.378070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:08.378106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:08.378156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:08.378222Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-04T02:44:08.378990Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:08.380109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:08.463281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:08.463331Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:08.474243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:08.474574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:08.474704Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:08.480091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:08.480262Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:08.480764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:08.481873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:08.489049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:08.490690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:08.497306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:08.497369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:08.497412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:08.497454Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:08.497552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:08.497735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:08.643714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.644413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.644502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.644549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.644587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.644626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.644682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.644725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.644775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: 
ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.644812Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.644846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.644899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.644975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.645037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.645092Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:09.133380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:44:09.133482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:09.133511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-04T02:44:09.133546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 100, path id: 38 2026-01-04T02:44:09.133836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-04T02:44:09.133865Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-04T02:44:09.133985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-04T02:44:09.134034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:44:09.134087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-04T02:44:09.134144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:44:09.134184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-01-04T02:44:09.134220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:44:09.134254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-04T02:44:09.134276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-04T02:44:09.134326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:44:09.134358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2026-01-04T02:44:09.134381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-04T02:44:09.134402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-04T02:44:09.134834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:44:09.134905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:44:09.134927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-04T02:44:09.134967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-04T02:44:09.135005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:44:09.135456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:44:09.135523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:44:09.135555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-04T02:44:09.135577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:44:09.135596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:44:09.135687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2026-01-04T02:44:09.138466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-04T02:44:09.138804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2026-01-04T02:44:09.139033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-04T02:44:09.139075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-01-04T02:44:09.139135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:44:09.139159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, 
SendToSchemeshard, txId 101 2026-01-04T02:44:09.139410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-04T02:44:09.139485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-04T02:44:09.139511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:721:2711] 2026-01-04T02:44:09.139679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:44:09.139739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:44:09.139753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:721:2711] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-01-04T02:44:09.140053Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:09.140200Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 160us result status StatusSuccess 2026-01-04T02:44:09.140561Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:09.140976Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:09.141144Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 141us result status StatusPathDoesNotExist 2026-01-04T02:44:09.141249Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/USER_0\' (id: [OwnerId: 72057594046678944, LocalPathId: 38])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/USER_0" LastExistedPrefixPathId: 38 LastExistedPrefixDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::RmDir [GOOD] >> TJaegerTracingConfiguratorTests::SamplingSameScope [GOOD] >> TJaegerTracingConfiguratorTests::ThrottlingByDb |92.7%| [TA] $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |92.7%| [TA] {RESULT} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TConsoleConfigSubscriptionTests::TestRemoveConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |92.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TConsoleTests::TestRestartConsoleAndPools [GOOD] >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mon/ut/unittest >> ActorPage::OptionsNoContent [GOOD] Test command err: 2026-01-04T02:43:57.334075Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331662093023214:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:43:57.334424Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:43:57.590566Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:43:57.596648Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:57.596788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:57.603181Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:57.715345Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:57.861908Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:57.949527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:57.949576Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:57.949584Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:57.949699Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:43:58.339627Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:43:58.356741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:43:58.370245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:43:58.430422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:43:58.445405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:43:58.453243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:44:02.982753Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331685693271950:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:44:02.982802Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:44:02.996422Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:44:03.071448Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:03.074447Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591331685693271921:2081] 1767494642981634 != 1767494642981637 2026-01-04T02:44:03.108971Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:44:03.109073Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:44:03.114443Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:44:03.158801Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:44:03.158825Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:44:03.158835Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:44:03.158954Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:44:03.278135Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:44:03.436190Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:03.486703Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:44:03.491090Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/mon/ut/unittest >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClient [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantMultipleTenants [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain >> test_sql_streaming.py::test[watermarks-watermarks_adjust-default.txt] [FAIL] >> test_sql_streaming.py::test[watermarks-watermarks_as-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RmDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:08.950192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:08.950259Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:08.950284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:08.950308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:08.950337Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:08.950361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:08.950408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:08.950462Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:08.951053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:08.951251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:09.021844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:09.021888Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:09.032915Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:09.033091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:09.033262Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:09.042963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:09.043128Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:09.043693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:09.043918Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:09.046440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:09.046646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:09.047711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:09.047765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:09.047875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:09.047916Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:09.048030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:09.048210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:09.159292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:09.160174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:09.160298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:09.160369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:09.160449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:09.160515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:09.160554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:09.160595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:09.160678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:09.160742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:44:09.160800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:09.160864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:09.160978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:09.161081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:09.161159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
shard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:09.787358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-04T02:44:09.787400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 100, path id: 38 2026-01-04T02:44:09.787707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-04T02:44:09.787751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-04T02:44:09.787870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-04T02:44:09.787912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:44:09.787955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-04T02:44:09.787994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:44:09.788027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-01-04T02:44:09.788073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:44:09.788115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-04T02:44:09.788166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-04T02:44:09.788385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 2026-01-04T02:44:09.788439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2026-01-04T02:44:09.788476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-04T02:44:09.788522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-04T02:44:09.789152Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:44:09.789224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:44:09.789264Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, 
at schemeshard: 72057594046678944, txId: 100 2026-01-04T02:44:09.789312Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-04T02:44:09.789378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:44:09.790004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:44:09.790090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:44:09.790127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-04T02:44:09.790158Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:44:09.790185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 8 2026-01-04T02:44:09.790249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2026-01-04T02:44:09.792906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-04T02:44:09.793730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2026-01-04T02:44:09.793973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-04T02:44:09.794027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-01-04T02:44:09.794103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:44:09.794124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:44:09.794548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-04T02:44:09.794626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-04T02:44:09.794660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:999:2916] 2026-01-04T02:44:09.794817Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:44:09.794905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:44:09.794929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:999:2916] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-01-04T02:44:09.795311Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:09.795500Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 194us result status StatusSuccess 2026-01-04T02:44:09.796054Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 TestModificationResults wait txId: 101 2026-01-04T02:44:09.799086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpRmDir Drop { Name: "USER_0" } } TxId: 101 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:09.799216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_rmdir.cpp:28: TRmDir Propose, path: /MyRoot/USER_0, 
pathId: 0, opId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:44:09.799374Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 101:1, propose status:StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-04T02:44:09.801585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 101, response: Status: StatusPathIsNotDirectory Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 101 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:09.801796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 101, database: /MyRoot, subject: , status: StatusPathIsNotDirectory, reason: Check failed: path: '/MyRoot/USER_0', error: path is not a directory (id: [OwnerId: 72057594046678944, LocalPathId: 38], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: DROP DIRECTORY, path: /MyRoot/USER_0 TestModificationResult got TxId: 101, wait until txId: 101 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-anonymous >> TConsoleTests::TestSetDefaultStorageUnitsQuota [GOOD] >> TConsoleTests::TestSetDefaultComputationalUnitsQuota >> TBlobStorageProxyTest::TestEmptyDiscover >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock |92.7%| [TA] $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.7%| [TA] {RESULT} $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |92.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mon/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TJaegerTracingConfiguratorTests::ThrottlingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SamplingByDb >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block >> TConsoleConfigSubscriptionTests::TestReplaceConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:08.377864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:08.377949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:08.377984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:08.378016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:08.378075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:08.378106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:08.378168Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:08.378239Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:08.379033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:08.380124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:08.451285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:08.451351Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:08.466979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:08.467529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:08.467697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:08.476996Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: 
TTxUpgradeSchema.Complete 2026-01-04T02:44:08.477249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:08.478728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:08.481846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:08.490103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:08.490698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:08.497359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:08.497427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:08.497485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:08.497528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:08.497635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:08.497818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:08.633553Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.635358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.635467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.635517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.635558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.635592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.635630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.635675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.635731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.635818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.635898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:44:08.635938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.635999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.636103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.636166Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
schemeshard_impl.cpp:5626: ExamineTreeVFS visit path id [OwnerId: 72057594046678944, LocalPathId: 38] name: USER_0 type: EPathTypeSubDomain state: EPathStateDrop stepDropped: 0 droppedTxId: 101 parent: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:10.244957Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5642: ExamineTreeVFS run path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:44:10.245095Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 130 2026-01-04T02:44:10.245275Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:44:10.245342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:44:10.246274Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:44:10.246400Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2026-01-04T02:44:10.248172Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:10.248214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:10.248375Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:44:10.248555Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:10.248596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-04T02:44:10.248636Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-04T02:44:10.248920Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.248972Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:418: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2026-01-04T02:44:10.249033Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:44:10.249070Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:44:10.249118Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:44:10.249156Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation 
IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:44:10.249194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-04T02:44:10.249235Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:44:10.249291Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:44:10.249325Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:44:10.249419Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:44:10.249472Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-04T02:44:10.249512Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-04T02:44:10.249550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-04T02:44:10.250470Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:44:10.250556Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:44:10.250590Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:44:10.250635Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-04T02:44:10.250698Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:44:10.252085Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:44:10.252194Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:44:10.252231Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:44:10.252263Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: 
[OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-04T02:44:10.252319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:44:10.252446Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:44:10.252889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:44:10.252953Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:44:10.253065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:44:10.253858Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:44:10.253932Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:44:10.254030Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:44:10.257398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:44:10.258218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:44:10.258330Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:44:10.258427Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:44:10.258729Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:44:10.258787Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:44:10.259239Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:44:10.259348Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:44:10.259395Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:749:2738] TestWaitNotification: OK eventTxId 101 2026-01-04T02:44:10.259994Z node 2 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:10.260203Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 258us result status StatusPathDoesNotExist 2026-01-04T02:44:10.260394Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> THealthCheckTest::TestStateStorageOk [GOOD] >> THealthCheckTest::TestStateStorageBlue >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone >> test_sql_streaming.py::test[pq-ReadTopicWithSchema-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTwoTopics-default.txt] >> TJaegerTracingConfiguratorTests::SamplingByDb [GOOD] >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] >> TBlobStorageProxyTest::TestProxyPutSingleTimeout >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-system >> TBlobStorageProxyTest::TestEmptyDiscover [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi >> TBlobStorageProxyTest::TestVPutVGet >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-ordinaryuser >> TConsoleConfigHelpersTests::TestConfigSubscriberAutoTenantDomain [GOOD] >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser >> DataShardVolatile::DistributedWriteThenBulkUpsert [GOOD] >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:08.377856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:08.377953Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:08.377989Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:08.378025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:08.378077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:08.378106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:08.378174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:08.378246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:08.378985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:08.380107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:08.464911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:08.464965Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:08.480888Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:08.481435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:08.481599Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:08.488720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:08.489007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:08.489698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:08.489974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:08.492514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:08.492642Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:08.497261Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:08.497320Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 
2026-01-04T02:44:08.497371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:08.497420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:08.497611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:08.497847Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:08.649439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.650152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.650250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.650300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.650345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.650381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.650427Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.650475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.650534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.650580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.650628Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.650669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.650729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.650792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.650834Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 02:44:11.920738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:44:11.921046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:11.921098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:44:11.921304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:44:11.921498Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:11.921542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-04T02:44:11.921591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-04T02:44:11.922206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:44:11.922266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 103:0 ProgressState at tablet: 72057594046678944 2026-01-04T02:44:11.922370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:44:11.922417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 103:0, datashard: 72075186233409548, at schemeshard: 72057594046678944 2026-01-04T02:44:11.922468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 129 -> 240 2026-01-04T02:44:11.923393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:44:11.923499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 
LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:44:11.923546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:44:11.923600Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 2026-01-04T02:44:11.923651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:44:11.924524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:44:11.924606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:44:11.924636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:44:11.924666Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-04T02:44:11.924709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:44:11.924782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-04T02:44:11.928437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:44:11.928506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:44:11.928881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:44:11.929083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:44:11.929139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:44:11.929183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:44:11.929217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:44:11.929252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-04T02:44:11.929325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send 
TEvNotifyTxCompletionResult to actorId: [1:815:2782] message: TxId: 103 2026-01-04T02:44:11.929366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:44:11.929406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:44:11.929447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:44:11.929561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:44:11.930775Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:11.930819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:44:11.931283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:44:11.931599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:44:11.933003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:11.933062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 0, path id: 38 2026-01-04T02:44:11.933155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:44:11.933202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1135:3069] 2026-01-04T02:44:11.934117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification: OK eventTxId 103 2026-01-04T02:44:11.935441Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:11.935655Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeDatabase" took 218us result status StatusSuccess 2026-01-04T02:44:11.936069Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "SomeDatabase" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 
PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-clusteradmin >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-EnablePersistentPartitionStats [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverNone [GOOD] >> TBlobStorageProxyTest::TestPutGetMany |92.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TBlobStorageProxyTest::TestQuadrupleGroups >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowNoKey-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowPercentile-default.txt] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-anonymous >> TJaegerTracingConfiguratorTests::SharedThrottlingLimits [GOOD] >> TJaegerTracingConfiguratorTests::SharedSamplingLimits >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-anonymous >> TConsoleConfigHelpersTests::TestConfigSubscriptionEraser [GOOD] >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007D6C2D2ABD00 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:08.377857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:08.377945Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:08.377979Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:08.378025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:08.378070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:08.378094Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:08.378136Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:08.378214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:08.378924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:08.380139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:08.451326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:08.451392Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:08.467347Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:08.467576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:08.467800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:08.477824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:08.478083Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:08.478886Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2026-01-04T02:44:08.481864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:08.490249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:08.490725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:08.497249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:08.497348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:08.497500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:08.497556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:08.497708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:08.497973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:08.649617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.650695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.650833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.650908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.650972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.651042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.651123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.651190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.651311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.651386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.651448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.651512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.651579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.651674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.651767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by ... n 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, 
schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 Leader for TabletID 72057594046678944 is [1:899:2848] sender: [1:970:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 
2026-01-04T02:44:12.580179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 2026-01-04T02:44:12.591306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 0 2026-01-04T02:44:12.591474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-01-04T02:44:12.591573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-04T02:44:12.591841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-04T02:44:12.591923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2026-01-04T02:44:12.591964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-04T02:44:12.592051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D6C2D3D4100, stats written 2 2026-01-04T02:44:12.592738Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false 
ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:12.595950Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 310us result status StatusSuccess 2026-01-04T02:44:12.596487Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165248 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TBlobStorageProxyTest::TestProxyPutSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdates [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-anonymous >> KqpExplain::AggGroupLimit [GOOD] >> KqpExplain::ComplexJoin >> test_sql_streaming.py::test[hop-GroupByHopNoKey-default.txt] 
[FAIL] >> test_sql_streaming.py::test[hop-GroupByHopPercentile-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007DDBB28ABD00 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:08.699477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:08.699549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:08.699585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:08.699617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:08.699658Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:08.699679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:08.699728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:08.699808Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:08.700476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:08.700681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:08.762198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:08.762253Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:08.772125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:08.772273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:08.772412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:08.778153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:08.778328Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 
2026-01-04T02:44:08.778853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:08.779072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:08.781109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:08.781288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:08.782203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:08.782260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:08.782403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:08.782440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:08.782534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:08.782673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:08.887637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.888691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.888808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.888861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.888903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.888977Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.889029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.889089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.889133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.889177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.889229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.889288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.889351Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.889403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:08.889461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... aitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 Leader for TabletID 72057594046678944 is [1:902:2851] sender: [1:976:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 2026-01-04T02:44:12.791402Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 0 2026-01-04T02:44:12.791530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-01-04T02:44:12.791576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-04T02:44:12.791806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 
1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 2026-01-04T02:44:12.804765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 2026-01-04T02:44:12.815636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 1 2026-01-04T02:44:12.815752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2026-01-04T02:44:12.815854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-04T02:44:12.815990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007DDBB295C100, stats written 2 2026-01-04T02:44:12.816462Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, 
record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:12.819051Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 378us result status StatusSuccess 2026-01-04T02:44:12.819498Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165248 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TBlobStorageProxyTest::TestVPutVGet [GOOD] >> TBlobStorageProxyTest::TestVPutVGetLimit >> TConsoleConfigSubscriptionTests::TestNotificationForTimeoutedNotificationResponse [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer >> TJaegerTracingConfiguratorTests::SharedSamplingLimits [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-anonymous [GOOD] >> TLogSettingsConfiguratorTests::TestNoChanges 
>> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-ordinaryuser >> TBlobStorageProxyTest::TestProxyPutInvalidSize >> TConsoleConfigSubscriptionTests::TestAddConfigSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-ordinaryuser >> TLogSettingsConfiguratorTests::TestNoChanges [GOOD] >> TLogSettingsConfiguratorTests::TestAddComponentEntries >> TConsoleTests::TestSetDefaultComputationalUnitsQuota [GOOD] >> TConsoleTests::TestTenantConfigConsistency >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3 [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 >> TConsoleTests::TestRestartConsoleAndPoolsExtSubdomain [GOOD] >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-anonymous |92.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetLimit [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyPutInvalidSize [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure >> TSchemeShardSubDomainTest::CreateAndWait >> TBlobStorageProxyTest::TestPutGetMany [GOOD] >> TLogSettingsConfiguratorTests::TestAddComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-anonymous >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true >> TConsoleConfigSubscriptionTests::TestListConfigSubscriptions [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-anonymous >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] >> TSchemeShardSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardSubDomainTest::CreateDropNbs |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetMany [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TLogSettingsConfiguratorTests::TestRemoveComponentEntries [GOOD] >> TLogSettingsConfiguratorTests::TestChangeDefaults >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-dbadmin >> 
TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientManyUpdatesAddRemove [GOOD] >> TLogSettingsConfiguratorTests::TestChangeDefaults [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_DOMAIN [GOOD] >> TModificationsValidatorTests::TestComputeAffectedConfigs_All_DomainAffected_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS [GOOD] >> TModificationsValidatorTests::TestApplyValidators_TENANTS_AND_NODE_TYPES >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-ordinaryuser >> TModificationsValidatorTests::TestApplyValidators_TENANTS_AND_NODE_TYPES [GOOD] >> TModificationsValidatorTests::TestApplyValidatorsWithOldConfig [GOOD] >> TModificationsValidatorTests::TestChecksLimitError [GOOD] >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSubDomainTest::LS >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateDropNbs [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:16.306452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:16.306537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:16.306565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:16.306613Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:16.306673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:16.306705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:16.306772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:16.306882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:16.307734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:16.308057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:16.381240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:16.381292Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:16.395826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:16.396123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:16.396364Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:16.409546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:16.409905Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:16.410962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:16.411351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:16.415445Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:16.415692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:16.416984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:16.417053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:16.417182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:16.417239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:16.417342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:16.417546Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:16.425189Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:44:16.564348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" 
OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:16.564625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:16.564856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:44:16.564907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:44:16.565179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:44:16.565260Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:16.567987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:16.568227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:44:16.568519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:16.568593Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:44:16.568635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:44:16.568672Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:44:16.571077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:16.571151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:44:16.571219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:44:16.573515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:16.573572Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:16.573630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:16.573694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:44:16.577791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:16.580172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:44:16.580385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:44:16.581651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:16.581830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:16.581887Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:16.582245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:44:16.582330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:16.582555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:16.582683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:44:16.585199Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:16.585275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
T_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:44:17.527521Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:44:17.527559Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:44:17.528541Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:44:17.529270Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2026-01-04T02:44:17.530567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:17.530906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-04T02:44:17.531429Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-01-04T02:44:17.531596Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:44:17.532450Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2026-01-04T02:44:17.532941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-04T02:44:17.533239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2026-01-04T02:44:17.533736Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-04T02:44:17.533938Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-04T02:44:17.534174Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409547 2026-01-04T02:44:17.535487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-04T02:44:17.535668Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 Forgetting tablet 72075186233409549 2026-01-04T02:44:17.536991Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:44:17.537049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2026-01-04T02:44:17.537134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-04T02:44:17.538153Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:44:17.538202Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:44:17.538315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-04T02:44:17.538500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:44:17.541152Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:44:17.541213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-04T02:44:17.541643Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-04T02:44:17.541676Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-04T02:44:17.541759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:44:17.541783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-04T02:44:17.544651Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-04T02:44:17.544719Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-04T02:44:17.545433Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done 
PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-04T02:44:17.545561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:44:17.545614Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:44:17.545707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:17.545973Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:44:17.547564Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:44:17.547905Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:44:17.547958Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:44:17.548436Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:44:17.548542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:44:17.548583Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:550:2504] TestWaitNotification: OK eventTxId 102 2026-01-04T02:44:17.564526Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:17.564801Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 320us result status StatusPathDoesNotExist 2026-01-04T02:44:17.565015Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
2026-01-04T02:44:17.565666Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:17.565909Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 247us result status StatusPathDoesNotExist 2026-01-04T02:44:17.566040Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:16.521863Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:16.522042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:16.522096Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:16.522138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:16.522185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:16.522212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:16.522281Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:16.522357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-04T02:44:16.523183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:16.523481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:16.622597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:16.622702Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:16.639115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:16.639314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:16.639527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:16.648272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:16.648541Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:16.649138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:16.649345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:16.651517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:16.651691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:16.652564Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:16.652626Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:16.652760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:16.652805Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:16.652931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:16.653110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:16.789197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:16.791597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:16.791750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:16.791826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:16.791871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:16.791928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:16.791964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:16.791998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:16.792050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:16.792093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:16.792142Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:16.792187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:16.792228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:16.792297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:16.792362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:44:17.464212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 FAKE_COORDINATOR: Erasing txId 101 2026-01-04T02:44:17.465876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:17.465917Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:17.466089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:44:17.466188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:17.466231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-04T02:44:17.466276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-04T02:44:17.466721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:44:17.466768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:44:17.466870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:44:17.466909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:44:17.466949Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:44:17.466999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:44:17.467059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-04T02:44:17.467102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:44:17.467144Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:44:17.467180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:44:17.467246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:44:17.467289Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-04T02:44:17.467324Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-04T02:44:17.467358Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-04T02:44:17.467918Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:44:17.468034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:44:17.468081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:44:17.468135Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-04T02:44:17.468174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:44:17.469021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:44:17.469145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:44:17.469201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:44:17.469233Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:44:17.469265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:44:17.469332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:44:17.476337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:44:17.476712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2026-01-04T02:44:17.480336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:17.480619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1078: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, feature flag EnableAlterDatabaseCreateHiveFirst 1, tx WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterExtSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 ExternalSchemeShard: true DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } 2026-01-04T02:44:17.480692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:1084: [72057594046678944] CreateCompatibleAlterExtSubDomain, opId 102:0, path /MyRoot/SomeDatabase 2026-01-04T02:44:17.480876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_just_reject.cpp:47: TReject Propose, opId: 102:0, explain: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2026-01-04T02:44:17.480923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: , at schemeshard: 72057594046678944 2026-01-04T02:44:17.484079Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. Existing storage kinds are: " TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:17.484273Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Invalid AlterExtSubDomain request: Invalid ExtSubDomain request: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain SomeDatabase has the specified kinds. 
Existing storage kinds are: , operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2026-01-04T02:44:17.484582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:44:17.484629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-04T02:44:17.484704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:44:17.484731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:44:17.485034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:44:17.485133Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:44:17.485160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:715:2705] 2026-01-04T02:44:17.485287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:44:17.485400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:44:17.485428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:715:2705] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TConsoleConfigSubscriptionTests::TestNotificationForNewSubscription [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TModificationsValidatorTests::TestChecksLimitWarning [GOOD] Test command err: 2026-01-04T02:44:07.039864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:07.039927Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:07.234149Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:08.235694Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:08.235745Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:08.309789Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:44:09.253983Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:09.254047Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:09.337352Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:10.298507Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:10.298575Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:10.385555Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:11.354315Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:11.354397Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:11.432697Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:12.401910Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:12.401988Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:12.499905Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:13.523163Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:13.523226Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:13.618637Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) E0104 02:44:14.867423402 734585 trace.cc:67] Unknown trace var: 'sdk_authz' 2026-01-04T02:44:14.868959Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component CMS_CONFIGS has been changed from WARN to NOTICE 2026-01-04T02:44:14.869062Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component CMS_CONFIGS has been changed from WARN to DEBUG 2026-01-04T02:44:14.869114Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: 
TLogSettingsConfigurator: Priority for the component CMS_CLUSTER has been changed from WARN to NOTICE 2026-01-04T02:44:14.869140Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component CMS_CLUSTER has been changed from WARN to DEBUG 2026-01-04T02:44:14.869170Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_PROXY has been changed from WARN to NOTICE 2026-01-04T02:44:14.869199Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_PROXY has been changed from WARN to DEBUG 2026-01-04T02:44:14.869226Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_WORKER has been changed from WARN to NOTICE 2026-01-04T02:44:14.869254Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_WORKER has been changed from WARN to DEBUG 2026-01-04T02:44:14.869280Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_GATEWAY has been changed from WARN to NOTICE 2026-01-04T02:44:14.869308Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_GATEWAY has been changed from WARN to DEBUG 2026-01-04T02:44:14.869338Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_YQL has been changed from WARN to NOTICE 2026-01-04T02:44:14.869366Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_YQL has been changed from WARN to DEBUG 2026-01-04T02:44:14.869393Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_EXECUTER has been changed from WARN to NOTICE 2026-01-04T02:44:14.869421Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_EXECUTER has been changed from WARN to DEBUG 2026-01-04T02:44:14.869446Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPUTE has been changed from WARN to NOTICE 2026-01-04T02:44:14.869472Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPUTE has been changed from WARN to DEBUG 2026-01-04T02:44:14.869497Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_SLOW_LOG has been changed from WARN to NOTICE 2026-01-04T02:44:14.869523Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_SLOW_LOG has been changed from WARN to DEBUG 2026-01-04T02:44:14.869548Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPILE_SERVICE has been changed from WARN to NOTICE 2026-01-04T02:44:14.869577Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_SERVICE has been changed from WARN to DEBUG 2026-01-04T02:44:14.869605Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: 
TLogSettingsConfigurator: Priority for the component KQP_COMPILE_ACTOR has been changed from WARN to NOTICE 2026-01-04T02:44:14.869636Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_ACTOR has been changed from WARN to DEBUG 2026-01-04T02:44:14.869663Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_RESOURCE_MANAGER has been changed from WARN to NOTICE 2026-01-04T02:44:14.869691Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_RESOURCE_MANAGER has been changed from WARN to DEBUG 2026-01-04T02:44:14.869719Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TABLET_RESOURCE_BROKER has been changed from WARN to NOTICE 2026-01-04T02:44:14.869748Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TABLET_RESOURCE_BROKER has been changed from WARN to DEBUG 2026-01-04T02:44:14.869776Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_BLOBS_STORAGE has been changed from WARN to NOTICE 2026-01-04T02:44:14.869829Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_BLOBS_STORAGE has been changed from WARN to DEBUG 2026-01-04T02:44:14.869859Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPILE_REQUEST has been changed from WARN to NOTICE 2026-01-04T02:44:14.869886Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_REQUEST has been changed from WARN to DEBUG 2026-01-04T02:44:14.869918Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_NODE has been changed from WARN to NOTICE 2026-01-04T02:44:14.869949Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_NODE has been changed from WARN to DEBUG 2026-01-04T02:44:14.869979Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_LOAD_TEST has been changed from WARN to NOTICE 2026-01-04T02:44:14.870008Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_LOAD_TEST has been changed from WARN to DEBUG 2026-01-04T02:44:14.870037Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_SESSION has been changed from WARN to NOTICE 2026-01-04T02:44:14.870065Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_SESSION has been changed from WARN to DEBUG 2026-01-04T02:44:14.870096Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component KQP_COMPILE_COMPUTATION_PATTERN_SERVICE has been changed from WARN to NOTICE 2026-01-04T02:44:14.870125Z node 8 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component KQP_COMPILE_COMPUTATION_PATTERN_SERVICE 
has been changed from WARN to DEBUG 2026-01-04T02:44:14.870151Z node 8 :CMS_CONFIGS NOTICE: log_settings_con ... 026-01-04T02:44:17.581260Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_VDISK_BALANCING has been changed from WARN to ALERT 2026-01-04T02:44:17.581277Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_VDISK_BALANCING has been changed from 0 to 10 2026-01-04T02:44:17.581294Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PROXY_GETBLOCK has been changed from WARN to ALERT 2026-01-04T02:44:17.581311Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_GETBLOCK has been changed from WARN to ALERT 2026-01-04T02:44:17.581328Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_GETBLOCK has been changed from 0 to 10 2026-01-04T02:44:17.581346Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_SHRED has been changed from WARN to ALERT 2026-01-04T02:44:17.581362Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_SHRED has been changed from WARN to ALERT 2026-01-04T02:44:17.581383Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_SHRED has been changed from 0 to 10 2026-01-04T02:44:17.581410Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PROXY_CHECKINTEGRITY has been changed from WARN to ALERT 2026-01-04T02:44:17.581438Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_CHECKINTEGRITY has been changed from WARN to ALERT 2026-01-04T02:44:17.581459Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_CHECKINTEGRITY has been changed from 0 to 10 2026-01-04T02:44:17.581477Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PROXY_BRIDGE has been changed from WARN to ALERT 2026-01-04T02:44:17.581500Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PROXY_BRIDGE has been changed from WARN to ALERT 2026-01-04T02:44:17.581524Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PROXY_BRIDGE has been changed from 0 to 10 2026-01-04T02:44:17.581551Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_CLUSTER_BALANCING has been changed from WARN to ALERT 2026-01-04T02:44:17.581576Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_CLUSTER_BALANCING has been changed from WARN to ALERT 2026-01-04T02:44:17.581602Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_CLUSTER_BALANCING has been changed from 0 to 10 
2026-01-04T02:44:17.581628Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_BRIDGE_SYNC has been changed from WARN to ALERT 2026-01-04T02:44:17.581657Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_BRIDGE_SYNC has been changed from WARN to ALERT 2026-01-04T02:44:17.581681Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_BRIDGE_SYNC has been changed from 0 to 10 2026-01-04T02:44:17.581710Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BS_PHANTOM_FLAG_STORAGE has been changed from WARN to ALERT 2026-01-04T02:44:17.581735Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BS_PHANTOM_FLAG_STORAGE has been changed from WARN to ALERT 2026-01-04T02:44:17.581759Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BS_PHANTOM_FLAG_STORAGE has been changed from 0 to 10 2026-01-04T02:44:17.581785Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component LDAP_AUTH_PROVIDER has been changed from WARN to ALERT 2026-01-04T02:44:17.581828Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component LDAP_AUTH_PROVIDER has been changed from WARN to ALERT 2026-01-04T02:44:17.581855Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component LDAP_AUTH_PROVIDER has been changed from 0 to 10 2026-01-04T02:44:17.581881Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component SASL_AUTH has been changed from WARN to ALERT 2026-01-04T02:44:17.581909Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component SASL_AUTH has been changed from WARN to ALERT 2026-01-04T02:44:17.581933Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component SASL_AUTH has been changed from 0 to 10 2026-01-04T02:44:17.581961Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component GROUPED_MEMORY_LIMITER has been changed from WARN to ALERT 2026-01-04T02:44:17.581990Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component GROUPED_MEMORY_LIMITER has been changed from WARN to ALERT 2026-01-04T02:44:17.582014Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component GROUPED_MEMORY_LIMITER has been changed from 0 to 10 2026-01-04T02:44:17.582046Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component DATA_INTEGRITY has been changed from WARN to ALERT 2026-01-04T02:44:17.582075Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component DATA_INTEGRITY has been changed from WARN to ALERT 2026-01-04T02:44:17.582114Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: 
Sampling rate for the component DATA_INTEGRITY has been changed from 0 to 10 2026-01-04T02:44:17.582139Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TX_PRIORITIES_QUEUE has been changed from WARN to ALERT 2026-01-04T02:44:17.582156Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TX_PRIORITIES_QUEUE has been changed from WARN to ALERT 2026-01-04T02:44:17.582173Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component TX_PRIORITIES_QUEUE has been changed from 0 to 10 2026-01-04T02:44:17.582194Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BSCONFIG has been changed from WARN to ALERT 2026-01-04T02:44:17.582213Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BSCONFIG has been changed from WARN to ALERT 2026-01-04T02:44:17.582231Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BSCONFIG has been changed from 0 to 10 2026-01-04T02:44:17.582250Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component NAMESERVICE has been changed from WARN to ALERT 2026-01-04T02:44:17.582269Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component NAMESERVICE has been changed from WARN to ALERT 2026-01-04T02:44:17.582285Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component NAMESERVICE has been changed from 0 to 10 2026-01-04T02:44:17.582304Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component BRIDGE has been changed from WARN to ALERT 2026-01-04T02:44:17.582322Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component BRIDGE has been changed from WARN to ALERT 2026-01-04T02:44:17.582338Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component BRIDGE has been changed from 0 to 10 2026-01-04T02:44:17.582359Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TRANSFER has been changed from WARN to ALERT 2026-01-04T02:44:17.582377Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TRANSFER has been changed from WARN to ALERT 2026-01-04T02:44:17.582394Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component TRANSFER has been changed from 0 to 10 2026-01-04T02:44:17.582412Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component TOKEN_MANAGER has been changed from WARN to ALERT 2026-01-04T02:44:17.582432Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component TOKEN_MANAGER has been changed from WARN to ALERT 2026-01-04T02:44:17.582451Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component 
TOKEN_MANAGER has been changed from 0 to 10 2026-01-04T02:44:17.582469Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component LOCAL_DB_BACKUP has been changed from WARN to ALERT 2026-01-04T02:44:17.582486Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component LOCAL_DB_BACKUP has been changed from WARN to ALERT 2026-01-04T02:44:17.582503Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component LOCAL_DB_BACKUP has been changed from 0 to 10 2026-01-04T02:44:17.582522Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:192: TLogSettingsConfigurator: Priority for the component SCHEMA_SECRET_CACHE has been changed from WARN to ALERT 2026-01-04T02:44:17.582539Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:203: TLogSettingsConfigurator: Sampling priority for the component SCHEMA_SECRET_CACHE has been changed from WARN to ALERT 2026-01-04T02:44:17.582556Z node 11 :CMS_CONFIGS NOTICE: log_settings_configurator.cpp:209: TLogSettingsConfigurator: Sampling rate for the component SCHEMA_SECRET_CACHE has been changed from 0 to 10 2026-01-04T02:44:17.582645Z node 11 :CMS_CONFIGS TRACE: log_settings_configurator.cpp:100: TLogSettingsConfigurator: Send TEvConfigNotificationResponse: SubscriptionId: 0 ConfigId { } ... waiting for config update (done) |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyDiscoverMaxi [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> TConsoleTests::TestTenantConfigConsistency [GOOD] >> TConsoleTests::TestSetConfig |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-ordinaryuser >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientDeadCausesSubscriptionDeregistration [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose >> TSchemeShardSubDomainTest::LS [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] >> TSchemeShardSubDomainTest::SetSchemeLimits >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-ordinaryuser >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::LS [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:18.348513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:18.348631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10 2026-01-04T02:44:18.348673Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:18.348710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:18.348757Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:18.348801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:18.348872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:18.348955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:18.350045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:18.350367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:18.439001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:18.439075Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:18.453542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:18.453774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:18.454006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:18.463279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:18.463509Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:18.464241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:18.464498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:18.467866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:18.468057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:18.469173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:18.469227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:18.469329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: 
TTxServerlessStorageBilling.Execute 2026-01-04T02:44:18.469369Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:18.469490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:18.469697Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:18.476473Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:44:18.599588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:18.599827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:18.600016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:44:18.600062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:44:18.600352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:44:18.600423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:18.602896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:18.603098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:44:18.603367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:18.603426Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:44:18.603506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:44:18.603543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 
2026-01-04T02:44:18.605758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:18.605844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:44:18.605900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:44:18.607814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:18.607860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:18.607921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:18.607987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:44:18.611350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:18.613296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:44:18.613505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:44:18.614707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:18.614844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:18.614885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:18.615177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:44:18.615232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:18.615440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:18.615537Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:44:18.617818Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:18.617865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... meshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-04T02:44:18.723081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:18.723113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-04T02:44:18.723148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 100, path id: 2 2026-01-04T02:44:18.723234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-04T02:44:18.723276Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-04T02:44:18.723386Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-04T02:44:18.723425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:44:18.723461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-04T02:44:18.723499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:44:18.723542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-01-04T02:44:18.723580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:44:18.723618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-04T02:44:18.723654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-04T02:44:18.723832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2026-01-04T02:44:18.723873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2026-01-04T02:44:18.723909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2026-01-04T02:44:18.723942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 
72057594046678944, LocalPathId: 2], 3 2026-01-04T02:44:18.724858Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:44:18.724939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:44:18.724973Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-04T02:44:18.725008Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2026-01-04T02:44:18.725046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:44:18.725791Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:44:18.725896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:44:18.725927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-04T02:44:18.725953Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2026-01-04T02:44:18.725982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-04T02:44:18.726038Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2026-01-04T02:44:18.729064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-04T02:44:18.729143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestWaitNotification wait txId: 100 2026-01-04T02:44:18.729363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-04T02:44:18.729412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2026-01-04T02:44:18.729830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 
2026-01-04T02:44:18.729919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-04T02:44:18.729952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:453:2409] TestWaitNotification: OK eventTxId 100 2026-01-04T02:44:18.730335Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:18.730550Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 217us result status StatusSuccess 2026-01-04T02:44:18.731052Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:18.733712Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:18.733933Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 244us result status StatusSuccess 2026-01-04T02:44:18.734342Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" 
PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-dbadmin |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus2Stripe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKDatetime_Reboot [GOOD] Test command err: 2026-01-04T02:43:33.169738Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:33.207192Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:33.207455Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:33.215595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 
2026-01-04T02:43:33.215897Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:33.216162Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:33.216307Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:33.216463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:33.216586Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:33.216707Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:33.216856Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:33.216993Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:33.217143Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:33.217251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:33.217375Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:33.217505Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:33.249925Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:33.250081Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:33.250128Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 
2026-01-04T02:43:33.250328Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:33.250508Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:33.250596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:33.250644Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:33.250751Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:33.250817Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:33.250865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:33.250908Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:33.251105Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:33.251180Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:33.251240Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:33.251275Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:33.251375Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:33.251447Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:33.251514Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:33.251567Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:33.251618Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:33.251657Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:33.251696Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:33.251744Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:33.251799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:33.251834Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:33.252075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:33.252152Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:33.252194Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:33.252338Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:33.252410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:33.252448Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:33.252498Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:33.252539Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:33.252583Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:43:33.252643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:33.252684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:33.252721Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:33.252850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:33.252895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... _stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=9; 2026-01-04T02:44:15.802769Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=759; 2026-01-04T02:44:15.802812Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=42642; 2026-01-04T02:44:15.802856Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=42740; 2026-01-04T02:44:15.802909Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=10; 2026-01-04T02:44:15.803205Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=258; 2026-01-04T02:44:15.803233Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=43478; 2026-01-04T02:44:15.803357Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=77; 2026-01-04T02:44:15.803450Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=55; 2026-01-04T02:44:15.803749Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=264; 2026-01-04T02:44:15.804051Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=250; 2026-01-04T02:44:15.814905Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=10787; 2026-01-04T02:44:15.824767Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=9720; 2026-01-04T02:44:15.824895Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=15; 2026-01-04T02:44:15.824960Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=12; 2026-01-04T02:44:15.825011Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=8; 2026-01-04T02:44:15.825092Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=44; 2026-01-04T02:44:15.825138Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-04T02:44:15.825231Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=55; 2026-01-04T02:44:15.825276Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2026-01-04T02:44:15.825353Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=40; 2026-01-04T02:44:15.825447Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=53; 2026-01-04T02:44:15.825534Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=51; 2026-01-04T02:44:15.825577Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=73397; 2026-01-04T02:44:15.825725Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=54584568;raw_bytes=52825050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=22512;raw_bytes=19000;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=15800;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:44:15.825870Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:44:15.825939Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:44:15.826008Z node 1 :TX_COLUMNSHARD DEBUG: 
log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:44:15.826055Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:44:15.826206Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:44:15.826253Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:44:15.826285Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:44:15.826320Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:44:15.826377Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494317240;tx_id=18446744073709551615;;current_snapshot_ts=1767494614696; 2026-01-04T02:44:15.826412Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:44:15.826445Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:15.826473Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:15.826548Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:44:15.826705Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.029000s; 2026-01-04T02:44:15.829148Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:44:15.829382Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:44:15.829423Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:44:15.829480Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:44:15.829519Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:44:15.829571Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494317240;tx_id=18446744073709551615;;current_snapshot_ts=1767494614696; 2026-01-04T02:44:15.829618Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:44:15.829668Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:15.829712Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:15.829785Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-04T02:44:15.829852Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:44:15.830495Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.115000s; 2026-01-04T02:44:15.830543Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5442:7073];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscriptionClientReconnectsOnConnectionLoose [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasureMirror3Plus2 [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetBlock [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe >> TBlobStorageProxyTest::TestPersistence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SetSchemeLimits [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 
2026-01-04T02:44:19.334519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:19.334596Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:19.334629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:19.334660Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:19.334696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:19.334721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:19.334792Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:19.334868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:19.335581Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:19.335827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:19.408899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:19.408950Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:19.420596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:19.420755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:19.420897Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:19.428004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:19.428237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:19.428795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:19.429005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:19.431584Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:19.431782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: 
[RootShredManager] Stop 2026-01-04T02:44:19.432876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:19.432964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:19.433070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:19.433112Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:19.433219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:19.433408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:19.439369Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:44:19.545460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:19.545703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:19.545920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:44:19.545962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:44:19.546184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:44:19.546263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:19.548637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:19.548851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:44:19.549104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 
2026-01-04T02:44:19.549168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:44:19.549203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:44:19.549237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:44:19.551057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:19.551116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:44:19.551162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:44:19.552707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:19.552754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:19.552805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:19.552860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:44:19.556111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:19.557669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:44:19.557795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:44:19.558845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:19.558974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:19.559014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:19.559296Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:44:19.559349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:19.559525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:19.559656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:44:19.561700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:19.561750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... blishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-04T02:44:19.768760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:19.768809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:340:2316], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-04T02:44:19.768849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:340:2316], at schemeshard: 72057594046678944, txId: 100, path id: 2 2026-01-04T02:44:19.769077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-04T02:44:19.769123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-04T02:44:19.769212Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-04T02:44:19.769244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:44:19.769292Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-04T02:44:19.769320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:44:19.769355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-01-04T02:44:19.769391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:44:19.769421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-04T02:44:19.769482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-04T02:44:19.769602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 2] was 5 2026-01-04T02:44:19.769633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2026-01-04T02:44:19.769665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2026-01-04T02:44:19.769687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2026-01-04T02:44:19.770238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:44:19.770314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:44:19.770372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-04T02:44:19.770408Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2026-01-04T02:44:19.770444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:44:19.771089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:44:19.771175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:44:19.771207Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-04T02:44:19.771231Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2026-01-04T02:44:19.771270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-04T02:44:19.771327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2026-01-04T02:44:19.774041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-04T02:44:19.774819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait 
until txId: 100 TestWaitNotification wait txId: 100 2026-01-04T02:44:19.774980Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-04T02:44:19.775012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 2026-01-04T02:44:19.775350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-04T02:44:19.775420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-04T02:44:19.775469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:483:2432] TestWaitNotification: OK eventTxId 100 2026-01-04T02:44:19.775800Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:19.775943Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 168us result status StatusSuccess 2026-01-04T02:44:19.776291Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 3 ShardsInside: 2 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 DatabaseQuotas { data_stream_shards_quota: 3 } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 3 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 3 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 300 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:19.776700Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:19.776863Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 130us result status StatusSuccess 2026-01-04T02:44:19.777212Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 3 ShardsInside: 0 ShardsLimit: 3 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 300 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 3 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 3 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"$%&\'()*+,-.:;<=>?@[]^_`{|}~" MaxPQPartitions: 300 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] >> TSchemeShardSubDomainTest::Create >> TBlobStorageProxyTest::TestQuadrupleGroups [GOOD] >> TBlobStorageProxyTest::TestSingleFailure >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-ordinaryuser >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedServer [GOOD] >> TConsoleConfigTests::TestAddConfigItem >> TBlobStorageProxyTest::TestNormal >> TConsoleTests::TestRemoveTenantWithBorrowedStorageUnits [GOOD] >> TConsoleTests::TestModifyUsedZoneKind >> 
TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-anonymous >> TBlobStorageProxyTest::TestProxyLongTailDiscover >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlWithoutFlag [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart >> TBlobStorageProxyTest::TestInFlightPuts >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnDiscoverBlock [GOOD] Test command err: 2026-01-04T02:44:14.260740Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001ade/r3tmp/tmphfVnUX//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2026-01-04T02:44:14.265926Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 
BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:44:17.078467Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001ade/r3tmp/tmphfVnUX//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2026-01-04T02:44:17.096891Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 
0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:44:17.506559Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001ade/r3tmp/tmphfVnUX//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2026-01-04T02:44:17.514876Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 
LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:44:18.821570Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001ade/r3tmp/tmphfVnUX//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2026-01-04T02:44:18.827133Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:44:20.110796Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001ade/r3tmp/tmphfVnUX//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2026-01-04T02:44:20.112899Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 
HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TSchemeShardSubDomainTest::Create [GOOD] >> TSchemeShardSubDomainTest::CreateAlterNbsChannels >> TConsoleConfigSubscriptionTests::TestNotificationForNewConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-clusteradmin >> TBlobStorageProxyTest::TestVPutVGetPersistence >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-anonymous >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-clusteradmin >> TConsoleConfigTests::TestAddConfigItem [GOOD] >> TConsoleConfigTests::TestConfigureOrderConflicts >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] >> TBlobStorageProxyTest::TestPersistence [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe >> DataShardVolatile::DistributedWriteThenBulkUpsertWithCdc [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenDrop >> TConsoleTests::TestSetConfig [GOOD] >> TConsoleTests::TestTenantGeneration >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-ordinaryuser >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestart [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-dbadmin >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-ordinaryuser >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionInGranule_PKUInt64_Reboot [GOOD] Test command err: 2026-01-04T02:43:33.939002Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:33.971769Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:33.972036Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: 
TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:33.979345Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:33.979587Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:33.979835Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:33.979960Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:33.980091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:33.980214Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:33.980313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:33.980421Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:33.980550Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:33.980702Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:33.980837Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:33.980944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:33.981064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:34.010187Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:34.010322Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:34.010370Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:34.010567Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:34.010736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:34.010823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:34.010889Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:34.010982Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:34.011044Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:34.011098Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:34.011132Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:34.011317Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:34.011379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:34.011430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:34.011468Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:34.011562Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:34.011614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:34.011664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-04T02:43:34.011701Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:34.011747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:34.011804Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:34.011837Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:34.011885Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:34.011921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:34.011954Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:34.012168Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:34.012227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:34.012268Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:34.012376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:34.012423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:34.012458Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:34.012506Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:34.012543Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:34.012582Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-04T02:43:34.012637Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:34.012672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:34.012702Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:34.012833Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:34.012876Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... tage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=14; 2026-01-04T02:44:19.768463Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=917; 2026-01-04T02:44:19.768517Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=53179; 2026-01-04T02:44:19.768564Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=53282; 2026-01-04T02:44:19.768630Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=13; 2026-01-04T02:44:19.768997Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=318; 2026-01-04T02:44:19.769046Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=54143; 2026-01-04T02:44:19.769209Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=105; 2026-01-04T02:44:19.769337Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=75; 2026-01-04T02:44:19.769731Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=341; 2026-01-04T02:44:19.770146Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=357; 2026-01-04T02:44:19.783953Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=13723; 2026-01-04T02:44:19.796972Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=12894; 2026-01-04T02:44:19.797079Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=13; 2026-01-04T02:44:19.797125Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2026-01-04T02:44:19.797155Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=4; 2026-01-04T02:44:19.797211Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=31; 2026-01-04T02:44:19.797245Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-04T02:44:19.797324Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=48; 2026-01-04T02:44:19.797374Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2026-01-04T02:44:19.797441Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=32; 2026-01-04T02:44:19.797535Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=53; 2026-01-04T02:44:19.797620Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=46; 2026-01-04T02:44:19.797659Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=89794; 2026-01-04T02:44:19.797829Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=57398400;raw_bytes=55525050;count=9;records=675000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=23304;raw_bytes=19800;count=1;records=200} inactive {blob_bytes=344000;raw_bytes=16600;count=200;records=200} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:44:19.797945Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:44:19.797992Z node 1 
:TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:44:19.798045Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:44:19.798086Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:44:19.798222Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:44:19.798265Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:44:19.798294Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:44:19.798325Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:44:19.798373Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494318066;tx_id=18446744073709551615;;current_snapshot_ts=1767494615466; 2026-01-04T02:44:19.798404Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:44:19.798435Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:19.798459Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:19.798526Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:44:19.798662Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.130000s; 2026-01-04T02:44:19.801189Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:44:19.801357Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:44:19.801404Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:44:19.801477Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:44:19.801516Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=1; 2026-01-04T02:44:19.801575Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:273;event=StartCleanupStop;snapshot=plan_step=1767494318066;tx_id=18446744073709551615;;current_snapshot_ts=1767494615466; 2026-01-04T02:44:19.801611Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=1;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:44:19.801646Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:19.801678Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:19.801738Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=1.000000s; 2026-01-04T02:44:19.801793Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:44:19.802521Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.029000s; 2026-01-04T02:44:19.802572Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:5542:7173];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; >> TSchemeShardSubDomainTest::DeclareAndForbidTableInside [GOOD] >> TSchemeShardSubDomainTest::DeclareDefineAndDelete >> TBlobStorageProxyTest::TestDoubleFailure >> KqpExplain::ComplexJoin [GOOD] >> KqpExplain::CompoundKeyRange |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TConsoleConfigTests::TestConfigureOrderConflicts [GOOD] >> TConsoleConfigTests::TestGetItems >> TBlobStorageProxyTest::TestSingleFailure [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-ordinaryuser >> TBlobStorageProxyTest::TestInFlightPuts [GOOD] >> TBlobStorageProxyTest::TestHugeCollectGarbage |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetStripe [GOOD] ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateAlterNbsChannels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:20.908892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:20.909001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:20.909042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:20.909093Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:20.909146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:20.909193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:20.909255Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:20.909334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:20.910229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:20.910549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:21.008741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:21.008807Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:21.025380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:21.025631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:21.025852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:21.036118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:21.036416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:21.037198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:21.037478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:21.040817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:21.041040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:21.042352Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:21.042423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:21.042547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:21.042607Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:21.042728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:21.042948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:21.217356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:21.218479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:21.218617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:21.218707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:21.218794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:21.218869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:21.218946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:21.219028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:21.219117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:21.219193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:21.219282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:21.219373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 
281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:21.219456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:21.219563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:21.219651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... EMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:44:23.638020Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:44:23.638046Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:44:23.639142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:44:23.640055Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-04T02:44:23.640244Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:23.640534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:44:23.642099Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-01-04T02:44:23.642660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 
72057594046678944 2026-01-04T02:44:23.645219Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 Forgetting tablet 72075186233409546 2026-01-04T02:44:23.646289Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-04T02:44:23.646447Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-04T02:44:23.646639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 Forgetting tablet 72075186233409548 2026-01-04T02:44:23.647381Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-04T02:44:23.647488Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-04T02:44:23.647646Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 Forgetting tablet 72075186233409547 2026-01-04T02:44:23.650884Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:44:23.650944Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-04T02:44:23.651041Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 Forgetting tablet 72075186233409549 2026-01-04T02:44:23.651697Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:44:23.651785Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:44:23.651915Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:44:23.653639Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 105 2026-01-04T02:44:23.654192Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 
72057594046678944, cookie: 105 2026-01-04T02:44:23.656966Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:44:23.657049Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-04T02:44:23.657169Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-04T02:44:23.657196Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-04T02:44:23.657572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:44:23.657604Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-04T02:44:23.657747Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-04T02:44:23.657831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-04T02:44:23.659176Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-04T02:44:23.659299Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:44:23.659345Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:44:23.659424Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:44:23.659648Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:44:23.662260Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 105, wait until txId: 105 TestWaitNotification wait txId: 105 2026-01-04T02:44:23.664713Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 105: send EvNotifyTxCompletion 2026-01-04T02:44:23.664763Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 105 2026-01-04T02:44:23.665222Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 105, at schemeshard: 72057594046678944 2026-01-04T02:44:23.665325Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:44:23.665367Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1056:3008] 
TestWaitNotification: OK eventTxId 105 2026-01-04T02:44:23.665966Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/BSVolume" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:23.666168Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/BSVolume" took 238us result status StatusPathDoesNotExist 2026-01-04T02:44:23.666369Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/BSVolume\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/BSVolume" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:44:23.666967Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:23.667136Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 185us result status StatusPathDoesNotExist 2026-01-04T02:44:23.667261Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TBlobStorageProxyTest::TestNormal [GOOD] >> TBlobStorageProxyTest::TestNormalMirror >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-anonymous >> VectorIndexBuildTest::Metering_Documentation_Formula_Build-smallRows-true [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-false >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestConsoleRestartSimplified [GOOD] >> 
TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] >> TConsoleConfigTests::TestGetItems [GOOD] >> TConsoleConfigTests::TestGetNodeItems ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestSingleFailure [GOOD] Test command err: 2026-01-04T02:44:21.525434Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001adb/r3tmp/tmpC1O6Q6//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-01-04T02:44:21.534305Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 
TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscover [GOOD] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-ordinaryuser >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-dbadmin >> TBlobStorageProxyTest::TestProxyGetSingleTimeout >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeclareDefineAndDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:22.920759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:22.920830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:22.920855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:22.920877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:22.920908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:22.920933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:22.920981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:22.921036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] 
Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:22.921743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:22.922033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:23.008568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:23.008620Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:23.022637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:23.022839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:23.023036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:23.031158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:23.031401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:23.032030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:23.032265Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:23.034607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:23.034793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:23.035752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:23.035828Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:23.035934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:23.035972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:23.036093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:23.036298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:23.161365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:44:23.162214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:23.162312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:23.162360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:23.162420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:23.162471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:23.162522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:23.162602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:23.162675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:23.162730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:23.162768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:23.162827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:23.162900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:23.163000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:23.163082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
939591Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-04T02:44:24.939618Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-04T02:44:24.939642Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 10 2026-01-04T02:44:24.939666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 38], 18446744073709551615 2026-01-04T02:44:24.940355Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:44:24.940444Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 10 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:44:24.940476Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:44:24.940528Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 10 2026-01-04T02:44:24.940566Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:44:24.941311Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:44:24.941407Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:44:24.941441Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:44:24.941467Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 18446744073709551615 2026-01-04T02:44:24.941495Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:44:24.941560Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:44:24.943079Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 3, to hive 72057594037968897, at 
schemeshard 72057594046678944 2026-01-04T02:44:24.943185Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:44:24.943232Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:44:24.943263Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:44:24.944563Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409546 2026-01-04T02:44:24.944940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:24.945298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:44:24.946787Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 2026-01-04T02:44:24.947006Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-04T02:44:24.947213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:44:24.947565Z node 2 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 Forgetting tablet 72075186233409548 2026-01-04T02:44:24.948623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-04T02:44:24.948793Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 Forgetting tablet 72075186233409547 2026-01-04T02:44:24.949555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:44:24.949608Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:44:24.949719Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:44:24.950401Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:44:24.950465Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:44:24.950542Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:44:24.950932Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:44:24.951054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:44:24.953568Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:44:24.953622Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-04T02:44:24.953789Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-04T02:44:24.953834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-04T02:44:24.953893Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:44:24.953928Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-04T02:44:24.955234Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:44:24.955352Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:44:24.955623Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:44:24.955663Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:44:24.956095Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:44:24.956182Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:44:24.956218Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:928:2883] TestWaitNotification: OK eventTxId 102 
2026-01-04T02:44:24.956748Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:24.956926Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 217us result status StatusPathDoesNotExist 2026-01-04T02:44:24.957102Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKInt64 [GOOD] Test command err: 2026-01-04T02:43:17.018484Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:17.048451Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:17.048671Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:17.055718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:17.055954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:17.056139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:17.056249Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:17.056319Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:17.056380Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:17.056473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:17.056544Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:17.056641Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:17.056726Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:17.056839Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:17.056913Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:17.057012Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:17.084520Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:17.084667Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:17.084718Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:17.084931Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:17.085101Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:17.085195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:17.085248Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:17.085341Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:17.085402Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:17.085455Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:17.085497Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:17.085701Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:17.085776Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:17.085844Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:17.085879Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:17.085981Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:17.086038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:17.086111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:17.086149Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:17.086227Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:17.086271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:17.086312Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:17.086365Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:17.086403Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:17.086434Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:17.086640Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:17.086719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:17.086751Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:17.086859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:17.086898Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:17.086918Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:17.086965Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:17.087005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:17.087044Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:43:17.087103Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:17.087145Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:17.087181Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:17.087311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:17.087351Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
es;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=12824; 2026-01-04T02:44:21.961078Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=10; 2026-01-04T02:44:21.962311Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1167; 2026-01-04T02:44:21.962380Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=14391; 2026-01-04T02:44:21.962446Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=14554; 2026-01-04T02:44:21.962532Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=15; 2026-01-04T02:44:21.962663Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=60; 2026-01-04T02:44:21.962722Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=15526; 2026-01-04T02:44:21.962962Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=160; 2026-01-04T02:44:21.963245Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=188; 2026-01-04T02:44:21.963527Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=212; 2026-01-04T02:44:21.963827Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=220; 2026-01-04T02:44:21.967011Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=3109; 2026-01-04T02:44:21.970007Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=2920; 2026-01-04T02:44:21.970092Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2026-01-04T02:44:21.970140Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=9; 2026-01-04T02:44:21.970186Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-04T02:44:21.970280Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=57; 2026-01-04T02:44:21.970330Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=6; 2026-01-04T02:44:21.970441Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=68; 2026-01-04T02:44:21.970490Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=6; 2026-01-04T02:44:21.970562Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=37; 2026-01-04T02:44:21.970678Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=73; 2026-01-04T02:44:21.970793Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=64; 2026-01-04T02:44:21.970860Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=34929; 2026-01-04T02:44:21.971032Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110437896;raw_bytes=171489958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:44:21.971162Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:44:21.971257Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:44:21.971337Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:44:21.971397Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:44:21.971540Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:44:21.971606Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:44:21.971647Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:44:21.971696Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:44:21.971815Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:44:21.971869Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:21.971942Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:21.972056Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:44:21.972289Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.060000s; 2026-01-04T02:44:21.975321Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:44:21.975600Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:44:21.975670Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:44:21.975749Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:44:21.975826Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:44:21.975898Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:44:21.975981Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:44:21.976051Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:21.976106Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:21.976198Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-04T02:44:21.976261Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:44:21.977226Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.040000s; 2026-01-04T02:44:21.977287Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2368:4348];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-dbadmin >> TConsoleTests::TestModifyUsedZoneKind [GOOD] >> TConsoleTests::TestRemoveTenant |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVPutVGetPersistence [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure3Plus1Stripe [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TConsoleConfigTests::TestGetNodeItems [GOOD] >> TConsoleConfigTests::TestGetNodeConfig >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-system >> TBlobStorageProxyTest::TestBlockPersistence >> TBlobStorageProxyTest::TestSingleFailureMirror >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-dbadmin >> TConsoleInMemoryConfigSubscriptionTests::TestComplexYamlConfigChanges [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscover >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-anonymous >> TBlobStorageProxyTest::TestDoubleFailure [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-clusteradmin [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-system |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestHugeCollectGarbage [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] >> TBlobStorageProxyTest::TestNormalMirror [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-ordinaryuser >> TConsoleTests::TestTenantGeneration [GOOD] >> TConsoleTests::TestTenantGenerationExtSubdomain >> TConsoleConfigTests::TestGetNodeConfig [GOOD] >> TConsoleConfigTests::TestAutoOrder >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] >> TBlobStorageProxyTest::TestProxyGetSingleTimeout [GOOD] >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout >> TSchemeShardSubDomainTest::CopyRejects >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-ordinaryuser >> TConsoleInMemoryConfigSubscriptionTests::TestNoYamlResend [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply >> TSchemeShardSubDomainTest::RedefineErrors |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror [GOOD] |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestNormalMirror [GOOD] |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block >> TBlobStorageProxyTest::TestProxySimpleDiscover [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranule_PKDatetime [GOOD] Test command err: 2026-01-04T02:43:21.976619Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:22.004001Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:22.004327Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:22.012176Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:22.012470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:22.012734Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:22.012906Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:22.013015Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:22.013136Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:22.013266Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:22.013379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:22.013503Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:22.013667Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:22.013830Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:22.013984Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:22.014096Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:22.043858Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:22.044029Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:22.044081Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:22.044298Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:22.044478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:22.044558Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:22.044600Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:22.044666Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:22.044742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:22.044781Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:22.044813Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:22.044961Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:22.045007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:22.045034Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:22.045063Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:22.045144Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:22.045191Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:22.045268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:22.045296Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:22.045354Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:22.045386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:22.045420Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 
2026-01-04T02:43:22.045454Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:22.045482Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:22.045519Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:22.045700Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:22.045754Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:22.045780Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:22.045937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:22.045995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:22.046027Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:22.046080Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:22.046124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:22.046170Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:43:22.046234Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:22.046277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:22.046327Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:22.046490Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 
2026-01-04T02:43:22.046536Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... _engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=7030; 2026-01-04T02:44:24.378200Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=8; 2026-01-04T02:44:24.379069Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=820; 2026-01-04T02:44:24.379135Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=8189; 2026-01-04T02:44:24.379181Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=8367; 2026-01-04T02:44:24.379230Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=10; 2026-01-04T02:44:24.379298Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=36; 2026-01-04T02:44:24.379327Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=9015; 2026-01-04T02:44:24.379476Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=107; 2026-01-04T02:44:24.379617Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=99; 2026-01-04T02:44:24.379827Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=160; 2026-01-04T02:44:24.379977Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=105; 2026-01-04T02:44:24.382286Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=2253; 2026-01-04T02:44:24.384289Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1938; 2026-01-04T02:44:24.384360Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2026-01-04T02:44:24.384401Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=8; 2026-01-04T02:44:24.384458Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-04T02:44:24.384545Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=54; 2026-01-04T02:44:24.384583Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=5; 2026-01-04T02:44:24.384692Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=65; 2026-01-04T02:44:24.384746Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=11; 2026-01-04T02:44:24.384798Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=29; 2026-01-04T02:44:24.384867Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=41; 2026-01-04T02:44:24.384966Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=67; 2026-01-04T02:44:24.385002Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=25414; 2026-01-04T02:44:24.385132Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=110400112;raw_bytes=155849958;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:44:24.385262Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:44:24.385331Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:44:24.385408Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:44:24.385482Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2345:4325];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:44:24.385621Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:44:24.385687Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:44:24.385733Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:44:24.385772Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:44:24.385844Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:44:24.385886Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:24.385941Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:24.386038Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:44:24.386253Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.176000s; 2026-01-04T02:44:24.388778Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:44:24.388998Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:44:24.389097Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:44:24.389187Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:44:24.389237Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:44:24.389293Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:44:24.389378Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:44:24.389452Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:24.389503Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:24.389571Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-04T02:44:24.389617Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:44:24.390148Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.069000s; 2026-01-04T02:44:24.390203Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:2345:4325];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> test_sql_streaming.py::test[watermarks-watermarks_as-default.txt] [FAIL] >> test_sql_streaming.py::test[watermarks-watermarks_drop-default.txt] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverSingleFailure [GOOD] Test command err: 2026-01-04T02:44:18.878326Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001ad7/r3tmp/tmp4iiHGr//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-01-04T02:44:18.887220Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:44:20.399737Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001ad7/r3tmp/tmp4iiHGr//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2026-01-04T02:44:20.405882Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 
HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:44:21.939519Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001ad7/r3tmp/tmp4iiHGr//vdisk_bad_2/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 3 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 3 2026-01-04T02:44:21.943864Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 3 VDISK[0:_:0:2:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 
0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 3 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:44:23.467671Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001ad7/r3tmp/tmp4iiHGr//vdisk_bad_3/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 4 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 4 2026-01-04T02:44:23.485495Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 4 VDISK[0:_:0:3:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 
BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 4 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:44:25.101199Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001ad7/r3tmp/tmp4iiHGr//vdisk_bad_4/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 5 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 5 2026-01-04T02:44:25.105749Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 5 VDISK[0:_:0:4:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 
LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 5 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:44:26.642793Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001ad7/r3tmp/tmp4iiHGr//vdisk_bad_5/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 6 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 6 2026-01-04T02:44:26.645241Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 6 VDISK[0:_:0:5:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 
LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 6 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-dbadmin >> TConsoleConfigTests::TestAutoOrder [GOOD] >> TConsoleConfigTests::TestAutoKind >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-dbadmin >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenMkDir [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir >> TBlobStorageProxyTest::TestSingleFailureMirror [GOOD] >> TBlobStorageProxyTest::TestVBlockVPutVGet >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApply [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-dbadmin >> TBlobStorageProxyTest::TestDoubleEmptyGet >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] >> TBlobStorageProxyTest::TestBlockPersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbage >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSubDomainTest::CopyRejects [GOOD] >> TSchemeShardSubDomainTest::ConsistentCopyRejects >> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetStripe [GOOD] >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 >> 
TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-dbadmin >> TBlobStorageProxyTest::TestGetMultipart >> TBlobStorageProxyTest::TestVPutVCollectVGetRace >> TConsoleConfigTests::TestAutoKind [GOOD] >> TConsoleConfigTests::TestAutoSplit >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithDb [GOOD] >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RedefineErrors [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:28.703058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:28.703131Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:28.703159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:28.703185Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:28.703215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:28.703238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:28.703275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:28.703360Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:28.704049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:28.704259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:28.800284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:28.800344Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:28.817042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:28.817280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:28.817461Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:28.826532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:28.826769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:28.827608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:28.827906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:28.835171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:28.835391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:28.836749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:28.836814Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:28.836923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:28.836969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:28.837076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:28.837299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:28.966671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.967505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.967622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.967680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.967720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.967781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.967833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.967882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.967957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.968019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.968056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.968113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.968173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.968260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.968321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
xt state 2026-01-04T02:44:29.770534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 2 -> 3 2026-01-04T02:44:29.772066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-04T02:44:29.772123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 108:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:44:29.772180Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 3 -> 128 2026-01-04T02:44:29.773580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-04T02:44:29.773631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-04T02:44:29.773664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 108:0, at tablet# 72057594046678944 2026-01-04T02:44:29.773707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 108 ready parts: 1/1 2026-01-04T02:44:29.773860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 108 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:29.775198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 108:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:108 msg type: 269090816 2026-01-04T02:44:29.775324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000043 FAKE_COORDINATOR: advance: minStep5000043 State->FrontStep: 5000042 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000043 2026-01-04T02:44:29.775649Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000043, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:29.775760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000043 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:29.775878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2026-01-04T02:44:29.776188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 128 -> 240 2026-01-04T02:44:29.776276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2026-01-04T02:44:29.776631Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:44:29.776715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 108 2026-01-04T02:44:29.778481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:29.778517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:44:29.778674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:29.778712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 108, path id: 38 2026-01-04T02:44:29.779020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-04T02:44:29.779084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 108:0 ProgressState 2026-01-04T02:44:29.779163Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-04T02:44:29.779189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-04T02:44:29.779271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-04T02:44:29.779296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-04T02:44:29.779326Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: false 2026-01-04T02:44:29.779355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-04T02:44:29.779385Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2026-01-04T02:44:29.779415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 108:0 2026-01-04T02:44:29.779471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-04T02:44:29.779502Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 108, publications: 1, subscribers: 0 2026-01-04T02:44:29.779527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 38], 8 2026-01-04T02:44:29.779919Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 
72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2026-01-04T02:44:29.779995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2026-01-04T02:44:29.780034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2026-01-04T02:44:29.780064Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 8 2026-01-04T02:44:29.780114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:44:29.780189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2026-01-04T02:44:29.782645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2026-01-04T02:44:29.782947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2026-01-04T02:44:29.782993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2026-01-04T02:44:29.783381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2026-01-04T02:44:29.783447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-04T02:44:29.783492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [1:999:2955] TestWaitNotification: OK eventTxId 108 2026-01-04T02:44:29.783989Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:29.784150Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 192us result status StatusSuccess 2026-01-04T02:44:29.784508Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 6 SecurityStateVersion: 0 } } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 6 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } StoragePools { Name: "pool-hdd-1" Kind: "hdd-1" } StoragePools { Name: "pool-hdd-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForModifiedConfigItemScope [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem >> TSchemeShardSysNames::ESchemeOpCreateFileStore-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-anonymous >> test_sql_streaming.py::test[pq-ReadTwoTopics-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadWriteSameTopic-default.txt] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-ordinaryuser >> TBlobStorageProxyTest::TestPartialGetBlock >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainsInSeparateDir [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:28.794784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:28.794904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:28.794959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:28.794998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing 
config: using default configuration 2026-01-04T02:44:28.795044Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:28.795081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:28.795161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:28.795254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:28.796188Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:28.796552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:28.881375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:28.881434Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:28.896125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:28.896697Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:28.896841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:28.905392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:28.905751Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:28.906614Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:28.906892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:28.910216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:28.910427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:28.911780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:28.911850Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:28.911910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:28.911958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:28.912076Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:28.912307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:28.919616Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:44:29.037406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:29.037698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:29.037946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:44:29.038005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:44:29.038250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:44:29.038329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:29.040685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:29.040955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:44:29.041223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:29.041280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:44:29.041325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:44:29.041362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:44:29.043599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:29.043670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at 
schemeshard: 72057594046678944 2026-01-04T02:44:29.043744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:44:29.046581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:29.046633Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:29.046692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:29.046778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:44:29.050276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:29.052100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:44:29.052543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:44:29.053690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:29.053819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:29.053885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:29.054139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:44:29.054184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:29.054350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:29.054441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:44:29.056481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:29.056524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 2026-01-04T02:44:30.061827Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2026-01-04T02:44:30.061866Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 7 2026-01-04T02:44:30.061903Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2026-01-04T02:44:30.062528Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:44:30.062644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:44:30.062721Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:44:30.062766Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 7 2026-01-04T02:44:30.062819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-04T02:44:30.064293Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:44:30.064391Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:44:30.064426Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:44:30.064457Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2026-01-04T02:44:30.064508Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2026-01-04T02:44:30.064586Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2026-01-04T02:44:30.064645Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [2:892:2736] 
2026-01-04T02:44:30.066716Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:44:30.068216Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:44:30.068326Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:44:30.068362Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:893:2737] TestWaitNotification: OK eventTxId 101 2026-01-04T02:44:30.068929Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:30.069152Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_0" took 275us result status StatusSuccess 2026-01-04T02:44:30.069559Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:30.070130Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains/USER_1" Options { ReturnPartitioningInfo: false 
ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:30.070311Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomains/USER_1" took 196us result status StatusSuccess 2026-01-04T02:44:30.070700Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains/USER_1" PathDescription { Self { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 4 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 4 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 4 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:30.071173Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SubDomains" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:30.071325Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SubDomains" took 183us result status StatusSuccess 2026-01-04T02:44:30.071718Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SubDomains" PathDescription { Self { Name: "SubDomains" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 } ChildrenExist: true } Children { Name: 
"USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000004 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } Children { Name: "USER_1" PathId: 4 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000003 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureMirror3Plus2 [GOOD] Test command err: 2026-01-04T02:44:24.675226Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001ad2/r3tmp/tmpRQlyZX//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-01-04T02:44:24.676378Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001ad2/r3tmp/tmpRQlyZX//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2026-01-04T02:44:24.681838Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:44:24.682223Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:44:27.784131Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001ad2/r3tmp/tmp6j39o7//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-01-04T02:44:27.784651Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001ad2/r3tmp/tmp6j39o7//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2026-01-04T02:44:27.790458Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:44:27.798172Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVBlockVPutVGet [GOOD] Test command err: 2026-01-04T02:44:27.060812Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001abb/r3tmp/tmpjPDwMA//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-01-04T02:44:27.063685Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |92.8%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowPercentile-default.txt] [FAIL] >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] >> TBlobStorageProxyTest::TestVPutVCollectVGetRace [GOOD] >> TBlobStorageProxyTest::TestVGetNoData |92.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyDiscoverSingleTimeout [GOOD] |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-clusteradmin >> TConsoleConfigTests::TestAutoSplit [GOOD] >> TConsoleConfigTests::TestAllowedScopes >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD] >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenDrop [GOOD] >> DataShardVolatile::DistributedWriteLostPlanThenSplit >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-dbadmin >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Block [GOOD] >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ConsistentCopyRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:28.507140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:28.507246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:28.507298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:28.507336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:28.507376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:28.507406Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:28.507459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, 
InflightLimit# 10 2026-01-04T02:44:28.507538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:28.508487Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:28.508778Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:28.596716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:28.596790Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:28.612870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:28.613108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:28.613332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:28.621275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:28.621482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:28.622082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:28.622305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:28.625280Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:28.625520Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:28.626938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:28.627014Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:28.627148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:28.627200Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:28.627305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:28.627526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:28.769975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir 
MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.771173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.771318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.771387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.771473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.771545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.771616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.771698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.771806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.771879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.771947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.772017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.772082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.772199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:28.772305Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
{ TxId: 106 Name: CopyTableBarrier }, at tablet# 72057594046678944 2026-01-04T02:44:31.223202Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 106:0 240 -> 240 2026-01-04T02:44:31.224892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 106:0, at schemeshard: 72057594046678944 2026-01-04T02:44:31.224937Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 106:0 ProgressState 2026-01-04T02:44:31.225045Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-04T02:44:31.225081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-04T02:44:31.225118Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#106:0 progress is 1/1 2026-01-04T02:44:31.225172Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-04T02:44:31.225212Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 106, ready parts: 1/1, is published: true 2026-01-04T02:44:31.225282Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:653:2575] message: TxId: 106 2026-01-04T02:44:31.225333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 106 ready parts: 1/1 2026-01-04T02:44:31.225516Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 106:0 2026-01-04T02:44:31.225550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 106:0 2026-01-04T02:44:31.225682Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 6] was 3 2026-01-04T02:44:31.225721Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2026-01-04T02:44:31.227411Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-04T02:44:31.227534Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:830:2727] TestWaitNotification: OK eventTxId 106 2026-01-04T02:44:31.228212Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:31.228448Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table" took 262us result status StatusSuccess 2026-01-04T02:44:31.228867Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table" PathDescription { Self { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: 
true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:31.229573Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dst" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:31.229767Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dst" took 222us result status StatusSuccess 2026-01-04T02:44:31.230183Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dst" PathDescription { Self { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: 
"dst" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 6 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:31.230787Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:31.230948Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 176us result status StatusSuccess 2026-01-04T02:44:31.231338Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dst" PathId: 6 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 106 CreateStep: 250 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "table" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 150 ParentPathId: 2 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 2 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TConsoleTests::TestTenantGenerationExtSubdomain [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSingle >> TBlobStorageProxyTest::TestVGetNoData [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbage [GOOD] |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestGetMultipart [GOOD] >> TBlobStorageProxyTest::TestGetFail >> TBlobStorageProxyTest::TestPartialGetBlock [GOOD] >> TBlobStorageProxyTest::TestPartialGetMirror >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleInMemoryConfigSubscriptionTests::TestSubscribeAfterConfigApplyWithKnownConfig [GOOD] Test command err: 2026-01-04T02:44:04.169960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:04.170022Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:04.354527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:05.408427Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:05.408495Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:05.485052Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:44:06.445690Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:06.445763Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:06.535796Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:07.469845Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:07.469910Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:07.544624Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:08.498667Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:08.498725Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:08.568435Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:09.564407Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:09.564464Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:09.642014Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:10.674261Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:10.674342Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:10.769937Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:14.403275Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:14.403341Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:14.481252Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:18.145975Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:18.146058Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:18.253921Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:19.228639Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:19.228721Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:19.311999Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:19.896378Z node 10 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 11 2026-01-04T02:44:19.897008Z node 10 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037936131] NodeDisconnected NodeId# 11 2026-01-04T02:44:19.897073Z node 10 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 11 2026-01-04T02:44:19.897319Z node 11 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [11:464:2089] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2026-01-04T02:44:20.777378Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:20.777463Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:20.880736Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:22.256410Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:22.256503Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:22.365109Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:23.797426Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:23.797499Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:23.907732Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:25.541034Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:25.541122Z node 18 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:25.646191Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:27.251866Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:27.251935Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:27.347762Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:28.613251Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:28.613352Z node 21 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:28.720929Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:30.854983Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:30.855061Z node 23 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:30.956665Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> TBlobStorageProxyTest::TestDoubleEmptyGet [GOOD] >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] >> TColumnShardTestReadWrite::ReadGroupBy+SimpleReader [GOOD] >> KqpExplain::CompoundKeyRange [GOOD] >> TBlobStorageProxyTest::TestCollectGarbagePersistence >> TConsoleConfigTests::TestAllowedScopes [GOOD] >> TConsoleConfigTests::TestCheckConfigUpdates ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionGCFailingBs [GOOD] Test command err: 2026-01-04T02:43:20.420678Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:20.448790Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:20.448992Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at 
tablet 9437184 2026-01-04T02:43:20.455467Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:20.455722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:20.455920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:20.456029Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:20.456099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:20.456188Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:20.456263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:20.456325Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:20.456410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:20.456494Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:20.456584Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:20.456683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:20.456791Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:20.480165Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:20.480271Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:20.480307Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:20.480469Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:20.480624Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:20.480686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:20.480723Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:20.480808Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:20.480863Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:20.480894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:20.480913Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:20.481070Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:20.481130Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:20.481163Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:20.481196Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:20.481261Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:20.481297Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:20.481328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 
2026-01-04T02:43:20.481359Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:20.481395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:20.481419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:20.481479Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:20.481521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:20.481546Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:20.481620Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:20.481849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:20.481889Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:20.481914Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:20.482073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:20.482106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:20.482129Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:20.482175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:20.482206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:20.482234Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 
2026-01-04T02:43:20.482267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:20.482292Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:20.482318Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:20.482460Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:20.482511Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... odicWakeup;fline=columnshard.cpp:257;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:44:09.224678Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:343:2351];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:44:09.224761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:343:2351];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:488;event=skip_compaction;reason=disabled; 2026-01-04T02:44:09.224838Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:343:2351];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=2; 2026-01-04T02:44:09.224989Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:343:2351];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=20;drop=0;skip=0;portions_counter=20;chunks=2240;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:44:09.225142Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:343:2351];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=manager.cpp:10;event=lock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::3e9972a4-e91711f0-906b0d69-d73fdd9d; 2026-01-04T02:44:09.225201Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;self_id=[1:343:2351];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=ro_controller.cpp:41;event=CS::CLEANUP::PORTIONS;tablet_id=9437184; 2026-01-04T02:44:09.225272Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:343:2351];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=abstract.cpp:13;event=new_stage;stage=Started;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d; 2026-01-04T02:44:09.226066Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:343:2351];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskAccessorResources;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d; 2026-01-04T02:44:09.226204Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:343:2351];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskAccessors;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d; 2026-01-04T02:44:09.226380Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:343:2351];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:09.226484Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:343:2351];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.626000s; 2026-01-04T02:44:09.226553Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:343:2351];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:44:09.227291Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=CLEANUP_PORTIONS;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=AskDataResources;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d; 2026-01-04T02:44:09.227642Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: event=on_execution;consumer=CLEANUP_PORTIONS;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_execution;consumer=CLEANUP_PORTIONS;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;event=on_finished;consumer=CLEANUP_PORTIONS;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d;script=ACCESSOR_PORTIONS_FETCHING::CLEANUP_PORTIONS;fline=abstract.cpp:13;event=new_stage;stage=ReadyForConstruct;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d; Cleanup old portions: 2 4 9 8 1 6 3 7 10 5 19 14 18 13 21 11 15 22 20 17 2026-01-04T02:44:09.227993Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: WriteIndex at tablet 9437184 2026-01-04T02:44:09.228065Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:343:2351];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;fline=abstract.cpp:13;event=new_stage;stage=Compiled;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d; 2026-01-04T02:44:09.228257Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxWriteIndex[183] (CS::CLEANUP::PORTIONS) apply at tablet 9437184 2026-01-04T02:44:09.229133Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:343:2351];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteIndex;tablet_id=9437184;external_task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d;fline=abstract.cpp:13;event=new_stage;stage=Written;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d; 
2026-01-04T02:44:09.229712Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=148108480;raw_bytes=150092142;count=25;records=1800002} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=12353440;raw_bytes=14738900;count=2;records=150000} inactive {blob_bytes=123422240;raw_bytes=125076680;count=20;records=1500000} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:44:09.242669Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d;fline=abstract.cpp:13;event=new_stage;stage=Finished;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d; 2026-01-04T02:44:09.242759Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d;fline=abstract.cpp:54;event=WriteIndexComplete;type=CS::CLEANUP::PORTIONS;success=1; 2026-01-04T02:44:09.243183Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d;fline=manager.cpp:15;event=unlock;process_id=CS::CLEANUP::PORTIONS::PORTIONS_DROP::3e9972a4-e91711f0-906b0d69-d73fdd9d; 2026-01-04T02:44:09.243280Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:44:09.243360Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d;tablet_id=9437184;fline=columnshard_impl.cpp:488;event=skip_compaction;reason=disabled; 2026-01-04T02:44:09.243424Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:44:09.243508Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:44:09.243580Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:09.243636Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:09.243761Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.624500s; 2026-01-04T02:44:09.243869Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;task_id=3e9972a4-e91711f0-906b0d69-d73fdd9d;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:44:09.243976Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:20:4:0:6171112:0] 2026-01-04T02:44:09.244047Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:2:4:0:6171112:0] 2026-01-04T02:44:09.244089Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:5:4:0:6171112:0] 2026-01-04T02:44:09.244149Z node 1 
:TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:16:3:0:6171112:0] 2026-01-04T02:44:09.244197Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:19:3:0:6171112:0] 2026-01-04T02:44:09.244237Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:1:3:0:6171112:0] 2026-01-04T02:44:09.244292Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:4:3:0:6171112:0] 2026-01-04T02:44:09.244334Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:12:2:0:6171112:0] 2026-01-04T02:44:09.244378Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:7:3:0:6171112:0] 2026-01-04T02:44:09.244418Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:1:3:0:6171112:0] 2026-01-04T02:44:09.244459Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:3:2:0:6171112:0] 2026-01-04T02:44:09.244499Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:6:2:0:6171112:0] 2026-01-04T02:44:09.244539Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:8:4:0:6171112:0] 2026-01-04T02:44:09.244587Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:18:2:0:6171112:0] 2026-01-04T02:44:09.244634Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:9:2:0:6171112:0] 2026-01-04T02:44:09.244673Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:17:4:0:6171112:0] 2026-01-04T02:44:09.244723Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:13:3:0:6171112:0] 2026-01-04T02:44:09.244779Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:11:4:0:6171112:0] 2026-01-04T02:44:09.244830Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:2:2:4:0:6171112:0] 2026-01-04T02:44:09.244870Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: BlobManager at tablet 9437184 Delete Blob DS:0:[9437184:3:15:2:0:6171112:0] GC for channel 2 deletes blobs: Compactions happened: 2 Cleanups happened: 1 Old portions: 1 2 3 4 5 6 7 8 9 10 11 13 14 15 17 18 19 20 21 22 Cleaned up portions: 1 2 3 4 5 6 7 8 9 10 11 13 14 15 17 18 19 20 21 22 |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestVGetNoData [GOOD] |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-dbadmin |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-anonymous |92.9%| 
[TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCompactedGetMultipart [GOOD] |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> test_sql_streaming.py::test[hop-GroupByHopPercentile-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopTimeExtractorUnusedColumns-default.txt] >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] >> TBlobStorageProxyTest::TestGetFail [GOOD] >> ReadSessionImplTest::ForcefulDestroyPartitionStream [GOOD] >> ReadSessionImplTest::DestroyPartitionStreamRequest [GOOD] >> ReadSessionImplTest::DecompressZstdEmptyMessage >> ReadSessionImplTest::UsesOnRetryStateDuringRetries [GOOD] >> RetryPolicy::TWriteSession_TestPolicy >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-clusteradmin >> ReadSessionImplTest::ProperlyOrdersDecompressedData [GOOD] >> ReadSessionImplTest::PacksBatches_ExactlyTwoMessagesInBatch [GOOD] >> ReadSessionImplTest::PacksBatches_OneMessageInEveryBatch [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-clusteradmin >> ReadSessionImplTest::DecompressRaw [GOOD] >> ReadSessionImplTest::DecompressGzip [GOOD] >> ReadSessionImplTest::DecompressZstd [GOOD] >> ReadSessionImplTest::DecompressRawEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressGzipEmptyMessage [GOOD] >> ReadSessionImplTest::DecompressWithSynchronousExecutor [GOOD] >> ReadSessionImplTest::DataReceivedCallbackReal >> TConsoleConfigTests::TestCheckConfigUpdates [GOOD] >> TConsoleConfigTests::TestManageValidators >> ReadSessionImplTest::DecompressZstdEmptyMessage [GOOD] >> ReadSessionImplTest::PacksBatches_BatchABitBiggerThanLimit [GOOD] >> ReadSessionImplTest::PacksBatches_BatchesEqualToServerBatches [GOOD] >> ReadSessionImplTest::HoleBetweenOffsets [GOOD] >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-false [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-true >> TConsoleConfigSubscriptionTests::TestNotificationForRemovedConfigItem [GOOD] >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPartialGetMirror [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpExplain::CompoundKeyRange [GOOD] Test command err: 2026-01-04T02:41:55.049579Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331140271601060:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:55.049663Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:55.282305Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 
2026-01-04T02:41:55.355462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:55.355567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:55.365078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:55.409665Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:55.450839Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:55.597434Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:55.597494Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:55.597505Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:55.597957Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:55.631867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:55.845790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:41:55.868043Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331140047088111:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:55.869384Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/Database/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:55.879512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:55.879612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:55.889270Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:41:55.891028Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:41:55.902287Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:55.916664Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 
2026-01-04T02:41:55.980460Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:41:55.980623Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:41:55.980713Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:41:55.980763Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:41:55.980820Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:41:55.980878Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:41:55.980916Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:41:55.981617Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:41:55.981701Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:41:56.015560Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:56.015781Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:56.019264Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:56.064820Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:56.067734Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:41:56.067824Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:41:56.119034Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:41:56.119103Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:41:56.119281Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:41:56.119310Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:41:56.119336Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:41:56.119360Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:41:56.119398Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:41:56.119423Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:41:56.120653Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:41:56.120827Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:56.123241Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: 
[72075186224037894] EvServerConnected, pipe server id = [2:7591331144342055934:2280] 2026-01-04T02:41:56.125558Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:41:56.130422Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:7591331144342056003:2299] Owner: [2:7591331144342056002:2298]. Describe result: PathErrorUnknown 2026-01-04T02:41:56.130451Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:7591331144342056003:2299] Owner: [2:7591331144342056002:2298]. Creating table 2026-01-04T02:41:56.130522Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:7591331144342056003:2299] Owner: [2:7591331144342056002:2298]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:41:56.134852Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:7591331144342056024:2344], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:41:56.138078Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:41:56.141716Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591331144342056016:2342] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:41:56.155026Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:7591331144342056003:2299] Owner: [2:7591331144342056002:2298]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:41:56.167885Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:41:56.167919Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:7591331144342056099:2312], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:41:56.201642Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:7591331144342056169:2445] 2026-01-04T02:41:56.201706Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:7591331144342056169:2445], schemeshard id = 72075186224037897 2026-01-04T02:41:56.238441Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:7591331144342056003:2299] Owner: [2:7591331144342056002:2298]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:41:56.240951Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:7591331144342056270:2535], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:41:56.247481Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:56.254032Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:7591331144342056003:2299] Owner: [2:7591331144342056002:2298]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:41:56.254082Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:7591331144342056003:2299] Owner: [2:7591331144342056002:2298]. Subscribe on create table tx: 281474976720658 2026-01-04T02:41:56.273685Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:759133114434205600 ... 25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:44:24.414767Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:44:24.470541Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:44:24.470581Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:44:24.470592Z node 7 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:44:24.470702Z node 7 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:44:24.562730Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:44:25.014330Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:25.038133Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:44:25.135224Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:44:25.284934Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:44:25.343603Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:44:25.429407Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:44:28.527178Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7591331797104508705:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:28.527335Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:28.527723Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7591331797104508714:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:28.527826Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:28.615018Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:44:28.664395Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:44:28.703443Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:44:28.745320Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:44:28.798879Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:44:28.846385Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:44:28.891396Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:44:28.991339Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:44:29.087614Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7591331801399476882:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:29.087721Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:29.087748Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7591331801399476887:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:29.087932Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7591331801399476889:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:29.087995Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:29.093178Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:44:29.108338Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [7:7591331801399476890:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:44:29.163573Z node 7 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [7:7591331801399476942:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:44:29.217836Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[7:7591331779924637673:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:44:29.217968Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Logs"],"PlanNodeId":1,"Operators":[{"Inputs":[{"InternalOperatorId":1}],"Name":"Limit","Limit":"10"},{"Scan":"Parallel","ReadRange":["App (new_app_1)","Ts (49)","Host (null, xyz)"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/Logs","E-Rows":"1","Table":"Logs","ReadColumns":["App","Host","Message","Ts"],"E-Cost":"0"}],"Node Type":"Limit-TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/Logs","reads":[{"lookup_by":["App (new_app_1)","Ts (49)"],"columns":["App","Host","Message","Ts"],"scan_by":["Host (null, xyz)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":5,"Operators":[{"ReadRange":["App (new_app_1)","Ts (49)","Host (null, xyz)"],"E-Size":"0","Name":"TableRangeScan","E-Rows":"1","Table":"Logs","ReadColumns":["App","Host","Message","Ts"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Operators":[{"Name":"Limit","Limit":"10"}],"Node Type":"Limit"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxySimpleDiscoverMaxi [GOOD] |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TCacheTest::MigrationDeletedPathNavigate [GOOD] >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyLongTailDiscoverMaxi [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestGetFail [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::LOGBROKER_7702 [GOOD] Test command err: 2026-01-04T02:44:34.432579Z :DEBUG: [db] [sessionid] [cluster] 
In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.432629Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.432661Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:34.433155Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:34.435471Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:34.449244Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.451844Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:44:34.455203Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.455223Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.455242Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:34.455508Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:34.456021Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:34.456251Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.456400Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:44:34.456661Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-04T02:44:34.457462Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.457486Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.457538Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:34.457797Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:34.458365Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:34.458511Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.458658Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:44:34.463745Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.466353Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:44:34.466556Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:34.466631Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 0 bytes 2026-01-04T02:44:34.468105Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.468133Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.468176Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:34.468870Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:34.469342Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:34.469544Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.469857Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 11 Compressed message data size: 31 2026-01-04T02:44:34.470904Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:44:34.471145Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-01-04T02:44:34.471468Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-01-04T02:44:34.471712Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-01-04T02:44:34.471835Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:34.471895Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-04T02:44:34.471927Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-04T02:44:34.472486Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2026-01-04T02:44:34.473247Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-04T02:44:34.473276Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-01-04T02:44:34.473308Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-04T02:44:34.473461Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2026-01-04T02:44:34.473525Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-01-04T02:44:34.473550Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2026-01-04T02:44:34.473578Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-04T02:44:34.473654Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2026-01-04T02:44:34.473701Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-01-04T02:44:34.473731Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-01-04T02:44:34.473760Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 22 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..11 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-04T02:44:34.473884Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2026-01-04T02:44:34.475406Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.475444Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.475510Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:34.475836Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:34.476485Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:34.476647Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.476892Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. 
Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2026-01-04T02:44:34.477844Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:44:34.478073Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-01-04T02:44:34.480039Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-01-04T02:44:34.480250Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-01-04T02:44:34.480363Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:34.480413Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-04T02:44:34.480437Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-04T02:44:34.480538Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-01-04T02:44:34.480573Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-04T02:44:34.480759Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 5). Partition stream id: 1 GOT RANGE 0 5 Getting new event 2026-01-04T02:44:34.480855Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-01-04T02:44:34.480873Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2026-01-04T02:44:34.480904Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-01-04T02:44:34.480923Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-01-04T02:44:34.480946Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 4, size 40 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-04T02:44:34.481091Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 9). Partition stream id: 1 GOT RANGE 5 9 2026-01-04T02:44:34.482104Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.482120Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.482144Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:34.482528Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:34.482928Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:34.483078Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.483293Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:44:34.484337Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:44:34.485222Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:44:34.485475Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (10-11) 2026-01-04T02:44:34.485566Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-04T02:44:34.485673Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:34.485706Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-04T02:44:34.485734Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (10-10) 2026-01-04T02:44:34.485759Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (11-11) 2026-01-04T02:44:34.485824Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes 2026-01-04T02:44:34.485857Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 16 bytes got data event: DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..8 bytes.. 
Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 11 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2026-01-04T02:44:34.486107Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 end_offset: 3 } } RANGE 0 3 2026-01-04T02:44:34.486258Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 12). Partition stream id: 1 Got commit req { offset_ranges { assign_id: 1 start_offset: 3 end_offset: 12 } } RANGE 3 12 >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-clusteradmin >> TConsoleTests::TestRemoveTenant [GOOD] >> TConsoleTests::TestRemoveTenantExtSubdomain |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-ordinaryuser >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTest::MigrationDeletedPathNavigate [GOOD] Test command err: 2026-01-04T02:44:04.935711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:04.935768Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... 
waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 
5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... s/top_queries_by_read_bytes_one_minute', reason: Schema is in ReadOnly mode 2026-01-04T02:44:06.518421Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720669 2026-01-04T02:44:06.518458Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720670 2026-01-04T02:44:06.518504Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/resource_pool_classifiers', reason: Schema is in ReadOnly mode 2026-01-04T02:44:06.518706Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/nodes', reason: Schema is in ReadOnly mode 2026-01-04T02:44:06.518841Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720671 2026-01-04T02:44:06.518928Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_cpu_time_one_hour', reason: Schema is in ReadOnly mode 2026-01-04T02:44:06.519013Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720672 2026-01-04T02:44:06.519046Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_by_tli_one_minute', reason: Schema is in ReadOnly mode 2026-01-04T02:44:06.519191Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720673 2026-01-04T02:44:06.519233Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720674 2026-01-04T02:44:06.519257Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: 
SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_cpu_time_one_minute', reason: Schema is in ReadOnly mode 2026-01-04T02:44:06.519374Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720675 2026-01-04T02:44:06.519427Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_by_tli_one_hour', reason: Schema is in ReadOnly mode 2026-01-04T02:44:06.519481Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720676 2026-01-04T02:44:06.519551Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_request_units_one_hour', reason: Schema is in ReadOnly mode 2026-01-04T02:44:06.519800Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720677 2026-01-04T02:44:06.519866Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_one_hour', reason: Schema is in ReadOnly mode 2026-01-04T02:44:06.519893Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/pg_tables', reason: Schema is in ReadOnly mode 2026-01-04T02:44:06.519960Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720678 2026-01-04T02:44:06.520013Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_queries_by_duration_one_minute', reason: Schema is in ReadOnly mode 2026-01-04T02:44:06.520046Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720679 2026-01-04T02:44:06.520078Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_group_members', reason: Schema is in ReadOnly mode 2026-01-04T02:44:06.520136Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720680 2026-01-04T02:44:06.520183Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_users', reason: Schema is in ReadOnly mode 2026-01-04T02:44:06.520245Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720681 2026-01-04T02:44:06.520275Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720682 2026-01-04T02:44:06.520301Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/resource_pools', reason: Schema is in ReadOnly mode 2026-01-04T02:44:06.520331Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/streaming_queries', reason: Schema is in ReadOnly mode 2026-01-04T02:44:06.520370Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720683 2026-01-04T02:44:06.520398Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720684 2026-01-04T02:44:06.520432Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_permissions', reason: Schema is in ReadOnly mode 2026-01-04T02:44:06.520488Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720685 2026-01-04T02:44:06.520533Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/top_partitions_one_minute', reason: Schema is in ReadOnly mode 2026-01-04T02:44:06.520569Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_effective_permissions', reason: Schema is in ReadOnly mode 2026-01-04T02:44:06.520705Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6049: Schema modification rejected because of ReadOnly mode, at tablet: 72075186233409549 txid: 281474976720686 2026-01-04T02:44:06.520869Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard_sysviews_update.cpp:189: SysViewsRosterUpdate# [2:694:2599] at schemeshard: 72075186233409549, failed to create sys view '/Root/USER_0/.sys/auth_groups', reason: Schema is in ReadOnly mode TestWaitNotification: OK eventTxId 105 TestModificationResults wait txId: 106 2026-01-04T02:44:06.521508Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5805: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-04T02:44:06.521560Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5805: Mark as Migrated path id [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:44:06.521947Z node 2 :FLAT_TX_SCHEMESHARD ERROR: schemeshard__operation_upgrade_subdomain.cpp:1466: TWait ProgressState, dependent transaction: 106, parent transaction: 105, at schemeshard: 72057594046678944 2026-01-04T02:44:06.522099Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpUpgradeSubDomainDecision, opId: 106:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_upgrade_subdomain.cpp:573) 
TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-04T02:44:06.539497Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6292: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 2026-01-04T02:44:06.539824Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:6292: Got TEvUpdateAck for unknown txId 105, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 106 TestModificationResults wait txId: 107 TestModificationResult got TxId: 107, wait until txId: 107 TestWaitNotification wait txId: 107 TestWaitNotification: OK eventTxId 107 TestModificationResults wait txId: 108 2026-01-04T02:44:06.578337Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 108:0, at schemeshard: 72075186233409549, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72075186233409549 OwnerIdx: 4 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 2 BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:782:2067] recipient: [2:777:2670] IGNORE Leader for TabletID 72075186233409550 is [0:0:0] sender: [2:782:2067] recipient: [2:777:2670] Leader for TabletID 72075186233409550 is [2:784:2673] sender: [2:785:2067] recipient: [2:777:2670] TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 Leader for TabletID 72075186233409550 is [2:784:2673] sender: [2:813:2067] recipient: [2:24:2071] TestWaitNotification: OK eventTxId 108 2026-01-04T02:44:08.892555Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:44:08.892655Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:08.946228Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:44:08.946298Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSingle [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects >> TKesusTest::TestRegisterProxy >> TKesusTest::TestQuoterResourceDescribe >> TKesusTest::TestAcquireLocks >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-clusteradmin >> TConsoleConfigTests::TestManageValidators [GOOD] >> TConsoleConfigTests::TestDryRun >> TKesusTest::TestAcquireSemaphoreTimeout >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] >> ReadSessionImplTest::PacksBatches_BigBatchDecompressWithTwoBatchTasks [GOOD] >> 
ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestPutGetStatusErasure4Plus2Stripe [GOOD] |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> ReadSessionImplTest::PacksBatches_DecompressesOneMessagePerTime [GOOD] >> ReadSessionImplTest::PartitionStreamStatus [GOOD] >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] >> TKesusTest::TestRegisterProxy [GOOD] >> TKesusTest::TestRegisterProxyBadGeneration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadGroupBy+SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; -- group by key: 0 2026-01-04T02:42:47.249945Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:42:47.277482Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:42:47.277713Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:42:47.285939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:42:47.286166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:42:47.286405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:42:47.286532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:42:47.286650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:42:47.286738Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:42:47.286826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:42:47.286964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:42:47.287088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2026-01-04T02:42:47.287210Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.287320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:42:47.287475Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:42:47.287616Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:42:47.312004Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:42:47.312149Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:42:47.312190Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:42:47.312342Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.314219Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:42:47.314342Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:42:47.314407Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:42:47.314568Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:42:47.314676Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:42:47.314752Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:42:47.314829Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:42:47.315076Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:42:47.315206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:42:47.315262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:42:47.315306Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:42:47.315445Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:42:47.315520Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:42:47.315574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:42:47.315609Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:42:47.315717Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:42:47.315772Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:42:47.315840Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:42:47.315902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:42:47.315959Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:42:47.315999Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:42:47.316355Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:42:47.316429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:42:47.316462Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:42:47.316650Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:42:47.316718Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.316784Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:42:47.316864Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:42:47.316925Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:42:47.316993Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:42:47.317067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:42:47.317125Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:42:47.317223Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:42:47.317410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:42:47.317469Z node 1 :TX_COLUMNSHARD WAR ... 
ults;result=1;count=1;finished=1; 2026-01-04T02:44:32.591734Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:44:32.591795Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:44:32.592343Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:44:32.592555Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:44:32.592602Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:44:32.592749Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2026-01-04T02:44:32.592818Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2026-01-04T02:44:32.593111Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[54:456:2468];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2026-01-04T02:44:32.593275Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:44:32.593424Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:44:32.593541Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:44:32.593761Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:44:32.593912Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:44:32.594076Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:44:32.594292Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [54:457:2469] finished for tablet 9437184 2026-01-04T02:44:32.594804Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[54:456:2468];stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.011}],"full":{"a":105919424,"name":"_full_task","f":105919424,"d_finished":0,"c":0,"l":105931420,"d":11996},"events":[{"name":"bootstrap","f":105919776,"d_finished":1355,"c":1,"l":105921131,"d":1355},{"a":105930804,"name":"ack","f":105929387,"d_finished":1236,"c":1,"l":105930623,"d":1852},{"a":105930794,"name":"processing","f":105921294,"d_finished":3405,"c":3,"l":105930625,"d":4031},{"name":"ProduceResults","f":105920692,"d_finished":2284,"c":6,"l":105931161,"d":2284},{"a":105931164,"name":"Finish","f":105931164,"d_finished":0,"c":0,"l":105931420,"d":256},{"name":"task_result","f":105921312,"d_finished":2111,"c":2,"l":105928883,"d":2111}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:44:32.594873Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[54:456:2468];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:44:32.595303Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[54:456:2468];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap"],"t":0},{"events":["l_bootstrap","f_processing","f_ProduceResults","f_task_result"],"t":0.001},{"events":["f_ack","l_task_result"],"t":0.009},{"events":["l_ProduceResults","f_Finish"],"t":0.011},{"events":["l_ack","l_processing","l_Finish"],"t":0.012}],"full":{"a":105919424,"name":"_full_task","f":105919424,"d_finished":0,"c":0,"l":105931967,"d":12543},"events":[{"name":"bootstrap","f":105919776,"d_finished":1355,"c":1,"l":105921131,"d":1355},{"a":105930804,"name":"ack","f":105929387,"d_finished":1236,"c":1,"l":105930623,"d":2399},{"a":105930794,"name":"processing","f":105921294,"d_finished":3405,"c":3,"l":105930625,"d":4578},{"name":"ProduceResults","f":105920692,"d_finished":2284,"c":6,"l":105931161,"d":2284},{"a":105931164,"name":"Finish","f":105931164,"d_finished":0,"c":0,"l":105931967,"d":803},{"name":"task_result","f":105921312,"d_finished":2111,"c":2,"l":105928883,"d":2111}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:44:32.595366Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:44:32.579717Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2026-01-04T02:44:32.595421Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:44:32.595563Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;; >> TKesusTest::TestAcquireWaiterDowngrade >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateResourceSessions [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestStopConsuming [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionState [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestUpdateConsumptionStateAfterAllResourceAllocated [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] >> TKesusTest::TestQuoterResourceDescribe [GOOD] >> TKesusTest::TestQuoterResourceCreation >> THDRRQuoterResourceTreeRuntimeTest::TestCreateInactiveSession [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceSessions [GOOD] >> 
THDRRQuoterResourceTreeRuntimeTest::TestDistributeResourcesBetweenConsumers [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestEffectiveProps [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TKesusTest::TestRegisterProxyBadGeneration [GOOD] >> TKesusTest::TestRegisterProxyFromDeadActor >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-dbadmin >> TKesusTest::TestSessionTimeoutAfterDetach ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_cache/unittest >> TCacheTestWithDrops::LookupErrorUponEviction [GOOD] Test command err: 2026-01-04T02:44:04.657555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:04.657624Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished FAKE_COORDINATOR: Add transaction: 281474976710657 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710657 at step: 5000001 FAKE_COORDINATOR: Erasing txId 281474976710657 FAKE_COORDINATOR: Add transaction: 281474976710671 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710671 at step: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710666 at step: 5000003 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710670 at step: 5000004 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710665 at step: 5000005 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710669 at step: 5000006 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710664 at step: 5000007 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710668 at step: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 
FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 
FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for 
txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: ... 
tep: 5000008 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710663 at step: 5000009 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710667 at step: 5000010 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710662 at step: 5000011 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710661 at step: 5000012 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710660 at step: 5000013 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710659 at step: 5000014 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710658 at step: 5000015 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710692 at step: 5000016 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710687 at step: 5000017 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710691 at step: 5000018 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710686 at step: 5000019 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710690 at step: 5000020 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710685 at step: 5000021 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710689 at step: 5000022 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710684 at step: 5000023 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710688 at step: 5000024 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710683 at step: 5000025 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710682 at step: 5000026 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710677 at step: 5000027 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710681 at step: 5000028 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710676 at step: 5000029 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710680 at step: 5000030 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710675 at step: 5000031 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710679 at step: 5000032 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710674 at step: 5000033 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710678 at step: 5000034 
FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710673 at step: 5000035 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Add transaction: 281474976710672 at step: 5000036 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000002 FAKE_COORDINATOR: Erasing txId 281474976710671 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000002 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710666 at step: 5000003 FAKE_COORDINATOR: Erasing txId 281474976710666 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000003 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710670 at step: 5000004 FAKE_COORDINATOR: Erasing txId 281474976710670 FAKE_COORDINATOR: advance: minStep5000005 State->FrontStep: 5000004 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710665 at step: 5000005 FAKE_COORDINATOR: Erasing txId 281474976710665 FAKE_COORDINATOR: advance: minStep5000006 State->FrontStep: 5000005 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710669 at step: 5000006 FAKE_COORDINATOR: Erasing txId 281474976710669 FAKE_COORDINATOR: advance: minStep5000007 State->FrontStep: 5000006 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710664 at step: 5000007 FAKE_COORDINATOR: Erasing txId 281474976710664 FAKE_COORDINATOR: advance: minStep5000008 State->FrontStep: 5000007 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710668 at step: 5000008 FAKE_COORDINATOR: Erasing txId 281474976710668 FAKE_COORDINATOR: advance: minStep5000009 State->FrontStep: 5000008 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710663 at step: 5000009 FAKE_COORDINATOR: Erasing txId 281474976710663 FAKE_COORDINATOR: advance: minStep5000010 State->FrontStep: 5000009 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710667 at step: 5000010 FAKE_COORDINATOR: Erasing txId 281474976710667 FAKE_COORDINATOR: advance: minStep5000011 State->FrontStep: 5000010 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710662 at step: 5000011 FAKE_COORDINATOR: Erasing txId 281474976710662 FAKE_COORDINATOR: advance: minStep5000012 State->FrontStep: 5000011 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710661 at step: 5000012 FAKE_COORDINATOR: Erasing txId 281474976710661 FAKE_COORDINATOR: advance: minStep5000013 State->FrontStep: 5000012 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710660 at step: 5000013 FAKE_COORDINATOR: Erasing txId 281474976710660 FAKE_COORDINATOR: advance: minStep5000014 State->FrontStep: 5000013 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710659 at step: 5000014 FAKE_COORDINATOR: Erasing txId 281474976710659 FAKE_COORDINATOR: advance: minStep5000015 State->FrontStep: 5000014 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710658 at step: 5000015 FAKE_COORDINATOR: Erasing txId 281474976710658 FAKE_COORDINATOR: advance: minStep5000016 State->FrontStep: 5000015 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710692 at step: 5000016 FAKE_COORDINATOR: Erasing txId 281474976710692 FAKE_COORDINATOR: advance: minStep5000017 State->FrontStep: 5000016 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710687 at step: 5000017 
FAKE_COORDINATOR: Erasing txId 281474976710687 FAKE_COORDINATOR: advance: minStep5000018 State->FrontStep: 5000017 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710691 at step: 5000018 FAKE_COORDINATOR: Erasing txId 281474976710691 FAKE_COORDINATOR: advance: minStep5000019 State->FrontStep: 5000018 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710686 at step: 5000019 FAKE_COORDINATOR: Erasing txId 281474976710686 FAKE_COORDINATOR: advance: minStep5000020 State->FrontStep: 5000019 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710690 at step: 5000020 FAKE_COORDINATOR: Erasing txId 281474976710690 FAKE_COORDINATOR: advance: minStep5000021 State->FrontStep: 5000020 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710685 at step: 5000021 FAKE_COORDINATOR: Erasing txId 281474976710685 FAKE_COORDINATOR: advance: minStep5000022 State->FrontStep: 5000021 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710689 at step: 5000022 FAKE_COORDINATOR: Erasing txId 281474976710689 FAKE_COORDINATOR: advance: minStep5000023 State->FrontStep: 5000022 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710684 at step: 5000023 FAKE_COORDINATOR: Erasing txId 281474976710684 FAKE_COORDINATOR: advance: minStep5000024 State->FrontStep: 5000023 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710688 at step: 5000024 FAKE_COORDINATOR: Erasing txId 281474976710688 FAKE_COORDINATOR: advance: minStep5000025 State->FrontStep: 5000024 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710683 at step: 5000025 FAKE_COORDINATOR: Erasing txId 281474976710683 FAKE_COORDINATOR: advance: minStep5000026 State->FrontStep: 5000025 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710682 at step: 5000026 FAKE_COORDINATOR: Erasing txId 281474976710682 FAKE_COORDINATOR: advance: minStep5000027 State->FrontStep: 5000026 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710677 at step: 5000027 FAKE_COORDINATOR: Erasing txId 281474976710677 FAKE_COORDINATOR: advance: minStep5000028 State->FrontStep: 5000027 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710681 at step: 5000028 FAKE_COORDINATOR: Erasing txId 281474976710681 FAKE_COORDINATOR: advance: minStep5000029 State->FrontStep: 5000028 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710676 at step: 5000029 FAKE_COORDINATOR: Erasing txId 281474976710676 FAKE_COORDINATOR: advance: minStep5000030 State->FrontStep: 5000029 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710680 at step: 5000030 FAKE_COORDINATOR: Erasing txId 281474976710680 FAKE_COORDINATOR: advance: minStep5000031 State->FrontStep: 5000030 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710675 at step: 5000031 FAKE_COORDINATOR: Erasing txId 281474976710675 FAKE_COORDINATOR: advance: minStep5000032 State->FrontStep: 5000031 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710679 at step: 5000032 FAKE_COORDINATOR: Erasing txId 281474976710679 FAKE_COORDINATOR: advance: minStep5000033 State->FrontStep: 5000032 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710674 at step: 5000033 FAKE_COORDINATOR: Erasing txId 281474976710674 FAKE_COORDINATOR: advance: minStep5000034 State->FrontStep: 5000033 
FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710678 at step: 5000034 FAKE_COORDINATOR: Erasing txId 281474976710678 FAKE_COORDINATOR: advance: minStep5000035 State->FrontStep: 5000034 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710673 at step: 5000035 FAKE_COORDINATOR: Erasing txId 281474976710673 FAKE_COORDINATOR: advance: minStep5000036 State->FrontStep: 5000035 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 281474976710672 at step: 5000036 FAKE_COORDINATOR: Erasing txId 281474976710672 ... waiting for SysViewsRoster update finished (done) TestModificationResults wait txId: 1 2026-01-04T02:44:05.801847Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) FAKE_COORDINATOR: Add transaction: 1 at step: 5000037 FAKE_COORDINATOR: advance: minStep5000037 State->FrontStep: 5000036 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000037 FAKE_COORDINATOR: Erasing txId 1 TestModificationResult got TxId: 1, wait until txId: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::PartitionStreamCallbacks [GOOD] Test command err: 2026-01-04T02:44:34.470503Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.470563Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.470583Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:34.471300Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:34.471959Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:34.482285Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.482625Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:44:34.483836Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:44:34.484297Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:44:34.485996Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2026-01-04T02:44:34.486103Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:44:34.486220Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:34.486265Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2026-01-04T02:44:34.486314Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-04T02:44:34.486341Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes 2026-01-04T02:44:34.487615Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.487643Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.487667Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:34.488015Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:34.488465Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:34.488616Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.488795Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 10 Compressed message data size: 30 2026-01-04T02:44:34.489712Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:44:34.489928Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-01-04T02:44:34.490221Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-01-04T02:44:34.490455Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-01-04T02:44:34.490562Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:34.490596Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-04T02:44:34.490633Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-04T02:44:34.490724Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 GOT RANGE 0 3 Getting new event 2026-01-04T02:44:34.490806Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-04T02:44:34.490827Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-01-04T02:44:34.490840Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-04T02:44:34.490941Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 5). Partition stream id: 1 GOT RANGE 3 5 Getting new event 2026-01-04T02:44:34.490987Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (5-5) 2026-01-04T02:44:34.491010Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (6-6) 2026-01-04T02:44:34.491023Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-04T02:44:34.491103Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [5, 7). Partition stream id: 1 GOT RANGE 5 7 Getting new event 2026-01-04T02:44:34.491131Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (7-7) 2026-01-04T02:44:34.491152Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (8-8) 2026-01-04T02:44:34.491165Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 20 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..10 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-04T02:44:34.491221Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [7, 9). Partition stream id: 1 GOT RANGE 7 9 2026-01-04T02:44:34.492307Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.492323Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.492387Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:34.492843Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:34.493223Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:34.493348Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.493558Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. 
Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 100 Compressed message data size: 91 2026-01-04T02:44:34.494437Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:44:34.494652Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function Getting new event 2026-01-04T02:44:34.494910Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (5-8) 2026-01-04T02:44:34.495100Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-4) 2026-01-04T02:44:34.495202Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:34.495242Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-04T02:44:34.495332Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 GOT RANGE 0 2 Getting new event 2026-01-04T02:44:34.495403Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-04T02:44:34.495421Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-04T02:44:34.495465Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [2, 3). Partition stream id: 1 GOT RANGE 2 3 Getting new event 2026-01-04T02:44:34.495506Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-04T02:44:34.495523Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 100 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..100 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } } 2026-01-04T02:44:34.495585Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [3, 4). Partition stream id: 1 GOT RANGE 3 4 Getting new event 2026-01-04T02:44:34.495655Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (4-4) 2026-01-04T02:44:34.495675Z :DEBUG: [db] [sessionid] [cluster] The application data ... er". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 190 SeqNo: 231 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 191 SeqNo: 232 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 192 SeqNo: 233 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 193 SeqNo: 234 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 194 SeqNo: 235 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 195 SeqNo: 236 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 196 SeqNo: 237 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 197 SeqNo: 238 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 198 SeqNo: 239 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 199 SeqNo: 240 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 200 SeqNo: 241 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-04T02:44:36.481566Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 201). 
Partition stream id: 1 GOT RANGE 0 201 2026-01-04T02:44:36.555154Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-01-04T02:44:36.555217Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-01-04T02:44:36.555287Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:36.555893Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:36.556699Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:36.556906Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 2026-01-04T02:44:36.557162Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) Message data size: 1000000 Compressed message data size: 3028 Post function Getting new event 2026-01-04T02:44:36.632853Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-10) 2026-01-04T02:44:36.633963Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:36.635901Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-04T02:44:36.646913Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-04T02:44:36.647744Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2026-01-04T02:44:36.652484Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2026-01-04T02:44:36.653346Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2026-01-04T02:44:36.654207Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (7-7) 2026-01-04T02:44:36.655201Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (8-8) 2026-01-04T02:44:36.665429Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (9-9) 2026-01-04T02:44:36.666305Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (10-10) 2026-01-04T02:44:36.666374Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 10, size 10000000 bytes 2026-01-04T02:44:36.666547Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 5, ReadSizeServerDelta = 0 DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 42 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 43 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 44 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". 
Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 4 SeqNo: 45 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 5 SeqNo: 46 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 6 SeqNo: 47 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:02:03.000000Z Ip: "127.0.0.1" UncompressedSize: 0 Meta: { "k1": "v1", "k": "v" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 7 SeqNo: 48 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 8 SeqNo: 49 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 9 SeqNo: 50 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } Message { Data: ..1000000 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 10 SeqNo: 51 MessageGroupId: "src_id_2" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:05:21.000000Z Ip: "1.0.0.127" UncompressedSize: 0 Meta: { "v1": "k1", "v": "k" } } } } 2026-01-04T02:44:36.670278Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 11). Partition stream id: 1 GOT RANGE 0 11 2026-01-04T02:44:36.679622Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:36.679657Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:36.679696Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:36.679959Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:36.680294Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:36.680435Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:36.680627Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. 
Read offset: (NULL) 2026-01-04T02:44:36.681114Z :DEBUG: [db] [sessionid] [cluster] Requesting status for partition stream id: 1 2026-01-04T02:44:36.682250Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:36.682295Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:36.682324Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:36.682693Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:36.683200Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:36.683310Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:36.683895Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:36.684072Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:44:36.684211Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:36.684258Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-04T02:44:36.684399Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 >> TKesusTest::TestAcquireWaiterDowngrade [GOOD] >> TKesusTest::TestAcquireWaiterUpgrade >> TConsoleConfigTests::TestDryRun [GOOD] >> TConsoleConfigTests::TestAffectedConfigs |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_cache/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestVeryBigWeights [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestDeleteResourceWithActiveChildren [GOOD] |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestRegisterProxyFromDeadActor [GOOD] >> TKesusTest::TestRegisterProxyLinkFailure >> TKesusTest::TestQuoterResourceCreation [GOOD] >> TKesusTest::TestQuoterResourceModification >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-anonymous >> TKesusTest::TestAcquireWaiterUpgrade [GOOD] >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeDatabaseQuotaRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:36.180529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 
2026-01-04T02:44:36.180615Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:36.180643Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:36.180666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:36.180696Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:36.180720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:36.180760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:36.180811Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:36.181439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:36.181626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:36.266672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:36.266726Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:36.280779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:36.280995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:36.281158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:36.288455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:36.288660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:36.289372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:36.289629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:36.292391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:36.292575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:36.293634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:36.293696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard 
DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:36.293795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:36.293856Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:36.293968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:36.294159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:36.300815Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:44:36.417836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:36.418084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:36.418293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:44:36.418338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:44:36.418499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:44:36.418610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:36.421275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:36.421481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:44:36.421737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:36.421827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:44:36.421866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no 
shards to create, do next state 2026-01-04T02:44:36.421901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:44:36.424208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:36.424278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:44:36.424341Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:44:36.426362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:36.426409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:36.426461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:36.426513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:44:36.430279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:36.432209Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:44:36.432394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:44:36.433448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:36.433594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:36.433647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:36.433975Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:44:36.434035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:36.434241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:36.434339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:44:36.438214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:36.438263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... eTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 0 TabletID: 72075186233409550 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 5 TxId_Deprecated: 0 TabletID: 72075186233409550 2026-01-04T02:44:36.978377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2026-01-04T02:44:36.978638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2026-01-04T02:44:36.979872Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 Forgetting tablet 72075186233409550 2026-01-04T02:44:36.980582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:36.980754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 2026-01-04T02:44:36.981439Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 Forgetting tablet 72075186233409546 2026-01-04T02:44:36.982388Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-04T02:44:36.983139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-04T02:44:36.983363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186233409548 2026-01-04T02:44:36.984613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-04T02:44:36.984788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-04T02:44:36.985422Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 Forgetting tablet 72075186233409547 Forgetting tablet 72075186233409549 2026-01-04T02:44:36.987107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-04T02:44:36.987287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2026-01-04T02:44:36.987757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-04T02:44:36.989405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-04T02:44:36.989612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:44:36.989669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2026-01-04T02:44:36.989737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-04T02:44:36.990391Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:44:36.990460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:44:36.990581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-04T02:44:36.991414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-04T02:44:36.991458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-01-04T02:44:36.991590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:44:36.991638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 
2026-01-04T02:44:36.994440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-04T02:44:36.994496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-04T02:44:36.994598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:44:36.994622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-04T02:44:36.994720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-04T02:44:36.994781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-04T02:44:36.994993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-04T02:44:36.995098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:44:36.995172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:44:36.995217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:44:36.995287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:36.997128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-04T02:44:36.997438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-04T02:44:36.997479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-04T02:44:36.998077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-04T02:44:36.998181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-04T02:44:36.998218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:790:2692] TestWaitNotification: OK eventTxId 106 2026-01-04T02:44:36.998898Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 
2026-01-04T02:44:36.999093Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 211us result status StatusSuccess 2026-01-04T02:44:36.999559Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TA] $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |92.9%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... results_accumulator.log} |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Delete |92.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_cache/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKesusTest::TestRegisterProxyLinkFailure [GOOD] >> TKesusTest::TestRegisterProxyLinkFailureRace >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-clusteradmin >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKUtf8 [GOOD] Test command err: 2026-01-04T02:43:06.215808Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:06.248158Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:06.248375Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:06.254643Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:06.254916Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:06.255132Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:06.255274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:06.255386Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:06.255518Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:06.255656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:06.255778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:06.255933Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 
2026-01-04T02:43:06.256099Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:06.256229Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:06.256373Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:06.256493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:06.287079Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:06.287231Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:06.287280Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:06.287477Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:06.287701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:06.287802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:06.287844Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:06.287950Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:06.288011Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:06.288053Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:06.288099Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:06.288288Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:06.288344Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:06.288379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:06.288406Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:06.288496Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:06.288556Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:06.288605Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:06.288632Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:06.288677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:06.288714Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:06.288746Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:06.288799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:06.288859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:06.288893Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:06.289111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:06.289157Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:06.289189Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:06.289313Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:06.289362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:06.289389Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:06.289431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:06.289468Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:06.289506Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:43:06.289568Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:06.289613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:06.289644Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:06.289798Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:06.289874Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... 
ngines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=5833; 2026-01-04T02:44:34.148910Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=14; 2026-01-04T02:44:34.150056Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1081; 2026-01-04T02:44:34.150142Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=7345; 2026-01-04T02:44:34.150199Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=7507; 2026-01-04T02:44:34.150268Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=14; 2026-01-04T02:44:34.150364Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=48; 2026-01-04T02:44:34.150413Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=8258; 2026-01-04T02:44:34.150615Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=133; 2026-01-04T02:44:34.150780Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=113; 2026-01-04T02:44:34.150998Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=151; 2026-01-04T02:44:34.151181Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=134; 2026-01-04T02:44:34.151885Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=650; 2026-01-04T02:44:34.153049Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1094; 2026-01-04T02:44:34.153136Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=13; 2026-01-04T02:44:34.153192Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2026-01-04T02:44:34.153237Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=7; 2026-01-04T02:44:34.153339Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=58; 2026-01-04T02:44:34.153391Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-04T02:44:34.153496Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=66; 2026-01-04T02:44:34.153543Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=7; 2026-01-04T02:44:34.153633Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=44; 2026-01-04T02:44:34.153739Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=56; 2026-01-04T02:44:34.153848Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=70; 2026-01-04T02:44:34.153891Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=23033; 2026-01-04T02:44:34.154078Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=126218384;raw_bytes=174224032;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:44:34.154198Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:44:34.154269Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:44:34.154376Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:44:34.154465Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:44:34.154631Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:44:34.154731Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:44:34.154786Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:44:34.154862Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:44:34.154937Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:44:34.154998Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:34.155058Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:34.155183Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:44:34.155402Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.164000s; 2026-01-04T02:44:34.157174Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:44:34.158463Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:44:34.158549Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:44:34.158629Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:44:34.158683Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:44:34.158741Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:44:34.158816Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:44:34.158891Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:34.158954Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:34.159083Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-04T02:44:34.159163Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:44:34.159521Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.123000s; 2026-01-04T02:44:34.159573Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TKesusTest::TestAcquireWaiterChangeTimeoutToZero [GOOD] >> TKesusTest::TestAcquireWaiterRelease >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-ordinaryuser >> TKesusTest::TestQuoterResourceModification [GOOD] >> TKesusTest::TestQuoterResourceDeletion >> TConsoleConfigSubscriptionTests::TestNotificationForRestartedClient [GOOD] >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency >> TConsoleConfigTests::TestAffectedConfigs [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-ordinaryuser >> TKesusTest::TestAcquireWaiterRelease [GOOD] >> TKesusTest::TestAllocatesResources >> TSchemeShardSubDomainTest::Delete [GOOD] >> TSchemeShardSubDomainTest::DeleteAdd >> PersQueueSdkReadSessionTest::SettingsValidation |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestProxyRestoreOnGetMirror3Plus2 [GOOD] |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TKesusTest::TestKesusConfig >> TKesusTest::TestUnregisterProxy >> TKesusTest::TestQuoterResourceDeletion [GOOD] >> TKesusTest::TestQuoterSubscribeOnResource >> THDRRQuoterResourceTreeRuntimeTest::TestHierarchicalQuotas [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestHangDefence [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestMoreStrongChildLimit [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorSubProcessor [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorTemporary >> TKesusTest::TestAllocatesResources [GOOD] 
>> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-ordinaryuser >> TBlobStorageProxyTest::TestDoubleGroups ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleConfigTests::TestAffectedConfigs [GOOD] Test command err: 2026-01-04T02:44:08.136780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:08.136854Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:08.302548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:09.345455Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:09.345511Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:09.417721Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:10.363197Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:10.363282Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:10.449233Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:11.407298Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:11.407364Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:11.490795Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:12.369344Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:44:12.369414Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:12.465290Z node 4 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1206: Couldn't deliver config notification for subscription id=1 tabletid=8651011 serviceid=[0:0:0] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2026-01-04T02:44:12.866905Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:12.866965Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:12.935531Z node 5 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:13.852500Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:44:13.852594Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:13.958623Z node 5 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1206: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[5:8246204620103118691:7960687] nodeid=1 host=host1 tenant=tenant1 nodetype=type1 kinds=2 lastprovidedconfig= 2026-01-04T02:44:14.644766Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:14.644828Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:14.723278Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:18.336883Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:18.336976Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:18.434750Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:21.809304Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:21.809385Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:21.908766Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:23.094435Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:23.094498Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:23.183659Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:24.369128Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:24.369206Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:24.460274Z node 10 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:25.726152Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:25.726243Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:25.831472Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:27.037160Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:27.037241Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:27.129747Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:28.384711Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:28.384812Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:28.482423Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:29.696317Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:29.696418Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:29.786929Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:30.844791Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:30.844879Z node 15 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:30.951385Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:32.244692Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:32.244783Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-04T02:44:32.369840Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:33.606397Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:33.606481Z node 17 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:33.714406Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:34.961100Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:34.961180Z node 18 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:35.074082Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:36.525643Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:36.525708Z node 19 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:36.643530Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:37.880630Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:37.880714Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:37.977153Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] >> TKesusTest::TestUnregisterProxy [GOOD] >> TKesusTest::TestUnregisterProxyBadGeneration |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestInactiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-dbadmin >> TKesusTest::TestKesusConfig [GOOD] >> TKesusTest::TestLockNotFound >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> 
TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAllocatesResources [GOOD] Test command err: 2026-01-04T02:44:37.180527Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:37.180631Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:37.193499Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:37.193586Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:37.218392Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:37.218981Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:134:2159], cookie=351274069186984156, session=0, seqNo=0) 2026-01-04T02:44:37.219161Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:44:37.241652Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:134:2159], cookie=351274069186984156, session=1) 2026-01-04T02:44:37.241958Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:134:2159], cookie=14756007494997083808, session=0, seqNo=0) 2026-01-04T02:44:37.242053Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-04T02:44:37.253925Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:134:2159], cookie=14756007494997083808, session=2) 2026-01-04T02:44:37.254213Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:134:2159], cookie=111, session=1, semaphore="Lock1" count=1) 2026-01-04T02:44:37.254374Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-04T02:44:37.254463Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-04T02:44:37.267985Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:134:2159], cookie=111) 2026-01-04T02:44:37.268357Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:134:2159], cookie=222, session=2, semaphore="Lock1" count=18446744073709551615) 2026-01-04T02:44:37.268721Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:134:2159], cookie=333, session=2, semaphore="Lock1" count=1) 2026-01-04T02:44:37.268841Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #2 session 2 2026-01-04T02:44:37.282440Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:134:2159], cookie=222) 2026-01-04T02:44:37.282546Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:134:2159], cookie=333) 
2026-01-04T02:44:37.283076Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:151:2173], cookie=4928725188136442097, name="Lock1") 2026-01-04T02:44:37.283203Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:151:2173], cookie=4928725188136442097) 2026-01-04T02:44:37.617077Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:37.617188Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:37.637216Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:37.637355Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:37.671777Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:37.672220Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:136:2161], cookie=7391604593715743186, session=0, seqNo=0) 2026-01-04T02:44:37.672393Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:44:37.686894Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:136:2161], cookie=7391604593715743186, session=1) 2026-01-04T02:44:37.687286Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:136:2161], cookie=16827574050987924970, session=0, seqNo=0) 2026-01-04T02:44:37.687433Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-04T02:44:37.699576Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:136:2161], cookie=16827574050987924970, session=2) 2026-01-04T02:44:37.699842Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-04T02:44:37.699964Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-04T02:44:37.700037Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-04T02:44:37.712139Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=111) 2026-01-04T02:44:37.712480Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=222, session=2, semaphore="Lock1" count=1) 2026-01-04T02:44:37.712816Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=333, session=2, semaphore="Lock1" count=18446744073709551615) 2026-01-04T02:44:37.725311Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=222) 2026-01-04T02:44:37.725408Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=333) 2026-01-04T02:44:37.726085Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: 
[72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:153:2175], cookie=12057154701364315666, name="Lock1") 2026-01-04T02:44:37.726193Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:153:2175], cookie=12057154701364315666) 2026-01-04T02:44:37.726668Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:156:2178], cookie=440874041099002742, name="Lock1") 2026-01-04T02:44:37.726758Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:156:2178], cookie=440874041099002742) 2026-01-04T02:44:37.999171Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:37.999290Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:38.018591Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:38.019034Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:38.054004Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:38.054611Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=13381208923832160881, session=0, seqNo=0) 2026-01-04T02:44:38.054938Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:44:38.067087Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=13381208923832160881, session=1) 2026-01-04T02:44:38.067428Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=15359869463620341999, session=0, seqNo=0) 2026-01-04T02:44:38.067562Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-04T02:44:38.079526Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=15359869463620341999, session=2) 2026-01-04T02:44:38.080248Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-04T02:44:38.080406Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-04T02:44:38.080502Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-04T02:44:38.092397Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:136:2161], cookie=111) 2026-01-04T02:44:38.092713Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:136:2161], cookie=222, session=2, semaphore="Lock1" count=1) 2026-01-04T02:44:38.093075Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:136:2161], cookie=333, session=2, semaphore="Lock1" count=1) 2026-01-04T02:44:38.093170Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter 
link 2026-01-04T02:44:38.104910Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:136:2161], cookie=222) 2026-01-04T02:44:38.104978Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:136:2161], cookie=333) 2026-01-04T02:44:38.105531Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:156:2178], cookie=14664064115377418727, name="Lock1") 2026-01-04T02:44:38.105630Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:156:2178], cookie=14664064115377418727) 2026-01-04T02:44:38.106086Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:159:2181], cookie=12391521150529052915, name="Lock1") 2026-01-04T02:44:38.106147Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:159:2181], cookie=12391521150529052915) 2026-01-04T02:44:38.117506Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:38.117591Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:38.117973Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:38.118419Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:38.165298Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:38.165453Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-04T02:44:38.165871Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:198:2211], cookie=14144731933929289502, name="Lock1") 2026-01-04T02:44:38.165956Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:198:2211], cookie=14144731933929289502) 2026-01-04T02:44:38.166488Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:206:2218], cookie=15560294802363341185, name="Lock1") 2026-01-04T02:44:38.166557Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:206:2218], cookie=15560294802363341185) 2026-01-04T02:44:38.595999Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:38.596110Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:38.615400Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:38.616105Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:38.640841Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:38.641419Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=12932159289064810521, session=0, seqNo=0) 2026-01-04T02:44:38.641576Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 
2026-01-04T02:44:38.653415Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=12932159289064810521, session=1) 2026-01-04T02:44:38.653718Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=2006826293356249921, session=0, seqNo=0) 2026-01-04T02:44:38.653870Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-04T02:44:38.665768Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=2006826293356249921, session=2) 2026-01-04T02:44:38.666107Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-04T02:44:38.666232Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-04T02:44:38.666320Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-04T02:44:38.678587Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=111) 2026-01-04T02:44:38.678939Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=222, session=2, semaphore="Lock1" count=1) 2026-01-04T02:44:38.679336Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:137:2161], cookie=333, name="Lock1") 2026-01-04T02:44:38.679454Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2026-01-04T02:44:38.691635Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=222) 2026-01-04T02:44:38.691725Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:137:2161], cookie=333) 2026-01-04T02:44:39.048493Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:39.048613Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:39.063270Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:39.063421Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:39.078511Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:39.083966Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:135:2159], cookie=6385023058800610701, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2026-01-04T02:44:39.084200Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-04T02:44:39.106683Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:135:2159], cookie=6385023058800610701) 2026-01-04T02:44:39.107262Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] 
TTxQuoterResourceAdd::Execute (sender=[5:144:2166], cookie=870395684522660181, path="/Root/Res", config={ }) 2026-01-04T02:44:39.107489Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-01-04T02:44:39.119644Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:144:2166], cookie=870395684522660181) 2026-01-04T02:44:39.123263Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:149:2171]. Cookie: 3508131375665319852. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-04T02:44:39.123356Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:149:2171], cookie=3508131375665319852) 2026-01-04T02:44:39.123921Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:193: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:149:2171]. Cookie: 16608671283859614933. Data: { } 2026-01-04T02:44:39.123970Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:198: [72057594037927937] Update quoter resources consumption state (sender=[5:149:2171], cookie=16608671283859614933) 2026-01-04T02:44:39.166012Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:149:2171]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2026-01-04T02:44:39.219189Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:149:2171]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2026-01-04T02:44:39.250352Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:149:2171]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2026-01-04T02:44:39.292002Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:149:2171]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } 2026-01-04T02:44:39.333413Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:149:2171]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 2 Amount: 10 StateNotification { Status: SUCCESS } } } >> TKesusTest::TestUnregisterProxyBadGeneration [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestRegisterProxyLinkFailureRace [GOOD] Test command err: 2026-01-04T02:44:36.484809Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:36.484939Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:36.507021Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:36.507131Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:36.548971Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:36.976002Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:36.976143Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:36.994946Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:36.995087Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:37.029618Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:37.363210Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:37.363321Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:37.377683Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:37.378269Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:37.412948Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:37.791292Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:37.791387Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:37.809359Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:37.809647Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:37.827528Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:37.829675Z node 4 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037927937] NodeDisconnected NodeId# 5 2026-01-04T02:44:37.830378Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([4:195:2162]) 2026-01-04T02:44:38.425856Z node 6 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:38.425969Z node 6 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:38.445482Z node 6 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:38.445721Z node 6 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute ... waiting for register request 2026-01-04T02:44:38.492819Z node 6 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete ... 
blocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR cookie 5167430911708388275 ... waiting for register request (done) ... unblocking NKikimr::NKesus::TEvKesus::TEvRegisterProxy from TEST_ACTOR_RUNTIME to KESUS_TABLET_ACTOR 2026-01-04T02:44:38.493643Z node 6 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037927937] NodeDisconnected NodeId# 7 2026-01-04T02:44:38.494335Z node 6 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([6:191:2160]) >> TKesusTest::TestSessionTimeoutAfterUnregister |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestLockNotFound [GOOD] >> TKesusTest::TestDeleteSemaphore >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable [GOOD] |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching-EnablePersistentPartitionStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterSubscribeOnResource [GOOD] Test command err: 2026-01-04T02:44:36.484817Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:36.484940Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:36.504932Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:36.505040Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:36.562524Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:36.569901Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:134:2159], cookie=3280577203338735914, path="/Root", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2026-01-04T02:44:36.570189Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-04T02:44:36.582642Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:134:2159], cookie=3280577203338735914) 2026-01-04T02:44:36.583230Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:144:2166], cookie=10394927072379453933, path="/Root/Folder", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2026-01-04T02:44:36.583419Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Folder" 2026-01-04T02:44:36.595502Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:144:2166], cookie=10394927072379453933) 2026-01-04T02:44:36.596183Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:149:2171], cookie=46835488276926896, path="/Root/Q1", config={ MaxUnitsPerSecond: 10 }) 2026-01-04T02:44:36.596385Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root/Q1" 2026-01-04T02:44:36.608729Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:149:2171], 
cookie=46835488276926896) 2026-01-04T02:44:36.609414Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:154:2176], cookie=18420686452643273816, path="/Root/Folder/Q1", config={ MaxUnitsPerSecond: 10 }) 2026-01-04T02:44:36.609687Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2026-01-04T02:44:36.622142Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:154:2176], cookie=18420686452643273816) 2026-01-04T02:44:36.622826Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:159:2181], cookie=9086285917723416600, path="/Root/Folder/Q2", config={ MaxUnitsPerSecond: 10 }) 2026-01-04T02:44:36.623061Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 5 "Root/Folder/Q2" 2026-01-04T02:44:36.635979Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:159:2181], cookie=9086285917723416600) 2026-01-04T02:44:36.636678Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:164:2186], cookie=5666380166359467115, path="/Root/Folder/Q3", config={ MaxUnitsPerSecond: 10 }) 2026-01-04T02:44:36.636922Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 6 "Root/Folder/Q3" 2026-01-04T02:44:36.649277Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:164:2186], cookie=5666380166359467115) 2026-01-04T02:44:36.650064Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:169:2191], cookie=6698595834365921845, path="/Root2", config={ MaxUnitsPerSecond: 100500 MaxBurstSizeCoefficient: 1.5 }) 2026-01-04T02:44:36.650289Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 7 "Root2" 2026-01-04T02:44:36.662663Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:169:2191], cookie=6698595834365921845) 2026-01-04T02:44:36.663381Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:174:2196], cookie=380902319852766181, path="/Root2/Q", config={ MaxUnitsPerSecond: 10 }) 2026-01-04T02:44:36.663624Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 8 "Root2/Q" 2026-01-04T02:44:36.676037Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:174:2196], cookie=380902319852766181) 2026-01-04T02:44:36.676648Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:179:2201], cookie=8806111929467628642, ids=[100], paths=[], recursive=0) 2026-01-04T02:44:36.676723Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:179:2201], cookie=8806111929467628642) 2026-01-04T02:44:36.677117Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute 
(sender=[1:182:2204], cookie=4479739685853973018, ids=[], paths=[Nonexistent/Path], recursive=0) 2026-01-04T02:44:36.677182Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:182:2204], cookie=4479739685853973018) 2026-01-04T02:44:36.677652Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:185:2207], cookie=11727577570671122190, ids=[], paths=[/Root, ], recursive=0) 2026-01-04T02:44:36.677724Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:185:2207], cookie=11727577570671122190) 2026-01-04T02:44:36.678246Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:188:2210], cookie=1787842753531360362, ids=[1, 1], paths=[], recursive=0) 2026-01-04T02:44:36.678315Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:188:2210], cookie=1787842753531360362) 2026-01-04T02:44:36.678785Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:191:2213], cookie=7513851156141297498, ids=[], paths=[/Root2/Q, /Root2/Q], recursive=0) 2026-01-04T02:44:36.678870Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:191:2213], cookie=7513851156141297498) 2026-01-04T02:44:36.679396Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:194:2216], cookie=8732047595397854748, ids=[], paths=[], recursive=1) 2026-01-04T02:44:36.679503Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:194:2216], cookie=8732047595397854748) 2026-01-04T02:44:36.681257Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:197:2219], cookie=9255692153893788752, ids=[], paths=[], recursive=0) 2026-01-04T02:44:36.681380Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:197:2219], cookie=9255692153893788752) 2026-01-04T02:44:36.681961Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:200:2222], cookie=13353509140663797383, ids=[3, 2], paths=[], recursive=1) 2026-01-04T02:44:36.682068Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:200:2222], cookie=13353509140663797383) 2026-01-04T02:44:36.682619Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:203:2225], cookie=8295137839937984531, ids=[3, 2], paths=[], recursive=0) 2026-01-04T02:44:36.682682Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:203:2225], cookie=8295137839937984531) 2026-01-04T02:44:36.683261Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:206:2228], cookie=12160203074541944060, ids=[], paths=[Root2/], 
recursive=1) 2026-01-04T02:44:36.683335Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:206:2228], cookie=12160203074541944060) 2026-01-04T02:44:36.683814Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:209:2231], cookie=13428123905951183731, ids=[], paths=[Root2/], recursive=0) 2026-01-04T02:44:36.683863Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:209:2231], cookie=13428123905951183731) 2026-01-04T02:44:36.698517Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:36.698657Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:36.699186Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:36.699780Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:36.717679Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:36.718090Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:248:2261], cookie=7357425765285319824, ids=[100], paths=[], recursive=0) 2026-01-04T02:44:36.718159Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:248:2261], cookie=7357425765285319824) 2026-01-04T02:44:36.718729Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:254:2266], cookie=529163542956520555, ids=[], paths=[Nonexistent/Path], recursive=0) 2026-01-04T02:44:36.718800Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:254:2266], cookie=529163542956520555) 2026-01-04T02:44:36.719343Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[1:257:2269], cookie=9242907261007272413, ids=[], paths=[/Root, ], recursive=0) 2026-01-04T02:44:36.719418Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[1:257:2269], cookie=9242907261007272413) 2026-01-04T02:44:36.719955Z node 1 :KESUS_TAB ... 
ET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root/Folder/Q1" 2026-01-04T02:44:38.912302Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:156:2178], cookie=12458010120239712913) 2026-01-04T02:44:38.912876Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:161:2183], cookie=14042617342577813219, ids=[], paths=[], recursive=1) 2026-01-04T02:44:38.912980Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:161:2183], cookie=14042617342577813219) 2026-01-04T02:44:38.913994Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:167:2189], cookie=17326963560543536796, ids=[], paths=[], recursive=1) 2026-01-04T02:44:38.914077Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:167:2189], cookie=17326963560543536796) 2026-01-04T02:44:38.914924Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:173:2195], cookie=7308881654022558409, ids=[], paths=[], recursive=1) 2026-01-04T02:44:38.914998Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:173:2195], cookie=7308881654022558409) 2026-01-04T02:44:38.915461Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:176:2198], cookie=13796068406591175449, id=0, path="/Root/Folder/NonexistingRes") 2026-01-04T02:44:38.915552Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:176:2198], cookie=13796068406591175449) 2026-01-04T02:44:38.916035Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:179:2201], cookie=17969836564053401482, ids=[], paths=[], recursive=1) 2026-01-04T02:44:38.916104Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:179:2201], cookie=17969836564053401482) 2026-01-04T02:44:38.916611Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:182:2204], cookie=12131378716553898876, id=100, path="") 2026-01-04T02:44:38.916673Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:182:2204], cookie=12131378716553898876) 2026-01-04T02:44:38.917140Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:185:2207], cookie=6883020824820505868, ids=[], paths=[], recursive=1) 2026-01-04T02:44:38.917215Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:185:2207], cookie=6883020824820505868) 2026-01-04T02:44:38.917737Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:188:2210], cookie=12423475782268461567, id=3, path="") 2026-01-04T02:44:38.917826Z node 4 :KESUS_TABLET DEBUG: 
tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:188:2210], cookie=12423475782268461567) 2026-01-04T02:44:38.918338Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:191:2213], cookie=1095010972323994837, ids=[], paths=[], recursive=1) 2026-01-04T02:44:38.918430Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:191:2213], cookie=1095010972323994837) 2026-01-04T02:44:38.919035Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:194:2216], cookie=8294735812426114579, id=0, path="/Root/Folder/Q1") 2026-01-04T02:44:38.919205Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:61: [72057594037927937] Deleted quoter resource 4 "Root/Folder/Q1" 2026-01-04T02:44:38.931581Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:194:2216], cookie=8294735812426114579) 2026-01-04T02:44:38.932385Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:199:2221], cookie=9754836639269440131, ids=[], paths=[], recursive=1) 2026-01-04T02:44:38.932492Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:199:2221], cookie=9754836639269440131) 2026-01-04T02:44:38.946969Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:38.947076Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:38.947620Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:38.948297Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:38.985797Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:38.986237Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:238:2251], cookie=1331481274182446838, ids=[], paths=[], recursive=1) 2026-01-04T02:44:38.986327Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:238:2251], cookie=1331481274182446838) 2026-01-04T02:44:38.987017Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:32: [72057594037927937] TTxQuoterResourceDelete::Execute (sender=[4:244:2256], cookie=6534105833621697903, id=3, path="") 2026-01-04T02:44:38.987173Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:61: [72057594037927937] Deleted quoter resource 3 "Root/Folder" 2026-01-04T02:44:39.009643Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_delete.cpp:70: [72057594037927937] TTxQuoterResourceDelete::Complete (sender=[4:244:2256], cookie=6534105833621697903) 2026-01-04T02:44:39.010377Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:249:2261], cookie=5436762636679591886, ids=[], paths=[], recursive=1) 2026-01-04T02:44:39.010461Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:249:2261], cookie=5436762636679591886) 
2026-01-04T02:44:39.021953Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:39.022061Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:39.022490Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:39.023032Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:39.059837Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:39.060185Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:78: [72057594037927937] TTxQuoterResourceDescribe::Execute (sender=[4:288:2291], cookie=16062330583715366648, ids=[], paths=[], recursive=1) 2026-01-04T02:44:39.060267Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_describe.cpp:115: [72057594037927937] TTxQuoterResourceDescribe::Complete (sender=[4:288:2291], cookie=16062330583715366648) 2026-01-04T02:44:39.432355Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:39.432470Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:39.450361Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:39.450503Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:39.464699Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:39.465146Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:135:2159], cookie=3986353804947647853, path="/Q1", config={ MaxUnitsPerSecond: 10 }) 2026-01-04T02:44:39.465349Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Q1" 2026-01-04T02:44:39.487860Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:135:2159], cookie=3986353804947647853) 2026-01-04T02:44:39.488477Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:144:2166], cookie=7478802930884519102, path="/Q2", config={ MaxUnitsPerSecond: 10 }) 2026-01-04T02:44:39.488673Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Q2" 2026-01-04T02:44:39.500977Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:144:2166], cookie=7478802930884519102) 2026-01-04T02:44:39.502712Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:149:2171]. Cookie: 16400059416063929611. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-04T02:44:39.502782Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:149:2171], cookie=16400059416063929611) 2026-01-04T02:44:39.503564Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:149:2171]. Cookie: 14810171421661908798. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Q1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Q2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 10 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } Results { Error { Status: NOT_FOUND Issues { message: "Resource \"/Q3\" doesn\'t exist." 
} } } ProtocolVersion: 1 } 2026-01-04T02:44:39.503630Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:149:2171], cookie=14810171421661908798) |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DeleteAdd [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:38.250936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:38.251025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:38.251060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:38.251112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:38.251150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:38.251177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:38.251227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:38.251320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:38.252101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:38.252335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:38.330507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:38.330555Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:38.342798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:38.343014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:38.343203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:38.350707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:38.350924Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:38.351563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:38.351827Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:38.354413Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:38.354594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:38.355692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:38.355750Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:38.355862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:38.355903Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:38.356005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:38.356201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:38.362417Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:44:38.461533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:38.461785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:38.461999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:44:38.462031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:44:38.462201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:44:38.462256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:38.464555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:38.464734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:44:38.464973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:38.465031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:44:38.465071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:44:38.465104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:44:38.466942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:38.467001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:44:38.467068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:44:38.468685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:38.468726Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:38.468774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:38.468816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:44:38.471732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:38.473599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:44:38.473752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:44:38.474788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:38.474919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:38.474978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:38.475232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:44:38.475284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:38.475429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:38.475505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:44:38.477430Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:38.477468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
2026-01-04T02:44:39.591104Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:39.591142Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 1 2026-01-04T02:44:39.591210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 102, path id: 3 2026-01-04T02:44:39.591518Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:44:39.591567Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:44:39.591677Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:44:39.591716Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:44:39.591780Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:44:39.591831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:44:39.591871Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: false 2026-01-04T02:44:39.591918Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:44:39.591963Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:44:39.591999Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:44:39.592214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 9 2026-01-04T02:44:39.592259Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 102, publications: 2, subscribers: 0 2026-01-04T02:44:39.592298Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 1], 9 2026-01-04T02:44:39.592330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 102, [OwnerId: 72057594046678944, LocalPathId: 3], 3 2026-01-04T02:44:39.593376Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:44:39.593464Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:44:39.593501Z node 2 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:44:39.593545Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-04T02:44:39.593593Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:44:39.594080Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:44:39.594182Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 3 Version: 3 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T02:44:39.594222Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 102 2026-01-04T02:44:39.594254Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 3 2026-01-04T02:44:39.594284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 8 2026-01-04T02:44:39.594349Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 102, subscribers: 0 2026-01-04T02:44:39.603025Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:44:39.604518Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:44:39.604820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:44:39.604861Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:44:39.605223Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:44:39.605302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:44:39.605333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [2:972:2806] TestWaitNotification: OK eventTxId 102 2026-01-04T02:44:39.605787Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at 
schemeshard: 72057594046678944 2026-01-04T02:44:39.605991Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 263us result status StatusSuccess 2026-01-04T02:44:39.606406Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409552 Coordinators: 72075186233409553 Coordinators: 72075186233409554 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409555 Mediators: 72075186233409556 Mediators: 72075186233409557 } DomainKey { SchemeShard: 72057594046678944 PathId: 3 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 3 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:39.606975Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:39.607093Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 166us result status StatusSuccess 2026-01-04T02:44:39.607413Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 102 
CreateStep: 5000004 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |92.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefine >> DataShardVolatile::DistributedWriteLostPlanThenSplit [GOOD] >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency >> TKesusTest::TestDeleteSemaphore [GOOD] >> TKesusTest::TestDescribeSemaphoreWatches ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007D95BF0AA900 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:35.735483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:35.735603Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:35.735646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:35.735689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:35.735738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:35.735786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:35.735846Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:35.735933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:35.736844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:35.737140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:35.834246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:35.834312Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:35.849732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:35.849978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:35.850193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:35.858200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:35.858450Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:35.859236Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:35.859528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:35.862341Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:35.862530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:35.863497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:35.863545Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:35.863637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:35.863680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:35.863780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:35.863964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:36.011674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" 
OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.012757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.012902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.012997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.013066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.013157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.013236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.013309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.013408Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.013479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.013547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.013640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.013711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.013832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.013928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by ... 
n 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D95BF19E500, stats written 0 (message repeated while the test polled for a full stats update) Leader for TabletID 72057594046678944 is [1:896:2845] sender: [1:969:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D95BF19E500, stats written 0 (message repeated) 2026-01-04T02:44:39.950088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D95BF19E500, stats written 0 (message repeated) 2026-01-04T02:44:39.961337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard
72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D95BF19E500, stats written 0 2026-01-04T02:44:39.961600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-01-04T02:44:39.961794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-04T02:44:39.962135Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable 2026-01-04T02:44:39.962313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2026-01-04T02:44:39.962353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-04T02:44:39.962406Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D95BF19E500, stats written 2 2026-01-04T02:44:39.963455Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:39.963984Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 706us result status StatusSuccess 2026-01-04T02:44:39.964719Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165248 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TBlobStorageProxyTest::TestCollectGarbagePersistence [GOOD] >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-clusteradmin >> ReadSessionImplTest::SuccessfulInit [GOOD] |92.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> ReadSessionImplTest::SuccessfulInitAndThenTimeoutCallback [GOOD] >> ReadSessionImplTest::StopsRetryAfterFailedAttempt [GOOD] >> ReadSessionImplTest::StopsRetryAfterTimeout [GOOD] >> ReadSessionImplTest::UnpackBigBatchWithTwoPartitions [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease >> TConsoleConfigSubscriptionTests::TestAddSubscriptionIdempotency [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpMkDir-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-system |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-ordinaryuser [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-dbadmin >> ReadSessionImplTest::SimpleDataHandlersWithGracefulRelease [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] >> ReadSessionImplTest::ReconnectOnTmpError [GOOD] >> ReadSessionImplTest::ReconnectOnTmpErrorAndThenTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeout [GOOD] >> ReadSessionImplTest::ReconnectOnTimeoutAndThenCreate [GOOD] >> ReadSessionImplTest::ReconnectsAfterFailure [GOOD] >> ReadSessionImplTest::SimpleDataHandlers >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-ordinaryuser >> TPQCachingProxyTest::MultipleSessions >> ReadSessionImplTest::SimpleDataHandlers [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-clusteradmin >> TPQCachingProxyTest::OutdatedSession ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithGracefulReleaseWithCommit [GOOD] Test command err: 2026-01-04T02:44:41.287229Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.287269Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.287312Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:41.287677Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:41.288112Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-04T02:44:41.288164Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.288983Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.289006Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.289023Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:41.289416Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:41.289786Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-04T02:44:41.289868Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.290580Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.290613Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.290640Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:41.290946Z :ERROR: [db] [sessionid] [cluster] Got error. 
Status: INTERNAL_ERROR. Description: 2026-01-04T02:44:41.291016Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.291052Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.291290Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: INTERNAL_ERROR Issues: "
: Error: Failed to establish connection to server "" ( cluster cluster). Attempts done: 1 " } 2026-01-04T02:44:41.292070Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.292089Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.292104Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:41.292443Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-01-04T02:44:41.292471Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.292489Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.292535Z :INFO: [db] [sessionid] [cluster] Closing session to cluster: SessionClosed { Status: TIMEOUT Issues: "
: Error: Failed to establish connection to server. Attempts done: 1 " } 2026-01-04T02:44:41.293419Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-04T02:44:41.293463Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-04T02:44:41.293483Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:41.293714Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:41.294059Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:41.303142Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-04T02:44:41.303438Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:44:41.303798Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 2. Cluster: "TestCluster". Topic: "TestTopic". Partition: 2. Read offset: (NULL) 2026-01-04T02:44:41.306746Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-50) 2026-01-04T02:44:41.307001Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:41.307063Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-04T02:44:41.307097Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-04T02:44:41.307118Z :DEBUG: Take Data. Partition 1. Read: {0, 3} (4-4) 2026-01-04T02:44:41.307148Z :DEBUG: Take Data. Partition 1. Read: {0, 4} (5-5) 2026-01-04T02:44:41.307185Z :DEBUG: Take Data. Partition 1. Read: {0, 5} (6-6) 2026-01-04T02:44:41.307203Z :DEBUG: Take Data. Partition 1. Read: {0, 6} (7-7) 2026-01-04T02:44:41.307219Z :DEBUG: Take Data. Partition 1. Read: {0, 7} (8-8) 2026-01-04T02:44:41.307267Z :DEBUG: Take Data. Partition 1. Read: {0, 8} (9-9) 2026-01-04T02:44:41.307289Z :DEBUG: Take Data. Partition 1. Read: {0, 9} (10-10) 2026-01-04T02:44:41.307307Z :DEBUG: Take Data. Partition 1. Read: {0, 10} (11-11) 2026-01-04T02:44:41.307345Z :DEBUG: Take Data. Partition 1. Read: {0, 11} (12-12) 2026-01-04T02:44:41.307367Z :DEBUG: Take Data. Partition 1. Read: {0, 12} (13-13) 2026-01-04T02:44:41.307397Z :DEBUG: Take Data. Partition 1. Read: {0, 13} (14-14) 2026-01-04T02:44:41.307413Z :DEBUG: Take Data. Partition 1. Read: {0, 14} (15-15) 2026-01-04T02:44:41.307429Z :DEBUG: Take Data. Partition 1. Read: {0, 15} (16-16) 2026-01-04T02:44:41.307487Z :DEBUG: Take Data. Partition 1. Read: {0, 16} (17-17) 2026-01-04T02:44:41.307507Z :DEBUG: Take Data. Partition 1. Read: {0, 17} (18-18) 2026-01-04T02:44:41.307524Z :DEBUG: Take Data. Partition 1. Read: {0, 18} (19-19) 2026-01-04T02:44:41.307540Z :DEBUG: Take Data. Partition 1. Read: {0, 19} (20-20) 2026-01-04T02:44:41.307555Z :DEBUG: Take Data. Partition 1. Read: {0, 20} (21-21) 2026-01-04T02:44:41.307576Z :DEBUG: Take Data. Partition 1. Read: {0, 21} (22-22) 2026-01-04T02:44:41.307595Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (23-23) 2026-01-04T02:44:41.307611Z :DEBUG: Take Data. Partition 1. Read: {1, 1} (24-24) 2026-01-04T02:44:41.307639Z :DEBUG: Take Data. Partition 1. Read: {1, 2} (25-25) 2026-01-04T02:44:41.307666Z :DEBUG: Take Data. Partition 1. Read: {1, 3} (26-26) 2026-01-04T02:44:41.307681Z :DEBUG: Take Data. Partition 1. Read: {1, 4} (27-27) 2026-01-04T02:44:41.307719Z :DEBUG: Take Data. Partition 1. 
Read: {1, 5} (28-28) 2026-01-04T02:44:41.307743Z :DEBUG: Take Data. Partition 1. Read: {1, 6} (29-29) 2026-01-04T02:44:41.307758Z :DEBUG: Take Data. Partition 1. Read: {1, 7} (30-30) 2026-01-04T02:44:41.307786Z :DEBUG: Take Data. Partition 1. Read: {1, 8} (31-31) 2026-01-04T02:44:41.307803Z :DEBUG: Take Data. Partition 1. Read: {1, 9} (32-32) 2026-01-04T02:44:41.307884Z :DEBUG: Take Data. Partition 1. Read: {1, 10} (33-33) 2026-01-04T02:44:41.307912Z :DEBUG: Take Data. Partition 1. Read: {1, 11} (34-34) 2026-01-04T02:44:41.307932Z :DEBUG: Take Data. Partition 1. Read: {1, 12} (35-35) 2026-01-04T02:44:41.307949Z :DEBUG: Take Data. Partition 1. Read: {1, 13} (36-36) 2026-01-04T02:44:41.307971Z :DEBUG: Take Data. Partition 1. Read: {1, 14} (37-37) 2026-01-04T02:44:41.307991Z :DEBUG: Take Data. Partition 1. Read: {1, 15} (38-38) 2026-01-04T02:44:41.308006Z :DEBUG: Take Data. Partition 1. Read: {1, 16} (39-39) 2026-01-04T02:44:41.308026Z :DEBUG: Take Data. Partition 1. Read: {1, 17} (40-40) 2026-01-04T02:44:41.308041Z :DEBUG: Take Data. Partition 1. Read: {1, 18} (41-41) 2026-01-04T02:44:41.308059Z :DEBUG: Take Data. Partition 1. Read: {1, 19} (42-42) 2026-01-04T02:44:41.308074Z :DEBUG: Take Data. Partition 1. Read: {1, 20} (43-43) 2026-01-04T02:44:41.308098Z :DEBUG: Take Data. Partition 1. Read: {1, 21} (44-44) 2026-01-04T02:44:41.308118Z :DEBUG: Take Data. Partition 1. Read: {1, 22} (45-45) 2026-01-04T02:44:41.308135Z :DEBUG: Take Data. Partition 1. Read: {1, 23} (46-46) 2026-01-04T02:44:41.308149Z :DEBUG: Take Data. Partition 1. Read: {1, 24} (47-47) 2026-01-04T02:44:41.308166Z :DEBUG: Take Data. Partition 1. Read: {1, 25} (48-48) 2026-01-04T02:44:41.308182Z :DEBUG: Take Data. Partition 1. Read: {1, 26} (49-49) 2026-01-04T02:44:41.308210Z :DEBUG: Take Data. Partition 1. Read: {1, 27} (50-50) 2026-01-04T02:44:41.308277Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2026-01-04T02:44:41.310340Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 2 (51-100) 2026-01-04T02:44:41.310523Z :DEBUG: Take Data. Partition 2. Read: {0, 0} (51-51) 2026-01-04T02:44:41.310572Z :DEBUG: Take Data. Partition 2. Read: {0, 1} (52-52) 2026-01-04T02:44:41.310594Z :DEBUG: Take Data. Partition 2. Read: {0, 2} (53-53) 2026-01-04T02:44:41.310613Z :DEBUG: Take Data. Partition 2. Read: {0, 3} (54-54) 2026-01-04T02:44:41.310644Z :DEBUG: Take Data. Partition 2. Read: {0, 4} (55-55) 2026-01-04T02:44:41.310661Z :DEBUG: Take Data. Partition 2. Read: {0, 5} (56-56) 2026-01-04T02:44:41.310675Z :DEBUG: Take Data. Partition 2. Read: {0, 6} (57-57) 2026-01-04T02:44:41.310693Z :DEBUG: Take Data. Partition 2. Read: {0, 7} (58-58) 2026-01-04T02:44:41.310723Z :DEBUG: Take Data. Partition 2. Read: {0, 8} (59-59) 2026-01-04T02:44:41.310741Z :DEBUG: Take Data. Partition 2. Read: {0, 9} (60-60) 2026-01-04T02:44:41.310760Z :DEBUG: Take Data. Partition 2. Read: {0, 10} (61-61) 2026-01-04T02:44:41.310774Z :DEBUG: Take Data. Partition 2. Read: {0, 11} (62-62) 2026-01-04T02:44:41.310789Z :DEBUG: Take Data. Partition 2. Read: {0, 12} (63-63) 2026-01-04T02:44:41.310806Z :DEBUG: Take Data. Partition 2. Read: {0, 13} (64-64) 2026-01-04T02:44:41.310833Z :DEBUG: Take Data. Partition 2. Read: {0, 14} (65-65) 2026-01-04T02:44:41.310853Z :DEBUG: Take Data. Partition 2. Read: {0, 15} (66-66) 2026-01-04T02:44:41.310889Z :DEBUG: Take Data. Partition 2. Read: {0, 16} (67-67) 2026-01-04T02:44:41.310903Z :DEBUG: Take Data. Partition 2. 
Read: {0, 17} (68-68) 2026-01-04T02:44:41.310939Z :DEBUG: Take Data. Partition 2. Read: {0, 18} (69-69) 2026-01-04T02:44:41.310974Z :DEBUG: Take Data. Partition 2. Read: {0, 19} (70-70) 2026-01-04T02:44:41.310989Z :DEBUG: Take Data. Partition 2. Read: {0, 20} (71-71) 2026-01-04T02:44:41.311003Z :DEBUG: Take Data. Partition 2. Read: {0, 21} (72-72) 2026-01-04T02:44:41.311019Z :DEBUG: Take Data. Partition 2. Read: {1, 0} (73-73) 2026-01-04T02:44:41.311034Z :DEBUG: Take Data. Partition 2. Read: {1, 1} (74-74) 2026-01-04T02:44:41.311067Z :DEBUG: Take Data. Partition 2. Read: {1, 2} (75-75) 2026-01-04T02:44:41.311089Z :DEBUG: Take Data. Partition 2. Read: {1, 3} (76-76) 2026-01-04T02:44:41.311103Z :DEBUG: Take Data. Partition 2. Read: {1, 4} (77-77) 2026-01-04T02:44:41.311131Z :DEBUG: Take Data. Partition 2. Read: {1, 5} (78-78) 2026-01-04T02:44:41.311149Z :DEBUG: Take Data. Partition 2. Read: {1, 6} (79-79) 2026-01-04T02:44:41.311168Z :DEBUG: Take Data. Partition 2. Read: {1, 7} (80-80) 2026-01-04T02:44:41.311197Z :DEBUG: Take Data. Partition 2. Read: {1, 8} (81-81) 2026-01-04T02:44:41.311217Z :DEBUG: Take Data. Partition 2. Read: {1, 9} (82-82) 2026-01-04T02:44:41.311294Z :DEBUG: Take Data. Partition 2. Read: {1, 10} (83-83) 2026-01-04T02:44:41.311329Z :DEBUG: Take Data. Partition 2. Read: {1, 11} (84-84) 2026-01-04T02:44:41.311344Z :DEBUG: Take Data. Partition 2. Read: {1, 12} (85-85) 2026-01-04T02:44:41.311360Z :DEBUG: Take Data. Partition 2. Read: {1, 13} (86-86) 2026-01-04T02:44:41.311376Z :DEBUG: Take Data. Partition 2. Read: {1, 14} (87-87) 2026-01-04T02:44:41.311394Z :DEBUG: Take Data. Partition 2. Read: {1, 15} (88-88) 2026-01-04T02:44:41.311413Z :DEBUG: Take Data. Partition 2. Read: {1, 16} (89-89) 2026-01-04T02:44:41.311429Z :DEBUG: Take Data. Partition 2. Read: {1, 17} (90-90) 2026-01-04T02:44:41.311454Z :DEBUG: Take Data. Partition 2. Read: {1, 18} (91-91) 2026-01-04T02:44:41.311481Z :DEBUG: Take Data. Partition 2. Read: {1, 19} (92-92) 2026-01-04T02:44:41.311496Z :DEBUG: Take Data. Partition 2. Read: {1, 20} (93-93) 2026-01-04T02:44:41.311513Z :DEBUG: Take Data. Partition 2. Read: {1, 21} (94-94) 2026-01-04T02:44:41.311530Z :DEBUG: Take Data. Partition 2. Read: {1, 22} (95-95) 2026-01-04T02:44:41.311549Z :DEBUG: Take Data. Partition 2. Read: {1, 23} (96-96) 2026-01-04T02:44:41.311573Z :DEBUG: Take Data. Partition 2. Read: {1, 24} (97-97) 2026-01-04T02:44:41.311586Z :DEBUG: Take Data. Partition 2. Read: {1, 25} (98-98) 2026-01-04T02:44:41.311596Z :DEBUG: Take Data. Partition 2. Read: {1, 26} (99-99) 2026-01-04T02:44:41.311606Z :DEBUG: Take Data. Partition 2. Read: {1, 27} (100-100) 2026-01-04T02:44:41.311636Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 50, size 5000 bytes 2026-01-04T02:44:41.311742Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 2500, ReadSizeServerDelta = 0 2026-01-04T02:44:41.312673Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.312689Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.312705Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:41.313014Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-04T02:44:41.313429Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:41.313693Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.314141Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:44:41.415111Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.415359Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-04T02:44:41.415442Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:41.415486Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-04T02:44:41.415563Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-04T02:44:41.615925Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2026-01-04T02:44:41.716459Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-01-04T02:44:41.716634Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-04T02:44:41.716797Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-01-04T02:44:41.717603Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.717625Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.717642Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:41.717925Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:41.718531Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:41.718747Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.719260Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:44:41.820242Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.820563Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-04T02:44:41.820661Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:41.820713Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-04T02:44:41.820800Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2026-01-04T02:44:41.820922Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-04T02:44:41.821089Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-01-04T02:44:41.821155Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". 
Partition: 1 2026-01-04T02:44:41.821345Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster |93.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::SimpleDataHandlersWithCommit [GOOD] Test command err: 2026-01-04T02:44:41.844061Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.844175Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.844209Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:41.844732Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-04T02:44:41.844795Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.844830Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.846106Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006078s 2026-01-04T02:44:41.846754Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:41.847407Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-04T02:44:41.847557Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.848727Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.848748Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.848767Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:41.849121Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-04T02:44:41.849172Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.849198Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.849260Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007107s 2026-01-04T02:44:41.849690Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:41.850037Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-04T02:44:41.850127Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.851025Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.851044Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.851061Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:41.851416Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-01-04T02:44:41.851452Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.851472Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.851525Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.235738s 2026-01-04T02:44:41.851960Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:41.852480Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-04T02:44:41.852572Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.853406Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.853425Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.853441Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:41.853770Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TIMEOUT. Description:
: Error: Failed to establish connection to server. Attempts done: 1 2026-01-04T02:44:41.853828Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.853853Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.853908Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.235548s 2026-01-04T02:44:41.854285Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:41.854660Z :INFO: [db] [sessionid] [cluster] Server session id: session id 2026-01-04T02:44:41.854727Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.855692Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.855738Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.855759Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:41.856047Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:41.856460Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:41.865758Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.866234Z :ERROR: [db] [sessionid] [cluster] Got error. Status: TRANSPORT_UNAVAILABLE. Description:
: Error: GRpc error: (14): 2026-01-04T02:44:41.866289Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.866308Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.866364Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.195179s 2026-01-04T02:44:41.867656Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-01-04T02:44:41.868762Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.868778Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.868815Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:41.869064Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:41.869477Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:41.869686Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.870087Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:44:41.970950Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:41.971212Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-04T02:44:41.971284Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:41.971347Z :DEBUG: Take Data. Partition 1. Read: {1, 0} (2-2) 2026-01-04T02:44:41.971426Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-04T02:44:42.071742Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-04T02:44:42.071904Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster 2026-01-04T02:44:42.072950Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:42.072983Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:42.073012Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:42.073314Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:42.073819Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:42.073985Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:42.074416Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:44:42.175383Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:42.175622Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-2) 2026-01-04T02:44:42.175686Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:42.175715Z :DEBUG: Take Data. Partition 1. 
Read: {1, 0} (2-2) 2026-01-04T02:44:42.175788Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 3). Partition stream id: 1 2026-01-04T02:44:42.175910Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 2, size 6 bytes 2026-01-04T02:44:42.175993Z :INFO: [db] [sessionid] [cluster] Confirm partition stream destroy. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1 2026-01-04T02:44:42.176069Z :DEBUG: [db] [sessionid] [cluster] Committed response: cookies { assign_id: 1 partition_cookie: 1 } 2026-01-04T02:44:42.176200Z :DEBUG: [db] [sessionid] [cluster] Abort session to cluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:40.830134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:40.830242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:40.830288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:40.830328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:40.830370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:40.830403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:40.830469Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:40.830590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:40.831485Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:40.831816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:40.910047Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:40.910114Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:40.924392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:40.924595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:40.924754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:40.932921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:40.933202Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:40.934045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:40.934331Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:40.937481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:40.937689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:40.938902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:40.938964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:40.939093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:40.939141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:40.939254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:40.939474Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:41.073376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:41.074239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:41.074385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: 
EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:41.074466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:41.074515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:41.074589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:41.074662Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:41.074742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:41.074845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:41.074916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:41.074989Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:41.075043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:41.075084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:41.075169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:41.075224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
9Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:120: NSubDomainState::TConfigureParts operationId# 101:0 Got OK TEvConfigureStatus from tablet# 72075186233409548 shardIdx# 72057594046678944:3 at schemeshard# 72057594046678944 2026-01-04T02:44:41.769327Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 3 -> 128 2026-01-04T02:44:41.769901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:44:41.772792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:44:41.772946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:44:41.773075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:44:41.773133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:44:41.773189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 101:0, at tablet# 72057594046678944 2026-01-04T02:44:41.773238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 101 ready parts: 1/1 2026-01-04T02:44:41.773380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 101 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:41.775441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 101:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2026-01-04T02:44:41.775572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000039 2026-01-04T02:44:41.775926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:41.776044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:41.776108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-01-04T02:44:41.776389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 
240 2026-01-04T02:44:41.776447Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-01-04T02:44:41.776626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:44:41.776709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2026-01-04T02:44:41.779037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:41.779093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:44:41.779287Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:41.779333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 2026-01-04T02:44:41.779446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:44:41.779507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:44:41.779608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:44:41.779648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:44:41.779722Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:44:41.779776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:44:41.779817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-04T02:44:41.779875Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:44:41.779920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:44:41.779955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:44:41.780162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-04T02:44:41.780207Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 1, subscribers: 1 2026-01-04T02:44:41.780258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 4 2026-01-04T02:44:41.781496Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:44:41.781627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 4 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:44:41.781684Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:44:41.781742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 4 2026-01-04T02:44:41.781792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:44:41.781901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2026-01-04T02:44:41.781966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:714:2704] 2026-01-04T02:44:41.784683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:44:41.784783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:44:41.784811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:721:2711] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-01-04T02:44:41.785268Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:41.785491Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 212us result status StatusSuccess 2026-01-04T02:44:41.785942Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 
72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TKesusTest::TestDescribeSemaphoreWatches [GOOD] >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] >> TKesusTest::TestGetQuoterResourceCounters |93.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> TConsoleTxProcessorTests::TestTxProcessorTemporary [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorRandom >> TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] >> TPQCachingProxyTest::MultipleSessions [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-ordinaryuser >> TPQCachingProxyTest::OutdatedSession [GOOD] >> TConsoleTests::TestRemoveTenantExtSubdomain [GOOD] >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants >> THDRRQuoterResourceTreeRuntimeTest::TestWeights [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestWeightsChange [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestVerySmallSpeed [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaRelease |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestCollectGarbageAfterLargeData [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::MultipleSessions [GOOD] Test command err: 2026-01-04T02:44:42.442237Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:44:42.578336Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:44:42.579240Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:44:42.579406Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:42.579470Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:44:42.597546Z node 1 
:PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:44:42.598173Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2026-01-04T02:44:42.598280Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2026-01-04T02:44:42.598324Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 2 for session: session1 2026-01-04T02:44:42.598353Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2026-01-04T02:44:42.598406Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 2 for session session1, Generation: 1 2026-01-04T02:44:42.598469Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session2:1 with generation 2 2026-01-04T02:44:42.598547Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 3 for session: session2 2026-01-04T02:44:42.598577Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 3 for session session2, Generation: 2 >> TKesusTest::TestGetQuoterResourceCounters [GOOD] |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::OutdatedSession [GOOD] Test command err: 2026-01-04T02:44:42.630562Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:44:42.708398Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:44:42.708474Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:44:42.708603Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:44:42.708669Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:44:42.725605Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:44:42.725715Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2026-01-04T02:44:42.725825Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2026-01-04T02:44:42.725865Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2026-01-04T02:44:42.725951Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:293: Direct read cache: registered server session: session1:1 with generation 2, killed existing session with older generation |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> ApplyClusterEndpointTest::NoPorts [GOOD] >> ApplyClusterEndpointTest::PortFromCds [GOOD] >> ApplyClusterEndpointTest::PortFromDriver [GOOD] >> BasicUsage::MaxByteSizeEqualZero ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> 
TSchemeShardSubDomainTest::DiskSpaceUsageWithStandaloneColumnTable-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007D32341AE500 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:37.417372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:37.417458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:37.417498Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:37.417538Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:37.417577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:37.417606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:37.417687Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:37.417769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:37.418680Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:37.418957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:37.494276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:37.494331Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:37.506804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:37.507014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:37.507178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:37.515112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:37.515346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:37.515976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 
2026-01-04T02:44:37.516246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:37.519235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:37.519430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:37.520506Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:37.520560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:37.520660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:37.520699Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:37.520809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:37.521037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:37.661451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:37.662465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:37.662576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:37.662643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:37.662707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:37.662823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:37.662915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:37.662979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:37.663062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:37.663131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:37.663199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:37.663296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:37.663359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:37.663431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:37.663525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... schemeshard: 72057594046678944 2026-01-04T02:44:42.558297Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 0 2026-01-04T02:44:42.558407Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:44:42.558483Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:44:42.558863Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 66, at schemeshard: 72057594046678944 2026-01-04T02:44:42.559397Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:44:42.559525Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:44:42.559938Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:44:42.560018Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:44:42.560285Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:44:42.560440Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:44:42.560499Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:44:42.560615Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for 
BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:44:42.560806Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:44:42.560891Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:44:42.561328Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:44:42.561574Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-04T02:44:42.561654Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:44:42.561707Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-04T02:44:42.561870Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-04T02:44:42.561925Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-04T02:44:42.561982Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-04T02:44:42.562247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:44:42.568158Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:42.573238Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:42.573336Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:42.574900Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:42.574978Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:42.575042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:42.575367Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 Leader for TabletID 72057594046678944 is [2:823:2782] sender: [2:878:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 2026-01-04T02:44:42.624035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 0 2026-01-04T02:44:42.624240Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-01-04T02:44:42.624314Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=ColumnTable, is column=1, is olap=0, RowCount 100000, DataSize 1263320 2026-01-04T02:44:42.624389Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:477: PersistSingleStats: ColumnTable rec.GetColumnTables() size=1 2026-01-04T02:44:42.624462Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:487: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007D3234207100, stats written 1 2026-01-04T02:44:42.624997Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ColumnTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:42.625287Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ColumnTable" took 331us result status StatusSuccess 2026-01-04T02:44:42.625839Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot/ColumnTable" PathDescription { Self { Name: "ColumnTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 ColumnTableVersion: 1 ColumnTableSchemaVersion: 1 } ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 149 LastUpdateTime: 149 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnTableDescription { Name: "ColumnTable" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } ColumnFamilyId: 0 } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } ColumnFamilyId: 0 } KeyColumnNames: "timestamp" Engine: COLUMN_ENGINE_REPLACING_TIMESERIES NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } ColumnFamilies { Id: 0 Name: "default" } NextColumnFamilyId: 1 } ColumnShardCount: 1 Sharding { ColumnShards: 72075186233409546 HashSharding { Function: HASH_FUNCTION_CONSISTENCY_64 Columns: "timestamp" } } StorageConfig { DataChannelCount: 64 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TBlobStorageProxyTest::TestDoubleGroups [GOOD] >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 >> TKesusTest::TestAcquireBeforeTimeoutViaRelease [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestGetQuoterResourceCounters [GOOD] Test command err: 2026-01-04T02:44:39.369247Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:39.369438Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] 
TTxInitSchema::Execute 2026-01-04T02:44:39.387631Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:39.387761Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:39.413346Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:39.413776Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:134:2159], cookie=7215056577716337294, path="/foo/bar/baz") 2026-01-04T02:44:39.438106Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:134:2159], cookie=7215056577716337294, status=SUCCESS) 2026-01-04T02:44:39.438731Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:144:2166], cookie=12593324921679738361) 2026-01-04T02:44:39.450872Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:144:2166], cookie=12593324921679738361) 2026-01-04T02:44:39.451497Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:149:2171], cookie=4756148119087442389, path="/foo/bar/baz") 2026-01-04T02:44:39.463662Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:149:2171], cookie=4756148119087442389, status=SUCCESS) 2026-01-04T02:44:39.464276Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:154:2176], cookie=7095941401964105314) 2026-01-04T02:44:39.476900Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:154:2176], cookie=7095941401964105314) 2026-01-04T02:44:39.490174Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:39.490268Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:39.490697Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:39.491316Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:39.531348Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:39.531691Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:23: [72057594037927937] TTxConfigGet::Execute (sender=[1:195:2208], cookie=3389970650474296876) 2026-01-04T02:44:39.554497Z node 1 :KESUS_TABLET DEBUG: tx_config_get.cpp:44: [72057594037927937] TTxConfigGet::Complete (sender=[1:195:2208], cookie=3389970650474296876) 2026-01-04T02:44:39.555228Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:203:2215], cookie=9499448644779838452, path="/foo/bar/baz") 2026-01-04T02:44:39.567611Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:203:2215], cookie=9499448644779838452, status=SUCCESS) 2026-01-04T02:44:39.568376Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[1:208:2220], cookie=8097367323828251936, path="/foo/bar/baz") 2026-01-04T02:44:39.568468Z node 1 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[1:208:2220], cookie=8097367323828251936, status=PRECONDITION_FAILED) 2026-01-04T02:44:39.981960Z node 2 :KESUS_TABLET INFO: 
tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:39.982051Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:40.002546Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:40.002720Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:40.037245Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:40.037650Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:136:2161], cookie=8263922004961160417, name="Lock1") 2026-01-04T02:44:40.037730Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:136:2161], cookie=8263922004961160417) 2026-01-04T02:44:40.402525Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:40.402639Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:40.422523Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:40.423033Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:40.458358Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:40.458919Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=10878755194211881297, session=0, seqNo=0) 2026-01-04T02:44:40.459075Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:44:40.471078Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=10878755194211881297, session=1) 2026-01-04T02:44:40.471404Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[3:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-04T02:44:40.471550Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-04T02:44:40.471649Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-04T02:44:40.483537Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[3:136:2161], cookie=111) 2026-01-04T02:44:40.484125Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:148:2170], cookie=9418307149403785168, name="Lock1", force=0) 2026-01-04T02:44:40.496368Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:148:2170], cookie=9418307149403785168) 2026-01-04T02:44:40.496956Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:153:2175], cookie=876046059279461726, name="Sem1", force=0) 2026-01-04T02:44:40.509107Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:153:2175], cookie=876046059279461726) 2026-01-04T02:44:40.509709Z node 3 :KESUS_TABLET DEBUG: 
tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:158:2180], cookie=1159849534273861478, name="Sem1", limit=42) 2026-01-04T02:44:40.509866Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem1" 2026-01-04T02:44:40.521958Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:158:2180], cookie=1159849534273861478) 2026-01-04T02:44:40.522549Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:163:2185], cookie=17681249997554195742, name="Sem1", force=0) 2026-01-04T02:44:40.522657Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 2 "Sem1" 2026-01-04T02:44:40.534942Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:163:2185], cookie=17681249997554195742) 2026-01-04T02:44:40.535591Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[3:168:2190], cookie=14059404677315737622, name="Sem1", force=0) 2026-01-04T02:44:40.548134Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[3:168:2190], cookie=14059404677315737622) 2026-01-04T02:44:41.017619Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:41.017733Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:41.037388Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:41.039488Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:41.063870Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:41.064447Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=12268510931484839296, session=0, seqNo=0) 2026-01-04T02:44:41.064603Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:44:41.076902Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=12268510931484839296, session=1) 2026-01-04T02:44:41.077253Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=1224436129179604474, session=0, seqNo=0) 2026-01-04T02:44:41.077397Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-04T02:44:41.089614Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=1224436129179604474, session=2) 2026-01-04T02:44:41.089933Z node 4 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=2 from sender=[4:137:2161], cookie=4849236824306685114 2026-01-04T02:44:41.090465Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[4:149:2171], cookie=1946054256348589910, name="Sem1", limit=3) 2026-01-04T02:44:41.090629Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-04T02:44:41.103927Z node 4 :KESUS_TABLET 
DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[4:149:2171], cookie=1946054256348589910) 2026-01-04T02:44:41.104228Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2161], cookie=112, name="Sem1") 2026-01-04T02:44:41.104327Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2161], cookie=112) 2026-01-04T02:44:41.104563Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2161], cookie=113, name="Sem1") 2026-01-04T02:44:41.104618Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2161], cookie=113) 2026-01-04T02:44:41.104767Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=14097052560816412051, session=2, seqNo=0) 2026-01-04T02:44:41.117026Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSession ... :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:42.506864Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:42.517510Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=129, session=1, semaphore="Sem2" count=2) 2026-01-04T02:44:42.530085Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=129) 2026-01-04T02:44:42.530510Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2161], cookie=130, name="Sem2") 2026-01-04T02:44:42.530597Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2161], cookie=130) 2026-01-04T02:44:42.530855Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=131, session=1, semaphore="Sem2" count=1) 2026-01-04T02:44:42.543112Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=131) 2026-01-04T02:44:42.543556Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2161], cookie=132, name="Sem2") 2026-01-04T02:44:42.543647Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2161], cookie=132) 2026-01-04T02:44:42.543946Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:137:2161], cookie=133, name="Sem2") 2026-01-04T02:44:42.544016Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:137:2161], cookie=133) 2026-01-04T02:44:42.842379Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:42.842482Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:42.866321Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 
2026-01-04T02:44:42.866440Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:42.881789Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:42.891732Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:135:2159], cookie=11277365045623671505, path="/Root1", config={ MaxUnitsPerSecond: 1000 }) 2026-01-04T02:44:42.892044Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root1" 2026-01-04T02:44:42.914883Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:135:2159], cookie=11277365045623671505) 2026-01-04T02:44:42.915508Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:144:2166], cookie=4151153458899365855, path="/Root1/Res", config={ }) 2026-01-04T02:44:42.915779Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root1/Res" 2026-01-04T02:44:42.927834Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:144:2166], cookie=4151153458899365855) 2026-01-04T02:44:42.928434Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:149:2171], cookie=11814277941496092552, path="/Root2", config={ MaxUnitsPerSecond: 1000 }) 2026-01-04T02:44:42.928623Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root2" 2026-01-04T02:44:42.940828Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:149:2171], cookie=11814277941496092552) 2026-01-04T02:44:42.941446Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:154:2176], cookie=14248384788769476743, path="/Root2/Res", config={ }) 2026-01-04T02:44:42.941651Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 4 "Root2/Res" 2026-01-04T02:44:42.956329Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:154:2176], cookie=14248384788769476743) 2026-01-04T02:44:42.957045Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:159:2181], cookie=13296003751559650353, path="/Root2/Res/Subres", config={ }) 2026-01-04T02:44:42.957307Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 5 "Root2/Res/Subres" 2026-01-04T02:44:42.969461Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:159:2181], cookie=13296003751559650353) 2026-01-04T02:44:42.970791Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:164:2186]. Cookie: 8430615866426110811. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-04T02:44:42.970865Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:164:2186], cookie=8430615866426110811) 2026-01-04T02:44:43.013086Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:164:2186]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-01-04T02:44:43.065323Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:164:2186]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-01-04T02:44:43.096562Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:164:2186]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-01-04T02:44:43.097314Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:172:2190]. Cookie: 17539181686373573498. Data: { ResourceCounters { ResourcePath: "Root2/Res" } ResourceCounters { ResourcePath: "Root2/Res/Subres" } ResourceCounters { ResourcePath: "Root2" } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2026-01-04T02:44:43.098250Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:175:2193]. Cookie: 11655210150091855744. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-04T02:44:43.098324Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:175:2193], cookie=11655210150091855744) 2026-01-04T02:44:43.140220Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:175:2193]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-01-04T02:44:43.181968Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:175:2193]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 100 StateNotification { Status: SUCCESS } } } 2026-01-04T02:44:43.182718Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:181:2197]. Cookie: 12230409713245178637. 
Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 200 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 200 } ResourceCounters { ResourcePath: "Root2" Allocated: 200 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 300 } ResourceCounters { ResourcePath: "Root1" Allocated: 300 } } 2026-01-04T02:44:43.183370Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:164:2186]. Cookie: 18315568510175393046. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root1/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-04T02:44:43.183421Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:164:2186], cookie=18315568510175393046) 2026-01-04T02:44:43.183970Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:175:2193]. Cookie: 948024614726558377. Data: { Results { ResourceId: 5 Error { Status: SUCCESS } EffectiveProps { ResourceId: 5 ResourcePath: "Root2/Res/Subres" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 1000 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-04T02:44:43.184015Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:175:2193], cookie=948024614726558377) 2026-01-04T02:44:43.215360Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:164:2186]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 Amount: 20 StateNotification { Status: SUCCESS } } } 2026-01-04T02:44:43.215510Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:175:2193]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 5 Amount: 50 StateNotification { Status: SUCCESS } } } 2026-01-04T02:44:43.216279Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:313: [72057594037927937] Send TEvGetQuoterResourceCountersResult to [5:188:2204]. Cookie: 6328208322316003431. 
Data: { ResourceCounters { ResourcePath: "Root2/Res" Allocated: 250 } ResourceCounters { ResourcePath: "Root2/Res/Subres" Allocated: 250 } ResourceCounters { ResourcePath: "Root2" Allocated: 250 } ResourceCounters { ResourcePath: "Root1/Res" Allocated: 320 } ResourceCounters { ResourcePath: "Root1" Allocated: 320 } } |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-dbadmin >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-system >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-system >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi >> TKeyValueTest::TestIncorrectRequestThenResponseError >> TKeyValueTest::TestWriteDeleteThenReadRemaining >> TKeyValueTest::TestRewriteThenLastValueNewApi >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-clusteradmin >> TPQTest::Read_From_Different_Zones_What_Was_Written_With_Gaps [GOOD] >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireBeforeTimeoutViaModeChange [GOOD] Test command err: 2026-01-04T02:44:43.538849Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:43.539027Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:43.555202Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:43.555337Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:43.581088Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:43.582089Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:134:2159], cookie=7963185855905181710, session=0, seqNo=0) 2026-01-04T02:44:43.582290Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:44:43.605263Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:134:2159], cookie=7963185855905181710, session=1) 2026-01-04T02:44:43.605592Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:135:2160], 
cookie=6956997240284445268, session=0, seqNo=0) 2026-01-04T02:44:43.605722Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-04T02:44:43.618284Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:135:2160], cookie=6956997240284445268, session=2) 2026-01-04T02:44:43.619475Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:134:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-04T02:44:43.619647Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-04T02:44:43.619752Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-04T02:44:43.631850Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:134:2159], cookie=111) 2026-01-04T02:44:43.632139Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:134:2159], cookie=112, session=1, semaphore="Lock2" count=1) 2026-01-04T02:44:43.632246Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2026-01-04T02:44:43.632305Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2026-01-04T02:44:43.644473Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:134:2159], cookie=112) 2026-01-04T02:44:43.644930Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:134:2159], cookie=333, name="Lock1") 2026-01-04T02:44:43.645032Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2026-01-04T02:44:43.645241Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:135:2160], cookie=222, session=2, semaphore="Lock1" count=1) 2026-01-04T02:44:43.645355Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 3 "Lock1" 2026-01-04T02:44:43.645451Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2026-01-04T02:44:43.645582Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:135:2160], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2026-01-04T02:44:43.657922Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:134:2159], cookie=333) 2026-01-04T02:44:43.658025Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:135:2160], cookie=222) 2026-01-04T02:44:43.658078Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:135:2160], cookie=223) 2026-01-04T02:44:43.658409Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:134:2159], cookie=334, name="Lock2") 2026-01-04T02:44:43.658534Z node 1 :KESUS_TABLET 
DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2026-01-04T02:44:43.658604Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-01-04T02:44:43.671095Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:134:2159], cookie=334) 2026-01-04T02:44:43.671725Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:164:2186], cookie=13123687904210225250, name="Lock1") 2026-01-04T02:44:43.671878Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:164:2186], cookie=13123687904210225250) 2026-01-04T02:44:43.672350Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:167:2189], cookie=1904349002006396557, name="Lock2") 2026-01-04T02:44:43.672420Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:167:2189], cookie=1904349002006396557) 2026-01-04T02:44:43.687915Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:43.688050Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:43.688598Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:43.688985Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:43.727036Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:43.727218Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-01-04T02:44:43.727277Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 3 "Lock1" queue: next order #3 session 2 2026-01-04T02:44:43.727616Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:206:2219], cookie=16902041373879057027, name="Lock1") 2026-01-04T02:44:43.727732Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:206:2219], cookie=16902041373879057027) 2026-01-04T02:44:43.728421Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:214:2226], cookie=10442798204862671473, name="Lock2") 2026-01-04T02:44:43.728495Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:214:2226], cookie=10442798204862671473) 2026-01-04T02:44:44.147572Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:44.147651Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:44.159650Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:44.159745Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:44.193480Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:44.194118Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] 
TTxSessionAttach::Execute (sender=[2:136:2161], cookie=2324431055709762309, session=0, seqNo=0) 2026-01-04T02:44:44.194223Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:44:44.206254Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:136:2161], cookie=2324431055709762309, session=1) 2026-01-04T02:44:44.206565Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2162], cookie=467133528867136911, session=0, seqNo=0) 2026-01-04T02:44:44.206713Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-04T02:44:44.218620Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2162], cookie=467133528867136911, session=2) 2026-01-04T02:44:44.220156Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-04T02:44:44.220293Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-04T02:44:44.220379Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-04T02:44:44.232118Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=111) 2026-01-04T02:44:44.232363Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=112, session=1, semaphore="Lock2" count=1) 2026-01-04T02:44:44.232467Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2026-01-04T02:44:44.232523Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2026-01-04T02:44:44.244339Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=112) 2026-01-04T02:44:44.244739Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=333, session=1, semaphore="Lock1" count=1) 2026-01-04T02:44:44.244990Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=222, session=2, semaphore="Lock1" count=1) 2026-01-04T02:44:44.245089Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2026-01-04T02:44:44.245191Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2026-01-04T02:44:44.257292Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=333) 2026-01-04T02:44:44.257376Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=222) 2026-01-04T02:44:44.257410Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] 
TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=223) 2026-01-04T02:44:44.257927Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:164:2186], cookie=14794366499794199206, name="Lock1") 2026-01-04T02:44:44.258010Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:164:2186], cookie=14794366499794199206) 2026-01-04T02:44:44.258393Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:167:2189], cookie=15638960569743996728, name="Lock2") 2026-01-04T02:44:44.258456Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:167:2189], cookie=15638960569743996728) 2026-01-04T02:44:44.258824Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:170:2192], cookie=12393739616919854785, name="Lock1") 2026-01-04T02:44:44.258886Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:170:2192], cookie=12393739616919854785) 2026-01-04T02:44:44.259251Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:173:2195], cookie=203109335842994393, name="Lock2") 2026-01-04T02:44:44.259310Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:173:2195], cookie=203109335842994393) 2026-01-04T02:44:44.259542Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=444, session=2, semaphore="Lock2" count=1) 2026-01-04T02:44:44.259676Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-01-04T02:44:44.271456Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=444) 2026-01-04T02:44:44.271900Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:178:2200], cookie=7098655196602684614, name="Lock2") 2026-01-04T02:44:44.271974Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:178:2200], cookie=7098655196602684614) 2026-01-04T02:44:44.272343Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:181:2203], cookie=7559340798496431955, name="Lock2") 2026-01-04T02:44:44.272412Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:181:2203], cookie=7559340798496431955) 2026-01-04T02:44:44.282207Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:44.282301Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:44.282650Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:44.283023Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:44.330032Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:44.330193Z node 2 
:KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-04T02:44:44.330240Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2026-01-04T02:44:44.330273Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2026-01-04T02:44:44.330299Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-01-04T02:44:44.330644Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:220:2233], cookie=6790787452068174543, name="Lock1") 2026-01-04T02:44:44.330735Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:220:2233], cookie=6790787452068174543) 2026-01-04T02:44:44.331212Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:228:2240], cookie=1010751542046117959, name="Lock2") 2026-01-04T02:44:44.331275Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:228:2240], cookie=1010751542046117959) |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> KeyValueReadStorage::ReadRangeOk1Key [GOOD] >> KeyValueReadStorage::ReadRangeOk >> KeyValueReadStorage::ReadRangeOk [GOOD] >> KeyValueReadStorage::ReadRangeNoData [GOOD] >> TKeyValueTest::TestCopyRangeWorks >> TKeyValueTest::TestBasicWriteRead >> TKeyValueTest::TestIncorrectRequestThenResponseError [GOOD] >> TKeyValueTest::TestIncrementalKeySet >> TKeyValueTest::TestWrite200KDeleteThenResponseError >> VectorIndexBuildTest::Metering_ServerLessDB-smallScanBuffer-true [GOOD] >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-false >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-dbadmin >> TKeyValueTest::TestVacuumOnEmptyTablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadRangeNoData [GOOD] Test command err: 2026-01-04T02:44:45.372724Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-04T02:44:45.375849Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2026-01-04T02:44:45.381618Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2026-01-04T02:44:45.381673Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2026-01-04T02:44:45.386337Z 1 00h00m00.000000s :KEYVALUE INFO: {KV320@keyvalue_storage_read_request.cpp:124} Inline read request KeyValue# 1 Status# OK 2026-01-04T02:44:45.386397Z 1 00h00m00.000000s :KEYVALUE DEBUG: {KV322@keyvalue_storage_read_request.cpp:136} Expected OK or 
UNKNOWN and given OK readCount# 0 2026-01-04T02:44:45.386434Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TPQTest::PQ_Tablet_Removes_Blobs_Asynchronously [GOOD] >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-anonymous >> TKeyValueTest::TestWriteReadPatchRead >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] >> TKeyValueTest::TestWriteReadPatchRead [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi >> TSchemeShardSysNames::ESchemeOpMkDir-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNamesCore::NameListIsUnchanged [GOOD] >> TSchemeShardSysNamesCore::PrefixListIsUnchanged [GOOD] >> TSchemeShardSysNamesCore::ExceptionsListIsUnchanged [GOOD] >> TSchemeShardSysNamesCore::SystemNamesForbiddenForAnonymousUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesForbiddenForOrdinaryUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesAllowedForAdminUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesAllowedForAdminGroup [GOOD] >> TSchemeShardSysNamesCore::SystemNamesAllowedForSystemUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForAnonymousUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForOrdinaryUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForAdminUser [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForAdminGroup [GOOD] >> TSchemeShardSysNamesCore::SystemPrefixesForbiddenForSystemUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForAnonymousUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForOrdinaryUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForAdminUser [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForAdminGroup [GOOD] >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForSystemUser [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks >> test_sql_streaming.py::test[watermarks-watermarks_drop-default.txt] [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestDoubleFailureStripe4Plus2 [GOOD] Test command err: 2026-01-04T02:44:44.558392Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 1 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001a80/r3tmp/tmpJM1r96//vdisk_bad_0/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 1 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1 2026-01-04T02:44:44.560925Z :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:407} PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001a80/r3tmp/tmpJM1r96//vdisk_bad_1/pdisk.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 123 PDiskId# 2 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 1 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 2 2026-01-04T02:44:44.590340Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1 VDISK[0:_:0:0:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1 Can not be initialized! 
Format is incomplete. Magic sector is not present on disk. Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:44:44.590512Z :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 2 VDISK[0:_:0:1:0]: (0) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 2 Can not be initialized! Format is incomplete. Magic sector is not present on disk. 
Maybe wrong PDiskKey" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::CompactionSplitGranuleStrKey_PKString [GOOD] Test command err: 2026-01-04T02:43:22.026381Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:22.064037Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:22.064313Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:22.072205Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:22.072493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:22.072768Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:22.072925Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:22.073042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:22.073181Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:22.073315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:22.073437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:22.073582Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:22.073736Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:22.073895Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:22.074046Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:22.074166Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:22.107253Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:22.107401Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:22.107459Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:22.107685Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:22.107884Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:22.107994Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:22.108053Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:22.108194Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:22.108281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:22.108334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:22.108376Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:22.108588Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:22.108685Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:22.108741Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:22.108776Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 
2026-01-04T02:43:22.108918Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:22.108986Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:22.109051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:22.109144Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:22.109203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:22.109245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:22.109280Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:22.109327Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:22.109383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:22.109439Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:22.109701Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:22.109766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:22.109829Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:22.109968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:22.110024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:22.110061Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 
2026-01-04T02:43:22.110121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:22.110164Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:22.110217Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:43:22.110281Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:22.110357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:22.110401Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:22.110613Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:22.110668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLAS ... gines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:portionsLoadingTime=4724; 2026-01-04T02:44:44.028776Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;PRECHARGE:granule_finished_commonLoadingTime=12; 2026-01-04T02:44:44.030033Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;load_stage_name=EXECUTE:granules/granule;fline=common_data.cpp:29;EXECUTE:granule_finished_commonLoadingTime=1179; 2026-01-04T02:44:44.030122Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;load_stage_name=EXECUTE:column_engines/granules;fline=common_data.cpp:29;EXECUTE:granuleLoadingTime=6302; 2026-01-04T02:44:44.030183Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:granulesLoadingTime=6466; 2026-01-04T02:44:44.030256Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;PRECHARGE:finishLoadingTime=14; 2026-01-04T02:44:44.030357Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;load_stage_name=EXECUTE:column_engines;fline=common_data.cpp:29;EXECUTE:finishLoadingTime=51; 2026-01-04T02:44:44.030428Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:column_enginesLoadingTime=7109; 2026-01-04T02:44:44.030624Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tx_controllerLoadingTime=126; 2026-01-04T02:44:44.030812Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tx_controllerLoadingTime=129; 2026-01-04T02:44:44.031064Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:operations_managerLoadingTime=185; 2026-01-04T02:44:44.031307Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:operations_managerLoadingTime=182; 2026-01-04T02:44:44.032154Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:storages_managerLoadingTime=787; 2026-01-04T02:44:44.033287Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:storages_managerLoadingTime=1071; 2026-01-04T02:44:44.033373Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:db_locksLoadingTime=11; 2026-01-04T02:44:44.033427Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:db_locksLoadingTime=11; 2026-01-04T02:44:44.033472Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:bg_sessionsLoadingTime=6; 2026-01-04T02:44:44.033569Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:bg_sessionsLoadingTime=52; 2026-01-04T02:44:44.033624Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:sharing_sessionsLoadingTime=7; 2026-01-04T02:44:44.033720Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:sharing_sessionsLoadingTime=60; 2026-01-04T02:44:44.033791Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:in_flight_readsLoadingTime=10; 2026-01-04T02:44:44.033913Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:in_flight_readsLoadingTime=53; 2026-01-04T02:44:44.034038Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: 
tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;PRECHARGE:tiers_managerLoadingTime=73; 2026-01-04T02:44:44.034156Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;load_stage_name=EXECUTE:composite_init;fline=common_data.cpp:29;EXECUTE:tiers_managerLoadingTime=71; 2026-01-04T02:44:44.034212Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;event=initialize_shard;fline=common_data.cpp:29;EXECUTE:composite_initLoadingTime=19590; 2026-01-04T02:44:44.034413Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: Index: tables 1 inserted {blob_bytes=126218384;raw_bytes=174224032;count=23;records=1955000} compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} s-compacted {blob_bytes=0;raw_bytes=0;count=0;records=0} inactive {blob_bytes=0;raw_bytes=0;count=0;records=0} evicted {blob_bytes=0;raw_bytes=0;count=0;records=0} at tablet 9437184 2026-01-04T02:44:44.034523Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];process=SwitchToWork;fline=columnshard.cpp:78;event=initialize_shard;step=SwitchToWork; 2026-01-04T02:44:44.034597Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];process=SwitchToWork;fline=columnshard.cpp:81;event=initialize_shard;step=SignalTabletActive; 2026-01-04T02:44:44.034682Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];process=SwitchToWork;fline=columnshard_impl.cpp:1532;event=OnTieringModified;path_id=NO_VALUE_OPTIONAL; 2026-01-04T02:44:44.034727Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];process=SwitchToWork;fline=column_engine_logs.cpp:543;event=OnTieringModified;new_count_tierings=0; 2026-01-04T02:44:44.034840Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:44:44.034932Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:44:44.034976Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:44:44.035056Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:44:44.035132Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:44:44.035186Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:44.035256Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:44.035360Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:44:44.035531Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: fline=columnshard.cpp:505;event=TEvReportBaseStatistics;ReportBaseStatisticsPeriodMs=60000;scheduleDuration=60.150000s; 2026-01-04T02:44:44.037633Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: 
tablet_id=9437184;self_id=[1:1421:3400];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=9437184; 2026-01-04T02:44:44.037819Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;fline=columnshard.cpp:260;event=TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184; 2026-01-04T02:44:44.037887Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:448;event=EnqueueBackgroundActivities;periodic=0; 2026-01-04T02:44:44.037949Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:693;message=tiling compaction: actualize called; 2026-01-04T02:44:44.037986Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=tiling.cpp:710;message=tiling compaction: returning DoGetUsefulMetric zero; 2026-01-04T02:44:44.038032Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:260;event=StartCleanup;portions_count=0; 2026-01-04T02:44:44.038098Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=column_engine_logs.cpp:341;event=StartCleanup;portions_count=0;portions_prepared=0;drop=0;skip=0;portions_counter=0;chunks=0;limit=0;max_portions=1000;max_chunks=500000; 2026-01-04T02:44:44.038152Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:803;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:44.038206Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:835;background=cleanup;skip_reason=no_changes; 2026-01-04T02:44:44.038293Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;queue=ttl;external_count=0;fline=granule.cpp:172;event=skip_actualization;waiting=0.999000s; 2026-01-04T02:44:44.038343Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvPeriodicWakeup;tablet_id=9437184;fline=columnshard_impl.cpp:757;background=ttl;skip_reason=no_changes; 2026-01-04T02:44:44.038715Z node 1 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:515;event=TEvReportExecutorStatistics;ReportExecutorStatisticsPeriodMs=60000;scheduleDuration=60.191000s; 2026-01-04T02:44:44.038771Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:1421:3400];ev=NKikimr::NColumnShard::TEvPrivate::TEvReportExecutorStatistics;fline=columnshard.cpp:447;No CurrentSchemeShardId=9437184; >> TKeyValueTest::TestIncrementalKeySet [GOOD] >> TKeyValueTest::TestGetStatusWorksNewApi >> 
TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-system |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNamesCore::SystemNamesExceptionsAllowedForSystemUser [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-04T02:44:10.969358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:10.969433Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:10.969471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:10.969514Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:10.969546Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:10.969573Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:10.969631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:10.969698Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:10.970557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:10.970801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:11.098354Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:44:11.098429Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:11.099261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:11.108567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:11.112706Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:11.112890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:11.116937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:11.117114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:11.117781Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:11.118057Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:11.122559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:11.122746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:11.123902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:11.123946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:11.124010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:11.124057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:11.124098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:11.124244Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:11.131158Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:239:2058] recipient: [1:15:2062] 2026-01-04T02:44:11.242283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:11.242549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:11.242745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:44:11.242792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:44:11.243011Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:44:11.243061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:11.245091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:11.245307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:44:11.245537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:11.245614Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:44:11.245651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:44:11.245676Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:44:11.247058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:11.247104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:44:11.247140Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:44:11.248349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:11.248385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:11.248413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:11.248458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:44:11.251061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:11.252444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:44:11.252606Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:44:11.253307Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:11.253413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:11.253465Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:11.253632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:44:11.253665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:11.253786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:11.253859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:44:11.255306Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 
as 2 2026-01-04T02:44:47.247073Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-01-04T02:44:47.247134Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-01-04T02:44:47.247168Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-01-04T02:44:47.247207Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 5 2026-01-04T02:44:47.247237Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 3 2026-01-04T02:44:47.249332Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:44:47.249438Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:44:47.249480Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:44:47.249522Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-01-04T02:44:47.249562Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-01-04T02:44:47.250646Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:44:47.250731Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:44:47.250759Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:44:47.250782Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-01-04T02:44:47.250806Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-01-04T02:44:47.251681Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 5 PathOwnerId: 
72057594046678944, cookie: 202 2026-01-04T02:44:47.251744Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 5 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:44:47.251784Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:44:47.251819Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 5 2026-01-04T02:44:47.251860Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-01-04T02:44:47.253093Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:44:47.253198Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:44:47.253262Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:44:47.253319Z node 13 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 3 2026-01-04T02:44:47.253352Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 1 2026-01-04T02:44:47.253426Z node 13 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-01-04T02:44:47.255968Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-04T02:44:47.256078Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-04T02:44:47.257613Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-04T02:44:47.257692Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-01-04T02:44:47.258651Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-01-04T02:44:47.258695Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-01-04T02:44:47.259921Z node 13 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-01-04T02:44:47.260004Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-01-04T02:44:47.260031Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [13:2746:4735] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-01-04T02:44:47.260898Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-01-04T02:44:47.260931Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-01-04T02:44:47.260987Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-01-04T02:44:47.261003Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-01-04T02:44:47.261062Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-01-04T02:44:47.261086Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-01-04T02:44:47.261133Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-01-04T02:44:47.261150Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-01-04T02:44:47.261208Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-01-04T02:44:47.261227Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-01-04T02:44:47.262583Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-01-04T02:44:47.262688Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-01-04T02:44:47.262752Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-01-04T02:44:47.262783Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [13:2749:4738] 2026-01-04T02:44:47.262905Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-01-04T02:44:47.262925Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [13:2749:4738] 2026-01-04T02:44:47.263053Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-01-04T02:44:47.263101Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 
72057594046678944 2026-01-04T02:44:47.263162Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-01-04T02:44:47.263179Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [13:2749:4738] 2026-01-04T02:44:47.263246Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-01-04T02:44:47.263273Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-01-04T02:44:47.263288Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [13:2749:4738] 2026-01-04T02:44:47.263361Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-01-04T02:44:47.263379Z node 13 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [13:2749:4738] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[watermarks-watermarks_drop-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadWriteSameTopic-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-system |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest |93.0%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> TConsoleTests::TestRemoveSharedTenantWoServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TableDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:44:35.742194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:35.742302Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:35.742347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:35.742381Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: 
OperationsProcessing config: using default configuration 2026-01-04T02:44:35.742419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:35.742447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:35.742497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:35.742560Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:35.743397Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:35.743670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:35.821369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:35.821443Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:35.840621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:35.840889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:35.841106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:35.852140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:35.852419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:35.853291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:35.853590Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:35.863147Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:35.863383Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:35.864617Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:35.864686Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:35.864804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:35.864855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 
2026-01-04T02:44:35.864963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:35.865157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:36.002212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.003054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.003171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.003228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.003266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.003314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.003350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:44:36.003387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.003440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.003500Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.003537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.003579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.003644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.003708Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:44:36.003785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... nId: 107:0, at schemeshard: 72057594046678944 2026-01-04T02:44:48.059224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-04T02:44:48.060780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:48.060851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:44:48.061050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 107, path id: [OwnerId: 72057594046678944, LocalPathId: 40] 2026-01-04T02:44:48.061166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:48.061201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 107, path id: 38 2026-01-04T02:44:48.061251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 107, path id: 40 2026-01-04T02:44:48.061683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-04T02:44:48.061736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 107:0 ProgressState at tablet: 72057594046678944 2026-01-04T02:44:48.061826Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-04T02:44:48.061866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 107:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-01-04T02:44:48.061900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 107:0 129 -> 240 2026-01-04T02:44:48.062682Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2026-01-04T02:44:48.062775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 14 PathOwnerId: 72057594046678944, cookie: 107 2026-01-04T02:44:48.062819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2026-01-04T02:44:48.062871Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at 
schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 14 2026-01-04T02:44:48.062912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:44:48.063395Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-01-04T02:44:48.063461Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 107 2026-01-04T02:44:48.063494Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 107 2026-01-04T02:44:48.063524Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 107, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-01-04T02:44:48.063550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-01-04T02:44:48.063606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 0/1, is published: true 2026-01-04T02:44:48.066117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 107:0, at schemeshard: 72057594046678944 2026-01-04T02:44:48.066168Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 107:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:44:48.066425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-04T02:44:48.066553Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-04T02:44:48.066598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-04T02:44:48.066631Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#107:0 progress is 1/1 2026-01-04T02:44:48.066657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-04T02:44:48.066690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 107, ready parts: 1/1, is published: true 2026-01-04T02:44:48.066717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 107 ready parts: 1/1 2026-01-04T02:44:48.066767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 107:0 2026-01-04T02:44:48.066803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 107:0 2026-01-04T02:44:48.066895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-04T02:44:48.067348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:48.067382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 0, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:44:48.067988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-04T02:44:48.068265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 107 2026-01-04T02:44:48.069393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:48.069435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 0, path id: 38 2026-01-04T02:44:48.069967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 15 PathOwnerId: 72057594046678944, cookie: 0 TestWaitNotification wait txId: 107 2026-01-04T02:44:48.070370Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 107: send EvNotifyTxCompletion 2026-01-04T02:44:48.070416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 107 2026-01-04T02:44:48.070849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 107, at schemeshard: 72057594046678944 2026-01-04T02:44:48.070960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 107: got EvNotifyTxCompletionResult 2026-01-04T02:44:48.071004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 107: satisfy waiter [1:1397:3323] TestWaitNotification: OK eventTxId 107 2026-01-04T02:44:48.071536Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:44:48.071749Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 245us result status StatusSuccess 2026-01-04T02:44:48.072251Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 9 SubDomainVersion: 1 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-ordinaryuser >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents >> PersQueueSdkReadSessionTest::SettingsValidation [GOOD] >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty >> KqpLimits::TooBigQuery+useSink [GOOD] >> KqpLimits::TooBigQuery-useSink >> TKeyValueCollectorTest::TestKeyValueCollectorEmpty [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMany >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEvents [GOOD] >> TKeyValueTest::TestWriteLongKey >> TKeyValueCollectorTest::TestKeyValueCollectorMany [GOOD] >> KeyValueReadStorage::ReadWithTwoPartsOk >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] >> TKeyValueTest::TestRewriteThenLastValue >> KeyValueReadStorage::ReadError >> test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [FAIL] >> test_sql_streaming.py::test[pq-ReadTopic-default.txt] >> KeyValueReadStorage::ReadError [GOOD] >> KeyValueReadStorage::ReadErrorWithWrongGroupId [GOOD] >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadWithTwoPartsOk [GOOD] Test command err: 2026-01-04T02:44:50.213097Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 2 ErrorReason# ReadRequestCookie# 0 2026-01-04T02:44:50.215532Z 1 00h00m00.000000s :KEYVALUE INFO: 
{KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-dbadmin >> DataShardVolatile::DistributedOutOfOrderFollowerConsistency [GOOD] >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadErrorWithUncorrectCookie [GOOD] Test command err: 2026-01-04T02:44:50.561843Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# ERROR ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-04T02:44:50.561932Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV316@keyvalue_storage_read_request.cpp:272} Unexpected EvGetResult. KeyValue# 1 Status# ERROR Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1767494690560 ErrorReason# 2026-01-04T02:44:50.570538Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 2 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-04T02:44:50.570649Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV318@keyvalue_storage_read_request.cpp:242} Received EvGetResult from an unexpected storage group. KeyValue# 1 GroupId# 2 ExpecetedGroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 1767494690570 ErrorReason# 2026-01-04T02:44:50.576678Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-04T02:44:50.576741Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV319@keyvalue_storage_read_request.cpp:224} Received EvGetResult with an unexpected cookie. 
KeyValue# 1 Cookie# 1000 SentGets# 1 GroupId# 3 Status# OK Deadline# 18446744073709551 Now# 0 GotAt# 1767494690576 ErrorReason# |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> test_sql_streaming.py::test[hop-GroupByHopTimeExtractorUnusedColumns-default.txt] [FAIL] >> test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] >> TKeyValueTest::TestWriteDeleteThenReadRemaining [GOOD] >> TKeyValueTest::TestWriteAndRenameWithoutCreationUnixTimeNewApi >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookup [GOOD] >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead >> TKeyValueTest::TestWriteAndRenameWithoutCreationUnixTimeNewApi [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-system >> TKeyValueCollectorTest::TestKeyValueCollectorSingle >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-system >> TKeyValueCollectorTest::TestKeyValueCollectorSingle [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteAndRenameWithoutCreationUnixTimeNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:451:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:454:2057] recipient: [2:453:2379] Leader for TabletID 72057594037927937 is [2:455:2380] sender: [2:456:2057] recipient: [2:453:2379] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:455:2380] Leader for TabletID 72057594037927937 is [2:455:2380] sender: [2:571:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:451:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:454:2057] recipient: [3:453:2379] Leader for TabletID 72057594037927937 is [3:455:2380] sender: [3:456:2057] recipient: [3:453:2379] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:455:2380] Leader for TabletID 72057594037927937 is [3:455:2380] sender: [3:571:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:452:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:455:2057] recipient: [4:454:2379] Leader for TabletID 72057594037927937 is [4:456:2380] sender: [4:457:2057] recipient: [4:454:2379] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! 
new actor is[4:456:2380] Leader for TabletID 72057594037927937 is [4:456:2380] sender: [4:572:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorSingleWithOneError [GOOD] >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] |93.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueCollectorTest::TestKeyValueCollectorMultiple [GOOD] |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateRtmrVolume-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateExtSubDomain-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-anonymous >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] >> TColumnShardTestReadWrite::ReadGroupBy-SimpleReader [GOOD] >> THealthCheckTest::TestStateStorageBlue [GOOD] >> THealthCheckTest::TestStateStorageRed >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk >> BasicUsage::WriteAndReadSomeMessagesWithSyncCompression [GOOD] >> CompressExecutor::TestReorderedExecutor ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: 
[4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:78:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:81:2057] recipient: [4:80:2112] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:83:2057] recipient: [4:80:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:82:2113] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:198:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvKeyValue::TEvGetStorageChannelStatus ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:78:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:81:2057] recipient: [5:80:2112] Leader for TabletID 72057594037927937 is [5:82:2113] sender: [5:83:2057] recipient: [5:80:2112] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:82:2113] Leader for TabletID 72057594037927937 is [5:82:2113] sender: [5:198:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:79:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:81:2112] Leader for TabletID 72057594037927937 is [6:83:2113] sender: [6:84:2057] recipient: [6:81:2112] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:83:2113] Leader for TabletID 72057594037927937 is [6:83:2113] sender: [6:199:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-system >> BasicUsage::MaxByteSizeEqualZero [GOOD] >> BasicUsage::BrokenCredentialsProvider >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-system >> KeyValueReadStorage::ReadOk [GOOD] >> KeyValueReadStorage::ReadNotWholeBlobOk [GOOD] >> KeyValueReadStorage::ReadOneItemError [GOOD] >> TKeyValueTest::TestBasicWriteRead [GOOD] >> TKeyValueTest::TestBasicWriteReadOverrun >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-ordinaryuser >> TConsoleTests::TestRemoveSharedTenantWithServerlessTenants [GOOD] >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> KeyValueReadStorage::ReadOneItemError [GOOD] Test command err: 2026-01-04T02:44:55.411603Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-04T02:44:55.413945Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2026-01-04T02:44:55.418722Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-04T02:44:55.418778Z 1 00h00m00.000000s :KEYVALUE INFO: {KV34@keyvalue_storage_read_request.cpp:494} Send respose KeyValue# 1 Status# RSTATUS_OK ReadRequestCookie# 0 2026-01-04T02:44:55.423743Z 1 00h00m00.000000s :KEYVALUE INFO: {KV20@keyvalue_storage_read_request.cpp:211} Received GetResult KeyValue# 1 GroupId# 3 Status# OK ResponseSz# 1 ErrorReason# ReadRequestCookie# 0 2026-01-04T02:44:55.423847Z 1 00h00m00.000000s :KEYVALUE ERROR: {KV317@keyvalue_storage_read_request.cpp:312} Unexpected EvGetResult. 
KeyValue# 1 Status# OK Id# [1:2:3:2:0:1:0] ResponseStatus# ERROR Deadline# 586524-01-19T08:01:49.551615Z Now# 1970-01-01T00:00:00.000000Z SentAt# 1970-01-01T00:00:00.000000Z GotAt# 2026-01-04T02:44:55.423578Z ErrorReason# |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateSolomonVolume-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-anonymous >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi >> TPQTest::PQ_Tablet_Does_Not_Remove_The_Blob_Until_The_Reading_Is_Complete [GOOD] >> TPQTest::IncompleteProxyResponse >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-dbadmin >> TKeyValueTest::TestRenameWorks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/ut_rw/unittest >> TColumnShardTestReadWrite::ReadGroupBy-SimpleReader [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2100;fline=native.cpp:110;event=serialize;size=8328;columns=19; -- group by key: 0 2026-01-04T02:43:09.073159Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:110;event=initialize_shard;step=OnActivateExecutor; 2026-01-04T02:43:09.107614Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];fline=columnshard.cpp:129;event=initialize_shard;step=initialize_tiring_finished; 2026-01-04T02:43:09.107899Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Execute at tablet 9437184 2026-01-04T02:43:09.116196Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:43:09.116476Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:43:09.116738Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:43:09.116862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:43:09.116985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:43:09.117083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:43:09.117204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:43:09.117330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:43:09.117484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:43:09.117623Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:43:09.117737Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:43:09.117910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:43:09.118052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;self_id=[1:128:2159];tablet_id=9437184;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:43:09.149122Z node 1 :TX_COLUMNSHARD DEBUG: ctor_logger.h:56: TxInitSchema.Complete at tablet 9437184 2026-01-04T02:43:09.149296Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=columnshard__init.cpp:131;step=TTxUpdateSchema.Execute_Start;details=normalizers_count=12;current_normalizer=CLASS_NAME=Granules; 2026-01-04T02:43:09.149360Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=1;type=Granules; 2026-01-04T02:43:09.149567Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:127;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:09.149729Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:43:09.149823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:43:09.149868Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=2;type=Chunks; 2026-01-04T02:43:09.149981Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=chunks.cpp:138;normalizer=TChunksNormalizer;message=0 chunks found; 2026-01-04T02:43:09.150043Z node 1 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:43:09.150085Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:43:09.150122Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=4;type=TablesCleaner; 2026-01-04T02:43:09.150357Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=normalizer.cpp:123;normalizer=TGranulesNormalizer;message=0 chunks found; 2026-01-04T02:43:09.150439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:43:09.150491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:43:09.150520Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=6;type=CleanGranuleId; 2026-01-04T02:43:09.150619Z node 1 :TX_COLUMNSHARD INFO: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=clean_granule.cpp:133;normalizer=TCleanGranuleIdNormalizer;message=0 chunks found; 2026-01-04T02:43:09.150668Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:43:09.150705Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:43:09.150731Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=9;type=GCCountersNormalizer; 2026-01-04T02:43:09.150779Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:43:09.150823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:43:09.150858Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=11;type=SyncPortionFromChunks; 2026-01-04T02:43:09.150904Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:43:09.150947Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:43:09.150981Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: 
tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=15;type=RestoreV1Chunks_V2; 2026-01-04T02:43:09.151212Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:43:09.151268Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:43:09.151298Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=16;type=RestoreV2Chunks; 2026-01-04T02:43:09.151464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:43:09.151517Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:43:09.151546Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=17;type=CleanDeprecatedSnapshot; 2026-01-04T02:43:09.151595Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:43:09.151665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:43:09.151711Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=18;type=RestoreV0ChunksMeta; 2026-01-04T02:43:09.151761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:43:09.151818Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:43:09.151860Z node 1 :TX_COLUMNSHARD NOTICE: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:156;event=normalizer_init;last=0;seq_id=19;type=CopyBlobIdsToV2; 2026-01-04T02:43:09.152013Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=9437184;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:43:09.152061Z node 1 :TX_COLUMNSHARD WAR ... 
-01-04T02:44:54.117677Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:224;stage=limit exhausted;limit=limits:(bytes=0;chunks=0);; 2026-01-04T02:44:54.117708Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;tablet_id=9437184;fline=scanner.cpp:53;event=build_next_interval; 2026-01-04T02:44:54.118113Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:44:54.118291Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:1;records_count:1;schema=100: binary 101: binary 102: binary 103: uint64;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:44:54.118323Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=plain_read_data.cpp:31;event=DoExtractReadyResults;result=0;count=0;finished=1; 2026-01-04T02:44:54.118426Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:255;stage=ready result;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;);columns=4;rows=1; 2026-01-04T02:44:54.118475Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:276;stage=data_format;batch_size=0;num_rows=1;batch_columns=100,101,102,103; 2026-01-04T02:44:54.118667Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[54:456:2468];bytes=26;rows=1;faults=0;finished=0;fault=0;schema=100: binary 101: binary 102: binary 103: uint64; 2026-01-04T02:44:54.118782Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: 
SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:298;stage=finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:44:54.118903Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:44:54.119002Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:44:54.119182Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:122;event=TEvScanDataAck;info=limits:(bytes=8388608;chunks=1);; 2026-01-04T02:44:54.119285Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:213;stage=start;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:44:54.119396Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:218;stage=scan iterator is 
finished;iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:44:54.119582Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: actor.cpp:457: Scan [54:457:2469] finished for tablet 9437184 2026-01-04T02:44:54.119976Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:463;event=scan_finish;compute_actor_id=[54:456:2468];stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["l_task_result"],"t":0.006},{"events":["f_ack"],"t":0.007},{"events":["l_ack","l_processing","l_ProduceResults","f_Finish","l_Finish"],"t":0.008}],"full":{"a":105567853,"name":"_full_task","f":105567853,"d_finished":0,"c":0,"l":105576435,"d":8582},"events":[{"name":"bootstrap","f":105568046,"d_finished":997,"c":1,"l":105569043,"d":997},{"a":105575980,"name":"ack","f":105574902,"d_finished":929,"c":1,"l":105575831,"d":1384},{"a":105575971,"name":"processing","f":105569165,"d_finished":2397,"c":3,"l":105575833,"d":2861},{"name":"ProduceResults","f":105568736,"d_finished":1651,"c":6,"l":105576223,"d":1651},{"a":105576226,"name":"Finish","f":105576226,"d_finished":0,"c":0,"l":105576435,"d":209},{"name":"task_result","f":105569176,"d_finished":1427,"c":2,"l":105574535,"d":1427}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:44:54.120026Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:394;event=send_data;compute_actor_id=[54:456:2468];bytes=0;rows=0;faults=0;finished=1;fault=0;schema=; 2026-01-04T02:44:54.120398Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: 
SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=actor.cpp:399;event=scan_finished;compute_actor_id=[54:456:2468];packs_sum=0;bytes=0;bytes_sum=0;rows=0;rows_sum=0;faults=0;finished=1;fault=0;stats={"p":[{"events":["f_bootstrap","f_ProduceResults"],"t":0},{"events":["l_bootstrap","f_processing","f_task_result"],"t":0.001},{"events":["l_task_result"],"t":0.006},{"events":["f_ack"],"t":0.007},{"events":["l_ProduceResults","f_Finish"],"t":0.008},{"events":["l_ack","l_processing","l_Finish"],"t":0.009}],"full":{"a":105567853,"name":"_full_task","f":105567853,"d_finished":0,"c":0,"l":105576875,"d":9022},"events":[{"name":"bootstrap","f":105568046,"d_finished":997,"c":1,"l":105569043,"d":997},{"a":105575980,"name":"ack","f":105574902,"d_finished":929,"c":1,"l":105575831,"d":1824},{"a":105575971,"name":"processing","f":105569165,"d_finished":2397,"c":3,"l":105575833,"d":3301},{"name":"ProduceResults","f":105568736,"d_finished":1651,"c":6,"l":105576223,"d":1651},{"a":105576226,"name":"Finish","f":105576226,"d_finished":0,"c":0,"l":105576875,"d":649},{"name":"task_result","f":105569176,"d_finished":1427,"c":2,"l":105574535,"d":1427}],"id":"9437184::2052"};iterator=ready_results:(count:0;records_count:0;);indexed_data:(CTX:{ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;};SCANNER:{S:{{finished:{1};internal:{};constructor:{{class_name=GENERAL_ORDERING::ascending;internal={{CC:0}};}};in_fly=0;type=FULL_SORTED;}};SP:[{{RESULT;IDX=0;FIN=1;}};]};SF:1;PR:0;); 2026-01-04T02:44:54.120468Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=stats.cpp:8;event=statistic;begin=2026-01-04T02:44:54.109010Z;index_granules=0;index_portions=1;index_batches=0;schema_columns=4;filter_columns=0;additional_columns=0;compacted_portions_bytes=0;inserted_portions_bytes=14056;committed_portions_bytes=0;data_filter_bytes=0;data_additional_bytes=0;delta_bytes=14056;selected_rows=0; 2026-01-04T02:44:54.120497Z node 54 :TX_COLUMNSHARD_SCAN DEBUG: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=read_context.h:196;event=scan_aborted;reason=unexpected on destructor; 2026-01-04T02:44:54.120602Z node 54 :TX_COLUMNSHARD_SCAN INFO: log.cpp:841: SelfId=[54:457:2469];TabletId=9437184;ScanId=0;TxId=100;ScanGen=0;task_identifier=;fline=context.h:99;fetching=ef=(column_ids=4;column_names=i32;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1,2,3,4;column_names=i16,i32,i8,ts;);;ff=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;program_input=(column_ids=4,17,18,19;column_names=i32,json,jsondoc,yson;);;; >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-clusteradmin >> TPQTest::IncompleteProxyResponse [GOOD] >> TPQTest::SmallMsgCompactificationWithRebootsTest |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/ut_rw/unittest >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-system [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-dbadmin >> TKesusTest::TestAcquireSemaphoreTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig >> TKesusTest::TestAcquireSemaphoreTimeoutTooBig [GOOD] >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk >> TKeyValueTest::TestWrite200KDeleteThenResponseError [GOOD] >> TKeyValueTest::TestVacuumWithMockDisk >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-ordinaryuser >> TKesusTest::TestAcquireSemaphoreTimeoutInfinite [GOOD] >> TKesusTest::TestAcquireSemaphoreRebootTimeout >> PersQueueSdkReadSessionTest::SpecifyClustersExplicitly [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-dbadmin >> TKeyValueTest::TestWrite200KDeleteThenResponseErrorNewApi [GOOD] >> TKeyValueTest::TestWriteAndRenameWithCreationUnixTime >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-dbadmin >> TPQTest::SmallMsgCompactificationWithRebootsTest [GOOD] >> DataShardVolatile::DistributedWriteRSNotAckedBeforeCommit [GOOD] >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-anonymous >> TKeyValueTest::TestWriteAndRenameWithCreationUnixTime [GOOD] >> TKesusTest::TestSessionTimeoutAfterDetach [GOOD] >> TKesusTest::TestSessionTimeoutAfterReboot >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateSubDomain-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteAndRenameWithCreationUnixTime [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] 2026-01-04T02:44:51.855345Z node 1 :KEYVALUE ERROR: keyvalue_state.cpp:3029: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return flase, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 Leader for TabletID 72057594037927937 is 
[0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] 2026-01-04T02:44:59.413449Z node 2 :KEYVALUE ERROR: keyvalue_state.cpp:3029: KeyValue# 72057594037927937 PrepareExecuteTransactionRequest return flase, Marker# KV73 Submsg# KeyValue# 72057594037927937 Can't delete Range, in DeleteRange, total limit of deletions per request (100000) reached, Marker# KV90 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/ut/unittest >> TPQTest::SmallMsgCompactificationWithRebootsTest [GOOD] Test command err: 2026-01-04T02:40:08.302774Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:107:2057] recipient: [1:105:2138] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:112:2057] recipient: [1:105:2138] 2026-01-04T02:40:08.369914Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72057594037927937] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T02:40:08.373632Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72057594037927937] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T02:40:08.373943Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:40:08.374011Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:40:08.374051Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72057594037927937] no config, start with empty partitions and default config 2026-01-04T02:40:08.374092Z node 1 :PQ_TX DEBUG: pq_impl.cpp:4978: [PQ: 72057594037927937] Txs.size=0, PlannedTxs.size=0 2026-01-04T02:40:08.374171Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:08.374240Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] IGNORE Leader for TabletID 72057594037927938 is [0:0:0] sender: [1:153:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:158:2057] recipient: [1:151:2172] Leader for TabletID 72057594037927937 is [1:111:2142] sender: [1:183:2057] recipient: [1:14:2061] 2026-01-04T02:40:08.390153Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:182:2195], now have 1 active actors on pipe 2026-01-04T02:40:08.390275Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:1289: [PQ: 72057594037927937] Handle TEvPersQueue::TEvUpdateConfig 2026-01-04T02:40:08.408319Z node 1 :PQ_TX DEBUG: pq_impl.cpp:1471: [PQ: 72057594037927937] Config update version 1(current 0) received from actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: 
"rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-04T02:40:08.411055Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72057594037927937] Apply new config CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-04T02:40:08.411230Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:40:08.412225Z node 1 :PERSQUEUE INFO: pq_impl.cpp:1316: [PQ: 72057594037927937] Config applied version 1 actor [1:181:2194] txId 12345 config: CacheSize: 10485760 PartitionConfig { MaxCountInPartition: 20000000 MaxSizeInPartition: 104857600 LifetimeSeconds: 0 LowWatermark: 6291456 SourceIdLifetimeSeconds: 3600 WriteSpeedInBytesPerSecond: 102400 BurstSize: 102400 MaxWriteInflightSize: 90000000 } PartitionIds: 0 TopicName: "rt3.dc1--asdfgs--topic" Version: 1 LocalDC: true Topic: "topic" TopicPath: "/Root/PQ/rt3.dc1--asdfgs--topic" Partitions { PartitionId: 0 } MeteringMode: METERING_MODE_RESERVED_CAPACITY AllPartitions { PartitionId: 0 } Consumers { Name: "user" ReadFromTimestampsMs: 0 Generation: 1 } Consumers { Name: "user1" Generation: 1 Important: true } 2026-01-04T02:40:08.412389Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitConfigStep 2026-01-04T02:40:08.412721Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitInternalFieldsStep 2026-01-04T02:40:08.413050Z node 1 :PERSQUEUE INFO: partition_init.cpp:1155: [72057594037927937][Partition][0][StateInit] bootstrapping 0 [1:189:2142] 2026-01-04T02:40:08.415166Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:86: [rt3.dc1--asdfgs--topic:0:Initializer] Start initializing step TInitFieldsStep 2026-01-04T02:40:08.415229Z node 1 :PERSQUEUE DEBUG: partition_init.cpp:66: [rt3.dc1--asdfgs--topic:0:Initializer] Initializing completed. 
2026-01-04T02:40:08.415282Z node 1 :PERSQUEUE INFO: partition.cpp:712: [72057594037927937][Partition][0][StateInit] init complete for topic 'rt3.dc1--asdfgs--topic' partition 0 generation 2 [1:189:2142] 2026-01-04T02:40:08.415341Z node 1 :PERSQUEUE DEBUG: partition.cpp:732: [72057594037927937][Partition][0][StateInit] SYNC INIT topic rt3.dc1--asdfgs--topic partitition 0 so 0 endOffset 0 Head Offset 0 PartNo 0 PackedSize 0 count 0 nextOffset 0 batches 0 2026-01-04T02:40:08.415422Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72057594037927937][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:40:08.416929Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:40:08.418129Z node 1 :PERSQUEUE DEBUG: partition.cpp:4472: [72057594037927937][Partition][0][StateIdle] Process pending events. Count 0 2026-01-04T02:40:08.418192Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:08.418236Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:08.418294Z node 1 :PERSQUEUE DEBUG: partition.cpp:2409: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:08.418327Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 2, PendingWrites: 0 2026-01-04T02:40:08.418362Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:08.418407Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:08.418481Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit request with generation 1 2026-01-04T02:40:08.418524Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user reinit with generation 1 done 2026-01-04T02:40:08.418586Z node 1 :PERSQUEUE DEBUG: partition.cpp:2473: [72057594037927937][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TEvPQ::TEvSetClientInfo) 2026-01-04T02:40:08.418635Z node 1 :PERSQUEUE DEBUG: partition.cpp:3765: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit request with generation 1 2026-01-04T02:40:08.418681Z node 1 :PERSQUEUE DEBUG: partition.cpp:3836: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 reinit with generation 1 done 2026-01-04T02:40:08.418716Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 2 2026-01-04T02:40:08.418784Z node 1 :PERSQUEUE DEBUG: partition.cpp:2342: [72057594037927937][Partition][0][StateIdle] Batch completed (2) 2026-01-04T02:40:08.418831Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:08.419096Z node 1 :PERSQUEUE DEBUG: 
partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:40:08.419144Z node 1 :PERSQUEUE DEBUG: partition_read.cpp:934: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0 user user1 readTimeStamp for offset 0 initiated queuesize 0 startOffset 0 ReadingTimestamp 0 rrg 0 2026-01-04T02:40:08.419240Z node 1 :PERSQUEUE DEBUG: partition_mlp.cpp:85: [72057594037927937][Partition][0][StateIdle] Process MLP pending events. Count 0 2026-01-04T02:40:08.419449Z node 1 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:40:08.419624Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:40:08.422107Z node 1 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:40:08.422212Z node 1 :PERSQUEUE DEBUG: partition_write.cpp:572: [72057594037927937][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:40:08.422285Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72057594037927937][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:40:08.422335Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:08.422361Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72057594037927937][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:40:08.422393Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72057594037927937][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:40:08.422418Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72057594037927937][Partition][0][StateIdle] Try persist 2026-01-04T02:40:08.422456Z node 1 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72057594037927937][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:40:08.422723Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:196:2203], now have 1 active actors on pipe Set pipe for create session: [1:200:2206] Leader for TabletID 72057594037927938 is [1:157:2176] sender: [1:201:2057] recipient: [1:14:2061] 2026-01-04T02:40:08.423579Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [1:200:2206], now have 1 active actors on pipe 2026-01-04T02:40:08.423687Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-04T0 ... 
UE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 50 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2026-01-04T02:45:00.133251Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2026-01-04T02:45:00.133275Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2026-01-04T02:45:00.133304Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 27:0 isTruncatedBlob 0 2026-01-04T02:45:00.133404Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 27:0 isTruncatedBlob 0 hasNonZeroParts 0 isMiddlePartOfMessage 0 2026-01-04T02:45:00.134065Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:350: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Send EvRead (Compact state) from offset: 50:0 2026-01-04T02:45:00.134220Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:891: [72057594037927937][Partition][0][StateIdle] read cookie 14 Topic 'rt3.dc1--asdfgs--topic' partition 0 user __ydb_compaction_consumer offset 50 partno 0 count 4294967295 size 4294967295 endOffset 67 max time lag 0ms effective offset 50 2026-01-04T02:45:00.134514Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72057594037927937][Partition][0][StateIdle] read cookie 14 added 2 blobs, size 5223238 count 17 last offset 61, current partition end offset: 67 2026-01-04T02:45:00.134541Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72057594037927937][Partition][0][StateIdle] Reading cookie 14. Send blob request. 2026-01-04T02:45:00.134606Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 50 partno 0 count 11 parts_count 0 source 1 size 3379747 accessed 4 times before, last time 1970-01-01T00:00:00.000000Z 2026-01-04T02:45:00.134637Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 61 partno 0 count 6 parts_count 0 source 1 size 1843491 accessed 2 times before, last time 1970-01-01T00:00:00.000000Z 2026-01-04T02:45:00.134671Z node 159 :PERSQUEUE DEBUG: read.h:126: [72057594037927937][PQCacheProxy]Reading cookie 14. All 2 blobs are from cache. 2026-01-04T02:45:00.134714Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72057594037927937' partition 0 offset 50 partno 0 count 11 parts 0 suffix '0' 2026-01-04T02:45:00.134745Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72057594037927937' partition 0 offset 61 partno 0 count 6 parts 0 suffix '0' 2026-01-04T02:45:00.134788Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 2 blobs 2026-01-04T02:45:00.134890Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 50 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2026-01-04T02:45:00.134938Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 52 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2026-01-04T02:45:00.134976Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 54 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2026-01-04T02:45:00.135014Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 56 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2026-01-04T02:45:00.135045Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 58 totakecount 11 count 2 size 614475 from pos 0 cbcount 2 2026-01-04T02:45:00.135074Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 60 totakecount 11 count 1 size 307240 from pos 0 cbcount 1 2026-01-04T02:45:00.135118Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 61 totakecount 6 count 2 size 614475 from pos 0 cbcount 2 2026-01-04T02:45:00.135186Z node 159 :PERSQUEUE DEBUG: partition_read.cpp:997: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got internal ProxyResponse 2026-01-04T02:45:00.135208Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:93: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 proxy response cookie: 1 2026-01-04T02:45:00.135232Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:443: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 process read result in CompState starting from: 50:0 isTruncatedBlob 0 2026-01-04T02:45:00.135447Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 50 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-04T02:45:00.135643Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 51 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-04T02:45:00.135842Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 52 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-04T02:45:00.136042Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 53 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-04T02:45:00.136220Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 54 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-04T02:45:00.136408Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: 
Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 55 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-04T02:45:00.136584Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 56 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-04T02:45:00.136763Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 57 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-04T02:45:00.136941Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 58 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-04T02:45:00.137123Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 59 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-04T02:45:00.137298Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:546: Compaction for topic LastPart 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 res.GetOffset() 60 isTruncatedBlob 0 hasNonZeroParts 1 keepMessage 0 LastBatch 0 2026-01-04T02:45:00.137357Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:581: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 processed read result in CompState starting from: 50:0 isTruncatedBlob 0 hasNonZeroParts 1 isMiddlePartOfMessage 0 2026-01-04T02:45:00.139338Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:621: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 add CmdDeleteRange for key d0000000000_00000000000000000027_00000_0000000023_00000 2026-01-04T02:45:00.139433Z node 159 :PERSQUEUE DEBUG: partition.cpp:4515: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquire RW Lock 2026-01-04T02:45:00.139600Z node 159 :PERSQUEUE DEBUG: partition.cpp:4523: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Acquired RW Lock, send compacter KV request 2026-01-04T02:45:00.139659Z node 159 :PERSQUEUE DEBUG: read.h:275: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T02:45:00.139781Z node 159 :PERSQUEUE DEBUG: read.h:313: [72057594037927937][PQCacheProxy]CacheProxy. Passthrough blob. Partition 0 offset 50 partNo 0 count 11 size 167 2026-01-04T02:45:00.139819Z node 159 :PERSQUEUE DEBUG: read.h:350: [72057594037927937][PQCacheProxy]CacheProxy. Delete blobs from d0000000000_00000000000000000027_00000_0000000023_00000(+) to d0000000000_00000000000000000027_00000_0000000023_00000(+) 2026-01-04T02:45:00.143402Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:361: Deleting head blob in L1. Partition 0 offset 27 count 23 actorID [159:138:2142] 2026-01-04T02:45:00.143460Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:547: Duplicate blob in L1. 
Partition 0 offset 50 count 11 size 3379747 actorID [159:138:2142] is actual 1 2026-01-04T02:45:00.143540Z node 159 :PERSQUEUE DEBUG: cache_eviction.h:311: Caching head blob in L1. Partition 0 offset 50 count 11 size 167 actorID [159:138:2142] 2026-01-04T02:45:00.143641Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 27 partno 0 count 23 parts 0 suffix '0' size 263 2026-01-04T02:45:00.143706Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:155: PQ Cache (L2). Removed. Tablet '72057594037927937' partition 0 offset 50 partno 0 count 11 parts 0 suffix '0' size 3379747 2026-01-04T02:45:00.145022Z node 159 :PERSQUEUE DEBUG: pq_l2_cache.cpp:129: PQ Cache (L2). Adding blob. Tablet '72057594037927937' partition 0 offset 50 partno 0 count 11 parts 0 suffix '0' size 167 2026-01-04T02:45:00.145284Z node 159 :PERSQUEUE DEBUG: partition.cpp:2153: [72057594037927937][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:45:00.145323Z node 159 :PERSQUEUE DEBUG: partition.cpp:2161: [72057594037927937][Partition][0][StateIdle] Topic 'rt3.dc1--asdfgs--topic' partition 0: Got compacter KV response, release RW lock 2026-01-04T02:45:00.145358Z node 159 :PERSQUEUE DEBUG: partition_compactification.cpp:125: Compaction for topic 'rt3.dc1--asdfgs--topic, partition: 0 Process KV response 2026-01-04T02:45:00.148794Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [159:301:2288], now have 1 active actors on pipe 2026-01-04T02:45:00.148849Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--asdfgs--topic' requestId: 2026-01-04T02:45:00.148879Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72057594037927937] got client message batch for topic 'rt3.dc1--asdfgs--topic' partition 0 2026-01-04T02:45:00.148958Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--asdfgs--topic' partition: 0 messageNo: 0 requestId: cookie: 0 Got offset = 61 for user __ydb_compaction_consumer 2026-01-04T02:45:00.149164Z node 159 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72057594037927937] server connected, pipe [159:303:2290], now have 1 active actors on pipe Got start offset = 50 |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteLongKey [GOOD] |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:78:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:81:2057] recipient: [4:80:2112] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:83:2057] recipient: [4:80:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:82:2113] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:198:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:79:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:83:2113] sender: [5:84:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:83:2113] Leader for TabletID 72057594037927937 is [5:83:2113] sender: [5:199:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:82:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:85:2057] recipient: [7:84:2115] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:87:2057] recipient: [7:84:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:86:2116] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:202:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:83:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:88:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:87:2116] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:203:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] |93.0%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-false [GOOD] >> TKeyValueTest::TestWriteTrimWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-clusteradmin >> TKesusTest::TestSessionTimeoutAfterUnregister [GOOD] >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-anonymous >> TConsoleTests::TestRemoveSharedTenantAfterRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRemoveServerlessTenant >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks >> TKeyValueTest::TestConcatWorks >> TKesusTest::TestAcquireLocks [GOOD] >> TKesusTest::TestAcquireRepeat >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_index_build/unittest >> VectorIndexBuildTest::Metering_ServerLessDB_Restarts-doRestarts-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:42:26.408892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:42:26.409001Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:26.409042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:42:26.409097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:42:26.409134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:42:26.409163Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:42:26.409229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:42:26.409306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:42:26.410195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:42:26.410482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:42:26.501361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:42:26.501434Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:26.516297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:42:26.516508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:42:26.516679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:42:26.524599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:42:26.524835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:42:26.525532Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:42:26.525822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:42:26.528479Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:26.528678Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:42:26.529926Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:42:26.529985Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:42:26.530106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:42:26.530150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:42:26.530193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:42:26.530356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:42:26.687533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 
Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.690694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.690872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.690962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.691035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.691100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.691173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.691237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.691327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.691387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.691462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.691534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.691614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.691711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:42:26.691793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
0 TxId: 281474976725763 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72075186233409550 2026-01-04T02:45:01.822747Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 109 Unlocking 2026-01-04T02:45:01.822895Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 109 Unlocking TBuildInfo{ IndexBuildId: 109, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1777:3625], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000, Billed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000} 2026-01-04T02:45:01.823081Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976725763:4294967295 from tablet: 72075186233409549 to tablet: 72075186233409550 cookie: 0:281474976725763 msg type: 269090816 2026-01-04T02:45:01.823200Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976725763, partId: 4294967295, tablet: 72075186233409550 2026-01-04T02:45:01.823360Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976725763, at schemeshard: 72075186233409549 2026-01-04T02:45:01.823397Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725763, ready parts: 0/1, is published: true 2026-01-04T02:45:01.823437Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976725763, at schemeshard: 72075186233409549 2026-01-04T02:45:01.837364Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 20750, transactions count in step: 1, at schemeshard: 72075186233409549 2026-01-04T02:45:01.837516Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976725763 AckTo { RawX1: 0 RawX2: 0 } } Step: 20750 MediatorID: 72075186233409551 TabletID: 72075186233409549, at schemeshard: 72075186233409549 2026-01-04T02:45:01.837580Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_lock.cpp:44: [72075186233409549] TDropLock TPropose opId# 281474976725763:0 HandleReply TEvOperationPlan: step# 20750 2026-01-04T02:45:01.837654Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976725763:0 128 -> 240 2026-01-04T02:45:01.840335Z node 5 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976725763:0, at schemeshard: 72075186233409549 2026-01-04T02:45:01.840405Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409549] TDone opId# 281474976725763:0 ProgressState 2026-01-04T02:45:01.840496Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976725763:0 progress is 1/1 2026-01-04T02:45:01.840532Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725763 ready parts: 1/1 2026-01-04T02:45:01.840578Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976725763:0 progress is 1/1 2026-01-04T02:45:01.840610Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725763 ready parts: 1/1 2026-01-04T02:45:01.840651Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725763, ready parts: 1/1, is published: true 2026-01-04T02:45:01.840725Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [5:1014:2953] message: TxId: 281474976725763 2026-01-04T02:45:01.840778Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725763 ready parts: 1/1 2026-01-04T02:45:01.840819Z node 5 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976725763:0 2026-01-04T02:45:01.840877Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976725763:0 2026-01-04T02:45:01.840967Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 6 2026-01-04T02:45:01.844650Z node 5 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976725763 2026-01-04T02:45:01.844745Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976725763 2026-01-04T02:45:01.844812Z node 5 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 109, txId# 281474976725763 2026-01-04T02:45:01.844949Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 109, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1777:3625], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725763, UnlockTxStatus: 
StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000, Billed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000}, txId# 281474976725763 2026-01-04T02:45:01.847446Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 109 Unlocking 2026-01-04T02:45:01.847596Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 109 Unlocking TBuildInfo{ IndexBuildId: 109, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1777:3625], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000, Billed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000} 2026-01-04T02:45:01.847675Z node 5 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-04T02:45:01.849527Z node 5 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 109 Done 2026-01-04T02:45:01.849655Z node 5 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 109 Done TBuildInfo{ IndexBuildId: 109, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeGlobalVectorKmeansTree, IndexName: index1, IndexColumn: embedding, State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [5:1777:3625], AlterMainTableTxId: 0, AlterMainTableTxStatus: StatusSuccess, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725758, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725763, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000, Billed: UploadRows: 420 UploadBytes: 6220 ReadRows: 2000 ReadBytes: 26000 CpuTimeUs: 363000} 2026-01-04T02:45:01.849702Z node 5 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 109, 
subscribers count# 1 2026-01-04T02:45:01.849891Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2026-01-04T02:45:01.849954Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [5:3244:4985] TestWaitNotification: OK eventTxId 109 2026-01-04T02:45:01.851344Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:1703: Handle TEvRemoteHttpInfo: BuildIndexId=109&Page=BuildIndexInfo 2026-01-04T02:45:01.851445Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:441: TTxMonitoring.Execute: BuildIndexId=109&Page=BuildIndexInfo quantity: 311, 311 unit: "request_unit", "request_unit" type: "delta", "delta" ... unblocking NKikimr::NMetering::TEvMetering::TEvWriteMeteringJson from FLAT_SCHEMESHARD_ACTOR to TFakeMetering 2026-01-04T02:45:01.853458Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:338: tests -- TFakeMetering got TEvMetering::TEvWriteMeteringJson 2026-01-04T02:45:01.853984Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:1703: Handle TEvRemoteHttpInfo: BuildIndexId=109&Page=BuildIndexInfo 2026-01-04T02:45:01.854111Z node 5 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__monitoring.cpp:441: TTxMonitoring.Execute: BuildIndexId=109&Page=BuildIndexInfo |93.0%| [TA] $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_index_build/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-anonymous >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] >> TKesusTest::TestAcquireRepeat [GOOD] >> TKesusTest::TestAcquireDowngrade ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestStopResourceAllocationWhenPipeDestroyed [GOOD] Test command err: 2026-01-04T02:44:39.436721Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:39.436855Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:39.456845Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:39.456972Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:39.482978Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:39.850179Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:39.850275Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:39.863794Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:39.863913Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:39.888579Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:40.265997Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:40.266100Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] 
TTxInitSchema::Execute 2026-01-04T02:44:40.285292Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:40.285861Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:40.320280Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:40.320837Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=8197021490829142821, session=0, seqNo=0) 2026-01-04T02:44:40.321020Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:44:40.333128Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=8197021490829142821, session=1) 2026-01-04T02:44:40.333648Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:146:2168], cookie=16832061161331170995) 2026-01-04T02:44:40.333727Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:146:2168], cookie=16832061161331170995) 2026-01-04T02:44:40.758406Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:40.770867Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:41.126848Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:41.139153Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:41.489486Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:41.501961Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:41.852614Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:41.864991Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:42.236642Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:42.248862Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:42.600366Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:42.612705Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:42.954198Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:42.966588Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:43.316981Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:43.329321Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:43.680884Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:43.692787Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:44.075694Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 
2026-01-04T02:44:44.088095Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:44.459954Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:44.472694Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:44.834101Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:44.846518Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:45.212792Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:45.225151Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:45.584895Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:45.597042Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:45.989005Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:46.001121Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:46.372496Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:46.384736Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:46.745550Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:46.757343Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:47.117463Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:47.129235Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:47.489431Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:47.501168Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:47.882776Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:47.894870Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:48.254917Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:48.267061Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:48.626942Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:48.638687Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:48.998019Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:49.009669Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:49.369496Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:49.381286Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:49.751729Z node 3 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:49.763498Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:50.134269Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:50.146043Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:50.505858Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:50.517798Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:50.878361Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:50.890147Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:51.249833Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:51.261787Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:51.664729Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:51.676440Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:52.036185Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:52.048402Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:52.419253Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:52.431418Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:52.791907Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:52.803689Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:53.154241Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:53.166354Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:53.537816Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:53.549647Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:53.910867Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:53.923070Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:54.283801Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:54.295636Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:54.656116Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:54.667947Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:55.031394Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:55.043686Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] 
TTxSelfCheck::Complete 2026-01-04T02:44:55.437062Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:55.448925Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:55.810389Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:55.822827Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:56.195789Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:56.207752Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:56.567819Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:56.579473Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:56.938920Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:56.950574Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:57.352702Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:57.364611Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:57.725213Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:57.737308Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:58.108942Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:58.121011Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:58.481888Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:58.493993Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:58.854801Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:58.866894Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:59.259194Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:59.271097Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:59.631394Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:59.643490Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:00.004768Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:00.016667Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:00.367303Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:00.379350Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:00.720457Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:00.732559Z node 3 
:KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:01.093624Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:01.105928Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:01.458162Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:01.470729Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:01.813894Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:01.826242Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:02.189543Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:02.201993Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:02.553886Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:02.566273Z node 3 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:02.981993Z node 3 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-01-04T02:45:02.982092Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-04T02:45:02.993936Z node 3 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-01-04T02:45:03.005099Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:535:2482], cookie=11597807124627828594) 2026-01-04T02:45:03.005202Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:535:2482], cookie=11597807124627828594) 2026-01-04T02:45:03.355570Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:45:03.355682Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:45:03.370247Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:45:03.370761Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:45:03.394433Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:45:03.400078Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:137:2161], cookie=13417420985112576260, path="Root", config={ MaxUnitsPerSecond: 100 }) 2026-01-04T02:45:03.400361Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-04T02:45:03.412624Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:137:2161], cookie=13417420985112576260) 2026-01-04T02:45:03.414194Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:146:2168]. Cookie: 0. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-04T02:45:03.414266Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:146:2168], cookie=0) 2026-01-04T02:45:03.414481Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:148:2170]. Cookie: 0. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-04T02:45:03.414509Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:148:2170], cookie=0) 2026-01-04T02:45:03.456337Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [4:146:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2026-01-04T02:45:03.456427Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [4:148:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 5 StateNotification { Status: SUCCESS } } } 2026-01-04T02:45:03.456663Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:318: Got TEvServerDisconnected([4:151:2173]) 2026-01-04T02:45:03.456788Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:37: [72057594037927937] Send TEvResourcesAllocated to [4:148:2170]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 StateNotification { Status: SESSION_EXPIRED Issues { message: "Disconected." } } } } 2026-01-04T02:45:03.508942Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [4:146:2168]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 10 StateNotification { Status: SUCCESS } } } |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] [FAIL] |93.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TKesusTest::TestAcquireDowngrade [GOOD] >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-system |93.1%| [TA] {RESULT} $(B)/ydb/core/persqueue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TA] $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... 
results_accumulator.log} >> BasicUsage::BrokenCredentialsProvider [GOOD] >> BasicUsage::CreateTopicWithCustomName >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-clusteradmin |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] [FAIL] >> TKeyValueTest::TestBasicWriteReadOverrun [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-ordinaryuser >> TKeyValueTest::TestBlockedEvGetRequest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::DataReceivedCallbackReal [GOOD] Test command err: 2026-01-04T02:44:34.457654Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.457690Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.457713Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:34.458181Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:34.469440Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:34.469704Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.470099Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:44:34.470576Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.470702Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:44:34.470830Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:34.470872Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-04T02:44:34.471529Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.471557Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.471578Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:34.471970Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:34.472740Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:34.472878Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.473053Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:44:34.473412Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.473532Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:44:34.473639Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2026-01-04T02:44:34.473681Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-04T02:44:34.474793Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.474814Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.474848Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:34.475140Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:34.475790Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:34.475941Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.476155Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:44:34.476904Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.477100Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:44:34.477201Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:34.477237Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-04T02:44:34.478148Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.478188Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.478232Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:34.478511Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:34.479028Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:34.479153Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.479336Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:44:34.481023Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.481601Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:44:34.481695Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:34.481737Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-04T02:44:34.482619Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.482664Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.482683Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:34.483038Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-04T02:44:34.483809Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:34.483942Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.484138Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:44:34.484512Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.484615Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:44:34.484691Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:34.484724Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-04T02:44:34.485334Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.485355Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.485380Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:34.485663Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:34.486464Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:34.486594Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.486776Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:44:34.487193Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.487320Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:44:34.487413Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:34.487452Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-04T02:44:34.488411Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.488438Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.488476Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:34.488740Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:34.489347Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:34.489513Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.489650Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:44:34.490296Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.490527Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:44:34.490612Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2026-01-04T02:44:34.490650Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 0 bytes 2026-01-04T02:44:34.491450Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.491467Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.491484Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:34.491826Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:44:34.492390Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:44:34.492490Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.492615Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:44:34.494136Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.494538Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:44:34.494618Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:44:34.494656Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 3 bytes 2026-01-04T02:44:34.501409Z :ReadSession INFO: Random seed for debugging is 1767494674501366 2026-01-04T02:44:34.768795Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331821605394938:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:44:34.769315Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:44:34.834004Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:44:34.834314Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331823357474095:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:44:34.844124Z node 2 :M ... 
ndingCommits: 0, PendingWrites: 0 2026-01-04T02:45:02.076913Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:45:02.177245Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:02.177290Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:02.177302Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:02.177320Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:02.177333Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:45:02.277577Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:02.277612Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:02.277624Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:02.277671Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:02.277684Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:45:02.377924Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:02.377956Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:02.377967Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:02.377982Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:02.377991Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:45:02.478258Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:02.478284Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:02.478297Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:02.478311Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:02.478321Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:45:02.578627Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:02.578661Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 
0, PendingWrites: 0 2026-01-04T02:45:02.578674Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:02.578690Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:02.578701Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:45:02.678978Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:02.679012Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:02.679025Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:02.679042Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:02.679053Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:45:02.779327Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:02.779358Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:02.779368Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:02.779380Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:02.779388Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:45:02.879647Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:02.879674Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:02.879684Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:02.879696Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:02.879703Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:45:02.980038Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:02.980071Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:02.980084Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:02.980100Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:02.980111Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: 
[72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:45:03.016430Z node 1 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_1_1_4316057975025281348_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 6 from offset 3 2026-01-04T02:45:03.071018Z :INFO: [/Root] [/Root] [9af50e3c-95b9ed76-b4c7f28e-4aec4d55] Closing read session. Close timeout: 0.000000s 2026-01-04T02:45:03.071105Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:3 2026-01-04T02:45:03.071142Z :INFO: [/Root] [/Root] [9af50e3c-95b9ed76-b4c7f28e-4aec4d55] Counters: { Errors: 0 CurrentSessionLifetimeMs: 16399 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:45:03.071256Z :NOTICE: [/Root] [/Root] [9af50e3c-95b9ed76-b4c7f28e-4aec4d55] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-04T02:45:03.071298Z :DEBUG: [/Root] [/Root] [9af50e3c-95b9ed76-b4c7f28e-4aec4d55] [dc1] Abort session to cluster 2026-01-04T02:45:03.072149Z :NOTICE: [/Root] [/Root] [9af50e3c-95b9ed76-b4c7f28e-4aec4d55] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:45:03.072324Z node 1 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_1_1_4316057975025281348_v1 grpc read done: success# 0, data# { } 2026-01-04T02:45:03.072369Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_1_1_4316057975025281348_v1 grpc read failed 2026-01-04T02:45:03.072406Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_1_1_4316057975025281348_v1 grpc closed 2026-01-04T02:45:03.072454Z node 1 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_1_1_4316057975025281348_v1 is DEAD 2026-01-04T02:45:03.072893Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_1_1_4316057975025281348_v1 2026-01-04T02:45:03.072953Z node 2 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [1:7591331873145004957:2477] destroyed 2026-01-04T02:45:03.073061Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_1_1_4316057975025281348_v1 2026-01-04T02:45:03.073071Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [1:7591331873145004954:2474] disconnected. 2026-01-04T02:45:03.073103Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [1:7591331873145004954:2474] disconnected; active server actors: 1 2026-01-04T02:45:03.073130Z node 1 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [1:7591331873145004954:2474] client user disconnected session shared/user_1_1_4316057975025281348_v1 2026-01-04T02:45:03.080336Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:03.080356Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:03.080367Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:03.080378Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:03.080387Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:45:03.181008Z node 2 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:03.181043Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:03.181055Z node 2 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:03.181072Z node 2 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:03.181081Z node 2 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-clusteradmin 
[GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-clusteradmin |93.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] >> test_sql_streaming.py::test[pq-ReadTopic-default.txt] [FAIL] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-dbadmin >> CompressExecutor::TestReorderedExecutor [GOOD] >> DstCreator::WithIntermediateDir >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] >> DstCreator::ReplicationModeMismatch >> DataShardVolatile::DistributedUpsertRestartBeforePrepare+UseSink [GOOD] >> DstCreator::GlobalConsistency >> CompressExecutor::TestExecutorMemUsage >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink >> DstCreator::NonExistentSrc >> DstCreator::Basic |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[pq-ReadTopic-default.txt] [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestBlockedEvGetRequest [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:78:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:81:2057] recipient: [10:80:2112] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:83:2057] recipient: [10:80:2112] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:82:2113] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:198:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:78:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:81:2057] recipient: [11:80:2112] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:83:2057] recipient: [11:80:2112] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:82:2113] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:198:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:79:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:82:2057] recipient: [12:81:2112] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:84:2057] recipient: [12:81:2112] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:83:2113] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:199:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:82:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:85:2057] recipient: [13:84:2115] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:87:2057] recipient: [13:84:2115] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:86:2116] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:202:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:82:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:85:2057] recipient: [14:84:2115] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:87:2057] recipient: [14:84:2115] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! 
new actor is[14:86:2116] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:202:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:83:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:86:2057] recipient: [15:85:2115] Leader for TabletID 72057594037927937 is [15:87:2116] sender: [15:88:2057] recipient: [15:85:2115] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:87:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] 2026-01-04T02:45:05.844812Z node 17 :KEYVALUE ERROR: keyvalue_storage_read_request.cpp:256: {KV323@keyvalue_storage_read_request.cpp:256} Received BLOCKED EvGetResult. KeyValue# 72057594037927937 Status# BLOCKED Deadline# 18446744073709551 Now# 0 SentAt# 1970-01-01T00:00:00.000000Z GotAt# 0 ErrorReason# block race detected 2026-01-04T02:45:05.848868Z node 17 :TABLET_MAIN ERROR: tablet_sys.cpp:1006: Tablet: 72057594037927937 HandleBlockBlobStorageResult, msg->Status: ALREADY, not discovered Marker# TSYS21 2026-01-04T02:45:05.848925Z node 17 :TABLET_MAIN ERROR: tablet_sys.cpp:1925: Tablet: 72057594037927937 Type: KeyValue, EReason: ReasonBootBSError, SuggestedGeneration: 0, KnownGeneration: 3 Marker# TSYS31 |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadWhileWriteWorks >> test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] [FAIL] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOkWithNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:78:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:81:2057] recipient: [4:80:2112] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:83:2057] recipient: [4:80:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:82:2113] Leader for TabletID 72057594037927937 is [4:82:2113] sender: [4:198:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:79:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:81:2112] Leader for TabletID 72057594037927937 is [5:83:2113] sender: [5:84:2057] recipient: [5:81:2112] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:83:2113] Leader for TabletID 72057594037927937 is [5:83:2113] sender: [5:199:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:82:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:85:2057] recipient: [7:84:2115] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:87:2057] recipient: [7:84:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:86:2116] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:202:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:83:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:85:2115] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:88:2057] recipient: [8:85:2115] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:87:2116] Leader for TabletID 72057594037927937 is [8:87:2116] sender: [8:105:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:85:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:88:2057] recipient: [10:87:2117] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:90:2057] recipient: [10:87:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:89:2118] Leader for TabletID 72057594037927937 is [10:89:2118] sender: [10:205:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:86:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:89:2057] recipient: [11:88:2117] Leader for TabletID 72057594037927937 is [11:90:2118] sender: [11:91:2057] recipient: [11:88:2117] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:90:2118] Leader for TabletID 72057594037927937 is [11:90:2118] sender: [11:206:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:87:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:89:2118] Leader for TabletID 72057594037927937 is [12:91:2119] sender: [12:92:2057] recipient: [12:89:2118] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:91:2119] Leader for TabletID 72057594037927937 is [12:91:2119] sender: [12:111:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:88:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:91:2057] recipient: [13:90:2119] Leader for TabletID 72057594037927937 is [13:92:2120] sender: [13:93:2057] recipient: [13:90:2119] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:92:2120] Leader for TabletID 72057594037927937 is [13:92:2120] sender: [13:112:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:91:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:94:2057] recipient: [14:93:2122] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:96:2057] recipient: [14:93:2122] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:95:2123] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:211:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:91:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:94:2057] recipient: [15:93:2122] Leader for TabletID 72057594037927937 is [15:95:2123] sender: [15:96:2057] recipient: [15:93:2122] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! 
new actor is[15:95:2123] Leader for TabletID 72057594037927937 is [15:95:2123] sender: [15:211:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-NoDbAdmin-system [GOOD] >> TKeyValueTest::TestCopyRangeWorks [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-system >> TKeyValueTest::TestCopyRangeWorksNewApi |93.1%| [TA] $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> DstCreator::WithIntermediateDir [GOOD] >> DstCreator::GlobalConsistency [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-ordinaryuser >> DstCreator::WithAsyncIndex >> DstCreator::KeyColumnNameMismatch >> DstCreator::NonExistentSrc [GOOD] >> DstCreator::KeyColumnsSizeMismatch >> TConsoleTests::TestRemoveServerlessTenant [GOOD] >> TConsoleTests::TestRegisterComputationalUnitsForPending |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/streaming_optimize/py3test >> test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] [FAIL] |93.1%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] |93.1%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/streaming_optimize/py3test >> DstCreator::ReplicationModeMismatch [GOOD] >> DstCreator::Basic [GOOD] >> DstCreator::ReplicationConsistencyLevelMismatch >> DstCreator::CannotFindColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadDeleteWithRestartsAndCatchCollectGarbageEventsWithSlowInitialGC [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! 
new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:87:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:91:2119] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:111:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:112:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:91:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:94:2057] recipient: [13:93:2122] Leader for TabletID 72057594037927937 is [13:95:2123] sender: [13:96:2057] recipient: [13:93:2122] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! 
new actor is[13:95:2123] Leader for TabletID 72057594037927937 is [13:95:2123] sender: [13:211:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:91:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:94:2057] recipient: [14:93:2122] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:96:2057] recipient: [14:93:2122] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:95:2123] Leader for TabletID 72057594037927937 is [14:95:2123] sender: [14:211:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:92:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:95:2057] recipient: [15:94:2122] Leader for TabletID 72057594037927937 is [15:96:2123] sender: [15:97:2057] recipient: [15:94:2122] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! 
new actor is[15:96:2123] Leader for TabletID 72057594037927937 is [15:96:2123] sender: [15:212:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-clusteradmin |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-ordinaryuser |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTest >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-anonymous [GOOD] >> TGRpcCmsTest::AuthTokenTest >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-anonymous |93.1%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} |93.1%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest >> DstCreator::KeyColumnNameMismatch [GOOD] >> PersQueueSdkReadSessionTest::StopResumeReadingData [GOOD] >> ReadSessionImplTest::CreatePartitionStream [GOOD] >> ReadSessionImplTest::BrokenCompressedData [GOOD] >> ReadSessionImplTest::CommitOffsetTwiceIsError [GOOD] >> ReadSessionImplTest::DataReceivedCallback >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-clusteradmin |93.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_index_build/test-results/unittest/{meta.json ... results_accumulator.log} >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi >> DstCreator::KeyColumnsSizeMismatch [GOOD] >> TGRpcCmsTest::DescribeOptionsTest >> DstCreator::WithAsyncIndex [GOOD] |93.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/ut_rw/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] >> DstCreator::CannotFindColumn [GOOD] >> TConsoleTxProcessorTests::TestTxProcessorRandom [GOOD] >> TImmediateControlsConfiguratorTests::TestControlsInitialization >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnNameMismatch [GOOD] Test command err: 2026-01-04T02:45:07.433644Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331963200690328:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:45:07.433744Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:45:07.654747Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:45:07.694817Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:07.694958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:07.743812Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:07.759404Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:07.944085Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:08.066029Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:45:08.066045Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:45:08.066115Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:45:08.066174Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:45:08.410549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:08.417472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:08.449602Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:08.516009Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-04T02:45:08.516048Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:45:08.516599Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:45:09.847343Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767494708511, tx_id: 281474976715658 } } } 2026-01-04T02:45:09.848258Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-04T02:45:09.849674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:09.850435Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2026-01-04T02:45:09.850487Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2026-01-04T02:45:09.873506Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2026-01-04T02:45:09.873550Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-04T02:45:10.344394Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331978240647507:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:45:10.344461Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:45:10.354992Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:45:10.412002Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:10.446659Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:10.446750Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:10.448570Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:10.594886Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:10.636963Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:45:10.636994Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:45:10.637002Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:45:10.637082Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:45:10.861389Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:10.867837Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:10.901353Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:10.931969Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-04T02:45:10.931990Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:45:10.932494Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:45:11.348997Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:12.605342Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 
1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767494710940, tx_id: 281474976710658 } } } 2026-01-04T02:45:12.605577Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-04T02:45:12.641373Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-04T02:45:12.643713Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767494710968 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 
BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-04T02:45:12.643949Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key column name mismatch: position: 0, expected: key, got: value |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest |93.1%| [TA] $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::KeyColumnsSizeMismatch [GOOD] Test command err: 2026-01-04T02:45:07.807183Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331966292411156:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:45:07.807423Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:45:07.992560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:07.992699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:07.994594Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:08.074064Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:45:08.084071Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:08.085323Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331966292411115:2081] 1767494707806151 != 1767494707806154 2026-01-04T02:45:08.279625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:45:08.279646Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:45:08.279664Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:45:08.279803Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:45:08.340971Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:08.482830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:08.494495Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 
2026-01-04T02:45:08.494611Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:45:08.495173Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:45:08.815407Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:10.122752Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { status: SCHEME_ERROR, issues: } } 2026-01-04T02:45:10.122798Z node 1 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot describe table: status: SCHEME_ERROR, issue: 2026-01-04T02:45:10.543463Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331978008661760:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:45:10.543522Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:45:10.555056Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:45:10.633950Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:10.635678Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591331978008661728:2081] 1767494710542433 != 1767494710542436 2026-01-04T02:45:10.675114Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:10.675211Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:10.681778Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:10.726479Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:10.840317Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:45:10.840341Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:45:10.840348Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:45:10.840454Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:45:11.037175Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:45:11.044601Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:11.122692Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:11.147859Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-04T02:45:11.147882Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:45:11.148327Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:45:11.547915Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:12.841171Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767494711129, tx_id: 281474976715658 } } } 2026-01-04T02:45:12.841566Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-04T02:45:12.868492Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976715660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-04T02:45:12.872045Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1767494711185 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: 
"Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnNames: "value" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 
72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-04T02:45:12.872283Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Key columns size mismatch: expected: 1, got: 2 >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithAsyncIndex [GOOD] Test command err: 2026-01-04T02:45:07.433643Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331964062124929:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:45:07.433797Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:45:07.677158Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:45:07.691769Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:07.691888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:07.720881Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:07.759737Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:07.909396Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:08.066415Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:45:08.066474Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:45:08.066500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: 
failed to initialize from file: (empty maybe) 2026-01-04T02:45:08.066580Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:45:08.411872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:08.419674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:08.454043Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:08.520381Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-04T02:45:08.520431Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:45:08.520997Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:45:09.915968Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767494708511, tx_id: 281474976715658 } } } 2026-01-04T02:45:09.916283Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-04T02:45:09.918068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:09.918826Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2026-01-04T02:45:09.918843Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2026-01-04T02:45:09.941017Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2026-01-04T02:45:09.941040Z node 1 
:REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 40] 2026-01-04T02:45:10.435380Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331976276129175:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:45:10.435417Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:45:10.442673Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:10.491619Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:10.493023Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591331976276129136:2081] 1767494710434493 != 1767494710434496 2026-01-04T02:45:10.498685Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:10.498767Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:10.516412Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:10.522761Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:10.660366Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:45:10.660386Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:45:10.660393Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:45:10.660474Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:45:10.835733Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:10.843736Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:11.098961Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, 
LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-04T02:45:11.098991Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:45:11.099405Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:45:11.439931Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:12.792121Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767494711108, tx_id: 281474976715658 } } } 2026-01-04T02:45:12.792498Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-04T02:45:12.793774Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:12.794377Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2026-01-04T02:45:12.794396Z node 2 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2026-01-04T02:45:12.818249Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2026-01-04T02:45:12.818268Z node 2 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 41] |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-dbadmin >> TGRpcCmsTest::AlterRemoveTest >> ReadSessionImplTest::DataReceivedCallback [GOOD] >> ReadSessionImplTest::CommonHandler [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::ReplicationConsistencyLevelMismatch [GOOD] Test command err: 2026-01-04T02:45:07.433648Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331966442427362:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:45:07.433776Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:45:07.678080Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:45:07.698979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:07.699073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:07.748736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:07.754095Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:07.944646Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:08.066043Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:45:08.066073Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:45:08.066094Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:45:08.066184Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:45:08.395117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:08.405832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:08.449607Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:08.517671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:08.550106Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-04T02:45:08.550155Z node 1 
:REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:45:08.550900Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:45:10.158601Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767494708511, tx_id: 281474976710658 } } } 2026-01-04T02:45:10.158952Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-04T02:45:10.187964Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-04T02:45:10.190425Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767494708588 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 
KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_NONE ConsistencyLevel: CONSISTENCY_LEVEL_UNKNOWN } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-04T02:45:10.190632Z node 1 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Replication mode mismatch: expected: REPLICATION_MODE_READ_ONLY, got: 0 2026-01-04T02:45:10.764714Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331978666433285:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:45:10.764801Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:45:10.776279Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:45:10.825327Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:10.826795Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591331978666433243:2081] 1767494710763797 != 1767494710763800 2026-01-04T02:45:10.893131Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:10.893219Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:10.903460Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:11.041549Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:45:11.041575Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:45:11.041583Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:45:11.041652Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:45:11.055296Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:11.277400Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:11.284038Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:11.313658Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:11.342337Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-04T02:45:11.342354Z node 2 
:REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:45:11.342714Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:45:11.771080Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:13.198824Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767494711353, tx_id: 281474976710658 } } } 2026-01-04T02:45:13.199054Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-04T02:45:13.200294Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-04T02:45:13.201122Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767494711381 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } 
Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-04T02:45:13.201297Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Replication consistency level mismatch: expected: CONSISTENCY_LEVEL_ROW, got: 1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::CannotFindColumn [GOOD] Test command err: 2026-01-04T02:45:07.930558Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331963713762095:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:45:07.930854Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:45:08.134478Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:45:08.158942Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:08.159048Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:08.178146Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:08.234727Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:08.240605Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331963713762055:2081] 1767494707928941 != 1767494707928944 2026-01-04T02:45:08.342618Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:08.420147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:45:08.420174Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:45:08.420221Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:45:08.420323Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:45:08.611609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:08.619191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:08.722239Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 
72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-04T02:45:08.722281Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:45:08.722803Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:45:08.938552Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:10.407015Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767494708700, tx_id: 281474976715658 } } } 2026-01-04T02:45:10.407381Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-04T02:45:10.408822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:10.409465Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2026-01-04T02:45:10.409493Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2026-01-04T02:45:10.433895Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2026-01-04T02:45:10.433925Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 1] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-04T02:45:10.907662Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331976069730122:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:45:10.907743Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:45:10.920190Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:45:10.983662Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:10.984896Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591331976069730088:2081] 1767494710906936 != 1767494710906939 2026-01-04T02:45:11.021946Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:11.022024Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:11.027977Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:11.132742Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:11.210864Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:45:11.210887Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:45:11.210892Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:45:11.210943Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:45:11.389553Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:11.396122Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:11.427998Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:11.452962Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-04T02:45:11.452980Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:45:11.453311Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:45:11.913798Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:13.223812Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Src, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767494711465, tx_id: 281474976710658 } } } 2026-01-04T02:45:13.224125Z node 2 :REPLICATION_CONTROLLER TRACE: 
dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-04T02:45:13.298328Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAlreadyExists txid# 281474976710660 Reason# Check failed: path: '/Root/Dst', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 39], type: EPathTypeTable, state: EPathStateNoChanges)} 2026-01-04T02:45:13.301325Z node 2 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Dst" PathDescription { Self { Name: "Dst" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976710659 CreateStep: 1767494711493 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Dst" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 
LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 39 PathOwnerId: 72057594046644480 2026-01-04T02:45:13.301612Z node 2 :REPLICATION_CONTROLLER ERROR: dst_creator.cpp:599: [DstCreator][rid 1][tid 1] Error: status# StatusSchemeError, reason# Cannot find column: name: value |93.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... results_accumulator.log} |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest |93.1%| [TA] {RESULT} $(B)/ydb/tests/fq/streaming_optimize/test-results/py3test/{meta.json ... 
results_accumulator.log} >> DataShardVolatile::DistributedUpsertRestartBeforePrepare-UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink >> TKesusTest::TestAcquireSemaphoreRebootTimeout [GOOD] >> TKesusTest::TestAcquireSemaphoreViaDecrease >> TImmediateControlsConfiguratorTests::TestControlsInitialization [GOOD] >> TImmediateControlsConfiguratorTests::TestModifiedControls >> TKeyValueTest::TestRenameWorks [GOOD] >> TKeyValueTest::TestRenameToLongKey >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-anonymous >> DstCreator::WithSyncIndex >> TConsoleTests::TestRegisterComputationalUnitsForPending [GOOD] >> TConsoleTests::TestNotifyOperationCompletion >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] >> TGRpcCmsTest::SimpleTenantsTestSyncOperation >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-dbadmin >> TImmediateControlsConfiguratorTests::TestModifiedControls [GOOD] >> TImmediateControlsConfiguratorTests::TestResetToDefault ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> ReadSessionImplTest::CommonHandler [GOOD] Test command err: 2026-01-04T02:44:39.059949Z :SettingsValidation INFO: Random seed for debugging is 1767494679059918 2026-01-04T02:44:39.253234Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331843969825038:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:44:39.253413Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:44:39.289936Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331843926615005:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:44:39.291437Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:44:39.293955Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:44:39.301508Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:44:39.401073Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:44:39.453992Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:44:39.501615Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:44:39.501711Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:44:39.504774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2026-01-04T02:44:39.504884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:44:39.531987Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:44:39.539007Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:44:39.539533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:44:39.590883Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:39.591962Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331843969824992:2082] 1767494679251581 != 1767494679251584 2026-01-04T02:44:39.646422Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/00308c/r3tmp/yandexaKOnTk.tmp 2026-01-04T02:44:39.646451Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/00308c/r3tmp/yandexaKOnTk.tmp 2026-01-04T02:44:39.646658Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/00308c/r3tmp/yandexaKOnTk.tmp 2026-01-04T02:44:39.646789Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:44:39.684210Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:44:39.685255Z INFO: TTestServer started on Port 61709 GrpcPort 19124 2026-01-04T02:44:39.725174Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:19124 2026-01-04T02:44:39.870372Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:40.261132Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:44:40.298585Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:44:42.175357Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331856854728256:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:42.175467Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331856854728246:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:42.175671Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:42.175949Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331856854728264:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:42.176037Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:42.178163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:44:42.196085Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331856854728262:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-04T02:44:42.464295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:44:42.465045Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331856854728356:2982] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:44:42.553559Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591331856854728406:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:44:42.554057Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YWE0YjkzZmYtMzcxNmI4NDctODQzYTA4NDYtNmJmMjcyODg=, ActorId: [1:7591331856854728229:2327], ActorState: ExecuteState, LegacyTraceId: 01ke3e91hxbf9mby6tye87fm92, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:44:42.558144Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591331856811517236:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:44:42.560315Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=Y2RhMzY0MWYtNTM1YzgxZjItYjdiZGU0ZjktNWE5NGU0MDg=, ActorId: [2:7591331856811517209:2301], ActorState: ExecuteState, LegacyTraceId: 01ke3e91srb2vnv18m4mwb3f80, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:44:42.571333Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:44:42.571334Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please ch ... FO: [/Root] [/Root] [f65561b0-5bfe2db9-e1d5d45-46b38308] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1316 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:45:11.804463Z :INFO: [/Root] [/Root] [f65561b0-5bfe2db9-e1d5d45-46b38308] Closing read session. 
Close timeout: 0.000000s 2026-01-04T02:45:11.804496Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): dc1:test-topic:0:1:2:2 2026-01-04T02:45:11.804529Z :INFO: [/Root] [/Root] [f65561b0-5bfe2db9-e1d5d45-46b38308] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1317 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 84 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:45:11.804510Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_4748235962724298608_v1 grpc read done: success# 0, data# { } 2026-01-04T02:45:11.804528Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_5_1_4748235962724298608_v1 grpc read failed 2026-01-04T02:45:11.804545Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_5_1_4748235962724298608_v1 grpc closed 2026-01-04T02:45:11.804613Z :NOTICE: [/Root] [/Root] [f65561b0-5bfe2db9-e1d5d45-46b38308] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:45:11.804586Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_5_1_4748235962724298608_v1 is DEAD 2026-01-04T02:45:11.804891Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_5_1_4748235962724298608_v1 2026-01-04T02:45:11.804939Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [5:7591331976626879204:2477] destroyed 2026-01-04T02:45:11.804977Z node 6 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_5_1_4748235962724298608_v1 2026-01-04T02:45:11.805249Z node 5 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [5:7591331976626879201:2474] disconnected. 2026-01-04T02:45:11.805275Z node 5 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [5:7591331976626879201:2474] disconnected; active server actors: 1 2026-01-04T02:45:11.805290Z node 5 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [5:7591331976626879201:2474] client user disconnected session shared/user_5_1_4748235962724298608_v1 2026-01-04T02:45:11.862286Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:11.862323Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:11.862336Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:11.862355Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:11.862367Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:45:13.041861Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:45:13.041898Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:45:13.041932Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:45:13.042174Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:45:13.042697Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:45:13.042920Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:45:13.043382Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: 13. Commit offset: 31 2026-01-04T02:45:13.044903Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:45:13.044937Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:45:13.044973Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:45:13.045265Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. 
Initializing session 2026-01-04T02:45:13.045624Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:45:13.045746Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:45:13.046050Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:45:13.047216Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:45:13.047680Z :INFO: Error decompressing data: (TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check) 2026-01-04T02:45:13.047786Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-3) 2026-01-04T02:45:13.047927Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:45:13.047975Z :DEBUG: Take Data. Partition 1. Read: {0, 1} (2-2) 2026-01-04T02:45:13.048003Z :DEBUG: Take Data. Partition 1. Read: {0, 2} (3-3) 2026-01-04T02:45:13.048049Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 3, size 57 bytes DataReceived { PartitionStreamId: 1 PartitionId: 1 Message { DataDecompressionError: "(TZLibDecompressorError) util/stream/zlib.cpp:143: inflate error(incorrect header check)" Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 1 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 2 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } Message { Data: ..8 bytes.. Partition stream id: 1 Cluster: "TestCluster". Topic: "TestTopic" Partition: 1 PartitionKey: "" Information: { Offset: 3 SeqNo: 1 MessageGroupId: "src_id" CreateTime: 1970-01-01T00:00:00.042000Z WriteTime: 1970-01-01T00:00:00.042000Z Ip: "::1" UncompressedSize: 0 Meta: { } } } } 2026-01-04T02:45:13.050016Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:45:13.050062Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:45:13.050092Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:45:13.050399Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:45:13.050901Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:45:13.051103Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:45:13.051301Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:45:13.051900Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:45:13.052072Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:45:13.052206Z :DEBUG: Take Data. Partition 1. 
Read: {0, 0} (1-1) 2026-01-04T02:45:13.052249Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-04T02:45:13.052314Z :DEBUG: [db] [sessionid] [cluster] Commit offsets [0, 2). Partition stream id: 1 2026-01-04T02:45:13.053458Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:45:13.053491Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:45:13.053511Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:45:13.053759Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:45:13.054133Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:45:13.054272Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:45:13.054476Z :INFO: [db] [sessionid] [cluster] Confirm partition stream create. Partition stream id: 1. Cluster: "TestCluster". Topic: "TestTopic". Partition: 1. Read offset: (NULL) 2026-01-04T02:45:13.055170Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:45:13.055642Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 Post function 2026-01-04T02:45:13.055802Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (2-2) 2026-01-04T02:45:13.055864Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:45:13.055909Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:45:13.055938Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (2-2) 2026-01-04T02:45:13.056077Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-04T02:45:13.056146Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. Number of messages 1, size 8 bytes 2026-01-04T02:45:15.058370Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:45:15.058405Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:45:15.058442Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:45:15.076085Z :DEBUG: [db] [sessionid] [cluster] Successfully connected. Initializing session 2026-01-04T02:45:15.076786Z :INFO: [db] [sessionid] [cluster] Server session id: 123-session-id-321 2026-01-04T02:45:15.078609Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:45:15.080025Z :DEBUG: [db] [sessionid] [cluster] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:45:15.080698Z :DEBUG: Decompression task done. Partition/PartitionSessionId: 1 (1-1) 2026-01-04T02:45:15.080825Z :DEBUG: Take Data. Partition 1. Read: {0, 0} (1-1) 2026-01-04T02:45:15.080954Z :DEBUG: [db] [sessionid] [cluster] The application data is transferred to the client. 
Number of messages 1, size 8 bytes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaDecrease [GOOD] Test command err: 2026-01-04T02:44:36.484787Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:36.484984Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:36.503413Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:36.503549Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:36.549206Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:36.549758Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:134:2159], cookie=18183268237571723984, session=0, seqNo=0) 2026-01-04T02:44:36.550918Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:44:36.574821Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:134:2159], cookie=18183268237571723984, session=1) 2026-01-04T02:44:36.575149Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:134:2159], cookie=9079408877532672480, session=0, seqNo=0) 2026-01-04T02:44:36.575287Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-04T02:44:36.587430Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:134:2159], cookie=9079408877532672480, session=2) 2026-01-04T02:44:36.588077Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[1:146:2168], cookie=12963128361507036607, name="Sem1", limit=1) 2026-01-04T02:44:36.588234Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-04T02:44:36.600665Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[1:146:2168], cookie=12963128361507036607) 2026-01-04T02:44:36.601013Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:134:2159], cookie=111, session=1, semaphore="Sem1" count=1) 2026-01-04T02:44:36.601216Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-01-04T02:44:36.601447Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:134:2159], cookie=222, session=2, semaphore="Sem1" count=1) 2026-01-04T02:44:36.613625Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:134:2159], cookie=111) 2026-01-04T02:44:36.613709Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:134:2159], cookie=222) 2026-01-04T02:44:36.614276Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:154:2176], cookie=17908699426064023466, name="Sem1") 2026-01-04T02:44:36.614380Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] 
TTxSemaphoreDescribe::Complete (sender=[1:154:2176], cookie=17908699426064023466) 2026-01-04T02:44:36.615756Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:157:2179], cookie=4251043945978543486, name="Sem1") 2026-01-04T02:44:36.615848Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:157:2179], cookie=4251043945978543486) 2026-01-04T02:44:37.040241Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:37.052734Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:37.392722Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:37.405280Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:37.756089Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:37.768471Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:38.097737Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:38.109933Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:38.471671Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:38.483894Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:38.825403Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:38.837656Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:39.158354Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:39.170580Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:39.501478Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:39.514559Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:39.860082Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:39.876664Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:40.261367Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:40.273486Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:40.634980Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:40.647197Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:41.004902Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:41.018659Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:41.370443Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:41.382624Z node 1 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:41.733725Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:41.746059Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:42.148855Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:42.160790Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:42.521831Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:42.533779Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:42.884544Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:42.898568Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:43.249317Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:43.261494Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:43.612328Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:43.624681Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:43.997425Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:44.009788Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:44.370744Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:44.382900Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:44.727988Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:44.739902Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:45.085593Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:45.098122Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:45.449204Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:45.461229Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:45.822748Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:45.835223Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:46.197493Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:46.209443Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:46.560381Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:46.572746Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:46.924039Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] 
TTxSelfCheck::Execute 2026-01-04T02:44:46.936343Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:47.287449Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:47.299679Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:47.704229Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:47.716035Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:48.077710Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:48.089684Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:48.444358Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:48.457605Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:48.807519Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:48.819685Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:49.169606Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:49.181919Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck: ... DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:10.290773Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:10.641194Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:10.653406Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:11.003550Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:11.015681Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:11.450026Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:11.462135Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:11.822768Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:11.835020Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:12.196250Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:12.208382Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:12.568861Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:12.580755Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:12.942390Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:12.954356Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:13.336883Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: 
[72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:13.349154Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:13.710212Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:13.722374Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:14.084059Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:14.096190Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:14.464026Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:14.475989Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:14.837820Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:14.850014Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:15.233704Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-01-04T02:45:15.233788Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-04T02:45:15.233891Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2026-01-04T02:45:15.246169Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-01-04T02:45:15.257091Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:455:2414], cookie=9744234435249188310, name="Sem1") 2026-01-04T02:45:15.257199Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:455:2414], cookie=9744234435249188310) 2026-01-04T02:45:15.556639Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:45:15.556752Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:45:15.570781Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:45:15.570897Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:45:15.584953Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:45:15.585514Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:135:2159], cookie=15218074040216510626, session=0, seqNo=0) 2026-01-04T02:45:15.585674Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:45:15.608291Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:135:2159], cookie=15218074040216510626, session=1) 2026-01-04T02:45:15.608598Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:135:2159], cookie=4227903964078489804, session=0, seqNo=0) 2026-01-04T02:45:15.608700Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-04T02:45:15.620701Z node 5 :KESUS_TABLET DEBUG: 
tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:135:2159], cookie=4227903964078489804, session=2) 2026-01-04T02:45:15.620988Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:135:2159], cookie=9304547150752652449, session=0, seqNo=0) 2026-01-04T02:45:15.621135Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 3 2026-01-04T02:45:15.633042Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:135:2159], cookie=9304547150752652449, session=3) 2026-01-04T02:45:15.633538Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:148:2170], cookie=3743829077778703515, name="Sem1", limit=3) 2026-01-04T02:45:15.633688Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-04T02:45:15.646055Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:148:2170], cookie=3743829077778703515) 2026-01-04T02:45:15.646416Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:135:2159], cookie=111, session=1, semaphore="Sem1" count=2) 2026-01-04T02:45:15.646582Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-01-04T02:45:15.646822Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:135:2159], cookie=222, session=2, semaphore="Sem1" count=1) 2026-01-04T02:45:15.646909Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2026-01-04T02:45:15.647014Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:135:2159], cookie=333, session=3, semaphore="Sem1" count=1) 2026-01-04T02:45:15.659322Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:135:2159], cookie=111) 2026-01-04T02:45:15.659418Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:135:2159], cookie=222) 2026-01-04T02:45:15.659451Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:135:2159], cookie=333) 2026-01-04T02:45:15.660082Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:156:2178], cookie=14461068189305166817, name="Sem1") 2026-01-04T02:45:15.660194Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:156:2178], cookie=14461068189305166817) 2026-01-04T02:45:15.660690Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:159:2181], cookie=2438387082254751799, name="Sem1") 2026-01-04T02:45:15.660776Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:159:2181], cookie=2438387082254751799) 2026-01-04T02:45:15.661065Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:135:2159], cookie=444, 
session=1, semaphore="Sem1" count=1) 2026-01-04T02:45:15.661202Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2026-01-04T02:45:15.673372Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:135:2159], cookie=444) 2026-01-04T02:45:15.674065Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:164:2186], cookie=628304276137505090, name="Sem1") 2026-01-04T02:45:15.674177Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:164:2186], cookie=628304276137505090) 2026-01-04T02:45:15.674640Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:167:2189], cookie=8424934073177213513, name="Sem1") 2026-01-04T02:45:15.674720Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:167:2189], cookie=8424934073177213513) 2026-01-04T02:45:15.688028Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:45:15.688146Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:45:15.688548Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:45:15.689042Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:45:15.735647Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:45:15.735847Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-01-04T02:45:15.735905Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2026-01-04T02:45:15.735940Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2026-01-04T02:45:15.736331Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:206:2219], cookie=16909317355970633518, name="Sem1") 2026-01-04T02:45:15.736430Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:206:2219], cookie=16909317355970633518) 2026-01-04T02:45:15.737004Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:215:2227], cookie=15682540782814048383, name="Sem1") 2026-01-04T02:45:15.737086Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:215:2227], cookie=15682540782814048383) >> TKesusTest::TestAcquireBeforeTimeoutViaSessionTimeout [GOOD] >> TKesusTest::TestAcquireSemaphore |93.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-ordinaryuser >> TKesusTest::TestAcquireSemaphore [GOOD] >> TGRpcCmsTest::AuthTokenTest [GOOD] >> TGRpcCmsTest::DescribeOptionsTest [GOOD] >> 
TGRpcCmsTest::SimpleTenantsTest [GOOD] >> BasicUsage::CreateTopicWithCustomName [GOOD] >> BasicUsage::CreateTopicWithAvailabilityPeriod ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphore [GOOD] Test command err: 2026-01-04T02:44:36.485190Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:36.485334Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:36.507036Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:36.507136Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:36.559790Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:36.560466Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:134:2159], cookie=5421578016323898088, session=0, seqNo=0) 2026-01-04T02:44:36.560648Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:44:36.572947Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:134:2159], cookie=5421578016323898088, session=1) 2026-01-04T02:44:36.573319Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:134:2159], cookie=1936486478320622421, session=0, seqNo=0) 2026-01-04T02:44:36.573470Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-04T02:44:36.590629Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:134:2159], cookie=1936486478320622421, session=2) 2026-01-04T02:44:36.591554Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:134:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-04T02:44:36.592790Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-04T02:44:36.592948Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-04T02:44:36.593665Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:134:2159], cookie=222, session=2, semaphore="Lock2" count=1) 2026-01-04T02:44:36.593760Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2026-01-04T02:44:36.593847Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2026-01-04T02:44:36.593968Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:134:2159], cookie=333, session=1, semaphore="Lock2" count=1) 2026-01-04T02:44:36.594054Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2026-01-04T02:44:36.606447Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:134:2159], cookie=111) 2026-01-04T02:44:36.606561Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: 
[72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:134:2159], cookie=222) 2026-01-04T02:44:36.606594Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:134:2159], cookie=333) 2026-01-04T02:44:36.607242Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:151:2173], cookie=10218602156517705603, name="Lock1") 2026-01-04T02:44:36.608460Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:151:2173], cookie=10218602156517705603) 2026-01-04T02:44:36.609132Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:154:2176], cookie=9284721858486256637, name="Lock2") 2026-01-04T02:44:36.609247Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:154:2176], cookie=9284721858486256637) 2026-01-04T02:44:36.625358Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:36.625470Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:36.626027Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:36.626360Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:36.644004Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:44:36.644159Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-04T02:44:36.644198Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 2 2026-01-04T02:44:36.644216Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #3 session 1 2026-01-04T02:44:36.644641Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:193:2206], cookie=13489262917964945108, name="Lock1") 2026-01-04T02:44:36.644750Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:193:2206], cookie=13489262917964945108) 2026-01-04T02:44:36.645376Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:201:2213], cookie=3748034608515360226, name="Lock2") 2026-01-04T02:44:36.645459Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:201:2213], cookie=3748034608515360226) 2026-01-04T02:44:37.123408Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:37.135803Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:37.501221Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:37.513438Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:37.857109Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:37.869471Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: 
[72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:38.221518Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:38.233642Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:38.608849Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:38.620809Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:38.972471Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:38.984448Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:39.314730Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:39.326969Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:39.681357Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:39.694657Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:40.047884Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:40.060013Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:40.470309Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:40.482278Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:40.833890Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:40.846222Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:41.216428Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:41.228762Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:41.580898Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:41.593046Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:41.953888Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:41.966318Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:42.380823Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:42.393010Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:42.753814Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:42.766197Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:43.127287Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:43.139640Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:43.503374Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 
2026-01-04T02:44:43.515561Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:43.877964Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:43.890126Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:44.294082Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:44.306276Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:44.672842Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:44.684783Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:45.053333Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:45.066654Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:45.429553Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:45.441708Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:45.792409Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:45.804489Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:46.195271Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [720575940379 ... pp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:12.511213Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:12.871359Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:12.883163Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:13.233511Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:13.245333Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:13.605616Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:13.617823Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:13.978271Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:13.990584Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:14.373641Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:14.385753Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:14.747000Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:14.759161Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:15.120659Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:15.132853Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:15.493912Z node 4 
:KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:15.505837Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:15.867238Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:15.879024Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:16.252202Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-01-04T02:45:16.252294Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-04T02:45:16.252338Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2026-01-04T02:45:16.252414Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2026-01-04T02:45:16.252475Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 2 "Lock2" owner link 2026-01-04T02:45:16.252512Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-01-04T02:45:16.264804Z node 4 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-01-04T02:45:16.265377Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:367:2347], cookie=5716329169377554422, name="Lock1") 2026-01-04T02:45:16.265452Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:367:2347], cookie=5716329169377554422) 2026-01-04T02:45:16.265860Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:370:2350], cookie=5115212126459820131, name="Lock2") 2026-01-04T02:45:16.265913Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:370:2350], cookie=5115212126459820131) 2026-01-04T02:45:16.266297Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:373:2353], cookie=15359197700507372295) 2026-01-04T02:45:16.266354Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:373:2353], cookie=15359197700507372295) 2026-01-04T02:45:16.278835Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:45:16.278918Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:45:16.279264Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:45:16.279686Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:45:16.326041Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:45:16.326181Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #3 session 2 2026-01-04T02:45:16.326232Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #4 session 2 2026-01-04T02:45:16.326593Z node 4 :KESUS_TABLET DEBUG: 
tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:412:2383], cookie=15585874698966550245) 2026-01-04T02:45:16.326669Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:412:2383], cookie=15585874698966550245) 2026-01-04T02:45:16.327251Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:419:2389], cookie=9932112771109205466, name="Lock1") 2026-01-04T02:45:16.327329Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:419:2389], cookie=9932112771109205466) 2026-01-04T02:45:16.327897Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:422:2392], cookie=10354751902301409969, name="Lock2") 2026-01-04T02:45:16.327969Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:422:2392], cookie=10354751902301409969) 2026-01-04T02:45:16.716964Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:45:16.717059Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:45:16.730107Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:45:16.730234Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:45:16.743684Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:45:16.744193Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:135:2159], cookie=16050611104865973563, session=0, seqNo=0) 2026-01-04T02:45:16.744337Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:45:16.766880Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:135:2159], cookie=16050611104865973563, session=1) 2026-01-04T02:45:16.767214Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:135:2159], cookie=15018929631790613114, session=0, seqNo=0) 2026-01-04T02:45:16.767347Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-04T02:45:16.779317Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:135:2159], cookie=15018929631790613114, session=2) 2026-01-04T02:45:16.779653Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:135:2159], cookie=111, session=1, semaphore="Sem1" count=1) 2026-01-04T02:45:16.791743Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:135:2159], cookie=111) 2026-01-04T02:45:16.792235Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:148:2170], cookie=10744491004428157693, name="Sem1", limit=1) 2026-01-04T02:45:16.792366Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-04T02:45:16.804253Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete 
(sender=[5:148:2170], cookie=10744491004428157693) 2026-01-04T02:45:16.804688Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:135:2159], cookie=333, session=1, semaphore="Sem1" count=100500) 2026-01-04T02:45:16.816864Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:135:2159], cookie=333) 2026-01-04T02:45:16.817194Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:135:2159], cookie=222, session=1, semaphore="Sem1" count=1) 2026-01-04T02:45:16.817317Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-01-04T02:45:16.817528Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:135:2159], cookie=333, session=2, semaphore="Sem1" count=1) 2026-01-04T02:45:16.829299Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:135:2159], cookie=222) 2026-01-04T02:45:16.829376Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:135:2159], cookie=333) 2026-01-04T02:45:16.829794Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:158:2180], cookie=770467367951928961, name="Sem1") 2026-01-04T02:45:16.829891Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:158:2180], cookie=770467367951928961) 2026-01-04T02:45:16.830252Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:161:2183], cookie=15381819168603853190, name="Sem1") 2026-01-04T02:45:16.830307Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:161:2183], cookie=15381819168603853190) 2026-01-04T02:45:16.830593Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:164:2186], cookie=12886278331932141473, name="Sem1", force=0) 2026-01-04T02:45:16.842390Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:164:2186], cookie=12886278331932141473) 2026-01-04T02:45:16.842822Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:169:2191], cookie=7440456318967200026, name="Sem1", force=1) 2026-01-04T02:45:16.842906Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 1 "Sem1" 2026-01-04T02:45:16.854822Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:169:2191], cookie=7440456318967200026) |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest >> TImmediateControlsConfiguratorTests::TestResetToDefault [GOOD] >> TImmediateControlsConfiguratorTests::TestMaxLimit ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AuthTokenTest [GOOD] Test command err: 2026-01-04T02:45:12.957994Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331985008353643:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:45:12.958114Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:45:13.106122Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:45:13.229659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:13.229730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:13.251110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:13.270473Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:13.388441Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:13.441850Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:45:13.441866Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:45:13.441877Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:45:13.441958Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:45:13.780597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:13.831815Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7591331989303321995:2301], Recipient [1:7591331989303321470:2202]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:47598" } 2026-01-04T02:45:13.831853Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2026-01-04T02:45:13.831867Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:13.831878Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:13.831997Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: 
"\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:47598" 2026-01-04T02:45:13.832146Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1767494713831914) 2026-01-04T02:45:13.865447Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1767494713831914 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-01-04T02:45:13.865696Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2026-01-04T02:45:13.868834Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-01-04T02:45:13.870468Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767494713831914&action=1" } } } 2026-01-04T02:45:13.871521Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:13.871617Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-04T02:45:13.871820Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-04T02:45:13.872278Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-01-04T02:45:13.872422Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-01-04T02:45:13.875396Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7591331989303322004:2302], Recipient [1:7591331989303321470:2202]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767494713831914&action=1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" } 2026-01-04T02:45:13.875422Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-04T02:45:13.875656Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767494713831914&action=1" } } 2026-01-04T02:45:13.876673Z node 1 :CMS_TENANTS DEBUG: 
console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-01-04T02:45:13.876713Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-04T02:45:13.876771Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7591331989303322000:2202], Recipient [1:7591331989303321470:2202]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-04T02:45:13.876787Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-01-04T02:45:13.876811Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:13.876825Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:13.876862Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-01-04T02:45:13.877680Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2026-01-04T02:45:13.877770Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-01-04T02:45:13.880338Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-04T02:45:13.880370Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:13.880377Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:13.880386Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:13.880440Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-01-04T02:45:13.880459Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1767494713831914 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-04T02:45:13.882293Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-04T02:45:13.899159Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:13.899243Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2026-01-04T02:45:13.899263Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2026-01-04T02:45:13.904519Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" DatabaseName: 
"Root" PeerName: "ipv6:[::1]:47598" 2026-01-04T02:45:13.907390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:45:13.911156Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:823: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 2026-01-04T02:45:13.911268Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:759: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715658 2026-01-04T02:45:13.914665Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:795: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715658 2026-01-04T02:45:13.920575Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715658 2026-01-04T02:45:13.923271Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:872: TSubdomainManip(/Root/users/user- ... 960: StateWork, received event# 273154052, Sender [1:7591331989303321285:2201], Recipient [1:7591331989303321470:2202]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-04T02:45:14.199047Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-04T02:45:14.199487Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-01-04T02:45:14.204491Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7591331993598290022:2395], Recipient [1:7591331989303321470:2202]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:47598" } 2026-01-04T02:45:14.204594Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-04T02:45:14.204636Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-01-04T02:45:14.206069Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7591331989303321285:2201], Recipient [1:7591331989303321470:2202]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-04T02:45:14.206098Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-04T02:45:14.206985Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS 
result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-01-04T02:45:14.209171Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7591331993598290040:2396], Recipient [1:7591331989303321470:2202]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:47598" } 2026-01-04T02:45:14.209204Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-04T02:45:14.209248Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-01-04T02:45:14.209338Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7591331989303321285:2201], Recipient [1:7591331989303321470:2202]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-04T02:45:14.209357Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-04T02:45:14.209906Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-01-04T02:45:14.211678Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7591331993598290044:2397], Recipient [1:7591331989303321470:2202]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:47598" } 2026-01-04T02:45:14.211705Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-04T02:45:14.211736Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-01-04T02:45:14.211830Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7591331989303321285:2201], Recipient [1:7591331989303321470:2202]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-04T02:45:14.211838Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-04T02:45:14.212420Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: 
"hdd" count: 1 } } generation: 1 } } } } 2026-01-04T02:45:14.214423Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715659 2026-01-04T02:45:14.214465Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-01-04T02:45:14.214521Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2026-01-04T02:45:14.214681Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435076, Sender [1:7591331989303322104:2202], Recipient [1:7591331989303321470:2202]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2026-01-04T02:45:14.214719Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:979: StateWork, processing event TEvPrivate::TEvSubdomainReady 2026-01-04T02:45:14.214741Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:14.214754Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:14.214812Z node 1 :CMS_TENANTS DEBUG: console__update_confirmed_subdomain.cpp:22: TTxUpdateConfirmedSubdomain for tenant /Root/users/user-1 to 2 2026-01-04T02:45:14.214849Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=RUNNING txid=1767494713831914 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-04T02:45:14.214917Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2904: Update database for /Root/users/user-1 confirmedsubdomain=2 2026-01-04T02:45:14.215226Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7591331993598290053:2398], Recipient [1:7591331989303321470:2202]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:47598" } 2026-01-04T02:45:14.215264Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-04T02:45:14.215317Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-01-04T02:45:14.215795Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7591331989303321285:2201], Recipient [1:7591331989303321470:2202]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-04T02:45:14.215818Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-04T02:45:14.216501Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: PENDING_RESOURCES required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-01-04T02:45:14.218212Z node 1 :CMS_TENANTS DEBUG: console__update_confirmed_subdomain.cpp:42: TTxUpdateConfirmedSubdomain complete for /Root/users/user-1 
2026-01-04T02:45:14.218248Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:14.218962Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7591331993598290068:2399], Recipient [1:7591331989303321470:2202]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" PeerName: "ipv6:[::1]:47598" } 2026-01-04T02:45:14.218999Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-04T02:45:14.219049Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2121: Send TEvTenantSlotBroker::TEvGetTenantState: TenantName: "/Root/users/user-1" 2026-01-04T02:45:14.219448Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7591331989303321285:2201], Recipient [1:7591331989303321470:2202]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-04T02:45:14.219477Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-04T02:45:14.220348Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3744: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.GetDatabaseStatusResult] { path: "/Root/users/user-1" state: RUNNING required_resources { storage_units { unit_kind: "hdd" count: 1 } } allocated_resources { storage_units { unit_kind: "hdd" count: 1 } } generation: 1 } } } } 2026-01-04T02:45:14.398350Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-04T02:45:14.398833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-04T02:45:14.932855Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:14.943604Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:15.933608Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/users/user-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::DescribeOptionsTest [GOOD] Test command err: 2026-01-04T02:45:13.564115Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331988246343770:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:45:13.564177Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:45:13.788766Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: 
Root/.metadata/script_executions 2026-01-04T02:45:13.810691Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:13.810803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:13.839566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:13.915344Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:13.952565Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:13.967666Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:45:13.967686Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:45:13.967703Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:45:13.967781Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:45:14.183730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:14.413220Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:131: TTxProcessor(tenants) is now locking 2026-01-04T02:45:14.413248Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:143: TTxProcessor(tenants) is now locked by parent 2026-01-04T02:45:14.416954Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:102: TTxProcessor(tenants) is now active 2026-01-04T02:45:14.452602Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285140, Sender [1:7591331992541312139:2302], Recipient [1:7591331988246344245:2204]: NKikimr::NConsole::TEvConsole::TEvDescribeTenantOptionsRequest { Request { } UserToken: "" PeerName: "ipv6:[::1]:41874" } 2026-01-04T02:45:14.452661Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:964: StateWork, processing event TEvConsole::TEvDescribeTenantOptionsRequest 2026-01-04T02:45:14.457615Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3326: Send TEvConsole::TEvDescribeTenantOptionsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.DescribeDatabaseOptionsResult] { storage_units { kind: "hdd2" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "hdd1" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "ssd" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } storage_units { kind: "test" labels { key: "disk_type" value: "ROT" } labels { key: "erasure" value: "none" } } availability_zones { name: "dc-1" labels { key: "collocation" value: "disabled" } labels { key: "fixed_data_center" value: "DC-1" } } 
availability_zones { name: "any" labels { key: "any_data_center" value: "true" } labels { key: "collocation" value: "disabled" } } computational_units { kind: "slot" labels { key: "slot_type" value: "default" } labels { key: "type" value: "dynamic_slot" } allowed_availability_zones: "any" allowed_availability_zones: "dc-1" } } } } } |93.1%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest |93.1%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTest [GOOD] Test command err: 2026-01-04T02:45:12.958007Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331987898786695:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:45:12.958077Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:45:13.106762Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:45:13.226949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:13.227075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:13.232499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:13.288728Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:13.321315Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:13.441704Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:45:13.441722Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:45:13.441768Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:45:13.441863Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:45:13.777006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:13.825098Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7591331992193755062:2301], Recipient [1:7591331992193754512:2200]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:43904" } 2026-01-04T02:45:13.825141Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event 
TEvConsole::TEvCreateTenantRequest 2026-01-04T02:45:13.825181Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:13.825200Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:13.825311Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:43904" 2026-01-04T02:45:13.826321Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1767494713826171) 2026-01-04T02:45:13.863433Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1767494713826171 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-01-04T02:45:13.863686Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2026-01-04T02:45:13.868663Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-01-04T02:45:13.870329Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767494713826171&action=1" } } } 2026-01-04T02:45:13.871521Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:13.871609Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-04T02:45:13.871789Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-04T02:45:13.872254Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-01-04T02:45:13.872407Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-01-04T02:45:13.875397Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7591331992193755070:2302], Recipient [1:7591331992193754512:2200]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767494713826171&action=1" } UserToken: "" } 2026-01-04T02:45:13.875426Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-04T02:45:13.875642Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send 
TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767494713826171&action=1" } } 2026-01-04T02:45:13.876592Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-01-04T02:45:13.876641Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-04T02:45:13.876714Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7591331992193755067:2200], Recipient [1:7591331992193754512:2200]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-04T02:45:13.876743Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-01-04T02:45:13.876762Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:13.876777Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:13.876808Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-01-04T02:45:13.877688Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2026-01-04T02:45:13.877755Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-01-04T02:45:13.880228Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-04T02:45:13.880262Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:13.880270Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:13.880276Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:13.880317Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-01-04T02:45:13.880340Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1767494713826171 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-04T02:45:13.882032Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-04T02:45:13.893787Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:13.893867Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2026-01-04T02:45:13.893873Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2026-01-04T02:45:13.898644Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true 
ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" PeerName: "ipv6:[::1]:43904" 2026-01-04T02:45:13.899750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:45:13.901601Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:823: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 2026-01-04T02:45:13.901667Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:759: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715658 2026-01-04T02:45:13.904108Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:795: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715658 2026-01-04T02:45:13.911244Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715658 2026-01-04T02:45:13.913596Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:872: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976715658 CreateStep: 1767494713950 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion ... 
80,5) wasn't found - using supplied 72075186224037896 2026-01-04T02:45:14.261112Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715660 2026-01-04T02:45:14.261127Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-01-04T02:45:14.261168Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-04T02:45:14.261259Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7591331996488723145:2200], Recipient [1:7591331992193754512:2200]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-04T02:45:14.261284Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2026-01-04T02:45:14.261302Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:14.261311Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:14.261342Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2026-01-04T02:45:14.261359Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1767494714218907 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-04T02:45:14.261425Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1767494714218907 issue= 2026-01-04T02:45:14.262201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:9 2026-01-04T02:45:14.262255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:6 2026-01-04T02:45:14.262305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:3 2026-01-04T02:45:14.262350Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:8 2026-01-04T02:45:14.262407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:5 2026-01-04T02:45:14.264258Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-01-04T02:45:14.268155Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2026-01-04T02:45:14.268232Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2105: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2026-01-04T02:45:14.268252Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:14.268554Z node 1 
:CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7591331992193754339:2203], Recipient [1:7591331992193754512:2200]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-04T02:45:14.268585Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-04T02:45:14.268609Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:14.268620Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:14.268648Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2026-01-04T02:45:14.268700Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1767494714218907 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-04T02:45:14.270760Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-04T02:45:14.270829Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:14.270863Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-04T02:45:14.270996Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-04T02:45:14.271700Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 39 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2026-01-04T02:45:14.271810Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2026-01-04T02:45:14.275332Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2026-01-04T02:45:14.275456Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7591331996488723257:2200], Recipient [1:7591331992193754512:2200]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2026-01-04T02:45:14.276156Z node 3 :BS_PROXY_PUT ERROR: dsproxy_put_impl.cpp:72: [2553a99a14921cb1] Result# TEvPutResult {Id# [72075186224037888:1:19:0:0:149:0] Status# ERROR StatusFlags# { } ErrorReason# "Request got Poison" ApproximateFreeSpaceShare# 0} GroupId# 2181038081 Marker# BPP12 2026-01-04T02:45:14.276277Z node 3 :BS_PROXY_PUT ERROR: {BPP72@dsproxy_put.cpp:483} Query history GroupId# 2181038081 HandleClass# TabletLog Tactic# MinLatency History# THistory { Entries# [ TEvVPut{ TimestampMs# 0.29 sample PartId# [72075186224037888:1:19:0:0:149:1] QueryCount# 1 VDiskId# [82000001:2:0:0:0] NodeId# 1 } ] } 2026-01-04T02:45:14.276519Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 
2026-01-04T02:45:14.276543Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:14.276554Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:14.276620Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2026-01-04T02:45:14.276643Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2026-01-04T02:45:14.284573Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-04T02:45:14.284619Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:14.284629Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:14.284635Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:14.284694Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1767494714218907 2026-01-04T02:45:14.284726Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1767494714218907 issue= 2026-01-04T02:45:14.284745Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2949: Remove tenant /Root/users/user-1 from database txid=1767494714218907 issue= 2026-01-04T02:45:14.284755Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2954: Remove pool /Root/users/user-1:hdd from database 2026-01-04T02:45:14.284824Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3074: Add tenant removal info for /Root/users/user-1 txid=1767494714218907 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-04T02:45:14.287250Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2026-01-04T02:45:14.287367Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:14.291612Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7591331996488723275:2408], Recipient [1:7591331992193754512:2200]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767494714218907&action=2" } UserToken: "" } 2026-01-04T02:45:14.291637Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-04T02:45:14.291815Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767494714218907&action=2" ready: true status: SUCCESS } } 2026-01-04T02:45:14.293440Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7591331996488723278:2410], Recipient [1:7591331992193754512:2200]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" PeerName: "ipv6:[::1]:43904" } 2026-01-04T02:45:14.293468Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-04T02:45:14.293618Z node 1 :CMS_TENANTS TRACE: 
console_tenants_manager.cpp:3368: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2026-01-04T02:45:14.295212Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285123, Sender [1:7591331996488723281:2411], Recipient [1:7591331992193754512:2200]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" PeerName: "ipv6:[::1]:43904" } 2026-01-04T02:45:14.295229Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:967: StateWork, processing event TEvConsole::TEvListTenantsRequest 2026-01-04T02:45:14.295357Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3412: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2026-01-04T02:45:14.298043Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-04T02:45:14.298216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-04T02:45:14.937353Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-anonymous >> TGRpcCmsTest::AlterRemoveTest [GOOD] >> TKeyValueTest::TestVacuumOnEmptyTablet [GOOD] >> TKeyValueTest::TestVacuumOnEmptyTabletResetGeneration |93.1%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-system |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-clusteradmin >> TMonitoringTests::ValidActorId |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TKeyValueTest::TestVacuumOnEmptyTabletResetGeneration [GOOD] |93.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TKeyValueTest::TestWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk >> TImmediateControlsConfiguratorTests::TestMaxLimit [GOOD] >> TImmediateControlsConfiguratorTests::TestDynamicMap >> DstCreator::WithSyncIndex [GOOD] >> TMonitoringTests::ValidActorId [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestVacuumOnEmptyTabletResetGeneration [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] 
sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! 
Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvForceTabletVacuum ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:82:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:85:2057] recipient: [7:84:2115] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:87:2057] recipient: [7:84:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:86:2116] Leader for TabletID 72057594037927937 is [7:86:2116] sender: [7:202:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:87:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:90:2057] recipient: [8:89:2119] Leader for TabletID 72057594037927937 is [8:91:2120] sender: [8:92:2057] recipient: [8:89:2119] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:91:2120] Leader for TabletID 72057594037927937 is [8:91:2120] sender: [8:207:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:91:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:94:2057] recipient: [9:93:2123] Leader for TabletID 72057594037927937 is [9:95:2124] sender: [9:96:2057] recipient: [9:93:2123] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:95:2124] Leader for TabletID 72057594037927937 is [9:95:2124] sender: [9:211:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:91:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:94:2057] recipient: [10:93:2123] Leader for TabletID 72057594037927937 is [10:95:2124] sender: [10:96:2057] recipient: [10:93:2123] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:95:2124] Leader for TabletID 72057594037927937 is [10:95:2124] sender: [10:211:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:96:2057] recipient: [11:95:2125] Leader for TabletID 72057594037927937 is [11:97:2126] sender: [11:98:2057] recipient: [11:95:2125] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:97:2126] Leader for TabletID 72057594037927937 is [11:97:2126] sender: [11:213:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:96:2057] recipient: [12:95:2125] Leader for TabletID 72057594037927937 is [12:97:2126] sender: [12:98:2057] recipient: [12:95:2125] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:97:2126] Leader for TabletID 72057594037927937 is [12:97:2126] sender: [12:213:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvForceTabletVacuum ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:93:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:96:2057] recipient: [13:95:2125] Leader for TabletID 72057594037927937 is [13:97:2126] sender: [13:98:2057] recipient: [13:95:2125] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:97:2126] Leader for TabletID 72057594037927937 is [13:97:2126] sender: [13:213:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:98:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:101:2057] recipient: [14:100:2129] Leader for TabletID 72057594037927937 is [14:102:2130] sender: [14:103:2057] recipient: [14:100:2129] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! 
new actor is[14:102:2130] Leader for TabletID 72057594037927937 is [14:102:2130] sender: [14:218:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:102:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:105:2057] recipient: [15:104:2133] Leader for TabletID 72057594037927937 is [15:106:2134] sender: [15:107:2057] recipient: [15:104:2133] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:106:2134] Leader for TabletID 72057594037927937 is [15:106:2134] sender: [15:222:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:102:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:105:2057] recipient: [16:104:2133] Leader for TabletID 72057594037927937 is [16:106:2134] sender: [16:107:2057] recipient: [16:104:2133] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:106:2134] Leader for TabletID 72057594037927937 is [16:106:2134] sender: [16:222:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:104:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:107:2057] recipient: [17:106:2135] Leader for TabletID 72057594037927937 is [17:108:2136] sender: [17:109:2057] recipient: [17:106:2135] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! 
new actor is[17:108:2136] Leader for TabletID 72057594037927937 is [17:108:2136] sender: [17:224:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:52:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:52:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:104:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:107:2057] recipient: [18:106:2135] Leader for TabletID 72057594037927937 is [18:108:2136] sender: [18:109:2057] recipient: [18:106:2135] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:108:2136] Leader for TabletID 72057594037927937 is [18:108:2136] sender: [18:224:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::AlterRemoveTest [GOOD] Test command err: 2026-01-04T02:45:15.087931Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331998404570966:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:45:15.088005Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:45:15.250918Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:45:15.280559Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:15.280672Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:15.286073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:15.367143Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:15.421251Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:45:15.421275Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:45:15.421282Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:45:15.421382Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:45:15.432834Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:15.583493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:15.621237Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7591331998404572023:2301], Recipient [1:7591331998404571491:2203]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:49348" } 2026-01-04T02:45:15.621289Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2026-01-04T02:45:15.621321Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:15.621330Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:15.621449Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:49348" 2026-01-04T02:45:15.621629Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1767494715621145) 2026-01-04T02:45:15.652744Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1767494715621145 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-01-04T02:45:15.652980Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2026-01-04T02:45:15.656082Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-01-04T02:45:15.656952Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767494715621145&action=1" } } } 2026-01-04T02:45:15.657083Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:15.657170Z node 1 :CMS_TENANTS DEBUG: 
console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-04T02:45:15.657308Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-04T02:45:15.657639Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-01-04T02:45:15.657755Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-01-04T02:45:15.660003Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7591331998404572031:2302], Recipient [1:7591331998404571491:2203]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767494715621145&action=1" } UserToken: "" } 2026-01-04T02:45:15.660030Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-04T02:45:15.660198Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767494715621145&action=1" } } 2026-01-04T02:45:15.661517Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-01-04T02:45:15.661564Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-04T02:45:15.661634Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7591331998404572028:2203], Recipient [1:7591331998404571491:2203]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-04T02:45:15.661659Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-01-04T02:45:15.661668Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:15.661675Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:15.661714Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-01-04T02:45:15.661733Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2026-01-04T02:45:15.661789Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-01-04T02:45:15.663869Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-04T02:45:15.663910Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:15.663916Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: 
TTxProcessor(tenants) completed tx 2026-01-04T02:45:15.663921Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:15.663966Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-01-04T02:45:15.663989Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1767494715621145 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-04T02:45:15.665492Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-04T02:45:15.673939Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:15.673981Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2026-01-04T02:45:15.673990Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2026-01-04T02:45:15.679203Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" PeerName: "ipv6:[::1]:49348" 2026-01-04T02:45:15.680627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:45:15.682917Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:823: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 2026-01-04T02:45:15.682982Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:759: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710658 2026-01-04T02:45:15.685493Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:795: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710658 2026-01-04T02:45:15.692198Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710658 2026-01-04T02:45:15.694544Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:872: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 281474976710658 CreateStep: 1767494715735 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion ... 
ation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpForceDropExtSubDomain, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_extsubdomain.cpp:342) 2026-01-04T02:45:15.725674Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-04T02:45:15.725695Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:15.727528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5826: Mark as Dropping path id [OwnerId: 72057594046644480, LocalPathId: 39] by tx: 281474976710660 2026-01-04T02:45:15.727811Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710659 2026-01-04T02:45:15.727826Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-01-04T02:45:15.727860Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2026-01-04T02:45:15.727929Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:823: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976710660 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 2026-01-04T02:45:15.727954Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:759: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976710660 2026-01-04T02:45:15.728013Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435076, Sender [1:7591331998404572128:2203], Recipient [1:7591331998404571491:2203]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainReady 2026-01-04T02:45:15.728035Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:979: StateWork, processing event TEvPrivate::TEvSubdomainReady 2026-01-04T02:45:15.728044Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:3652: Ignoring ready subdomain for tenant /Root/users/user-1 in REMOVING_SUBDOMAIN state 2026-01-04T02:45:15.730396Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:795: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976710660 2026-01-04T02:45:15.736923Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,2) wasn't found 2026-01-04T02:45:15.736995Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,7) wasn't found 2026-01-04T02:45:15.737022Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,4) wasn't found 2026-01-04T02:45:15.737047Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,10) wasn't found 2026-01-04T02:45:15.737184Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,9) wasn't found 2026-01-04T02:45:15.737220Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,6) wasn't found 2026-01-04T02:45:15.737245Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 
THive::TTxDeleteTablet tablet (72057594046644480,3) wasn't found 2026-01-04T02:45:15.737274Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,8) wasn't found 2026-01-04T02:45:15.737310Z node 1 :HIVE WARN: tx__delete_tablet.cpp:91: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found 2026-01-04T02:45:15.739209Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976710660 2026-01-04T02:45:15.739227Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-01-04T02:45:15.739275Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-04T02:45:15.739366Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7591331998404572205:2203], Recipient [1:7591331998404571491:2203]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-04T02:45:15.739404Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2026-01-04T02:45:15.739420Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:15.739427Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:15.739449Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2026-01-04T02:45:15.739469Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1767494715718902 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-04T02:45:15.739531Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1767494715718902 issue= 2026-01-04T02:45:15.744165Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2026-01-04T02:45:15.744236Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2105: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2026-01-04T02:45:15.744249Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:15.744439Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7591331998404571313:2202], Recipient [1:7591331998404571491:2203]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-04T02:45:15.744464Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-04T02:45:15.744481Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:15.744491Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:15.744518Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2026-01-04T02:45:15.744545Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1767494715718902 errorcode=STATUS_CODE_UNSPECIFIED 
issue= 2026-01-04T02:45:15.746469Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-04T02:45:15.746508Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:15.746529Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-04T02:45:15.746641Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-04T02:45:15.747028Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 39 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2026-01-04T02:45:15.747113Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2026-01-04T02:45:15.750037Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2026-01-04T02:45:15.750170Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7591331998404572341:2203], Recipient [1:7591331998404571491:2203]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2026-01-04T02:45:15.750205Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2026-01-04T02:45:15.750218Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:15.750226Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:15.750262Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2026-01-04T02:45:15.750297Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2026-01-04T02:45:15.752687Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-04T02:45:15.752717Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:15.752724Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:15.752729Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:15.752807Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1767494715718902 2026-01-04T02:45:15.752822Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1767494715718902 issue= 2026-01-04T02:45:15.752831Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2949: Remove tenant /Root/users/user-1 from database txid=1767494715718902 issue= 2026-01-04T02:45:15.752839Z node 1 :CMS_TENANTS TRACE: 
console_tenants_manager.cpp:2954: Remove pool /Root/users/user-1:hdd from database 2026-01-04T02:45:15.752900Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3074: Add tenant removal info for /Root/users/user-1 txid=1767494715718902 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-04T02:45:15.754856Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2026-01-04T02:45:15.754906Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:15.776264Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285131, Sender [1:7591331998404572360:2313], Recipient [1:7591331998404571491:2203]: NKikimr::NConsole::TEvConsole::TEvGetOperationRequest { Request { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767494715718902&action=2" } UserToken: "" } 2026-01-04T02:45:15.776294Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:965: StateWork, processing event TEvConsole::TEvGetOperationRequest 2026-01-04T02:45:15.776450Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3344: Send TEvConsole::TEvGetOperationResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767494715718902&action=2" ready: true status: SUCCESS } } |93.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TMonitoringTests::ValidActorId [GOOD] |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> DstCreator::WithSyncIndex [GOOD] Test command err: 2026-01-04T02:45:15.855949Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331998980849922:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:45:15.857126Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:45:16.084175Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:45:16.086405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:16.086574Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:16.107906Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331998980849889:2081] 1767494715853590 != 1767494715853593 2026-01-04T02:45:16.113045Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not 
loaded 2026-01-04T02:45:16.115641Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:16.297058Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:16.341944Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:45:16.341980Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:45:16.341986Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:45:16.342072Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:45:16.576996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:16.585447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:16.861633Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:16.872866Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:57: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult: entry# { Path: Root TableId: [72057594046644480:1:0] RequestType: ByTableId Operation: OpPath RedirectRequired: false ShowPrivatePath: false SyncVersion: false Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } } 2026-01-04T02:45:16.872897Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:92: [DstCreator][rid 1][tid 1] Get table profiles 2026-01-04T02:45:16.873265Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:111: [DstCreator][rid 1][tid 1] Handle NKikimr::NConsole::TEvConfigsDispatcher::TEvGetConfigResponse 2026-01-04T02:45:18.381895Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:163: [DstCreator][rid 1][tid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767494716841, tx_id: 281474976715658 } } } 2026-01-04T02:45:18.382445Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:254: [DstCreator][rid 1][tid 1] Handle NKikimr::TEvTxUserProxy::TEvAllocateTxIdResult 2026-01-04T02:45:18.384457Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:18.386215Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:284: [DstCreator][rid 1][tid 1] Handle {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715659} 2026-01-04T02:45:18.386238Z node 1 :REPLICATION_CONTROLLER DEBUG: dst_creator.cpp:311: [DstCreator][rid 1][tid 1] Subscribe tx: txId# 281474976715659 2026-01-04T02:45:18.415513Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:316: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvNotifyTxCompletionResult TxId: 281474976715659 2026-01-04T02:45:18.449041Z node 1 :REPLICATION_CONTROLLER TRACE: dst_creator.cpp:340: [DstCreator][rid 1][tid 1] Handle NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/Replicated" PathDescription { Self { Name: "Replicated" PathId: 41 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1767494718458 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "Replicated" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 
101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 0 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableIndexes { Name: "index_by_value" LocalPathId: 42 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046644480 DataSize: 0 IndexImplTableDescriptions { PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 
LastUpdateTime: 0 ImmediateTxCompleted: 0 Pl ... ndexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037906 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } PathId: 43 PathOwnerId: 72057594046644480 } 2026-01-04T02:45:18.458085Z node 1 :REPLICATION_CONTROLLER INFO: dst_creator.cpp:590: [DstCreator][rid 1][tid 2] Success: dstPathId# [OwnerId: 72057594046644480, LocalPathId: 43] Status: 1 StatusCode: SUCCESS SchemeStatus: 0 PathDescription { Self { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715659 CreateStep: 1767494718458 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 
419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 SplitByLoadSettings { Enabled: false } } ColumnFamilies { Id: 0 ColumnCodec: ColumnCodecPlain StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_ROW } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186224037906 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 19 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "__async_replica" Value: "true" } } Path: "/Root/Replicated/index_by_value/indexImplTable" |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_dst_creator/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-dbadmin >> DataShardWrite::ReplaceImmediate >> 
DataShardWrite::AsyncIndexKeySizeConstraint >> TImmediateControlsConfiguratorTests::TestDynamicMap [GOOD] >> TJaegerTracingConfiguratorTests::DefaultConfig |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-system |93.2%| [TA] $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/ut_dst_creator/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-ordinaryuser >> TJaegerTracingConfiguratorTests::DefaultConfig [GOOD] >> TJaegerTracingConfiguratorTests::GlobalRules ------- [TM] {asan, default-linux-x86_64, release} ydb/services/cms/ut/unittest >> TGRpcCmsTest::SimpleTenantsTestSyncOperation [GOOD] Test command err: 2026-01-04T02:45:16.412752Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332005293652159:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:45:16.412819Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:45:16.547418Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:45:16.615681Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:16.615819Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:16.669436Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:16.673340Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:16.746134Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:45:16.746148Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:45:16.746152Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:45:16.746215Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:45:16.814503Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:16.894927Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:16.930947Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285120, Sender [1:7591332005293653199:2301], Recipient [1:7591332005293652612:2200]: NKikimr::NConsole::TEvConsole::TEvCreateTenantRequest { Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:37636" } 2026-01-04T02:45:16.930993Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:963: StateWork, processing event TEvConsole::TEvCreateTenantRequest 2026-01-04T02:45:16.931008Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:16.931022Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:16.931148Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:71: TTxCreateTenant: Request { operation_params { operation_mode: SYNC } path: "/Root/users/user-1" resources { storage_units { unit_kind: "hdd" count: 1 } } } UserToken: "" PeerName: "ipv6:[::1]:37636" 2026-01-04T02:45:16.931255Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:365: Add tenant /Root/users/user-1 (txid = 1767494716931184) 2026-01-04T02:45:16.990283Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2568: Add tenant /Root/users/user-1 to database state=CREATING_POOLS coordinators=3 mediators=3 planresolution=10 timecastbucketspermediator=2 issue= txid=1767494716931184 subdomainversion=1 confirmedsubdomain=0 attrs= generation=1 errorcode=STATUS_CODE_UNSPECIFIED isExternalSubDomain=1 isExternalHive=1 isExternalSysViewProcessor=1 isExternalStatisticsAggregator=1 areResourcesShared=0 sharedDomainId= 2026-01-04T02:45:16.990475Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2628: Add tenant pool /Root/users/user-1:hdd to database kind=hdd config=BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } allocatednumgroups=0 state=NOT_ALLOCATED 2026-01-04T02:45:16.993554Z node 1 :CMS_TENANTS DEBUG: console__create_tenant.cpp:375: TTxCreateTenant Complete 2026-01-04T02:45:16.994297Z node 1 :CMS_TENANTS TRACE: console__create_tenant.cpp:383: Send: NKikimr::NConsole::TEvConsole::TEvCreateTenantResponse { Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767494716931184&action=1" } } } 2026-01-04T02:45:16.994417Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:16.994478Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-04T02:45:16.994606Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-04T02:45:16.994771Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285139, Sender [1:7591332005293653199:2301], Recipient [1:7591332005293652612:2200]: NKikimr::NConsole::TEvConsole::TEvNotifyOperationCompletionRequest { Request { id: 
"ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767494716931184&action=1" } UserToken: "" PeerName: "ipv6:[::1]:37636" } 2026-01-04T02:45:16.994801Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:968: StateWork, processing event TEvConsole::TEvNotifyOperationCompletionRequest 2026-01-04T02:45:16.994954Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:3434: Add subscription to /Root/users/user-1 for [1:7591332005293653199:2301] 2026-01-04T02:45:16.995016Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3442: Send TEvConsole::TEvNotifyOperationCompletionResponse: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767494716931184&action=1" } } 2026-01-04T02:45:16.995080Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true } Success: true ConfigTxSeqNo: 5 2026-01-04T02:45:16.995186Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:131: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DefineStoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" VDiskKind: "Default" Kind: "hdd" NumGroups: 1 PDiskFilter { Property { Type: ROT } } } } } 2026-01-04T02:45:16.998780Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:244: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 6 2026-01-04T02:45:16.998817Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:168: TPoolManip(/Root/users/user-1:hdd) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-04T02:45:16.998901Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435079, Sender [1:7591332005293653204:2200], Recipient [1:7591332005293652612:2200]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolAllocated 2026-01-04T02:45:16.998926Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:972: StateWork, processing event TEvPrivate::TEvPoolAllocated 2026-01-04T02:45:16.998936Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:16.998943Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:16.998987Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=ALLOCATED 2026-01-04T02:45:16.999007Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=ALLOCATED allocatednumgroups=1 2026-01-04T02:45:16.999057Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3197: Update subdomain version in database for /Root/users/user-1 subdomainversion=2 2026-01-04T02:45:17.000838Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-04T02:45:17.000865Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:17.000877Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:17.000884Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:17.000926Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to CREATING_SUBDOMAIN 2026-01-04T02:45:17.000947Z node 1 
:CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=CREATING_SUBDOMAIN txid=1767494716931184 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-04T02:45:17.002748Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-04T02:45:17.012538Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:17.012595Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:775: TSubdomainManip(/Root/users/user-1)::Bootstrap 2026-01-04T02:45:17.012604Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:597: TSubDomainManip(/Root/users/user-1) create subdomain 2026-01-04T02:45:17.019119Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:615: TSubdomainManip(/Root/users/user-1) send subdomain creation cmd: NKikimrTxUserProxy.TEvProposeTransaction Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "users/user-1" ExternalSchemeShard: true ExternalHive: true ExternalSysViewProcessor: true ExternalStatisticsAggregator: true GraphShard: true } } } ExecTimeoutPeriod: 18446744073709551615 DatabaseName: "Root" PeerName: "ipv6:[::1]:37636" 2026-01-04T02:45:17.020416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:45:17.022534Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:823: TSubdomainManip(/Root/users/user-1) got propose result: Status: 53 TxId: 281474976715658 SchemeShardStatus: 1 SchemeShardTabletId: 72057594046644480 PathId: 39 2026-01-04T02:45:17.022584Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:759: TSubdomainManip(/Root/users/user-1) send notification request: NKikimrScheme.TEvNotifyTxCompletion TxId: 281474976715658 2026-01-04T02:45:17.025225Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:795: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionRegistered: TxId: 281474976715658 2026-01-04T02:45:17.030590Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715658 2026-01-04T02:45:17.032401Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:872: TSubdomainManip(/Root/users/user-1) got describe result: Status: StatusSuccess Path: "/Root/users/user-1" PathDescription { Self { Name: "user-1" PathId: 39 SchemeshardId: 72057594046644480 PathType: EPathT ... 
43Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:7 2026-01-04T02:45:17.358951Z node 1 :HIVE WARN: tx__delete_tablet.cpp:88: HIVE#72057594037968897 THive::TTxDeleteTablet tablet (72057594046644480,5) wasn't found - using supplied 72075186224037896 2026-01-04T02:45:17.358984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:4 2026-01-04T02:45:17.359010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:10 2026-01-04T02:45:17.360524Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:800: TSubdomainManip(/Root/users/user-1) got TEvNotifyTxCompletionResult: TxId: 281474976715660 2026-01-04T02:45:17.360543Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:685: TSubdomainManip(/Root/users/user-1) done 2026-01-04T02:45:17.360577Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:701: TSubdomainManip(/Root/users/user-1) reply with NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-04T02:45:17.360692Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435077, Sender [1:7591332009588621342:2200], Recipient [1:7591332005293652612:2200]: NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvSubdomainRemoved 2026-01-04T02:45:17.360719Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:980: StateWork, processing event TEvPrivate::TEvSubdomainRemoved 2026-01-04T02:45:17.360743Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:17.360751Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:17.360796Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:20: TTxRemoveComputationalUnits Execute /Root/users/user-1 2026-01-04T02:45:17.360817Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_UNITS txid=1767494717323405 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-04T02:45:17.360875Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1767494717323405 issue= 2026-01-04T02:45:17.361770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:9 2026-01-04T02:45:17.361879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:6 2026-01-04T02:45:17.361938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:3 2026-01-04T02:45:17.361997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:8 2026-01-04T02:45:17.362054Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__delete_tablet_reply.cpp:41: Got DeleteTabletReply with Forward response from Hive 72057594037968897 to Hive 72075186224037888 shardIdx 72057594046644480:5 2026-01-04T02:45:17.363556Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-01-04T02:45:17.368548Z node 1 :CMS_TENANTS DEBUG: console__remove_computational_units.cpp:34: TTxRemoveComputationalUnits Complete /Root/users/user-1 2026-01-04T02:45:17.368653Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2105: Send TEvTenantSlotBroker::TEvAlterTenant: TenantName: "/Root/users/user-1" 2026-01-04T02:45:17.368669Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:17.369227Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273154052, Sender [1:7591332005293652497:2204], Recipient [1:7591332005293652612:2200]: NKikimrTenantSlotBroker.TTenantState TenantName: "/Root/users/user-1" 2026-01-04T02:45:17.369260Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:984: StateWork, processing event TEvTenantSlotBroker::TEvTenantState 2026-01-04T02:45:17.369292Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:17.369303Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:17.369330Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:23: TTxUpdateTenantState for tenant /Root/users/user-1 to REMOVING_POOLS 2026-01-04T02:45:17.369377Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3137: Update tenant state in database for /Root/users/user-1 state=REMOVING_POOLS txid=1767494717323405 errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-04T02:45:17.371726Z node 1 :CMS_TENANTS DEBUG: console__update_tenant_state.cpp:45: TTxUpdateTenantState complete for /Root/users/user-1 2026-01-04T02:45:17.371783Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:17.371818Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:158: TPoolManip(/Root/users/user-1:hdd) Bootstrap 2026-01-04T02:45:17.372033Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:117: TPoolManip(/Root/users/user-1:hdd) read pool state: Request { Command { ReadStoragePool { BoxId: 999 Name: "/Root/users/user-1:hdd" } } } 2026-01-04T02:45:17.372540Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:198: TPoolManip(/Root/users/user-1:hdd) got read response: Status { Success: true StoragePool { BoxId: 999 StoragePoolId: 4 Name: "/Root/users/user-1:hdd" ErasureSpecies: "none" Geometry { } VDiskKind: "Default" Kind: "hdd" NumGroups: 2 PDiskFilter { Property { Type: ROT } } ScopeId { X1: 72057594046644480 X2: 39 } ItemConfigGeneration: 3 } } Success: true ConfigTxSeqNo: 13 2026-01-04T02:45:17.372631Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:151: TPoolManip(/Root/users/user-1:hdd) send pool request: Request { Command { DeleteStoragePool { BoxId: 999 StoragePoolId: 4 ItemConfigGeneration: 3 } } } 2026-01-04T02:45:17.376551Z node 1 :CMS_TENANTS DEBUG: console_tenants_manager.cpp:306: TPoolManip(/Root/users/user-1:hdd) got config response: Status { Success: true } Success: true ConfigTxSeqNo: 14 2026-01-04T02:45:17.376683Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 2146435081, Sender [1:7591332009588621459:2200], Recipient [1:7591332005293652612:2200]: 
NKikimr::NConsole::TTenantsManager::TEvPrivate::TEvPoolDeleted 2026-01-04T02:45:17.376764Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:973: StateWork, processing event TEvPrivate::TEvPoolDeleted 2026-01-04T02:45:17.376787Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:17.376801Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:17.376869Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:28: TTxUpdatePoolState for pool /Root/users/user-1:hdd of /Root/users/user-1 state=DELETED 2026-01-04T02:45:17.376917Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3038: Update pool state in database for /Root/users/user-1:hdd state=DELETED allocatednumgroups=0 2026-01-04T02:45:17.381110Z node 1 :CMS_TENANTS DEBUG: console__update_pool_state.cpp:73: TTxUpdatePoolState complete for /Root/users/user-1:hdd 2026-01-04T02:45:17.381140Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:51: TTxProcessor(tenants) enqueue tx 2026-01-04T02:45:17.381147Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:17.381151Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:245: TTxProcessor(tenants) starts new tx 2026-01-04T02:45:17.381205Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:22: TTxRemoveTenantDone for tenant /Root/users/user-1 txid=1767494717323405 2026-01-04T02:45:17.381217Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2918: Remove computational units of /Root/users/user-1 from database txid=1767494717323405 issue= 2026-01-04T02:45:17.381235Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2949: Remove tenant /Root/users/user-1 from database txid=1767494717323405 issue= 2026-01-04T02:45:17.381243Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2954: Remove pool /Root/users/user-1:hdd from database 2026-01-04T02:45:17.381357Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3074: Add tenant removal info for /Root/users/user-1 txid=1767494717323405 code=SUCCESS errorcode=STATUS_CODE_UNSPECIFIED issue= 2026-01-04T02:45:17.383376Z node 1 :CMS_TENANTS DEBUG: console__remove_tenant_done.cpp:34: TTxRemoveTenantDone Complete 2026-01-04T02:45:17.383603Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:2422: Send /Root/users/user-1 notification to [1:7591332009588621297:2432]: Response { operation { id: "ydb://cmsrequest/5?tenant=/Root/users/user-1&cmstid=72057594037936131&txid=1767494717323405&action=2" ready: true status: SUCCESS } } 2026-01-04T02:45:17.383679Z node 1 :CMS_TENANTS TRACE: tx_processor.cpp:60: TTxProcessor(tenants) completed tx 2026-01-04T02:45:17.386334Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285122, Sender [1:7591332009588621477:2434], Recipient [1:7591332005293652612:2200]: NKikimr::NConsole::TEvConsole::TEvGetTenantStatusRequest { Request { path: "/Root/users/user-1" } UserToken: "" PeerName: "ipv6:[::1]:37636" } 2026-01-04T02:45:17.386363Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:966: StateWork, processing event TEvConsole::TEvGetTenantStatusRequest 2026-01-04T02:45:17.386538Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3368: Send TEvConsole::TEvGetTenantStatusResponse: Response { operation { ready: true status: NOT_FOUND issues { message: "Unknown tenant /Root/users/user-1" severity: 1 } } } 2026-01-04T02:45:17.388427Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:960: StateWork, received event# 273285123, 
Sender [1:7591332009588621480:2435], Recipient [1:7591332005293652612:2200]: NKikimr::NConsole::TEvConsole::TEvListTenantsRequest { Request { } UserToken: "" PeerName: "ipv6:[::1]:37636" } 2026-01-04T02:45:17.388444Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.h:967: StateWork, processing event TEvConsole::TEvListTenantsRequest 2026-01-04T02:45:17.388565Z node 1 :CMS_TENANTS TRACE: console_tenants_manager.cpp:3412: Send TEvConsole::TEvListTenantsResponse: Response { operation { ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Cms.ListDatabasesResult] { } } } } 2026-01-04T02:45:17.391946Z node 1 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-04T02:45:17.392150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-04T02:45:17.420839Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:18.043434Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:19.471721Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/scheme_board/ut_monitoring/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/services/cms/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/scheme_board/ut_monitoring/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-ordinaryuser >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorks >> TConsoleTests::TestNotifyOperationCompletion [GOOD] >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain |93.2%| [TA] $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {RESULT} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {RESULT} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/scheme_board/ut_monitoring/test-results/unittest/{meta.json ... results_accumulator.log} |93.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/cms/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-dbadmin >> TJaegerTracingConfiguratorTests::GlobalRules [GOOD] >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling [GOOD] >> TDataShardLocksTest::Points_OneTx >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-clusteradmin >> TDataShardLocksTest::MvccTestOooTxDoesntBreakPrecedingReadersLocks [GOOD] >> TDataShardLocksTest::MvccTestOutdatedLocksRemove [GOOD] >> TDataShardLocksTest::MvccTestBreakEdge [GOOD] >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] >> TDataShardLocksTest::Points_OneTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_RemoveAll >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-ordinaryuser >> TDataShardLocksTest::MvccTestWriteBreaksLocks [GOOD] >> TDataShardLocksTest::Points_ManyTx >> DataShardVolatile::DistributedUpsertRestartAfterPrepare+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-clusteradmin >> TDataShardLocksTest::Points_ManyTx_RemoveAll [GOOD] >> TDataShardLocksTest::UseLocksCache |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TDataShardLocksTest::Points_ManyTx [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakAll |93.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::MvccTestAlreadyBrokenLocks [GOOD] |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-clusteradmin |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_locks/unittest |93.2%| [TM] 
{BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> DataShardWrite::ReplaceImmediate [GOOD] >> DataShardWrite::ReplaceImmediate_DefaultValue >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-dbadmin >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] >> DataShardWrite::AsyncIndexKeySizeConstraint [GOOD] >> DataShardWrite::DeleteImmediate >> TDataShardLocksTest::Points_ManyTx_BreakAll [GOOD] >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-clusteradmin >> CompressExecutor::TestExecutorMemUsage [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithAbort >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:78:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:81:2057] recipient: [10:80:2112] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:83:2057] recipient: [10:80:2112] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:82:2113] Leader for TabletID 72057594037927937 is [10:82:2113] sender: [10:198:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:78:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:81:2057] recipient: [11:80:2112] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:83:2057] recipient: [11:80:2112] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:82:2113] Leader for TabletID 72057594037927937 is [11:82:2113] sender: [11:198:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:79:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:82:2057] recipient: [12:81:2112] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:84:2057] recipient: [12:81:2112] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:83:2113] Leader for TabletID 72057594037927937 is [12:83:2113] sender: [12:199:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:82:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:85:2057] recipient: [13:84:2115] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:87:2057] recipient: [13:84:2115] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:86:2116] Leader for TabletID 72057594037927937 is [13:86:2116] sender: [13:202:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:82:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:85:2057] recipient: [14:84:2115] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:87:2057] recipient: [14:84:2115] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:86:2116] Leader for TabletID 72057594037927937 is [14:86:2116] sender: [14:202:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:83:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:86:2057] recipient: [15:85:2115] Leader for TabletID 72057594037927937 is [15:87:2116] sender: [15:88:2057] recipient: [15:85:2115] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:87:2116] Leader for TabletID 72057594037927937 is [15:87:2116] sender: [15:203:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:86:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:89:2057] recipient: [16:88:2118] Leader for TabletID 72057594037927937 is [16:90:2119] sender: [16:91:2057] recipient: [16:88:2118] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:90:2119] Leader for TabletID 72057594037927937 is [16:90:2119] sender: [16:206:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:86:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:89:2057] recipient: [17:88:2118] Leader for TabletID 72057594037927937 is [17:90:2119] sender: [17:91:2057] recipient: [17:88:2118] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! 
new actor is[17:90:2119] Leader for TabletID 72057594037927937 is [17:90:2119] sender: [17:206:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:52:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:52:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:87:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:90:2057] recipient: [18:89:2118] Leader for TabletID 72057594037927937 is [18:91:2119] sender: [18:92:2057] recipient: [18:89:2118] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:91:2119] Leader for TabletID 72057594037927937 is [18:91:2119] sender: [18:207:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:90:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:93:2057] recipient: [19:92:2121] Leader for TabletID 72057594037927937 is [19:94:2122] sender: [19:95:2057] recipient: [19:92:2121] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! new actor is[19:94:2122] Leader for TabletID 72057594037927937 is [19:94:2122] sender: [19:210:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:90:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:93:2057] recipient: [20:92:2121] Leader for TabletID 72057594037927937 is [20:94:2122] sender: [20:95:2057] recipient: [20:92:2121] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! 
new actor is[20:94:2122] Leader for TabletID 72057594037927937 is [20:94:2122] sender: [20:210:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:91:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:94:2057] recipient: [21:93:2121] Leader for TabletID 72057594037927937 is [21:95:2122] sender: [21:96:2057] recipient: [21:93:2121] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:95:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:54:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:54:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TJaegerTracingConfiguratorTests::ExternalTracePlusSampling [GOOD] Test command err: 2026-01-04T02:44:05.915548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:05.915605Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:06.089151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:06.975250Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:44:06.975630Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f42/r3tmp/tmptx8z3O/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:44:06.976031Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f42/r3tmp/tmptx8z3O/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001f42/r3tmp/tmptx8z3O/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6050081234636184127 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:44:07.084411Z node 3 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000000:_:0:0:0]: (2147483648) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 
bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f42/r3tmp/tmptx8z3O/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:44:07.084592Z node 3 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000001:_:0:0:0]: (2147483649) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f42/r3tmp/tmptx8z3O/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:44:07.084745Z node 3 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000002:_:0:0:0]: (2147483650) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f42/r3tmp/tmptx8z3O/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:44:07.144524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:44:07.317044Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:44:07.317603Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f42/r3tmp/tmptx8z3O/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:44:07.317899Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f42/r3tmp/tmptx8z3O/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001f42/r3tmp/tmptx8z3O/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8468191646918952050 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunks ... ed to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f42/r3tmp/tmpCi5EAB/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001f42/r3tmp/tmpCi5EAB/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4067177978556061642 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:44:30.310924Z node 46 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [46:1577:2813] txid# 281474976715659, issues: { message: "Invalid AlterExtSubDomain request: Check failed: path: \'/dc-1/users/tenant-1\', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter)" 
severity: 1 } 2026-01-04T02:44:30.311466Z node 46 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3596: Cannot configure subdomain for tenant /dc-1/users/tenant-1: GENERIC_ERROR: Invalid AlterExtSubDomain request: Check failed: path: '/dc-1/users/tenant-1', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter) 2026-01-04T02:44:30.394527Z node 46 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [46:1602:2835] txid# 281474976715660, issues: { message: "Invalid AlterExtSubDomain request: Check failed: path: \'/dc-1/users/tenant-1\', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter)" severity: 1 } 2026-01-04T02:44:30.394940Z node 46 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3596: Cannot configure subdomain for tenant /dc-1/users/tenant-1: GENERIC_ERROR: Invalid AlterExtSubDomain request: Check failed: path: '/dc-1/users/tenant-1', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter) 2026-01-04T02:44:30.427618Z node 46 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [46:1661:2879] txid# 281474976715661, issues: { message: "Invalid AlterExtSubDomain request: Check failed: path: \'/dc-1/users/tenant-1\', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter)" severity: 1 } 2026-01-04T02:44:30.427955Z node 46 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3596: Cannot configure subdomain for tenant /dc-1/users/tenant-1: GENERIC_ERROR: Invalid AlterExtSubDomain request: Check failed: path: '/dc-1/users/tenant-1', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter) 2026-01-04T02:44:30.493789Z node 46 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [46:1686:2901] txid# 281474976715662, issues: { message: "Invalid AlterExtSubDomain request: Check failed: path: \'/dc-1/users/tenant-1\', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter)" severity: 1 } 2026-01-04T02:44:30.494177Z node 46 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3596: Cannot configure subdomain for tenant /dc-1/users/tenant-1: GENERIC_ERROR: Invalid AlterExtSubDomain request: Check failed: path: '/dc-1/users/tenant-1', error: path is under operation (id: [OwnerId: 72057594046578944, LocalPathId: 39], type: EPathTypeExtSubDomain, state: EPathStateAlter) 2026-01-04T02:44:32.944090Z node 55 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:32.944168Z node 55 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:33.059278Z node 55 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:36.441019Z node 64 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:36.441097Z node 64 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:36.570136Z node 64 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:39.912823Z node 73 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:39.912885Z node 73 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:40.018853Z node 73 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:43.358442Z node 82 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:43.358533Z node 82 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:43.468014Z node 82 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:14.387432Z node 91 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:14.387526Z node 91 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:14.489189Z node 91 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:15.674480Z node 92 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:15.674572Z node 92 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:15.763747Z node 92 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:16.773110Z node 93 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:16.773195Z node 93 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:16.847453Z node 93 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:18.188549Z node 94 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:18.188640Z node 94 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-04T02:45:18.263411Z node 94 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:19.263658Z node 95 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:19.263747Z node 95 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:19.352092Z node 95 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:20.375104Z node 96 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:20.375197Z node 96 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:20.459743Z node 96 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:21.416781Z node 97 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:21.416853Z node 97 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:21.510482Z node 97 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:22.492417Z node 98 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:22.492497Z node 98 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:22.563942Z node 98 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::Points_ManyTx_BreakHalf_RemoveHalf [GOOD] |93.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_locks/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-dbadmin |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateTable-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-system >> KqpLimits::CancelAfterRwTx-useSink [GOOD] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TConsoleTests::TestNotifyOperationCompletionExtSubdomain [GOOD] >> TConsoleTests::TestRemoveAttributes >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-dbadmin |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TKesusTest::TestSessionTimeoutAfterReboot [GOOD] >> TKesusTest::TestSessionStealingSameKey |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.2%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| 
[TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-clusteradmin |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRwTx-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 14531, MsgBus: 6789 2026-01-04T02:41:20.098800Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330987608888660:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:20.100645Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:20.425975Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:20.444153Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:20.444313Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:20.561458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:20.577422Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:20.609882Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591330987608888631:2081] 1767494480097125 != 1767494480097128 2026-01-04T02:41:20.650659Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:20.768686Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:20.768718Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:20.768724Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:20.768790Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:21.117111Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:21.388746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:21.462884Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.541936Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331000493791752:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.542077Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.542254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331000493791764:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.542287Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331000493791766:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.542383Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.545312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:23.558288Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331000493791768:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:41:23.649016Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331000493791819:2765] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:24.273728Z node 1 :KQP_COMPUTE WARN: log.cpp:841: fline=kqp_compute_actor_factory.cpp:34;problem=cannot_allocate_memory;tx_id=281474976710661;task_id=2;memory=1048576; 2026-01-04T02:41:24.273773Z node 1 :KQP_COMPUTE WARN: dq_compute_memory_quota.h:152: TxId: 281474976710661, task: 2. [Mem] memory 1048576 NOT granted 2026-01-04T02:41:24.300151Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7591331004788759161:2364], TxId: 281474976710661, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3e2zjjdw60wqqyrv8tpkwh. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=ZDU0YWRiYTQtZTRmZDc4NTUtYTdjYWU1N2MtMTk2ZjA4OTI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: OVERLOADED KIKIMR_PRECONDITION_FAILED: {
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-46vtxmsa7i, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2026-01-04T02:41:24.258363Z }, code: 2029 }. 2026-01-04T02:41:24.305189Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7591331004788759159:2363], TxId: 281474976710661, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3e2zjjdw60wqqyrv8tpkwh. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=ZDU0YWRiYTQtZTRmZDc4NTUtYTdjYWU1N2MtMTk2ZjA4OTI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7591331004788759148:2347], status: OVERLOADED, reason: {
: Error: Terminate execution } 2026-01-04T02:41:24.309644Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZDU0YWRiYTQtZTRmZDc4NTUtYTdjYWU1N2MtMTk2ZjA4OTI=, ActorId: [1:7591331000493791749:2347], ActorState: ExecuteState, LegacyTraceId: 01ke3e2zjjdw60wqqyrv8tpkwh, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-46vtxmsa7i, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2026-01-04T02:41:24.258363Z }\n" issue_code: 2029 severity: 1 } trace_id#
: Error: Mkql memory limit exceeded, allocated by task 2: 10, host: ghrun-46vtxmsa7i, canAllocateExtraMemory: 1, memory manager details for current node: TxResourcesInfo { TxId: 281474976710661, Database: /Root, PoolId: default, MemoryPoolPercent: 100.00, tx initially granted memory: 20B, tx total memory allocations: 1MiB, tx largest successful memory allocation: 1MiB, tx last failed memory allocation: 1MiB, tx total execution units: 2, started at: 2026-01-04T02:41:24.258363Z } , code: 2029 Trying to start YDB, gRPC: 19309, MsgBus: 26060 2026-01-04T02:41:25.005098Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331010266890384:2121];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:25.006017Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:25.080212Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:25.085659Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:25.124959Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:25.125035Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:25.127332Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:25.196715Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:25.196737Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:25.196744Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:25.196820Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:25.322918Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:25.570356Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:25.587079Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__op ... 6711065, task: 6. Ctx: { CheckpointId : . TraceId : 01ke3e9gyd3qv6h0a5x1x1ddjn. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. 
SessionId : ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7591331925383431590:2526], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-04T02:44:58.367825Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7591331925383431603:5706], TxId: 281474976711065, task: 7. Ctx: { CheckpointId : . TraceId : 01ke3e9gyd3qv6h0a5x1x1ddjn. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7591331925383431590:2526], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-04T02:44:58.367994Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7591331925383431598:5701], TxId: 281474976711065, task: 2. Ctx: { TraceId : 01ke3e9gyd3qv6h0a5x1x1ddjn. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [5:7591331925383431590:2526], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-04T02:44:58.367999Z node 5 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037894 Cancelled read: {[5:7591331925383431622:5704], 0} 2026-01-04T02:44:58.368053Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7591331925383431605:5708], TxId: 281474976711065, task: 9. Ctx: { TraceId : 01ke3e9gyd3qv6h0a5x1x1ddjn. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7591331925383431590:2526], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-04T02:44:58.368757Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=, ActorId: [5:7591331581786036734:2526], ActorState: ExecuteState, LegacyTraceId: 01ke3e9gyd3qv6h0a5x1x1ddjn, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 432ms" severity: 1 }{ message: "Cancelling after 432ms during execution" severity: 1 } trace_id# 2026-01-04T02:45:04.304279Z node 5 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [5:7591331951153236205:2526] TxId: 281474976711095. Ctx: { TraceId: 01ke3e9pqgetsyztqrzv3dxyp2, Database: /Root, SessionId: ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# CANCELLED Issues# [ {
: Error: Request canceled after 447ms } {
: Error: Cancelling after 447ms during execution } ] trace_id# 2026-01-04T02:45:04.304442Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7591331951153236220:5930], TxId: 281474976711095, task: 9. Ctx: { CheckpointId : . TraceId : 01ke3e9pqgetsyztqrzv3dxyp2. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [5:7591331951153236205:2526], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-04T02:45:04.317319Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7591331951153236217:5927], TxId: 281474976711095, task: 6. Ctx: { CheckpointId : . TraceId : 01ke3e9pqgetsyztqrzv3dxyp2. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7591331951153236205:2526], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-04T02:45:04.317954Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=, ActorId: [5:7591331581786036734:2526], ActorState: ExecuteState, LegacyTraceId: 01ke3e9pqgetsyztqrzv3dxyp2, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 447ms" severity: 1 }{ message: "Cancelling after 447ms during execution" severity: 1 } trace_id# 2026-01-04T02:45:16.167714Z node 5 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [5:7591332002692845522:2526] TxId: 281474976711159. Ctx: { TraceId: 01ke3ea296cgpky96yvkby4a6d, Database: /Root, SessionId: ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# CANCELLED Issues# [ {
: Error: Request canceled after 479ms } {
: Error: Cancelling after 480ms during execution } ] trace_id# 2026-01-04T02:45:16.168144Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7591332002692845535:6402], TxId: 281474976711159, task: 7. Ctx: { CheckpointId : . TraceId : 01ke3ea296cgpky96yvkby4a6d. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7591332002692845522:2526], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-04T02:45:16.168652Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7591332002692845536:6403], TxId: 281474976711159, task: 8. Ctx: { CheckpointId : . TraceId : 01ke3ea296cgpky96yvkby4a6d. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7591332002692845522:2526], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-04T02:45:16.169060Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7591332002692845537:6404], TxId: 281474976711159, task: 9. Ctx: { CheckpointId : . TraceId : 01ke3ea296cgpky96yvkby4a6d. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7591332002692845522:2526], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-04T02:45:16.169179Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7591332002692845530:6397], TxId: 281474976711159, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3ea296cgpky96yvkby4a6d. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7591332002692845522:2526], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-04T02:45:16.169251Z node 5 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037897 Cancelled read: {[5:7591332002692845553:6403], 0} 2026-01-04T02:45:16.169646Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7591332002692845531:6398], TxId: 281474976711159, task: 3. Ctx: { CheckpointId : . TraceId : 01ke3ea296cgpky96yvkby4a6d. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7591332002692845522:2526], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-04T02:45:16.169786Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7591332002692845529:6396], TxId: 281474976711159, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3ea296cgpky96yvkby4a6d. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7591332002692845522:2526], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-04T02:45:16.169913Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7591332002692845532:6399], TxId: 281474976711159, task: 4. Ctx: { CheckpointId : . TraceId : 01ke3ea296cgpky96yvkby4a6d. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [5:7591332002692845522:2526], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-04T02:45:16.170143Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7591332002692845533:6400], TxId: 281474976711159, task: 5. Ctx: { CheckpointId : . TraceId : 01ke3ea296cgpky96yvkby4a6d. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7591332002692845522:2526], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-04T02:45:16.170403Z node 5 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [5:7591332002692845534:6401], TxId: 281474976711159, task: 6. Ctx: { CheckpointId : . TraceId : 01ke3ea296cgpky96yvkby4a6d. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [5:7591332002692845522:2526], status: CANCELLED, reason: {
: Error: Terminate execution } 2026-01-04T02:45:16.170729Z node 5 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037896 Cancelled read: {[5:7591332002692845551:6402], 0} 2026-01-04T02:45:16.170838Z node 5 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037891 Cancelled read: {[5:7591332002692845541:6397], 0} 2026-01-04T02:45:16.170954Z node 5 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037892 Cancelled read: {[5:7591332002692845543:6398], 0} 2026-01-04T02:45:16.171025Z node 5 :TX_DATASHARD WARN: datashard__read_iterator.cpp:3698: 72075186224037893 Cancelled read: {[5:7591332002692845545:6399], 0} 2026-01-04T02:45:16.171112Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=ZTliMzEwOTAtYTUyYWYyMGEtYTg0YmUxNGYtZjhkNzk1ZDU=, ActorId: [5:7591331581786036734:2526], ActorState: ExecuteState, LegacyTraceId: 01ke3ea296cgpky96yvkby4a6d, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 479ms" severity: 1 }{ message: "Cancelling after 480ms during execution" severity: 1 } trace_id# |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest >> TKesusTest::TestSessionStealingSameKey [GOOD] >> TKesusTest::TestSessionStealingDifferentKey >> DataShardWrite::DeleteImmediate [GOOD] >> DataShardWrite::CancelImmediate >> DataShardWrite::ReplaceImmediate_DefaultValue [GOOD] >> DataShardWrite::UpdateImmediate >> TKesusTest::TestSessionStealingDifferentKey [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-system |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-dbadmin |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingDifferentKey [GOOD] Test command err: 2026-01-04T02:44:37.478367Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:44:37.478496Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:44:37.494815Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:44:37.494940Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:44:37.519708Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] 
TTxInit::Complete 2026-01-04T02:44:37.520223Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:134:2159], cookie=2096979486185625694, session=0, seqNo=0) 2026-01-04T02:44:37.520367Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:44:37.542929Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:134:2159], cookie=2096979486185625694, session=1) 2026-01-04T02:44:37.543608Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[1:134:2159], cookie=1935449000624838184 2026-01-04T02:44:37.544010Z node 1 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[1:148:2170], cookie=8246513737250594873) 2026-01-04T02:44:37.544093Z node 1 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[1:148:2170], cookie=8246513737250594873) 2026-01-04T02:44:37.974690Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:37.986986Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:38.349746Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:38.361853Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:38.701951Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:38.714383Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:39.065144Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:39.077259Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:39.448778Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:39.460895Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:39.815073Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:39.828788Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:40.173260Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:40.185657Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:40.537142Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:40.548895Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:40.899951Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:40.912089Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:41.304734Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:41.316826Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:41.676905Z node 1 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:41.688840Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:42.049459Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:42.061211Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:42.420887Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:42.432860Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:42.792461Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:42.804822Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:43.217151Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:43.230803Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:43.597600Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:43.610157Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:43.972713Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:43.985172Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:44.345908Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:44.358248Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:44.726159Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:44.738691Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:45.124634Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:45.136875Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:45.507775Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:45.519978Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:45.880593Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:45.892701Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:46.256789Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:46.268927Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:46.630039Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:46.642169Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:47.002486Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:47.014567Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] 
TTxSelfCheck::Complete 2026-01-04T02:44:47.385554Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:47.397915Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:47.758349Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:47.770331Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:48.130541Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:48.142422Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:48.502442Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:48.514191Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:48.916468Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:48.928197Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:49.300216Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:49.312175Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:49.672884Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:49.684848Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:50.044707Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:50.057036Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:50.416619Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:50.428593Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:50.800280Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:50.812190Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:51.182291Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:51.194041Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:51.553950Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:51.566056Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:51.928853Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:51.941496Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:52.306840Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:52.319267Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:52.704587Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:52.716581Z node 1 
:KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:53.078801Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:53.091160Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:53.454317Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:53.466305Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:44:53.829297Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:44:53.841304Z node 1 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Co ... : tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:14.182795Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:14.546840Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:14.559425Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:14.920745Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:14.932852Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:15.295130Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:15.307237Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:15.669642Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:15.681908Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:16.085925Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:16.097880Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:16.459933Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:16.471990Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:16.834945Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:16.846758Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:17.209571Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:17.221611Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:17.584810Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:17.597074Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:18.001400Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:18.013631Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:18.387132Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:18.399485Z 
node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:18.762482Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:18.774524Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:19.137326Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:19.149695Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:19.513166Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:19.525537Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:19.920468Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:19.932378Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:20.306671Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:20.319117Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:20.672426Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:20.685030Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:21.037989Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:21.049882Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:21.402110Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:21.414038Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:21.776346Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:21.788919Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:22.152402Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:22.164845Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:22.496667Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:22.508758Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:22.860720Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:22.873173Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:23.226073Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:23.238168Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:23.727166Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:23.739344Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:24.101782Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: 
[72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:24.114003Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:24.477333Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:24.489744Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:24.852436Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:24.864790Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:25.228710Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:25.241051Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:25.614750Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:25.627191Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:25.990865Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:26.003225Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:26.366961Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:26.379485Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:26.753662Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:26.766140Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:27.129401Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:45:27.141360Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:45:27.473931Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-01-04T02:45:27.474016Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-04T02:45:27.486255Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-01-04T02:45:27.497274Z node 2 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[2:643:2569], cookie=12180651957956426435) 2026-01-04T02:45:27.497390Z node 2 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[2:643:2569], cookie=12180651957956426435) 2026-01-04T02:45:27.856179Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:45:27.856284Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:45:27.875773Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:45:27.876303Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:45:27.910638Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:45:27.911647Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] 
TTxSessionAttach::Execute (sender=[3:136:2161], cookie=12345, session=0, seqNo=0) 2026-01-04T02:45:27.911841Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:45:27.924184Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=12345, session=1) 2026-01-04T02:45:27.924989Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:144:2166], cookie=23456, session=1, seqNo=0) 2026-01-04T02:45:27.937279Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:144:2166], cookie=23456, session=1) 2026-01-04T02:45:28.268848Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:45:28.268935Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:45:28.287182Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:45:28.287630Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:45:28.311684Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:45:28.312432Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=12345, session=0, seqNo=0) 2026-01-04T02:45:28.312566Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:45:28.324534Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=12345, session=1) 2026-01-04T02:45:28.325156Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:144:2166], cookie=23456, session=1, seqNo=0) 2026-01-04T02:45:28.337203Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:144:2166], cookie=23456, session=1) >> BasicUsage::CreateTopicWithAvailabilityPeriod [GOOD] >> BasicUsage::CreateTopicWithCustomMetricsLevel |93.3%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-dbadmin >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} 
ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.3%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! 
new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:89:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:92:2057] recipient: [13:91:2119] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:94:2057] recipient: [13:91:2119] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:93:2120] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:209:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:78:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:81:2057] recipient: [16:80:2112] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:83:2057] recipient: [16:80:2112] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:82:2113] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:198:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:78:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:81:2057] recipient: [17:80:2112] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:83:2057] recipient: [17:80:2112] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:82:2113] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:198:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:52:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:52:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:79:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:82:2057] recipient: [18:81:2112] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:84:2057] recipient: [18:81:2112] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:83:2113] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:199:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:82:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:84:2115] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:87:2057] recipient: [19:84:2115] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! new actor is[19:86:2116] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:202:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:82:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:85:2057] recipient: [20:84:2115] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:87:2057] recipient: [20:84:2115] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:86:2116] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:202:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:83:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:85:2115] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:88:2057] recipient: [21:85:2115] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:87:2116] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:105:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:54:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:54:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:85:2057] recipient: [22:39:2086] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:88:2057] recipient: [22:87:2117] Leader for TabletID 72057594037927937 is [22:89:2118] sender: [22:90:2057] recipient: [22:87:2117] !Reboot 72057594037927937 (actor [22:58:2099]) rebooted! !Reboot 72057594037927937 (actor [22:58:2099]) tablet resolver refreshed! new actor is[22:89:2118] Leader for TabletID 72057594037927937 is [22:89:2118] sender: [22:205:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:85:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:88:2057] recipient: [23:87:2117] Leader for TabletID 72057594037927937 is [23:89:2118] sender: [23:90:2057] recipient: [23:87:2117] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! !Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! 
new actor is[23:89:2118] Leader for TabletID 72057594037927937 is [23:89:2118] sender: [23:205:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:86:2057] recipient: [24:39:2086] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:89:2057] recipient: [24:88:2117] Leader for TabletID 72057594037927937 is [24:90:2118] sender: [24:91:2057] recipient: [24:88:2117] !Reboot 72057594037927937 (actor [24:58:2099]) rebooted! !Reboot 72057594037927937 (actor [24:58:2099]) tablet resolver refreshed! new actor is[24:90:2118] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:59:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:76:2057] recipient: [25:14:2061] |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCopyRangeWorksNewApi [GOOD] >> TKeyValueTest::TestCopyRangeToLongKey |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-clusteradmin |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-clusteradmin |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-clusteradmin >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] >> 
DataShardVolatile::DistributedUpsertRestartAfterPrepare-UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPlan+UseSink >> TKeyValueTest::TestConcatWorks [GOOD] >> TKeyValueTest::TestConcatWorksNewApi >> DataShardWrite::UpdateImmediate [GOOD] >> DataShardWrite::RejectOnChangeQueueOverflow >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadWhileWriteWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! 
new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:89:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:92:2057] recipient: [13:91:2119] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:94:2057] recipient: [13:91:2119] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! 
new actor is[13:93:2120] Leader for TabletID 72057594037927937 is [13:93:2120] sender: [13:209:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:78:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:81:2057] recipient: [16:80:2112] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:83:2057] recipient: [16:80:2112] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:82:2113] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:198:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:78:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:81:2057] recipient: [17:80:2112] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:83:2057] recipient: [17:80:2112] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:82:2113] Leader for TabletID 72057594037927937 is [17:82:2113] sender: [17:198:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:52:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:52:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:79:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:82:2057] recipient: [18:81:2112] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:84:2057] recipient: [18:81:2112] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:83:2113] Leader for TabletID 72057594037927937 is [18:83:2113] sender: [18:199:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:82:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:84:2115] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:87:2057] recipient: [19:84:2115] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! new actor is[19:86:2116] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:202:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:82:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:85:2057] recipient: [20:84:2115] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:87:2057] recipient: [20:84:2115] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:86:2116] Leader for TabletID 72057594037927937 is [20:86:2116] sender: [20:202:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:83:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:85:2115] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:88:2057] recipient: [21:85:2115] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! 
new actor is[21:87:2116] Leader for TabletID 72057594037927937 is [21:87:2116] sender: [21:203:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:54:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:54:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:86:2057] recipient: [22:39:2086] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:89:2057] recipient: [22:88:2118] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:91:2057] recipient: [22:88:2118] !Reboot 72057594037927937 (actor [22:58:2099]) rebooted! !Reboot 72057594037927937 (actor [22:58:2099]) tablet resolver refreshed! new actor is[22:90:2119] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:206:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:86:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:89:2057] recipient: [23:88:2118] Leader for TabletID 72057594037927937 is [23:90:2119] sender: [23:91:2057] recipient: [23:88:2118] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! !Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! new actor is[23:90:2119] Leader for TabletID 72057594037927937 is [23:90:2119] sender: [23:206:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:87:2057] recipient: [24:39:2086] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:90:2057] recipient: [24:89:2118] Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:92:2057] recipient: [24:89:2118] !Reboot 72057594037927937 (actor [24:58:2099]) rebooted! !Reboot 72057594037927937 (actor [24:58:2099]) tablet resolver refreshed! 
new actor is[24:91:2119] Leader for TabletID 72057594037927937 is [24:91:2119] sender: [24:207:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:59:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:76:2057] recipient: [25:14:2061] |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-system |93.4%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest >> DataShardWrite::CancelImmediate [GOOD] >> DataShardWrite::DeletePrepared+Volatile >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-system >> TKeyValueTest::TestRenameToLongKey [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! 
Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:84:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:87:2057] recipient: [8:86:2116] Leader for TabletID 72057594037927937 is [8:88:2117] sender: [8:89:2057] recipient: [8:86:2116] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:88:2117] Leader for TabletID 72057594037927937 is [8:88:2117] sender: [8:108:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:109:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:88:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:91:2057] recipient: [10:90:2120] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:93:2057] recipient: [10:90:2120] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:92:2121] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:208:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2120] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:93:2057] recipient: [11:90:2120] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:92:2121] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:89:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:209:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:78:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:81:2057] recipient: [15:80:2112] Leader for TabletID 72057594037927937 is [15:82:2113] sender: [15:83:2057] recipient: [15:80:2112] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:82:2113] Leader for TabletID 72057594037927937 is [15:82:2113] sender: [15:198:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:78:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:81:2057] recipient: [16:80:2112] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:83:2057] recipient: [16:80:2112] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:82:2113] Leader for TabletID 72057594037927937 is [16:82:2113] sender: [16:198:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:79:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:82:2057] recipient: [17:81:2112] Leader for TabletID 72057594037927937 is [17:83:2113] sender: [17:84:2057] recipient: [17:81:2112] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:83:2113] Leader for TabletID 72057594037927937 is [17:83:2113] sender: [17:199:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:52:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:52:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:82:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:85:2057] recipient: [18:84:2115] Leader for TabletID 72057594037927937 is [18:86:2116] sender: [18:87:2057] recipient: [18:84:2115] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! new actor is[18:86:2116] Leader for TabletID 72057594037927937 is [18:86:2116] sender: [18:202:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:82:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:84:2115] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:87:2057] recipient: [19:84:2115] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! new actor is[19:86:2116] Leader for TabletID 72057594037927937 is [19:86:2116] sender: [19:202:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:83:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:86:2057] recipient: [20:85:2115] Leader for TabletID 72057594037927937 is [20:87:2116] sender: [20:88:2057] recipient: [20:85:2115] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:87:2116] Leader for TabletID 72057594037927937 is [20:87:2116] sender: [20:203:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:89:2057] recipient: [21:88:2118] Leader for TabletID 72057594037927937 is [21:90:2119] sender: [21:91:2057] recipient: [21:88:2118] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! new actor is[21:90:2119] Leader for TabletID 72057594037927937 is [21:90:2119] sender: [21:206:2057] recipient: [21:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:54:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:54:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] !Reboot 72057594037927937 (actor [22:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:86:2057] recipient: [22:39:2086] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:89:2057] recipient: [22:88:2118] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:91:2057] recipient: [22:88:2118] !Reboot 72057594037927937 (actor [22:58:2099]) rebooted! !Reboot 72057594037927937 (actor [22:58:2099]) tablet resolver refreshed! new actor is[22:90:2119] Leader for TabletID 72057594037927937 is [22:90:2119] sender: [22:206:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:87:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:90:2057] recipient: [23:89:2118] Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:92:2057] recipient: [23:89:2118] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! !Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! 
new actor is[23:91:2119] Leader for TabletID 72057594037927937 is [23:91:2119] sender: [23:207:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-system >> DataShardTxOrder::DelayData >> DataShardTxOrder::ZigZag_oo >> DataShardTxOrder::ZigZag >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-clusteradmin >> DataShardTxOrder::RandomDotRanges_DelayRS >> DataShardTxOrder::RandomPoints_DelayData >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 >> DataShardTxOrder::ImmediateBetweenOnline_oo8 >> TConsoleTests::TestRemoveAttributes [GOOD] >> TConsoleTests::TestRemoveAttributesExtSubdomain >> DataShardOutOfOrder::UncommittedReadSetAck >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite >> DataShardTxOrder::ForceOnlineBetweenOnline >> DataShardTxOrder::ImmediateBetweenOnline_Init >> TDataShardLocksTest::UseLocksCache [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-system >> THealthCheckTest::TestStateStorageRed [GOOD] >> DataShardTxOrder::RandomPoints_DelayRS_Reboot ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_locks/unittest >> TDataShardLocksTest::UseLocksCache [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:26.540361Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:26.694925Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:26.695404Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:26.695451Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:27.019178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:27.019306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:27.075836Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494724161742 != 1767494724161745 2026-01-04T02:45:27.088543Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:27.132774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:27.234986Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:27.535005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:27.548104Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:27.656981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:27.703000Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:27.704099Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:27.705560Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:892:2768] 2026-01-04T02:45:27.705835Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:27.718592Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:879:2760], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:27.742814Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:892:2768]: 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:27.743132Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:879:2760], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:27.743329Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:894:2770] 2026-01-04T02:45:27.743468Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:27.749008Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:879:2760], Recipient [1:894:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:27.749573Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:27.749696Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:27.751741Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:27.751818Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:27.751856Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:27.752762Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:27.752909Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:27.752981Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:923:2768] in generation 1 2026-01-04T02:45:27.753279Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:27.753341Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:27.754321Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-04T02:45:27.754363Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-04T02:45:27.754409Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-04T02:45:27.754581Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:27.754659Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:27.754710Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:924:2770] in generation 1 2026-01-04T02:45:27.765551Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:27.795857Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:27.797112Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:27.797251Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:927:2789] 2026-01-04T02:45:27.797293Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:27.797324Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at 
tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:27.797370Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:27.797644Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:892:2768], Recipient [1:892:2768]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:27.797684Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:27.798416Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:27.798455Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:45:27.798513Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:27.798562Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:928:2790] 2026-01-04T02:45:27.798579Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:45:27.798594Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:45:27.798608Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:45:27.798902Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:894:2770], Recipient [1:894:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:27.798936Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:27.799078Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:27.799171Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:27.799308Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:27.799358Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:27.799463Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:45:27.799501Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:45:27.799537Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:45:27.799565Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:27.799607Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:27.799656Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-04T02:45:27.799731Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets 
to remove in 72075186224037889 2026-01-04T02:45:27.799849Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:897:2771], Recipient [1:892:2768]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:27.799930Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:27.799981Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:885:2764], serverId# [1:897:2771], sessionId# [0:0:0] 2026-01-04T02:45:27.800052Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:45:27.800081Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:27.800102 ... 24037888 is Executed 2026-01-04T02:45:34.151700Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:45:34.151726Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1] at 72075186224037888 has finished 2026-01-04T02:45:34.162419Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-04T02:45:34.162487Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-04T02:45:34.162522Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T02:45:34.162565Z node 2 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 1 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-04T02:45:34.162654Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:34.162797Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [2:1154:2949], Recipient [2:894:2770]: {TEvReadSet step# 3500 txid# 281474976715661 TabletSource# 72075186224037889 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2026-01-04T02:45:34.162837Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:34.162871Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2026-01-04T02:45:34.273211Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1197:2975], Recipient [2:1154:2949]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-04T02:45:34.273350Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-04T02:45:34.273403Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2026-01-04T02:45:34.273480Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-04T02:45:34.273514Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckRead 2026-01-04T02:45:34.273544Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:45:34.273571Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:45:34.273604Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037888 2026-01-04T02:45:34.273630Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-04T02:45:34.273644Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:45:34.273661Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T02:45:34.273674Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2026-01-04T02:45:34.273743Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-04T02:45:34.273935Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3500/18446744073709551615 2026-01-04T02:45:34.273974Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1197:2975], 0} after executionsCount# 1 2026-01-04T02:45:34.274006Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1197:2975], 0} sends rowCount# 2, bytes# 64, quota rows left# 999, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:45:34.274058Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:1197:2975], 0} finished in read 2026-01-04T02:45:34.274099Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-04T02:45:34.274115Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T02:45:34.274130Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:45:34.274145Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:45:34.274174Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-04T02:45:34.274187Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:45:34.274216Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: 
Execution plan for [0:2] at 72075186224037888 has finished 2026-01-04T02:45:34.274246Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T02:45:34.274311Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-04T02:45:34.274677Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1197:2975], Recipient [2:1154:2949]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:45:34.274724Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-04T02:45:34.275145Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1197:2975], Recipient [2:894:2770]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 RangesSize: 1 2026-01-04T02:45:34.275220Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-04T02:45:34.275249Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit CheckRead 2026-01-04T02:45:34.275284Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is Executed 2026-01-04T02:45:34.275299Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit CheckRead 2026-01-04T02:45:34.275322Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-04T02:45:34.275343Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-04T02:45:34.275373Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037889 2026-01-04T02:45:34.275398Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is Executed 2026-01-04T02:45:34.275412Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-04T02:45:34.275433Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037889 to execution unit ExecuteRead 2026-01-04T02:45:34.275457Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit ExecuteRead 2026-01-04T02:45:34.275523Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3500 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 999 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 999 } 2026-01-04T02:45:34.275640Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v3500/18446744073709551615 2026-01-04T02:45:34.275664Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 
72075186224037889 Complete read# {[2:1197:2975], 1} after executionsCount# 1 2026-01-04T02:45:34.275684Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[2:1197:2975], 1} sends rowCount# 2, bytes# 64, quota rows left# 997, quota bytes left# 5242816, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:45:34.275717Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[2:1197:2975], 1} finished in read 2026-01-04T02:45:34.275766Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is Executed 2026-01-04T02:45:34.275786Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit ExecuteRead 2026-01-04T02:45:34.275800Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037889 to execution unit CompletedOperations 2026-01-04T02:45:34.275815Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037889 on unit CompletedOperations 2026-01-04T02:45:34.275836Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037889 is Executed 2026-01-04T02:45:34.275851Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037889 executing on unit CompletedOperations 2026-01-04T02:45:34.275864Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037889 has finished 2026-01-04T02:45:34.275879Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-04T02:45:34.275918Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-04T02:45:34.276268Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1197:2975], Recipient [2:894:2770]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-04T02:45:34.276297Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 } |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_locks/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes >> DataShardWrite::RejectOnChangeQueueOverflow [GOOD] >> DataShardWrite::PreparedDistributedWritePageFault >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-system |93.4%| [TA] $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} |93.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... results_accumulator.log} |93.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_locks/test-results/unittest/{meta.json ... 
results_accumulator.log} >> DataShardWrite::DeletePrepared+Volatile [GOOD] >> DataShardWrite::DeletePrepared-Volatile ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_ReproducerDelayRS1 [GOOD] Test command err: 2026-01-04T02:45:34.772033Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:34.773687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:34.773730Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:34.783258Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:34.783638Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:45:34.783969Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:34.831302Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:34.839206Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:34.839312Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:34.841141Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:45:34.841221Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:45:34.841292Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:45:34.841721Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:34.843225Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:34.843314Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:45:34.905772Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:34.926245Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:45:34.926417Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:34.926526Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:45:34.926571Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:45:34.926629Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:45:34.926673Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:34.926879Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.926924Z 
node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.927185Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:45:34.927305Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:45:34.927373Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:34.927411Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:34.927449Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:45:34.927496Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:45:34.927539Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:45:34.927584Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:45:34.927631Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:34.927727Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.927784Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.927842Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:45:34.934266Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:45:34.934330Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:34.934419Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:45:34.934578Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:45:34.934626Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:45:34.934679Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:45:34.934758Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:45:34.934815Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:45:34.934860Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 
2026-01-04T02:45:34.934889Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:34.935098Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:34.935127Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:45:34.935158Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:45:34.935193Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:34.935233Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:45:34.935271Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:45:34.935306Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:45:34.935344Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:34.935367Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:34.947268Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:45:34.947323Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:34.947353Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:34.947396Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:45:34.947456Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:34.947954Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.948006Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.948046Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:45:34.948172Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:45:34.948204Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:45:34.948333Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:34.948375Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:45:34.948423Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for 
[1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:45:34.948479Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:45:34.951589Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:45:34.951661Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:34.951937Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.951987Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.952047Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:34.952087Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:34.952151Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:34.952198Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:45:34.952235Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 45:35.880729Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:35.880812Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-04T02:45:35.880842Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-04T02:45:35.880854Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-04T02:45:35.880866Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:35.880881Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:7] at 9437184 on unit CompleteOperation 2026-01-04T02:45:35.880902Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 7] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-04T02:45:35.880926Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2026-01-04T02:45:35.880940Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:35.881049Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:35.881062Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:8] at 9437184 on unit CompleteOperation 2026-01-04T02:45:35.881079Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 8] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-04T02:45:35.881131Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2026-01-04T02:45:35.881151Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:35.881259Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:35.881274Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:9] at 9437184 on unit CompleteOperation 2026-01-04T02:45:35.881294Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 9] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-04T02:45:35.881328Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2026-01-04T02:45:35.881347Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:35.881460Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:35.881481Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:14] at 9437184 on unit FinishPropose 2026-01-04T02:45:35.881513Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 14 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: COMPLETE 2026-01-04T02:45:35.881567Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:35.881659Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:35.881704Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:10] at 9437184 on unit CompleteOperation 2026-01-04T02:45:35.881729Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 10] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-04T02:45:35.881781Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2026-01-04T02:45:35.881797Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:35.881930Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:35.881946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:12] at 9437184 on unit CompleteOperation 2026-01-04T02:45:35.881966Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 12] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-04T02:45:35.881988Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2026-01-04T02:45:35.882001Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: 
at tablet# 9437184 2026-01-04T02:45:35.882133Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:35.882147Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:13] at 9437184 on unit CompleteOperation 2026-01-04T02:45:35.882192Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 13] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:45:35.882215Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:35.882277Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437184 2026-01-04T02:45:35.882292Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:35.882305Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000004:5] at 9437184 on unit CompleteOperation 2026-01-04T02:45:35.882323Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000004 : 5] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-04T02:45:35.882353Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2026-01-04T02:45:35.882369Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:35.882522Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000004 txid# 4 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 1} 2026-01-04T02:45:35.882555Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:35.882585Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 4 2026-01-04T02:45:35.882653Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000004 txid# 6 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 3} 2026-01-04T02:45:35.882702Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:35.882724Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 6 2026-01-04T02:45:35.882764Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000004 txid# 7 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 4} 2026-01-04T02:45:35.882781Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:35.882794Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 7 2026-01-04T02:45:35.882826Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient 
[1:349:2316]: {TEvReadSet step# 1000004 txid# 8 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 5} 2026-01-04T02:45:35.882839Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:35.882853Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 8 2026-01-04T02:45:35.882883Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000004 txid# 9 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 6} 2026-01-04T02:45:35.882897Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:35.882909Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 9 2026-01-04T02:45:35.882958Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000004 txid# 10 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 7} 2026-01-04T02:45:35.882987Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:35.883005Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 10 2026-01-04T02:45:35.883051Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000004 txid# 12 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 8} 2026-01-04T02:45:35.883068Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:35.883082Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 12 2026-01-04T02:45:35.883124Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000004 txid# 5 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 2} 2026-01-04T02:45:35.883152Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:35.883166Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 5 expect 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - actual 7 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - interm - 2 5 4 - 3 - - - - - - - - - - - - - - - - - - - - - - - - - - |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> PersQueueSdkReadSessionTest::ReadSessionWithAbort [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose ------- [TM] {asan, default-linux-x86_64, release} ydb/core/health_check/ut/unittest >> THealthCheckTest::TestStateStorageRed [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:42:45.868802Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:45.995067Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:42:46.004033Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:46.004442Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:46.004609Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:46.426034Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:46.521405Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:46.521542Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:46.569263Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:46.652314Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:42:47.421911Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:42:47.421968Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:42:47.422016Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:42:47.422288Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration ... blocking NKikimr::NSchemeShard::TEvSchemeShard::TEvDescribeScheme from MONITORING_REQUEST to FLAT_SCHEMESHARD_ACTOR cookie 1 2026-01-04T02:42:47.482874Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:42:56.460386Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:42:56.460483Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished 2026-01-04T02:43:07.316856Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:07.317002Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:07.323932Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:07.325162Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:43:07.326420Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:848:2408], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:07.326698Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:07.326785Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:07.327623Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:843:2351], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:43:07.327865Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:43:07.327957Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:43:07.653058Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:43:07.765517Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:07.765641Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:07.766124Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:43:07.766222Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:43:07.824338Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-04T02:43:07.824812Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:07.825231Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:43:07.895365Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:43:07.908401Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:43:11.414925Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:43:11.419451Z node 3 :HIVE DEBUG: hive_impl.cpp:764: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 3: Status: 2 2026-01-04T02:43:11.419549Z node 3 :HIVE DEBUG: tx__status.cpp:22: HIVE#72057594037968897 THive::TTxStatus(3)::Execute 2026-01-04T02:43:11.419594Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 3 2026-01-04T02:43:11.419674Z node 3 :HIVE DEBUG: tx__status.cpp:66: HIVE#72057594037968897 THive::TTxStatus(3)::Complete 2026-01-04T02:43:11.422951Z node 3 :HIVE DEBUG: tx__kill_node.cpp:22: HIVE#72057594037968897 THive::TTxKillNode(3)::Execute 2026-01-04T02:43:11.423087Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-04T02:43:11.423141Z node 3 :HIVE TRACE: hive_domains.cpp:16: Node(3) DeregisterInDomains (72057594046644480:1) : 2 -> 1 2026-01-04T02:43:11.423193Z node 3 :HIVE DEBUG: hive_impl.cpp:2891: HIVE#72057594037968897 RemoveRegisteredDataCentersNode(1, 3) 2026-01-04T02:43:11.423234Z node 3 :HIVE TRACE: tx__kill_node.cpp:50: HIVE#72057594037968897 THive::TTxKillNode - killing pipe server [3:1228:2635] 2026-01-04T02:43:11.423274Z node 3 :HIVE DEBUG: hive_impl.cpp:133: HIVE#72057594037968897 TryToDeleteNode(3): waiting 3600.000000s 2026-01-04T02:43:11.423750Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:43:11.423808Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:43:11.423838Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:43:11.424365Z node 3 :HIVE TRACE: hive_impl.cpp:150: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerDisconnected([3:877:2417]) [3:1228:2635] 2026-01-04T02:43:11.424776Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:43:11.426758Z node 3 :HIVE TRACE: hive_impl.cpp:142: HIVE#72057594037968897 Handle TEvTabletPipe::TEvServerConnected([3:1709:3000]) [3:1947:3004] 2026-01-04T02:43:11.427223Z node 3 :HIVE DEBUG: hive_impl.cpp:764: HIVE#72057594037968897 Handle TEvLocal::TEvStatus for Node 4: Status: 2 2026-01-04T02:43:11.427296Z node 3 :HIVE DEBUG: tx__status.cpp:22: HIVE#72057594037968897 THive::TTxStatus(4)::Execute 2026-01-04T02:43:11.427332Z node 3 :HIVE WARN: tx__status.cpp:58: HIVE#72057594037968897 THive::TTxStatus(status=2 node=Connected) - killing node 4 2026-01-04T02:43:11.427499Z node 3 :HIVE DEBUG: tx__kill_node.cpp:22: HIVE#72057594037968897 THive::TTxKillNode(4)::Execute 2026-01-04T02:43:11.427587Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-04T02:43:11.427622Z node 3 :HIVE TRACE: hive_domains.cpp:16: Node(4) DeregisterInDomains (72057594046644480:1) : 1 -> 0 2026-01-04T02:43:11.427663Z node 3 :HIVE DEBUG: hive_impl.cpp:2891: HIVE#72057594037968897 RemoveRegisteredDataCentersNode(2, 4) 2026-01-04T02:43:11.427702Z node 3 :HIVE TRACE: tx__kill_node.cpp:50: HIVE#72057594037968897 THive::TTxKillNode - killing pipe server 
[3:1255:2662] 2026-01-04T02:43:11.427740Z node 3 :HIVE DEBUG: hive_impl.cpp:133: HIVE#72057594037968897 TryToDeleteNode(4): waiting 3600.000000s 2026-01-04T02:43:11.428002Z node 3 :HIVE DEBUG: hive_impl.cpp:55: HIVE#72057594037968897 Handle TEvHive::TEvCreateTablet(PersQueue(72057594046578946,0)) 2026-01-04T02:43:11.452275Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:200: HIVE#72057594037968897 THive::TTxCreateTablet::Execute Owner: 72057594046578946 OwnerIdx: 0 TabletType: PersQueue BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } BindedChannels { StoragePoolName: "/Root:test" } 2026-01-04T02:43:11.452366Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:354: HIVE#72057594037968897 Hive 72057594037968897 allocated TabletId 72075186224037888 from TabletIdIndex 65536 2026-01-04T02:43:11.452591Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:446: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for type PersQueue: {} 2026-01-04T02:43:11.452657Z node 3 :HIVE DEBUG: tx__create_tablet.cpp:453: HIVE#72057594037968897 THive::TTxCreateTablet::Execute; Default resources after merge for profile 'default': {Memory: 1048576} 2026-01-04T02:43:11.452799Z node 3 :HIVE DEBUG: hive_impl.cpp:2904: HIVE#72057594037968897 CreateTabletFollowers Tablet PersQueue.72075186224037888 ... for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:28.494688Z node 31 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:28.495098Z node 31 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:28.829275Z node 29 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:29.065305Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:29.065419Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:29.066179Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:29.066292Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:29.066535Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:29.066622Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:29.066844Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:29.066900Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:29.067140Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:29.067214Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:29.067540Z node 29 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:29.067610Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:29.067850Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:29.067915Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:29.068133Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:29.068195Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:29.068408Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:29.068473Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:29.159985Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 30 Cookie 30 2026-01-04T02:45:29.160403Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 31 Cookie 31 2026-01-04T02:45:29.160495Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 32 Cookie 32 2026-01-04T02:45:29.160632Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 33 Cookie 33 2026-01-04T02:45:29.160775Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 34 Cookie 34 2026-01-04T02:45:29.160915Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 35 Cookie 35 2026-01-04T02:45:29.161006Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 36 Cookie 36 2026-01-04T02:45:29.161097Z node 29 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 37 Cookie 37 2026-01-04T02:45:29.161408Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(29, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:29.162183Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(37, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:29.162478Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(30, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:29.162649Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(31, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:29.162900Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(32, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:29.163127Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(34, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:29.163285Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(35, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:29.163433Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(36, (0,0,0,0)) VolatileState: Connecting -> 
Connected 2026-01-04T02:45:29.163596Z node 29 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(33, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:29.205324Z node 34 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:29.232366Z node 29 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:29.303350Z node 32 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:29.319354Z node 30 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:29.337418Z node 36 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:29.381292Z node 31 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:29.427239Z node 35 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:29.457405Z node 37 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:29.474493Z node 33 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:30.334921Z node 29 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:45:30.334981Z node 29 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:45:30.335010Z node 29 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:45:30.335526Z node 29 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:45:30.404530Z node 33 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:30.404653Z node 34 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:30.404736Z node 35 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:30.404818Z node 36 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:30.404896Z node 37 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:30.408985Z node 30 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:30.409107Z node 29 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:30.409184Z node 31 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:30.409409Z node 32 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; self_check_result: EMERGENCY issue_log { id: "RED-7831" status: RED message: "There is not enough functional rings" type: "STATE_STORAGE" level: 1 } issue_log { id: "RED-ccd4-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-b30b-1-29" type: "STATE_STORAGE_RING" level: 2 listed: 6 count: 6 } issue_log { id: "RED-b30b-1-29" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 29 } } } } type: "STATE_STORAGE_NODE" level: 3 listed: 6 count: 6 } issue_log { id: "RED-568c" status: RED message: "There is not enough functional rings" type: "SCHEME_BOARD" level: 1 } issue_log { id: "RED-9bdc-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-106b-1-29" type: "SCHEME_BOARD_RING" level: 2 listed: 6 count: 6 } issue_log { id: "RED-106b-1-29" status: RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 29 } } } } type: "SCHEME_BOARD_NODE" level: 3 listed: 6 count: 6 } issue_log { id: "RED-18c3" status: RED message: "There is not enough functional rings" type: "BOARD" level: 1 } issue_log { id: "RED-aaf7-1" status: RED message: "Rings have unavailable nodes" location { compute { state_storage { ring: 1 } } } reason: "RED-0632-1-29" type: "BOARD_RING" level: 2 listed: 6 count: 6 } issue_log { id: "RED-0632-1-29" status: 
RED message: "Node is not available" location { compute { state_storage { ring: 1 node { id: 29 } } } } type: "BOARD_NODE" level: 3 listed: 6 count: 6 } location { id: 29 host: "::1" port: 12001 } >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-system |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/health_check/ut/unittest >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-dbadmin |93.4%| [TA] $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.4%| [TA] {RESULT} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/health_check/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardOutOfOrder::TestSnapshotReadPriority >> TBlobStorageProxyTest::TestGetAndRangeGetManyBlobs [GOOD] >> TBlobStorageProxyTest::TestEmptyRange ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleTests::TestAlterTenantTooManyStorageResourcesForRunningExtSubdomain [GOOD] Test command err: 2026-01-04T02:39:51.013539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:39:51.013608Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:39:51.280960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:39:52.215940Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:39:52.216603Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f54/r3tmp/tmpJHDOrX/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:39:52.217050Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f54/r3tmp/tmpJHDOrX/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001f54/r3tmp/tmpJHDOrX/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13006662951232282781 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:39:52.351743Z node 3 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000000:_:0:0:0]: (2147483648) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 
1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f54/r3tmp/tmpJHDOrX/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:39:52.356854Z node 3 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000001:_:0:0:0]: (2147483649) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f54/r3tmp/tmpJHDOrX/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:39:52.357042Z node 3 :BS_LOCALRECOVERY CRIT: localrecovery_public.cpp:107: PDiskId# 1000 VDISK[80000002:_:0:0:0]: (2147483650) LocalRecovery FINISHED: {RecoveryDuration# INPROGRESS RecoveredLogStartLsn# 0 SuccessfulRecovery# false EmptyLogoBlobsDb# true EmptyBlocksDb# true EmptyBarriersDb# true EmptySyncLog# true EmptySyncer# true EmptyHuge# true LogRecLogoBlob# 0 LogRecBlock# 0 LogRecGC# 0 LogRecSyncLogIdx# 0 LogRecLogoBlobsDB# 0 LogRecBlocksDB# 0 LogRecBarriersDB# 0 LogRecCutLog# 0 LogRecLocalSyncData# 0 LogRecSyncerState# 0 LogRecHandoffDel# 0 LogRecHugeBlobAllocChunk# 0 LogRecHugeBlobFreeChunk# 0 LogRecHugeBlobEntryPoint# 0 LogRecHugeLogoBlob# 0 LogRecLogoBlobOpt# 0 LogRecPhantomBlob# 0 LogRecAnubisOsirisPut# 0 LogRecAddBulkSst# 0 LogoBlobFreshApply# 0 LogoBlobFreshSkip# 0 LogoBlobsBatchFreshApply# 0 LogoBlobsBatchFreshSkip#0 LogoBlobSyncLogApply# 0 LogoBlobSyncLogSkip# 0 HugeLogoBlobFreshApply# 0 HugeLogoBlobFreshSkip# 0 HugeLogoBlobSyncLogApply# 0 HugeLogoBlobSyncLogSkip# 0 BlockFreshApply# 0 BlockFreshSkip# 0 BlocksBatchFreshApply# 0 BlocksBatchFreshSkip# 0 BlockSyncLogApply# 0 BlockSyncLogSkip# 0 BarrierFreshApply# 0 BarrierFreshSkip# 0 BarriersBatchFreshApply# 0 BarriersBatchFreshSkip# 0 BarrierSyncLogApply# 0 BarrierSyncLogSkip# 0 GCBarrierFreshApply# 0 GCBarrierFreshSkip# 0 GCLogoBlobFreshApply# 0 GCLogoBlobFreshSkip# 0 GCSyncLogApply# 0 GCSyncLogSkip# 0 TryPutLogoBlobSyncData# 0 TryPutBlockSyncData# 0 TryPutBarrierSyncData# 0 HandoffDelFreshApply# 0 HandoffDelFreshSkip# 0 HugeBlobAllocChunkApply# 0 HugeBlobAllocChunkSkip# 0 HugeBlobFreeChunkApply# 0 HugeBlobFreeChunkSkip# 0 HugeLogoBlobToHeapApply# 0 HugeLogoBlobToHeapSkip# 0 HugeSlotsDelGenericApply# 0 HugeSlotsDelGenericSkip# 0 TryPutLogoBlobPhantom# 0 RecoveryLogDiapason# [18446744073709551615 0] StartingPoints# {} ReadLogReplies# {}} reason# Yard::Init failed, errorReason# "PDisk is in StateError, reason# PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f54/r3tmp/tmpJHDOrX/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized" status# CORRUPTED;VDISK LOCAL RECOVERY FAILURE DUE TO LOGICAL ERROR 2026-01-04T02:39:52.438225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:39:52.618531Z node 4 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:39:52.619065Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f54/r3tmp/tmpJHDOrX/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:39:52.619339Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f54/r3tmp/tmpJHDOrX/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001f54/r3tmp/tmpJHDOrX/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4748129556255973163 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunk ... 2026-01-04T02:44:25.956448Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:44:25.956847Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:44:32.254997Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:44:32.255455Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:44:38.466904Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-1) cannot create pool '/dc-1/users/tenant-1:hdd-1' (0): Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# 
{[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:44:38.467231Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-1 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 2 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:44:38.575493Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:44:38.576016Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:44:44.663256Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:44:44.663738Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:44:50.949257Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:44:50.949599Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:44:57.213857Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create 
pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:44:57.214202Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:45:03.464472Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:45:03.464828Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:45:09.772601Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:45:09.772932Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:45:16.233793Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:45:16.234038Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 
2026-01-04T02:45:22.405025Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:45:22.405279Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:45:28.742134Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:45:28.742402Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:45:35.154678Z node 154 :CMS_TENANTS ERROR: console_tenants_manager.cpp:252: TPoolManip(/dc-1/users/tenant-1:hdd-2) cannot create pool '/dc-1/users/tenant-1:hdd-2' (0): Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} 2026-01-04T02:45:35.154941Z node 154 :CMS_TENANTS CRIT: console_tenants_manager.cpp:3505: Couldn't update storage pool /dc-1/users/tenant-1:hdd-2 for tenant /dc-1/users/tenant-1: Group fit error BoxId# 1 StoragePoolId# 3 Error# failed to allocate group: no group options PDisks# {[(154:1-s[16/16])(155:1000-s[16/16]o)(156:1000-s[16/16]o)(157:1000-s[16/16]o)(158:1000-s[16/16]o)(159:1000-s[16/16]o)(160:1000-s[16/16]o)(161:1000-s[16/16]o)(162:1000-s[16/16]o)]} |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 >> TSchemeShardSysNames::ESchemeOpCreateTable-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-anonymous >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] >> TBlobStorageProxyTest::TestEmptyRange [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-system >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace >> 
TSchemeShardSysNames::ESchemeOpCreateSecret-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-system >> DataShardVolatile::DistributedUpsertRestartAfterPlan+UseSink [GOOD] >> DataShardVolatile::DistributedUpsertRestartAfterPlan-UseSink >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8 [GOOD] Test command err: 2026-01-04T02:45:34.759452Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:34.761153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:34.761200Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:34.770468Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:34.770764Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:45:34.771014Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:34.818934Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:34.824578Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:34.824639Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:34.825710Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:45:34.825757Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:45:34.825820Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:45:34.826086Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:34.826922Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:34.826981Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:45:34.885076Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:34.914485Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:45:34.914635Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:34.914749Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:45:34.914799Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:45:34.914860Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:45:34.914906Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:34.915097Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.915139Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.915377Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:45:34.915488Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:45:34.915578Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:34.915632Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:34.915663Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:45:34.915706Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:45:34.915743Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:45:34.915801Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:45:34.915853Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:34.915941Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.915974Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.916008Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:45:34.922194Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:45:34.922265Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:34.922371Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:45:34.922527Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:45:34.922572Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:45:34.922618Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:45:34.922672Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 
9437184 is ExecutedNoMoreRestarts 2026-01-04T02:45:34.922729Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:45:34.922765Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:45:34.922803Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:34.923028Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:34.923059Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:45:34.923091Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:45:34.923122Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:34.923171Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:45:34.923219Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:45:34.923256Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:45:34.923305Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:34.923342Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:34.934987Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:45:34.935037Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:34.935060Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:34.935102Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:45:34.935163Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:34.935550Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.935607Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.935665Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:45:34.935774Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:45:34.935801Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:45:34.935898Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:34.935931Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:45:34.935965Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:45:34.936000Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:45:34.938387Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:45:34.938435Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:34.938600Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.938627Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.938666Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:34.938698Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:34.938733Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:34.938762Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:45:34.938784Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
1:349:2316]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-04T02:45:39.231491Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:39.231514Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-04T02:45:39.231587Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-04T02:45:39.231614Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:134] at 9437186 on unit CompleteOperation 2026-01-04T02:45:39.231652Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 134] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-04T02:45:39.231698Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-01-04T02:45:39.231765Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:45:39.231907Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-04T02:45:39.231942Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-04T02:45:39.231965Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:137] at 9437186 on unit CompleteOperation 2026-01-04T02:45:39.231998Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 137] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-04T02:45:39.232040Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-01-04T02:45:39.232063Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:45:39.232146Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-04T02:45:39.232166Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-04T02:45:39.232193Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-04T02:45:39.232230Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:140] at 9437186 on unit CompleteOperation 2026-01-04T02:45:39.232269Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 140] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-04T02:45:39.232325Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-01-04T02:45:39.232363Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:45:39.232461Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 
2026-01-04T02:45:39.232481Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-04T02:45:39.232503Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-04T02:45:39.232524Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:143] at 9437186 on unit CompleteOperation 2026-01-04T02:45:39.232553Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 143] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-04T02:45:39.232596Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-01-04T02:45:39.232622Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:45:39.232711Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-04T02:45:39.232732Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-04T02:45:39.232753Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:146] at 9437186 on unit CompleteOperation 2026-01-04T02:45:39.232794Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 146] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-04T02:45:39.232848Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-01-04T02:45:39.232879Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:45:39.232974Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-04T02:45:39.232996Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2026-01-04T02:45:39.233027Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-04T02:45:39.233060Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-04T02:45:39.233099Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:45:39.233189Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-04T02:45:39.233211Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-01-04T02:45:39.233239Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 4 ms 2026-01-04T02:45:39.233283Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 
SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-04T02:45:39.233314Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:45:39.233533Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-01-04T02:45:39.233564Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:39.233606Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2026-01-04T02:45:39.233672Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-04T02:45:39.233697Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:39.233719Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2026-01-04T02:45:39.233796Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-01-04T02:45:39.233839Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:39.233859Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2026-01-04T02:45:39.233965Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-01-04T02:45:39.233996Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:39.234033Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2026-01-04T02:45:39.234112Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-01-04T02:45:39.234147Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:39.234176Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2026-01-04T02:45:39.234272Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-01-04T02:45:39.234309Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event 
TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:39.234331Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2026-01-04T02:45:39.234405Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-04T02:45:39.234435Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:39.234457Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-01-04T02:45:39.234519Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-04T02:45:39.234540Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:39.234579Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init [GOOD] Test command err: 2026-01-04T02:45:34.858469Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:34.860197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:34.860245Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:34.869434Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:34.869745Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:45:34.870030Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:34.913983Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:34.921763Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:34.921862Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:34.923427Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:45:34.923491Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:45:34.923562Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:45:34.923956Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:34.925121Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:34.925186Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id 
[1:202:2156] in generation 2 2026-01-04T02:45:34.994415Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:35.025549Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:45:35.025717Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:35.025849Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:45:35.025889Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:45:35.025950Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:45:35.025990Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:35.026177Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:35.026219Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:35.026467Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:45:35.026560Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:45:35.026624Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:35.026657Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:35.026686Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:45:35.026735Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:45:35.026766Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:45:35.026807Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:45:35.026860Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:35.026943Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:35.026982Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:35.027032Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:45:35.032878Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 
9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:45:35.032939Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:35.033033Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:45:35.033184Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:45:35.033232Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:45:35.033277Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:45:35.033340Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:45:35.033389Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:45:35.033422Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:45:35.033458Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:35.033696Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:35.033727Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:45:35.033759Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:45:35.033787Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:35.033848Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:45:35.033889Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:45:35.033927Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:45:35.033978Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:35.034003Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:35.045916Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:45:35.045979Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:35.046011Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:35.046061Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:45:35.046145Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state 
WaitScheme 2026-01-04T02:45:35.046618Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:35.046666Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:35.046706Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:45:35.046824Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:45:35.046856Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:45:35.046966Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:35.047001Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:45:35.047048Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:45:35.047087Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:45:35.050499Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:45:35.050557Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:35.050771Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:35.050810Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:35.050859Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:35.050902Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:35.050939Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:35.050978Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:45:35.051014Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
02:45:39.291616Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437184 on unit StoreAndSendOutRS 2026-01-04T02:45:39.291643Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 50 at 9437184 from 9437184 to 9437186 txId 152 2026-01-04T02:45:39.291690Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:39.291728Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2026-01-04T02:45:39.291776Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:45:39.291816Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-04T02:45:39.291839Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:39.291942Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:39.291962Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2026-01-04T02:45:39.291990Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:45:39.292026Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-04T02:45:39.292066Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:39.292226Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-04T02:45:39.292290Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:39.292330Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2026-01-04T02:45:39.292471Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-04T02:45:39.292496Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:39.292519Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2026-01-04T02:45:39.292568Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [1:238:2230], Recipient [1:460:2402]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-01-04T02:45:39.292597Z node 
1 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-04T02:45:39.292624Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2026-01-04T02:45:39.292675Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-01-04T02:45:39.292723Z node 1 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2026-01-04T02:45:39.292786Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-04T02:45:39.292927Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-04T02:45:39.292954Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:39.292998Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-04T02:45:39.293080Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:460:2402], Recipient [1:460:2402]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:39.293110Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:39.293168Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2026-01-04T02:45:39.293195Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:39.293224Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2026-01-04T02:45:39.293249Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2026-01-04T02:45:39.293275Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-04T02:45:39.293300Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2026-01-04T02:45:39.293335Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit BlockFailPoint 2026-01-04T02:45:39.293364Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit BlockFailPoint 2026-01-04T02:45:39.293385Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-04T02:45:39.293405Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit BlockFailPoint 2026-01-04T02:45:39.293440Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2026-01-04T02:45:39.293461Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on 
unit ExecuteDataTx 2026-01-04T02:45:39.293900Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2026-01-04T02:45:39.293947Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-04T02:45:39.293993Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2026-01-04T02:45:39.294022Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2026-01-04T02:45:39.294056Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2026-01-04T02:45:39.294086Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2026-01-04T02:45:39.294247Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is DelayComplete 2026-01-04T02:45:39.294273Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2026-01-04T02:45:39.294297Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2026-01-04T02:45:39.294322Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2026-01-04T02:45:39.294353Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-04T02:45:39.294373Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2026-01-04T02:45:39.294415Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:152] at 9437186 has finished 2026-01-04T02:45:39.294447Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:39.294470Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-01-04T02:45:39.294504Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2026-01-04T02:45:39.294531Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2026-01-04T02:45:39.294720Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-04T02:45:39.294759Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:39.294798Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2026-01-04T02:45:39.308220Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
9437186 2026-01-04T02:45:39.308265Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-01-04T02:45:39.308326Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-04T02:45:39.308397Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-04T02:45:39.308430Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:45:39.308705Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-04T02:45:39.308749Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:39.308780Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateSequence-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-system >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] >> DataShardWrite::DeletePrepared-Volatile [GOOD] >> DataShardWrite::DelayedVolatileTxAndEvWrite |93.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> TBlobStorageProxyTest::TestEmptyRange [GOOD] |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/dsproxy/ut_fat/unittest >> DataShardTxOrder::ZigZag [GOOD] >> TConsoleTests::TestRemoveAttributesExtSubdomain [GOOD] >> TConsoleTests::TestSchemeShardErrorForwarding >> DataShardWrite::PreparedDistributedWritePageFault [GOOD] >> DataShardWrite::UncommittedUpdateLockMissingRow >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline_oo8 [GOOD] Test command err: 2026-01-04T02:45:34.829607Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:34.831328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:34.831377Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:34.841030Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:34.841373Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: 
tablet 9437184 actor [1:133:2156] 2026-01-04T02:45:34.841626Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:34.891069Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:34.898208Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:34.898302Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:34.899922Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:45:34.899997Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:45:34.900066Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:45:34.900464Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:34.901735Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:34.901826Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:45:34.968099Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:35.000757Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:45:35.000941Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:35.001056Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:45:35.001104Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:45:35.001162Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:45:35.001205Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:35.001425Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:35.001473Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:35.001767Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:45:35.001878Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:45:35.001946Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:35.001985Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:35.002023Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:45:35.002077Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:45:35.002114Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:45:35.002166Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:45:35.002209Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:35.002290Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:35.002327Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:35.002378Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:45:35.009109Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:45:35.009197Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:35.009319Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:45:35.009494Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:45:35.009541Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:45:35.009616Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:45:35.009680Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:45:35.009726Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:45:35.009766Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:45:35.009821Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:35.010095Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:35.010128Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:45:35.010161Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:45:35.010193Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:35.010242Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:45:35.010283Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 
executing on unit FinishPropose 2026-01-04T02:45:35.010321Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:45:35.010372Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:35.010403Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:35.022113Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:45:35.022168Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:35.022214Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:35.022272Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:45:35.022351Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:35.022833Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:35.022884Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:35.022929Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:45:35.023058Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:45:35.023094Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:45:35.023207Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:35.023251Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:45:35.023296Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:45:35.023339Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:45:35.027322Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:45:35.027390Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:35.027634Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:35.027676Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:35.027735Z node 1 
:TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:35.027794Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:35.027835Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:35.027880Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:45:35.027920Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 45:40.349954Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:154] at 9437184 on unit CompleteOperation 2026-01-04T02:45:40.350064Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:154] at 9437184 is DelayComplete 2026-01-04T02:45:40.350081Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:154] at 9437184 executing on unit CompleteOperation 2026-01-04T02:45:40.350095Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:154] at 9437184 to execution unit CompletedOperations 2026-01-04T02:45:40.350115Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:154] at 9437184 on unit CompletedOperations 2026-01-04T02:45:40.350140Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:154] at 9437184 is Executed 2026-01-04T02:45:40.350168Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:154] at 9437184 executing on unit CompletedOperations 2026-01-04T02:45:40.350185Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:154] at 9437184 has finished 2026-01-04T02:45:40.350200Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:40.350214Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:40.350240Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:45:40.350263Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:45:40.367562Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:40.367630Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2026-01-04T02:45:40.367694Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 3 ms, propose latency: 4 ms 2026-01-04T02:45:40.367771Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-04T02:45:40.367809Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:40.367961Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:40.367992Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for 
[1000005:150] at 9437184 on unit CompleteOperation 2026-01-04T02:45:40.368046Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 150] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:45:40.368084Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:40.368176Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:40.368218Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2026-01-04T02:45:40.368255Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 3 ms, propose latency: 4 ms 2026-01-04T02:45:40.368307Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-04T02:45:40.368345Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:40.368447Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:40.368469Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2026-01-04T02:45:40.368504Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 3 ms, propose latency: 4 ms 2026-01-04T02:45:40.368540Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-04T02:45:40.368565Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:40.368652Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:40.368674Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:153] at 9437184 on unit CompleteOperation 2026-01-04T02:45:40.368713Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 153] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:45:40.368747Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:40.368838Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:40.368881Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2026-01-04T02:45:40.368920Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-04T02:45:40.368973Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 
2026-01-04T02:45:40.368996Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:40.369165Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-04T02:45:40.369220Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:40.369265Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2026-01-04T02:45:40.369421Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-04T02:45:40.369458Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:40.369501Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2026-01-04T02:45:40.369590Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-04T02:45:40.369617Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2026-01-04T02:45:40.369668Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-04T02:45:40.369729Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-04T02:45:40.369760Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:45:40.369911Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-04T02:45:40.369954Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-04T02:45:40.369982Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-01-04T02:45:40.370015Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-04T02:45:40.370056Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-04T02:45:40.370080Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:45:40.370316Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-04T02:45:40.370351Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:40.370381Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-01-04T02:45:40.370462Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-04T02:45:40.370492Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:40.370517Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-04T02:45:40.370616Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:460:2402], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-04T02:45:40.370650Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:40.370674Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2026-01-04T02:45:40.370728Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-04T02:45:40.370750Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:40.370774Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ForceOnlineBetweenOnline [GOOD] Test command err: 2026-01-04T02:45:34.798024Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:34.799922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:34.799979Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:34.808360Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:34.808699Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:45:34.808937Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:34.843483Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:34.850573Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:34.850681Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:34.852457Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: 
LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:45:34.852549Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:45:34.852629Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:45:34.853073Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:34.854515Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:34.854598Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:45:34.911515Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:34.934590Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:45:34.934750Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:34.934853Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:45:34.934891Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:45:34.934940Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:45:34.934972Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:34.935156Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.935203Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.935464Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:45:34.935544Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:45:34.935626Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:34.935671Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:34.935707Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:45:34.935767Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:45:34.935815Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:45:34.935866Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:45:34.935903Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:34.935984Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.936019Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.936076Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:45:34.945243Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:45:34.945306Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:34.945411Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:45:34.945594Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:45:34.945636Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:45:34.945678Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:45:34.945730Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:45:34.945796Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:45:34.945852Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:45:34.945893Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:34.946124Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:34.946151Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:45:34.946180Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:45:34.946206Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:34.946247Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:45:34.946284Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:45:34.946314Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:45:34.946349Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:34.946371Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:34.958074Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:45:34.958140Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:34.958169Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:34.958212Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:45:34.958274Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:34.958683Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.958746Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.958795Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:45:34.958936Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:45:34.958973Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:45:34.959090Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:34.959124Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:45:34.959168Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:45:34.959207Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:45:34.961772Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:45:34.961838Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:34.962026Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.962057Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.962100Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:34.962134Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:34.962177Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:34.962211Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:45:34.962240Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
[1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:45:40.223211Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-04T02:45:40.223232Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:40.223346Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-04T02:45:40.223378Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:40.223411Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2026-01-04T02:45:40.223518Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-04T02:45:40.223544Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:40.223580Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2026-01-04T02:45:40.223658Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-04T02:45:40.223687Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2026-01-04T02:45:40.223717Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-04T02:45:40.223760Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-04T02:45:40.223782Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:45:40.223867Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-04T02:45:40.223988Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-04T02:45:40.224007Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:40.224024Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-01-04T02:45:40.224090Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-04T02:45:40.224111Z node 1 :TX_DATASHARD 
TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:40.224151Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-04T02:45:40.224203Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [1:238:2230], Recipient [1:459:2401]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-01-04T02:45:40.224222Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-04T02:45:40.224243Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 9437186 source 9437184 dest 9437186 producer 9437184 txId 152 2026-01-04T02:45:40.224280Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 9437186 got read set: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletProducer# 9437184 ReadSet.Size()# 7 Seqno# 50 Flags# 0} 2026-01-04T02:45:40.224308Z node 1 :TX_DATASHARD TRACE: operation.cpp:67: Filled readset for [1000005:152] from=9437184 to=9437186origin=9437184 2026-01-04T02:45:40.224355Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 9437186 2026-01-04T02:45:40.224449Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-04T02:45:40.224470Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:40.224487Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 2026-01-04T02:45:40.224549Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:459:2401], Recipient [1:459:2401]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:40.224571Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:40.224622Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437186 2026-01-04T02:45:40.224654Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:40.224687Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000005:152] at 9437186 for LoadAndWaitInRS 2026-01-04T02:45:40.224713Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit LoadAndWaitInRS 2026-01-04T02:45:40.224746Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-04T02:45:40.224768Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit LoadAndWaitInRS 2026-01-04T02:45:40.224787Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit BlockFailPoint 2026-01-04T02:45:40.224806Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit 
BlockFailPoint 2026-01-04T02:45:40.224821Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-04T02:45:40.224835Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit BlockFailPoint 2026-01-04T02:45:40.224848Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit ExecuteDataTx 2026-01-04T02:45:40.224861Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit ExecuteDataTx 2026-01-04T02:45:40.225189Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:152] at tablet 9437186 with status COMPLETE 2026-01-04T02:45:40.225236Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:152] at 9437186: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 5, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-04T02:45:40.225270Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is ExecutedNoMoreRestarts 2026-01-04T02:45:40.225288Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit ExecuteDataTx 2026-01-04T02:45:40.225308Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompleteOperation 2026-01-04T02:45:40.225326Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompleteOperation 2026-01-04T02:45:40.225445Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is DelayComplete 2026-01-04T02:45:40.225476Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompleteOperation 2026-01-04T02:45:40.225498Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:152] at 9437186 to execution unit CompletedOperations 2026-01-04T02:45:40.225516Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:152] at 9437186 on unit CompletedOperations 2026-01-04T02:45:40.225545Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:152] at 9437186 is Executed 2026-01-04T02:45:40.225564Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:152] at 9437186 executing on unit CompletedOperations 2026-01-04T02:45:40.225587Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:152] at 9437186 has finished 2026-01-04T02:45:40.225628Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:40.225648Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-01-04T02:45:40.225669Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2026-01-04T02:45:40.225687Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2026-01-04T02:45:40.238797Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 9437186 2026-01-04T02:45:40.238845Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-01-04T02:45:40.238886Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 1 ms, propose latency: 3 ms 2026-01-04T02:45:40.238935Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-04T02:45:40.238961Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:45:40.239171Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-04T02:45:40.239201Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:40.239231Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> BasicUsage::CreateTopicWithCustomMetricsLevel [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag [GOOD] Test command err: 2026-01-04T02:45:34.725366Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:34.726977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:34.727028Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:34.736805Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:34.737098Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:45:34.737360Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:34.783496Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:34.790645Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:34.790733Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:34.792234Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:45:34.792300Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:45:34.792364Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:45:34.792721Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:34.793885Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:34.793969Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:45:34.858956Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:34.881587Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:45:34.881728Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:34.881815Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:45:34.881856Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:45:34.881904Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:45:34.881940Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:34.882117Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.882157Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.882363Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:45:34.882451Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:45:34.882506Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:34.882540Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:34.882570Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:45:34.882595Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:45:34.882626Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:45:34.882666Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:45:34.882704Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:34.882794Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.882827Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.882871Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:45:34.887883Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 
4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\000\030\000(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:45:34.887949Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:34.888030Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:45:34.888157Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:45:34.888192Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:45:34.888233Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:45:34.888280Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:45:34.888308Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:45:34.888344Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:45:34.888387Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:34.888612Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:34.888635Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:45:34.888670Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:45:34.888691Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:34.888743Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:45:34.888776Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:45:34.888805Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:45:34.888829Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:34.888852Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:34.900393Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:45:34.900441Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:34.900473Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:34.900503Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 
2026-01-04T02:45:34.900548Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:34.900983Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.901028Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.901071Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:45:34.901175Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:45:34.901200Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:45:34.901287Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:34.901317Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:45:34.901342Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:45:34.901371Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:45:34.903877Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:45:34.903928Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:34.904110Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.904141Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.904185Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:34.904216Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:34.904240Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:34.904277Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:45:34.904321Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
ions 2026-01-04T02:45:40.930746Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-01-04T02:45:40.931019Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:237:2229], Recipient [2:237:2229]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:40.931076Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:40.931139Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:40.931174Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:40.931201Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:40.931234Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000016:45] in PlanQueue unit at 9437184 2026-01-04T02:45:40.931261Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit PlanQueue 2026-01-04T02:45:40.931312Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-04T02:45:40.931341Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit PlanQueue 2026-01-04T02:45:40.931369Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit LoadTxDetails 2026-01-04T02:45:40.931395Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit LoadTxDetails 2026-01-04T02:45:40.932094Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437184 loaded tx from db 1000016:45 keys extracted: 2 2026-01-04T02:45:40.932147Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-04T02:45:40.932175Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadTxDetails 2026-01-04T02:45:40.932227Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit FinalizeDataTxPlan 2026-01-04T02:45:40.932258Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit FinalizeDataTxPlan 2026-01-04T02:45:40.932306Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-04T02:45:40.932333Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit FinalizeDataTxPlan 2026-01-04T02:45:40.932372Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-04T02:45:40.932398Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BuildAndWaitDependencies 2026-01-04T02:45:40.932445Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000016:45] is the new logically complete end at 9437184 2026-01-04T02:45:40.932484Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000016:45] is the new logically incomplete end at 9437184 2026-01-04T02:45:40.932521Z 
node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000016:45] at 9437184 2026-01-04T02:45:40.932558Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-04T02:45:40.932584Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-04T02:45:40.932607Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BuildDataTxOutRS 2026-01-04T02:45:40.932628Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BuildDataTxOutRS 2026-01-04T02:45:40.932674Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-04T02:45:40.932700Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BuildDataTxOutRS 2026-01-04T02:45:40.932722Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit StoreAndSendOutRS 2026-01-04T02:45:40.932745Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit StoreAndSendOutRS 2026-01-04T02:45:40.932769Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-04T02:45:40.932797Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit StoreAndSendOutRS 2026-01-04T02:45:40.932827Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit PrepareDataTxInRS 2026-01-04T02:45:40.932852Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit PrepareDataTxInRS 2026-01-04T02:45:40.932879Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-04T02:45:40.932901Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit PrepareDataTxInRS 2026-01-04T02:45:40.932923Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit LoadAndWaitInRS 2026-01-04T02:45:40.932959Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit LoadAndWaitInRS 2026-01-04T02:45:40.932987Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-04T02:45:40.933024Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit LoadAndWaitInRS 2026-01-04T02:45:40.933050Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit BlockFailPoint 2026-01-04T02:45:40.933075Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit BlockFailPoint 2026-01-04T02:45:40.933117Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-04T02:45:40.933145Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit BlockFailPoint 2026-01-04T02:45:40.933172Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit ExecuteDataTx 2026-01-04T02:45:40.933198Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit ExecuteDataTx 2026-01-04T02:45:40.933551Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000016:45] at tablet 9437184 with status COMPLETE 2026-01-04T02:45:40.933601Z node 2 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000016:45] at 9437184: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-04T02:45:40.933652Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-04T02:45:40.933676Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit ExecuteDataTx 2026-01-04T02:45:40.933699Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit CompleteOperation 2026-01-04T02:45:40.933745Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit CompleteOperation 2026-01-04T02:45:40.933959Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is DelayComplete 2026-01-04T02:45:40.934004Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit CompleteOperation 2026-01-04T02:45:40.934037Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437184 to execution unit CompletedOperations 2026-01-04T02:45:40.934064Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437184 on unit CompletedOperations 2026-01-04T02:45:40.934096Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437184 is Executed 2026-01-04T02:45:40.934118Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437184 executing on unit CompletedOperations 2026-01-04T02:45:40.934147Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000016:45] at 9437184 has finished 2026-01-04T02:45:40.934176Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:40.934202Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:40.934235Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:45:40.934264Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:45:40.947435Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2026-01-04T02:45:40.947533Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2026-01-04T02:45:40.947605Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:40.947665Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete 
execution for [1000016:45] at 9437184 on unit CompleteOperation 2026-01-04T02:45:40.947740Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [2:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:45:40.947810Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:40.948094Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2026-01-04T02:45:40.948133Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2026-01-04T02:45:40.948171Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-04T02:45:40.948198Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2026-01-04T02:45:40.948237Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [2:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:45:40.948271Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 |93.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest |93.4%| [TA] $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |93.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |93.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/dsproxy/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-anonymous >> DataShardOutOfOrder::TestPlannedTimeoutSplit >> DataShardOutOfOrder::UncommittedReads >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-system [GOOD] >> TKeyValueTest::TestVacuumWithMockDisk [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit+UseSink [GOOD] >> DataShardOutOfOrder::LocksBrokenStats >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWoIndexes [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-system [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::CreateTopicWithCustomMetricsLevel [GOOD] Test command err: 2026-01-04T02:44:43.632785Z :MaxByteSizeEqualZero INFO: Random seed for debugging is 1767494683632761 2026-01-04T02:44:43.819423Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331861651921869:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:44:43.820893Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:44:43.855378Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331859377177185:2168];send_to=[0:7307199536658146131:7762515]; 
2026-01-04T02:44:43.855620Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:44:43.856299Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:44:43.863121Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:44:44.009152Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:44:44.053777Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:44:44.084369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:44:44.084484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:44:44.085066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:44:44.085148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:44:44.092656Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:44:44.092801Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:44:44.093435Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:44:44.161294Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:44.212916Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/002ec0/r3tmp/yandexIs5Asn.tmp 2026-01-04T02:44:44.212947Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/002ec0/r3tmp/yandexIs5Asn.tmp 2026-01-04T02:44:44.213129Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/002ec0/r3tmp/yandexIs5Asn.tmp 2026-01-04T02:44:44.213226Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:44:44.213708Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:44:44.246793Z INFO: TTestServer started on Port 13850 GrpcPort 3539 2026-01-04T02:44:44.261692Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:3539 2026-01-04T02:44:44.449263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:44.826459Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:44:44.860311Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:44:46.858720Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331872262079302:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:46.858721Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331872262079291:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:46.858819Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:46.859093Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331872262079306:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:46.859151Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:46.863740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:44:46.878109Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591331872262079305:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-04T02:44:47.021611Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591331872262079337:2138] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:44:47.212256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:44:47.211836Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591331876557046648:2311], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:44:47.212333Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZmIzOGQ0OTktNWNjMDk5MmEtOWE4Nzk3ZGEtY2MxOTM3YmY=, ActorId: [2:7591331872262079289:2300], ActorState: ExecuteState, LegacyTraceId: 01ke3e96483bmh6bw49vmcfdcr, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:44:47.212713Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591331874536825148:2335], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:44:47.213098Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZjI5MjNmNWUtZDZkN2U1MGYtNDliNmFiNzAtMjY1NjUzNTQ=, ActorId: [1:7591331874536825121:2329], ActorState: ExecuteState, LegacyTraceId: 01ke3e966p4q41vnrc6dns2ne1, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:44:47.214926Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:44:47.214948Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:44:47.370960Z ... 
Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--test-topic-1767494740" Version: 2 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "unknown" Ident: "unknown" Topic: "test-topic-1767494740" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--test-topic-1767494740" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 72075186224037894 } MetricsLevel: 3 MonitoringProjectId: "" 2026-01-04T02:45:40.446115Z node 10 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:45:40.446229Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-04T02:45:40.448258Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-04T02:45:40.455709Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:216: new Describe topic request 2026-01-04T02:45:40.460871Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:207: new Alter topic request 2026-01-04T02:45:40.462313Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterPersQueueGroup, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_pq.cpp:311) 2026-01-04T02:45:40.468760Z node 9 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037895][rt3.dc1--test-topic-1767494740] updating configuration. Deleted partitions []. 
Added partitions [] 2026-01-04T02:45:40.469057Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-04T02:45:40.471009Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-04T02:45:40.476408Z node 10 :PERSQUEUE DEBUG: partition.cpp:1289: [72075186224037894][Partition][0][StateIdle] Handle TEvPQ::TEvProposePartitionConfig Step 1767494740522, TxId 281474976710678 2026-01-04T02:45:40.476493Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:40.476522Z node 10 :PERSQUEUE DEBUG: partition.cpp:2416: [72075186224037894][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TTransaction[ProposeConfig]) 2026-01-04T02:45:40.476554Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:45:40.476566Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:40.476581Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:45:40.476591Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:45:40.476823Z node 10 :PERSQUEUE DEBUG: partition.cpp:1433: [72075186224037894][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1767494740522, TxId 281474976710678 2026-01-04T02:45:40.476856Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:40.476872Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:45:40.476888Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:40.476916Z node 10 :PERSQUEUE DEBUG: partition.cpp:2480: [72075186224037894][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-04T02:45:40.477074Z node 10 :PERSQUEUE DEBUG: partition.cpp:3950: [72075186224037894][Partition][0][StateIdle] Schedule reply tx done 281474976710678 2026-01-04T02:45:40.477102Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:45:40.477116Z node 10 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037894][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:45:40.477136Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:45:40.477321Z node 10 :PERSQUEUE DEBUG: read.h:275: [72075186224037894][PQCacheProxy]CacheProxy. 
Passthrough write request to KV 2026-01-04T02:45:40.478787Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:40.478814Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:40.478829Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:40.478849Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:40.478864Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:45:40.479480Z node 10 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037894][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T02:45:40.479665Z node 10 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037894][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T02:45:40.479735Z node 10 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037894][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T02:45:40.479772Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:40.479785Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:40.479796Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:40.479807Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:40.479817Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:45:40.479833Z node 10 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037894][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:45:40.480071Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72075186224037894] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "rt3.dc1--test-topic-1767494740" Version: 3 LocalDC: true RequireAuthWrite: true RequireAuthRead: true Producer: "unknown" Ident: "unknown" Topic: "test-topic-1767494740" DC: "dc1" FormatVersion: 0 Codecs { } TopicPath: "/Root/PQ/rt3.dc1--test-topic-1767494740" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 72075186224037894 } MetricsLevel: 4 MonitoringProjectId: "" 2026-01-04T02:45:40.480101Z node 10 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:45:40.480198Z node 10 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-04T02:45:40.481957Z node 10 :PERSQUEUE 
DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-04T02:45:40.488761Z node 9 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:216: new Describe topic request 2026-01-04T02:45:40.506467Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:40.506505Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:40.506521Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:40.506543Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:40.506558Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:45:40.579161Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:40.579191Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:40.579204Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:40.579222Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:40.579234Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:45:40.606805Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:40.606840Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:40.606858Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:40.606876Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:40.606890Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:45:40.679545Z node 10 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:45:40.679587Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:40.679605Z node 10 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:45:40.679626Z node 10 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:45:40.679642Z node 10 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |93.5%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> DataShardScan::ScanFollowedByUpdate >> TxOrderInternals::OperationOrder [GOOD] >> 
TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestVacuumWithMockDisk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] 2026-01-04T02:44:59.425410Z node 3 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:58:2057] recipient: [3:56:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:58:2057] recipient: [3:56:2099] Leader for TabletID 72057594037927937 is [3:60:2101] sender: [3:61:2057] recipient: [3:56:2099] Leader for TabletID 72057594037927937 is [3:60:2101] sender: [3:78:2057] recipient: [3:17:2064] 2026-01-04T02:44:59.742763Z node 4 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:58:2057] recipient: [4:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:58:2057] recipient: [4:55:2099] Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:61:2057] recipient: [4:55:2099] Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:78:2057] recipient: [4:17:2064] !Reboot 72057594037927937 (actor [4:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:80:2057] recipient: [4:42:2089] Leader for TabletID 72057594037927937 is [4:60:2101] sender: [4:83:2057] recipient: [4:82:2114] Leader for TabletID 72057594037927937 is [4:84:2115] sender: [4:85:2057] recipient: [4:82:2114] !Reboot 72057594037927937 (actor [4:60:2101]) rebooted! !Reboot 72057594037927937 (actor [4:60:2101]) tablet resolver refreshed! new actor is[4:84:2115] Leader for TabletID 72057594037927937 is [4:84:2115] sender: [4:200:2057] recipient: [4:17:2064] 2026-01-04T02:45:01.804172Z node 5 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:58:2057] recipient: [5:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:58:2057] recipient: [5:55:2099] Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:61:2057] recipient: [5:55:2099] Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:78:2057] recipient: [5:17:2064] !Reboot 72057594037927937 (actor [5:60:2101]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! 
Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:80:2057] recipient: [5:42:2089] Leader for TabletID 72057594037927937 is [5:60:2101] sender: [5:83:2057] recipient: [5:82:2114] Leader for TabletID 72057594037927937 is [5:84:2115] sender: [5:85:2057] recipient: [5:82:2114] !Reboot 72057594037927937 (actor [5:60:2101]) rebooted! !Reboot 72057594037927937 (actor [5:60:2101]) tablet resolver refreshed! new actor is[5:84:2115] Leader for TabletID 72057594037927937 is [5:84:2115] sender: [5:200:2057] recipient: [5:17:2064] 2026-01-04T02:45:03.688722Z node 6 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:58:2057] recipient: [6:54:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:58:2057] recipient: [6:54:2099] Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:61:2057] recipient: [6:54:2099] Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:78:2057] recipient: [6:17:2064] !Reboot 72057594037927937 (actor [6:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:81:2057] recipient: [6:42:2089] Leader for TabletID 72057594037927937 is [6:60:2101] sender: [6:84:2057] recipient: [6:83:2114] Leader for TabletID 72057594037927937 is [6:85:2115] sender: [6:86:2057] recipient: [6:83:2114] !Reboot 72057594037927937 (actor [6:60:2101]) rebooted! !Reboot 72057594037927937 (actor [6:60:2101]) tablet resolver refreshed! new actor is[6:85:2115] Leader for TabletID 72057594037927937 is [6:85:2115] sender: [6:201:2057] recipient: [6:17:2064] 2026-01-04T02:45:05.551571Z node 7 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:58:2057] recipient: [7:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:58:2057] recipient: [7:55:2099] Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:61:2057] recipient: [7:55:2099] Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:78:2057] recipient: [7:17:2064] !Reboot 72057594037927937 (actor [7:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:84:2057] recipient: [7:42:2089] Leader for TabletID 72057594037927937 is [7:60:2101] sender: [7:87:2057] recipient: [7:86:2117] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:89:2057] recipient: [7:86:2117] !Reboot 72057594037927937 (actor [7:60:2101]) rebooted! !Reboot 72057594037927937 (actor [7:60:2101]) tablet resolver refreshed! new actor is[7:88:2118] Leader for TabletID 72057594037927937 is [7:88:2118] sender: [7:204:2057] recipient: [7:17:2064] 2026-01-04T02:45:07.410018Z node 8 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:58:2057] recipient: [8:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:58:2057] recipient: [8:55:2099] Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:61:2057] recipient: [8:55:2099] Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:78:2057] recipient: [8:17:2064] !Reboot 72057594037927937 (actor [8:60:2101]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:84:2057] recipient: [8:42:2089] Leader for TabletID 72057594037927937 is [8:60:2101] sender: [8:87:2057] recipient: [8:86:2117] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:89:2057] recipient: [8:86:2117] !Reboot 72057594037927937 (actor [8:60:2101]) rebooted! !Reboot 72057594037927937 (actor [8:60:2101]) tablet resolver refreshed! new actor is[8:88:2118] Leader for TabletID 72057594037927937 is [8:88:2118] sender: [8:204:2057] recipient: [8:17:2064] 2026-01-04T02:45:09.299888Z node 9 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:58:2057] recipient: [9:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:58:2057] recipient: [9:55:2099] Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:61:2057] recipient: [9:55:2099] Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:78:2057] recipient: [9:17:2064] !Reboot 72057594037927937 (actor [9:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:85:2057] recipient: [9:42:2089] Leader for TabletID 72057594037927937 is [9:60:2101] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:60:2101]) rebooted! !Reboot 72057594037927937 (actor [9:60:2101]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:17:2064] 2026-01-04T02:45:11.202787Z node 10 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:58:2057] recipient: [10:56:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:58:2057] recipient: [10:56:2099] Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:61:2057] recipient: [10:56:2099] Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:78:2057] recipient: [10:17:2064] !Reboot 72057594037927937 (actor [10:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:88:2057] recipient: [10:42:2089] Leader for TabletID 72057594037927937 is [10:60:2101] sender: [10:91:2057] recipient: [10:90:2120] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:93:2057] recipient: [10:90:2120] !Reboot 72057594037927937 (actor [10:60:2101]) rebooted! !Reboot 72057594037927937 (actor [10:60:2101]) tablet resolver refreshed! 
new actor is[10:92:2121] Leader for TabletID 72057594037927937 is [10:92:2121] sender: [10:208:2057] recipient: [10:17:2064] 2026-01-04T02:45:13.061477Z node 11 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:58:2057] recipient: [11:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:58:2057] recipient: [11:55:2099] Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:61:2057] recipient: [11:55:2099] Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:78:2057] recipient: [11:17:2064] !Reboot 72057594037927937 (actor [11:60:2101]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:88:2057] recipient: [11:42:2089] Leader for TabletID 72057594037927937 is [11:60:2101] sender: [11:91:2057] recipient: [11:90:2120] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:93:2057] recipient: [11:90:2120] !Reboot 72057594037927937 (actor [11:60:2101]) rebooted! !Reboot 72057594037927937 (actor [11:60:2101]) tablet resolver refreshed! new actor is[11:92:2121] Leader for TabletID 72057594037927937 is [11:92:2121] sender: [11:208:2057] recipient: [11:17:2064] 2026-01-04T02:45:14.931705Z node 12 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:58:2057] recipient: [12:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:58:2057] recipient: [12:55:2099] Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:61:2057] recipient: [12:55:2099] Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:78:2057] recipient: [12:17:2064] !Reboot 72057594037927937 (actor [12:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:89:2057] recipient: [12:42:2089] Leader for TabletID 72057594037927937 is [12:60:2101] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:60:2101]) rebooted! !Reboot 72057594037927937 (actor [12:60:2101]) tablet resolver refreshed! new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:212 ... pient: [18:42:2089] Leader for TabletID 72057594037927937 is [18:60:2101] sender: [18:100:2057] recipient: [18:99:2126] Leader for TabletID 72057594037927937 is [18:101:2127] sender: [18:102:2057] recipient: [18:99:2126] !Reboot 72057594037927937 (actor [18:60:2101]) rebooted! !Reboot 72057594037927937 (actor [18:60:2101]) tablet resolver refreshed! 
new actor is[18:101:2127] Leader for TabletID 72057594037927937 is [18:101:2127] sender: [18:217:2057] recipient: [18:17:2064] 2026-01-04T02:45:28.800757Z node 19 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:58:2057] recipient: [19:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:58:2057] recipient: [19:55:2099] Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:61:2057] recipient: [19:55:2099] Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:78:2057] recipient: [19:17:2064] !Reboot 72057594037927937 (actor [19:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:100:2057] recipient: [19:42:2089] Leader for TabletID 72057594037927937 is [19:60:2101] sender: [19:103:2057] recipient: [19:102:2129] Leader for TabletID 72057594037927937 is [19:104:2130] sender: [19:105:2057] recipient: [19:102:2129] !Reboot 72057594037927937 (actor [19:60:2101]) rebooted! !Reboot 72057594037927937 (actor [19:60:2101]) tablet resolver refreshed! new actor is[19:104:2130] Leader for TabletID 72057594037927937 is [19:104:2130] sender: [19:220:2057] recipient: [19:17:2064] 2026-01-04T02:45:30.661224Z node 20 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:58:2057] recipient: [20:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:58:2057] recipient: [20:55:2099] Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:61:2057] recipient: [20:55:2099] Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:78:2057] recipient: [20:17:2064] !Reboot 72057594037927937 (actor [20:60:2101]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:100:2057] recipient: [20:42:2089] Leader for TabletID 72057594037927937 is [20:60:2101] sender: [20:103:2057] recipient: [20:102:2129] Leader for TabletID 72057594037927937 is [20:104:2130] sender: [20:105:2057] recipient: [20:102:2129] !Reboot 72057594037927937 (actor [20:60:2101]) rebooted! !Reboot 72057594037927937 (actor [20:60:2101]) tablet resolver refreshed! new actor is[20:104:2130] Leader for TabletID 72057594037927937 is [20:104:2130] sender: [20:220:2057] recipient: [20:17:2064] 2026-01-04T02:45:32.535051Z node 21 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:58:2057] recipient: [21:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:58:2057] recipient: [21:55:2099] Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:61:2057] recipient: [21:55:2099] Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:78:2057] recipient: [21:17:2064] !Reboot 72057594037927937 (actor [21:60:2101]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:101:2057] recipient: [21:42:2089] Leader for TabletID 72057594037927937 is [21:60:2101] sender: [21:104:2057] recipient: [21:103:2129] Leader for TabletID 72057594037927937 is [21:105:2130] sender: [21:106:2057] recipient: [21:103:2129] !Reboot 72057594037927937 (actor [21:60:2101]) rebooted! !Reboot 72057594037927937 (actor [21:60:2101]) tablet resolver refreshed! new actor is[21:105:2130] Leader for TabletID 72057594037927937 is [21:105:2130] sender: [21:221:2057] recipient: [21:17:2064] 2026-01-04T02:45:34.449063Z node 22 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:58:2057] recipient: [22:56:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:58:2057] recipient: [22:56:2099] Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:61:2057] recipient: [22:56:2099] Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:78:2057] recipient: [22:17:2064] !Reboot 72057594037927937 (actor [22:60:2101]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:102:2057] recipient: [22:42:2089] Leader for TabletID 72057594037927937 is [22:60:2101] sender: [22:105:2057] recipient: [22:104:2130] Leader for TabletID 72057594037927937 is [22:106:2131] sender: [22:107:2057] recipient: [22:104:2130] !Reboot 72057594037927937 (actor [22:60:2101]) rebooted! !Reboot 72057594037927937 (actor [22:60:2101]) tablet resolver refreshed! new actor is[22:106:2131] Leader for TabletID 72057594037927937 is [22:106:2131] sender: [22:126:2057] recipient: [22:17:2064] 2026-01-04T02:45:34.716876Z node 23 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:58:2057] recipient: [23:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:58:2057] recipient: [23:55:2099] Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:61:2057] recipient: [23:55:2099] Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:78:2057] recipient: [23:17:2064] !Reboot 72057594037927937 (actor [23:60:2101]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:103:2057] recipient: [23:42:2089] Leader for TabletID 72057594037927937 is [23:60:2101] sender: [23:106:2057] recipient: [23:105:2131] Leader for TabletID 72057594037927937 is [23:107:2132] sender: [23:108:2057] recipient: [23:105:2131] !Reboot 72057594037927937 (actor [23:60:2101]) rebooted! !Reboot 72057594037927937 (actor [23:60:2101]) tablet resolver refreshed! 
new actor is[23:107:2132] Leader for TabletID 72057594037927937 is [23:107:2132] sender: [23:127:2057] recipient: [23:17:2064] 2026-01-04T02:45:35.016726Z node 24 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:58:2057] recipient: [24:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:58:2057] recipient: [24:55:2099] Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:61:2057] recipient: [24:55:2099] Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:78:2057] recipient: [24:17:2064] !Reboot 72057594037927937 (actor [24:60:2101]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:106:2057] recipient: [24:42:2089] Leader for TabletID 72057594037927937 is [24:60:2101] sender: [24:109:2057] recipient: [24:108:2134] Leader for TabletID 72057594037927937 is [24:110:2135] sender: [24:111:2057] recipient: [24:108:2134] !Reboot 72057594037927937 (actor [24:60:2101]) rebooted! !Reboot 72057594037927937 (actor [24:60:2101]) tablet resolver refreshed! new actor is[24:110:2135] Leader for TabletID 72057594037927937 is [24:110:2135] sender: [24:226:2057] recipient: [24:17:2064] 2026-01-04T02:45:36.893008Z node 25 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:58:2057] recipient: [25:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:58:2057] recipient: [25:55:2099] Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:61:2057] recipient: [25:55:2099] Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:78:2057] recipient: [25:17:2064] !Reboot 72057594037927937 (actor [25:60:2101]) on event NKikimr::TEvKeyValue::TEvVacuumRequest ! Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:106:2057] recipient: [25:42:2089] Leader for TabletID 72057594037927937 is [25:60:2101] sender: [25:109:2057] recipient: [25:108:2134] Leader for TabletID 72057594037927937 is [25:110:2135] sender: [25:111:2057] recipient: [25:108:2134] !Reboot 72057594037927937 (actor [25:60:2101]) rebooted! !Reboot 72057594037927937 (actor [25:60:2101]) tablet resolver refreshed! new actor is[25:110:2135] Leader for TabletID 72057594037927937 is [25:110:2135] sender: [25:226:2057] recipient: [25:17:2064] 2026-01-04T02:45:38.779091Z node 26 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:58:2057] recipient: [26:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:58:2057] recipient: [26:55:2099] Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:61:2057] recipient: [26:55:2099] Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:78:2057] recipient: [26:17:2064] !Reboot 72057594037927937 (actor [26:60:2101]) on event NKikimr::TEvKeyValue::TEvForceTabletVacuum ! 
Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:106:2057] recipient: [26:42:2089] Leader for TabletID 72057594037927937 is [26:60:2101] sender: [26:109:2057] recipient: [26:108:2134] Leader for TabletID 72057594037927937 is [26:110:2135] sender: [26:111:2057] recipient: [26:108:2134] !Reboot 72057594037927937 (actor [26:60:2101]) rebooted! !Reboot 72057594037927937 (actor [26:60:2101]) tablet resolver refreshed! new actor is[26:110:2135] Leader for TabletID 72057594037927937 is [26:110:2135] sender: [26:226:2057] recipient: [26:17:2064] 2026-01-04T02:45:40.683834Z node 27 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:58:2057] recipient: [27:56:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:58:2057] recipient: [27:56:2099] Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:61:2057] recipient: [27:56:2099] Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:78:2057] recipient: [27:17:2064] !Reboot 72057594037927937 (actor [27:60:2101]) on event NKikimr::TEvTablet::TEvFollowerGcApplied ! Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:111:2057] recipient: [27:42:2089] Leader for TabletID 72057594037927937 is [27:60:2101] sender: [27:114:2057] recipient: [27:113:2138] Leader for TabletID 72057594037927937 is [27:115:2139] sender: [27:116:2057] recipient: [27:113:2138] !Reboot 72057594037927937 (actor [27:60:2101]) rebooted! !Reboot 72057594037927937 (actor [27:60:2101]) tablet resolver refreshed! new actor is[27:115:2139] Leader for TabletID 72057594037927937 is [27:115:2139] sender: [27:231:2057] recipient: [27:17:2064] 2026-01-04T02:45:42.588934Z node 28 :BS_NODE CRIT: {NWDC41@distconf_quorum.cpp:167} configuration incorrect Error# VDisk [0:1:0:0:0] does not match any static group Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:58:2057] recipient: [28:55:2099] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:58:2057] recipient: [28:55:2099] Leader for TabletID 72057594037927937 is [28:60:2101] sender: [28:61:2057] recipient: [28:55:2099] Leader for TabletID 72057594037927937 is [28:60:2101] sender: [28:78:2057] recipient: [28:17:2064] |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed-EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit+UseSink [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock+EvWrite [GOOD] >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite >> DataShardOutOfOrder::TestOutOfOrderReadOnlyAllowed+EvWrite [GOOD] >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> TxOrderInternals::OperationOrder [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-system >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-NoDbAdmin-system [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteReadRangeDataLimitThenLimitWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! 
new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:76:2057] recipient: [13:14:2061] !Reboot 72057594037927937 (actor [13:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:78:2057] recipient: [13:39:2086] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:81:2057] recipient: [13:80:2112] Leader for TabletID 72057594037927937 is [13:82:2113] sender: [13:83:2057] recipient: [13:80:2112] !Reboot 72057594037927937 (actor [13:58:2099]) rebooted! !Reboot 72057594037927937 (actor [13:58:2099]) tablet resolver refreshed! new actor is[13:82:2113] Leader for TabletID 72057594037927937 is [13:82:2113] sender: [13:198:2057] recipient: [13:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [14:56:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:59:2057] recipient: [14:53:2097] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:76:2057] recipient: [14:14:2061] !Reboot 72057594037927937 (actor [14:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:78:2057] recipient: [14:39:2086] Leader for TabletID 72057594037927937 is [14:58:2099] sender: [14:81:2057] recipient: [14:80:2112] Leader for TabletID 72057594037927937 is [14:82:2113] sender: [14:83:2057] recipient: [14:80:2112] !Reboot 72057594037927937 (actor [14:58:2099]) rebooted! !Reboot 72057594037927937 (actor [14:58:2099]) tablet resolver refreshed! new actor is[14:82:2113] Leader for TabletID 72057594037927937 is [14:82:2113] sender: [14:198:2057] recipient: [14:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [15:56:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:59:2057] recipient: [15:53:2097] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:76:2057] recipient: [15:14:2061] !Reboot 72057594037927937 (actor [15:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:79:2057] recipient: [15:39:2086] Leader for TabletID 72057594037927937 is [15:58:2099] sender: [15:82:2057] recipient: [15:81:2112] Leader for TabletID 72057594037927937 is [15:83:2113] sender: [15:84:2057] recipient: [15:81:2112] !Reboot 72057594037927937 (actor [15:58:2099]) rebooted! !Reboot 72057594037927937 (actor [15:58:2099]) tablet resolver refreshed! new actor is[15:83:2113] Leader for TabletID 72057594037927937 is [15:83:2113] sender: [15:199:2057] recipient: [15:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [16:56:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:59:2057] recipient: [16:53:2097] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:76:2057] recipient: [16:14:2061] !Reboot 72057594037927937 (actor [16:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:82:2057] recipient: [16:39:2086] Leader for TabletID 72057594037927937 is [16:58:2099] sender: [16:85:2057] recipient: [16:84:2115] Leader for TabletID 72057594037927937 is [16:86:2116] sender: [16:87:2057] recipient: [16:84:2115] !Reboot 72057594037927937 (actor [16:58:2099]) rebooted! !Reboot 72057594037927937 (actor [16:58:2099]) tablet resolver refreshed! new actor is[16:86:2116] Leader for TabletID 72057594037927937 is [16:86:2116] sender: [16:202:2057] recipient: [16:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [17:56:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:59:2057] recipient: [17:53:2097] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:76:2057] recipient: [17:14:2061] !Reboot 72057594037927937 (actor [17:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:82:2057] recipient: [17:39:2086] Leader for TabletID 72057594037927937 is [17:58:2099] sender: [17:85:2057] recipient: [17:84:2115] Leader for TabletID 72057594037927937 is [17:86:2116] sender: [17:87:2057] recipient: [17:84:2115] !Reboot 72057594037927937 (actor [17:58:2099]) rebooted! !Reboot 72057594037927937 (actor [17:58:2099]) tablet resolver refreshed! new actor is[17:86:2116] Leader for TabletID 72057594037927937 is [17:86:2116] sender: [17:202:2057] recipient: [17:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [18:56:2057] recipient: [18:52:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:59:2057] recipient: [18:52:2097] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:76:2057] recipient: [18:14:2061] !Reboot 72057594037927937 (actor [18:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:83:2057] recipient: [18:39:2086] Leader for TabletID 72057594037927937 is [18:58:2099] sender: [18:86:2057] recipient: [18:85:2115] Leader for TabletID 72057594037927937 is [18:87:2116] sender: [18:88:2057] recipient: [18:85:2115] !Reboot 72057594037927937 (actor [18:58:2099]) rebooted! !Reboot 72057594037927937 (actor [18:58:2099]) tablet resolver refreshed! 
new actor is[18:87:2116] Leader for TabletID 72057594037927937 is [18:87:2116] sender: [18:105:2057] recipient: [18:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [19:56:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:59:2057] recipient: [19:53:2097] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:76:2057] recipient: [19:14:2061] !Reboot 72057594037927937 (actor [19:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:85:2057] recipient: [19:39:2086] Leader for TabletID 72057594037927937 is [19:58:2099] sender: [19:88:2057] recipient: [19:87:2117] Leader for TabletID 72057594037927937 is [19:89:2118] sender: [19:90:2057] recipient: [19:87:2117] !Reboot 72057594037927937 (actor [19:58:2099]) rebooted! !Reboot 72057594037927937 (actor [19:58:2099]) tablet resolver refreshed! new actor is[19:89:2118] Leader for TabletID 72057594037927937 is [19:89:2118] sender: [19:205:2057] recipient: [19:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [20:56:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:59:2057] recipient: [20:53:2097] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:76:2057] recipient: [20:14:2061] !Reboot 72057594037927937 (actor [20:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:85:2057] recipient: [20:39:2086] Leader for TabletID 72057594037927937 is [20:58:2099] sender: [20:88:2057] recipient: [20:87:2117] Leader for TabletID 72057594037927937 is [20:89:2118] sender: [20:90:2057] recipient: [20:87:2117] !Reboot 72057594037927937 (actor [20:58:2099]) rebooted! !Reboot 72057594037927937 (actor [20:58:2099]) tablet resolver refreshed! new actor is[20:89:2118] Leader for TabletID 72057594037927937 is [20:89:2118] sender: [20:205:2057] recipient: [20:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [21:56:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:59:2057] recipient: [21:53:2097] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:76:2057] recipient: [21:14:2061] !Reboot 72057594037927937 (actor [21:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:86:2057] recipient: [21:39:2086] Leader for TabletID 72057594037927937 is [21:58:2099] sender: [21:89:2057] recipient: [21:88:2117] Leader for TabletID 72057594037927937 is [21:90:2118] sender: [21:91:2057] recipient: [21:88:2117] !Reboot 72057594037927937 (actor [21:58:2099]) rebooted! !Reboot 72057594037927937 (actor [21:58:2099]) tablet resolver refreshed! 
new actor is[21:90:2118] Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [22:56:2057] recipient: [22:54:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:59:2057] recipient: [22:54:2097] Leader for TabletID 72057594037927937 is [22:58:2099] sender: [22:76:2057] recipient: [22:14:2061] |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateStreamingQuery-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-04T02:44:09.962787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:09.962884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:09.962932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:09.962977Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:09.963019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:09.963047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:09.963110Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:09.963200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:09.964118Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:09.965059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:10.074518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:44:10.074588Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:10.075292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:10.088211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:10.088656Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:10.088866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:10.099981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:10.100160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:10.100740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.101032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:10.103752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:10.103963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:10.108121Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:10.108220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:10.108348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:10.108405Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:10.108482Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:10.108716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.115790Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:245:2058] recipient: [1:15:2062] 2026-01-04T02:44:10.243950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:10.244193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.244417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:44:10.244473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 
2026-01-04T02:44:10.244719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:44:10.244800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:10.247032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.247250Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:44:10.247480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.247548Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:44:10.247608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:44:10.247644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:44:10.249675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.249737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:44:10.249780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:44:10.251502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.251569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.251625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.251688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:44:10.255401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:10.257193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 
2026-01-04T02:44:10.257369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:44:10.258519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.258660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:10.258721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.258980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:44:10.259030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.259235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:10.259319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:44:10.261277Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 
as 3 2026-01-04T02:45:42.488401Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-01-04T02:45:42.488432Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-01-04T02:45:42.488464Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-01-04T02:45:42.488490Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2026-01-04T02:45:42.488516Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 2 2026-01-04T02:45:42.490152Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:42.490247Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:42.490287Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:45:42.490325Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-01-04T02:45:42.490365Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-01-04T02:45:42.491645Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:42.491734Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:42.491785Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:45:42.491818Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-01-04T02:45:42.491854Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-01-04T02:45:42.493631Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 
72057594046678944, cookie: 202 2026-01-04T02:45:42.493717Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:42.493751Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:45:42.493786Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2026-01-04T02:45:42.493847Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-01-04T02:45:42.494321Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:42.494403Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:42.494435Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:45:42.494467Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 2 2026-01-04T02:45:42.494500Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 2 2026-01-04T02:45:42.494564Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-01-04T02:45:42.497190Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-04T02:45:42.497317Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-04T02:45:42.499242Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-04T02:45:42.499386Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-01-04T02:45:42.500909Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-01-04T02:45:42.500952Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-01-04T02:45:42.502620Z node 32 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-01-04T02:45:42.502729Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-01-04T02:45:42.502766Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2682:4671] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-01-04T02:45:42.504021Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-01-04T02:45:42.504062Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-01-04T02:45:42.504137Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-01-04T02:45:42.504163Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-01-04T02:45:42.504242Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-01-04T02:45:42.504276Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-01-04T02:45:42.504338Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-01-04T02:45:42.504366Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-01-04T02:45:42.504423Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-01-04T02:45:42.504450Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-01-04T02:45:42.506431Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-01-04T02:45:42.506579Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-01-04T02:45:42.506715Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-01-04T02:45:42.506770Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-01-04T02:45:42.506805Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2685:4674] 2026-01-04T02:45:42.506967Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-01-04T02:45:42.506999Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:2685:4674] 2026-01-04T02:45:42.507129Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 
72057594046678944 2026-01-04T02:45:42.507213Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-01-04T02:45:42.507242Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2685:4674] 2026-01-04T02:45:42.507386Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-01-04T02:45:42.507464Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-01-04T02:45:42.507493Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2685:4674] 2026-01-04T02:45:42.507628Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-01-04T02:45:42.507656Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2685:4674] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 >> DataShardOutOfOrder::TestSecondaryClearanceAfterShardRestartRace [GOOD] >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead >> TSchemeShardSysNames::ESchemeOpCreateColumnStore-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-anonymous >> TConsoleTests::TestSchemeShardErrorForwarding [GOOD] >> TConsoleTests::TestScaleRecommenderPolicies >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_Init_oo8 [GOOD] Test command err: 2026-01-04T02:45:39.409554Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:39.411051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:39.411093Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:39.417793Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:39.418082Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:45:39.418264Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:39.448820Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:39.454762Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:39.454825Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:39.456032Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 
2026-01-04T02:45:39.456092Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:45:39.456152Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:45:39.456422Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:39.457366Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:39.457424Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:45:39.515292Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:39.534854Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:45:39.535006Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:39.535115Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:45:39.535170Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:45:39.535232Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:45:39.535268Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:39.535458Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:39.535492Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:39.535724Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:45:39.535808Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:45:39.535868Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:39.535899Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:39.535927Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:45:39.535962Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:45:39.535993Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:45:39.536028Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:45:39.536070Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:39.536149Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:39.536182Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 
2026-01-04T02:45:39.536236Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:45:39.542009Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:45:39.542099Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:39.542209Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:45:39.542354Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:45:39.542390Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:45:39.542430Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:45:39.542477Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:45:39.542524Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:45:39.542554Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:45:39.542580Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:39.542808Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:39.542834Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:45:39.542859Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:45:39.542884Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:39.542921Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:45:39.542952Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:45:39.542983Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:45:39.543018Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:39.543045Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:39.555073Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:45:39.555142Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:39.555181Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:39.555233Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:45:39.555306Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:39.555842Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:39.555903Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:39.555952Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:45:39.556089Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 2 MediatorId# 0 TabletID 9437184} 2026-01-04T02:45:39.556123Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:45:39.556260Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [2:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:39.556315Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [2:1] at 9437184 is Executed 2026-01-04T02:45:39.556380Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [2:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:45:39.556427Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [2:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:45:39.559554Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 2 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 2 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:45:39.559614Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:39.559819Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:39.559851Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:39.559903Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:39.559936Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:39.559974Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:39.560011Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2:1] in PlanQueue unit at 9437184 2026-01-04T02:45:39.560037Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [2:1] at 9437184 on unit PlanQueue 2026-01-04T02:45:39. ... 
latency: 2 ms, propose latency: 3 ms 2026-01-04T02:45:43.850409Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-01-04T02:45:43.850429Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:45:43.850490Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-04T02:45:43.850511Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:149] at 9437186 on unit CompleteOperation 2026-01-04T02:45:43.850541Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-04T02:45:43.850569Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-04T02:45:43.850586Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:45:43.850661Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-04T02:45:43.850677Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:152] at 9437186 on unit CompleteOperation 2026-01-04T02:45:43.850712Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-04T02:45:43.850753Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-04T02:45:43.850777Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:45:43.850983Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 6 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-01-04T02:45:43.851016Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:43.851043Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2026-01-04T02:45:43.851124Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 6 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-01-04T02:45:43.851145Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:43.851168Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2026-01-04T02:45:43.851238Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 6 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 
9437186 Flags# 0 Seqno# 46} 2026-01-04T02:45:43.851255Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:43.851269Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2026-01-04T02:45:43.851339Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 6 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-01-04T02:45:43.851358Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:43.851388Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2026-01-04T02:45:43.851445Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 6 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-01-04T02:45:43.851465Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:43.851527Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2026-01-04T02:45:43.851587Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-04T02:45:43.851604Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:43.851634Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-01-04T02:45:43.851706Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-04T02:45:43.851729Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:43.851745Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2026-01-04T02:45:43.851839Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:43.851864Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:149] at 9437184 on unit CompleteOperation 2026-01-04T02:45:43.851899Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 149] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-04T02:45:43.851966Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-04T02:45:43.852024Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 
2026-01-04T02:45:43.852185Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:43.852213Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:151] at 9437184 on unit CompleteOperation 2026-01-04T02:45:43.852248Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 151] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-04T02:45:43.852297Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-04T02:45:43.852315Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:43.852385Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:43.852443Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:152] at 9437184 on unit CompleteOperation 2026-01-04T02:45:43.852466Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 152] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-04T02:45:43.852493Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-04T02:45:43.852512Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:43.852588Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:43.852603Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [6:154] at 9437184 on unit CompleteOperation 2026-01-04T02:45:43.852624Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [6 : 154] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-04T02:45:43.852658Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-04T02:45:43.852688Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:43.852835Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 6 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-04T02:45:43.852874Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:43.852910Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2026-01-04T02:45:43.853000Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 6 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-04T02:45:43.853019Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event 
TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:43.853034Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2026-01-04T02:45:43.853070Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 6 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-04T02:45:43.853086Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:43.853106Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-04T02:45:43.853180Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 6 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-04T02:45:43.853206Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:43.853229Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-04T02:44:09.962766Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:09.962869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:09.962915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:09.962953Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:09.963014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:09.963043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:09.963102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:09.963171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:09.963997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:09.965034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:10.105167Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:44:10.105273Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:10.106150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:10.115521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:10.119330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:10.119553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:10.124450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:10.124657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:10.125372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.125634Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:10.130387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:10.130604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:10.131622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:10.131675Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:10.131743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:10.131803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:10.131862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:10.131998Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.137951Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:239:2058] recipient: [1:15:2062] 2026-01-04T02:44:10.251342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:10.251631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.251862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:44:10.251907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:44:10.252140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:44:10.252197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:10.254390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.254600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:44:10.254820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.254897Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:44:10.254937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:44:10.254970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:44:10.256876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.256937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:44:10.256995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:44:10.258743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.258803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.258858Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.258920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:44:10.262453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:10.264172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:44:10.264346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:44:10.265387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.265514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:10.265556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.265890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:44:10.265948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.266131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:10.266221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:44:10.268160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 
as 3 2026-01-04T02:45:43.058959Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-01-04T02:45:43.058987Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-01-04T02:45:43.059018Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-01-04T02:45:43.059046Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2026-01-04T02:45:43.059072Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 3 2026-01-04T02:45:43.060387Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:43.060486Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:43.060526Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:45:43.060567Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-01-04T02:45:43.060607Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-01-04T02:45:43.062013Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:43.062106Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:43.062140Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:45:43.062175Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-01-04T02:45:43.062214Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-01-04T02:45:43.064131Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 
72057594046678944, cookie: 202 2026-01-04T02:45:43.064224Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:43.064257Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:45:43.064288Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2026-01-04T02:45:43.064324Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-01-04T02:45:43.064801Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:43.064889Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:43.064924Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:45:43.064958Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 3 2026-01-04T02:45:43.064991Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 2 2026-01-04T02:45:43.065058Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-01-04T02:45:43.067813Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-04T02:45:43.067955Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-04T02:45:43.069761Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-04T02:45:43.069907Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-01-04T02:45:43.071479Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-01-04T02:45:43.071523Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-01-04T02:45:43.073244Z node 32 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-01-04T02:45:43.073365Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-01-04T02:45:43.073403Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2682:4671] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-01-04T02:45:43.074786Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-01-04T02:45:43.074828Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-01-04T02:45:43.074907Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-01-04T02:45:43.074936Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-01-04T02:45:43.075000Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-01-04T02:45:43.075029Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-01-04T02:45:43.075091Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-01-04T02:45:43.075120Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-01-04T02:45:43.075181Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-01-04T02:45:43.075207Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-01-04T02:45:43.077408Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-01-04T02:45:43.077565Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-01-04T02:45:43.077704Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-01-04T02:45:43.077760Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-01-04T02:45:43.077795Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2685:4674] 2026-01-04T02:45:43.078030Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-01-04T02:45:43.078068Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:2685:4674] 2026-01-04T02:45:43.078203Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 
72057594046678944 2026-01-04T02:45:43.078298Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-01-04T02:45:43.078328Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2685:4674] 2026-01-04T02:45:43.078483Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-01-04T02:45:43.078575Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-01-04T02:45:43.078607Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2685:4674] 2026-01-04T02:45:43.078758Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-01-04T02:45:43.078791Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2685:4674] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 >> DataShardTxOrder::DelayData [GOOD] |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreatePersQueueGroup-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-anonymous >> DataShardWrite::DelayedVolatileTxAndEvWrite [GOOD] >> DataShardWrite::DistributedInsertReadSetWithoutLocks+Volatile >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::DelayData [GOOD] Test command err: 2026-01-04T02:45:34.734352Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:34.736144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:34.736199Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:34.745966Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:34.746302Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:45:34.746589Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:34.788672Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:34.795364Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:34.795444Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:34.796810Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:45:34.796867Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 
2026-01-04T02:45:34.796924Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:45:34.797228Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:34.798237Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:34.798295Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:45:34.870124Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:34.902137Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:45:34.902282Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:34.902376Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:45:34.902423Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:45:34.902469Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:45:34.902500Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:34.902697Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.902732Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.902937Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:45:34.903015Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:45:34.903072Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:34.903109Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:34.903134Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:45:34.903158Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:45:34.903196Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:45:34.903240Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:45:34.903283Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:34.903363Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.903398Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.903451Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# 
[1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:45:34.909753Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\002\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:45:34.909838Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:34.909941Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:45:34.910086Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:45:34.910131Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:45:34.910189Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:45:34.910259Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:45:34.910309Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:45:34.910353Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:45:34.910387Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:34.910657Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:34.910701Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:45:34.910735Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:45:34.910781Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:34.910826Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:45:34.910863Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:45:34.910890Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:45:34.910913Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:34.910941Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:34.922886Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:45:34.922951Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:34.922986Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:34.923026Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:45:34.923099Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:34.923576Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.923625Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.923656Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:45:34.923739Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:45:34.923777Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:45:34.923876Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:34.923910Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:45:34.923936Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:45:34.923965Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:45:34.926622Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:45:34.926670Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:34.926835Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.926866Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.926904Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:34.926931Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:34.926965Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:34.927025Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:45:34.927054Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... 
plan for [1000005:506] at 9437184 has finished 2026-01-04T02:45:44.775064Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:44.775101Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:44.775142Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000005:507] in PlanQueue unit at 9437184 2026-01-04T02:45:44.775469Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:238:2230], Recipient [1:238:2230]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:44.775501Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:44.775534Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:44.775556Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:44.775579Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:282: Return cached ready operation [1000005:507] at 9437184 2026-01-04T02:45:44.775609Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit PlanQueue 2026-01-04T02:45:44.775629Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-04T02:45:44.775648Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit PlanQueue 2026-01-04T02:45:44.775665Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit LoadTxDetails 2026-01-04T02:45:44.775682Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit LoadTxDetails 2026-01-04T02:45:44.776144Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437184 loaded tx from db 1000005:507 keys extracted: 1 2026-01-04T02:45:44.776190Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-04T02:45:44.776212Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadTxDetails 2026-01-04T02:45:44.776232Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit FinalizeDataTxPlan 2026-01-04T02:45:44.776252Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit FinalizeDataTxPlan 2026-01-04T02:45:44.776277Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-04T02:45:44.776291Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit FinalizeDataTxPlan 2026-01-04T02:45:44.776305Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit BuildAndWaitDependencies 2026-01-04T02:45:44.776321Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit BuildAndWaitDependencies 2026-01-04T02:45:44.776379Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation 
[1000005:507] is the new logically complete end at 9437184 2026-01-04T02:45:44.776402Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000005:507] is the new logically incomplete end at 9437184 2026-01-04T02:45:44.776420Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000005:507] at 9437184 2026-01-04T02:45:44.776453Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-04T02:45:44.776469Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildAndWaitDependencies 2026-01-04T02:45:44.776494Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit BuildDataTxOutRS 2026-01-04T02:45:44.776512Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit BuildDataTxOutRS 2026-01-04T02:45:44.776540Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-04T02:45:44.776555Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit BuildDataTxOutRS 2026-01-04T02:45:44.776568Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit StoreAndSendOutRS 2026-01-04T02:45:44.776584Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit StoreAndSendOutRS 2026-01-04T02:45:44.776612Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-04T02:45:44.776630Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit StoreAndSendOutRS 2026-01-04T02:45:44.776644Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit PrepareDataTxInRS 2026-01-04T02:45:44.776657Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit PrepareDataTxInRS 2026-01-04T02:45:44.776682Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-04T02:45:44.776709Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit PrepareDataTxInRS 2026-01-04T02:45:44.776724Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit LoadAndWaitInRS 2026-01-04T02:45:44.776737Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit LoadAndWaitInRS 2026-01-04T02:45:44.776749Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-04T02:45:44.776762Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit LoadAndWaitInRS 2026-01-04T02:45:44.776785Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit BlockFailPoint 2026-01-04T02:45:44.776815Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit BlockFailPoint 2026-01-04T02:45:44.776830Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] 
at 9437184 is Executed 2026-01-04T02:45:44.776844Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit BlockFailPoint 2026-01-04T02:45:44.776860Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit ExecuteDataTx 2026-01-04T02:45:44.776873Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit ExecuteDataTx 2026-01-04T02:45:44.777099Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000005:507] at tablet 9437184 with status COMPLETE 2026-01-04T02:45:44.777167Z node 1 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000005:507] at 9437184: {NSelectRow: 0, NSelectRange: 0, NUpdateRow: 1, NEraseRow: 0, SelectRowRows: 0, SelectRowBytes: 0, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 11, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-04T02:45:44.777218Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:45:44.777246Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit ExecuteDataTx 2026-01-04T02:45:44.777279Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit CompleteOperation 2026-01-04T02:45:44.777313Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit CompleteOperation 2026-01-04T02:45:44.777443Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is DelayComplete 2026-01-04T02:45:44.777463Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit CompleteOperation 2026-01-04T02:45:44.777482Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000005:507] at 9437184 to execution unit CompletedOperations 2026-01-04T02:45:44.777499Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000005:507] at 9437184 on unit CompletedOperations 2026-01-04T02:45:44.777520Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000005:507] at 9437184 is Executed 2026-01-04T02:45:44.777534Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000005:507] at 9437184 executing on unit CompletedOperations 2026-01-04T02:45:44.777550Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000005:507] at 9437184 has finished 2026-01-04T02:45:44.777569Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:44.777586Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:44.777629Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:45:44.777657Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:45:44.792277Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000005 txid# 506 txid# 507} 2026-01-04T02:45:44.792362Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: 
Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000005} 2026-01-04T02:45:44.792429Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:44.792475Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:506] at 9437184 on unit CompleteOperation 2026-01-04T02:45:44.792563Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 506] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:45:44.792638Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:44.792811Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:44.792835Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:507] at 9437184 on unit CompleteOperation 2026-01-04T02:45:44.792870Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 507] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:45:44.792942Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardScan::ScanFollowedByUpdate [GOOD] >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-clusteradmin >> DataShardWrite::UncommittedUpdateLockMissingRow [GOOD] >> DataShardWrite::UncommittedUpdateLockNewRowAboveSnapshot >> TKeyValueTest::TestEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestGetStatusWorks >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithSyncIndex [GOOD] >> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardScan::ScanFollowedByUpdate [GOOD] Test command err: 2026-01-04T02:45:43.951915Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:43.953423Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:43.953464Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:43.961099Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:43.961456Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:45:43.961678Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:43.992539Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:43.998435Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:43.998504Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:43.999818Z 
node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:45:43.999877Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:45:43.999943Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:45:44.000272Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:44.001393Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:44.001469Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:45:44.064719Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:44.082793Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:45:44.082994Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:44.083120Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:45:44.083182Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:45:44.083257Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:45:44.083309Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:44.083548Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:44.083599Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:44.083916Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:45:44.084010Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:45:44.084071Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:44.084103Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:44.084133Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:45:44.084163Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:45:44.084200Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:45:44.084238Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:45:44.084271Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:44.084336Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:44.084362Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:44.084417Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:45:44.089444Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:45:44.089502Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:44.089581Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:45:44.089731Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:45:44.089768Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:45:44.089845Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:45:44.089894Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:45:44.089927Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:45:44.089969Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:45:44.090001Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:44.090261Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:44.090290Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:45:44.090319Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:45:44.090343Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:44.090380Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:45:44.090416Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:45:44.090446Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:45:44.090471Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:44.090498Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:44.102502Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 
9437184 2026-01-04T02:45:44.102575Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:44.102612Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:44.102655Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:45:44.102720Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:44.103281Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:44.103349Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:44.103394Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:45:44.103530Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:45:44.103564Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:45:44.103692Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:44.103734Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:45:44.103789Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:45:44.103840Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:45:44.111493Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:45:44.111580Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:44.111853Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:44.111902Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:44.111963Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:44.112009Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:44.112049Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:44.112102Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:45:44.112145Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [100 ... on::Execute at 9437186 2026-01-04T02:45:45.828003Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:45.828027Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000006:36] at 9437186 for ReadTableScan 2026-01-04T02:45:45.828057Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437186 on unit ReadTableScan 2026-01-04T02:45:45.828084Z node 1 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [1000006:36] at 9437186 error: , IsFatalError: 0 2026-01-04T02:45:45.828117Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437186 is Executed 2026-01-04T02:45:45.828140Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437186 executing on unit ReadTableScan 2026-01-04T02:45:45.828165Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437186 to execution unit CompleteOperation 2026-01-04T02:45:45.828190Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437186 on unit CompleteOperation 2026-01-04T02:45:45.828351Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437186 is DelayComplete 2026-01-04T02:45:45.828377Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437186 executing on unit CompleteOperation 2026-01-04T02:45:45.828410Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437186 to execution unit CompletedOperations 2026-01-04T02:45:45.828435Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437186 on unit CompletedOperations 2026-01-04T02:45:45.828456Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437186 is Executed 2026-01-04T02:45:45.828468Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437186 executing on unit CompletedOperations 2026-01-04T02:45:45.828487Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000006:36] at 9437186 has finished 2026-01-04T02:45:45.828519Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437186 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:45.828543Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437186 2026-01-04T02:45:45.828566Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437186 has no attached operations 2026-01-04T02:45:45.828589Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437186 2026-01-04T02:45:45.828733Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:238:2230], Recipient [1:238:2230]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:45.828818Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:45.828854Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:45.828873Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: 
GetNextActiveOp at 9437184 active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:45.828893Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [1000006:36] at 9437184 for ReadTableScan 2026-01-04T02:45:45.828912Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437184 on unit ReadTableScan 2026-01-04T02:45:45.828942Z node 1 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [1000006:36] at 9437184 error: , IsFatalError: 0 2026-01-04T02:45:45.828963Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437184 is Executed 2026-01-04T02:45:45.828989Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437184 executing on unit ReadTableScan 2026-01-04T02:45:45.829006Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437184 to execution unit CompleteOperation 2026-01-04T02:45:45.829021Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437184 on unit CompleteOperation 2026-01-04T02:45:45.829129Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437184 is DelayComplete 2026-01-04T02:45:45.829155Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437184 executing on unit CompleteOperation 2026-01-04T02:45:45.829198Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437184 to execution unit CompletedOperations 2026-01-04T02:45:45.829217Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437184 on unit CompletedOperations 2026-01-04T02:45:45.829237Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437184 is Executed 2026-01-04T02:45:45.829251Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437184 executing on unit CompletedOperations 2026-01-04T02:45:45.829265Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000006:36] at 9437184 has finished 2026-01-04T02:45:45.829282Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:45.829296Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:45.829312Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:45:45.829341Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:45:45.829462Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:349:2316], Recipient [1:349:2316]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:45.829483Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:45.829520Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437185 2026-01-04T02:45:45.829538Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:45.829556Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found 
ready candidate operation [1000006:36] at 9437185 for ReadTableScan 2026-01-04T02:45:45.829571Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437185 on unit ReadTableScan 2026-01-04T02:45:45.829585Z node 1 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [1000006:36] at 9437185 error: , IsFatalError: 0 2026-01-04T02:45:45.829608Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437185 is Executed 2026-01-04T02:45:45.829621Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437185 executing on unit ReadTableScan 2026-01-04T02:45:45.829634Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437185 to execution unit CompleteOperation 2026-01-04T02:45:45.829662Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437185 on unit CompleteOperation 2026-01-04T02:45:45.829778Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437185 is DelayComplete 2026-01-04T02:45:45.829819Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437185 executing on unit CompleteOperation 2026-01-04T02:45:45.829849Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000006:36] at 9437185 to execution unit CompletedOperations 2026-01-04T02:45:45.829864Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000006:36] at 9437185 on unit CompletedOperations 2026-01-04T02:45:45.829883Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000006:36] at 9437185 is Executed 2026-01-04T02:45:45.829900Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000006:36] at 9437185 executing on unit CompletedOperations 2026-01-04T02:45:45.829913Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000006:36] at 9437185 has finished 2026-01-04T02:45:45.829926Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:45.829943Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-01-04T02:45:45.829957Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2026-01-04T02:45:45.829972Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-01-04T02:45:45.842661Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-04T02:45:45.842723Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-04T02:45:45.842752Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000006:36] at 9437185 on unit CompleteOperation 2026-01-04T02:45:45.842820Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000006 : 36] from 9437185 at tablet 9437185 send result to client [1:103:2137], exec latency: 4 ms, propose latency: 5 ms 2026-01-04T02:45:45.842873Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 2026-01-04T02:45:45.843045Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 
2026-01-04T02:45:45.843065Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-04T02:45:45.843082Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000006:36] at 9437186 on unit CompleteOperation 2026-01-04T02:45:45.843107Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000006 : 36] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 4 ms, propose latency: 5 ms 2026-01-04T02:45:45.843126Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:45:45.843188Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:45.843204Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:45.843227Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000006:36] at 9437184 on unit CompleteOperation 2026-01-04T02:45:45.843254Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000006 : 36] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 4 ms, propose latency: 5 ms 2026-01-04T02:45:45.843272Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReadSetAck [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:39.103255Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:39.103915Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:39.107019Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:685:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T02:45:39.186416Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:45:39.192514Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:45:39.193306Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2225], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:39.193583Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:39.193746Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:39.195111Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:39.195148Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:39.584681Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:39.706254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:39.706370Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:39.706794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:39.706907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:39.764711Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:45:39.765348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:39.765775Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:39.930237Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:45:39.944187Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:40.568039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:40.676437Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:40.676545Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:41.007322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:41.099910Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [2:1587:2381], Recipient [2:1613:2394]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:41.104129Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [2:1587:2381], Recipient [2:1613:2394]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:41.104446Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:1613:2394] 2026-01-04T02:45:41.104687Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:41.154471Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [2:1587:2381], Recipient [2:1613:2394]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:41.158978Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:41.159503Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:41.161154Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:41.161275Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:41.161330Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:41.161674Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:41.161921Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:41.161996Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:1637:2394] in generation 1 2026-01-04T02:45:41.164498Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:41.199557Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:41.199776Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:41.199903Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, 
actorId: [2:1641:2411] 2026-01-04T02:45:41.199940Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:41.199978Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:41.200011Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:41.200272Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1613:2394], Recipient [2:1613:2394]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:41.200322Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:41.200600Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:41.200683Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:41.200778Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:41.200820Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:41.200869Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:45:41.200921Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:45:41.200955Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:45:41.200986Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:41.201028Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:41.201136Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1610:2392], Recipient [2:1613:2394]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:41.201182Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:41.201222Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:1596:3052], serverId# [2:1610:2392], sessionId# [0:0:0] 2026-01-04T02:45:41.201675Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:814:2460], Recipient [2:1610:2392] 2026-01-04T02:45:41.201720Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:41.201873Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:41.202084Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:45:41.202158Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:41.202243Z node 2 
:TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:41.202308Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:45:41.202347Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:45:41.202380Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T02:45:41.202413Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:45:41.202693Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 720751862240 ... ecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3503 UnfrozenTablets: 72075186224037888 2026-01-04T02:45:44.862051Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:44.862094Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3503:281474976715670] at 72075186224037888 on unit ExecuteWrite 2026-01-04T02:45:44.862126Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 2 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715670 2026-01-04T02:45:44.862163Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 3 at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715670 2026-01-04T02:45:44.862191Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3503:281474976715670] at 72075186224037888 on unit CompleteWrite 2026-01-04T02:45:44.862235Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:44.862298Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-04T02:45:44.862329Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-04T02:45:44.862350Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-04T02:45:44.862430Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 3503} 2026-01-04T02:45:44.862547Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-04T02:45:44.862578Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3503:281474976715670] at 72075186224037890 on unit ExecuteWrite 2026-01-04T02:45:44.862605Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 1 at 72075186224037890 from 72075186224037890 to 72075186224037888 txId 281474976715670 2026-01-04T02:45:44.862635Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 2 at 72075186224037890 from 72075186224037890 to 72075186224037889 txId 281474976715670 2026-01-04T02:45:44.862662Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [3503:281474976715670] at 72075186224037890 on unit CompleteWrite 2026-01-04T02:45:44.862698Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 72075186224037890 2026-01-04T02:45:44.862755Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2026-01-04T02:45:44.862781Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2026-01-04T02:45:44.863161Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [2:2479:2533], Recipient [2:2351:2503]: {TEvReadSet step# 3503 txid# 281474976715670 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-04T02:45:44.863202Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-04T02:45:44.863233Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037888 source 72075186224037890 dest 72075186224037888 producer 72075186224037890 txId 281474976715670 2026-01-04T02:45:44.863285Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037888 got read set: {TEvReadSet step# 3503 txid# 281474976715670 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletProducer# 72075186224037890 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-04T02:45:44.863416Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:764: Complete volatile write [3503 : 281474976715670] from 72075186224037888 at tablet 72075186224037888 send result to client [1:2574:3613] 2026-01-04T02:45:44.863626Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [2:2351:2503], Recipient [2:2479:2533]: {TEvReadSet step# 3503 txid# 281474976715670 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2026-01-04T02:45:44.863661Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-04T02:45:44.863691Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715670 2026-01-04T02:45:44.863738Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 3503 txid# 281474976715670 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 3 Flags# 0} 2026-01-04T02:45:44.863841Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:764: Complete volatile write [3503 : 281474976715670] from 72075186224037890 at tablet 72075186224037890 send result to client [1:2576:3613] 2026-01-04T02:45:44.864426Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-04T02:45:44.864738Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:44.865362Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [2:2351:2503], Recipient [1:2463:3587] 2026-01-04T02:45:44.865399Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-04T02:45:44.865438Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715670 2026-01-04T02:45:44.865496Z 
node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3503 txid# 281474976715670 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2026-01-04T02:45:44.865548Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-04T02:45:44.866028Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [2:65:2065] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 6 LatestStep: 3503 UnfrozenTablets: 72075186224037890 2026-01-04T02:45:44.866162Z node 2 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [2:65:2065] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 3503} 2026-01-04T02:45:44.866705Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287425, Sender [2:2479:2533], Recipient [1:2463:3587] 2026-01-04T02:45:44.866745Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3172: StateWork, processing event TEvTxProcessing::TEvReadSet 2026-01-04T02:45:44.866784Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037890 dest 72075186224037889 producer 72075186224037890 txId 281474976715670 2026-01-04T02:45:44.866853Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 3503 txid# 281474976715670 TabletSource# 72075186224037890 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037890 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2026-01-04T02:45:44.866949Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:764: Complete volatile write [3503 : 281474976715670] from 72075186224037889 at tablet 72075186224037889 send result to client [1:2575:3613] 2026-01-04T02:45:44.867689Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:45:44.874848Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-04T02:45:44.875014Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [2:2351:2503], Recipient [2:2479:2533]: {TEvReadSet step# 3503 txid# 281474976715670 TabletSource# 72075186224037890 TabletDest# 72075186224037888 SetTabletConsumer# 72075186224037888 Flags# 0 Seqno# 1} 2026-01-04T02:45:44.875055Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:44.875103Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 281474976715670 2026-01-04T02:45:44.875335Z node 2 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2026-01-04T02:45:44.875543Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [2:2479:2533], Recipient [2:2351:2503]: {TEvReadSet step# 3503 txid# 281474976715670 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 3} 2026-01-04T02:45:44.875582Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:44.875627Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 
72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715670 2026-01-04T02:45:44.875727Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-04T02:45:44.876080Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [2:2351:2503], Recipient [1:2411:3546] 2026-01-04T02:45:44.876119Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:44.876161Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715670 2026-01-04T02:45:44.876612Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:2411:3546], Recipient [2:2351:2503] 2026-01-04T02:45:44.876651Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:44.876687Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715670 2026-01-04T02:45:44.876733Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:2411:3546], Recipient [2:2479:2533] 2026-01-04T02:45:44.876759Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:44.876789Z node 2 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037889 consumer 72075186224037889 txId 281474976715670 2026-01-04T02:45:44.877127Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [2:2479:2533], Recipient [1:2411:3546] 2026-01-04T02:45:44.877176Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:44.877213Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715670 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::LocksBrokenStats [GOOD] >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] >> DataShardOutOfOrder::UncommittedReads [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadPriority [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:40.071580Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:40.162041Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:40.162359Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:40.162398Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:40.391401Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:40.391551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:40.445264Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494737914262 != 1767494737914265 2026-01-04T02:45:40.453692Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:40.495608Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:40.591517Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:40.865578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:40.867364Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 1 HANDLE EvProposeTransaction marker# C0 2026-01-04T02:45:40.867406Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 1 step# 1500 Status# 16 SEND to# [1:399:2398] Proxy marker# C1 2026-01-04T02:45:40.878297Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:40.972820Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 1 has been planned 2026-01-04T02:45:40.972910Z node 1 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 1 for mediator 72057594046382081 tablet 72057594046644480 2026-01-04T02:45:40.973103Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:268: Coordinator# 72057594046316545 scheduling step 2000 in 0.500000s at 1.950000s 2026-01-04T02:45:40.973426Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 1500, txid# 1 marker# C2 2026-01-04T02:45:40.973474Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 1 stepId# 1500 Status# 17 SEND EvProposeTransactionStatus to# [1:399:2398] Proxy 2026-01-04T02:45:40.974768Z node 1 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:397: tablet# 72057594046316545 HANDLE EvMediatorQueueConfirmations MediatorId# 72057594046382081 2026-01-04T02:45:40.974853Z node 1 :TX_COORDINATOR 
DEBUG: coordinator__mediators_confirmations.cpp:84: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 tablet 72057594046644480 removed=1 2026-01-04T02:45:40.974880Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:91: at tablet# 72057594046316545 [2:10] persistent tx 1 for mediator 72057594046382081 acknowledged 2026-01-04T02:45:40.974926Z node 1 :TX_COORDINATOR DEBUG: coordinator__mediators_confirmations.cpp:99: at tablet# 72057594046316545 [2:10] persistent tx 1 acknowledged 2026-01-04T02:45:40.999790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:41.034278Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:876:2759], Recipient [1:884:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:41.034997Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:876:2759], Recipient [1:884:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:41.035184Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:884:2765] 2026-01-04T02:45:41.035397Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:41.067685Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:876:2759], Recipient [1:884:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:41.068276Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:41.068372Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:41.069713Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:41.069781Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:41.069837Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:41.070093Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:41.070185Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:41.070238Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:900:2765] in generation 1 2026-01-04T02:45:41.070569Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:41.088339Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:41.088499Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:41.088569Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:902:2775] 2026-01-04T02:45:41.088596Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:41.088619Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:41.088643Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:41.088765Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:884:2765], Recipient [1:884:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:41.088798Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:41.089187Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:41.089289Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:41.089363Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:41.089418Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:41.089453Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:45:41.089476Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:45:41.089503Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:45:41.089535Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:41.089573Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:41.089682Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:887:2766], Recipient [1:884:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:41.089713Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:41.089790Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:883:2764], serverId# [1:887:2766], sessionId# [0:0:0] 2026-01-04T02:45:41.090153Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:887:2766] 2026-01-04T02:45:41.090188Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:41.090280Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:41.090451Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:45:41.090484Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:41.090547Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:41.090578Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for 
[0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:45:41.090602Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:45:41.090639Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T02:45:41.090672Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:45:41.090903Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:41.090929Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72 ... shard_impl.h:3153: StateWork, received event# 269553215, Sender [1:1624:3263], Recipient [1:1565:3228]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4500 TxId: 18446744073709551615 } LockTxId: 281474976715683 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2026-01-04T02:45:46.007591Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-04T02:45:46.007643Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CheckRead 2026-01-04T02:45:46.007704Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-04T02:45:46.007734Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CheckRead 2026-01-04T02:45:46.007775Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:45:46.007801Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:45:46.007839Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037888 2026-01-04T02:45:46.007873Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-04T02:45:46.007893Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:45:46.007907Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T02:45:46.007921Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2026-01-04T02:45:46.008008Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 4500 TxId: 18446744073709551615 } LockTxId: 281474976715683 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-04T02:45:46.008210Z node 1 :TX_DATASHARD 
DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715683, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-04T02:45:46.008249Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v4500/18446744073709551615 2026-01-04T02:45:46.008280Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[1:1624:3263], 0} after executionsCount# 1 2026-01-04T02:45:46.008314Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[1:1624:3263], 0} sends rowCount# 5, bytes# 160, quota rows left# 996, quota bytes left# 5242720, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:45:46.008368Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[1:1624:3263], 0} finished in read 2026-01-04T02:45:46.008420Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-04T02:45:46.008436Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T02:45:46.008453Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:45:46.008473Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:45:46.008502Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-04T02:45:46.008516Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:45:46.008531Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-01-04T02:45:46.008555Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T02:45:46.008619Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-04T02:45:46.009856Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:1624:3263], Recipient [1:1565:3228]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:45:46.009912Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } } 2026-01-04T02:45:46.140119Z node 1 :TX_COORDINATOR DEBUG: coordinator__acquire_read_step.cpp:97: tablet# 72057594046316545 HANDLE TEvAcquireReadStep 2026-01-04T02:45:46.140211Z node 1 :TX_COORDINATOR TRACE: coordinator_impl.cpp:293: Coordinator# 72057594046316545 scheduling step 5500 in 0.499900s at 5.450000s 2026-01-04T02:45:46.143149Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:1648:3280], Recipient [1:1565:3228]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 
72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715686 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2026-01-04T02:45:46.143314Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-04T02:45:46.143394Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2026-01-04T02:45:46.143500Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-04T02:45:46.143541Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2026-01-04T02:45:46.143577Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:45:46.143611Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:45:46.143661Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2026-01-04T02:45:46.143695Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-04T02:45:46.143722Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:45:46.143744Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T02:45:46.143781Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2026-01-04T02:45:46.143893Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 5000 TxId: 18446744073709551615 } LockTxId: 281474976715686 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 1 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-04T02:45:46.144211Z node 1 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715686, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-04T02:45:46.144270Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v5000/18446744073709551615 2026-01-04T02:45:46.144312Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[1:1648:3280], 0} after executionsCount# 1 2026-01-04T02:45:46.144378Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[1:1648:3280], 0} sends rowCount# 6, bytes# 192, quota rows left# 995, quota bytes left# 5242688, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:45:46.144459Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[1:1648:3280], 0} finished in read 2026-01-04T02:45:46.144536Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] 
at 72075186224037888 is Executed 2026-01-04T02:45:46.144564Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T02:45:46.144588Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:45:46.144612Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:45:46.144655Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-04T02:45:46.144675Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:45:46.144698Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037888 has finished 2026-01-04T02:45:46.144736Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T02:45:46.144827Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-04T02:45:46.145020Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [1:70:2117], Recipient [1:1565:3228]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715686 LockNode: 1 Status: STATUS_SUBSCRIBED 2026-01-04T02:45:46.145766Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:1648:3280], Recipient [1:1565:3228]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:45:46.145839Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 5 } items { uint32_value: 5 } }, { items { uint32_value: 7 } items { uint32_value: 7 } }, { items { uint32_value: 9 } items { uint32_value: 9 } }, { items { uint32_value: 11 } items { uint32_value: 11 } } |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedTimeoutSplit [GOOD] >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] >> DataShardOutOfOrder::TestImmediateQueueThenSplit-UseSink [GOOD] >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestWriteToExtraChannelThenReadMixedChannelsReturnsOk [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] 
!Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:105:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:108:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] ... r refreshed! new actor is[22:86:2116] Leader for TabletID 72057594037927937 is [22:86:2116] sender: [22:202:2057] recipient: [22:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [23:56:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:59:2057] recipient: [23:53:2097] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:76:2057] recipient: [23:14:2061] !Reboot 72057594037927937 (actor [23:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:82:2057] recipient: [23:39:2086] Leader for TabletID 72057594037927937 is [23:58:2099] sender: [23:85:2057] recipient: [23:84:2115] Leader for TabletID 72057594037927937 is [23:86:2116] sender: [23:87:2057] recipient: [23:84:2115] !Reboot 72057594037927937 (actor [23:58:2099]) rebooted! !Reboot 72057594037927937 (actor [23:58:2099]) tablet resolver refreshed! new actor is[23:86:2116] Leader for TabletID 72057594037927937 is [23:86:2116] sender: [23:202:2057] recipient: [23:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [24:56:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:59:2057] recipient: [24:53:2097] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:76:2057] recipient: [24:14:2061] !Reboot 72057594037927937 (actor [24:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:83:2057] recipient: [24:39:2086] Leader for TabletID 72057594037927937 is [24:58:2099] sender: [24:86:2057] recipient: [24:85:2115] Leader for TabletID 72057594037927937 is [24:87:2116] sender: [24:88:2057] recipient: [24:85:2115] !Reboot 72057594037927937 (actor [24:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [24:58:2099]) tablet resolver refreshed! new actor is[24:87:2116] Leader for TabletID 72057594037927937 is [24:87:2116] sender: [24:203:2057] recipient: [24:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [25:56:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:59:2057] recipient: [25:53:2097] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:76:2057] recipient: [25:14:2061] !Reboot 72057594037927937 (actor [25:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:86:2057] recipient: [25:39:2086] Leader for TabletID 72057594037927937 is [25:58:2099] sender: [25:89:2057] recipient: [25:88:2118] Leader for TabletID 72057594037927937 is [25:90:2119] sender: [25:91:2057] recipient: [25:88:2118] !Reboot 72057594037927937 (actor [25:58:2099]) rebooted! !Reboot 72057594037927937 (actor [25:58:2099]) tablet resolver refreshed! new actor is[25:90:2119] Leader for TabletID 72057594037927937 is [25:90:2119] sender: [25:206:2057] recipient: [25:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:56:2057] recipient: [26:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [26:56:2057] recipient: [26:53:2097] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:59:2057] recipient: [26:53:2097] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:76:2057] recipient: [26:14:2061] !Reboot 72057594037927937 (actor [26:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:86:2057] recipient: [26:39:2086] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:89:2057] recipient: [26:88:2118] Leader for TabletID 72057594037927937 is [26:90:2119] sender: [26:91:2057] recipient: [26:88:2118] !Reboot 72057594037927937 (actor [26:58:2099]) rebooted! !Reboot 72057594037927937 (actor [26:58:2099]) tablet resolver refreshed! new actor is[26:90:2119] Leader for TabletID 72057594037927937 is [26:90:2119] sender: [26:206:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:53:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:59:2057] recipient: [27:53:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:76:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:87:2057] recipient: [27:39:2086] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:90:2057] recipient: [27:89:2118] Leader for TabletID 72057594037927937 is [27:91:2119] sender: [27:92:2057] recipient: [27:89:2118] !Reboot 72057594037927937 (actor [27:58:2099]) rebooted! !Reboot 72057594037927937 (actor [27:58:2099]) tablet resolver refreshed! 
new actor is[27:91:2119] Leader for TabletID 72057594037927937 is [27:91:2119] sender: [27:207:2057] recipient: [27:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:53:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:59:2057] recipient: [28:53:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:76:2057] recipient: [28:14:2061] !Reboot 72057594037927937 (actor [28:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:90:2057] recipient: [28:39:2086] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:93:2057] recipient: [28:92:2121] Leader for TabletID 72057594037927937 is [28:94:2122] sender: [28:95:2057] recipient: [28:92:2121] !Reboot 72057594037927937 (actor [28:58:2099]) rebooted! !Reboot 72057594037927937 (actor [28:58:2099]) tablet resolver refreshed! new actor is[28:94:2122] Leader for TabletID 72057594037927937 is [28:94:2122] sender: [28:210:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:59:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:76:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:90:2057] recipient: [29:39:2086] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:93:2057] recipient: [29:92:2121] Leader for TabletID 72057594037927937 is [29:94:2122] sender: [29:95:2057] recipient: [29:92:2121] !Reboot 72057594037927937 (actor [29:58:2099]) rebooted! !Reboot 72057594037927937 (actor [29:58:2099]) tablet resolver refreshed! new actor is[29:94:2122] Leader for TabletID 72057594037927937 is [29:94:2122] sender: [29:210:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:52:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:59:2057] recipient: [30:52:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:76:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:91:2057] recipient: [30:39:2086] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:94:2057] recipient: [30:93:2121] Leader for TabletID 72057594037927937 is [30:95:2122] sender: [30:96:2057] recipient: [30:93:2121] !Reboot 72057594037927937 (actor [30:58:2099]) rebooted! !Reboot 72057594037927937 (actor [30:58:2099]) tablet resolver refreshed! 
new actor is[30:95:2122] Leader for TabletID 72057594037927937 is [30:95:2122] sender: [30:211:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:59:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:76:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:93:2057] recipient: [31:39:2086] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:96:2057] recipient: [31:95:2123] Leader for TabletID 72057594037927937 is [31:97:2124] sender: [31:98:2057] recipient: [31:95:2123] !Reboot 72057594037927937 (actor [31:58:2099]) rebooted! !Reboot 72057594037927937 (actor [31:58:2099]) tablet resolver refreshed! new actor is[31:97:2124] Leader for TabletID 72057594037927937 is [31:97:2124] sender: [31:213:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:93:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:96:2057] recipient: [32:95:2123] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:98:2057] recipient: [32:95:2123] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:97:2124] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:213:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:94:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:97:2057] recipient: [33:96:2123] Leader for TabletID 72057594037927937 is [33:98:2124] sender: [33:99:2057] recipient: [33:96:2123] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! !Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! 
new actor is[33:98:2124] Leader for TabletID 72057594037927937 is [33:98:2124] sender: [33:214:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:54:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:54:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateSequence-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::UncommittedReads [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:44.711607Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:44.812966Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:44.813420Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:44.813477Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:45.065621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:45.065770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:45.130091Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494742508362 != 1767494742508365 2026-01-04T02:45:45.138571Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:45.179432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:45.274319Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:45.535447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:45.549219Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:45.650690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:45.678744Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:45.679546Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:45.679798Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:45:45.679980Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:45.718282Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:45.718852Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:45.718931Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:45.720223Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:45.720286Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:45.720328Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:45.720582Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:45.720687Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:45.720749Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:45:45.721103Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:45.751166Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:45.751325Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:45.751420Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:45:45.751449Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:45.751473Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:45.751498Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:45.751603Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:45.751640Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:45.751963Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:45.752097Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:45.752169Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:45.752201Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:45.752229Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:45:45.752257Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:45:45.752281Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:45:45.752303Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:45.752331Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-04T02:45:45.752414Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:45.752438Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:45.752491Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:45.752850Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-04T02:45:45.752894Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:45.752983Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:45.753162Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:45:45.753201Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:45.753259Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:45.753287Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:45:45.753315Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:45:45.753338Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T02:45:45.753362Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:45:45.753588Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:45.753648Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-04T02:45:45.753682Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:45:45.753713Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-04T02:45:45.753768Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-04T02:45:45.753798Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:45:45.753855Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-04T02:45:45.753884Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-04T02:45:45.753911Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:45.754521Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:45.754556Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:45:45.754580Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-04T02:45:45.754627Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715657 at tablet 72075186224037888 send to client, exec latency: 0 ms, prop ... 02:45:46.840577Z node 1 :TX_DATASHARD TRACE: datashard_write_operation.cpp:256: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 4) 2026-01-04T02:45:46.840634Z node 1 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:38:1] 2026-01-04T02:45:46.840718Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2026-01-04T02:45:46.840762Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-04T02:45:46.840796Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2026-01-04T02:45:46.840832Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:45:46.840877Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:45:46.840919Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3500/18446744073709551615 ImmediateWriteEdgeReplied# v3500/18446744073709551615 2026-01-04T02:45:46.840967Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2026-01-04T02:45:46.840998Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-04T02:45:46.841023Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:45:46.841044Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2026-01-04T02:45:46.841079Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2026-01-04T02:45:46.841114Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-04T02:45:46.841134Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2026-01-04T02:45:46.841148Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2026-01-04T02:45:46.841161Z 
node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2026-01-04T02:45:46.841181Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037888 2026-01-04T02:45:46.841227Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3500/18446744073709551615 ImmediateWriteEdgeReplied# v3500/18446744073709551615 2026-01-04T02:45:46.841315Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:6] at 72075186224037888, row count=1 2026-01-04T02:45:46.841344Z node 1 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-04T02:45:46.841384Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:45:46.841400Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2026-01-04T02:45:46.841433Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-04T02:45:46.841455Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T02:45:46.841487Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is DelayComplete 2026-01-04T02:45:46.841505Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-04T02:45:46.841535Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:45:46.841565Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:45:46.841598Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-04T02:45:46.841624Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:45:46.841669Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished ... 
blocked commit for tablet 72075186224037888 2026-01-04T02:45:46.935698Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:1175:2957], Recipient [1:885:2765]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-04T02:45:46.935936Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-04T02:45:46.936006Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3500/18446744073709551615 ImmediateWriteEdgeReplied# v3500/18446744073709551615 2026-01-04T02:45:46.936033Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v3500/18446744073709551615 2026-01-04T02:45:46.936084Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-01-04T02:45:46.936156Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T02:45:46.936190Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-01-04T02:45:46.936219Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:45:46.936268Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:45:46.936312Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-01-04T02:45:46.936349Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T02:45:46.936395Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:45:46.936425Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T02:45:46.936455Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-01-04T02:45:46.936567Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-04T02:45:46.936775Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is DelayComplete 2026-01-04T02:45:46.936812Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T02:45:46.936844Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:45:46.936877Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 
72075186224037888 on unit CompletedOperations 2026-01-04T02:45:46.936912Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T02:45:46.936934Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:45:46.936958Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-01-04T02:45:46.937010Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T02:45:47.009705Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:698: Actor# [1:26:2073] HANDLE NKikimrTxMediatorTimecast.TEvGranularUpdate Mediator: 72057594046382081 Bucket: 0 SubscriptionId: 1 LatestStep: 4000 2026-01-04T02:45:47.009780Z node 1 :TX_MEDIATOR_TIMECAST DEBUG: time_cast.cpp:631: Actor# [1:26:2073] HANDLE {TEvUpdate Mediator# 72057594046382081 Bucket# 0 TimeBarrier# 4000} 2026-01-04T02:45:47.136289Z node 1 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-04T02:45:47.136359Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T02:45:47.136407Z node 1 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 1000 ms, status: STATUS_COMPLETED 2026-01-04T02:45:47.136489Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:47.136582Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-04T02:45:47.136617Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:7] at 72075186224037888 on unit ExecuteRead 2026-01-04T02:45:47.136661Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[1:1175:2957], 0} after executionsCount# 1 2026-01-04T02:45:47.136708Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[1:1175:2957], 0} sends rowCount# 4, bytes# 128, quota rows left# 997, quota bytes left# 5242752, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:45:47.136847Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[1:1175:2957], 0} finished in read 2026-01-04T02:45:47.138889Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:1175:2957], Recipient [1:885:2765]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:45:47.138944Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } }, { items { uint32_value: 3 } items { uint32_value: 3 } }, { items { uint32_value: 4 } items { uint32_value: 4 } } |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::LocksBrokenStats [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:45:36.864339Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:36.982836Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:36.983183Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:36.983234Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:37.231618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:37.231726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:37.312443Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494734541129 != 1767494734541132 2026-01-04T02:45:37.324935Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:37.369688Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:37.453320Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:37.730672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:37.743577Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:37.846478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:37.879043Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:37.880042Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:37.880302Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-04T02:45:37.880534Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:37.923109Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:37.923876Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:37.924004Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:37.925654Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:37.925721Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:37.925778Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:37.926298Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:37.926416Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:37.926507Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:45:37.937170Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:37.970849Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:37.971009Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:37.971092Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:45:37.971122Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:37.971161Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:37.971191Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:37.971291Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:37.971325Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:37.971561Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:37.971623Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:37.971691Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:37.971769Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:37.971816Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:45:37.971868Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:45:37.971901Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:45:37.971935Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:37.971983Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-04T02:45:37.972094Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:37.972122Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:37.972169Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:37.972443Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-04T02:45:37.972491Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:37.972574Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:37.972722Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:45:37.972764Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:37.972821Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:37.972851Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:45:37.972881Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:45:37.972918Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T02:45:37.972948Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:45:37.973182Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:37.973236Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-04T02:45:37.973266Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:45:37.973287Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-04T02:45:37.973336Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-04T02:45:37.973357Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:45:37.973380Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-04T02:45:37.973403Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-04T02:45:37.973420Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:37.974553Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-04T02:45:37.974587Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:37.985248Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:37.985319Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474 ... 618Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-01-04T02:45:47.101650Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CheckDataTx 2026-01-04T02:45:47.101677Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:45:47.101704Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:45:47.101741Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2501/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/18446744073709551615 ImmediateWriteEdgeReplied# v2501/18446744073709551615 2026-01-04T02:45:47.101787Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:281474976715664] at 72075186224037888 2026-01-04T02:45:47.101847Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-01-04T02:45:47.101874Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:45:47.101897Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit BlockFailPoint 2026-01-04T02:45:47.101915Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit BlockFailPoint 2026-01-04T02:45:47.101932Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-01-04T02:45:47.101946Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit BlockFailPoint 2026-01-04T02:45:47.101991Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit ExecuteKqpDataTx 2026-01-04T02:45:47.102016Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit ExecuteKqpDataTx 2026-01-04T02:45:47.102079Z node 2 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:269: Operation [0:281474976715664] (execute_kqp_data_tx) at 
72075186224037888 set memory limit 4193432 2026-01-04T02:45:47.102163Z node 2 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: false 2026-01-04T02:45:47.102232Z node 2 :TX_DATASHARD TRACE: execute_kqp_data_tx_unit.cpp:521: add locks to result: 0 2026-01-04T02:45:47.102273Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-01-04T02:45:47.102296Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit ExecuteKqpDataTx 2026-01-04T02:45:47.102321Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:45:47.102346Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit FinishPropose 2026-01-04T02:45:47.102382Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715664 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-04T02:45:47.102436Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is DelayComplete 2026-01-04T02:45:47.102459Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:45:47.102484Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:45:47.102525Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:45:47.102556Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037888 is Executed 2026-01-04T02:45:47.102571Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:45:47.102588Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715664] at 72075186224037888 has finished 2026-01-04T02:45:47.102631Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:47.102666Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715664] at 72075186224037888 on unit FinishPropose 2026-01-04T02:45:47.102698Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:47.103977Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [2:70:2117], Recipient [2:886:2765]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 2 Status: STATUS_NOT_FOUND 2026-01-04T02:45:47.104216Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1119:2929], Recipient [2:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:47.104245Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event 
TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:47.104295Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1118:2928], serverId# [2:1119:2929], sessionId# [0:0:0] 2026-01-04T02:45:47.104427Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553169, Sender [2:1117:2927], Recipient [2:886:2765]: NKikimrTxDataShard.TEvGetInfoRequest 2026-01-04T02:45:47.105337Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1122:2932], Recipient [2:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:47.105381Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:47.105420Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1121:2931], serverId# [2:1122:2932], sessionId# [0:0:0] 2026-01-04T02:45:47.105589Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1120:2930], Recipient [2:886:2765]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2026-01-04T02:45:47.105669Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-04T02:45:47.105713Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2501/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/18446744073709551615 ImmediateWriteEdgeReplied# v2501/18446744073709551615 2026-01-04T02:45:47.105765Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v2501/18446744073709551615 2026-01-04T02:45:47.105835Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-01-04T02:45:47.105922Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T02:45:47.105959Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-01-04T02:45:47.105994Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:45:47.106029Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:45:47.106075Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-01-04T02:45:47.106129Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T02:45:47.106154Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:45:47.106171Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T02:45:47.106187Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-01-04T02:45:47.106260Z node 2 :TX_DATASHARD TRACE: 
datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2026-01-04T02:45:47.106487Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1120:2930], 1000} after executionsCount# 1 2026-01-04T02:45:47.106545Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1120:2930], 1000} sends rowCount# 1, bytes# 32, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551583, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:45:47.106607Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[2:1120:2930], 1000} finished in read 2026-01-04T02:45:47.106666Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T02:45:47.106691Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T02:45:47.106715Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:45:47.106738Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:45:47.106774Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T02:45:47.106809Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:45:47.106829Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-01-04T02:45:47.106860Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T02:45:47.106927Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestCopyRangeToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 
72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:92:2121] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:95:2057] recipient: [11:92:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:94:2122] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:210:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:92:2121] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:95:2057] recipient: [12:92:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:94:2122] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:210:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... t: [26:39:2086] Leader for TabletID 72057594037927937 is [26:58:2099] sender: [26:93:2057] recipient: [26:92:2121] Leader for TabletID 72057594037927937 is [26:94:2122] sender: [26:95:2057] recipient: [26:92:2121] !Reboot 72057594037927937 (actor [26:58:2099]) rebooted! !Reboot 72057594037927937 (actor [26:58:2099]) tablet resolver refreshed! new actor is[26:94:2122] Leader for TabletID 72057594037927937 is [26:94:2122] sender: [26:210:2057] recipient: [26:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [27:56:2057] recipient: [27:53:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:59:2057] recipient: [27:53:2097] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:76:2057] recipient: [27:14:2061] !Reboot 72057594037927937 (actor [27:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:91:2057] recipient: [27:39:2086] Leader for TabletID 72057594037927937 is [27:58:2099] sender: [27:94:2057] recipient: [27:93:2121] Leader for TabletID 72057594037927937 is [27:95:2122] sender: [27:96:2057] recipient: [27:93:2121] !Reboot 72057594037927937 (actor [27:58:2099]) rebooted! !Reboot 72057594037927937 (actor [27:58:2099]) tablet resolver refreshed! 
new actor is[27:95:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [28:56:2057] recipient: [28:53:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:59:2057] recipient: [28:53:2097] Leader for TabletID 72057594037927937 is [28:58:2099] sender: [28:76:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:59:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:76:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:52:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:59:2057] recipient: [30:52:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:76:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:78:2057] recipient: [30:39:2086] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:81:2057] recipient: [30:80:2112] Leader for TabletID 72057594037927937 is [30:82:2113] sender: [30:83:2057] recipient: [30:80:2112] !Reboot 72057594037927937 (actor [30:58:2099]) rebooted! !Reboot 72057594037927937 (actor [30:58:2099]) tablet resolver refreshed! new actor is[30:82:2113] Leader for TabletID 72057594037927937 is [30:82:2113] sender: [30:198:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:59:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:76:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:78:2057] recipient: [31:39:2086] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:81:2057] recipient: [31:80:2112] Leader for TabletID 72057594037927937 is [31:82:2113] sender: [31:83:2057] recipient: [31:80:2112] !Reboot 72057594037927937 (actor [31:58:2099]) rebooted! !Reboot 72057594037927937 (actor [31:58:2099]) tablet resolver refreshed! new actor is[31:82:2113] Leader for TabletID 72057594037927937 is [31:82:2113] sender: [31:198:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:79:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:82:2057] recipient: [32:81:2112] Leader for TabletID 72057594037927937 is [32:83:2113] sender: [32:84:2057] recipient: [32:81:2112] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:83:2113] Leader for TabletID 72057594037927937 is [32:83:2113] sender: [32:199:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:82:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:85:2057] recipient: [33:84:2115] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:87:2057] recipient: [33:84:2115] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! !Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! new actor is[33:86:2116] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:202:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:54:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:54:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:82:2057] recipient: [34:39:2086] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:85:2057] recipient: [34:84:2115] Leader for TabletID 72057594037927937 is [34:86:2116] sender: [34:87:2057] recipient: [34:84:2115] !Reboot 72057594037927937 (actor [34:58:2099]) rebooted! !Reboot 72057594037927937 (actor [34:58:2099]) tablet resolver refreshed! new actor is[34:86:2116] Leader for TabletID 72057594037927937 is [34:86:2116] sender: [34:202:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:83:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:86:2057] recipient: [35:85:2115] Leader for TabletID 72057594037927937 is [35:87:2116] sender: [35:88:2057] recipient: [35:85:2115] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:87:2116] Leader for TabletID 72057594037927937 is [35:87:2116] sender: [35:203:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:86:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:89:2057] recipient: [36:88:2118] Leader for TabletID 72057594037927937 is [36:90:2119] sender: [36:91:2057] recipient: [36:88:2118] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:90:2119] Leader for TabletID 72057594037927937 is [36:90:2119] sender: [36:206:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:86:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:89:2057] recipient: [37:88:2118] Leader for TabletID 72057594037927937 is [37:90:2119] sender: [37:91:2057] recipient: [37:88:2118] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! new actor is[37:90:2119] Leader for TabletID 72057594037927937 is [37:90:2119] sender: [37:206:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:87:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:90:2057] recipient: [38:89:2118] Leader for TabletID 72057594037927937 is [38:91:2119] sender: [38:92:2057] recipient: [38:89:2118] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! 
new actor is[38:91:2119] Leader for TabletID 72057594037927937 is [38:91:2119] sender: [38:207:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] >> EraseRowsTests::ConditionalEraseRowsShouldNotErase >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds >> DistributedEraseTests::ConditionalEraseRowsShouldErase >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 >> DataShardVolatile::DistributedUpsertRestartAfterPlan-UseSink [GOOD] >> DataShardVolatile::CompactedVolatileChangesCommit |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-anonymous >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithClose [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted >> DataShardOutOfOrder::TestShardRestartPlannedCommitShouldSucceed-EvWrite [GOOD] >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD] >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD] >> DataShardWrite::DistributedInsertReadSetWithoutLocks+Volatile [GOOD] >> DataShardWrite::DistributedInsertReadSetWithoutLocks-Volatile >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds >> TConsoleTests::TestScaleRecommenderPolicies [GOOD] >> TConsoleTests::TestScaleRecommenderPoliciesValidation >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-anonymous >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestSnapshotReadAfterBrokenLock-EvWrite [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:36.725713Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:36.844894Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:36.846204Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:36.846284Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:37.163818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:37.164406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:37.248516Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494734540731 != 1767494734540734 2026-01-04T02:45:37.266700Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:37.316384Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:37.407979Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:37.706795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:37.720070Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:37.832894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:38.130050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:38.342685Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:38.342800Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1035:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:38.342877Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:38.343457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1038:2873], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:38.343622Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:38.348318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:45:38.489468Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1039:2874], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:45:38.563529Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1096:2912] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2026-01-04T02:45:43.607664Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZjY1MmM5MGMtYWEzN2RkMDQtZTkxZTY2YzUtOTE2ZGVhYTA=, ActorId: [1:1167:2960], ActorState: ExecuteState, LegacyTraceId: 01ke3eaxfz5xbr0vxxjtc58rns, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table-1`" issue_code: 2001 severity: 1 } trace_id# ... waiting for SysViewsRoster update finished 2026-01-04T02:45:46.337997Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:46.341937Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:45:46.345419Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:46.345724Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:46.345896Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:46.533887Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:46.534006Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:46.558216Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767494744122692 != 1767494744122696 2026-01-04T02:45:46.563444Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:46.606158Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:46.683198Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:46.927609Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:46.939681Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:47.039759Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:47.253631Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:47.453882Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:47.453956Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1035:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:47.454021Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:47.454644Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1039:2874], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:47.454729Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:47.457791Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:45:47.598278Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1038:2873], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:45:47.632331Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1096:2912] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 2 } } 2026-01-04T02:45:48.989397Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=ZjEwMDcxZGEtNDZmNjBhZTktZTExOTNjMTMtODU2NDdlYTk=, ActorId: [2:1159:2962], ActorState: ExecuteState, LegacyTraceId: 01ke3eb2q492bayecy3g2g5azk, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table-1`" issue_code: 2001 severity: 1 } trace_id# |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase >> EraseRowsTests::EraseRowsShouldSuccess ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestPlannedHalfOverloadedSplit-UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:40.850638Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:40.937137Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:40.937461Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:40.937504Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:41.160206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:41.160338Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:41.211189Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494738543411 != 1767494738543414 2026-01-04T02:45:41.222258Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:41.263297Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:41.354825Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:41.636499Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:41.650492Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:41.753442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:41.781836Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:41.782667Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:41.782903Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:45:41.783150Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:41.815886Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:41.816504Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:41.816590Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:41.817995Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:41.818067Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:41.818123Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:41.818419Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:41.818537Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:41.818606Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:45:41.829381Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:41.860173Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:41.860413Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:41.860574Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:45:41.860625Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:41.860684Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:41.860729Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:41.860905Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:41.860954Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:41.861388Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:41.861475Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:41.861547Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:41.861630Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:41.861678Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:45:41.861709Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:45:41.861752Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:45:41.861781Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:41.861834Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-04T02:45:41.861947Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:41.861973Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:41.862007Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:41.862338Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-04T02:45:41.862372Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:41.862467Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:41.862675Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:45:41.862730Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:41.862809Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:41.862851Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:45:41.862884Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:45:41.862916Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T02:45:41.862942Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:45:41.863162Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:41.863193Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-04T02:45:41.863218Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:45:41.863245Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-04T02:45:41.863329Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-04T02:45:41.863392Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:45:41.863430Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-04T02:45:41.863480Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-04T02:45:41.863501Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:41.864646Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-04T02:45:41.864675Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:41.875336Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:41.875405Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474 ... :45:48.736266Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877763, Sender [2:1355:3099], Recipient [2:969:2824]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037893 ClientId: [2:1355:3099] ServerId: [2:1359:3103] } 2026-01-04T02:45:48.736301Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-04T02:45:48.736382Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037893 2026-01-04T02:45:48.736418Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:48.736447Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037893 2026-01-04T02:45:48.736477Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037893 has no attached operations 2026-01-04T02:45:48.736504Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037893 2026-01-04T02:45:48.736532Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037893 TxInFly 0 2026-01-04T02:45:48.736568Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2026-01-04T02:45:48.736682Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877763, Sender [2:1353:3097], Recipient [2:886:2765]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037891 ClientId: [2:1353:3097] ServerId: [2:1357:3101] } 2026-01-04T02:45:48.736724Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3187: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-04T02:45:48.736836Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1216:3003], Recipient [2:1216:3003]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:48.736868Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:48.737042Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [2:26:2073], Recipient [2:1222:3007]: {TEvRegisterTabletResult TabletId# 72075186224037893 Entry# 3000} 2026-01-04T02:45:48.737075Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-01-04T02:45:48.737105Z node 2 
:TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037893 time 3000 2026-01-04T02:45:48.737136Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2026-01-04T02:45:48.737379Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877764, Sender [2:1359:3103], Recipient [2:1222:3007]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:45:48.737412Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3190: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:45:48.737448Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037893, clientId# [2:1355:3099], serverId# [2:1359:3103], sessionId# [0:0:0] 2026-01-04T02:45:48.737621Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877764, Sender [2:1357:3101], Recipient [2:1216:3003]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:45:48.737654Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3190: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:45:48.737680Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# [2:1353:3097], serverId# [2:1357:3101], sessionId# [0:0:0] 2026-01-04T02:45:48.737828Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037891 2026-01-04T02:45:48.737862Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:48.737889Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037891 2026-01-04T02:45:48.737917Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037891 has no attached operations 2026-01-04T02:45:48.737941Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037891 2026-01-04T02:45:48.737965Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-01-04T02:45:48.738000Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2026-01-04T02:45:48.738146Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270976, Sender [2:26:2073], Recipient [2:1216:3003]: {TEvRegisterTabletResult TabletId# 72075186224037891 Entry# 3000} 2026-01-04T02:45:48.738175Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3191: StateWork, processing event TEvMediatorTimecast::TEvRegisterTabletResult 2026-01-04T02:45:48.738202Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 3000 2026-01-04T02:45:48.738230Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-01-04T02:45:48.738633Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [2:26:2073], Recipient [2:1222:3007]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 3000 ReadStep# 3000 } 2026-01-04T02:45:48.738668Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 
2026-01-04T02:45:48.738700Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037893 coordinator 72057594046316545 last step 0 next step 3000 2026-01-04T02:45:48.738738Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037893: waitStep# 3000 readStep# 3000 observedStep# 3000 2026-01-04T02:45:48.738786Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2858: CheckMediatorStateRestored at 72075186224037893 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-04T02:45:48.739047Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270978, Sender [2:26:2073], Recipient [2:1216:3003]: NKikimr::TEvMediatorTimecast::TEvSubscribeReadStepResult{ CoordinatorId# 72057594046316545 LastReadStep# 0 NextReadStep# 3000 ReadStep# 3000 } 2026-01-04T02:45:48.739086Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3192: StateWork, processing event TEvMediatorTimecast::TEvSubscribeReadStepResult 2026-01-04T02:45:48.739114Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 0 next step 3000 2026-01-04T02:45:48.739154Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037891: waitStep# 3000 readStep# 3000 observedStep# 3000 2026-01-04T02:45:48.739198Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2858: CheckMediatorStateRestored at 72075186224037891 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-04T02:45:48.750135Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:485: 72075186224037888 ack split to schemeshard 281474976715664 2026-01-04T02:45:48.751113Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:485: 72075186224037889 ack split to schemeshard 281474976715665 2026-01-04T02:45:48.754397Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553158, Sender [2:399:2398], Recipient [2:888:2766] 2026-01-04T02:45:48.754479Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:565: Got TEvSplitPartitioningChanged: opId: 281474976715664, at datashard: 72075186224037888, state: SplitSrcWaitForPartitioningChanged 2026-01-04T02:45:48.757554Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553158, Sender [2:399:2398], Recipient [2:976:2828] 2026-01-04T02:45:48.757614Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:565: Got TEvSplitPartitioningChanged: opId: 281474976715665, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2026-01-04T02:45:48.759712Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:532: 72075186224037888 ack split partitioning changed to schemeshard 281474976715664 2026-01-04T02:45:48.759811Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:45:48.760450Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [2:877:2759], Recipient [2:886:2765]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-04T02:45:48.760664Z node 2 :TX_DATASHARD DEBUG: datashard_split_src.cpp:532: 72075186224037889 ack split partitioning changed to schemeshard 281474976715665 2026-01-04T02:45:48.760715Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline 
state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:45:48.761137Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [2:962:2819], Recipient [2:969:2824]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-04T02:45:49.120193Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [2:1169:2864], Recipient [2:886:2765]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_DATA SourceDeprecated { RawX1: 1169 RawX2: 8589937456 } TxBody: " \0008\000`\200\200\200\005j\213\007\010\001\022\314\006\010\001\022\024\n\022\t\221\004\000\000\000\000\000\000\0210\013\000\000\002\000\000\000\032\262\002\010\240\215\006\022\207\002\037\002\022KqpEffect\005\205\006\213\000\205\002\206\205\004\207\203\004?\004\014key\024valueh%kqp%tx_result_binding_0_1\204\214\002\030Inputs(Parameters\034Program\013?\000)\251\000?\n\014Arg\000\002)\211\002?\016\204\214\002(KqpEffects\000)\211\010?\032\213\010\203\010\203\010\203\005@\203\010\204?\006\210\203\004\203\004\203\0144KqpUpsertRows\000\013?&\003?\036\177\000\001\205\000\000\000\000\001\003? L\003?\"\000\003?$\002\017)\211\002?(?\010 Iterator\000)\211\004?\010?\n\203\004\030Member\000?\026\003?@\000\002\004\000\006\010\002?.\003\203\004\004\003\203\004\002\003\003?0\000\r\010\000\n\001/\032\0369\000\000\000\000\000\000\000@i\000\000\000\000\000\000\360?q\000\000\000\000\ 2026-01-04T02:45:49.120281Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:49.120412Z node 2 :TX_DATASHARD NOTICE: datashard.cpp:3109: Rejecting data TxId 281474976715663 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2026-01-04T02:45:49.120835Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715664, at schemeshard: 72057594046644480 2026-01-04T02:45:49.121303Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardRestartDuringWaitingRead [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:42.413002Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:42.496859Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:42.497175Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:42.497221Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:42.739186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:42.739328Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:42.821474Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494740409746 != 1767494740409749 2026-01-04T02:45:42.842724Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:42.890059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:42.986704Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:43.248397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:43.262134Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:43.364167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:43.395079Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:43.396132Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:43.396414Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:892:2768] 2026-01-04T02:45:43.396634Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:43.445191Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:892:2768]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:43.445298Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:880:2761], Recipient [1:895:2770]: 
NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:43.446598Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:880:2761], Recipient [1:895:2770]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:43.446840Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:895:2770] 2026-01-04T02:45:43.447145Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:43.455868Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:880:2761], Recipient [1:895:2770]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:43.456341Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:43.456522Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:43.458392Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:43.458478Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:43.458543Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:43.458941Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:43.459143Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:43.459235Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:923:2768] in generation 1 2026-01-04T02:45:43.459672Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:43.459795Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:43.461287Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-04T02:45:43.461375Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-04T02:45:43.461427Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-04T02:45:43.461711Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:43.461833Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:43.461888Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:924:2770] in generation 1 2026-01-04T02:45:43.472601Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:43.500940Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:43.501162Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:43.501323Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:927:2789] 2026-01-04T02:45:43.501383Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:43.501425Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 
72075186224037888, state: WaitScheme 2026-01-04T02:45:43.501465Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:43.501787Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:892:2768], Recipient [1:892:2768]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:43.501880Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:43.501987Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:43.502032Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:45:43.502113Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:43.502172Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:928:2790] 2026-01-04T02:45:43.502198Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:45:43.502225Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:45:43.502270Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:45:43.502581Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:895:2770], Recipient [1:895:2770]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:43.502617Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:43.502848Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:43.502945Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:43.503070Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:43.503111Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:43.503169Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:45:43.503209Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:45:43.503291Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:45:43.503327Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:43.503378Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:43.503427Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-04T02:45:43.503501Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to 
remove in 72075186224037889 2026-01-04T02:45:43.503602Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:896:2771], Recipient [1:892:2768]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:43.503644Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:43.503693Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:885:2764], serverId# [1:896:2771], sessionId# [0:0:0] 2026-01-04T02:45:43.503739Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:45:43.503782Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:43.503816 ... riteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-01-04T02:45:44.494035Z node 1 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=1 2026-01-04T02:45:44.494080Z node 1 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-04T02:45:44.494153Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:45:44.494188Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2026-01-04T02:45:44.494218Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-04T02:45:44.494260Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T02:45:44.494329Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:44.494359Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-04T02:45:44.494387Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:45:44.494412Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:45:44.494469Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-04T02:45:44.494493Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:45:44.494519Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-01-04T02:45:44.505276Z node 1 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-04T02:45:44.505345Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T02:45:44.505411Z node 1 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 
2026-01-04T02:45:44.505490Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:44.506066Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [1:1164:2945] TxId: 281474976715662. Ctx: { TraceId: 01ke3eaybdcpe5ay97rkx6hzbp, Database: , SessionId: ydb://session/3?node_id=1&id=ZDU1MGI0ZjgtNmYyZTViMjItMTdjNTJiM2UtMjEyYjM4ZA==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-04T02:45:44.506139Z node 1 :KQP_EXECUTER TRACE: {KQPEX@kqp_executer_impl.h:1505} ActorId: [1:1164:2945] TxId: 281474976715662. Ctx: { TraceId: 01ke3eaybdcpe5ay97rkx6hzbp, Database: , SessionId: ydb://session/3?node_id=1&id=ZDU1MGI0ZjgtNmYyZTViMjItMTdjNTJiM2UtMjEyYjM4ZA==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate, become ZombieState trace_id# ... waiting for SysViewsRoster update finished 2026-01-04T02:45:47.117530Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:47.121589Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:45:47.124795Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:47.124972Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:47.125091Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:47.304855Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:47.304983Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:47.321226Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767494744943032 != 1767494744943036 2026-01-04T02:45:47.325245Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:47.364877Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:47.431245Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:47.692444Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:47.705584Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:47.805593Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:48.050245Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:48.267386Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1026:2867], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:48.267506Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1037:2872], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:48.267596Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:48.268850Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1042:2877], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:48.269082Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:48.273636Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:45:48.420052Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1040:2875], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:45:48.456114Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1098:2914] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for readsets 2026-01-04T02:45:49.131107Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1288: SelfId: [2:1184:2953], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [2:1160:2953]TEvDeliveryProblem was received from tablet: 72075186224037888 2026-01-04T02:45:49.155056Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [2:1174:2953], SessionActorId: [2:1160:2953], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 . sessionActorId=[2:1160:2953]. 2026-01-04T02:45:49.155488Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=2&id=YWVhNWM5NjItOTc3YjFhNzctYTFkYjkwNTgtYzFlNGI0Yjg=, ActorId: [2:1160:2953], ActorState: ExecuteState, LegacyTraceId: 01ke3eb2ds7c3ykhp9m40cj8eg, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [2:1175:2953] from: [2:1174:2953] trace_id# 2026-01-04T02:45:49.155557Z node 2 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976715664, task: 1, CA Id [2:1210:2991]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2026-01-04T02:45:49.156156Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [2:1175:2953] TxId: 281474976715663. Ctx: { TraceId: 01ke3eb2ds7c3ykhp9m40cj8eg, Database: /Root, SessionId: ydb://session/3?node_id=2&id=YWVhNWM5NjItOTc3YjFhNzctYTFkYjkwNTgtYzFlNGI0Yjg=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# UNDETERMINED Issues# {
: Error: State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888., code: 2026 } trace_id# 2026-01-04T02:45:49.156494Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=YWVhNWM5NjItOTc3YjFhNzctYTFkYjkwNTgtYzFlNGI0Yjg=, ActorId: [2:1160:2953], ActorState: ExecuteState, LegacyTraceId: 01ke3eb2ds7c3ykhp9m40cj8eg, Create QueryResponse for error on request, msg: status# UNDETERMINED issues# { message: "State of operation is unknown. Error writing to table `/Root/table-1`. Transaction state unknown for tablet 72075186224037888." issue_code: 2026 severity: 1 } trace_id# { items { uint32_value: 1 } items { uint32_value: 10 } }, { items { uint32_value: 3 } items { uint32_value: 30 } } >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-dbadmin |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderNonConflictingWrites-EvWrite [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:36.810229Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:36.905164Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:36.905457Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:36.905506Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:37.163816Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:37.164419Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:37.265772Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494734578316 != 1767494734578319 2026-01-04T02:45:37.278791Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:37.324053Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:37.421829Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:37.707193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:37.720922Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:37.833036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:37.874910Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:37.876019Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:37.876272Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:45:37.876465Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:37.922959Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:37.923806Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:37.923936Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:37.925719Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:37.925823Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:37.925919Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:37.926547Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:37.926676Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:37.926791Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:45:37.937528Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:37.970453Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:37.970660Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:37.970796Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:45:37.970846Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:37.970898Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:37.970948Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:37.971130Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:37.971183Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:37.971586Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:37.971673Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:37.971786Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:37.971877Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:37.971943Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:45:37.971981Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:45:37.972034Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:45:37.972071Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:37.972117Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-04T02:45:37.972269Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:37.972318Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:37.972376Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:37.972742Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-04T02:45:37.972786Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:37.972916Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:37.973130Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:45:37.973205Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:37.973282Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:37.973342Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:45:37.973382Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:45:37.973420Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T02:45:37.973461Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:45:37.973748Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:37.973782Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-04T02:45:37.973835Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:45:37.973869Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-04T02:45:37.973939Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-04T02:45:37.973982Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:45:37.974021Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-04T02:45:37.974055Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-04T02:45:37.974088Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:37.975475Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-04T02:45:37.975529Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:37.986260Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:37.986329Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474 ... 30 BuildCpuTimeUs: 44 HostName: "ghrun-46vtxmsa7i" NodeId: 2 StartTimeMs: 1767494749628 CreateTimeMs: 1767494749622 UpdateTimeMs: 1767494749630 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:45:49.631633Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1277:3057] 2026-01-04T02:45:49.631697Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1272:3038] TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1282:3062], CA [2:1279:3059], CA [2:1280:3060], CA [2:1281:3061], CA [2:1278:3058], trace_id# 2026-01-04T02:45:49.631741Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1272:3038] TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 5 compute actor(s) and 0 datashard(s): CA [2:1282:3062], CA [2:1279:3059], CA [2:1280:3060], CA [2:1281:3061], CA [2:1278:3058], trace_id# 2026-01-04T02:45:49.632208Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1272:3038] TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1278:3058] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 519 DurationUs: 1000 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 267 FinishTimeMs: 1767494749630 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 207 BuildCpuTimeUs: 60 HostName: "ghrun-46vtxmsa7i" NodeId: 2 StartTimeMs: 1767494749629 CreateTimeMs: 1767494749622 UpdateTimeMs: 1767494749630 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:45:49.632279Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. 
Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1278:3058] 2026-01-04T02:45:49.632331Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1272:3038] TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1282:3062], CA [2:1279:3059], CA [2:1280:3060], CA [2:1281:3061], trace_id# 2026-01-04T02:45:49.632372Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1272:3038] TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 4 compute actor(s) and 0 datashard(s): CA [2:1282:3062], CA [2:1279:3059], CA [2:1280:3060], CA [2:1281:3061], trace_id# 2026-01-04T02:45:49.632643Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1272:3038] TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1279:3059] TaskId# 4 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 359 DurationUs: 2000 Tasks { TaskId: 4 StageId: 3 CpuTimeUs: 145 FinishTimeMs: 1767494749631 InputRows: 1 InputBytes: 5 OutputRows: 1 OutputBytes: 5 ComputeCpuTimeUs: 113 BuildCpuTimeUs: 32 HostName: "ghrun-46vtxmsa7i" NodeId: 2 StartTimeMs: 1767494749629 CreateTimeMs: 1767494749622 UpdateTimeMs: 1767494749631 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:45:49.632704Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1279:3059] 2026-01-04T02:45:49.632749Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1272:3038] TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1282:3062], CA [2:1280:3060], CA [2:1281:3061], trace_id# 2026-01-04T02:45:49.632789Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1272:3038] TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 3 compute actor(s) and 0 datashard(s): CA [2:1282:3062], CA [2:1280:3060], CA [2:1281:3061], trace_id# 2026-01-04T02:45:49.632901Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1272:3038] TxId: 281474976715667. 
Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1280:3060] TaskId# 5 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 527 DurationUs: 1000 Tasks { TaskId: 5 StageId: 4 CpuTimeUs: 281 FinishTimeMs: 1767494749631 InputRows: 2 InputBytes: 10 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 228 BuildCpuTimeUs: 53 HostName: "ghrun-46vtxmsa7i" NodeId: 2 StartTimeMs: 1767494749630 CreateTimeMs: 1767494749622 UpdateTimeMs: 1767494749631 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:45:49.632953Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1280:3060] 2026-01-04T02:45:49.632991Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1272:3038] TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1282:3062], CA [2:1281:3061], trace_id# 2026-01-04T02:45:49.633026Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1272:3038] TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 2 compute actor(s) and 0 datashard(s): CA [2:1282:3062], CA [2:1281:3061], trace_id# 2026-01-04T02:45:49.633250Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1272:3038] TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1281:3061] TaskId# 6 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 377 Tasks { TaskId: 6 StageId: 5 CpuTimeUs: 150 FinishTimeMs: 1767494749632 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ComputeCpuTimeUs: 84 BuildCpuTimeUs: 66 HostName: "ghrun-46vtxmsa7i" NodeId: 2 CreateTimeMs: 1767494749622 UpdateTimeMs: 1767494749632 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:45:49.633307Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1281:3061] 2026-01-04T02:45:49.633350Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1272:3038] TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Waiting for: CA [2:1282:3062], trace_id# 2026-01-04T02:45:49.633389Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1272:3038] TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1282:3062], trace_id# 2026-01-04T02:45:49.633643Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1272:3038] TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1282:3062] TaskId# 7 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 359 DurationUs: 1000 Tasks { TaskId: 7 StageId: 6 CpuTimeUs: 127 FinishTimeMs: 1767494749633 InputRows: 2 InputBytes: 7 OutputRows: 2 OutputBytes: 7 ResultRows: 2 ResultBytes: 7 ComputeCpuTimeUs: 91 BuildCpuTimeUs: 36 HostName: "ghrun-46vtxmsa7i" NodeId: 2 StartTimeMs: 1767494749632 CreateTimeMs: 1767494749623 UpdateTimeMs: 1767494749633 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:45:49.633700Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1282:3062] 2026-01-04T02:45:49.633970Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [2:1272:3038] TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-04T02:45:49.634045Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [2:1272:3038] TxId: 281474976715667. Ctx: { TraceId: 01ke3eb2zp1jkpm7kny4rgesva, Database: , SessionId: ydb://session/3?node_id=2&id=NjgxNThmYTItYTc5ODdlMzAtOTVlMGE3MjUtY2ExMzY1Y2U=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.003550s ReadRows: 2 ReadBytes: 16 RequestUnits# 2 ForceFlag# true trace_id# { items { uint32_value: 3 } items { uint32_value: 2 } }, { items { uint32_value: 4 } items { uint32_value: 2 } } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestOutOfOrderRestartLocksReorderedWithoutBarrier [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:36.865315Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:36.963441Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:36.963789Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:36.963841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:37.193198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:37.193325Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:37.275563Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494734540735 != 1767494734540738 2026-01-04T02:45:37.290914Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:37.335840Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:37.430878Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:37.707136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:37.720882Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:37.832836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:37.875552Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:37.876299Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:37.876549Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:45:37.876764Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:37.909436Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:37.910054Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:37.910143Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:37.911668Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:37.911768Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:37.911864Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:37.912275Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:37.912417Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:37.912512Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:45:37.923220Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:37.949495Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:37.949684Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:37.949834Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:45:37.949884Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:37.949934Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:37.949976Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:37.950118Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:37.950165Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:37.950539Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:37.950622Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:37.950715Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:37.950809Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:37.950898Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:45:37.950939Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:45:37.950981Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:45:37.951018Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:37.951074Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-04T02:45:37.951211Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:37.951252Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:37.951290Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:37.951641Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-04T02:45:37.951683Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:37.951817Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:37.952050Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:45:37.952115Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:37.952199Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:37.952255Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:45:37.952294Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:45:37.952331Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T02:45:37.952366Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:45:37.952615Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:37.952663Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-04T02:45:37.952696Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:45:37.952728Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-04T02:45:37.952797Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-04T02:45:37.952846Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:45:37.952879Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-04T02:45:37.952912Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-04T02:45:37.952938Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:37.954342Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-04T02:45:37.954388Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:37.965108Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:37.965174Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474 ... iledNodes# 0 trace_id# 2026-01-04T02:45:49.837164Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:283} ActorId: [2:1380:3096] TxId: 281474976715672. Ctx: { TraceId: 01ke3eb3hw06c7zbmws9yhf5n6, Database: , SessionId: ydb://session/3?node_id=2&id=OGRlNDkzNjgtZmIxMDhhNDAtODE0ZWI3ZjgtNjZjOTZkYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Shards on nodes: node 2: [72075186224037888] trace_id# 2026-01-04T02:45:49.837219Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715672. Ctx: { TraceId: 01ke3eb3hw06c7zbmws9yhf5n6, Database: , SessionId: ydb://session/3?node_id=2&id=OGRlNDkzNjgtZmIxMDhhNDAtODE0ZWI3ZjgtNjZjOTZkYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-01-04T02:45:49.837441Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976715672. Ctx: { TraceId: 01ke3eb3hw06c7zbmws9yhf5n6, Database: , SessionId: ydb://session/3?node_id=2&id=OGRlNDkzNjgtZmIxMDhhNDAtODE0ZWI3ZjgtNjZjOTZkYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [2:1384:3096] 2026-01-04T02:45:49.837491Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976715672. Ctx: { TraceId: 01ke3eb3hw06c7zbmws9yhf5n6, Database: , SessionId: ydb://session/3?node_id=2&id=OGRlNDkzNjgtZmIxMDhhNDAtODE0ZWI3ZjgtNjZjOTZkYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:1384:3096], channels: 1 2026-01-04T02:45:49.865191Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [2:1380:3096] TxId: 281474976715672. Ctx: { TraceId: 01ke3eb3hw06c7zbmws9yhf5n6, Database: , SessionId: ydb://session/3?node_id=2&id=OGRlNDkzNjgtZmIxMDhhNDAtODE0ZWI3ZjgtNjZjOTZkYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-04T02:45:49.865269Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1380:3096] TxId: 281474976715672. Ctx: { TraceId: 01ke3eb3hw06c7zbmws9yhf5n6, Database: , SessionId: ydb://session/3?node_id=2&id=OGRlNDkzNjgtZmIxMDhhNDAtODE0ZWI3ZjgtNjZjOTZkYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1384:3096], trace_id# 2026-01-04T02:45:49.865331Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1380:3096] TxId: 281474976715672. 
Ctx: { TraceId: 01ke3eb3hw06c7zbmws9yhf5n6, Database: , SessionId: ydb://session/3?node_id=2&id=OGRlNDkzNjgtZmIxMDhhNDAtODE0ZWI3ZjgtNjZjOTZkYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1384:3096], trace_id# 2026-01-04T02:45:49.865377Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2350} ActorId: [2:1380:3096] TxId: 281474976715672. Ctx: { TraceId: 01ke3eb3hw06c7zbmws9yhf5n6, Database: , SessionId: ydb://session/3?node_id=2&id=OGRlNDkzNjgtZmIxMDhhNDAtODE0ZWI3ZjgtNjZjOTZkYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-04T02:45:49.866177Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1380:3096] TxId: 281474976715672. Ctx: { TraceId: 01ke3eb3hw06c7zbmws9yhf5n6, Database: , SessionId: ydb://session/3?node_id=2&id=OGRlNDkzNjgtZmIxMDhhNDAtODE0ZWI3ZjgtNjZjOTZkYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1384:3096] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-04T02:45:49.866239Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1380:3096] TxId: 281474976715672. Ctx: { TraceId: 01ke3eb3hw06c7zbmws9yhf5n6, Database: , SessionId: ydb://session/3?node_id=2&id=OGRlNDkzNjgtZmIxMDhhNDAtODE0ZWI3ZjgtNjZjOTZkYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1384:3096], trace_id# 2026-01-04T02:45:49.866293Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1380:3096] TxId: 281474976715672. Ctx: { TraceId: 01ke3eb3hw06c7zbmws9yhf5n6, Database: , SessionId: ydb://session/3?node_id=2&id=OGRlNDkzNjgtZmIxMDhhNDAtODE0ZWI3ZjgtNjZjOTZkYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1384:3096], trace_id# 2026-01-04T02:45:49.866632Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [2:1386:3096], Recipient [2:1299:3049]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2026-01-04T02:45:49.866756Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-04T02:45:49.866803Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v3002/281474976715665 IncompleteEdge# v{min} UnprotectedReadEdge# v3000/18446744073709551615 ImmediateWriteEdge# v3000/18446744073709551615 ImmediateWriteEdgeReplied# v4000/18446744073709551615 2026-01-04T02:45:49.866837Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v5000/18446744073709551615 2026-01-04T02:45:49.866903Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CheckRead 2026-01-04T02:45:49.866997Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-04T02:45:49.867041Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckRead 2026-01-04T02:45:49.867075Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:45:49.867108Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:45:49.867144Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2026-01-04T02:45:49.867183Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-04T02:45:49.867207Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:45:49.867223Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T02:45:49.867238Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit ExecuteRead 2026-01-04T02:45:49.867312Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-01-04T02:45:49.867471Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[2:1386:3096], 0} after executionsCount# 1 2026-01-04T02:45:49.867524Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[2:1386:3096], 0} sends rowCount# 1, bytes# 32, quota rows left# 32766, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:45:49.867587Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 
read iterator# {[2:1386:3096], 0} finished in read 2026-01-04T02:45:49.867634Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-04T02:45:49.867654Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T02:45:49.867676Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:45:49.867699Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:45:49.867727Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-04T02:45:49.867741Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:45:49.867772Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037888 has finished 2026-01-04T02:45:49.867806Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T02:45:49.868318Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [2:1386:3096], Recipient [2:1299:3049]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:45:49.868363Z node 2 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-04T02:45:49.868878Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1380:3096] TxId: 281474976715672. Ctx: { TraceId: 01ke3eb3hw06c7zbmws9yhf5n6, Database: , SessionId: ydb://session/3?node_id=2&id=OGRlNDkzNjgtZmIxMDhhNDAtODE0ZWI3ZjgtNjZjOTZkYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1384:3096] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 729 Tasks { TaskId: 1 CpuTimeUs: 126 FinishTimeMs: 1767494749868 OutputRows: 1 OutputBytes: 5 Tables { TablePath: "/Root/table-1" ReadRows: 1 ReadBytes: 8 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 5 ComputeCpuTimeUs: 38 BuildCpuTimeUs: 88 HostName: "ghrun-46vtxmsa7i" NodeId: 2 StartTimeMs: 1767494749868 CreateTimeMs: 1767494749865 UpdateTimeMs: 1767494749868 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:45:49.868985Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715672. Ctx: { TraceId: 01ke3eb3hw06c7zbmws9yhf5n6, Database: , SessionId: ydb://session/3?node_id=2&id=OGRlNDkzNjgtZmIxMDhhNDAtODE0ZWI3ZjgtNjZjOTZkYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1384:3096] 2026-01-04T02:45:49.869136Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [2:1380:3096] TxId: 281474976715672. Ctx: { TraceId: 01ke3eb3hw06c7zbmws9yhf5n6, Database: , SessionId: ydb://session/3?node_id=2&id=OGRlNDkzNjgtZmIxMDhhNDAtODE0ZWI3ZjgtNjZjOTZkYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-04T02:45:49.869189Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [2:1380:3096] TxId: 281474976715672. 
Ctx: { TraceId: 01ke3eb3hw06c7zbmws9yhf5n6, Database: , SessionId: ydb://session/3?node_id=2&id=OGRlNDkzNjgtZmIxMDhhNDAtODE0ZWI3ZjgtNjZjOTZkYjI=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000729s ReadRows: 1 ReadBytes: 8 RequestUnits# 1 ForceFlag# true trace_id# { items { uint32_value: 3 } items { uint32_value: 2 } }
>> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-clusteradmin [GOOD]
>> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-clusteradmin
>> AsyncIndexChangeExchange::SenderShouldBeActivatedOnTableWithAsyncIndex [GOOD]
>> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce
|93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest
|93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest
>> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-anonymous [GOOD]
>> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-anonymous
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ImmediateBetweenOnline_oo8_dirty [GOOD]
Test command err:
2026-01-04T02:45:46.299629Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:46.300984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:46.301036Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:46.307770Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:46.308023Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:45:46.308211Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:46.338307Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:46.344085Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:46.344180Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:46.345310Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:45:46.345372Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:45:46.345418Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:45:46.345716Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:46.346727Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:46.346780Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:45:46.401496Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:46.426276Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:45:46.426428Z node 1 :TX_DATASHARD
DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:46.426530Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:45:46.426574Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:45:46.426618Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:45:46.426650Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:46.426823Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:46.426851Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:46.427067Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:45:46.427135Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:45:46.427187Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:46.427218Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:46.427244Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:45:46.427280Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:45:46.427307Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:45:46.427344Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:45:46.427373Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:46.427431Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:46.427456Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:46.427495Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:45:46.434445Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\001J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:45:46.434500Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:46.434581Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: 
TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:45:46.434735Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:45:46.434765Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:45:46.434806Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:45:46.434856Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:45:46.434895Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:45:46.434926Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:45:46.434951Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:46.435155Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:46.435179Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:45:46.435204Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:45:46.435225Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:46.435265Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:45:46.435295Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:45:46.435322Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:45:46.435353Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:46.435377Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:46.447157Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:45:46.447208Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:46.447233Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:46.447273Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:45:46.447319Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:46.447717Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:46.447770Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 
2026-01-04T02:45:46.447799Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:45:46.447906Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:45:46.447941Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:45:46.448061Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:46.448089Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:45:46.448124Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:45:46.448159Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:45:46.453909Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:45:46.453963Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:46.454149Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:46.454181Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:46.454219Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:46.454246Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:46.454281Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:46.454316Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:45:46.454356Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-01-04T02:45:50.912585Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:45:50.912675Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-04T02:45:50.912697Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437186 on unit CompleteOperation 2026-01-04T02:45:50.912724Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-04T02:45:50.912758Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-04T02:45:50.912778Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:45:50.912886Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437186 2026-01-04T02:45:50.912910Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437186 on unit CompleteOperation 2026-01-04T02:45:50.912940Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437186 at tablet 9437186 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-04T02:45:50.912977Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437186 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-04T02:45:50.913011Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437186 2026-01-04T02:45:50.913327Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 134 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 44} 2026-01-04T02:45:50.913368Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:50.913400Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 134 2026-01-04T02:45:50.913509Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 137 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 45} 2026-01-04T02:45:50.913540Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:50.913561Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 137 2026-01-04T02:45:50.913652Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 140 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 46} 2026-01-04T02:45:50.913686Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, 
processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:50.913708Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 140 2026-01-04T02:45:50.913782Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 143 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 47} 2026-01-04T02:45:50.913823Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:50.913865Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 143 2026-01-04T02:45:50.913969Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 146 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 48} 2026-01-04T02:45:50.914003Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:50.914025Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 146 2026-01-04T02:45:50.914092Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 49} 2026-01-04T02:45:50.914112Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:50.914133Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 149 2026-01-04T02:45:50.914199Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:459:2401], Recipient [1:238:2230]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437184 TabletDest# 9437186 SetTabletConsumer# 9437186 Flags# 0 Seqno# 50} 2026-01-04T02:45:50.914230Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:50.914253Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437184 source 9437184 dest 9437186 consumer 9437186 txId 152 2026-01-04T02:45:50.914318Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:50.914344Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:149] at 9437184 on unit CompleteOperation 2026-01-04T02:45:50.914382Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 149] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-04T02:45:50.914432Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-04T02:45:50.914486Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:50.914612Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: 
TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:50.914634Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:151] at 9437184 on unit CompleteOperation 2026-01-04T02:45:50.914664Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 151] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-04T02:45:50.914703Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-04T02:45:50.914725Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:50.914838Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:50.914862Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:152] at 9437184 on unit CompleteOperation 2026-01-04T02:45:50.914892Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 152] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-04T02:45:50.914929Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-04T02:45:50.914970Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:50.915070Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:50.915120Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:154] at 9437184 on unit CompleteOperation 2026-01-04T02:45:50.915156Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 154] from 9437184 at tablet 9437184 send result to client [1:103:2137], exec latency: 2 ms, propose latency: 3 ms 2026-01-04T02:45:50.915220Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-04T02:45:50.915249Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:50.915412Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 149 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 97} 2026-01-04T02:45:50.915446Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:50.915473Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 149 2026-01-04T02:45:50.915592Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 151 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 98} 2026-01-04T02:45:50.915622Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event 
TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:50.915645Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 151 2026-01-04T02:45:50.915717Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 152 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 99} 2026-01-04T02:45:50.915745Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:50.915790Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 152 2026-01-04T02:45:50.915879Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:238:2230], Recipient [1:349:2316]: {TEvReadSet step# 1000005 txid# 154 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 100} 2026-01-04T02:45:50.915907Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:45:50.915932Z node 1 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 154
>> DataShardWrite::UncommittedUpdateLockNewRowAboveSnapshot [GOOD]
>> DataShardWrite::UncommittedUpdateLockDeletedRowAboveSnapshot
|93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest
>> DataShardTxOrder::ZigZag_oo [GOOD]
>> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD]
>> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-ordinaryuser [GOOD]
>> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-ordinaryuser
>> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-anonymous [GOOD]
>> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-ordinaryuser
------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::ZigZag_oo [GOOD]
Test command err:
2026-01-04T02:45:34.727035Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:34.728659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:34.728710Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:34.737922Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:34.738221Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:45:34.738471Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:34.782805Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:34.789308Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:34.789381Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:34.790704Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at
tablet: 9437184 2026-01-04T02:45:34.790754Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:45:34.790803Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:45:34.791115Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:34.792095Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:34.792150Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:45:34.850931Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:34.880971Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:45:34.881131Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:34.881239Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:45:34.881287Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:45:34.881344Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:45:34.881390Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:34.881590Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.881629Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.881900Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:45:34.881992Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:45:34.882061Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:34.882097Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:34.882130Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:45:34.882165Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:45:34.882227Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:45:34.882273Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:45:34.882314Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:34.882403Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.882435Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 
2026-01-04T02:45:34.882499Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:45:34.888790Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\004\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:45:34.888851Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:34.888946Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:45:34.889089Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:45:34.889128Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:45:34.889172Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:45:34.889232Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:45:34.889274Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:45:34.889315Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:45:34.889366Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:34.889595Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:34.889625Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:45:34.889657Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:45:34.889688Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:34.889744Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:45:34.889792Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:45:34.889843Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:45:34.889881Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:34.889916Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:34.901784Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:45:34.901867Z node 1 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:34.901898Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:34.901936Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:45:34.901996Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:34.902482Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.902543Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.902582Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:45:34.902715Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:45:34.902750Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:45:34.902887Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:34.902927Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:45:34.902958Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:45:34.903008Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:45:34.906626Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:45:34.906681Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:34.906903Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.906942Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.906994Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:34.907034Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:34.907068Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:34.907121Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:45:34.907154Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
ions 2026-01-04T02:45:51.955503Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:45:51.955708Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [6:350:2317], Recipient [6:350:2317]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:51.955737Z node 6 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:51.955791Z node 6 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437185 2026-01-04T02:45:51.955817Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:51.955837Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-01-04T02:45:51.955863Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000016:45] in PlanQueue unit at 9437185 2026-01-04T02:45:51.955885Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit PlanQueue 2026-01-04T02:45:51.955909Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:45:51.955929Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit PlanQueue 2026-01-04T02:45:51.955948Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit LoadTxDetails 2026-01-04T02:45:51.955969Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit LoadTxDetails 2026-01-04T02:45:51.956517Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 9437185 loaded tx from db 1000016:45 keys extracted: 2 2026-01-04T02:45:51.956555Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:45:51.956577Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadTxDetails 2026-01-04T02:45:51.956621Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit FinalizeDataTxPlan 2026-01-04T02:45:51.956650Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit FinalizeDataTxPlan 2026-01-04T02:45:51.956680Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:45:51.956700Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit FinalizeDataTxPlan 2026-01-04T02:45:51.956717Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BuildAndWaitDependencies 2026-01-04T02:45:51.956734Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BuildAndWaitDependencies 2026-01-04T02:45:51.956770Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:455: Operation [1000016:45] is the new logically complete end at 9437185 2026-01-04T02:45:51.956799Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:461: Operation [1000016:45] is the new logically incomplete end at 9437185 2026-01-04T02:45:51.956823Z 
node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [1000016:45] at 9437185 2026-01-04T02:45:51.956852Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:45:51.956867Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildAndWaitDependencies 2026-01-04T02:45:51.956882Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BuildDataTxOutRS 2026-01-04T02:45:51.956901Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BuildDataTxOutRS 2026-01-04T02:45:51.956934Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:45:51.956951Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BuildDataTxOutRS 2026-01-04T02:45:51.956967Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit StoreAndSendOutRS 2026-01-04T02:45:51.956985Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit StoreAndSendOutRS 2026-01-04T02:45:51.957003Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:45:51.957024Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit StoreAndSendOutRS 2026-01-04T02:45:51.957042Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit PrepareDataTxInRS 2026-01-04T02:45:51.957060Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit PrepareDataTxInRS 2026-01-04T02:45:51.957081Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:45:51.957098Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit PrepareDataTxInRS 2026-01-04T02:45:51.957114Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit LoadAndWaitInRS 2026-01-04T02:45:51.957130Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit LoadAndWaitInRS 2026-01-04T02:45:51.957149Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:45:51.957167Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit LoadAndWaitInRS 2026-01-04T02:45:51.957182Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit BlockFailPoint 2026-01-04T02:45:51.957198Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit BlockFailPoint 2026-01-04T02:45:51.957215Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:45:51.957231Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit BlockFailPoint 2026-01-04T02:45:51.957245Z node 6 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit ExecuteDataTx 2026-01-04T02:45:51.957262Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit ExecuteDataTx 2026-01-04T02:45:51.957528Z node 6 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:311: Executed operation [1000016:45] at tablet 9437185 with status COMPLETE 2026-01-04T02:45:51.957566Z node 6 :TX_DATASHARD TRACE: execute_data_tx_unit.cpp:317: Datashard execution counters for [1000016:45] at 9437185: {NSelectRow: 2, NSelectRange: 0, NUpdateRow: 0, NEraseRow: 0, SelectRowRows: 2, SelectRowBytes: 16, SelectRangeRows: 0, SelectRangeBytes: 0, UpdateRowBytes: 0, EraseRowBytes: 0, SelectRangeDeletedRowSkips: 0, InvisibleRowSkips: 0} 2026-01-04T02:45:51.957603Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:45:51.957622Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit ExecuteDataTx 2026-01-04T02:45:51.957641Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit CompleteOperation 2026-01-04T02:45:51.957658Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit CompleteOperation 2026-01-04T02:45:51.957820Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is DelayComplete 2026-01-04T02:45:51.957857Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit CompleteOperation 2026-01-04T02:45:51.957878Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000016:45] at 9437185 to execution unit CompletedOperations 2026-01-04T02:45:51.957902Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [1000016:45] at 9437185 on unit CompletedOperations 2026-01-04T02:45:51.957925Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [1000016:45] at 9437185 is Executed 2026-01-04T02:45:51.957943Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000016:45] at 9437185 executing on unit CompletedOperations 2026-01-04T02:45:51.957961Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [1000016:45] at 9437185 has finished 2026-01-04T02:45:51.957991Z node 6 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437185 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:51.958031Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437185 2026-01-04T02:45:51.958060Z node 6 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437185 has no attached operations 2026-01-04T02:45:51.958081Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 9437185 2026-01-04T02:45:51.971628Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437184 step# 1000016 txid# 45} 2026-01-04T02:45:51.971722Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437184 step# 1000016} 2026-01-04T02:45:51.971827Z node 6 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:51.971913Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete 
execution for [1000016:45] at 9437184 on unit CompleteOperation 2026-01-04T02:45:51.972000Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437184 at tablet 9437184 send result to client [6:103:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-04T02:45:51.972074Z node 6 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:51.972404Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:91: Sending '{TEvPlanStepAck TabletId# 9437185 step# 1000016 txid# 45} 2026-01-04T02:45:51.972446Z node 6 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 9437185 step# 1000016} 2026-01-04T02:45:51.972492Z node 6 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437185 2026-01-04T02:45:51.972524Z node 6 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000016:45] at 9437185 on unit CompleteOperation 2026-01-04T02:45:51.972575Z node 6 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000016 : 45] from 9437185 at tablet 9437185 send result to client [6:103:2137], exec latency: 0 ms, propose latency: 2 ms 2026-01-04T02:45:51.972636Z node 6 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437185 >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-system >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-clusteradmin |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt4Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestReadTableImmediateWriteBlock [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:44.684332Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:44.782654Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:44.783107Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:44.783161Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:45.022776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:45.022879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:45.085691Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494742425731 != 1767494742425734 2026-01-04T02:45:45.097835Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:45.139687Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:45.237601Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:45.507885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:45.521387Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:45.626067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:45.655765Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:45.656667Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:45.656942Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:45:45.657195Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:45.692591Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:45.693485Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:45.693610Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:45.695632Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:45.695717Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:45.695820Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:45.696270Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:45.696432Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:45.696551Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:45:45.707414Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:45.743578Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:45.743776Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:45.743882Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:45:45.743921Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:45.743950Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:45.743980Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:45.744116Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:45.744157Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:45.744495Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:45.744575Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:45.744655Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:45.744684Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:45.744729Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:45:45.744760Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:45:45.744789Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:45:45.744814Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:45.744841Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-04T02:45:45.744939Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:45.744967Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:45.745019Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:45.745347Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-04T02:45:45.745386Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:45.745476Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:45.745652Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:45:45.745696Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:45.745757Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:45.745790Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:45:45.745839Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:45:45.745867Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T02:45:45.745926Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:45:45.746187Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:45.746220Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-04T02:45:45.746252Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:45:45.746279Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-04T02:45:45.746320Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-04T02:45:45.746346Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:45:45.746372Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-04T02:45:45.746400Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-04T02:45:45.746419Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:45.747525Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-04T02:45:45.747567Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:45.758205Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:45.758321Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474 ... tabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1137:2834], trace_id# 2026-01-04T02:45:52.155266Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1134:2834] TxId: 281474976715662. Ctx: { TraceId: 01ke3eb5vce34bftg4q4kfq08z, Database: , SessionId: ydb://session/3?node_id=2&id=NGE1NDAxMjEtZDExZWYwNC04YzgxMDc3OC05YzBlZWQz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1137:2834], trace_id# 2026-01-04T02:45:52.155318Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2350} ActorId: [2:1134:2834] TxId: 281474976715662. Ctx: { TraceId: 01ke3eb5vce34bftg4q4kfq08z, Database: , SessionId: ydb://session/3?node_id=2&id=NGE1NDAxMjEtZDExZWYwNC04YzgxMDc3OC05YzBlZWQz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-04T02:45:52.156084Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1134:2834] TxId: 281474976715662. Ctx: { TraceId: 01ke3eb5vce34bftg4q4kfq08z, Database: , SessionId: ydb://session/3?node_id=2&id=NGE1NDAxMjEtZDExZWYwNC04YzgxMDc3OC05YzBlZWQz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1137:2834] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-04T02:45:52.156130Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:1134:2834] TxId: 281474976715662. Ctx: { TraceId: 01ke3eb5vce34bftg4q4kfq08z, Database: , SessionId: ydb://session/3?node_id=2&id=NGE1NDAxMjEtZDExZWYwNC04YzgxMDc3OC05YzBlZWQz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Waiting for: CA [2:1137:2834], trace_id# 2026-01-04T02:45:52.156171Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:1134:2834] TxId: 281474976715662. Ctx: { TraceId: 01ke3eb5vce34bftg4q4kfq08z, Database: , SessionId: ydb://session/3?node_id=2&id=NGE1NDAxMjEtZDExZWYwNC04YzgxMDc3OC05YzBlZWQz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:1137:2834], trace_id# 2026-01-04T02:45:52.156795Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1134:2834] TxId: 281474976715662. Ctx: { TraceId: 01ke3eb5vce34bftg4q4kfq08z, Database: , SessionId: ydb://session/3?node_id=2&id=NGE1NDAxMjEtZDExZWYwNC04YzgxMDc3OC05YzBlZWQz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1137:2834] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 636 Tasks { TaskId: 1 CpuTimeUs: 254 FinishTimeMs: 1767494752156 EgressBytes: 10 EgressRows: 1 ComputeCpuTimeUs: 15 BuildCpuTimeUs: 239 HostName: "ghrun-46vtxmsa7i" NodeId: 2 CreateTimeMs: 1767494752155 UpdateTimeMs: 1767494752156 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:45:52.156878Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01ke3eb5vce34bftg4q4kfq08z, Database: , SessionId: ydb://session/3?node_id=2&id=NGE1NDAxMjEtZDExZWYwNC04YzgxMDc3OC05YzBlZWQz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1137:2834] 2026-01-04T02:45:52.156932Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:220} ActorId: [2:1134:2834] TxId: 281474976715662. Ctx: { TraceId: 01ke3eb5vce34bftg4q4kfq08z, Database: , SessionId: ydb://session/3?node_id=2&id=NGE1NDAxMjEtZDExZWYwNC04YzgxMDc3OC05YzBlZWQz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [2:1133:2834] trace_id# 2026-01-04T02:45:52.156975Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [2:1134:2834] TxId: 281474976715662. Ctx: { TraceId: 01ke3eb5vce34bftg4q4kfq08z, Database: , SessionId: ydb://session/3?node_id=2&id=NGE1NDAxMjEtZDExZWYwNC04YzgxMDc3OC05YzBlZWQz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000636s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-04T02:45:52.157410Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [2:1140:2834], Recipient [2:892:2769]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2026-01-04T02:45:52.157460Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2026-01-04T02:45:52.157545Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [2:892:2769], Recipient [2:892:2769]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2026-01-04T02:45:52.157570Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-04T02:45:52.157628Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2026-01-04T02:45:52.157712Z node 2 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2026-01-04T02:45:52.157769Z node 2 :TX_DATASHARD TRACE: datashard_write_operation.cpp:256: Table /Root/table-1, shard: 72075186224037888, write point (Uint32 : 5) 2026-01-04T02:45:52.157822Z node 2 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint32 : 5) table: [72057594046644480:38:1] 2026-01-04T02:45:52.157889Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: 
Trying to execute [0:4] at 72075186224037888 on unit CheckWrite 2026-01-04T02:45:52.157924Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-04T02:45:52.157957Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CheckWrite 2026-01-04T02:45:52.157982Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:45:52.158006Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:45:52.158031Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-01-04T02:45:52.158071Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:4] at 72075186224037888 2026-01-04T02:45:52.158098Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-04T02:45:52.158123Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:45:52.158145Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit BlockFailPoint 2026-01-04T02:45:52.158160Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit BlockFailPoint 2026-01-04T02:45:52.158176Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-04T02:45:52.158189Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit BlockFailPoint 2026-01-04T02:45:52.158201Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit ExecuteWrite 2026-01-04T02:45:52.158214Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit ExecuteWrite 2026-01-04T02:45:52.158242Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2026-01-04T02:45:52.158275Z node 2 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-01-04T02:45:52.158359Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=1 2026-01-04T02:45:52.158392Z node 2 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-04T02:45:52.158444Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:45:52.158466Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit ExecuteWrite 2026-01-04T02:45:52.158492Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit FinishProposeWrite 
2026-01-04T02:45:52.158514Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T02:45:52.158584Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:52.158603Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-04T02:45:52.158630Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:4] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:45:52.158651Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:4] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:45:52.158693Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:4] at 72075186224037888 is Executed 2026-01-04T02:45:52.158706Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:4] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:45:52.158722Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:4] at 72075186224037888 has finished 2026-01-04T02:45:52.169269Z node 2 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-04T02:45:52.169336Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:4] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T02:45:52.169389Z node 2 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 4 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-04T02:45:52.169478Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.169964Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [2:1134:2834] TxId: 281474976715662. Ctx: { TraceId: 01ke3eb5vce34bftg4q4kfq08z, Database: , SessionId: ydb://session/3?node_id=2&id=NGE1NDAxMjEtZDExZWYwNC04YzgxMDc3OC05YzBlZWQz, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. 
Terminate execution trace_id# >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMicroSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds >> EraseRowsTests::ConditionalEraseRowsShouldNotErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors >> DistributedEraseTests::DistributedEraseTxShouldFailOnVariousErrors [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldErase >> EraseRowsTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> EraseRowsTests::EraseRowsFromReplicatedTable >> TConsoleTests::TestScaleRecommenderPoliciesValidation [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-ordinaryuser >> DistributedEraseTests::ConditionalEraseRowsShouldErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsCheckLimits >> DistributedEraseTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Seconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestLateKqpScanAfterColumnDrop+UseSink [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:45.246014Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:45.381676Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:45.382161Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:45.382240Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:45.679509Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:45.679592Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:45.726027Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494743079346 != 1767494743079349 2026-01-04T02:45:45.736992Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:45.777176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:45.854366Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:46.126500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:46.139702Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:46.242910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:46.277515Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:46.278627Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:46.278913Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-04T02:45:46.279128Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:46.325087Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:46.325971Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:46.326136Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:46.327849Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:46.327935Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:46.327989Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:46.328356Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:46.328482Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:46.328572Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:45:46.339258Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:46.370513Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:46.370674Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:46.370761Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:45:46.370803Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:46.370832Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:46.370857Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:46.370959Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:46.370989Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:46.371231Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:46.371308Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:46.371370Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:46.371433Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:46.371464Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:45:46.371488Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:45:46.371511Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:45:46.371536Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:46.371572Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-04T02:45:46.371656Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:46.371678Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:46.371705Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:46.372018Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-04T02:45:46.372053Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:46.372136Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:46.372318Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:45:46.372348Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:46.372409Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:46.372438Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:45:46.372465Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:45:46.372488Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T02:45:46.372511Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:45:46.372722Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:46.372755Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-04T02:45:46.372790Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:45:46.372820Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-04T02:45:46.372886Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-04T02:45:46.372911Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:45:46.372931Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-04T02:45:46.372951Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-04T02:45:46.372967Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:46.374030Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:886:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-04T02:45:46.374062Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:46.384705Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:46.384774Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474 ... ode 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715662, task: 1. pass away 2026-01-04T02:45:53.331615Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=1;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-04T02:45:53.333612Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-04T02:45:53.334074Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:1085:2877] TxId: 281474976715662. Ctx: { TraceId: 01ke3eb5vg0zk3ra1xrcxycv3v, Database: , SessionId: ydb://session/3?node_id=2&id=ZDVkNzVkNTUtMzczMGY0MGYtOWEwOGNmYzYtZTg1ZTEyNzk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:1090:2903] TaskId# 1 State# COMPUTE_STATE_FAILURE Stats# {CpuTimeUs: 196441 Tasks { TaskId: 1 CpuTimeUs: 194379 Tables { TablePath: "/Root/table-1" } ComputeCpuTimeUs: 9 BuildCpuTimeUs: 194370 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-46vtxmsa7i" NodeId: 2 CreateTimeMs: 1767494752690 CurrentWaitInputTimeUs: 124926 UpdateTimeMs: 1767494753331 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:45:53.334178Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976715662. Ctx: { TraceId: 01ke3eb5vg0zk3ra1xrcxycv3v, Database: , SessionId: ydb://session/3?node_id=2&id=ZDVkNzVkNTUtMzczMGY0MGYtOWEwOGNmYzYtZTg1ZTEyNzk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Compute actor has finished execution: [2:1090:2903] 2026-01-04T02:45:53.335079Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1210} ActorId: [2:1085:2877] TxId: 281474976715662. Ctx: { TraceId: 01ke3eb5vg0zk3ra1xrcxycv3v, Database: , SessionId: ydb://session/3?node_id=2&id=ZDVkNzVkNTUtMzczMGY0MGYtOWEwOGNmYzYtZTg1ZTEyNzk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Task does not have the CA id yet or is already complete TaskId# 1 trace_id# 2026-01-04T02:45:53.335183Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1203} ActorId: [2:1085:2877] TxId: 281474976715662. Ctx: { TraceId: 01ke3eb5vg0zk3ra1xrcxycv3v, Database: , SessionId: ydb://session/3?node_id=2&id=ZDVkNzVkNTUtMzczMGY0MGYtOWEwOGNmYzYtZTg1ZTEyNzk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Aborting compute actor execution Issues# {
: Error: Terminate execution } ComputeActor# [2:1091:2904] TaskId# 2 trace_id# 2026-01-04T02:45:53.335266Z node 2 :KQP_EXECUTER INFO: {KQPEX@kqp_executer_impl.h:1203} ActorId: [2:1085:2877] TxId: 281474976715662. Ctx: { TraceId: 01ke3eb5vg0zk3ra1xrcxycv3v, Database: , SessionId: ydb://session/3?node_id=2&id=ZDVkNzVkNTUtMzczMGY0MGYtOWEwOGNmYzYtZTg1ZTEyNzk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Aborting compute actor execution Issues# {
: Error: Terminate execution } ComputeActor# [2:1092:2905] TaskId# 3 trace_id# 2026-01-04T02:45:53.335446Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [2:1085:2877] TxId: 281474976715662. Ctx: { TraceId: 01ke3eb5vg0zk3ra1xrcxycv3v, Database: , SessionId: ydb://session/3?node_id=2&id=ZDVkNzVkNTUtMzczMGY0MGYtOWEwOGNmYzYtZTg1ZTEyNzk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-04T02:45:53.335541Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:1091:2904], TxId: 281474976715662, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3eb5vg0zk3ra1xrcxycv3v. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=ZDVkNzVkNTUtMzczMGY0MGYtOWEwOGNmYzYtZTg1ZTEyNzk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. CA StateFunc 271646735 2026-01-04T02:45:53.335612Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [2:1091:2904], TxId: 281474976715662, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3eb5vg0zk3ra1xrcxycv3v. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=2&id=ZDVkNzVkNTUtMzczMGY0MGYtOWEwOGNmYzYtZTg1ZTEyNzk=. CurrentExecutionId : . DatabaseId : /Root. Database : . }. Handle abort execution event from: [2:1085:2877], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2026-01-04T02:45:53.335713Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715662, task: 2. pass away 2026-01-04T02:45:53.335840Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=2;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-04T02:45:53.337176Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-04T02:45:53.337300Z node 2 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [2:1092:2905], TxId: 281474976715662, task: 3. Ctx: { CheckpointId : . TraceId : 01ke3eb5vg0zk3ra1xrcxycv3v. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=ZDVkNzVkNTUtMzczMGY0MGYtOWEwOGNmYzYtZTg1ZTEyNzk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. CA StateFunc 271646735 2026-01-04T02:45:53.337353Z node 2 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [2:1092:2905], TxId: 281474976715662, task: 3. Ctx: { CheckpointId : . TraceId : 01ke3eb5vg0zk3ra1xrcxycv3v. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=2&id=ZDVkNzVkNTUtMzczMGY0MGYtOWEwOGNmYzYtZTg1ZTEyNzk=. CurrentExecutionId : . Database : . DatabaseId : /Root. }. Handle abort execution event from: [2:1085:2877], status: SCHEME_ERROR, reason: {
: Error: Terminate execution } 2026-01-04T02:45:53.337420Z node 2 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976715662, task: 3. pass away 2026-01-04T02:45:53.337474Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976715662;task_id=3;success=0;message={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-04T02:45:53.340086Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976715662, taskId: 3. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-04T02:45:53.340248Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1093:2906];scan_id=1;tx_id=281474976715662;fline=kqp_scan_fetcher_actor.cpp:106;event=TEvTerminateFromCompute;sender=[2:1090:2903];info={
: Error: COMPUTE_STATE_FAILURE }; 2026-01-04T02:45:53.340325Z node 2 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[2:1093:2906];scan_id=1;tx_id=281474976715662;fline=kqp_scan_compute_manager.h:321;event=abort_all_scanners;error_message=Send abort execution from compute actor, message: {
: Error: COMPUTE_STATE_FAILURE }; 2026-01-04T02:45:53.340710Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=ZDVkNzVkNTUtMzczMGY0MGYtOWEwOGNmYzYtZTg1ZTEyNzk=, ActorId: [2:1059:2877], ActorState: ExecuteState, LegacyTraceId: 01ke3eb5vg0zk3ra1xrcxycv3v, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 }{ message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } trace_id# 2026-01-04T02:45:53.341067Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [2:68:2115] Handle TEvExecuteKqpTransaction 2026-01-04T02:45:53.341117Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [2:68:2115] TxId# 281474976715664 ProcessProposeKqpTransaction 2026-01-04T02:45:53.341594Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 3000, txId: 281474976715661] shutting down 2026-01-04T02:45:53.341671Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [2:68:2115] Handle TEvProposeTransaction 2026-01-04T02:45:53.341717Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [2:68:2115] TxId# 0 ProcessProposeTransaction 2026-01-04T02:45:53.341856Z node 2 :TX_PROXY DEBUG: proxy_impl.cpp:288: actor# [2:68:2115] Cookie# 0 userReqId# "" txid# 0 reqId# [2:1129:2937] SnapshotReq marker# P0 2026-01-04T02:45:53.342241Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:152: Actor# [2:1131:2937] txid# 0 HANDLE EvNavigateKeySetResult TResolveTablesActor marker# P1 ErrorCount# 0 2026-01-04T02:45:53.342458Z node 2 :TX_PROXY DEBUG: resolvereq.cpp:272: Actor# [2:1131:2937] txid# 0 HANDLE EvResolveKeySetResult TResolveTablesActor marker# P2 ErrorCount# 0 2026-01-04T02:45:53.342544Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976715664. Resolved key sets: 0 2026-01-04T02:45:53.342658Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976715664. Ctx: { TraceId: 01ke3eb5vg0zk3ra1xrcxycv3v, Database: , SessionId: ydb://session/3?node_id=2&id=ZDVkNzVkNTUtMzczMGY0MGYtOWEwOGNmYzYtZTg1ZTEyNzk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {0, 0} 2026-01-04T02:45:53.342731Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [2:1128:2877] TxId: 281474976715664. Ctx: { TraceId: 01ke3eb5vg0zk3ra1xrcxycv3v, Database: , SessionId: ydb://session/3?node_id=2&id=ZDVkNzVkNTUtMzczMGY0MGYtOWEwOGNmYzYtZTg1ZTEyNzk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Total tasks total_tasks# 0 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-04T02:45:53.342859Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [2:1128:2877] TxId: 281474976715664. Ctx: { TraceId: 01ke3eb5vg0zk3ra1xrcxycv3v, Database: , SessionId: ydb://session/3?node_id=2&id=ZDVkNzVkNTUtMzczMGY0MGYtOWEwOGNmYzYtZTg1ZTEyNzk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-04T02:45:53.342922Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [2:1128:2877] TxId: 281474976715664. 
Ctx: { TraceId: 01ke3eb5vg0zk3ra1xrcxycv3v, Database: , SessionId: ydb://session/3?node_id=2&id=ZDVkNzVkNTUtMzczMGY0MGYtOWEwOGNmYzYtZTg1ZTEyNzk=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-04T02:45:53.343995Z node 2 :TX_PROXY DEBUG: snapshotreq.cpp:1451: Actor# [2:1129:2937] SEND TEvDiscardVolatileSnapshotRequest to datashard 72075186224037888 marker# P3 2026-01-04T02:45:53.344278Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:926: Forwarded response to sender actor, requestId: 3, sender: [2:830:2725], selfId: [2:66:2113], source: [2:1059:2877] 2026-01-04T02:45:53.344573Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1129:2937], Recipient [2:886:2766]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2026-01-04T02:45:53.345537Z node 2 :KQP_PROXY DEBUG: kqp_proxy_service.cpp:1252: Session closed, sessionId: ydb://session/3?node_id=2&id=ZDVkNzVkNTUtMzczMGY0MGYtOWEwOGNmYzYtZTg1ZTEyNzk=, workerId: [2:1059:2877], local sessions count: 0 Response { QueryIssues { message: "Table \'/Root/table-1\' scheme changed." issue_code: 2028 severity: 1 } QueryIssues { message: "Query invalidated on scheme/internal error during Scan execution" issue_code: 2019 severity: 1 } TxMeta { } } YdbStatus: ABORTED ConsumedRu: 440 >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> TConsoleTests::TestScaleRecommenderPoliciesValidation [GOOD] Test command err: 2026-01-04T02:44:05.768044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:05.768109Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:06.005497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:07.033079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:1, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:44:07.178076Z node 6 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:44:07.178487Z node 6 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpodByXX/pdisk_1.dat": unknown reason, errno# 0. 
PDiskId# 1000 2026-01-04T02:44:07.179016Z node 6 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpodByXX/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpodByXX/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 8770803585739797980 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:44:07.294292Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:44:07.294705Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpodByXX/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:44:07.294890Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpodByXX/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpodByXX/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7439842512800093094 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:44:07.335518Z node 7 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:44:07.336054Z node 7 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpodByXX/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:44:07.336307Z node 7 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpodByXX/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpodByXX/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 4259648450806622490 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:44:07.366289Z node 9 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:44:07.366888Z node 9 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpodByXX/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:44:07.367133Z node 9 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpodByXX/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpodByXX/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 9504960796042937067 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:44:07.398359Z node 8 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:44:07.399059Z node 8 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpodByXX/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:44:07.399294Z node 8 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpodByXX/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpodByXX/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10759242364371073398 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 Drive ... isk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpuzOAem/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 10056308525851510415 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:45:47.229255Z node 152 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:45:47.229979Z node 152 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpuzOAem/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:45:47.230219Z node 152 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpuzOAem/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpuzOAem/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 15785431694805596871 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:45:47.285722Z node 148 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:45:47.286326Z node 148 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpuzOAem/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:45:47.286522Z node 148 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpuzOAem/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpuzOAem/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 17617300506919759744 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:45:47.317867Z node 146 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:45:47.318399Z node 146 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpuzOAem/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:45:47.318606Z node 146 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpuzOAem/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpuzOAem/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 14950774711063721093 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:45:47.372041Z node 147 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:45:47.372527Z node 147 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpuzOAem/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:45:47.372691Z node 147 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpuzOAem/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001f45/r3tmp/tmpuzOAem/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 2066679750011509266 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:45:47.719267Z node 145 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:47.719375Z node 145 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:47.788167Z node 145 :STATISTICS WARN: tx_init.cpp:298: [72075186233409554] TTxInit::Complete. 
EnableColumnStatistics=false 2026-01-04T02:45:51.197435Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:51.197551Z node 154 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:51.318498Z node 154 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> DataShardWrite::DistributedInsertReadSetWithoutLocks-Volatile [GOOD] >> DataShardWrite::DistributedInsertWithoutLocks+Volatile >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-dbadmin >> EraseRowsTests::EraseRowsShouldSuccess [GOOD] >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldNotErase [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-system >> AsyncIndexChangeExchange::SenderShouldShakeHandsOnce [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::DropTableNavigateError >> KqpLimits::OutOfSpaceYQLUpsertFail [GOOD] >> KqpLimits::ManyPartitionsSortingLimit |93.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeServerless >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] |93.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest 
|93.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] >> AnalyzeColumnshard::AnalyzeRebootColumnShard >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Microseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDate32 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 >> TKeyValueTest::TestConcatWorksNewApi [GOOD] >> TKeyValueTest::TestConcatToLongKey >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberMilliSeconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds >> DataShardWrite::UncommittedUpdateLockDeletedRowAboveSnapshot [GOOD] >> DataShardWrite::UncommittedUpdateLockUncommittedNewRow >> DataShardVolatile::CompactedVolatileChangesCommit [GOOD] >> DataShardVolatile::CompactedVolatileChangesAbort >> EraseRowsTests::ConditionalEraseRowsShouldErase [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgInt8Milliseconds [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsFromReplicatedTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:51.775157Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:51.911615Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:51.912094Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:51.912148Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:52.137924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:52.138023Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:52.196454Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494748823571 != 1767494748823574 2026-01-04T02:45:52.208629Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:52.251487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:52.352378Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:52.639042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:52.652548Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:52.753692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:52.781720Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:45:52.781950Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:52.813832Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:52.813933Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:52.815306Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:52.815366Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:52.815417Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:52.815694Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:52.815818Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:52.815904Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:45:52.826493Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:52.848674Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:52.848832Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:52.848921Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:45:52.848950Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:52.848979Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:52.849004Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.849375Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:52.849467Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:52.849536Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.849590Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:52.849637Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:52.849670Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.849750Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:52.850122Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:52.850352Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:52.850430Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:52.851626Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:52.862341Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:52.862463Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:52.979156Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-04T02:45:52.983868Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:45:52.983944Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.984313Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.984359Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:52.984423Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:45:52.984694Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:45:52.984871Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:52.985101Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.985177Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:45:52.987244Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:45:52.987668Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:52.990101Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:45:52.990153Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.990978Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:45:52.991050Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.992302Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.992345Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:52.992401Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:45:52.992457Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:45:52.992513Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:45:52.992643Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.997175Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:52.998792Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:45:52.998862Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:45:52.999780Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:45:53.009493Z node 1 :KQP_WORKLOAD_SERV ... 6-01-04T02:45:56.128053Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:56.128149Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:56.290584Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:56.290667Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:56.307869Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767494753892999 != 1767494753893003 2026-01-04T02:45:56.312065Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:56.353643Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:56.433887Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:56.704720Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:56.716984Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:56.819078Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:56.840252Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:886:2765] 2026-01-04T02:45:56.840461Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:56.887841Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:56.887985Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:56.889743Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:56.889847Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:56.889908Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:56.890224Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:56.890358Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:56.890436Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:901:2765] in generation 1 2026-01-04T02:45:56.901122Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:56.901205Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:56.901313Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:56.901422Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:903:2775] 2026-01-04T02:45:56.901463Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:56.901621Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:56.901663Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:56.902065Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:56.902163Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 
2026-01-04T02:45:56.902220Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:56.902262Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:56.902304Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:56.902348Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:56.902432Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:882:2762], serverId# [2:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:56.902574Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:56.902789Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:56.902869Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:56.904762Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:56.915282Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:56.915369Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:57.031063Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:917:2783], serverId# [2:919:2785], sessionId# [0:0:0] 2026-01-04T02:45:57.032021Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:45:57.032078Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:57.032813Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:57.032869Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:57.032916Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:45:57.033167Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:45:57.033296Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:57.033621Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:57.033685Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 
72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:45:57.034112Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:45:57.034488Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:57.036261Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:45:57.036312Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:57.036652Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:45:57.036724Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:57.037917Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:57.038199Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:57.038243Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:57.038287Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:45:57.038345Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:45:57.038396Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:45:57.038474Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:57.040550Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:45:57.040722Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:45:57.040769Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:45:57.044631Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:953:2811], serverId# [2:954:2812], sessionId# [0:0:0] 2026-01-04T02:45:57.044709Z node 2 :TX_DATASHARD NOTICE: datashard__op_rows.cpp:209: Rejecting erase request on datashard: tablet# 72075186224037888, error# Can't execute erase at replicated table 2026-01-04T02:45:57.044818Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:953:2811], serverId# [2:954:2812], sessionId# [0:0:0] |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64NanoSeconds [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:51.405088Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:51.532078Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:51.532516Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:51.532581Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:51.745075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:51.745199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:51.818425Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494748823595 != 1767494748823598 2026-01-04T02:45:51.834263Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:51.878266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:51.981750Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:52.257484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:52.270783Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:52.372355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:52.406860Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:45:52.407108Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:52.455772Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:52.455880Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:52.457511Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:52.457586Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:52.457646Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:52.458012Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:52.458130Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:52.458214Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:45:52.468836Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:52.511560Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:52.511831Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:52.511928Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:45:52.511969Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:52.512002Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:52.512035Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.512425Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:52.512514Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:52.512589Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.512668Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:52.512730Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:52.512782Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.512900Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:52.513358Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:52.513637Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:52.513718Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:52.515467Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:52.526132Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:52.526251Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:52.642094Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-04T02:45:52.651114Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:45:52.651187Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.651495Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.651533Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:52.651576Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:45:52.651786Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:45:52.651898Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:52.652065Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.652121Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:45:52.653487Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:45:52.653827Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:52.656030Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:45:52.656079Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.656846Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:45:52.656914Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.658049Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.658090Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:52.658129Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:45:52.658171Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:45:52.658208Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:45:52.658319Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.661461Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:52.662693Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:45:52.662741Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:45:52.663355Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:45:52.670312Z node 1 :KQP_WORKLOAD_SERV ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:56.713408Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:45:56.713630Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:45:56.713751Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:56.714067Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:56.714130Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:45:56.714512Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:45:56.714846Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:56.716567Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:45:56.716615Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:56.716962Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:45:56.717026Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:56.718151Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:56.718379Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:56.718415Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:56.718460Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:45:56.718517Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: 
Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:45:56.718563Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:45:56.718627Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:56.720352Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:45:56.720496Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:45:56.720547Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:45:56.728281Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:56.728378Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:56.728442Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:56.729249Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:56.729405Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:56.733113Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:45:56.738388Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:56.852028Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:56.854628Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:45:56.888430Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:45:56.961670Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1054:2873], serverId# [2:1055:2874], sessionId# [0:0:0] 2026-01-04T02:45:56.961986Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-04T02:45:56.962094Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=4 2026-01-04T02:45:56.972718Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:56.975487Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1062:2880], serverId# [2:1063:2881], sessionId# [0:0:0] 2026-01-04T02:45:56.976074Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-04T02:45:56.987005Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-04T02:45:56.987074Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:56.987310Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-04T02:45:56.987355Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-04T02:45:56.987602Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:56.987655Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:56.987708Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:56.987797Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:56.987904Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:1062:2880], serverId# [2:1063:2881], sessionId# [0:0:0] 2026-01-04T02:45:56.988738Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:56.989078Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:56.989278Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:56.989324Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:45:56.989368Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-01-04T02:45:56.989563Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:45:56.989632Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:56.990182Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-01-04T02:45:56.990425Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-04T02:45:56.990610Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-01-04T02:45:56.990669Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-01-04T02:45:57.017938Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-04T02:45:57.018005Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2026-01-04T02:45:57.018152Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:57.018190Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:45:57.018228Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-01-04T02:45:57.018356Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:57.018414Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:57.018471Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnUint64MicroSeconds [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:51.517291Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:51.606307Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:51.606653Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:51.606701Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:51.813669Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:51.813759Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:51.877376Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494748823606 != 1767494748823609 2026-01-04T02:45:51.893185Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:51.933849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:52.012994Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:52.290461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:52.303186Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:52.403965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:52.434114Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-04T02:45:52.434275Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:52.466400Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:52.466535Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:52.467736Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:52.467807Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:52.467844Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:52.468098Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:52.468196Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:52.468260Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:45:52.478790Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:52.499013Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:52.499160Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:52.499232Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:45:52.499258Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:52.499282Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:52.499305Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.499628Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:52.499695Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:52.499765Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.499811Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:52.499846Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:52.499874Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.499946Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:52.500266Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:52.500481Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:52.500542Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:52.501686Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:52.512310Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:52.512414Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:52.627889Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-04T02:45:52.636580Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:45:52.636660Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.637057Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.637106Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:52.637176Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:45:52.637455Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:45:52.637594Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:52.637799Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.637890Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:45:52.639706Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:45:52.640154Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:52.642286Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:45:52.642324Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.642928Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:45:52.642978Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.643850Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.643888Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:52.643940Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:45:52.643982Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:45:52.644023Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:45:52.644093Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.647987Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:52.649558Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:45:52.649633Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:45:52.650432Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:45:52.658758Z node 1 :KQP_WORKLOAD_SERV ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:56.784024Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:45:56.784286Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:45:56.784462Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:56.784825Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:56.784890Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:45:56.785329Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:45:56.785707Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:56.787528Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:45:56.787582Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:56.787926Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:45:56.787987Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:56.789203Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:56.789481Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:56.789517Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:56.789565Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:45:56.789625Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: 
Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:45:56.789667Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:45:56.789735Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:56.791721Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:45:56.791899Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:45:56.791953Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:45:56.800785Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:56.800912Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:56.800997Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:56.801994Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:56.802215Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:56.806504Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:45:56.813129Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:56.926400Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:56.928340Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:45:56.961589Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:45:57.017361Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1054:2873], serverId# [2:1055:2874], sessionId# [0:0:0] 2026-01-04T02:45:57.017657Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-04T02:45:57.017761Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=4 2026-01-04T02:45:57.028454Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:57.031566Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1062:2880], serverId# [2:1063:2881], sessionId# [0:0:0] 2026-01-04T02:45:57.032156Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-04T02:45:57.043076Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-04T02:45:57.043134Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:57.043296Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-04T02:45:57.043324Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-04T02:45:57.043518Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:57.043559Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:57.043593Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:57.043635Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:57.043695Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:1062:2880], serverId# [2:1063:2881], sessionId# [0:0:0] 2026-01-04T02:45:57.044322Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:57.044573Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:57.044694Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:57.044724Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:45:57.044754Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-01-04T02:45:57.044909Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:45:57.044948Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:57.045320Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-01-04T02:45:57.045492Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-04T02:45:57.045623Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-01-04T02:45:57.045659Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-01-04T02:45:57.047107Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-04T02:45:57.047147Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2026-01-04T02:45:57.047250Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:57.047272Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:45:57.047297Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-01-04T02:45:57.047374Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:57.047407Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:57.047438Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-ordinaryuser >> TKeyValueTest::TestGetStatusWorks [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-dbadmin >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSchemeTx [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldSuccessOnShardedIndex [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> TraverseDatashard::TraverseOneTableServerless >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-system [GOOD] ------- 
[TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestGetStatusWorks [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! 
new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! 
new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... 1:2057] recipient: [28:90:2119] Leader for TabletID 72057594037927937 is [28:92:2120] sender: [28:93:2057] recipient: [28:90:2119] !Reboot 72057594037927937 (actor [28:58:2099]) rebooted! !Reboot 72057594037927937 (actor [28:58:2099]) tablet resolver refreshed! new actor is[28:92:2120] Leader for TabletID 72057594037927937 is [28:92:2120] sender: [28:208:2057] recipient: [28:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [29:56:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:59:2057] recipient: [29:53:2097] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:76:2057] recipient: [29:14:2061] !Reboot 72057594037927937 (actor [29:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:88:2057] recipient: [29:39:2086] Leader for TabletID 72057594037927937 is [29:58:2099] sender: [29:91:2057] recipient: [29:90:2119] Leader for TabletID 72057594037927937 is [29:92:2120] sender: [29:93:2057] recipient: [29:90:2119] !Reboot 72057594037927937 (actor [29:58:2099]) rebooted! !Reboot 72057594037927937 (actor [29:58:2099]) tablet resolver refreshed! new actor is[29:92:2120] Leader for TabletID 72057594037927937 is [29:92:2120] sender: [29:208:2057] recipient: [29:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [30:56:2057] recipient: [30:52:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:59:2057] recipient: [30:52:2097] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:76:2057] recipient: [30:14:2061] !Reboot 72057594037927937 (actor [30:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:89:2057] recipient: [30:39:2086] Leader for TabletID 72057594037927937 is [30:58:2099] sender: [30:92:2057] recipient: [30:91:2119] Leader for TabletID 72057594037927937 is [30:93:2120] sender: [30:94:2057] recipient: [30:91:2119] !Reboot 72057594037927937 (actor [30:58:2099]) rebooted! !Reboot 72057594037927937 (actor [30:58:2099]) tablet resolver refreshed! new actor is[30:93:2120] Leader for TabletID 72057594037927937 is [30:93:2120] sender: [30:209:2057] recipient: [30:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [31:56:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:59:2057] recipient: [31:53:2097] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:76:2057] recipient: [31:14:2061] !Reboot 72057594037927937 (actor [31:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:92:2057] recipient: [31:39:2086] Leader for TabletID 72057594037927937 is [31:58:2099] sender: [31:95:2057] recipient: [31:94:2122] Leader for TabletID 72057594037927937 is [31:96:2123] sender: [31:97:2057] recipient: [31:94:2122] !Reboot 72057594037927937 (actor [31:58:2099]) rebooted! !Reboot 72057594037927937 (actor [31:58:2099]) tablet resolver refreshed! new actor is[31:96:2123] Leader for TabletID 72057594037927937 is [31:96:2123] sender: [31:212:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:92:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:95:2057] recipient: [32:94:2122] Leader for TabletID 72057594037927937 is [32:96:2123] sender: [32:97:2057] recipient: [32:94:2122] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! 
new actor is[32:96:2123] Leader for TabletID 72057594037927937 is [32:96:2123] sender: [32:212:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:54:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:54:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:78:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:81:2057] recipient: [35:80:2112] Leader for TabletID 72057594037927937 is [35:82:2113] sender: [35:83:2057] recipient: [35:80:2112] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:82:2113] Leader for TabletID 72057594037927937 is [35:82:2113] sender: [35:198:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:78:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:81:2057] recipient: [36:80:2112] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:83:2057] recipient: [36:80:2112] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:82:2113] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:198:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:79:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:82:2057] recipient: [37:81:2112] Leader for TabletID 72057594037927937 is [37:83:2113] sender: [37:84:2057] recipient: [37:81:2112] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! new actor is[37:83:2113] Leader for TabletID 72057594037927937 is [37:83:2113] sender: [37:199:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:81:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:84:2057] recipient: [38:83:2114] Leader for TabletID 72057594037927937 is [38:85:2115] sender: [38:86:2057] recipient: [38:83:2114] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! new actor is[38:85:2115] Leader for TabletID 72057594037927937 is [38:85:2115] sender: [38:201:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:81:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:84:2057] recipient: [39:83:2114] Leader for TabletID 72057594037927937 is [39:85:2115] sender: [39:86:2057] recipient: [39:83:2114] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:85:2115] Leader for TabletID 72057594037927937 is [39:85:2115] sender: [39:201:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:82:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:85:2057] recipient: [40:84:2114] Leader for TabletID 72057594037927937 is [40:86:2115] sender: [40:87:2057] recipient: [40:84:2114] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! 
new actor is[40:86:2115] Leader for TabletID 72057594037927937 is [40:86:2115] sender: [40:202:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnTimestamp64 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:52.828537Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:52.909586Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:52.909919Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:52.909957Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:53.105666Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:53.105784Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:53.186505Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494750393986 != 1767494750393989 2026-01-04T02:45:53.198948Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:53.244321Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:53.321835Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:53.610757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:53.624596Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:53.725929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:53.753567Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-04T02:45:53.753739Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:53.784909Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:53.785008Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:53.786277Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:53.786334Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:53.786370Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:53.786621Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:53.786725Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:53.786800Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:45:53.797348Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:53.822756Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:53.822912Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:53.823055Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:45:53.823098Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:53.823131Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:53.823163Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:53.823552Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:53.823643Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:53.823706Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:53.823769Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:53.823810Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:53.823841Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:53.823921Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:53.824230Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:53.824527Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:53.824619Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:53.825902Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:53.836459Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:53.836578Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:53.952609Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-04T02:45:53.957070Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:45:53.957139Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:53.957479Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:53.957533Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:53.957602Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:45:53.957902Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:45:53.958046Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:53.958244Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:53.958327Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:45:53.960309Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:45:53.960685Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:53.963040Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:45:53.963087Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:53.963966Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:45:53.964037Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:53.964864Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:53.964902Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:53.964955Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:45:53.964996Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:45:53.965028Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:45:53.965091Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:53.968108Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:53.969210Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:45:53.969254Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:45:53.969844Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:45:53.976557Z node 1 :KQP_WORKLOAD_SERV ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:58.089708Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:45:58.089924Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:45:58.090044Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:58.090344Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:58.090393Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:45:58.090806Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:45:58.091117Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:58.092691Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:45:58.092743Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:58.093051Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:45:58.093134Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:58.094340Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:58.094568Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:58.094603Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:58.094645Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:45:58.094705Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: 
Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:45:58.094748Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:45:58.094815Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:58.096725Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:45:58.096874Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:45:58.096936Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:45:58.104685Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:58.104780Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:58.104846Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:58.105613Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:967:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:58.105782Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:58.109164Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:45:58.114727Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:58.246168Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:58.248325Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:965:2817], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:45:58.282357Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:45:58.398219Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1054:2873], serverId# [2:1055:2874], sessionId# [0:0:0] 2026-01-04T02:45:58.398694Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-04T02:45:58.398895Z node 2 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-01-04T02:45:58.409860Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:58.414337Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:1062:2880], serverId# [2:1063:2881], sessionId# [0:0:0] 2026-01-04T02:45:58.415259Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-04T02:45:58.426689Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-04T02:45:58.426785Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:58.427072Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-04T02:45:58.427131Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-04T02:45:58.427449Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:58.427511Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:58.427571Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:58.427639Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:58.427829Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:1062:2880], serverId# [2:1063:2881], sessionId# [0:0:0] 2026-01-04T02:45:58.428846Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:58.429232Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:58.429449Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:58.429499Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:45:58.429553Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-01-04T02:45:58.429828Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:45:58.429902Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:58.430549Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-01-04T02:45:58.430808Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-04T02:45:58.431022Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-01-04T02:45:58.431084Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-01-04T02:45:58.433067Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-04T02:45:58.433127Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2026-01-04T02:45:58.433288Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:58.433327Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:45:58.433369Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-01-04T02:45:58.433494Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:58.433550Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:58.433598Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::EraseRowsShouldFailOnVariousErrors [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:53.089572Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:53.172930Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:53.173244Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:53.173284Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:53.392065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:53.392172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:53.465596Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494750812816 != 1767494750812819 2026-01-04T02:45:53.477440Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:53.518938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:53.597728Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:53.888274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:53.902461Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:54.006401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:54.052943Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-04T02:45:54.053163Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:54.084832Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:54.084938Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:54.086387Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:54.086453Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:54.086508Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:54.086780Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:54.086885Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:54.086975Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:45:54.097626Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:54.126998Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:54.127174Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:54.127266Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:45:54.127298Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:54.127324Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:54.127352Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:54.127770Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:54.127855Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:54.127928Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:54.127979Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:54.128035Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:54.128067Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:54.128156Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:54.128550Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:54.128799Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:54.128879Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:54.130205Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:54.140902Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:54.141024Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:54.257637Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-04T02:45:54.262456Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:45:54.262547Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:54.262945Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:54.262998Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:54.263072Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:45:54.263381Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:45:54.263542Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:54.263774Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:54.263859Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:45:54.265942Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:45:54.266378Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:54.268870Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:45:54.268926Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:54.269754Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:45:54.269846Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:54.271114Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:54.271158Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:54.271229Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:45:54.271305Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:45:54.271356Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:45:54.271464Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:54.276431Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:54.278119Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:45:54.278200Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:45:54.279092Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:45:54.289241Z node 1 :KQP_WORKLOAD_SERV ... hard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:58.558630Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:58.558662Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:58.558985Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:58.559072Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:58.559116Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:58.559151Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:58.559184Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:58.559229Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:58.559295Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:882:2762], serverId# [2:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:58.559402Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:58.559604Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:58.559668Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:58.561253Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:58.572011Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:58.572143Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:58.689340Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:917:2783], 
serverId# [2:919:2785], sessionId# [0:0:0] 2026-01-04T02:45:58.690311Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:45:58.690360Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:58.691055Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:58.691097Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:58.691132Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:45:58.691328Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:45:58.691437Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:58.691703Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:58.691770Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:45:58.692097Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:45:58.692433Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:58.693871Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:45:58.693918Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:58.694191Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:45:58.694243Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:58.695226Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:58.695444Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:58.695487Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:58.695523Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:45:58.695578Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [2:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:45:58.695617Z 
node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:45:58.695695Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:58.697160Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:45:58.697281Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:45:58.697342Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:45:58.702455Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:953:2811], serverId# [2:954:2812], sessionId# [0:0:0] 2026-01-04T02:45:58.702594Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-04T02:45:58.713501Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-04T02:45:58.713593Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:58.713983Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:953:2811], serverId# [2:954:2812], sessionId# [0:0:0] 2026-01-04T02:45:58.716112Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:959:2817], serverId# [2:960:2818], sessionId# [0:0:0] 2026-01-04T02:45:58.716287Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-04T02:45:58.716500Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-04T02:45:58.716555Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:58.716796Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:959:2817], serverId# [2:960:2818], sessionId# [0:0:0] 2026-01-04T02:45:58.718731Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:964:2822], serverId# [2:965:2823], sessionId# [0:0:0] 2026-01-04T02:45:58.718893Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-04T02:45:58.719087Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-04T02:45:58.719138Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:58.719331Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:964:2822], serverId# [2:965:2823], sessionId# [0:0:0] 2026-01-04T02:45:58.721145Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:969:2827], serverId# [2:970:2828], sessionId# [0:0:0] 2026-01-04T02:45:58.721292Z node 2 
:TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-04T02:45:58.721467Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-04T02:45:58.721513Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:58.721713Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:969:2827], serverId# [2:970:2828], sessionId# [0:0:0] 2026-01-04T02:45:58.723506Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:974:2832], serverId# [2:975:2833], sessionId# [0:0:0] 2026-01-04T02:45:58.723621Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-04T02:45:58.723912Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-04T02:45:58.723968Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:58.724181Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:974:2832], serverId# [2:975:2833], sessionId# [0:0:0] 2026-01-04T02:45:58.726064Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:979:2837], serverId# [2:980:2838], sessionId# [0:0:0] 2026-01-04T02:45:58.726213Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-04T02:45:58.726392Z node 2 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-04T02:45:58.726436Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:58.726635Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:979:2837], serverId# [2:980:2838], sessionId# [0:0:0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldFailOnVariousErrors [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:51.419766Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:51.531323Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:51.531613Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:51.531648Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:51.746065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:51.746150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:51.798652Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494748823615 != 1767494748823618 2026-01-04T02:45:51.807254Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:51.848450Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:51.941974Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:52.199878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:52.213189Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:52.316151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:52.350846Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:45:52.351073Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:52.398013Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:52.398127Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:52.399823Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:52.399895Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:52.399944Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:52.400316Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:52.400431Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:52.400515Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:45:52.411166Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:52.441534Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:52.441669Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:52.441741Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:45:52.441768Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:52.441793Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:52.441833Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.442119Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:52.442186Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:52.442232Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.442293Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:52.442347Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:52.442386Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.442463Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:52.442772Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:52.442960Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:52.443018Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:52.444166Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:52.454813Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:52.454934Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:52.571088Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-04T02:45:52.575763Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:45:52.575832Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.576179Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.576223Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:52.576279Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:45:52.576567Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:45:52.576704Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:52.576909Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.576981Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:45:52.584180Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:45:52.584652Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:52.587037Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:45:52.587086Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.587822Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:45:52.587892Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.589065Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.589115Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:52.589171Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:45:52.589226Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:45:52.589276Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:45:52.589352Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.593785Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:52.595398Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:45:52.595463Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:45:52.596320Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:45:52.605706Z node 1 :KQP_WORKLOAD_SERV ... ain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2026-01-04T02:45:58.499425Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2026-01-04T02:45:58.499467Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:58.536162Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037894 actor [2:1452:3222] 2026-01-04T02:45:58.536351Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:58.543152Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:58.543245Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:58.544382Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037894 2026-01-04T02:45:58.544436Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037894 2026-01-04T02:45:58.544479Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037894 2026-01-04T02:45:58.544715Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:58.544809Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:58.544856Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037894 persisting started state actor id [2:1468:3222] in generation 1 2026-01-04T02:45:58.565751Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:58.565837Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037894 2026-01-04T02:45:58.565918Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037894 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:58.565976Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037894, actorId: [2:1470:3232] 2026-01-04T02:45:58.566003Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037894 2026-01-04T02:45:58.566037Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change 
sender: at tablet: 72075186224037894, state: WaitScheme 2026-01-04T02:45:58.566071Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-04T02:45:58.566389Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037894 2026-01-04T02:45:58.566476Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037894 2026-01-04T02:45:58.566519Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037894 2026-01-04T02:45:58.566544Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:58.566568Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037894 TxInFly 0 2026-01-04T02:45:58.566592Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037894 2026-01-04T02:45:58.566873Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1451:3221], serverId# [2:1455:3223], sessionId# [0:0:0] 2026-01-04T02:45:58.566958Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-04T02:45:58.567151Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037894 txId 281474976715663 ssId 72057594046644480 seqNo 2:7 2026-01-04T02:45:58.567238Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715663 at tablet 72075186224037894 2026-01-04T02:45:58.567609Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2026-01-04T02:45:58.578367Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-04T02:45:58.578493Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037894 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:58.705463Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1476:3238], serverId# [2:1478:3240], sessionId# [0:0:0] 2026-01-04T02:45:58.705979Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715663 at step 5000 at tablet 72075186224037894 { Transactions { TxId: 281474976715663 AckTo { RawX1: 0 RawX2: 0 } } Step: 5000 MediatorID: 72057594046382081 TabletID: 72075186224037894 } 2026-01-04T02:45:58.706024Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-04T02:45:58.706428Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037894 2026-01-04T02:45:58.706465Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:58.706499Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [5000:281474976715663] in PlanQueue unit at 72075186224037894 2026-01-04T02:45:58.706688Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails 
at 72075186224037894 loaded tx from db 5000:281474976715663 keys extracted: 0 2026-01-04T02:45:58.706771Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:58.707300Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037894 2026-01-04T02:45:58.707359Z node 2 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037894 tableId# [OwnerId: 72057594046644480, LocalPathId: 44] schema version# 1 2026-01-04T02:45:58.707665Z node 2 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037894 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:45:58.707989Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:58.709056Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037894 time 4500 2026-01-04T02:45:58.709090Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-04T02:45:58.709787Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037894 step# 5000} 2026-01-04T02:45:58.709864Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037894 2026-01-04T02:45:58.710793Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037894 2026-01-04T02:45:58.710826Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037894 2026-01-04T02:45:58.710855Z node 2 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037894 2026-01-04T02:45:58.710899Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [5000 : 281474976715663] from 72075186224037894 at tablet 72075186224037894 send result to client [2:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:45:58.710933Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037894 Sending notify to schemeshard 72057594046644480 txId 281474976715663 state Ready TxInFly 0 2026-01-04T02:45:58.710985Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-04T02:45:58.711864Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037894 2026-01-04T02:45:58.711942Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:45:58.712030Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-04T02:45:58.712069Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037891 2026-01-04T02:45:58.712108Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037892 2026-01-04T02:45:58.712262Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037893 2026-01-04T02:45:58.712300Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:58.712724Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037894 coordinator 72057594046316545 last step 0 next step 5000 2026-01-04T02:45:58.715165Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715663 datashard 72075186224037894 state Ready 2026-01-04T02:45:58.715219Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2026-01-04T02:45:58.719262Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1506:3262], serverId# [2:1507:3263], sessionId# [0:0:0] 2026-01-04T02:45:58.719396Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1506:3262], serverId# [2:1507:3263], sessionId# [0:0:0] 2026-01-04T02:45:58.720579Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1511:3267], serverId# [2:1512:3268], sessionId# [0:0:0] 2026-01-04T02:45:58.720757Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1511:3267], serverId# [2:1512:3268], sessionId# [0:0:0] 2026-01-04T02:45:58.722200Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1516:3272], serverId# [2:1517:3273], sessionId# [0:0:0] 2026-01-04T02:45:58.722375Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037894, clientId# [2:1516:3272], serverId# [2:1517:3273], sessionId# [0:0:0] |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> DataShardWrite::DistributedInsertWithoutLocks+Volatile [GOOD] >> DataShardWrite::DistributedInsertDuplicateWithLocks+Volatile |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> AsyncIndexChangeExchange::SenderShouldShakeHandsTwice [GOOD] >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-system >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-dbadmin [GOOD] >> 
TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-ordinaryuser ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateSecret-Protect-NoDbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-04T02:44:09.962761Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:09.962833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:09.962869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:09.962905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:09.962938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:09.962962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:09.963032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:09.963100Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:09.963818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:09.965000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:10.075827Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:44:10.075913Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:10.076820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:10.088261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:10.088824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:10.089013Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 
72057594046678944 2026-01-04T02:44:10.098360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:10.098538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:10.099066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.099324Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:10.101747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:10.102915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:10.108107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:10.108195Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:10.108314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:10.108365Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:10.108507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:10.108687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.116173Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:245:2058] recipient: [1:15:2062] 2026-01-04T02:44:10.241647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:10.241912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.242097Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:44:10.242140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:44:10.242336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:44:10.242401Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:10.246822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.247105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:44:10.247420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.247517Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:44:10.247571Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:44:10.247634Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:44:10.249729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.249777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:44:10.249832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:44:10.251189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.251237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.251288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.251344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:44:10.254056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:10.255444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:44:10.255599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: 
advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:44:10.256561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.256705Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:10.256765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.257052Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:44:10.257116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.257300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:10.257369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:44:10.259036Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 
as 3 2026-01-04T02:45:59.070152Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-01-04T02:45:59.070174Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-01-04T02:45:59.070194Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-01-04T02:45:59.070213Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2026-01-04T02:45:59.070232Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 3 2026-01-04T02:45:59.071454Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:59.071526Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:59.071561Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:45:59.071590Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-01-04T02:45:59.071619Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-01-04T02:45:59.072608Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:59.072672Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:59.072699Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:45:59.072724Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-01-04T02:45:59.072748Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-01-04T02:45:59.074227Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 
72057594046678944, cookie: 202 2026-01-04T02:45:59.074291Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:59.074314Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:45:59.074338Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2026-01-04T02:45:59.074364Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-01-04T02:45:59.074696Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:59.074754Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 3 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:45:59.074776Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:45:59.074798Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 3 2026-01-04T02:45:59.074822Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 2 2026-01-04T02:45:59.074872Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-01-04T02:45:59.076832Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-04T02:45:59.076952Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-04T02:45:59.078569Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-04T02:45:59.078648Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-01-04T02:45:59.079912Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-01-04T02:45:59.079946Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-01-04T02:45:59.081271Z node 32 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-01-04T02:45:59.081394Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-01-04T02:45:59.081432Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2682:4671] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-01-04T02:45:59.082719Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-01-04T02:45:59.082761Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-01-04T02:45:59.082825Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-01-04T02:45:59.082844Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-01-04T02:45:59.082895Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-01-04T02:45:59.082924Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-01-04T02:45:59.082996Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-01-04T02:45:59.083024Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-01-04T02:45:59.083092Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-01-04T02:45:59.083120Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-01-04T02:45:59.085265Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-01-04T02:45:59.085425Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-01-04T02:45:59.085568Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-01-04T02:45:59.085626Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-01-04T02:45:59.085660Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2685:4674] 2026-01-04T02:45:59.085853Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-01-04T02:45:59.085888Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:2685:4674] 2026-01-04T02:45:59.086023Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 
72057594046678944 2026-01-04T02:45:59.086100Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-01-04T02:45:59.086120Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2685:4674] 2026-01-04T02:45:59.086266Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-01-04T02:45:59.086331Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-01-04T02:45:59.086352Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2685:4674] 2026-01-04T02:45:59.086450Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-01-04T02:45:59.086482Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2685:4674] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 >> AnalyzeDatashard::AnalyzeTwoTables |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] >> KqpLimits::ManyPartitionsSortingLimit [GOOD] >> KqpLimits::QSReplySize+useSink |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeOneTable >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] >> AnalyzeColumnshard::AnalyzeTwoColumnTables >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateIndexedTable-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgDate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:51.519074Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:51.626299Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:51.626771Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:51.626837Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:51.841274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:51.841368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:51.894364Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494749296156 != 1767494749296159 2026-01-04T02:45:51.906037Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:51.947825Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:52.038538Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:52.305737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:52.319154Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:52.419515Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:52.447481Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:45:52.447658Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:52.483008Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:52.483093Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:52.484425Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:52.484482Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:52.484535Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:52.484772Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:52.484863Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:52.484925Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:45:52.495525Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:52.535189Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:52.535346Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:52.535419Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:45:52.535442Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:52.535467Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:52.535502Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.535824Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:52.535900Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:52.535958Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.536022Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:52.536077Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:52.536118Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.536227Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:52.536663Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:52.536929Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:52.536998Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:52.538200Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:52.548770Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:52.548875Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:52.664243Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-04T02:45:52.667460Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:45:52.667525Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.667815Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.667857Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:52.667905Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:45:52.668111Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:45:52.668228Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:52.668378Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.668425Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:45:52.669793Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:45:52.670124Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:52.671738Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:45:52.671788Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.672378Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:45:52.672425Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.673233Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.673270Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:52.673309Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:45:52.673357Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:45:52.673393Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:45:52.673461Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.676470Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:52.677537Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:45:52.677584Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:45:52.678226Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:45:52.685023Z node 1 :KQP_WORKLOAD_SERV ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:46:00.891671Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:46:00.891927Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:46:00.892052Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:46:00.892645Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:00.892697Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:46:00.893060Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:46:00.893383Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:00.894637Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:46:00.894703Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:00.895386Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:46:00.895448Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:00.896201Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:00.896251Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:46:00.896306Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:46:00.896368Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:400:2399], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:46:00.896412Z node 3 :TX_DATASHARD 
INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:46:00.896482Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:00.897379Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:46:00.898877Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:46:00.899428Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:46:00.899503Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:46:00.906265Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:00.906343Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:00.906396Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:00.906807Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:965:2817], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:00.906919Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:00.910298Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:00.915060Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:46:01.030890Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:46:01.034147Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:46:01.068434Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:01.196982Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1054:2873], serverId# [3:1055:2874], sessionId# [0:0:0] 2026-01-04T02:46:01.197287Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-04T02:46:01.197409Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-01-04T02:46:01.208114Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.211524Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1062:2880], serverId# [3:1063:2881], sessionId# [0:0:0] 2026-01-04T02:46:01.213452Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-04T02:46:01.224685Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-04T02:46:01.224784Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.225053Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-04T02:46:01.225101Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-04T02:46:01.225377Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:01.225438Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:01.225489Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:46:01.225552Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.225668Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1062:2880], serverId# [3:1063:2881], sessionId# [0:0:0] 2026-01-04T02:46:01.226537Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:46:01.226850Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:46:01.227003Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:01.227034Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:01.227074Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-01-04T02:46:01.227272Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:01.227328Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.227775Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-01-04T02:46:01.228048Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 43, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-04T02:46:01.228163Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-01-04T02:46:01.228202Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-01-04T02:46:01.262303Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-04T02:46:01.262360Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2026-01-04T02:46:01.262708Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:01.262735Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:01.262763Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-01-04T02:46:01.262857Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:01.262900Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.262934Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> PersQueueSdkReadSessionTest::ReadSessionWithCloseNotCommitted [GOOD] >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnPgTimestamp [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:52.451044Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:52.548813Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:52.549271Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:52.549327Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:52.801532Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:52.801640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:52.874884Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494750442022 != 1767494750442025 2026-01-04T02:45:52.891030Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:52.937647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:53.032253Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:53.296738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:53.310059Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:53.411167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:53.438832Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:45:53.439014Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:53.472634Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:53.472724Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:53.474212Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:53.474324Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:53.474409Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:53.474748Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:53.474875Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:53.474978Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:45:53.485604Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:53.511531Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:53.511712Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:53.511825Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:45:53.511860Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:53.511892Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:53.511938Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:53.512350Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:53.512458Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:53.512539Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:53.512599Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:53.512653Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:53.512690Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:53.512787Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:53.513225Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:53.513478Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:53.513571Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:53.515199Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:53.525787Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:53.525900Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:53.641658Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-04T02:45:53.646134Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:45:53.646228Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:53.646586Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:53.646644Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:53.646707Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:45:53.646982Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:45:53.647123Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:53.647322Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:53.647405Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:45:53.649473Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:45:53.649972Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:53.652363Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:45:53.652419Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:53.653275Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:45:53.653342Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:53.654587Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:53.654635Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:53.654691Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:45:53.654750Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:45:53.654804Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:45:53.654888Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:53.664952Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:53.666732Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:45:53.666801Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:45:53.667664Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:45:53.677126Z node 1 :KQP_WORKLOAD_SERV ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:46:01.346029Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:46:01.346248Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:46:01.346350Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:46:01.346837Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:01.346883Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:46:01.347198Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:46:01.347480Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:01.348464Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:46:01.348503Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.349163Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:46:01.349217Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.349844Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.349888Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:46:01.349928Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:46:01.349972Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:400:2399], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:46:01.350009Z node 3 :TX_DATASHARD 
INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:46:01.350065Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.351079Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:46:01.352589Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:46:01.353028Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:46:01.353090Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:46:01.359086Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.359157Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.359223Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.359816Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:965:2817], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.359996Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.364788Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:01.371221Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:46:01.486218Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:46:01.489143Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:46:01.523454Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:01.628968Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1054:2873], serverId# [3:1055:2874], sessionId# [0:0:0] 2026-01-04T02:46:01.629248Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-04T02:46:01.629364Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-01-04T02:46:01.640084Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.643509Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1062:2880], serverId# [3:1063:2881], sessionId# [0:0:0] 2026-01-04T02:46:01.644355Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-04T02:46:01.655412Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-04T02:46:01.655490Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.655708Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-04T02:46:01.655758Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-04T02:46:01.655982Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:01.656033Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:01.656081Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:46:01.656136Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.656253Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1062:2880], serverId# [3:1063:2881], sessionId# [0:0:0] 2026-01-04T02:46:01.657130Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:46:01.657428Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:46:01.657632Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:01.657677Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:01.657722Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-01-04T02:46:01.657952Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:01.658022Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.658589Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-01-04T02:46:01.658902Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 48, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-04T02:46:01.659041Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-01-04T02:46:01.659091Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-01-04T02:46:01.714212Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-04T02:46:01.714272Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2026-01-04T02:46:01.714664Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:01.714700Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:01.714735Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-01-04T02:46:01.714848Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:01.714898Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.714938Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorks [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldBreakLocks [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:52.374671Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:52.463006Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:52.463430Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:52.463490Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:52.695205Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:52.695298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:52.741986Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494749986494 != 1767494749986497 2026-01-04T02:45:52.750133Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:52.790949Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:52.884613Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:53.162785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:53.176555Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:53.278123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:53.316676Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:908:2781] 2026-01-04T02:45:53.316834Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:53.348057Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:53.348152Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:53.349333Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:53.349408Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:53.349460Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:53.349698Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:53.349949Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:53.350006Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:943:2781] in generation 1 2026-01-04T02:45:53.350262Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:912:2784] 2026-01-04T02:45:53.350410Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:53.356701Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:915:2787] 2026-01-04T02:45:53.356893Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:53.365288Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:53.365424Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:53.367038Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-04T02:45:53.367123Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-04T02:45:53.367182Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-04T02:45:53.367474Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:53.367658Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:53.367724Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:956:2784] in generation 1 2026-01-04T02:45:53.368068Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:53.368158Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:53.369310Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-04T02:45:53.369377Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-04T02:45:53.369417Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-04T02:45:53.369726Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:53.369832Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:53.369881Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:957:2787] in generation 1 2026-01-04T02:45:53.380555Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:53.402771Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:53.402914Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:53.402996Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:961:2813] 2026-01-04T02:45:53.403026Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:53.403067Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:53.403094Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:53.403377Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:53.403404Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:45:53.403443Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:53.403484Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:962:2814] 2026-01-04T02:45:53.403510Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:45:53.403528Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:45:53.403541Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:45:53.403900Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:53.403937Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-04T02:45:53.404015Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:53.404090Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:963:2815] 2026-01-04T02:45:53.404112Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-04T02:45:53.404132Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-04T02:45:53.404159Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-04T02:45:53.404340Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:53.404429Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:53.404631Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:53.404681Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:53.404729Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:53.404772Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:53.404818Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-04T02:45:53.404893Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-04T02:45:53.405039Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:898:2776], serverId# [1:909:2782], sessionId# [0:0:0] 2026-01-04T02:45:53.405108Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:45:53.405139Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:53.405162Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-04T02:45:53.405190Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:45:53.405222Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037890 2026-01-04T02: ... G: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.175608Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:46:01.175689Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.176745Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.176790Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:46:01.176844Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:46:01.176912Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:400:2399], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:46:01.176970Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:46:01.177074Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.178335Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:46:01.180338Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:46:01.181045Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:46:01.181121Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:46:01.190676Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.190796Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.190879Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.191509Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:965:2817], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.191697Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.196946Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:01.204535Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:46:01.322254Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:46:01.326030Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:46:01.361373Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:01.453880Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1054:2873], serverId# [3:1055:2874], sessionId# [0:0:0] 2026-01-04T02:46:01.454321Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-04T02:46:01.454528Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=3 2026-01-04T02:46:01.465553Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.686699Z node 3 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976715661, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 38] { items { uint64_value: 0 } } 2026-01-04T02:46:01.695247Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1094:2905], serverId# [3:1095:2906], sessionId# [0:0:0] 2026-01-04T02:46:01.696340Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-04T02:46:01.707835Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-04T02:46:01.707934Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.708039Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2572: Waiting for PlanStep# 2501 from mediator time cast 2026-01-04T02:46:01.708914Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3810: Notified by mediator time cast with PlanStep# 2501 at tablet 72075186224037888 2026-01-04T02:46:01.708995Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.709159Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-04T02:46:01.709213Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037888 2026-01-04T02:46:01.709572Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:01.709626Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:01.709677Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:46:01.709738Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.709859Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1094:2905], serverId# [3:1095:2906], sessionId# [0:0:0] 2026-01-04T02:46:01.787633Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037888 
2026-01-04T02:46:01.787854Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=6; 2026-01-04T02:46:01.799045Z node 3 :TX_DATASHARD INFO: datashard_write_operation.cpp:804: Write transaction 6 at 72075186224037888 has an error: Operation is aborting because locks are not valid 2026-01-04T02:46:01.799368Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 6 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-01-04T02:46:01.799603Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 6 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-01-04T02:46:01.799699Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.800035Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [3:1116:2879], Table: `/Root/table-1` ([72057594046644480:38:1]), SessionActorId: [3:1061:2879]Got LOCKS BROKEN for table `/Root/table-1`. ShardID=72075186224037888, Sink=[3:1116:2879].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-04T02:46:01.800807Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:1109:2879], SessionActorId: [3:1061:2879], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:1061:2879]. 2026-01-04T02:46:01.801234Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=Mjk5MGE1M2QtMWM4NGI5OTktMTVhYTA0NWEtOGM2ZWY4Mjc=, ActorId: [3:1061:2879], ActorState: ExecuteState, LegacyTraceId: 01ke3ebf7e2mfp1gn915xzast3, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:1110:2879] from: [3:1109:2879] trace_id# 2026-01-04T02:46:01.801458Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:1110:2879] TxId: 281474976715662. Ctx: { TraceId: 01ke3ebf7e2mfp1gn915xzast3, Database: , SessionId: ydb://session/3?node_id=3&id=Mjk5MGE1M2QtMWM4NGI5OTktMTVhYTA0NWEtOGM2ZWY4Mjc=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/table-1`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-04T02:46:01.801992Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=Mjk5MGE1M2QtMWM4NGI5OTktMTVhYTA0NWEtOGM2ZWY4Mjc=, ActorId: [3:1061:2879], ActorState: ExecuteState, LegacyTraceId: 01ke3ebf7e2mfp1gn915xzast3, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/table-1`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-04T02:46:01.803144Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:7] at 72075186224037888 2026-01-04T02:46:01.803233Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:7] at 72075186224037888 2026-01-04T02:46:01.803412Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> AnalyzeColumnshard::AnalyzeMultiOperationId ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDatetime64 [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:51.110178Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:51.221789Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:51.222183Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:51.222226Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:51.429144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:51.429266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:51.479919Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494748823586 != 1767494748823589 2026-01-04T02:45:51.490579Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:51.533958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:51.636957Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:51.914408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:51.927138Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:52.040200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:52.087328Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:45:52.087530Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:52.132962Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:52.133109Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:52.135763Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:52.135850Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:52.135898Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:52.137622Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:52.137773Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:52.137889Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:45:52.148627Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:52.173601Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:52.174913Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:52.175041Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:45:52.175076Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:52.175111Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:52.175144Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.176367Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:52.176453Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:52.176535Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.176593Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:52.176692Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:52.176756Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.176859Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:52.177290Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:52.177636Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:52.177728Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:52.179614Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:52.190228Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:52.190342Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:52.307174Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-04T02:45:52.317924Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:45:52.317990Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.318291Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.318325Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:52.318374Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:45:52.318604Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:45:52.318731Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:52.318910Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.319004Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:45:52.321758Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:45:52.323006Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:52.324797Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:45:52.324842Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.325440Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:45:52.325501Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.326394Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.326428Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:52.326471Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:45:52.326519Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:45:52.326554Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:45:52.326657Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.329723Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:52.331030Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:45:52.331136Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:45:52.331814Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:45:52.358616Z node 1 :KQP_WORKLOAD_SERV ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:46:01.406970Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:46:01.435136Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:46:01.435323Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:46:01.436243Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:01.436311Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:46:01.436727Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:46:01.437075Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:01.438425Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:46:01.438473Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.439275Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:46:01.439341Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.440096Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.440134Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:46:01.440177Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:46:01.440232Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:400:2399], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:46:01.440274Z node 3 :TX_DATASHARD 
INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:46:01.440360Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.441404Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:46:01.442872Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:46:01.443418Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:46:01.443473Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:46:01.450723Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.450816Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.450873Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.451319Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:965:2817], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.451436Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.455442Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:01.461232Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:46:01.577496Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:46:01.581103Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:46:01.616240Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:01.699269Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1054:2873], serverId# [3:1055:2874], sessionId# [0:0:0] 2026-01-04T02:46:01.699630Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-04T02:46:01.699825Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=5 2026-01-04T02:46:01.710618Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.714314Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1062:2880], serverId# [3:1063:2881], sessionId# [0:0:0] 2026-01-04T02:46:01.715040Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-04T02:46:01.726401Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-04T02:46:01.726536Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.726845Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-04T02:46:01.726906Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-04T02:46:01.727219Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:01.727274Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:01.727342Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:46:01.727415Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.727545Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1062:2880], serverId# [3:1063:2881], sessionId# [0:0:0] 2026-01-04T02:46:01.728452Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:46:01.728740Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:46:01.728881Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:01.728928Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:01.728984Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-01-04T02:46:01.729162Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:01.729214Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.729764Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-01-04T02:46:01.730027Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-04T02:46:01.730149Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-01-04T02:46:01.730192Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-01-04T02:46:01.731574Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-04T02:46:01.731628Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2026-01-04T02:46:01.731975Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:01.732006Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:01.732035Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-01-04T02:46:01.732128Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:01.732167Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.732208Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> EraseRowsTests::ConditionalEraseRowsShouldEraseOnDyNumberNanoSeconds [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:51.237124Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:51.335325Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:51.335638Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:51.335677Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:51.596236Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:51.596323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:51.641078Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494748862220 != 1767494748862223 2026-01-04T02:45:51.648888Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:51.689731Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:51.776180Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:52.074112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:52.087723Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:52.189666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:52.221739Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-04T02:45:52.222025Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:52.254253Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:52.254340Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:52.255545Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:52.255605Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:52.255657Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:52.255926Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:52.256014Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:52.256081Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:45:52.266614Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:52.292894Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:52.293037Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:52.293131Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:45:52.293163Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:52.293195Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:52.293236Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.293540Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:52.293637Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:52.293703Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.293756Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:52.293821Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:52.293874Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.293971Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:52.294316Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:52.294500Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:52.294561Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:52.295885Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:52.306439Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:52.306536Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:52.421932Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-04T02:45:52.425178Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:45:52.425247Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.425481Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.425526Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:52.425581Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:45:52.425761Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:45:52.425932Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:52.426164Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.426239Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:45:52.427694Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:45:52.428022Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:52.430019Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:45:52.430058Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.430677Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:45:52.430722Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.431519Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.431565Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:52.431608Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:45:52.431650Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:45:52.431684Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:45:52.431743Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.434700Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:52.435899Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:45:52.435964Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:45:52.436660Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:45:52.444145Z node 1 :KQP_WORKLOAD_SERV ... pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:46:01.565298Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:46:01.565606Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:46:01.565754Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:46:01.566619Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:01.566693Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:46:01.567207Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:46:01.618037Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:01.620018Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:46:01.620087Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.621142Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:46:01.621234Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.622241Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.622292Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:46:01.622348Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:46:01.622414Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:400:2399], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:46:01.622466Z node 3 :TX_DATASHARD 
INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:46:01.622551Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.623740Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:46:01.625658Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:46:01.626356Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:46:01.626430Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:46:01.635775Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.635890Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.635973Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.636543Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:965:2817], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.636733Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:01.641897Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:01.649256Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:46:01.766376Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:46:01.769760Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:966:2818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:46:01.804772Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:01.901996Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1054:2873], serverId# [3:1055:2874], sessionId# [0:0:0] 2026-01-04T02:46:01.902386Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037888 2026-01-04T02:46:01.902583Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037888, row count=4 2026-01-04T02:46:01.913626Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.918327Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:1062:2880], serverId# [3:1063:2881], sessionId# [0:0:0] 2026-01-04T02:46:01.919324Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:27: TTxDirectBase(48) Execute: at tablet# 72075186224037888 2026-01-04T02:46:01.930803Z node 3 :TX_DATASHARD INFO: datashard__op_rows.cpp:81: TTxDirectBase(48) Complete: at tablet# 72075186224037888 2026-01-04T02:46:01.930892Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:01.931191Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-04T02:46:01.931239Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 3, at: 72075186224037888 2026-01-04T02:46:01.931528Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:01.931585Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:01.931640Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:46:01.931710Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.931845Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [3:1062:2880], serverId# [3:1063:2881], sessionId# [0:0:0] 2026-01-04T02:46:01.932810Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:46:01.933166Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:46:01.933390Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:01.933442Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:01.933490Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for WaitForStreamClearance 
2026-01-04T02:46:01.933736Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:01.933830Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.934511Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 1 2026-01-04T02:46:01.934787Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715661, Size: 37, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-04T02:46:01.934922Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715661, PendingAcks: 0 2026-01-04T02:46:01.934979Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715661, MessageQuota: 0 2026-01-04T02:46:01.936777Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-04T02:46:01.936829Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037888 2026-01-04T02:46:01.937233Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:01.937276Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:01.937317Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715661] at 72075186224037888 for ReadTableScan 2026-01-04T02:46:01.937439Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:01.937492Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:01.937543Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 |93.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-clusteradmin >> DataShardWrite::UncommittedUpdateLockUncommittedNewRow [GOOD] >> DataShardWrite::UncommittedUpdateLockUncommittedDeleteRow >> TraverseDatashard::TraverseTwoTables >> AnalyzeColumnshard::AnalyzeSameOperationId >> DistributedEraseTests::ConditionalEraseRowsCheckLimits [GOOD] >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] >> DataShardWrite::DistributedInsertDuplicateWithLocks+Volatile [GOOD] >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnDeadShard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:51.087675Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:51.213253Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:51.213783Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:51.213870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:51.493533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:51.493641Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:51.552037Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494748918515 != 1767494748918518 2026-01-04T02:45:51.563796Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:51.605156Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:51.691668Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:51.964067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:51.976689Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:52.079155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:52.122940Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:908:2781] 2026-01-04T02:45:52.123123Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:52.167925Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:52.168041Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:52.169837Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:52.169900Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:52.169956Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:52.170327Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:52.170711Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:52.170800Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:943:2781] in generation 1 2026-01-04T02:45:52.171202Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:912:2784] 2026-01-04T02:45:52.171412Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:52.178799Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:915:2787] 2026-01-04T02:45:52.178964Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:52.184830Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:52.184917Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:52.185822Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-04T02:45:52.185875Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-04T02:45:52.185909Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-04T02:45:52.186115Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:52.186246Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:52.186285Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:956:2784] in generation 1 2026-01-04T02:45:52.186455Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:52.186557Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:52.187330Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-04T02:45:52.187366Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-04T02:45:52.187391Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-04T02:45:52.187603Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:52.187686Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:52.187736Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:957:2787] in generation 1 2026-01-04T02:45:52.198570Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:52.230003Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:52.230199Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:52.230316Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:961:2813] 2026-01-04T02:45:52.230361Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:52.230424Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:52.230471Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.230843Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:52.230886Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:45:52.230957Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:52.231065Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:962:2814] 2026-01-04T02:45:52.231101Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:45:52.231124Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:45:52.231151Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:45:52.231482Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:52.231523Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-04T02:45:52.231572Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:52.231652Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:963:2815] 2026-01-04T02:45:52.231677Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-04T02:45:52.231719Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-04T02:45:52.231764Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-04T02:45:52.231991Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:52.232103Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:52.232357Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.232410Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:52.232465Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:52.232511Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.232561Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-04T02:45:52.232657Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-04T02:45:52.232815Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:898:2776], serverId# [1:909:2782], sessionId# [0:0:0] 2026-01-04T02:45:52.232860Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:45:52.232887Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:52.232911Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-04T02:45:52.232942Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:45:52.232978Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037890 2026-01-04T02: ... d__readset.cpp:91: TTxReadSet::Complete at 72075186224037888 2026-01-04T02:46:03.538506Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:46:03.538615Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2026-01-04T02:46:03.538675Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037890 source 72075186224037890 dest 72075186224037888 consumer 72075186224037888 txId 281474976715661 2026-01-04T02:46:03.538741Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:787: [DistEraser] [3:1293:3022] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037888, status# 1 2026-01-04T02:46:03.538872Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-04T02:46:03.538950Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:787: [DistEraser] [3:1293:3022] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037889, status# 1 2026-01-04T02:46:03.539020Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2026-01-04T02:46:03.539041Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037890 2026-01-04T02:46:03.539080Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:787: [DistEraser] [3:1293:3022] HandlePropose TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037890, status# 1 2026-01-04T02:46:03.539108Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:904: [DistEraser] [3:1293:3022] Register plan: txId# 281474976715662, minStep# 2502, maxStep# 32502 2026-01-04T02:46:03.539191Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 2026-01-04T02:46:03.539230Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 2026-01-04T02:46:03.551627Z node 3 :TX_DATASHARD INFO: datashard.cpp:189: OnDetach: 72075186224037888 2026-01-04T02:46:03.553038Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 
2026-01-04T02:46:03.555223Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3665: Client pipe to tablet 72075186224037888 from 72075186224037889 is reset 2026-01-04T02:46:03.555294Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3665: Client pipe to tablet 72075186224037888 from 72075186224037890 is reset 2026-01-04T02:46:03.555412Z node 3 :TX_DATASHARD ERROR: datashard_distributed_erase.cpp:167: [DistEraser] [3:1293:3022] Reply: txId# 281474976715662, status# SHARD_UNKNOWN, error# Tx state unknown: reason# lost pipe while waiting for reply (plan), txId# 281474976715662, shard# 72075186224037888 2026-01-04T02:46:03.555909Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037889 2026-01-04T02:46:03.555944Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037889 2026-01-04T02:46:03.556015Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:46:03.556048Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:46:03.556087Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 1 2026-01-04T02:46:03.556144Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:46:03.556379Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1287:3017], serverId# [3:1288:3018], sessionId# [0:0:0] 2026-01-04T02:46:03.574006Z node 3 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:1304:3032] 2026-01-04T02:46:03.574187Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:46:03.576849Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:46:03.577702Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:46:03.579421Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:46:03.579484Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:46:03.579529Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:46:03.579817Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:46:03.580064Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:46:03.580130Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [3:1319:3032] in generation 2 2026-01-04T02:46:03.591267Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:46:03.591384Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037888 2026-01-04T02:46:03.591486Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:46:03.591804Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [3:1321:3040] 2026-01-04T02:46:03.591839Z node 3 :TX_DATASHARD 
DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:46:03.591877Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:46:03.591910Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:03.592095Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:742: TxInitSchemaDefaults.Execute 2026-01-04T02:46:03.592272Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:754: TxInitSchemaDefaults.Complete 2026-01-04T02:46:03.593145Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:46:03.593228Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:46:03.593371Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 2501 2026-01-04T02:46:03.593411Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:03.593609Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:46:03.593674Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:03.593707Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:46:03.593739Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 1 2026-01-04T02:46:03.593772Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:03.593859Z node 3 :TX_DATASHARD DEBUG: datashard__progress_resend_rs.cpp:14: Start TTxProgressResendRS at tablet 72075186224037888 2026-01-04T02:46:03.593892Z node 3 :TX_DATASHARD INFO: datashard.cpp:4131: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715661 2026-01-04T02:46:03.593928Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 1 at 72075186224037888 from 72075186224037888 to 72075186224037889 txId 281474976715661 2026-01-04T02:46:03.594080Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 2500 next step 2501 2026-01-04T02:46:03.594133Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037889 source 72075186224037888 dest 72075186224037889 producer 72075186224037888 txId 281474976715661 2026-01-04T02:46:03.594192Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037889 got read set: {TEvReadSet step# 2501 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-04T02:46:03.594233Z node 3 :TX_DATASHARD NOTICE: datashard_pipeline.cpp:734: Outdated readset for 2501:281474976715661 at 72075186224037889 2026-01-04T02:46:03.594269Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-04T02:46:03.594306Z node 3 :TX_DATASHARD DEBUG: 
datashard__readset.cpp:99: Send RS Ack at 72075186224037889 {TEvReadSet step# 2501 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 1 Flags# 0} 2026-01-04T02:46:03.594413Z node 3 :TX_DATASHARD DEBUG: datashard__progress_resend_rs.cpp:14: Start TTxProgressResendRS at tablet 72075186224037888 2026-01-04T02:46:03.594443Z node 3 :TX_DATASHARD INFO: datashard.cpp:4131: Resend RS at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715661 2026-01-04T02:46:03.594463Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 2 at 72075186224037888 from 72075186224037888 to 72075186224037890 txId 281474976715661 2026-01-04T02:46:03.594556Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 281474976715661 2026-01-04T02:46:03.594648Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037890 source 72075186224037888 dest 72075186224037890 producer 72075186224037888 txId 281474976715661 2026-01-04T02:46:03.594681Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037890 got read set: {TEvReadSet step# 2501 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2026-01-04T02:46:03.594704Z node 3 :TX_DATASHARD NOTICE: datashard_pipeline.cpp:734: Outdated readset for 2501:281474976715661 at 72075186224037890 2026-01-04T02:46:03.594725Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037890 2026-01-04T02:46:03.594750Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:99: Send RS Ack at 72075186224037890 {TEvReadSet step# 2501 txid# 281474976715661 TabletSource# 72075186224037888 TabletDest# 72075186224037890 SetTabletProducer# 72075186224037888 ReadSet.Size()# 2 Seqno# 2 Flags# 0} 2026-01-04T02:46:03.594790Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037890 consumer 72075186224037890 txId 281474976715661 |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> AnalyzeColumnshard::AnalyzeShard >> TKeyValueTest::TestRewriteThenLastValueNewApi [GOOD] >> TKeyValueTest::TestSetExecutorFastLogPolicy >> AsyncIndexChangeExchange::SenderShouldShakeHandsAfterAddingIndex [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable >> AnalyzeColumnshard::AnalyzeRebootSa >> TraverseColumnShard::TraverseServerlessColumnTable >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] >> AnalyzeColumnshard::AnalyzeStatus >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-dbadmin >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldFailOnSplit [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:45:52.886085Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:53.014176Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:53.014515Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:53.014552Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:53.217045Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:53.217131Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:53.270330Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494750806761 != 1767494750806764 2026-01-04T02:45:53.279302Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:53.324371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:53.417495Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:53.690117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:53.703318Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:53.806090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:53.844910Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:908:2781] 2026-01-04T02:45:53.845095Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:53.877469Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:53.877556Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:53.878858Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:53.878914Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:53.878962Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:53.879243Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:53.879508Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:53.879552Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:943:2781] in generation 1 2026-01-04T02:45:53.879813Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:912:2784] 2026-01-04T02:45:53.879989Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:53.886487Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:915:2787] 2026-01-04T02:45:53.886664Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:53.892665Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:53.892778Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:53.893732Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-04T02:45:53.893792Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-04T02:45:53.893859Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-04T02:45:53.894053Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:53.894195Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:53.894232Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:956:2784] in generation 1 2026-01-04T02:45:53.894443Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:53.894513Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:53.895336Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-04T02:45:53.895374Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-04T02:45:53.895402Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-04T02:45:53.895623Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:53.895719Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:53.895819Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:957:2787] in generation 1 2026-01-04T02:45:53.906439Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:53.924947Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:53.925077Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:53.925152Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:961:2813] 2026-01-04T02:45:53.925179Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:53.925226Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:53.925257Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:53.925533Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:53.925574Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:45:53.925630Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:53.925670Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:962:2814] 2026-01-04T02:45:53.925685Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:45:53.925699Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:45:53.925713Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:45:53.926002Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:53.926026Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-04T02:45:53.926082Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:53.926130Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:963:2815] 2026-01-04T02:45:53.926145Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-04T02:45:53.926159Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-04T02:45:53.926172Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-04T02:45:53.926285Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:53.926342Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:53.926524Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:53.926564Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:53.926600Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:53.926630Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:53.926659Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-04T02:45:53.926706Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-04T02:45:53.926792Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:898:2776], serverId# [1:909:2782], sessionId# [0:0:0] 2026-01-04T02:45:53.926820Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:45:53.926835Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:53.926858Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-04T02:45:53.926877Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:45:53.926908Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037890 2026-01-04T02: ... node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037889, table# 7, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-04T02:46:04.733636Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037889, table# 7, finished edge# 0, front# 0 2026-01-04T02:46:04.734802Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037889, table# 8, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-04T02:46:04.734829Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037889, table# 8, finished edge# 0, front# 0 2026-01-04T02:46:04.736316Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-04T02:46:04.736346Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2026-01-04T02:46:04.736637Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:256: 72075186224037889 snapshot complete for split OpId 281474976715663 2026-01-04T02:46:04.736812Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 12 for split OpId 281474976715663 2026-01-04T02:46:04.736852Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 24 for split OpId 281474976715663 2026-01-04T02:46:04.736875Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 36 for split OpId 281474976715663 2026-01-04T02:46:04.736910Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 48 for split OpId 281474976715663 2026-01-04T02:46:04.737072Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 146 total snapshot size is 194 for split OpId 281474976715663 2026-01-04T02:46:04.737231Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 3 snapshot size is 12 total snapshot size is 206 for split OpId 281474976715663 2026-01-04T02:46:04.737260Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 4 snapshot size is 12 total snapshot size is 218 
for split OpId 281474976715663 2026-01-04T02:46:04.737282Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 7 snapshot size is 12 total snapshot size is 230 for split OpId 281474976715663 2026-01-04T02:46:04.737303Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 8 snapshot size is 12 total snapshot size is 242 for split OpId 281474976715663 2026-01-04T02:46:04.737373Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:332: 72075186224037889 BorrowSnapshot: table 1001 snapshot size is 155 total snapshot size is 397 for split OpId 281474976715663 2026-01-04T02:46:04.737783Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:424: 72075186224037889 Sending snapshots from src for split OpId 281474976715663 2026-01-04T02:46:04.737950Z node 3 :TX_DATASHARD DEBUG: datashard_impl.h:2361: Sending snapshot for split opId 281474976715663 from datashard 72075186224037889 to datashard 72075186224037892 size 221 2026-01-04T02:46:04.738045Z node 3 :TX_DATASHARD DEBUG: datashard_impl.h:2361: Sending snapshot for split opId 281474976715663 from datashard 72075186224037889 to datashard 72075186224037891 size 215 2026-01-04T02:46:04.738255Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037891, clientId# [3:1395:3098], serverId# [3:1396:3099], sessionId# [0:0:0] 2026-01-04T02:46:04.738284Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037892, clientId# [3:1394:3097], serverId# [3:1397:3100], sessionId# [0:0:0] 2026-01-04T02:46:04.738372Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:175: 72075186224037891 Received snapshot for split/merge TxId 281474976715663 from tabeltId 72075186224037889 2026-01-04T02:46:04.738856Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:175: 72075186224037892 Received snapshot for split/merge TxId 281474976715663 from tabeltId 72075186224037889 2026-01-04T02:46:04.740249Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:304: 72075186224037891 ack snapshot OpId 281474976715663 2026-01-04T02:46:04.740432Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037891 2026-01-04T02:46:04.740543Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:46:04.740640Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-04T02:46:04.740712Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037891, actorId: [3:1400:3103] 2026-01-04T02:46:04.740745Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037891 2026-01-04T02:46:04.740778Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037891 2026-01-04T02:46:04.740807Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-01-04T02:46:04.740921Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:461: 72075186224037889 Received snapshot Ack from dst 72075186224037891 for split OpId 281474976715663 2026-01-04T02:46:04.741464Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037891 time 3000 2026-01-04T02:46:04.741499Z node 3 
:TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-01-04T02:46:04.741647Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037891 2026-01-04T02:46:04.741668Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:04.741691Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-01-04T02:46:04.741715Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2026-01-04T02:46:04.741840Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# [3:1395:3098], serverId# [3:1396:3099], sessionId# [0:0:0] 2026-01-04T02:46:04.741886Z node 3 :TX_DATASHARD DEBUG: datashard_split_dst.cpp:304: 72075186224037892 ack snapshot OpId 281474976715663 2026-01-04T02:46:04.741953Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037892 2026-01-04T02:46:04.742002Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037892 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:46:04.742043Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 2026-01-04T02:46:04.742074Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037892, actorId: [3:1402:3105] 2026-01-04T02:46:04.742090Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037892 2026-01-04T02:46:04.742112Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037892 2026-01-04T02:46:04.742128Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-01-04T02:46:04.742196Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:461: 72075186224037889 Received snapshot Ack from dst 72075186224037892 for split OpId 281474976715663 2026-01-04T02:46:04.742767Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037891 coordinator 72057594046316545 last step 2500 next step 3000 2026-01-04T02:46:04.742812Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037891: waitStep# 3000 readStep# 3000 observedStep# 3000 2026-01-04T02:46:04.742878Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037892 time 3000 2026-01-04T02:46:04.742901Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-01-04T02:46:04.742956Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-04T02:46:04.742975Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:04.742993Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037892 TxInFly 0 2026-01-04T02:46:04.743012Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 
2026-01-04T02:46:04.743088Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037892, clientId# [3:1394:3097], serverId# [3:1397:3100], sessionId# [0:0:0] 2026-01-04T02:46:04.743188Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037892 coordinator 72057594046316545 last step 2500 next step 3000 2026-01-04T02:46:04.743208Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2824: CheckMediatorStateRestored at 72075186224037892: waitStep# 3000 readStep# 3000 observedStep# 3000 2026-01-04T02:46:04.764323Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:485: 72075186224037889 ack split to schemeshard 281474976715663 2026-01-04T02:46:04.768326Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:565: Got TEvSplitPartitioningChanged: opId: 281474976715663, at datashard: 72075186224037889, state: SplitSrcWaitForPartitioningChanged 2026-01-04T02:46:04.770156Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037889 2026-01-04T02:46:04.770213Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037889 2026-01-04T02:46:04.770498Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:46:04.770537Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:21: Progress tx at non-ready tablet 72075186224037889 state 5 2026-01-04T02:46:04.770691Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1287:3017], serverId# [3:1288:3018], sessionId# [0:0:0] 2026-01-04T02:46:04.770890Z node 3 :TX_DATASHARD DEBUG: datashard_split_src.cpp:532: 72075186224037889 ack split partitioning changed to schemeshard 281474976715663 2026-01-04T02:46:04.770953Z node 3 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 1 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:46:04.771002Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardOutOfOrder::TestShardSnapshotReadNoEarlyReply [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:46.141787Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:46.223101Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:46.223444Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:46.223488Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:46.463346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:46.463475Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:46.527865Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494743966474 != 1767494743966477 2026-01-04T02:45:46.541927Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:46.581960Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:46.666314Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:46.950146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:46.962781Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:47.062650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:47.314767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ===== UPSERT initial rows 2026-01-04T02:45:47.514423Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:47.514500Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1035:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:47.514550Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:47.515085Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1038:2873], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:47.515239Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:47.519337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:45:47.662271Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1039:2874], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:45:47.725206Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1096:2912] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ===== Begin SELECT { items { uint32_value: 1 } items { uint32_value: 1 } }, { items { uint32_value: 2 } items { uint32_value: 1 } } ===== UPSERT and commit ... waiting for commit read sets ... captured readset ... captured readset ===== restarting tablet ===== Waiting for commit response ===== Last SELECT { items { uint32_value: 3 } items { uint32_value: 2 } } ... waiting for SysViewsRoster update finished 2026-01-04T02:45:51.913949Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:51.917771Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:45:51.920990Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:51.921242Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:51.921388Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:52.094778Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:52.094899Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:52.119157Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767494749551617 != 1767494749551621 2026-01-04T02:45:52.124499Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:52.169578Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:52.238353Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:52.541391Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:52.553845Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:52.655424Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:52.868353Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:53.075843Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1024:2865], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:53.075933Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1035:2870], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:53.075991Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:53.076641Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1039:2874], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:53.076742Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:53.080221Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:45:53.222384Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1038:2873], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:45:53.258691Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1096:2912] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for at least 2 blocked commits 2026-01-04T02:45:55.620391Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:45:55.620446Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 ... blocked commit for tablet 72075186224037889 ... shards are ready for read-only immediate transactions ... waiting for at least 2 blocked commits ... blocked commit for tablet 72075186224037888 ... blocked commit for tablet 72075186224037889 >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> TraverseDatashard::TraverseTwoTablesServerless >> TKeyValueTest::TestInlineCopyRangeWorks [GOOD] >> TKeyValueTest::TestInlineCopyRangeWorksNewApi >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-clusteradmin >> DistributedEraseTests::ConditionalEraseRowsShouldNotEraseModifiedRows [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-anonymous ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_volatile/unittest >> DataShardVolatile::CompactedVolatileChangesAbort [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:42:31.143636Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:42:31.293886Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:42:31.294478Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:42:31.294545Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:42:31.651437Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:42:31.651558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:42:31.738469Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494548493194 != 1767494548493197 2026-01-04T02:42:31.754741Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:42:31.803677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:42:31.911155Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:42:32.115704Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:42:32.361379Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:42:32.498530Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-04T02:42:32.498633Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:42:32.498814Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:865:2748] 2026-01-04T02:42:32.632233Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:865:2748] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T02:42:32.632362Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:865:2748] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:42:32.634485Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T02:42:32.634629Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:865:2748] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:42:32.635107Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:42:32.635292Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:865:2748] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 1000 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:42:32.635470Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:865:2748] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:42:32.638459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:42:32.639013Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:865:2748] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:42:32.639854Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:865:2748] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:42:32.639922Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:865:2748] txid# 281474976715657 SEND to# [1:847:2730] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:42:32.676900Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:881:2763], Recipient [1:890:2769]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:42:32.678041Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:881:2763], Recipient [1:890:2769]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:42:32.678395Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:890:2769] 2026-01-04T02:42:32.678693Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:42:32.735563Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:881:2763], Recipient [1:890:2769]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:42:32.736502Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:42:32.736699Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:42:32.740130Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:42:32.740237Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:42:32.740994Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:42:32.741555Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:42:32.741743Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:42:32.741914Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:905:2769] in generation 1 2026-01-04T02:42:32.752929Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:42:32.802856Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:42:32.803090Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:42:32.803202Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:907:2779] 2026-01-04T02:42:32.803238Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:42:32.803271Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:42:32.803314Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:42:32.803584Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:890:2769], Recipient [1:890:2769]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:42:32.803655Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:42:32.804046Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:42:32.804160Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:42:32.804263Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:42:32.804326Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:42:32.804867Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:42:32.804917Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:42:32.804955Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:42:32.804986Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:42:32.805043Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:42:32.805206Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:892:2770], Recipient [1:890:2769]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:42:32.805267Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:42:32.805322Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:887:2767], serverId# [1:892:2770], sessionId# [0:0:0] 2026-01-04T02:42:32.805427Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:892:2770] 
2026-01-04T02:42:32.805472Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:42:32.806088Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:42:32.806452Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:42:32.806538Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:42:32.806657Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:42:32.806712Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status ... ted with status OK role: Leader [27:1176:2971] 2026-01-04T02:46:05.788607Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:323: TClient[72075186224037888] send queued [27:1176:2971] 2026-01-04T02:46:05.788695Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037888] push event to server [27:1176:2971] 2026-01-04T02:46:05.788809Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:331: TClient[72075186224037888] shutdown pipe due to pending shutdown request [27:1176:2971] 2026-01-04T02:46:05.788912Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72075186224037888] notify reset [27:1176:2971] 2026-01-04T02:46:05.789233Z node 27 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [27:1175:2970], Recipient [27:919:2782]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 38 } CompactBorrowed: false 2026-01-04T02:46:05.789377Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} queued, type NKikimr::NDataShard::TDataShard::TTxCompactTable 2026-01-04T02:46:05.789531Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:46:05.789679Z node 27 :TABLET_EXECUTOR DEBUG: TCompactionLogic PrepareForceCompaction for 72075186224037888 table 1001, mode Full, forced state None, forced mode Full 2026-01-04T02:46:05.789885Z node 27 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 1 of 72075186224037888 tableId# 38 localTid# 1001, requested from [27:1175:2970], partsCount# 0, memtableSize# 656, memtableWaste# 3952, memtableRows# 2 2026-01-04T02:46:05.790039Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} hope 1 -> done Change{16, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T02:46:05.790168Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:16} Tx{28, NKikimr::NDataShard::TDataShard::TTxCompactTable} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:46:05.790476Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888: task 1, edge 9223372036854775807/0, generation 0 2026-01-04T02:46:05.790572Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:16} starting compaction 2026-01-04T02:46:05.790981Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:17} starting Scan{1 on 1001, Compact{72075186224037888.1.16, eph 1}} 
2026-01-04T02:46:05.791120Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:17} started compaction 1 2026-01-04T02:46:05.791191Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy PrepareCompaction for 72075186224037888 started compaction 1 generation 0 ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR cookie 8036114045278162160 2026-01-04T02:46:05.793957Z node 27 :TABLET_EXECUTOR INFO: Leader{72075186224037888:1:17} Compact 1 on TGenCompactionParams{1001: gen 0 epoch +inf, 0 parts} step 16, product {tx status + 1 parts epoch 2} done 2026-01-04T02:46:05.794237Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CompactionFinished for 72075186224037888: compaction 1, generation 0 2026-01-04T02:46:05.794363Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 1, state Free, final id 0, final level 0 2026-01-04T02:46:05.794418Z node 27 :TABLET_EXECUTOR DEBUG: TGenCompactionStrategy CheckGeneration for 72075186224037888 generation 3, state Free, final id 0, final level 0 2026-01-04T02:46:05.794765Z node 27 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:02.548329Z 2026-01-04T02:46:05.794910Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} queued, type NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs 2026-01-04T02:46:05.795019Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:46:05.795108Z node 27 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2026-01-04T02:46:05.795197Z node 27 :TX_DATASHARD DEBUG: datashard__compaction.cpp:260: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [27:1175:2970]pathId# [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-04T02:46:05.795794Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} hope 1 -> done Change{17, redo 83b alter 0b annex 0, ~{ 27 } -{ }, 0 gb} 2026-01-04T02:46:05.795902Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:18} Tx{29, NKikimr::NDataShard::TDataShard::TTxPersistFullCompactionTs} release 4194304b of static, Memory{0 dyn 0} ... blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR cookie 1846274210338117576 ... 
blocking NKikimr::TEvBlobStorage::TEvPut from TABLET_REQ_WRITE_LOG to BS_PROXY_ACTOR cookie 17487436101768932932 ========= Starting an immediate read ========= 2026-01-04T02:46:06.018121Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72075186224037888] send [27:1108:2916] 2026-01-04T02:46:06.018251Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72075186224037888] push event to server [27:1108:2916] 2026-01-04T02:46:06.018662Z node 27 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [27:1201:2978], Recipient [27:919:2782]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false KeysSize: 1 2026-01-04T02:46:06.018897Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2026-01-04T02:46:06.019031Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:46:06.019230Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-04T02:46:06.019341Z node 27 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2502/281474976715662 IncompleteEdge# v{min} UnprotectedReadEdge# v{min} ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-04T02:46:06.019434Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v2502/18446744073709551615 2026-01-04T02:46:06.019567Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2026-01-04T02:46:06.019777Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-04T02:46:06.019878Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2026-01-04T02:46:06.019976Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:46:06.020047Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:46:06.020100Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2026-01-04T02:46:06.020170Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-04T02:46:06.020198Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:46:06.020216Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T02:46:06.020235Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2026-01-04T02:46:06.020389Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 
72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false } 2026-01-04T02:46:06.020603Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is DelayComplete 2026-01-04T02:46:06.020644Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T02:46:06.020716Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:46:06.020782Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:46:06.020823Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-04T02:46:06.020843Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:46:06.020881Z node 27 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2026-01-04T02:46:06.020957Z node 27 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T02:46:06.021079Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> done Change{18, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T02:46:06.021187Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72075186224037888:1:19} Tx{30, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:46:06.114518Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} queued, type NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep 2026-01-04T02:46:06.114680Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:46:06.114980Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} hope 1 -> done Change{16, redo 134b alter 0b annex 0, ~{ 2 } -{ }, 0 gb} 2026-01-04T02:46:06.115123Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:17} Tx{24, NKikimr::NFlatTxCoordinator::TTxCoordinator::TTxPlanStep} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:46:06.115894Z node 27 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:18} commited cookie 1 for step 17 2026-01-04T02:46:06.116263Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [27:528:2474] 2026-01-04T02:46:06.116368Z node 27 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [27:528:2474] >> AnalyzeColumnshard::Analyze >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-clusteradmin |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_volatile/unittest >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-dbadmin |93.6%| 
[TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |93.6%| [TA] $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_volatile/test-results/unittest/{meta.json ... results_accumulator.log} >> DataShardWrite::UncommittedUpdateLockUncommittedDeleteRow [GOOD] >> DataShardWrite::LocksBrokenStats |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> AsyncIndexChangeExchange::ShouldDeliverChangesOnFreshTable [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-clusteradmin >> TraverseDatashard::TraverseOneTable >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-ordinaryuser >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-dbadmin >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsAsyncIndex [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:50.983529Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:51.107288Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:51.107695Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:51.107764Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:51.413912Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:51.414026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:51.490061Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494748881252 != 1767494748881255 2026-01-04T02:45:51.504643Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:51.547133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:51.626709Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:51.916549Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:51.930062Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:52.040118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:52.091019Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:908:2781] 2026-01-04T02:45:52.091314Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:52.141315Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:52.141465Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:52.143371Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:52.143464Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:52.143524Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:52.143929Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:52.144292Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:52.144381Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:943:2781] in generation 1 2026-01-04T02:45:52.144770Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:912:2784] 2026-01-04T02:45:52.144989Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:52.154839Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:915:2787] 2026-01-04T02:45:52.155067Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:52.164398Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:52.164551Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:52.166065Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-04T02:45:52.166149Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-04T02:45:52.166199Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-04T02:45:52.166505Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:52.166691Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:52.166753Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:956:2784] in generation 1 2026-01-04T02:45:52.167041Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:52.167138Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:52.168515Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-04T02:45:52.168587Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-04T02:45:52.168640Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-04T02:45:52.168984Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:52.169075Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:52.169125Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:957:2787] in generation 1 2026-01-04T02:45:52.180072Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:52.210340Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:52.210562Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:52.210689Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:961:2813] 2026-01-04T02:45:52.210730Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:52.210791Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:52.210832Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:52.211228Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:52.211275Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:45:52.211341Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:52.211405Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:962:2814] 2026-01-04T02:45:52.211446Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:45:52.211473Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:45:52.211499Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:45:52.211886Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:52.211919Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-04T02:45:52.211968Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:52.212050Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:963:2815] 2026-01-04T02:45:52.212074Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-04T02:45:52.212099Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-04T02:45:52.212132Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-04T02:45:52.212363Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:52.212485Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:52.212716Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:52.212775Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:52.212843Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:52.212899Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:52.212957Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-04T02:45:52.213051Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-04T02:45:52.213236Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:898:2776], serverId# [1:909:2782], sessionId# [0:0:0] 2026-01-04T02:45:52.213288Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:45:52.213315Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:52.213341Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-04T02:45:52.213372Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:45:52.213413Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037890 2026-01-04T02: ... datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 3500} 2026-01-04T02:46:10.209072Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2026-01-04T02:46:10.209129Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4020: Send RS 2 at 72075186224037891 from 72075186224037891 to 72075186224037893 txId 281474976715666 2026-01-04T02:46:10.209190Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2026-01-04T02:46:10.209246Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3500 : 281474976715666] from 72075186224037891 at tablet 72075186224037891 send result to client [3:1613:3238], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:46:10.209347Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037891, records: { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 50] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 47] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 50] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 47] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 50] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 47] SchemaVersion: 1 } 2026-01-04T02:46:10.209401Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-01-04T02:46:10.209658Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1613:3238] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715666, shard# 72075186224037891, status# 2 2026-01-04T02:46:10.209841Z node 3 :TX_DATASHARD INFO: datashard_change_sending.cpp:215: TTxRequestChangeRecords Execute: at tablet# 72075186224037891 2026-01-04T02:46:10.210141Z node 3 :TX_DATASHARD DEBUG: datashard_change_sending.cpp:235: Send 3 change records: to# [3:1413:3120], at tablet# 72075186224037891 2026-01-04T02:46:10.210216Z node 3 :TX_DATASHARD INFO: datashard_change_sending.cpp:260: TTxRequestChangeRecords Complete: sent# 3, forgotten# 0, left# 0, at tablet# 72075186224037891 2026-01-04T02:46:10.210300Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3389: Receive RS at 72075186224037893 source 72075186224037891 dest 72075186224037893 producer 72075186224037891 txId 281474976715666 2026-01-04T02:46:10.210399Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:15: TTxReadSet::Execute at 72075186224037893 got read set: {TEvReadSet step# 3500 txid# 
281474976715666 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletProducer# 72075186224037891 ReadSet.Size()# 19 Seqno# 2 Flags# 0} 2026-01-04T02:46:10.210499Z node 3 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037893 2026-01-04T02:46:10.210738Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037893 2026-01-04T02:46:10.210771Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:46:10.210806Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [3500:281474976715666] at 72075186224037893 for LoadAndWaitInRS 2026-01-04T02:46:10.211140Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:10.211536Z node 3 :TX_DATASHARD DEBUG: datashard_change_receiving.cpp:468: Handle TEvChangeExchange::TEvApplyRecords: origin# 72075186224037891, generation# 1, at tablet# 72075186224037892 2026-01-04T02:46:10.222820Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2026-01-04T02:46:10.222897Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3500 : 281474976715666] from 72075186224037893 at tablet 72075186224037893 send result to client [3:1613:3238], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:46:10.222973Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 72075186224037893 {TEvReadSet step# 3500 txid# 281474976715666 TabletSource# 72075186224037891 TabletDest# 72075186224037893 SetTabletConsumer# 72075186224037893 Flags# 0 Seqno# 2} 2026-01-04T02:46:10.223015Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2026-01-04T02:46:10.223117Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1613:3238] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715666, shard# 72075186224037893, status# 2 2026-01-04T02:46:10.223156Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:165: [DistEraser] [3:1613:3238] Reply: txId# 281474976715666, status# OK, error# 2026-01-04T02:46:10.223267Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037891 source 72075186224037891 dest 72075186224037893 consumer 72075186224037893 txId 281474976715666 2026-01-04T02:46:10.223584Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037891 2026-01-04T02:46:10.223640Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037891 2026-01-04T02:46:10.223788Z node 3 :TX_DATASHARD INFO: datashard_change_sending.cpp:310: TTxRemoveChangeRecords Execute: records# 3, at tablet# 72075186224037891 2026-01-04T02:46:10.223814Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1096: RemoveChangeRecord: order: 4, at tablet: 72075186224037891 2026-01-04T02:46:10.223890Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1096: RemoveChangeRecord: order: 5, at tablet: 72075186224037891 2026-01-04T02:46:10.223923Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1096: RemoveChangeRecord: order: 6, at tablet: 72075186224037891 2026-01-04T02:46:10.224099Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# 
[3:1608:3234], serverId# [3:1609:3235], sessionId# [0:0:0] 2026-01-04T02:46:10.224165Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037891 2026-01-04T02:46:10.224195Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037891 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:10.224222Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-01-04T02:46:10.225081Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037893 2026-01-04T02:46:10.225370Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037893 2026-01-04T02:46:10.225537Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037893 2026-01-04T02:46:10.225575Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:10.225623Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715667] at 72075186224037893 for WaitForStreamClearance 2026-01-04T02:46:10.225835Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:10.225895Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2026-01-04T02:46:10.226382Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037893, TxId: 281474976715667, MessageQuota: 1 2026-01-04T02:46:10.226531Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037893, TxId: 281474976715667, MessageQuota: 1 2026-01-04T02:46:10.227897Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037893 2026-01-04T02:46:10.227940Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715667, at: 72075186224037893 2026-01-04T02:46:10.228068Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037893 2026-01-04T02:46:10.228098Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:10.228123Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715667] at 72075186224037893 for ReadTableScan 2026-01-04T02:46:10.228211Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:10.228246Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037893 2026-01-04T02:46:10.228280Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037893 2026-01-04T02:46:10.229177Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037892 2026-01-04T02:46:10.229384Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037892 2026-01-04T02:46:10.229488Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: 
TTxProgressTransaction::Execute at 72075186224037892 2026-01-04T02:46:10.229512Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:10.229535Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715668] at 72075186224037892 for WaitForStreamClearance 2026-01-04T02:46:10.229664Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:10.229694Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-01-04T02:46:10.230093Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037892, TxId: 281474976715668, MessageQuota: 1 2026-01-04T02:46:10.230182Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037892, TxId: 281474976715668, MessageQuota: 1 2026-01-04T02:46:10.272696Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037892 2026-01-04T02:46:10.272756Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715668, at: 72075186224037892 2026-01-04T02:46:10.272929Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-04T02:46:10.272963Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:10.272996Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715668] at 72075186224037892 for ReadTableScan 2026-01-04T02:46:10.273101Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:10.273152Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-01-04T02:46:10.273188Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest >> AnalyzeColumnshard::AnalyzeDeadline ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::DistributedInsertDuplicateWithLocks-Volatile [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:22.428628Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:22.528878Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:22.529441Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:22.529512Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:22.767520Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:22.767618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:22.817126Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494720086829 != 1767494720086832 2026-01-04T02:45:22.830075Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:22.873844Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:22.959714Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:23.234329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:23.247431Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:23.353450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:23.400466Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:881:2763], Recipient [1:896:2773]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:23.401634Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:881:2763], Recipient [1:896:2773]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:23.401921Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:896:2773] 2026-01-04T02:45:23.402159Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:23.413468Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:883:2764], Recipient [1:899:2775]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:23.446720Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:881:2763], Recipient [1:896:2773]: 
NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:23.447050Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:883:2764], Recipient [1:899:2775]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:23.447297Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:899:2775] 2026-01-04T02:45:23.447467Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:23.453937Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:883:2764], Recipient [1:899:2775]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:23.454594Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:23.454739Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:23.456210Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:23.456269Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:23.456308Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:23.456613Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:23.456801Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:23.456898Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2773] in generation 1 2026-01-04T02:45:23.457188Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:23.457244Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:23.458249Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-04T02:45:23.458295Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-04T02:45:23.458342Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-04T02:45:23.458528Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:23.458593Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:23.458633Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:929:2775] in generation 1 2026-01-04T02:45:23.469360Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:23.493155Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:23.493361Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:23.493467Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:932:2794] 2026-01-04T02:45:23.493498Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:23.493530Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at 
tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:23.493560Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:23.493823Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:896:2773], Recipient [1:896:2773]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:23.493873Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:23.493998Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:23.494032Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:45:23.494078Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:23.494130Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:933:2795] 2026-01-04T02:45:23.494148Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:45:23.494172Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:45:23.494195Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:45:23.494450Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:899:2775], Recipient [1:899:2775]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:23.494481Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:23.494622Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:23.494713Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:23.494805Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:23.494838Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:23.494880Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:45:23.494914Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:45:23.494957Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:45:23.494991Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:23.495027Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:23.495060Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-04T02:45:23.495108Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets 
to remove in 72075186224037889 2026-01-04T02:45:23.495178Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:902:2776], Recipient [1:896:2773]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:23.495227Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:23.495269Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:891:2769], serverId# [1:902:2776], sessionId# [0:0:0] 2026-01-04T02:45:23.495308Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:45:23.495329Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:23.495345 ... TASHARD TRACE: volatile_tx.cpp:884: Processed readset with decision 2 from 72075186224037888 to 72075186224037889 at tablet 72075186224037889 2026-01-04T02:46:10.619809Z node 11 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037889 2026-01-04T02:46:10.620399Z node 11 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=1234567890011; 2026-01-04T02:46:10.620642Z node 11 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [3000 : 1234567890011] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } 2026-01-04T02:46:10.620722Z node 11 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:46:10.620844Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [11:921:2784], Recipient [11:919:2782]: {TEvReadSet step# 3000 txid# 1234567890011 TabletSource# 72075186224037888 TabletDest# 72075186224037889 SetTabletConsumer# 72075186224037889 Flags# 0 Seqno# 2} 2026-01-04T02:46:10.620878Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:10.620912Z node 11 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037888 source 72075186224037888 dest 72075186224037889 consumer 72075186224037889 txId 1234567890011 2026-01-04T02:46:10.786215Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [11:1166:2967], Recipient [11:919:2782]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-04T02:46:10.786379Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-04T02:46:10.786472Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2026-01-04T02:46:10.786580Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-04T02:46:10.786625Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution 
plan for [0:5] at 72075186224037888 executing on unit CheckRead 2026-01-04T02:46:10.786673Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:46:10.786714Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:46:10.786754Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2026-01-04T02:46:10.786810Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-04T02:46:10.786832Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:46:10.786851Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T02:46:10.786872Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2026-01-04T02:46:10.786972Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-04T02:46:10.787215Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-04T02:46:10.787275Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[11:1166:2967], 0} after executionsCount# 1 2026-01-04T02:46:10.787332Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[11:1166:2967], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:46:10.787418Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[11:1166:2967], 0} finished in read 2026-01-04T02:46:10.787488Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-04T02:46:10.787509Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T02:46:10.787530Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:46:10.787550Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:46:10.787585Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-04T02:46:10.787604Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:46:10.787626Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2026-01-04T02:46:10.787668Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: 
TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T02:46:10.787795Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-04T02:46:10.788539Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [11:1166:2967], Recipient [11:919:2782]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:46:10.788591Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } 2026-01-04T02:46:10.788700Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [11:1166:2967], Recipient [11:921:2784]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 RangesSize: 1 2026-01-04T02:46:10.788877Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037889, FollowerId 0 2026-01-04T02:46:10.788918Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CheckRead 2026-01-04T02:46:10.788970Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-04T02:46:10.788992Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CheckRead 2026-01-04T02:46:10.789013Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit BuildAndWaitDependencies 2026-01-04T02:46:10.789034Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit BuildAndWaitDependencies 2026-01-04T02:46:10.789067Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037889 2026-01-04T02:46:10.789090Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-04T02:46:10.789109Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit BuildAndWaitDependencies 2026-01-04T02:46:10.789127Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit ExecuteRead 2026-01-04T02:46:10.789145Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit ExecuteRead 2026-01-04T02:46:10.789207Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037889 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 3000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 1000 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1000 } 2026-01-04T02:46:10.789342Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037889 promoting UnprotectedReadEdge to v3000/18446744073709551615 2026-01-04T02:46:10.789373Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037889 Complete read# {[11:1166:2967], 1} after executionsCount# 1 2026-01-04T02:46:10.789399Z node 11 :TX_DATASHARD TRACE: 
datashard__read_iterator.cpp:2370: 72075186224037889 read iterator# {[11:1166:2967], 1} sends rowCount# 1, bytes# 32, quota rows left# 999, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:46:10.789441Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037889 read iterator# {[11:1166:2967], 1} finished in read 2026-01-04T02:46:10.789471Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-04T02:46:10.789488Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit ExecuteRead 2026-01-04T02:46:10.789505Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037889 to execution unit CompletedOperations 2026-01-04T02:46:10.789523Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037889 on unit CompletedOperations 2026-01-04T02:46:10.789550Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037889 is Executed 2026-01-04T02:46:10.789567Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037889 executing on unit CompletedOperations 2026-01-04T02:46:10.789588Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037889 has finished 2026-01-04T02:46:10.789607Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037889 2026-01-04T02:46:10.789652Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037889 2026-01-04T02:46:10.790211Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [11:1166:2967], Recipient [11:921:2784]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-04T02:46:10.790247Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037889 ReadCancel: { ReadId: 1 } { items { int32_value: 1 } items { int32_value: 1001 } }, { items { int32_value: 11 } items { int32_value: 1002 } } |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest >> TraverseDatashard::TraverseOneTableServerless [GOOD] >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-clusteradmin >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-system |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalTable-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-04T02:44:09.962764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:09.962849Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:09.962892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:09.962937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:09.963002Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:09.963037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:09.963123Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:09.963201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:09.964062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:09.965064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:10.109591Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:44:10.109669Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:10.110575Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:10.123852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:10.124324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:10.124523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:10.137335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:10.137565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:10.138309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.138635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:10.141541Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, 
at schemeshard: 72057594046678944 2026-01-04T02:44:10.141734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:10.142932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:10.143000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:10.143123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:10.143172Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:10.143221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:10.143413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.150504Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:245:2058] recipient: [1:15:2062] 2026-01-04T02:44:10.295261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:10.295482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.295704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:44:10.295752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:44:10.296021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:44:10.296114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:10.298204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.298430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:44:10.298650Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.298729Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:44:10.298794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:44:10.298834Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:44:10.300598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.300653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:44:10.300690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:44:10.302414Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.302459Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.302507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.302560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:44:10.313243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:10.315044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:44:10.315247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:44:10.316365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.316496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:10.316560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.316784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:44:10.316833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.317004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:10.317087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:44:10.319210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 3, at schemeshard: 72057594046678944, txId: 253 2026-01-04T02:46:10.664464Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 253, pathId: [OwnerId: 72057594046678944, LocalPathId: 180], version: 5 2026-01-04T02:46:10.664488Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 180] was 2 2026-01-04T02:46:10.664982Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 181 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2026-01-04T02:46:10.665041Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 181 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2026-01-04T02:46:10.665065Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 253 2026-01-04T02:46:10.665087Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 253, pathId: [OwnerId: 72057594046678944, LocalPathId: 181], version: 2 2026-01-04T02:46:10.665112Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 181] was 2 2026-01-04T02:46:10.665672Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 178 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2026-01-04T02:46:10.665734Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 178 Version: 2 PathOwnerId: 72057594046678944, cookie: 253 2026-01-04T02:46:10.665759Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 253 2026-01-04T02:46:10.665781Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 253, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 178], version: 2 2026-01-04T02:46:10.665832Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 178] was 2 2026-01-04T02:46:10.665888Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 253, subscribers: 0 2026-01-04T02:46:10.668158Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 2026-01-04T02:46:10.668251Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 2026-01-04T02:46:10.668334Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 2026-01-04T02:46:10.669728Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 2026-01-04T02:46:10.669830Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 253 TestModificationResult got TxId: 253, wait until txId: 253 TestWaitNotification wait txId: 253 2026-01-04T02:46:10.671584Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 253: send EvNotifyTxCompletion 2026-01-04T02:46:10.671616Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 253 2026-01-04T02:46:10.673428Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 253, at schemeshard: 72057594046678944 2026-01-04T02:46:10.673509Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 253: got EvNotifyTxCompletionResult 2026-01-04T02:46:10.673538Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 253: satisfy waiter [32:4242:6229] TestWaitNotification: OK eventTxId 253 TestWaitNotification wait txId: 245 2026-01-04T02:46:10.675112Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 245: send EvNotifyTxCompletion 2026-01-04T02:46:10.675143Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 245 TestWaitNotification wait txId: 246 2026-01-04T02:46:10.675203Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 246: send EvNotifyTxCompletion 2026-01-04T02:46:10.675224Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 246 TestWaitNotification wait txId: 247 2026-01-04T02:46:10.675270Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 247: send EvNotifyTxCompletion 2026-01-04T02:46:10.675290Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 247 TestWaitNotification wait txId: 248 2026-01-04T02:46:10.675335Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- 
TTxNotificationSubscriber for txId 248: send EvNotifyTxCompletion 2026-01-04T02:46:10.675354Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 248 TestWaitNotification wait txId: 249 2026-01-04T02:46:10.675400Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 249: send EvNotifyTxCompletion 2026-01-04T02:46:10.675420Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 249 TestWaitNotification wait txId: 250 2026-01-04T02:46:10.675464Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 250: send EvNotifyTxCompletion 2026-01-04T02:46:10.675498Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 250 TestWaitNotification wait txId: 251 2026-01-04T02:46:10.675564Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 251: send EvNotifyTxCompletion 2026-01-04T02:46:10.675591Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 251 TestWaitNotification wait txId: 252 2026-01-04T02:46:10.675638Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 252: send EvNotifyTxCompletion 2026-01-04T02:46:10.675662Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 252 2026-01-04T02:46:10.677633Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 245, at schemeshard: 72057594046678944 2026-01-04T02:46:10.677824Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 245: got EvNotifyTxCompletionResult 2026-01-04T02:46:10.677852Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 245: satisfy waiter [32:4245:6232] 2026-01-04T02:46:10.678040Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 246, at schemeshard: 72057594046678944 2026-01-04T02:46:10.678399Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 247, at schemeshard: 72057594046678944 2026-01-04T02:46:10.678449Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 246: got EvNotifyTxCompletionResult 2026-01-04T02:46:10.678473Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 246: satisfy waiter [32:4245:6232] 2026-01-04T02:46:10.678688Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 248, at schemeshard: 72057594046678944 2026-01-04T02:46:10.678802Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 249, at schemeshard: 72057594046678944 2026-01-04T02:46:10.678838Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 247: got EvNotifyTxCompletionResult 2026-01-04T02:46:10.678861Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 247: satisfy waiter [32:4245:6232] 2026-01-04T02:46:10.678978Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 250, at schemeshard: 72057594046678944 2026-01-04T02:46:10.679035Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 248: got EvNotifyTxCompletionResult 2026-01-04T02:46:10.679059Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 248: satisfy waiter [32:4245:6232] 2026-01-04T02:46:10.679169Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 251, at schemeshard: 72057594046678944 2026-01-04T02:46:10.679205Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 249: got EvNotifyTxCompletionResult 2026-01-04T02:46:10.679226Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 249: satisfy waiter [32:4245:6232] 2026-01-04T02:46:10.679347Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 250: got EvNotifyTxCompletionResult 2026-01-04T02:46:10.679371Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 250: satisfy waiter [32:4245:6232] 2026-01-04T02:46:10.679514Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 252, at schemeshard: 72057594046678944 2026-01-04T02:46:10.679619Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 251: got EvNotifyTxCompletionResult 2026-01-04T02:46:10.679642Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 251: satisfy waiter [32:4245:6232] 2026-01-04T02:46:10.679782Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 252: got EvNotifyTxCompletionResult 2026-01-04T02:46:10.679804Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 252: satisfy waiter [32:4245:6232] TestWaitNotification: OK eventTxId 245 TestWaitNotification: OK eventTxId 246 TestWaitNotification: OK eventTxId 247 TestWaitNotification: OK eventTxId 248 TestWaitNotification: OK eventTxId 249 TestWaitNotification: OK eventTxId 250 TestWaitNotification: OK eventTxId 251 TestWaitNotification: OK eventTxId 252 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTableServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:46:01.728490Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:01.822965Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:01.830167Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:01.830521Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:01.830670Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:02.180815Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:02.277943Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:02.278026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:02.325265Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:02.429989Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:03.033044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:03.034107Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:03.034168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:03.034202Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:03.034632Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:03.100197Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:03.652778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:06.347043Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:06.353179Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:06.356423Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:06.381218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:06.381314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:06.409018Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:06.411025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:06.603592Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:06.604462Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:06.605194Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:06.605944Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:06.606221Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:06.606411Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:06.606578Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:06.606775Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:06.606868Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:06.778531Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-04T02:46:06.789932Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:06.790015Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:06.802758Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:06.918758Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:06.991200Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:06.991290Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:07.016351Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:07.016572Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:07.016744Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:07.016788Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:07.016827Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:07.016864Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:07.016901Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:07.016939Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:07.017518Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:07.156512Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2223:2620] 2026-01-04T02:46:07.156959Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-04T02:46:07.159211Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Describe result: PathErrorUnknown 2026-01-04T02:46:07.159270Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Creating table 2026-01-04T02:46:07.159349Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:46:07.162913Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2627], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:07.166776Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2234:2625] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:07.170959Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:07.233771Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:07.249907Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:07.249995Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2302:2660], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:07.261750Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2328:2683] 2026-01-04T02:46:07.262824Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2328:2683], schemeshard id = 72075186224037897 2026-01-04T02:46:07.566022Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. 
Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:46:07.568077Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2594:2886], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:07.571622Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:07.575156Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:07.575267Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on create table tx: 2814749767 ... ... 2026-01-04T02:46:07.949227Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:46:07.949299Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Column diff is empty, finishing 2026-01-04T02:46:08.503901Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:08.542458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:09.229927Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:09.281164Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8275: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, at schemeshard: 72075186224037899 2026-01-04T02:46:09.281217Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8291: Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult, StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037899 2026-01-04T02:46:09.281271Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:3130:3154], at schemeshard: 72075186224037899, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037899 2026-01-04T02:46:09.283479Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3136:3156] 2026-01-04T02:46:09.283804Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:3136:3156], schemeshard id = 72075186224037899 2026-01-04T02:46:10.390136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3470:3578], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:10.390251Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:10.390592Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3488:3583], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:10.390652Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:10.403550Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:10.759705Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3767:3630], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:10.760063Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:10.760585Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3771:3633], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:10.760660Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:10.761704Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3774:3636]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:10.761865Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:46:10.762015Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-01-04T02:46:10.762058Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3777:3639] 2026-01-04T02:46:10.762102Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3777:3639] 2026-01-04T02:46:10.762685Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3778:3564] 2026-01-04T02:46:10.762835Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3777:3639], server id = [2:3778:3564], tablet id = 72075186224037894, status = OK 2026-01-04T02:46:10.763070Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3778:3564], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:46:10.763151Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-04T02:46:10.763592Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-04T02:46:10.763685Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3774:3636], StatRequests.size() = 1 2026-01-04T02:46:10.800119Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:46:10.800463Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3782:3643], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:10.800576Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:10.800996Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3786:3647], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:10.801075Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3789:3650], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:10.801117Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:10.807233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:10.913750Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-04T02:46:10.913864Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:46:10.935071Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3777:3639], schemeshard count = 1 2026-01-04T02:46:11.184520Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3791:3652], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-04T02:46:11.436201Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3907:3723] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:11.454028Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3930:3739]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:11.454375Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:46:11.454435Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3930:3739], StatRequests.size() = 1 2026-01-04T02:46:11.695225Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3972:3609]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-04T02:46:11.698024Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:46:11.698073Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:46:11.698622Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:46:11.698691Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-04T02:46:11.698741Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:46:11.721656Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-04T02:46:11.730018Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-04T02:46:11.730931Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-01-04T02:46:11.731050Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-04T02:46:11.731241Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_erase_rows/unittest >> DistributedEraseTests::ConditionalEraseRowsShouldNotFailOnMissingRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:55.281443Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:55.366874Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:55.367186Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:55.367224Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:55.581186Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:55.581270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:55.652746Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494753283089 != 1767494753283092 2026-01-04T02:45:55.665186Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:55.712173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:55.812609Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:56.093789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:56.107160Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:56.207995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:56.243218Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:908:2781] 2026-01-04T02:45:56.243389Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:56.288997Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:56.289130Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:56.290736Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:56.290811Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:56.290877Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:56.291228Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:56.291583Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:56.291644Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:943:2781] in generation 1 2026-01-04T02:45:56.291987Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:912:2784] 2026-01-04T02:45:56.292190Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:56.301030Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:915:2787] 2026-01-04T02:45:56.301221Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:56.309493Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:56.309616Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:56.310904Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-04T02:45:56.310980Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-04T02:45:56.311033Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-04T02:45:56.311319Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:56.311496Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:56.311548Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:956:2784] in generation 1 2026-01-04T02:45:56.311860Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:56.311949Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:56.313093Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037890 2026-01-04T02:45:56.313142Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037890 2026-01-04T02:45:56.313204Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037890 2026-01-04T02:45:56.313510Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:56.313613Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:56.313662Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037890 persisting started state actor id [1:957:2787] in generation 1 2026-01-04T02:45:56.324428Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:56.350233Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:56.350412Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:56.350519Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:961:2813] 2026-01-04T02:45:56.350568Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:56.350625Z node 1 :TX_DATASHARD INFO: 
datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:56.350657Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:56.350979Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:56.351018Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:45:56.351072Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:56.351119Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:962:2814] 2026-01-04T02:45:56.351140Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:45:56.351174Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:45:56.351204Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:45:56.351554Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:56.351586Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-04T02:45:56.351650Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037890 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:56.351723Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037890, actorId: [1:963:2815] 2026-01-04T02:45:56.351744Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-04T02:45:56.351780Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-04T02:45:56.351801Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-04T02:45:56.351970Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:56.352067Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:56.352277Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:56.352327Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:56.352373Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:56.352420Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:56.352460Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-04T02:45:56.352532Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-04T02:45:56.352669Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:898:2776], serverId# [1:909:2782], sessionId# [0:0:0] 2026-01-04T02:45:56.352711Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:45:56.352744Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:56.352771Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-04T02:45:56.352803Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:45:56.352849Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037890 2026-01-04T02: ... 037888 2026-01-04T02:46:12.254669Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037888 consumer 72075186224037888 txId 281474976715662 2026-01-04T02:46:12.254730Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1293:3022] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037888, status# 2 2026-01-04T02:46:12.254821Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-04T02:46:12.254871Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3000 : 281474976715662] from 72075186224037890 at tablet 72075186224037890 send result to client [3:1293:3022], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:46:12.254921Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 72075186224037890 {TEvReadSet step# 3000 txid# 281474976715662 TabletSource# 72075186224037889 TabletDest# 72075186224037890 SetTabletConsumer# 72075186224037890 Flags# 0 Seqno# 6} 2026-01-04T02:46:12.254949Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-04T02:46:12.255020Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:981: [DistEraser] [3:1293:3022] HandlePlan TEvDataShard::TEvProposeTransactionResult: txId# 281474976715662, shard# 72075186224037890, status# 2 2026-01-04T02:46:12.255055Z node 3 :TX_DATASHARD DEBUG: datashard_distributed_erase.cpp:165: [DistEraser] [3:1293:3022] Reply: txId# 281474976715662, status# OK, error# 2026-01-04T02:46:12.255168Z node 3 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 72075186224037889 source 72075186224037889 dest 72075186224037890 consumer 72075186224037890 txId 281474976715662 2026-01-04T02:46:12.255314Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037889 2026-01-04T02:46:12.255366Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4502: Conditional erase complete: cookie: 4, at: 72075186224037889 2026-01-04T02:46:12.255654Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:46:12.255690Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:12.255719Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-04T02:46:12.255807Z node 3 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:46:12.255900Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [3:1287:3017], serverId# [3:1288:3018], sessionId# [0:0:0] 2026-01-04T02:46:12.256900Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:46:12.257213Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-04T02:46:12.257363Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:46:12.257407Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:12.257448Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715664] at 72075186224037889 for WaitForStreamClearance 2026-01-04T02:46:12.257658Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:12.257712Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:46:12.258194Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037889, TxId: 281474976715664, MessageQuota: 1 2026-01-04T02:46:12.258395Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037889, TxId: 281474976715664, Size: 70, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-04T02:46:12.258510Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037889, TxId: 281474976715664, PendingAcks: 0 2026-01-04T02:46:12.258565Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037889, TxId: 281474976715664, MessageQuota: 0 2026-01-04T02:46:12.270017Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037889 2026-01-04T02:46:12.270086Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715664, at: 72075186224037889 2026-01-04T02:46:12.270263Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:46:12.270302Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:12.270346Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715664] at 72075186224037889 for ReadTableScan 2026-01-04T02:46:12.270481Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:12.270534Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:46:12.270575Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:46:12.272960Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:46:12.273284Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: 
TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:46:12.273497Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:12.273543Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:12.273589Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715665] at 72075186224037888 for WaitForStreamClearance 2026-01-04T02:46:12.273788Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:12.273861Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:12.274350Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 1 2026-01-04T02:46:12.274576Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715665, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-04T02:46:12.274741Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715665, PendingAcks: 0 2026-01-04T02:46:12.274785Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715665, MessageQuota: 0 2026-01-04T02:46:12.301848Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-04T02:46:12.301913Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715665, at: 72075186224037888 2026-01-04T02:46:12.302069Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:46:12.302102Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:12.302136Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715665] at 72075186224037888 for ReadTableScan 2026-01-04T02:46:12.302258Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:12.302308Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:46:12.302356Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:12.304699Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037890 2026-01-04T02:46:12.305006Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037890 2026-01-04T02:46:12.305169Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-04T02:46:12.305214Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:12.305257Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715666] at 72075186224037890 for 
WaitForStreamClearance 2026-01-04T02:46:12.305482Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:12.305539Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-04T02:46:12.306126Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 1 2026-01-04T02:46:12.306350Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715666, Size: 35, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-04T02:46:12.306489Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715666, PendingAcks: 0 2026-01-04T02:46:12.306555Z node 3 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715666, MessageQuota: 0 2026-01-04T02:46:12.343586Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2026-01-04T02:46:12.343657Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715666, at: 72075186224037890 2026-01-04T02:46:12.343795Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-04T02:46:12.343830Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:46:12.343874Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715666] at 72075186224037890 for ReadTableScan 2026-01-04T02:46:12.344001Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:46:12.344064Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-04T02:46:12.344113Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-clusteradmin |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_erase_rows/unittest |93.7%| [TA] $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_erase_rows/test-results/unittest/{meta.json ... 
results_accumulator.log} |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TKeyValueTest::TestConcatToLongKey [GOOD] >> DataShardWrite::LocksBrokenStats [GOOD] >> KqpLimits::QSReplySize+useSink [GOOD] >> KqpLimits::QSReplySize-useSink >> AsyncIndexChangeExchange::ShouldDeliverChangesOnAlteredTable [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestConcatToLongKey [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! 
new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:89:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:92:2057] recipient: [11:91:2120] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:94:2057] recipient: [11:91:2120] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:93:2121] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:209:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:89:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:209:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... t: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:96:2057] recipient: [32:95:2123] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:98:2057] recipient: [32:95:2123] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! !Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:97:2124] Leader for TabletID 72057594037927937 is [32:97:2124] sender: [32:213:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! 
Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:94:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:97:2057] recipient: [33:96:2123] Leader for TabletID 72057594037927937 is [33:98:2124] sender: [33:99:2057] recipient: [33:96:2123] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! !Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! new actor is[33:98:2124] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:54:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:54:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:78:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:81:2057] recipient: [36:80:2112] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:83:2057] recipient: [36:80:2112] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:82:2113] Leader for TabletID 72057594037927937 is [36:82:2113] sender: [36:198:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:78:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:81:2057] recipient: [37:80:2112] Leader for TabletID 72057594037927937 is [37:82:2113] sender: [37:83:2057] recipient: [37:80:2112] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! 
new actor is[37:82:2113] Leader for TabletID 72057594037927937 is [37:82:2113] sender: [37:198:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:79:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:82:2057] recipient: [38:81:2112] Leader for TabletID 72057594037927937 is [38:83:2113] sender: [38:84:2057] recipient: [38:81:2112] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! new actor is[38:83:2113] Leader for TabletID 72057594037927937 is [38:83:2113] sender: [38:199:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:82:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:85:2057] recipient: [39:84:2115] Leader for TabletID 72057594037927937 is [39:86:2116] sender: [39:87:2057] recipient: [39:84:2115] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:86:2116] Leader for TabletID 72057594037927937 is [39:86:2116] sender: [39:202:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:82:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:85:2057] recipient: [40:84:2115] Leader for TabletID 72057594037927937 is [40:86:2116] sender: [40:87:2057] recipient: [40:84:2115] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! 
new actor is[40:86:2116] Leader for TabletID 72057594037927937 is [40:86:2116] sender: [40:202:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:83:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:86:2057] recipient: [41:85:2115] Leader for TabletID 72057594037927937 is [41:87:2116] sender: [41:88:2057] recipient: [41:85:2115] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:87:2116] Leader for TabletID 72057594037927937 is [41:87:2116] sender: [41:203:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:52:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:52:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:86:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:89:2057] recipient: [42:88:2118] Leader for TabletID 72057594037927937 is [42:90:2119] sender: [42:91:2057] recipient: [42:88:2118] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! new actor is[42:90:2119] Leader for TabletID 72057594037927937 is [42:90:2119] sender: [42:206:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:86:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:89:2057] recipient: [43:88:2118] Leader for TabletID 72057594037927937 is [43:90:2119] sender: [43:91:2057] recipient: [43:88:2118] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! 
new actor is[43:90:2119] Leader for TabletID 72057594037927937 is [43:90:2119] sender: [43:206:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:87:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:90:2057] recipient: [44:89:2118] Leader for TabletID 72057594037927937 is [44:91:2119] sender: [44:92:2057] recipient: [44:89:2118] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! !Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! new actor is[44:91:2119] Leader for TabletID 72057594037927937 is [44:91:2119] sender: [44:207:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:14:2061] |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateView-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-system >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-dbadmin >> PersQueueSdkReadSessionTest::ReadSessionWithExplicitlySpecifiedPartitions [GOOD] >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_write/unittest >> DataShardWrite::LocksBrokenStats [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:22.244202Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:22.400090Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:22.400728Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:22.400790Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:22.721206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:22.721343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:22.787169Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494720086809 != 1767494720086812 2026-01-04T02:45:22.801042Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:22.843726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:22.951712Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:23.228887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:23.241830Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:23.353343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:23.399947Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:23.400818Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:23.401926Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:45:23.402133Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:23.440665Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:23.441339Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:23.441430Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:23.443718Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:23.443802Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:23.443852Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:23.445502Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:23.445666Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:23.445779Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:45:23.456643Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:23.488018Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:23.489228Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:23.489444Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:45:23.489491Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:23.489540Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:23.489578Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:23.489738Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:23.490520Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:23.491730Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:23.491888Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:23.491993Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:23.492045Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:23.492111Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:45:23.492149Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:45:23.492186Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:45:23.492219Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:23.492263Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-04T02:45:23.492421Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:23.492471Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:23.492546Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:23.492925Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-04T02:45:23.492974Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:23.493087Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:23.493471Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:45:23.494181Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:23.494293Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:23.494413Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:45:23.494454Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:45:23.494490Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T02:45:23.494526Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:45:23.494844Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:23.494899Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-04T02:45:23.494952Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:45:23.494994Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-04T02:45:23.495062Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-04T02:45:23.495093Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:45:23.495130Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-04T02:45:23.495160Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-04T02:45:23.495193Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:23.497524Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-04T02:45:23.497570Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:23.508201Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:45:23.508288Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474 ... 6715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-04T02:46:14.660599Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CheckWrite 2026-01-04T02:46:14.660625Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-04T02:46:14.660646Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CheckWrite 2026-01-04T02:46:14.660665Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:46:14.660685Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:46:14.660711Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/0 ImmediateWriteEdgeReplied# v2501/0 2026-01-04T02:46:14.660746Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:6] at 72075186224037888 2026-01-04T02:46:14.660767Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-04T02:46:14.660784Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:46:14.660801Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit BlockFailPoint 2026-01-04T02:46:14.660820Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit BlockFailPoint 2026-01-04T02:46:14.660837Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-04T02:46:14.660851Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit BlockFailPoint 2026-01-04T02:46:14.660867Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit ExecuteWrite 2026-01-04T02:46:14.660883Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit ExecuteWrite 2026-01-04T02:46:14.660906Z node 11 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing 
write operation for [0:6] at 72075186224037888 2026-01-04T02:46:14.660972Z node 11 :TX_DATASHARD TRACE: datashard_kqp.cpp:815: KqpEraseLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 2026-01-04T02:46:14.661016Z node 11 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:6] at 72075186224037888 2026-01-04T02:46:14.661064Z node 11 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-04T02:46:14.661126Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:46:14.661153Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteWrite 2026-01-04T02:46:14.661188Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-04T02:46:14.661224Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T02:46:14.661247Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is DelayComplete 2026-01-04T02:46:14.661268Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-04T02:46:14.661288Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:46:14.661307Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:46:14.661336Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-04T02:46:14.661353Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:46:14.661371Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished 2026-01-04T02:46:14.661404Z node 11 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-04T02:46:14.661426Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:6] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T02:46:14.661450Z node 11 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 6 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-04T02:46:14.661494Z node 11 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:46:14.662737Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [11:70:2117], Recipient [11:886:2765]: NKikimrLongTxService.TEvLockStatus LockId: 281474976715661 LockNode: 11 Status: STATUS_NOT_FOUND 2026-01-04T02:46:14.663057Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [11:1111:2914], Recipient [11:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:46:14.663088Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 
2026-01-04T02:46:14.663142Z node 11 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [11:1110:2913], serverId# [11:1111:2914], sessionId# [0:0:0] 2026-01-04T02:46:14.663221Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553169, Sender [11:1109:2912], Recipient [11:886:2765]: NKikimrTxDataShard.TEvGetInfoRequest 2026-01-04T02:46:14.664328Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [11:1114:2917], Recipient [11:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:46:14.664385Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:46:14.664431Z node 11 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [11:1113:2916], serverId# [11:1114:2917], sessionId# [0:0:0] 2026-01-04T02:46:14.664594Z node 11 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [11:1112:2915], Recipient [11:886:2765]: NKikimrTxDataShard.TEvRead ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC RangesSize: 1 2026-01-04T02:46:14.664686Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-04T02:46:14.664738Z node 11 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2501/0 ImmediateWriteEdgeReplied# v2501/18446744073709551615 2026-01-04T02:46:14.664779Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to non-repeatable v2501/18446744073709551615 2026-01-04T02:46:14.664832Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckRead 2026-01-04T02:46:14.664919Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T02:46:14.664964Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckRead 2026-01-04T02:46:14.665001Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T02:46:14.665034Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T02:46:14.665070Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-01-04T02:46:14.665108Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T02:46:14.665127Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T02:46:14.665143Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T02:46:14.665161Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteRead 2026-01-04T02:46:14.665233Z node 11 :TX_DATASHARD TRACE: 
datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 1000 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC } 2026-01-04T02:46:14.665437Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[11:1112:2915], 1000} after executionsCount# 1 2026-01-04T02:46:14.665501Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[11:1112:2915], 1000} sends rowCount# 1, bytes# 32, quota rows left# 18446744073709551614, quota bytes left# 18446744073709551583, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:46:14.665568Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[11:1112:2915], 1000} finished in read 2026-01-04T02:46:14.665627Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T02:46:14.665647Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T02:46:14.665666Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:46:14.665684Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:46:14.665712Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T02:46:14.665728Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:46:14.665751Z node 11 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-01-04T02:46:14.665790Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T02:46:14.665902Z node 11 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_write/unittest >> TraverseDatashard::TraverseTwoTables [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-system >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher >> TBoardSubscriber2DCTest::ReconnectReplica >> TBoardSubscriberTest::SimpleSubscriber >> TBoardSubscriberTest::DropByDisconnect >> TBoardSubscriberTest::ReconnectReplica |93.7%| [TA] $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... results_accumulator.log} |93.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_write/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TBoardSubscriber2DCTest::DropByDisconnect >> TBoardSubscriberTest::ReconnectReplica [GOOD] >> TBoardSubscriberTest::SimpleSubscriber [GOOD] >> TBoardSubscriberTest::DropByDisconnect [GOOD] >> AnalyzeColumnshard::AnalyzeShard [GOOD] >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTables [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:46:07.336966Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:07.440433Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:07.447163Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:07.447422Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:07.447520Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:07.747641Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:07.829537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:07.829664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:07.875480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:07.950154Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:08.591389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:08.592114Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:08.592167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:08.592218Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:08.592824Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:08.658897Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:09.198640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:11.778849Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:11.785214Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:11.788622Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:11.812774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:11.812887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:11.840807Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:11.842444Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:12.003934Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:12.004045Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:12.005389Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.005904Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.006308Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.007013Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.007211Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.007315Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.007513Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.007616Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.007696Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.033022Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:12.199358Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:12.218414Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:12.218480Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:12.286626Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:12.287897Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:12.288105Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:12.288164Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:12.288216Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:12.288267Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:12.288320Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:12.288369Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:12.288906Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:12.289223Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:46:12.291824Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:12.301501Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:46:12.301570Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:46:12.301658Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:12.315544Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:12.327932Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:12.332160Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:12.336330Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:12.336432Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:12.367924Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:46:12.368387Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:46:12.490061Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:12.635316Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:12.764444Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:12.766760Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:12.770075Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:12.773508Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:12.773624Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:13.904383Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:14.267328Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3035:3418], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:14.267503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:14.338469Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3039:3421], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:14.338588Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:14.339534Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3042:3424]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:14.339677Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:46:14.339738Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3044:3426] 2026-01-04T02:46:14.339803Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3044:3426] 2026-01-04T02:46:14.340297Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3045:3180] 2026-01-04T02:46:14.340552Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3044:3426], server id = [2:3045:3180], tablet id = 72075186224037894, status = OK 2026-01-04T02:46:14.340673Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3045:3180], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:46:14.340737Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-04T02:46:14.340970Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-04T02:46:14.341039Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3042:3424], StatRequests.size() = 1 2026-01-04T02:46:14.355581Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:46:14.356078Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3049:3430], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:14.356292Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:14.356884Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3052:3433], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:14.357031Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:14.357177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3056:3437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:14.364704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:14.498701Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-04T02:46:14.498767Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:46:14.519797Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3044:3426], schemeshard count = 1 2026-01-04T02:46:14.760740Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3058:3439], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-04T02:46:14.883682Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3163:3504] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:14.895174Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3186:3520]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:14.895309Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:46:14.895338Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3186:3520], StatRequests.size() = 1 2026-01-04T02:46:15.014825Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:15.336915Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:3529:3585]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:15.337078Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:46:15.337116Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [1:3529:3585], StatRequests.size() = 1 2026-01-04T02:46:15.359443Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:3538:3594]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:15.359552Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-04T02:46:15.359577Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [1:3538:3594], StatRequests.size() = 1 2026-01-04T02:46:15.455035Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3582:3435]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-04T02:46:15.457732Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:46:15.457797Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:46:15.458156Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:46:15.458206Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:46:15.458257Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:46:15.465601Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 
2026-01-04T02:46:15.470525Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-04T02:46:15.470823Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-01-04T02:46:15.470952Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-04T02:46:15.471039Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-04T02:46:15.471251Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:3610:3451]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-04T02:46:15.473786Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:46:15.473844Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:46:15.474096Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:46:15.474139Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:46:15.474184Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:46:15.474350Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-04T02:46:15.476393Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2026-01-04T02:46:15.476694Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 2026-01-04T02:46:15.477053Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 4 ], RowsCount[ 0 ] 2026-01-04T02:46:15.477179Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::SimpleSubscriber [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-clusteradmin |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ReconnectReplica [GOOD] >> TBoardSubscriber2DCTest::SimpleSubscriber |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::DropByDisconnect [GOOD] |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::ReconnectReplica [GOOD] >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineWriteReadRangeLimitThenLimitWorksNewApi [GOOD] Test command 
err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! 
new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! 
new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:92:2121] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:95:2057] recipient: [11:92:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:94:2122] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:210:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:92:2121] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:95:2057] recipient: [12:92:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:94:2122] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:210:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... 85:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:88:2057] recipient: [35:87:2117] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:90:2057] recipient: [35:87:2117] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:89:2118] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:205:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:86:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:89:2057] recipient: [36:88:2117] Leader for TabletID 72057594037927937 is [36:90:2118] sender: [36:91:2057] recipient: [36:88:2117] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:90:2118] Leader for TabletID 72057594037927937 is [36:90:2118] sender: [36:206:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:78:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:81:2057] recipient: [39:80:2112] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:83:2057] recipient: [39:80:2112] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:82:2113] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:198:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:78:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:81:2057] recipient: [40:80:2112] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:83:2057] recipient: [40:80:2112] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! 
new actor is[40:82:2113] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:198:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:79:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:82:2057] recipient: [41:81:2112] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:84:2057] recipient: [41:81:2112] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:83:2113] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:199:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:52:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:52:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:82:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:85:2057] recipient: [42:84:2115] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:87:2057] recipient: [42:84:2115] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! new actor is[42:86:2116] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:202:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:82:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:85:2057] recipient: [43:84:2115] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:87:2057] recipient: [43:84:2115] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! 
new actor is[43:86:2116] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:202:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:83:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:86:2057] recipient: [44:85:2115] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:88:2057] recipient: [44:85:2115] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! !Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! new actor is[44:87:2116] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:105:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:14:2061] !Reboot 72057594037927937 (actor [45:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:85:2057] recipient: [45:39:2086] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:88:2057] recipient: [45:87:2117] Leader for TabletID 72057594037927937 is [45:89:2118] sender: [45:90:2057] recipient: [45:87:2117] !Reboot 72057594037927937 (actor [45:58:2099]) rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! new actor is[45:89:2118] Leader for TabletID 72057594037927937 is [45:89:2118] sender: [45:205:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:85:2057] recipient: [46:39:2086] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:88:2057] recipient: [46:87:2117] Leader for TabletID 72057594037927937 is [46:89:2118] sender: [46:90:2057] recipient: [46:87:2117] !Reboot 72057594037927937 (actor [46:58:2099]) rebooted! !Reboot 72057594037927937 (actor [46:58:2099]) tablet resolver refreshed! 
new actor is[46:89:2118] Leader for TabletID 72057594037927937 is [46:89:2118] sender: [46:205:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:59:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:76:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:86:2057] recipient: [47:39:2086] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:89:2057] recipient: [47:88:2117] Leader for TabletID 72057594037927937 is [47:90:2118] sender: [47:91:2057] recipient: [47:88:2117] !Reboot 72057594037927937 (actor [47:58:2099]) rebooted! !Reboot 72057594037927937 (actor [47:58:2099]) tablet resolver refreshed! new actor is[47:90:2118] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:53:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:59:2057] recipient: [48:53:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:76:2057] recipient: [48:14:2061] |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::ReconnectReplica [GOOD] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::ManySubscribersManyPublisher [GOOD] |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::DropByDisconnect [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeShard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:46:07.683371Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:07.757866Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:07.763397Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:07.763672Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:07.763797Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:08.051139Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:08.127336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:08.127446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:08.173223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:08.238372Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:08.860828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:08.861567Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:08.861604Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:08.861630Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:08.861942Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:08.924962Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:09.437557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:11.860789Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:11.866211Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:11.868784Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:11.890357Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:11.890447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:11.916729Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:11.918083Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:12.081860Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.082338Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.082841Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.083347Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.083556Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.083671Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.083803Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.083936Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.084007Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.207509Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:12.262069Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:12.262170Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:12.274698Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:12.397598Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:12.441394Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:12.441460Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:12.492234Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:12.492411Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:12.492575Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:12.492626Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:12.492670Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:12.492722Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:12.492767Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:12.492816Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:12.493284Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:12.629708Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2223:2620] 2026-01-04T02:46:12.630170Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:12.632254Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Describe result: PathErrorUnknown 2026-01-04T02:46:12.632304Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Creating table 2026-01-04T02:46:12.632382Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:12.636185Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2627], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:12.639942Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2234:2625] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:12.644117Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:12.709251Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:12.726305Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:12.726427Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2302:2660], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:12.740366Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2328:2683] 2026-01-04T02:46:12.741582Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2328:2683], schemeshard id = 72075186224037897 2026-01-04T02:46:13.047670Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:13.049745Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2594:2886], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:13.052968Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:13.055981Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:13.056084Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on create table tx: 28 ... :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:14.185148Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2787:3375], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:14.185315Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:14.185727Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2805:3380], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:14.185781Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:14.200876Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:46:14.291471Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2859:3042];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:46:14.291711Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2859:3042];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:46:14.291953Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2859:3042];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:46:14.292039Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2859:3042];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:46:14.292118Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2859:3042];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:46:14.292203Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2859:3042];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:46:14.292292Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2859:3042];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:46:14.292379Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2859:3042];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:46:14.292465Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2859:3042];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:46:14.292547Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2859:3042];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:46:14.292621Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2859:3042];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:46:14.292686Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037899;self_id=[2:2859:3042];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:46:14.292777Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2859:3042];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:46:14.314656Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:46:14.314779Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:46:14.314884Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:46:14.314915Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:46:14.315072Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:46:14.315111Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:46:14.315187Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:46:14.315216Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:46:14.315275Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:46:14.315317Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:46:14.315373Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:46:14.315412Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:46:14.315606Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:46:14.315644Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:46:14.315761Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:46:14.315799Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:46:14.315842Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:46:14.315870Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:46:14.315913Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:46:14.315942Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:46:14.316054Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:46:14.316091Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2026-01-04T02:46:14.316181Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2026-01-04T02:46:14.316217Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2026-01-04T02:46:14.374919Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2859:3042];ev=NActors::IEventHandle;tablet_id=72075186224037899;tx_id=281474976715659;this=136361934468256;method=TTxController::StartProposeOnExecute;tx_info=281474976715659:TX_KIND_SCHEMA;min=2969;max=18446744073709551615;plan=0;src=[2:1933:2446];cookie=121:2;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:46:14.419867Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:46:14.419970Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; 2026-01-04T02:46:14.420007Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037899;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715659;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=34;result=not_found; waiting actualization: 0/0.000015s |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::DropByDisconnect [GOOD] |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::SimpleSubscriber [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-system >> TBoardSubscriber2DCTest::NotAvailableByShutdown >> TBoardSubscriberTest::NotAvailableByShutdown |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::SimpleSubscriber [GOOD] |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] >> TBoardSubscriberTest::ManySubscribersManyPublisher |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::NotAvailableByShutdown [GOOD] |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::NotAvailableByShutdown [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterDroppingIndex [GOOD] >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-anonymous >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriber2DCTest::NotAvailableByShutdown [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseTwoTablesServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:46:09.617091Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:09.689885Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:09.696174Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:09.696427Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:09.696524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:10.005766Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:10.092999Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:10.093134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:10.140014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:10.221429Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:10.815512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:10.815972Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:10.816009Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:10.816038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:10.816331Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:10.878828Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:11.384339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:13.845951Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:13.851998Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:13.854743Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:13.878479Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:13.878587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:13.907935Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:13.910279Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:14.076219Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:14.076321Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:14.077708Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:14.078328Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:14.078733Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:14.079595Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:14.079946Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:14.080085Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:14.080303Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:14.080429Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:14.080479Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:14.107390Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:14.287552Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:14.310498Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:14.310565Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:14.339027Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:14.340301Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:14.340462Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:14.340507Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:14.340550Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:14.340596Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:14.340649Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:14.340684Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:14.341166Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:14.342269Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:46:14.345504Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-04T02:46:14.350850Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2107:2576] Owner: [2:2106:2575]. Describe result: PathErrorUnknown 2026-01-04T02:46:14.350897Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2107:2576] Owner: [2:2106:2575]. Creating table 2026-01-04T02:46:14.350996Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2107:2576] Owner: [2:2106:2575]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:46:14.387596Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:14.395330Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:14.398388Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2107:2576] Owner: [2:2106:2575]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:14.400898Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:14.400974Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:14.421392Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:46:14.421654Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:46:14.537761Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:14.613554Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-04T02:46:14.790148Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2107:2576] Owner: [2:2106:2575]. 
Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:46:14.791677Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:14.794472Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:14.797026Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2107:2576] Owner: [2:2106:2575]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:14.797117Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2107:2576] Owner: [2:2106:2575]. Subscribe on create table tx: 2814749767 ... ror: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:17.398023Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72075186224037899, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:17.636161Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3718:3620], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:17.686202Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:17.686736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3722:3623], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:17.686815Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:17.687758Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3725:3626]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:17.687904Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:46:17.688060Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-01-04T02:46:17.688097Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3728:3629] 2026-01-04T02:46:17.688169Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3728:3629] 2026-01-04T02:46:17.688697Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3729:3549] 2026-01-04T02:46:17.688849Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3728:3629], server id = [2:3729:3549], tablet id = 72075186224037894, status = OK 2026-01-04T02:46:17.689077Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3729:3549], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:46:17.689141Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-04T02:46:17.689350Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-04T02:46:17.689417Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3725:3626], StatRequests.size() = 1 2026-01-04T02:46:17.702125Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:46:17.702670Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3733:3633], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:17.702835Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:17.703273Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3737:3637], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:17.725037Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3740:3640], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:17.725226Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:17.729953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:17.834066Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-04T02:46:17.834127Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:46:17.855135Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3728:3629], schemeshard count = 1 2026-01-04T02:46:18.086663Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3742:3642], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715662 completed, doublechecking } 2026-01-04T02:46:18.330050Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3851:3706] txid# 281474976715663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:18.340569Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3874:3722]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:18.340681Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:46:18.340707Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3874:3722], StatRequests.size() = 1 2026-01-04T02:46:18.449048Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72075186224037899, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:18.826907Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [1:4208:3790]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:18.827129Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:46:18.827195Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [1:4208:3790], StatRequests.size() = 1 2026-01-04T02:46:18.856133Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [1:4217:3799]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:18.856338Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-04T02:46:18.856369Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [1:4217:3799], StatRequests.size() = 1 2026-01-04T02:46:18.995303Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4262:3796]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-04T02:46:18.998168Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:46:18.998236Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:46:18.998711Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:46:18.998776Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-04T02:46:18.998828Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:46:19.005912Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 2 ] 
2026-01-04T02:46:19.011383Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-04T02:46:19.011608Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-01-04T02:46:19.011878Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-04T02:46:19.011978Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-04T02:46:19.012204Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4290:3812]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-04T02:46:19.014443Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:46:19.014493Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:46:19.014899Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:46:19.014937Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 2 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-04T02:46:19.014978Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 33] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:46:19.015146Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 33] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-04T02:46:19.016972Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2026-01-04T02:46:19.017234Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 2026-01-04T02:46:19.017553Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 4 ], RowsCount[ 0 ] 2026-01-04T02:46:19.017779Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 2 |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTable [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-clusteradmin |93.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/base/ut_board_subscriber/unittest >> TBoardSubscriberTest::ManySubscribersManyPublisher [GOOD] |93.7%| [TM] {BAZEL_UPLOAD} ydb/core/base/ut_board_subscriber/unittest >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-clusteradmin |93.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.7%| [TS] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TA] $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpRm::DisonnectNodes >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-system [GOOD] >> KqpRm::SingleTask |93.8%| [TA] {RESULT} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} |93.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/base/ut_board_subscriber/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardPartitionStatsTopCpuUsageTest::Update [GOOD] |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::Update [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseDatashard::TraverseOneTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:46:13.004024Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:13.100446Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:13.107504Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:13.107788Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:13.107888Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:13.421576Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:13.501939Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:13.502029Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:13.549250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:13.630133Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:14.261797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:14.262210Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:14.262267Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:14.262298Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:14.262658Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:14.328851Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:14.832646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:17.201202Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:17.207144Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:17.209751Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:17.232403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:17.232499Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:17.259421Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:17.260984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:17.417721Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:17.417800Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:17.418893Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:17.419358Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:17.419769Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:17.420527Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:17.420719Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:17.420793Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:17.420985Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:17.421142Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:17.421231Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:17.446524Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:17.611172Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:17.630339Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:17.630416Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:17.652371Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:17.653349Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:17.653531Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:17.653573Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:17.653617Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:17.653658Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:17.653716Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:17.653767Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:17.654329Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:17.654553Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:46:17.656272Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:17.662950Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:46:17.663002Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:46:17.663072Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:17.671679Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:17.707163Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:17.710233Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:17.713039Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:17.713163Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:17.735126Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:46:17.735281Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:46:17.835648Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:17.874942Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:18.103356Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:18.104969Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2553:2875], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:18.107651Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:18.110481Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:18.110589Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 281474976720658 2026-01-04T02:46:18.121577Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on tx: 281474976720658 registered 2026-01-04T02:46:18.291902Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Request: create. Transaction completed: 281474976720658. Doublechecking... 2026-01-04T02:46:18.478221Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:46:18.478300Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Column diff is empty, finishing 2026-01-04T02:46:18.947269Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:19.102675Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2747:3369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:19.102805Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:19.103177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2765:3374], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:19.103261Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:19.117015Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:19.568532Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3053:3425], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:19.568840Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:19.569314Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3057:3428], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:19.569388Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:19.570377Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [1:3060:3431]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:19.570527Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:46:19.570609Z node 1 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [1:3062:3433] 2026-01-04T02:46:19.570676Z node 1 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [1:3062:3433] 2026-01-04T02:46:19.571208Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3063:3191] 2026-01-04T02:46:19.571511Z node 1 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 1, client id = [1:3062:3433], server id = [2:3063:3191], tablet id = 72075186224037894, status = OK 2026-01-04T02:46:19.571663Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:3063:3191], node id = 1, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:46:19.571721Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 1, schemeshard count = 1 2026-01-04T02:46:19.571930Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 18446744073709551615 2026-01-04T02:46:19.571994Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [1:3060:3431], StatRequests.size() = 1 2026-01-04T02:46:19.572217Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:46:19.591479Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3067:3437], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:19.591593Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:19.591986Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3071:3441], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:19.592108Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:19.592310Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:3074:3444], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:19.597634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:19.751992Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-04T02:46:19.752056Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:46:19.772763Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3062:3433], schemeshard count = 1 2026-01-04T02:46:20.016414Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:3076:3446], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-04T02:46:20.161603Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:3182:3514] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:20.172570Z node 1 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [1:3205:3530]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:20.172682Z node 1 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:46:20.172706Z node 1 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [1:3205:3530], StatRequests.size() = 1 2026-01-04T02:46:20.287961Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:3254:3234]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-04T02:46:20.289984Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:46:20.290029Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:46:20.290291Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:46:20.290322Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 1 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:46:20.290361Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:46:20.297074Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-04T02:46:20.301267Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-04T02:46:20.301480Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 2 ], RowsCount[ 0 ] 2026-01-04T02:46:20.301587Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 2026-01-04T02:46:20.301648Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 1 |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateReplication-NoProtect-NoDbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] 
recipient: [1:113:2144] 2026-01-04T02:44:09.962819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:09.962939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:09.963014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:09.963061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:09.963106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:09.963142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:09.963203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:09.963312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:09.964260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:09.965044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:10.094173Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:44:10.094256Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:10.095177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:10.108308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:10.108841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:10.109026Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:10.121228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:10.121440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:10.122267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.122549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:10.125439Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:10.125616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:10.126604Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:10.126653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:10.126728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:10.126760Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:10.126840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:10.127004Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.132177Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:245:2058] recipient: [1:15:2062] 2026-01-04T02:44:10.245611Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:10.245837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.246019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:44:10.246081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:44:10.246294Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:44:10.246361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:10.248521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.248743Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, 
operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:44:10.248952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.249005Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:44:10.249044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:44:10.249075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:44:10.250927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.250978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:44:10.251020Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:44:10.252535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.252580Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.252615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.252664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:44:10.255436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:10.257125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:44:10.257302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:44:10.258431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.258580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:10.258648Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.258952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:44:10.259004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.259156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:10.259217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:44:10.261032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... as 4 2026-01-04T02:46:20.267254Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 190, publications: 4, subscribers: 0 2026-01-04T02:46:20.267277Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 137], 6 2026-01-04T02:46:20.267300Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 138], 6 2026-01-04T02:46:20.267318Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 139], 5 2026-01-04T02:46:20.267336Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 140], 2 2026-01-04T02:46:20.268500Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:20.268574Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:20.268600Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 190 2026-01-04T02:46:20.268626Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 137], version: 6 2026-01-04T02:46:20.268653Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 137] was 2 2026-01-04T02:46:20.269509Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:20.269573Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:20.269597Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 190 2026-01-04T02:46:20.269621Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 138], version: 6 2026-01-04T02:46:20.269643Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 138] was 2 2026-01-04T02:46:20.270576Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:20.270640Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:20.270662Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 190 2026-01-04T02:46:20.270684Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 139], version: 5 2026-01-04T02:46:20.270709Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 2 2026-01-04T02:46:20.271503Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:20.271574Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:20.271598Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 190 2026-01-04T02:46:20.271621Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 140], version: 2 2026-01-04T02:46:20.271643Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 140] was 3 2026-01-04T02:46:20.271691Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 190, subscribers: 0 2026-01-04T02:46:20.273784Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: 
TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-04T02:46:20.273922Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-04T02:46:20.275348Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-04T02:46:20.275429Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 TestModificationResult got TxId: 190, wait until txId: 190 TestWaitNotification wait txId: 190 2026-01-04T02:46:20.276528Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 190: send EvNotifyTxCompletion 2026-01-04T02:46:20.276561Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 190 2026-01-04T02:46:20.277759Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 190, at schemeshard: 72057594046678944 2026-01-04T02:46:20.277857Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 190: got EvNotifyTxCompletionResult 2026-01-04T02:46:20.277895Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 190: satisfy waiter [32:5325:6877] TestWaitNotification: OK eventTxId 190 TestWaitNotification wait txId: 185 2026-01-04T02:46:20.278881Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 185: send EvNotifyTxCompletion 2026-01-04T02:46:20.278912Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 185 TestWaitNotification wait txId: 186 2026-01-04T02:46:20.278987Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 186: send EvNotifyTxCompletion 2026-01-04T02:46:20.279008Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 186 TestWaitNotification wait txId: 187 2026-01-04T02:46:20.279052Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 187: send EvNotifyTxCompletion 2026-01-04T02:46:20.279072Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 187 TestWaitNotification wait txId: 188 2026-01-04T02:46:20.279115Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 188: send EvNotifyTxCompletion 2026-01-04T02:46:20.279134Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 188 TestWaitNotification wait txId: 189 2026-01-04T02:46:20.279189Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 189: send EvNotifyTxCompletion 2026-01-04T02:46:20.279210Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 189 2026-01-04T02:46:20.280618Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 185, at schemeshard: 72057594046678944 2026-01-04T02:46:20.280750Z node 32 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 186, at schemeshard: 72057594046678944 2026-01-04T02:46:20.280850Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 185: got EvNotifyTxCompletionResult 2026-01-04T02:46:20.280881Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 185: satisfy waiter [32:5328:6880] 2026-01-04T02:46:20.280977Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 186: got EvNotifyTxCompletionResult 2026-01-04T02:46:20.280997Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 186: satisfy waiter [32:5328:6880] 2026-01-04T02:46:20.281053Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 187, at schemeshard: 72057594046678944 2026-01-04T02:46:20.281190Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 188, at schemeshard: 72057594046678944 2026-01-04T02:46:20.281239Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 187: got EvNotifyTxCompletionResult 2026-01-04T02:46:20.281260Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 187: satisfy waiter [32:5328:6880] 2026-01-04T02:46:20.281305Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 189, at schemeshard: 72057594046678944 2026-01-04T02:46:20.281417Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 188: got EvNotifyTxCompletionResult 2026-01-04T02:46:20.281440Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 188: satisfy waiter [32:5328:6880] 2026-01-04T02:46:20.281527Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 189: got EvNotifyTxCompletionResult 2026-01-04T02:46:20.281547Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 189: satisfy waiter [32:5328:6880] TestWaitNotification: OK eventTxId 185 TestWaitNotification: OK eventTxId 186 TestWaitNotification: OK eventTxId 187 TestWaitNotification: OK eventTxId 188 TestWaitNotification: OK eventTxId 189 |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] >> KqpRm::SingleTask [GOOD] |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::SingleTask [GOOD] Test command err: 2026-01-04T02:46:22.388724Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:46:22.389215Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001e60/r3tmp/tmp9rL6gA/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:46:22.389855Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001e60/r3tmp/tmp9rL6gA/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001e60/r3tmp/tmp9rL6gA/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 11379178089186115150 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:46:22.441764Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:46:22.442024Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:46:22.456279Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:468:2102] with ResourceBroker at [2:438:2101] 2026-01-04T02:46:22.456382Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:469:2103] 2026-01-04T02:46:22.456430Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:466:2346] with ResourceBroker at 
[1:437:2327] 2026-01-04T02:46:22.456467Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:467:2347] 2026-01-04T02:46:22.456548Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:46:22.456569Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-04T02:46:22.456591Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:46:22.456611Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-04T02:46:22.457814Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:46:22.469098Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767494782 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:46:22.470268Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:46:22.470337Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767494782 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:46:22.470564Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:46:22.470685Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:46:22.470732Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:46:22.470754Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:46:22.470826Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767494782 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:46:22.470970Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:46:22.470984Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:46:22.471030Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767494782 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:46:22.471427Z node 1 
:KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-04T02:46:22.471484Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:46:22.471775Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:46:22.471975Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:46:22.472145Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:46:22.472263Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-04T02:46:22.472336Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-04T02:46:22.472552Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:46:22.474440Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:393: Submitted new kqp_query task kqp-1-2-1 (1 by [1:466:2346]) priority=0 resources={0, 100} 2026-01-04T02:46:22.474487Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning waiting task kqp-1-2-1 (1 by [1:466:2346]) to queue queue_kqp_resource_manager 2026-01-04T02:46:22.474522Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:637: Allocate resources {0, 100} for task kqp-1-2-1 (1 by [1:466:2346]) from queue queue_kqp_resource_manager 2026-01-04T02:46:22.474548Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:679: Assigning in-fly task kqp-1-2-1 (1 by [1:466:2346]) to queue queue_kqp_resource_manager 2026-01-04T02:46:22.474578Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:711: Updated planned resource usage for queue queue_kqp_resource_manager from 0.000000 to 0.250000 (insert task kqp-1-2-1 (1 by [1:466:2346])) 2026-01-04T02:46:22.480180Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:351: TxId: 1, taskId: 2. Allocated TKqpResourcesRequest{ MemoryPool: 1, Memory: 100ExternalMemory: 0 } 2026-01-04T02:46:22.480368Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:528: Finish task kqp-1-2-1 (1 by [1:466:2346]) (release resources {0, 100}) 2026-01-04T02:46:22.480414Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:577: Updated planned resource usage for queue queue_kqp_resource_manager from 0.250000 to 0.000000 (remove task kqp-1-2-1 (1 by [1:466:2346])) 2026-01-04T02:46:22.480457Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 1, taskId: 2. Released resources, Memory: 100, Free Tier: 0, ExecutionUnits: 0. 
|93.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest >> TKeyValueTest::TestRewriteThenLastValue [GOOD] >> TKeyValueTest::TestRenameWorksNewApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateView-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-04T02:44:09.962862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:09.962957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:09.962996Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:09.963031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:09.963064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:09.963086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:09.963132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:09.963201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:09.963909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:09.965034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:10.087701Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:44:10.087802Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:10.088729Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:10.101997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:10.102442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:10.102646Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:10.112709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:10.112962Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:10.113764Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.114143Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:10.116998Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:10.117191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:10.118431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:10.118499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:10.118607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:10.118679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:10.118750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:10.118952Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.127031Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:245:2058] recipient: [1:15:2062] 2026-01-04T02:44:10.266844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:10.267217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.267510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:44:10.267587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:44:10.267931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:44:10.268054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:10.270883Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.271184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:44:10.271476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.271586Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:44:10.271666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:44:10.271734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:44:10.273695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.273742Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:44:10.273842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:44:10.275170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.275219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.275264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.275311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:44:10.277870Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:10.279144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:44:10.279269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:44:10.280107Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.280241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:10.280288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.280486Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:44:10.280526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.280649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:10.280709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:44:10.282155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... as 2 2026-01-04T02:46:21.547587Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-01-04T02:46:21.547614Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-01-04T02:46:21.547637Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-01-04T02:46:21.547654Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 4 2026-01-04T02:46:21.547673Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 2 2026-01-04T02:46:21.548881Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:46:21.548956Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:46:21.548985Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:46:21.549017Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 
202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-01-04T02:46:21.549058Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-01-04T02:46:21.550195Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:46:21.550266Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:46:21.550292Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:46:21.550318Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-01-04T02:46:21.550342Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-01-04T02:46:21.551629Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:46:21.551695Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 4 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:46:21.551719Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:46:21.551758Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 4 2026-01-04T02:46:21.551785Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-01-04T02:46:21.552098Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:46:21.552155Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:46:21.552177Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:46:21.552200Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, 
pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 2 2026-01-04T02:46:21.552222Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 1 2026-01-04T02:46:21.552270Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-01-04T02:46:21.554144Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-04T02:46:21.554259Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-04T02:46:21.555470Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-04T02:46:21.555549Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-01-04T02:46:21.556602Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-01-04T02:46:21.556638Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-01-04T02:46:21.557681Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-01-04T02:46:21.557821Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-01-04T02:46:21.557861Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:2682:4671] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-01-04T02:46:21.558800Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-01-04T02:46:21.558828Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-01-04T02:46:21.558888Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-01-04T02:46:21.558916Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-01-04T02:46:21.558979Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-01-04T02:46:21.558999Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-01-04T02:46:21.559040Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-01-04T02:46:21.559063Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 
TestWaitNotification wait txId: 201 2026-01-04T02:46:21.559114Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-01-04T02:46:21.559134Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-01-04T02:46:21.561018Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-01-04T02:46:21.561121Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-01-04T02:46:21.561199Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-01-04T02:46:21.561234Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-01-04T02:46:21.561261Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:2685:4674] 2026-01-04T02:46:21.561373Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-01-04T02:46:21.561391Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:2685:4674] 2026-01-04T02:46:21.561471Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 72057594046678944 2026-01-04T02:46:21.561519Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-01-04T02:46:21.561539Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:2685:4674] 2026-01-04T02:46:21.561631Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-01-04T02:46:21.561682Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-01-04T02:46:21.561700Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:2685:4674] 2026-01-04T02:46:21.561791Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-01-04T02:46:21.561827Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:2685:4674] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestSetExecutorFastLogPolicy [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: 
[1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! 
new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:105:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! 
new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:87:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:91:2119] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:111:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:112:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099 ... 57] recipient: [56:39:2086] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:142:2057] recipient: [56:141:2158] Leader for TabletID 72057594037927937 is [56:143:2159] sender: [56:144:2057] recipient: [56:141:2158] !Reboot 72057594037927937 (actor [56:58:2099]) rebooted! !Reboot 72057594037927937 (actor [56:58:2099]) tablet resolver refreshed! new actor is[56:143:2159] Leader for TabletID 72057594037927937 is [56:143:2159] sender: [56:259:2057] recipient: [56:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:53:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:59:2057] recipient: [57:53:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:76:2057] recipient: [57:14:2061] !Reboot 72057594037927937 (actor [57:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! 
Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:140:2057] recipient: [57:39:2086] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:143:2057] recipient: [57:142:2158] Leader for TabletID 72057594037927937 is [57:144:2159] sender: [57:145:2057] recipient: [57:142:2158] !Reboot 72057594037927937 (actor [57:58:2099]) rebooted! !Reboot 72057594037927937 (actor [57:58:2099]) tablet resolver refreshed! new actor is[57:144:2159] Leader for TabletID 72057594037927937 is [0:0:0] sender: [58:56:2057] recipient: [58:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [58:56:2057] recipient: [58:53:2097] Leader for TabletID 72057594037927937 is [58:58:2099] sender: [58:59:2057] recipient: [58:53:2097] Leader for TabletID 72057594037927937 is [58:58:2099] sender: [58:76:2057] recipient: [58:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [59:56:2057] recipient: [59:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [59:56:2057] recipient: [59:52:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:59:2057] recipient: [59:52:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:76:2057] recipient: [59:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:53:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:59:2057] recipient: [60:53:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:76:2057] recipient: [60:14:2061] !Reboot 72057594037927937 (actor [60:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:78:2057] recipient: [60:39:2086] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:81:2057] recipient: [60:80:2112] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:83:2057] recipient: [60:80:2112] !Reboot 72057594037927937 (actor [60:58:2099]) rebooted! !Reboot 72057594037927937 (actor [60:58:2099]) tablet resolver refreshed! new actor is[60:82:2113] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:198:2057] recipient: [60:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:53:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:59:2057] recipient: [61:53:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:76:2057] recipient: [61:14:2061] !Reboot 72057594037927937 (actor [61:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:78:2057] recipient: [61:39:2086] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:81:2057] recipient: [61:80:2112] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:83:2057] recipient: [61:80:2112] !Reboot 72057594037927937 (actor [61:58:2099]) rebooted! !Reboot 72057594037927937 (actor [61:58:2099]) tablet resolver refreshed! 
new actor is[61:82:2113] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:198:2057] recipient: [61:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:53:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:59:2057] recipient: [62:53:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:76:2057] recipient: [62:14:2061] !Reboot 72057594037927937 (actor [62:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:79:2057] recipient: [62:39:2086] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:82:2057] recipient: [62:81:2112] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:84:2057] recipient: [62:81:2112] !Reboot 72057594037927937 (actor [62:58:2099]) rebooted! !Reboot 72057594037927937 (actor [62:58:2099]) tablet resolver refreshed! new actor is[62:83:2113] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:199:2057] recipient: [62:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:54:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:59:2057] recipient: [63:54:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:76:2057] recipient: [63:14:2061] !Reboot 72057594037927937 (actor [63:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:81:2057] recipient: [63:39:2086] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:84:2057] recipient: [63:83:2114] Leader for TabletID 72057594037927937 is [63:85:2115] sender: [63:86:2057] recipient: [63:83:2114] !Reboot 72057594037927937 (actor [63:58:2099]) rebooted! !Reboot 72057594037927937 (actor [63:58:2099]) tablet resolver refreshed! new actor is[63:85:2115] Leader for TabletID 72057594037927937 is [63:85:2115] sender: [63:201:2057] recipient: [63:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:53:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:59:2057] recipient: [64:53:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:76:2057] recipient: [64:14:2061] !Reboot 72057594037927937 (actor [64:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:81:2057] recipient: [64:39:2086] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:84:2057] recipient: [64:83:2114] Leader for TabletID 72057594037927937 is [64:85:2115] sender: [64:86:2057] recipient: [64:83:2114] !Reboot 72057594037927937 (actor [64:58:2099]) rebooted! !Reboot 72057594037927937 (actor [64:58:2099]) tablet resolver refreshed! 
new actor is[64:85:2115] Leader for TabletID 72057594037927937 is [64:85:2115] sender: [64:201:2057] recipient: [64:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:52:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:59:2057] recipient: [65:52:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:76:2057] recipient: [65:14:2061] !Reboot 72057594037927937 (actor [65:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:82:2057] recipient: [65:39:2086] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:85:2057] recipient: [65:84:2114] Leader for TabletID 72057594037927937 is [65:86:2115] sender: [65:87:2057] recipient: [65:84:2114] !Reboot 72057594037927937 (actor [65:58:2099]) rebooted! !Reboot 72057594037927937 (actor [65:58:2099]) tablet resolver refreshed! new actor is[65:86:2115] Leader for TabletID 72057594037927937 is [65:86:2115] sender: [65:202:2057] recipient: [65:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:52:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:59:2057] recipient: [66:52:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:76:2057] recipient: [66:14:2061] !Reboot 72057594037927937 (actor [66:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:85:2057] recipient: [66:39:2086] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:88:2057] recipient: [66:87:2117] Leader for TabletID 72057594037927937 is [66:89:2118] sender: [66:90:2057] recipient: [66:87:2117] !Reboot 72057594037927937 (actor [66:58:2099]) rebooted! !Reboot 72057594037927937 (actor [66:58:2099]) tablet resolver refreshed! new actor is[66:89:2118] Leader for TabletID 72057594037927937 is [66:89:2118] sender: [66:205:2057] recipient: [66:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:53:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:59:2057] recipient: [67:53:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:76:2057] recipient: [67:14:2061] !Reboot 72057594037927937 (actor [67:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:85:2057] recipient: [67:39:2086] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:88:2057] recipient: [67:87:2117] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:90:2057] recipient: [67:87:2117] !Reboot 72057594037927937 (actor [67:58:2099]) rebooted! !Reboot 72057594037927937 (actor [67:58:2099]) tablet resolver refreshed! 
new actor is[67:89:2118] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:205:2057] recipient: [67:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:53:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:59:2057] recipient: [68:53:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:76:2057] recipient: [68:14:2061] !Reboot 72057594037927937 (actor [68:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:86:2057] recipient: [68:39:2086] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:89:2057] recipient: [68:88:2117] Leader for TabletID 72057594037927937 is [68:90:2118] sender: [68:91:2057] recipient: [68:88:2117] !Reboot 72057594037927937 (actor [68:58:2099]) rebooted! !Reboot 72057594037927937 (actor [68:58:2099]) tablet resolver refreshed! new actor is[68:90:2118] Leader for TabletID 72057594037927937 is [68:90:2118] sender: [68:206:2057] recipient: [68:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:53:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:59:2057] recipient: [69:53:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:76:2057] recipient: [69:14:2061] |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-dbadmin |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> KqpRm::DisonnectNodes [GOOD] |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/rm_service/ut/unittest >> KqpRm::DisonnectNodes [GOOD] Test command err: 2026-01-04T02:46:22.386874Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:46:22.387264Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f87/r3tmp/tmpiuz9Gc/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:46:22.387845Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001f87/r3tmp/tmpiuz9Gc/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001f87/r3tmp/tmpiuz9Gc/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 1918185040369232018 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:46:22.442062Z node 2 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:46:22.442348Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T02:46:22.456476Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [2:468:2102] with ResourceBroker at [2:438:2101] 2026-01-04T02:46:22.456582Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [2:469:2103] 2026-01-04T02:46:22.456634Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:609: Start KqpResourceManagerActor at [1:466:2346] with ResourceBroker at [1:437:2327] 2026-01-04T02:46:22.456676Z node 1 :KQP_RESOURCE_MANAGER DEBUG: 
kqp_resource_info_exchanger.cpp:121: Start KqpResourceInfoExchangerActor at [1:467:2347] 2026-01-04T02:46:22.456763Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:46:22.456788Z node 2 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-04T02:46:22.456812Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_rm_service.cpp:806: Failed to deliver subscription request to config dispatcher 2026-01-04T02:46:22.456835Z node 1 :KQP_RESOURCE_MANAGER CRIT: kqp_resource_info_exchanger.cpp:410: Failed to deliver subscription request to config dispatcher. 2026-01-04T02:46:22.457832Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:46:22.471393Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767494782 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:46:22.471616Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:46:22.471683Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: data_center update, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767494782 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:46:22.471924Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:46:22.472061Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:462: Received tenant pool status for exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:46:22.472144Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:46:22.472182Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:46:22.472298Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767494782 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:46:22.472491Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:763: Received tenant pool status, serving tenant: /dc-1, board: kqprm+/dc-1 2026-01-04T02:46:22.472509Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:923: Don't set KqpProxySharedResources 2026-01-04T02:46:22.472550Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: tenant updated, payload: NodeId: 2 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372045444738657 } Timestamp: 1767494782 AvailableComputeActors: 100 UsedMemory: 0 TotalMemory: 1000 Memory { Pool: 1 Available: 1000 } ExecutionUnits: 100 2026-01-04T02:46:22.472935Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from 
subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 0 2026-01-04T02:46:22.472999Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:46:22.473329Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:46:22.473573Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:46:22.473768Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:46:22.473941Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-04T02:46:22.474028Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 2 2026-01-04T02:46:22.474257Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:523: Get resources info from node: 1 2026-01-04T02:46:23.498066Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-04T02:46:23.498179Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request 2026-01-04T02:46:23.499609Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 2 2026-01-04T02:46:23.499818Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 2 2026-01-04T02:46:23.499887Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046578946] NodeDisconnected NodeId# 2 2026-01-04T02:46:23.500995Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:75:2076] ServerId# [1:361:2278] TabletId# 72057594037932033 PipeClientId# [2:75:2076] 2026-01-04T02:46:23.501418Z node 2 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [2:152:2089] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2026-01-04T02:46:23.501575Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 2 2026-01-04T02:46:23.501701Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:492: Subcriber is not available for info exchanger, serving tenant: /dc-1, board: kqpexch+/dc-1 2026-01-04T02:46:23.501745Z node 2 :KQP_RESOURCE_MANAGER INFO: kqp_resource_info_exchanger.cpp:167: Kill previous info exchanger subscriber for 'kqpexch+/dc-1' at [2:473:2105], reason: tenant updated 2026-01-04T02:46:23.501904Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:46:23.504283Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:476: Get board info from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:46:23.504587Z node 2 :KQP_RESOURCE_MANAGER DEBUG: kqp_resource_info_exchanger.cpp:498: Get board info update from subscriber, serving tenant: /dc-1, board: kqpexch+/dc-1, with size: 1 2026-01-04T02:46:23.833023Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:423: Schedule Snapshot request |93.8%| [TS] {asan, 
default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/rm_service/ut/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-system |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.8%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TA] $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TA] {RESULT} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/rm_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_NoBuckets [GOOD] |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_NoBuckets [GOOD] |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> AsyncIndexChangeExchange::ShouldRemoveRecordsAfterCancelIndexBuild [GOOD] >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, 
default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_AllBuckets [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-clusteradmin |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-clusteradmin >> TSchemeShardPartitionStatsTopCpuUsageTest::GetLatestMaxCpuUsagePercent [GOOD] |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_AllBuckets [GOOD] |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_SomeBuckets [GOOD] |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::GetLatestMaxCpuUsagePercent [GOOD] |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> TSchemeShardPartitionStatsTopCpuUsageTest::UpdateCpuUsage_SomeBuckets [GOOD] |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest 
|93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression |93.9%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |93.9%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-system >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> PersQueueSdkReadSessionTest::ClosesAfterFailedConnectionToCds [GOOD] Test command err: 2026-01-04T02:44:42.601184Z :WriteAndReadSomeMessagesWithSyncCompression INFO: Random seed for debugging is 1767494682601154 2026-01-04T02:44:42.806417Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331855458923480:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:44:42.806545Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:44:42.840826Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331856544194305:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:44:42.841735Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:44:42.843855Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:44:42.851319Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:44:43.053104Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:44:43.069312Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:44:43.102277Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:44:43.102368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:44:43.103371Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:44:43.103449Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:44:43.139462Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:44:43.139622Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:44:43.140290Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:44:43.177326Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:43.223991Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:44:43.227760Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/002efb/r3tmp/yandexhoieTh.tmp 2026-01-04T02:44:43.227811Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/002efb/r3tmp/yandexhoieTh.tmp 2026-01-04T02:44:43.228083Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/002efb/r3tmp/yandexhoieTh.tmp 2026-01-04T02:44:43.228218Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:44:43.235820Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:44:43.259667Z INFO: TTestServer started on Port 14728 GrpcPort 19139 PQClient connected to localhost:19139 2026-01-04T02:44:43.422294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:43.813298Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:44:43.848847Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:44:45.735057Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331868343826699:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:45.735429Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331868343826691:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:45.735802Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:45.736200Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331868343826708:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:45.736269Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:45.738759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:44:45.756891Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331868343826706:2334], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-04T02:44:46.039510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:44:46.040454Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331868343826801:2986] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:44:46.047650Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591331869429096532:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:44:46.048185Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=YjExYWIxZWQtZDkxZjU1ZjUtZjhlNTk4OTUtYTRiMzYwODY=, ActorId: [2:7591331869429096492:2301], ActorState: ExecuteState, LegacyTraceId: 01ke3e95633gbh9pmfjzwqef0p, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:44:46.053330Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:44:46.060445Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591331872638794150:2347], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:44:46.060837Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NjhhOTU1MTItMjdiOWRmNjktMzU5ODI0MjUtZDI2MGQwNDI=, ActorId: [1:7591331868343826689:2328], ActorState: ExecuteState, LegacyTraceId: 01ke3e9515c58g0fmxg6ak02pw, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:44:46.061200Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 20 ... tion 0 topic 'rt3.dc1--test-topic' owner src 2026-01-04T02:46:26.801673Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:46:26.801692Z node 16 :PERSQUEUE DEBUG: partition.cpp:2423: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxEvent(TMessage) 2026-01-04T02:46:26.801713Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T02:46:26.801726Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:46:26.801745Z node 16 :PERSQUEUE DEBUG: partition.cpp:2487: [72075186224037892][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TMessage) 2026-01-04T02:46:26.801773Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T02:46:26.801786Z node 16 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037892][Partition][0][StateIdle] Batch completed (1) 2026-01-04T02:46:26.801825Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:46:26.801870Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:37: [72075186224037892][Partition][0][StateIdle] TPartition::ReplyOwnerOk. 
Partition: 0 2026-01-04T02:46:26.801912Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-04T02:46:26.802428Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'rt3.dc1--test-topic' requestId: 2026-01-04T02:46:26.802447Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037892] got client message batch for topic 'rt3.dc1--test-topic' partition 0 2026-01-04T02:46:26.802511Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'rt3.dc1--test-topic' partition: 0 messageNo: 0 requestId: cookie: 0 2026-01-04T02:46:26.802789Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:871: session inited cookie: 1 partition: 0 MaxSeqNo: 0 sessionId: src|6edda573-163966b-1d326ca0-425de122_0 2026-01-04T02:46:26.803480Z :INFO: [] MessageGroupId [src] SessionId [] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1767494786803 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:46:26.803607Z :INFO: [] MessageGroupId [src] SessionId [] Write session established. Init response: session_id: "src|6edda573-163966b-1d326ca0-425de122_0" topic: "test-topic" cluster: "dc1" supported_codecs: CODEC_RAW supported_codecs: CODEC_GZIP supported_codecs: CODEC_LZOP 2026-01-04T02:46:26.803788Z :INFO: [] MessageGroupId [src] SessionId [src|6edda573-163966b-1d326ca0-425de122_0] Write session: close. Timeout = 0 ms 2026-01-04T02:46:26.803837Z :INFO: [] MessageGroupId [src] SessionId [src|6edda573-163966b-1d326ca0-425de122_0] Write session will now close 2026-01-04T02:46:26.803884Z :DEBUG: [] MessageGroupId [src] SessionId [src|6edda573-163966b-1d326ca0-425de122_0] Write session: aborting 2026-01-04T02:46:26.804516Z :INFO: [] MessageGroupId [src] SessionId [src|6edda573-163966b-1d326ca0-425de122_0] Write session: gracefully shut down, all writes complete 2026-01-04T02:46:26.804538Z :DEBUG: [] MessageGroupId [src] SessionId [src|6edda573-163966b-1d326ca0-425de122_0] Write session is aborting and will not restart 2026-01-04T02:46:26.804597Z :DEBUG: [] MessageGroupId [src] SessionId [src|6edda573-163966b-1d326ca0-425de122_0] Write session: destroy 2026-01-04T02:46:26.804692Z node 15 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 1 sessionId: src|6edda573-163966b-1d326ca0-425de122_0 grpc read done: success: 0 data: 2026-01-04T02:46:26.804721Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 1 sessionId: src|6edda573-163966b-1d326ca0-425de122_0 grpc read failed 2026-01-04T02:46:26.804749Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 1 sessionId: src|6edda573-163966b-1d326ca0-425de122_0 grpc closed 2026-01-04T02:46:26.804771Z node 15 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 1 sessionId: src|6edda573-163966b-1d326ca0-425de122_0 is DEAD 2026-01-04T02:46:26.805692Z node 15 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:46:26.806028Z node 16 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [15:7591332302601765408:2453] destroyed 2026-01-04T02:46:26.806080Z node 16 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
2026-01-04T02:46:26.806107Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:46:26.806124Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:46:26.806137Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:46:26.806157Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:46:26.806171Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:46:26.821493Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:46:26.821532Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:46:26.821550Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:46:26.821578Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:46:26.821596Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:46:26.827040Z :INFO: [/Root] [/Root] [4fb45abd-e997bd77-7eefce33-54d84ae1] Starting read session 2026-01-04T02:46:26.827074Z :DEBUG: [/Root] [/Root] [4fb45abd-e997bd77-7eefce33-54d84ae1] Starting cluster discovery 2026-01-04T02:46:26.827224Z :INFO: [/Root] [/Root] [4fb45abd-e997bd77-7eefce33-54d84ae1] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4795: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:4795
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:4795. " 2026-01-04T02:46:26.827251Z :DEBUG: [/Root] [/Root] [4fb45abd-e997bd77-7eefce33-54d84ae1] Restart cluster discovery in 0.008467s 2026-01-04T02:46:26.836015Z :DEBUG: [/Root] [/Root] [4fb45abd-e997bd77-7eefce33-54d84ae1] Starting cluster discovery 2026-01-04T02:46:26.836313Z :INFO: [/Root] [/Root] [4fb45abd-e997bd77-7eefce33-54d84ae1] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4795: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:4795
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:4795. " 2026-01-04T02:46:26.836369Z :DEBUG: [/Root] [/Root] [4fb45abd-e997bd77-7eefce33-54d84ae1] Restart cluster discovery in 0.010876s 2026-01-04T02:46:26.847987Z :DEBUG: [/Root] [/Root] [4fb45abd-e997bd77-7eefce33-54d84ae1] Starting cluster discovery 2026-01-04T02:46:26.848187Z :INFO: [/Root] [/Root] [4fb45abd-e997bd77-7eefce33-54d84ae1] Cluster discovery request failed. Status: TRANSPORT_UNAVAILABLE. Issues: "
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4795: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:4795
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:4795. " 2026-01-04T02:46:26.848213Z :DEBUG: [/Root] [/Root] [4fb45abd-e997bd77-7eefce33-54d84ae1] Restart cluster discovery in 0.033748s 2026-01-04T02:46:26.883019Z :DEBUG: [/Root] [/Root] [4fb45abd-e997bd77-7eefce33-54d84ae1] Starting cluster discovery 2026-01-04T02:46:26.883374Z :NOTICE: [/Root] [/Root] [4fb45abd-e997bd77-7eefce33-54d84ae1] Aborting read session. Description: SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4795: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:4795
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:4795. " } 2026-01-04T02:46:26.883560Z :NOTICE: [/Root] [/Root] [4fb45abd-e997bd77-7eefce33-54d84ae1] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } SessionClosed { Status: TRANSPORT_UNAVAILABLE Issues: "
: Error: Failed to discover clusters
: Error: GRpc error: (14): connections to all backends failing; last error: UNKNOWN: ipv4:127.0.0.1:4795: Failed to connect to remote host: Connection refused
: Error: Grpc error response on endpoint localhost:4795
: Error: Endpoint list is empty for database /Root, cluster endpoint localhost:4795. " } 2026-01-04T02:46:26.883675Z :INFO: [/Root] [/Root] [4fb45abd-e997bd77-7eefce33-54d84ae1] Closing read session. Close timeout: 0.000000s 2026-01-04T02:46:26.883778Z :NOTICE: [/Root] [/Root] [4fb45abd-e997bd77-7eefce33-54d84ae1] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:46:26.921912Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:46:26.921947Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:46:26.921960Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:46:26.921977Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:46:26.921990Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:46:27.022258Z node 16 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:46:27.022294Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:46:27.022308Z node 16 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:46:27.022326Z node 16 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:46:27.022339Z node 16 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineCopyRangeWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:88:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:91:2057] recipient: [11:90:2119] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:93:2057] recipient: [11:90:2119] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:92:2120] Leader for TabletID 72057594037927937 is [11:92:2120] sender: [11:208:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:208:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! new actor is[45:82:2113] Leader for TabletID 72057594037927937 is [45:82:2113] sender: [45:198:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:78:2057] recipient: [46:39:2086] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:81:2057] recipient: [46:80:2112] Leader for TabletID 72057594037927937 is [46:82:2113] sender: [46:83:2057] recipient: [46:80:2112] !Reboot 72057594037927937 (actor [46:58:2099]) rebooted! !Reboot 72057594037927937 (actor [46:58:2099]) tablet resolver refreshed! new actor is[46:82:2113] Leader for TabletID 72057594037927937 is [46:82:2113] sender: [46:198:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:59:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:76:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! 
Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:79:2057] recipient: [47:39:2086] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:82:2057] recipient: [47:81:2112] Leader for TabletID 72057594037927937 is [47:83:2113] sender: [47:84:2057] recipient: [47:81:2112] !Reboot 72057594037927937 (actor [47:58:2099]) rebooted! !Reboot 72057594037927937 (actor [47:58:2099]) tablet resolver refreshed! new actor is[47:83:2113] Leader for TabletID 72057594037927937 is [47:83:2113] sender: [47:199:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:53:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:59:2057] recipient: [48:53:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:76:2057] recipient: [48:14:2061] !Reboot 72057594037927937 (actor [48:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:82:2057] recipient: [48:39:2086] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:85:2057] recipient: [48:84:2115] Leader for TabletID 72057594037927937 is [48:86:2116] sender: [48:87:2057] recipient: [48:84:2115] !Reboot 72057594037927937 (actor [48:58:2099]) rebooted! !Reboot 72057594037927937 (actor [48:58:2099]) tablet resolver refreshed! new actor is[48:86:2116] Leader for TabletID 72057594037927937 is [48:86:2116] sender: [48:202:2057] recipient: [48:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:56:2057] recipient: [49:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [49:56:2057] recipient: [49:53:2097] Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:59:2057] recipient: [49:53:2097] Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:76:2057] recipient: [49:14:2061] !Reboot 72057594037927937 (actor [49:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:82:2057] recipient: [49:39:2086] Leader for TabletID 72057594037927937 is [49:58:2099] sender: [49:85:2057] recipient: [49:84:2115] Leader for TabletID 72057594037927937 is [49:86:2116] sender: [49:87:2057] recipient: [49:84:2115] !Reboot 72057594037927937 (actor [49:58:2099]) rebooted! !Reboot 72057594037927937 (actor [49:58:2099]) tablet resolver refreshed! new actor is[49:86:2116] Leader for TabletID 72057594037927937 is [49:86:2116] sender: [49:202:2057] recipient: [49:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:56:2057] recipient: [50:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [50:56:2057] recipient: [50:53:2097] Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:59:2057] recipient: [50:53:2097] Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:76:2057] recipient: [50:14:2061] !Reboot 72057594037927937 (actor [50:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:83:2057] recipient: [50:39:2086] Leader for TabletID 72057594037927937 is [50:58:2099] sender: [50:86:2057] recipient: [50:85:2115] Leader for TabletID 72057594037927937 is [50:87:2116] sender: [50:88:2057] recipient: [50:85:2115] !Reboot 72057594037927937 (actor [50:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [50:58:2099]) tablet resolver refreshed! new actor is[50:87:2116] Leader for TabletID 72057594037927937 is [50:87:2116] sender: [50:203:2057] recipient: [50:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:56:2057] recipient: [51:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [51:56:2057] recipient: [51:54:2097] Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:59:2057] recipient: [51:54:2097] Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:76:2057] recipient: [51:14:2061] !Reboot 72057594037927937 (actor [51:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:86:2057] recipient: [51:39:2086] Leader for TabletID 72057594037927937 is [51:58:2099] sender: [51:89:2057] recipient: [51:88:2118] Leader for TabletID 72057594037927937 is [51:90:2119] sender: [51:91:2057] recipient: [51:88:2118] !Reboot 72057594037927937 (actor [51:58:2099]) rebooted! !Reboot 72057594037927937 (actor [51:58:2099]) tablet resolver refreshed! new actor is[51:90:2119] Leader for TabletID 72057594037927937 is [51:90:2119] sender: [51:206:2057] recipient: [51:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:56:2057] recipient: [52:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [52:56:2057] recipient: [52:53:2097] Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:59:2057] recipient: [52:53:2097] Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:76:2057] recipient: [52:14:2061] !Reboot 72057594037927937 (actor [52:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:86:2057] recipient: [52:39:2086] Leader for TabletID 72057594037927937 is [52:58:2099] sender: [52:89:2057] recipient: [52:88:2118] Leader for TabletID 72057594037927937 is [52:90:2119] sender: [52:91:2057] recipient: [52:88:2118] !Reboot 72057594037927937 (actor [52:58:2099]) rebooted! !Reboot 72057594037927937 (actor [52:58:2099]) tablet resolver refreshed! new actor is[52:90:2119] Leader for TabletID 72057594037927937 is [52:90:2119] sender: [52:206:2057] recipient: [52:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:56:2057] recipient: [53:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [53:56:2057] recipient: [53:53:2097] Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:59:2057] recipient: [53:53:2097] Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:76:2057] recipient: [53:14:2061] !Reboot 72057594037927937 (actor [53:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:87:2057] recipient: [53:39:2086] Leader for TabletID 72057594037927937 is [53:58:2099] sender: [53:90:2057] recipient: [53:89:2118] Leader for TabletID 72057594037927937 is [53:91:2119] sender: [53:92:2057] recipient: [53:89:2118] !Reboot 72057594037927937 (actor [53:58:2099]) rebooted! !Reboot 72057594037927937 (actor [53:58:2099]) tablet resolver refreshed! 
new actor is[53:91:2119] Leader for TabletID 72057594037927937 is [53:91:2119] sender: [53:207:2057] recipient: [53:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:56:2057] recipient: [54:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [54:56:2057] recipient: [54:52:2097] Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:59:2057] recipient: [54:52:2097] Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:76:2057] recipient: [54:14:2061] !Reboot 72057594037927937 (actor [54:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:90:2057] recipient: [54:39:2086] Leader for TabletID 72057594037927937 is [54:58:2099] sender: [54:93:2057] recipient: [54:92:2121] Leader for TabletID 72057594037927937 is [54:94:2122] sender: [54:95:2057] recipient: [54:92:2121] !Reboot 72057594037927937 (actor [54:58:2099]) rebooted! !Reboot 72057594037927937 (actor [54:58:2099]) tablet resolver refreshed! new actor is[54:94:2122] Leader for TabletID 72057594037927937 is [54:94:2122] sender: [54:210:2057] recipient: [54:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:56:2057] recipient: [55:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [55:56:2057] recipient: [55:53:2097] Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:59:2057] recipient: [55:53:2097] Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:76:2057] recipient: [55:14:2061] !Reboot 72057594037927937 (actor [55:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:90:2057] recipient: [55:39:2086] Leader for TabletID 72057594037927937 is [55:58:2099] sender: [55:93:2057] recipient: [55:92:2121] Leader for TabletID 72057594037927937 is [55:94:2122] sender: [55:95:2057] recipient: [55:92:2121] !Reboot 72057594037927937 (actor [55:58:2099]) rebooted! !Reboot 72057594037927937 (actor [55:58:2099]) tablet resolver refreshed! new actor is[55:94:2122] Leader for TabletID 72057594037927937 is [55:94:2122] sender: [55:210:2057] recipient: [55:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:56:2057] recipient: [56:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [56:56:2057] recipient: [56:53:2097] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:59:2057] recipient: [56:53:2097] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:76:2057] recipient: [56:14:2061] !Reboot 72057594037927937 (actor [56:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:91:2057] recipient: [56:39:2086] Leader for TabletID 72057594037927937 is [56:58:2099] sender: [56:94:2057] recipient: [56:93:2121] Leader for TabletID 72057594037927937 is [56:95:2122] sender: [56:96:2057] recipient: [56:93:2121] !Reboot 72057594037927937 (actor [56:58:2099]) rebooted! !Reboot 72057594037927937 (actor [56:58:2099]) tablet resolver refreshed! 
new actor is[56:95:2122] Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [57:56:2057] recipient: [57:53:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:59:2057] recipient: [57:53:2097] Leader for TabletID 72057594037927937 is [57:58:2099] sender: [57:76:2057] recipient: [57:14:2061] |93.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-clusteradmin [GOOD] >> KqpLimits::QSReplySize-useSink [GOOD] >> AnalyzeDatashard::DropTableNavigateError [GOOD] |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |93.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetAllPermissions >> TestSetCloudPermissions::CanSetPermissionsForRootDb >> TestSetCloudPermissions::CanSetPermissionsForDbWithoutCloudUserAttributes >> TestSetCloudPermissions::CanSetPermissionsWithoutGizmoResourse |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateKesus-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateExternalDataSource-NoProtect-NoDbAdmin-clusteradmin [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-04T02:44:09.962772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:09.962837Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:09.962867Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:09.962899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default 
configuration 2026-01-04T02:44:09.962939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:09.962963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:09.963045Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:09.963109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:09.963701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:09.965001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:10.069700Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:44:10.069786Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:10.070632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:10.080327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:10.084171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:10.084410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:10.090057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:10.090280Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:10.092392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.094814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:10.105857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:10.106066Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:10.108413Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:10.108475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:10.108564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:10.108608Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: 
TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:10.108659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:10.108802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.115799Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:239:2058] recipient: [1:15:2062] 2026-01-04T02:44:10.257404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:10.257653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.257903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:44:10.257959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:44:10.258190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:44:10.258254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:10.260463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.260689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:44:10.260908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.260982Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:44:10.261017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:44:10.261053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:44:10.262922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at 
schemeshard: 72057594046678944 2026-01-04T02:44:10.262979Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:44:10.263016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:44:10.264684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.264737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.264781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.264840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:44:10.274593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:10.276446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:44:10.276653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:44:10.277638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.277752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:10.277794Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.278068Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:44:10.278120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.278322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:10.278412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 
72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:44:10.280435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... as 3 2026-01-04T02:46:29.281505Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 190, publications: 4, subscribers: 0 2026-01-04T02:46:29.281535Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 137], 6 2026-01-04T02:46:29.281565Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 138], 6 2026-01-04T02:46:29.281592Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 139], 5 2026-01-04T02:46:29.281618Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 190, [OwnerId: 72057594046678944, LocalPathId: 140], 2 2026-01-04T02:46:29.282826Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:29.282905Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 137 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:29.282935Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 190 2026-01-04T02:46:29.282965Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 137], version: 6 2026-01-04T02:46:29.283000Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 137] was 2 2026-01-04T02:46:29.284137Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:29.284205Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 138 Version: 6 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:29.284230Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 190 2026-01-04T02:46:29.284257Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 138], version: 6 2026-01-04T02:46:29.284288Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 138] was 2 2026-01-04T02:46:29.285595Z node 32 
:FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:29.285675Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:29.285700Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 190 2026-01-04T02:46:29.285723Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 139], version: 5 2026-01-04T02:46:29.285748Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 2 2026-01-04T02:46:29.286374Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:29.286444Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 2 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:29.286468Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 190 2026-01-04T02:46:29.286493Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 140], version: 2 2026-01-04T02:46:29.286519Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 140] was 2 2026-01-04T02:46:29.286573Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 190, subscribers: 0 2026-01-04T02:46:29.288237Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-04T02:46:29.288806Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-04T02:46:29.289953Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-04T02:46:29.290037Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 TestModificationResult got TxId: 190, wait until txId: 190 TestWaitNotification wait txId: 190 2026-01-04T02:46:29.291176Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 190: send 
EvNotifyTxCompletion 2026-01-04T02:46:29.291212Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 190 2026-01-04T02:46:29.292500Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 190, at schemeshard: 72057594046678944 2026-01-04T02:46:29.292587Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 190: got EvNotifyTxCompletionResult 2026-01-04T02:46:29.292619Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 190: satisfy waiter [32:2943:4932] TestWaitNotification: OK eventTxId 190 TestWaitNotification wait txId: 185 2026-01-04T02:46:29.293583Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 185: send EvNotifyTxCompletion 2026-01-04T02:46:29.293616Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 185 TestWaitNotification wait txId: 186 2026-01-04T02:46:29.293681Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 186: send EvNotifyTxCompletion 2026-01-04T02:46:29.293701Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 186 TestWaitNotification wait txId: 187 2026-01-04T02:46:29.293746Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 187: send EvNotifyTxCompletion 2026-01-04T02:46:29.293767Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 187 TestWaitNotification wait txId: 188 2026-01-04T02:46:29.293850Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 188: send EvNotifyTxCompletion 2026-01-04T02:46:29.293872Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 188 TestWaitNotification wait txId: 189 2026-01-04T02:46:29.293917Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 189: send EvNotifyTxCompletion 2026-01-04T02:46:29.293935Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 189 2026-01-04T02:46:29.295181Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 185, at schemeshard: 72057594046678944 2026-01-04T02:46:29.295290Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 185: got EvNotifyTxCompletionResult 2026-01-04T02:46:29.295320Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 185: satisfy waiter [32:2946:4935] 2026-01-04T02:46:29.295554Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 186, at schemeshard: 72057594046678944 2026-01-04T02:46:29.295716Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 186: got EvNotifyTxCompletionResult 2026-01-04T02:46:29.295751Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 186: satisfy waiter [32:2946:4935] 2026-01-04T02:46:29.295806Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 187, at 
schemeshard: 72057594046678944 2026-01-04T02:46:29.295923Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 188, at schemeshard: 72057594046678944 2026-01-04T02:46:29.295959Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 187: got EvNotifyTxCompletionResult 2026-01-04T02:46:29.295979Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 187: satisfy waiter [32:2946:4935] 2026-01-04T02:46:29.296078Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 189, at schemeshard: 72057594046678944 2026-01-04T02:46:29.296114Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 188: got EvNotifyTxCompletionResult 2026-01-04T02:46:29.296134Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 188: satisfy waiter [32:2946:4935] 2026-01-04T02:46:29.296229Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 189: got EvNotifyTxCompletionResult 2026-01-04T02:46:29.296251Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 189: satisfy waiter [32:2946:4935] TestWaitNotification: OK eventTxId 185 TestWaitNotification: OK eventTxId 186 TestWaitNotification: OK eventTxId 187 TestWaitNotification: OK eventTxId 188 TestWaitNotification: OK eventTxId 189 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::DropTableNavigateError [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:59.459373Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:59.541301Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:45:59.549185Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:59.549436Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:59.549529Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:59.938167Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:00.040619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:00.040726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:00.088353Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:00.113723Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:00.799223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:00.799825Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:00.799877Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:00.799908Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:00.800423Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:00.864756Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:01.408405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:04.337476Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:04.345771Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:04.382280Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:04.382419Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-04T02:46:04.411912Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:04.414024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:04.606898Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.607713Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.608670Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.609585Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.609892Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.610113Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.610349Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.610538Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.610700Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.786390Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:04.798036Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:04.798121Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:04.811473Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:05.016568Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:05.061745Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:05.061875Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:05.093267Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:05.094572Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:05.094823Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:05.094892Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:05.094949Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:05.095004Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:05.095068Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: 
[72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:05.095117Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:05.095628Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:05.223919Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2194:2603] 2026-01-04T02:46:05.225928Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:05.237585Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2201:2609] Owner: [2:2200:2608]. Describe result: PathErrorUnknown 2026-01-04T02:46:05.237658Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2201:2609] Owner: [2:2200:2608]. Creating table 2026-01-04T02:46:05.237753Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2201:2609] Owner: [2:2200:2608]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:05.248524Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2256:2633], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:05.251135Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2239:2626] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:05.253927Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2201:2609] Owner: [2:2200:2608]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:05.285024Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:05.301341Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:05.301431Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2293:2654], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:05.315562Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2327:2681] 2026-01-04T02:46:05.316062Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2327:2681], schemeshard id = 72075186224037897 2026-01-04T02:46:05.632727Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2201:2609] Owner: [2:2200:2608]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:05.635566Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2600:2888], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:05.639509Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:05.643277Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2201:2609] Owner: [2:2200:2608]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:05.643371Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2201:2609] Owner: [2:2200:2608]. Subscribe on create table tx: 281474976720658 2026-01-04T02:46:05.656015Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2201:2609] Owner: [2:22 ... :46:08.276790Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:46:08.428506Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:3229:3281] 2026-01-04T02:46:08.431114Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3227:3480] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Types: TYPE_COUNT_MIN_SKETCH Database: "" } 2026-01-04T02:46:08.431179Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: `operationId', DatabaseName: `', Types: 1 2026-01-04T02:46:08.431218Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. 
Create new force traversal table, OperationId: `operationId', PathId: [OwnerId: 72075186224037897, LocalPathId: 34], ColumnTags: 2026-01-04T02:46:08.494889Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:111: [72075186224037894] TTxAnalyze::Complete 2026-01-04T02:46:08.524903Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037899 not found 2026-01-04T02:46:08.526008Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037900 not found 2026-01-04T02:46:08.526404Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037902 not found 2026-01-04T02:46:08.526485Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72075186224037888 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037901 not found 2026-01-04T02:46:08.578895Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-04T02:46:08.578980Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:46:08.643772Z node 1 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [1:3124:3449], schemeshard count = 1 2026-01-04T02:46:11.118618Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:46:11.119041Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 1 2026-01-04T02:46:11.119386Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2026-01-04T02:46:12.895026Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:46:14.119807Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:46:14.120324Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 2 2026-01-04T02:46:14.120606Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 2 2026-01-04T02:46:15.812950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:46:15.813019Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:15.978231Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:46:17.200687Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:46:17.200741Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:17.278644Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:46:17.279025Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 3 2026-01-04T02:46:17.279199Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 3 2026-01-04T02:46:19.134691Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:46:20.515455Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), 
node count = 1, schemeshard count = 1 2026-01-04T02:46:20.515886Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 4 2026-01-04T02:46:20.516095Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 4 2026-01-04T02:46:22.858068Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:46:24.346158Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:46:24.346817Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 5 2026-01-04T02:46:24.347267Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 5 2026-01-04T02:46:26.848549Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:46:26.848645Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:46:26.848691Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:46:26.848733Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-04T02:46:26.954337Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:309: [72075186224037894] EvPropagateTimeout 2026-01-04T02:46:28.747732Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:46:28.748120Z node 1 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 1 cookie: 6 2026-01-04T02:46:28.748329Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 6 2026-01-04T02:46:28.873943Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 0, at schemeshard: 72075186224037897 2026-01-04T02:46:28.874006Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:46:28.874229Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 25, entries count: 1, are all stats full: 1 2026-01-04T02:46:28.886940Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:46:29.563710Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-04T02:46:29.563818Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:667: [72075186224037894] ScheduleNextAnalyze. table [OwnerId: 72075186224037897, LocalPathId: 34] was deleted, deleting its statistics 2026-01-04T02:46:29.565124Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4164:3645], ActorId: [2:4165:3646], Starting query actor #1 [2:4166:3647] 2026-01-04T02:46:29.565212Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4165:3646], ActorId: [2:4166:3647], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:46:29.569402Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4165:3646], ActorId: [2:4166:3647], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MzFiY2ZmOTUtNzVkODU2M2EtYjI3MDdjY2YtYjg3YmJkZmY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DELETE FROM `.metadata/_statistics` WHERE owner_id = $owner_id AND local_path_id = $local_path_id; 2026-01-04T02:46:29.659191Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4175:3656]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:29.659378Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:46:29.659428Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4177:3658] 2026-01-04T02:46:29.659474Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4177:3658] 2026-01-04T02:46:29.659770Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4177:3658], server id = [2:4178:3659], tablet id = 72075186224037894, status = OK 2026-01-04T02:46:29.659833Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4178:3659] 2026-01-04T02:46:29.659906Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4178:3659], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:46:29.659956Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:46:29.660031Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:46:29.660081Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4175:3656], StatRequests.size() = 1 2026-01-04T02:46:29.660156Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:46:29.832314Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4165:3646], ActorId: [2:4166:3647], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MzFiY2ZmOTUtNzVkODU2M2EtYjI3MDdjY2YtYjg3YmJkZmY=, TxId: 2026-01-04T02:46:29.832378Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4165:3646], ActorId: [2:4166:3647], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzFiY2ZmOTUtNzVkODU2M2EtYjI3MDdjY2YtYjg3YmJkZmY=, TxId: 2026-01-04T02:46:29.832681Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4164:3645], ActorId: [2:4165:3646], Got response [2:4166:3647] SUCCESS 2026-01-04T02:46:29.833106Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:46:29.845762Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:46:29.845845Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3227:3480] 2026-01-04T02:46:29.846325Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4200:3671]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-04T02:46:29.848428Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:46:29.848466Z node 2 :STATISTICS ERROR: service_impl.cpp:799: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] Navigate failed 2026-01-04T02:46:29.848492Z node 2 :STATISTICS DEBUG: service_impl.cpp:1329: ReplyFailed(), request id = 2 |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |93.9%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-system |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest >> TestSetCloudPermissions::CanSetAllPermissions [GOOD] >> TestSetCloudPermissions::CanSetPermissionsForRootDb [GOOD] >> TestSetCloudPermissions::CanSetPermissionsForDbWithoutCloudUserAttributes [GOOD] >> TestSetCloudPermissions::CanSetPermissionsWithoutGizmoResourse [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} 
ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> AsyncIndexChangeExchange::ShouldDeliverChangesOnSplitMerge [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetPermissionsForRootDb [GOOD] Test command err: 2026-01-04T02:46:31.316970Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332326621191118:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:31.317061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:31.500978Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:31.527483Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:31.527636Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:31.564734Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:31.622804Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:31.656307Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.schemas.getMetadata ydb.clusters.get ydb.clusters.manage ydb.databases.list ydb.clusters.monitor ydb.streams.write ydb.tables.write ydb.databases.create ydb.databases.connect ydb.tables.select) 2026-01-04T02:46:31.656359Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d4fd46f07d0] Connect to grpc://localhost:24170 2026-01-04T02:46:31.659336Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d4fd46f07d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.schemas.getMetadata" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: 
"ydb.clusters.manage" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.databases.l...(truncated) } 2026-01-04T02:46:31.667005Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d4fd46f07d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:46:31.667791Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-04T02:46:31.667914Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root, user: user1@as, from ip: >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetAllPermissions [GOOD] Test command err: 2026-01-04T02:46:31.316824Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332324653524647:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:31.316891Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:31.535270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:31.535349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:31.554424Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:31.565292Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:31.580423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:31.649628Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.schemas.getMetadata ydb.clusters.get ydb.clusters.manage ydb.databases.list ydb.clusters.monitor ydb.streams.write ydb.developerApi.update ydb.tables.write ydb.databases.create ydb.databases.connect ydb.developerApi.get ydb.tables.select) 2026-01-04T02:46:31.649726Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c9f5daf07d0] Connect to grpc://localhost:30185 2026-01-04T02:46:31.652510Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c9f5daf07d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "database12345" type: "ydb.database" } resource_path { id: "folder12345" type: "resource-manager.folder" } permission: "ydb.schemas.getMetadata" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.manage" } items { 
resource_path { id: "database12345" type: "ydb.databa...(truncated) } 2026-01-04T02:46:31.667458Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c9f5daf07d0] Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:46:31.667809Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-04T02:46:31.667913Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root/db, user: user1@as, from ip: |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetPermissionsForDbWithoutCloudUserAttributes [GOOD] Test command err: 2026-01-04T02:46:31.317042Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332326965673737:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:31.317123Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:31.523460Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:31.534777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:31.534912Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:31.566194Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:31.612609Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:31.613776Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332326965673693:2081] 1767494791312502 != 1767494791312505 2026-01-04T02:46:31.649653Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.clusters.get ydb.clusters.manage ydb.clusters.monitor) 2026-01-04T02:46:31.649729Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7c7809cf07d0] Connect to grpc://localhost:11466 2026-01-04T02:46:31.653096Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7c7809cf07d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.manage" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.monitor" } } result_filter: ALL_FAILED } 2026-01-04T02:46:31.666907Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7c7809cf07d0] 
Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:46:31.667750Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-04T02:46:31.667866Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root/db, user: user1@as, from ip: |94.0%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest >> TestSetCloudPermissions::CanSetPermissionsWithoutGizmoResourse [GOOD] Test command err: 2026-01-04T02:46:31.316901Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332326542946358:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:31.316957Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:31.546462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:31.546567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:31.590661Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:31.591418Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332326542946315:2081] 1767494791312502 != 1767494791312505 2026-01-04T02:46:31.591443Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:31.607536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:31.654245Z node 1 :TICKET_PARSER TRACE: ticket_parser_impl.h:519: Ticket **** (8E120919) asking for AccessServiceBulkAuthorization( ydb.schemas.getMetadata ydb.clusters.get ydb.clusters.manage ydb.databases.list ydb.clusters.monitor ydb.streams.write ydb.tables.write ydb.databases.create ydb.databases.connect ydb.tables.select) 2026-01-04T02:46:31.654323Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7d2bcfaf07d0] Connect to grpc://localhost:28153 2026-01-04T02:46:31.657951Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7d2bcfaf07d0] Request BulkAuthorizeRequest { iam_token: "**** (8E120919)" actions { items { resource_path { id: "database12345" type: "ydb.database" } resource_path { id: "folder12345" type: "resource-manager.folder" } permission: "ydb.schemas.getMetadata" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.get" } items { resource_path { id: "cluster.folder98765" type: "resource-manager.folder" } permission: "ydb.clusters.manage" } items { resource_path { id: "database12345" type: "ydb.databa...(truncated) } 2026-01-04T02:46:31.667154Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7d2bcfaf07d0] 
Response BulkAuthorizeResponse { subject { user_account { id: "user1" } } } 2026-01-04T02:46:31.667794Z node 1 :TICKET_PARSER DEBUG: ticket_parser_impl.h:1875: Ticket **** (8E120919) () has now valid token of user1@as 2026-01-04T02:46:31.667930Z node 1 :GRPC_PROXY_NO_CONNECT_ACCESS DEBUG: grpc_request_check_actor.h:718: Skip check permission connect db, user is a admin, database: /Root/db, user: user1@as, from ip: |94.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/grpc_request_check_actor_ut/unittest |94.1%| [TA] $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.1%| [TA] {RESULT} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/grpc_services/grpc_request_check_actor_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.0%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletInAggregate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:59.815430Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:59.919769Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:45:59.927381Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:59.927768Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:59.927916Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:00.346730Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:00.446024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:00.446134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:00.493078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:00.579031Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:01.199353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:01.199817Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:01.199857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:01.199889Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:01.200303Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:01.263297Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:01.763646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:04.559792Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:04.565471Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:04.568196Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:04.590640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:04.590718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:04.619661Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:04.621865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:04.802569Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:04.802690Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:04.804153Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.804826Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.805362Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.806441Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.806694Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.806854Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.807118Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.807254Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.807359Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.833464Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:05.086901Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:05.110840Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:05.110928Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:05.140051Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:05.141274Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:05.141536Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:05.141593Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:05.141659Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:05.141715Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:05.141766Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:05.141838Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:05.142460Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:05.142769Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:46:05.145457Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:05.155555Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:46:05.155637Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:46:05.155773Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:05.169798Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:05.181564Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:05.190595Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:05.194511Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:05.194622Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:05.226315Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:46:05.226709Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:46:05.339138Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:05.376541Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:05.618685Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:05.620806Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:05.624364Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:05.627265Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:05.627366Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-01-04T02:46:32.869857Z node 2 :STATISTICS DEBUG: tx_init.cpp:89: [72075186224037894] Loaded global traversal round: 2 2026-01-04T02:46:32.869896Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-01-04T02:46:32.869978Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-01-04T02:46:32.870037Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:32.870130Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-01-04T02:46:32.870190Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:32.870249Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:32.870308Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:32.870467Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:46:32.871274Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:32.872106Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-04T02:46:32.872185Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-04T02:46:32.872322Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5585:4792] Owner: [2:5584:4791]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:46:32.872383Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5585:4792] Owner: [2:5584:4791]. 
Column diff is empty, finishing 2026-01-04T02:46:32.873658Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-04T02:46:32.873731Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-04T02:46:32.875403Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-04T02:46:32.892424Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5594:4799] 2026-01-04T02:46:32.892594Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5546:4771], server id = [2:5594:4799], tablet id = 72075186224037894, status = OK 2026-01-04T02:46:32.892796Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5594:4799], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-01-04T02:46:32.893071Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5595:4800] 2026-01-04T02:46:32.893170Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5595:4800], schemeshard id = 72075186224037897 2026-01-04T02:46:32.965457Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-04T02:46:32.965671Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 2 2026-01-04T02:46:32.966348Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5532:4758], server id = [2:5536:4762], tablet id = 72075186224037900 2026-01-04T02:46:32.966398Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:32.966905Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5600:4805], server id = [2:5604:4809], tablet id = 72075186224037899, status = OK 2026-01-04T02:46:32.967018Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5600:4805], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:32.967424Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5601:4806], server id = [2:5605:4810], tablet id = 72075186224037900, status = OK 2026-01-04T02:46:32.967483Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5601:4806], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:32.968633Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5602:4807], server id = [2:5606:4811], tablet id = 72075186224037901, status = OK 2026-01-04T02:46:32.968692Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5602:4807], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:32.969142Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5603:4808], server id = [2:5607:4812], tablet id = 72075186224037902, status = OK 2026-01-04T02:46:32.969198Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5603:4808], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:32.970526Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: 
Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-04T02:46:32.971152Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5600:4805], server id = [2:5604:4809], tablet id = 72075186224037899 2026-01-04T02:46:32.971185Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:32.971823Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-04T02:46:32.972227Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5603:4808], server id = [2:5607:4812], tablet id = 72075186224037902 2026-01-04T02:46:32.972257Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:32.972700Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-04T02:46:32.973334Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5602:4807], server id = [2:5606:4811], tablet id = 72075186224037901 2026-01-04T02:46:32.973364Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:32.974232Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-04T02:46:32.974283Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-04T02:46:32.974549Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-04T02:46:32.974802Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-04T02:46:32.975057Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5614:4818], ActorId: [2:5615:4819], Starting query actor #1 [2:5616:4820] 2026-01-04T02:46:32.975123Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5615:4819], ActorId: [2:5616:4820], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:46:32.978238Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5601:4806], server id = [2:5605:4810], tablet id = 72075186224037900 2026-01-04T02:46:32.978273Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:32.979830Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5615:4819], ActorId: [2:5616:4820], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=Y2E5ZDA0NGUtYzEyMzM0ODAtNGNiNzk2NmYtZDhlMTcwZjc=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:46:33.095393Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5625:4829]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:33.095621Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:46:33.095656Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5625:4829], StatRequests.size() = 1 2026-01-04T02:46:33.213376Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5615:4819], ActorId: [2:5616:4820], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=Y2E5ZDA0NGUtYzEyMzM0ODAtNGNiNzk2NmYtZDhlMTcwZjc=, TxId: 2026-01-04T02:46:33.213444Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5615:4819], ActorId: [2:5616:4820], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Y2E5ZDA0NGUtYzEyMzM0ODAtNGNiNzk2NmYtZDhlMTcwZjc=, TxId: 2026-01-04T02:46:33.213775Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5614:4818], ActorId: [2:5615:4819], Got response [2:5616:4820] SUCCESS 2026-01-04T02:46:33.214115Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:46:33.228297Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:46:33.228361Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-01-04T02:46:33.327917Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5652:4837]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:33.328212Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:46:33.328252Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:46:33.328473Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:46:33.328505Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:46:33.328551Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:46:33.331185Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::QSReplySize-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 64658, MsgBus: 1398 2026-01-04T02:41:41.171444Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331079381455453:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:41.171495Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:41.377908Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:41.557313Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:41.557451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:41.565363Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:41.624392Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:41.629263Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591331079381455427:2081] 1767494501170747 != 1767494501170750 2026-01-04T02:41:41.644425Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:41.802246Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:41.802267Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:41.802275Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:41.802391Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:42.229926Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:42.293036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:42.397883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:42.582510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:42.770722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:42.864349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:44.535619Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331092266360299:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:44.535750Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:44.536112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331092266360309:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:44.536174Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:44.862484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:44.948094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:45.011482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:45.088458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:45.164243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:45.225486Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:45.285545Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:45.367307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:45.471390Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331096561329099:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:45.471472Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:45.471526Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331096561329104:2491], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:45.471805Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331096561329107:2493], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:45.471860Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:45.475097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:45.495526Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331096561329106:2492], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:41:45.570808Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331096561329188:5485] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:46.171531Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591331079381455453:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:46.171598Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:47.156079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_creat ... tion cookie mismatch for subscription [5:7591332254513997538:2081] 1767494775732948 != 1767494775732951 2026-01-04T02:46:15.835610Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:15.835706Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:15.854842Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:15.896135Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:15.896169Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:15.896186Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:15.896303Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:15.915104Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:16.288223Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:16.296306Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:16.354071Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:16.538896Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:16.613933Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:16.764429Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:18.725237Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591332267398901291:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:18.725320Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:18.725502Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591332267398901301:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:18.725539Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:18.793902Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:18.826066Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:18.858940Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:18.892542Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:18.922124Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:18.955866Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:18.989098Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:19.038236Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:19.125645Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591332271693869469:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:19.125754Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:19.125791Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591332271693869474:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:19.125963Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591332271693869476:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:19.126013Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:19.129681Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:19.141551Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591332271693869477:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:19.224472Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591332271693869529:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:20.722873Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:20.743458Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[5:7591332254513997565:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:20.745290Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:29.231667Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NmYzZDhmNTktNDA5Yzc2YzItYWUzYmYwOGUtODQzN2ZkY2U=, ActorId: [5:7591332306053609042:2665], ActorState: ExecuteState, LegacyTraceId: 01ke3ec8c717hn7zvkja452wnz, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Intermediate data materialization exceeded size limit (88240925 > 50331648). This usually happens when trying to write large amounts of data or to perform lookup by big collection of keys in single query. Consider using smaller batches of data." issue_code: 2013 severity: 1 } trace_id#
: Error: Intermediate data materialization exceeded size limit (88240925 > 50331648). This usually happens when trying to write large amounts of data or to perform lookup by big collection of keys in single query. Consider using smaller batches of data., code: 2013 |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateBlockStoreVolume-Protect-DbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-anonymous |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateKesus-NoProtect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-anonymous >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithAsyncCompression [GOOD] >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomDotRanges_DelayRS [GOOD] Test command err: 2026-01-04T02:45:34.737673Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:34.739253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:34.739302Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:34.748483Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:34.748759Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:45:34.749005Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:34.793311Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:34.800534Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:34.800627Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:34.802226Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:45:34.802294Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:45:34.802374Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:45:34.802790Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:34.804115Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:34.804202Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:45:34.870123Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:34.897492Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:45:34.897671Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:34.897819Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:45:34.897868Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:45:34.897927Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:45:34.897981Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:34.898200Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.898249Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.898543Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:45:34.898643Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:45:34.898728Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:34.898763Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:34.898796Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:45:34.898840Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:45:34.898879Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:45:34.898925Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:45:34.898977Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:34.899068Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.899104Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.899156Z node 1 :TX_DATASHARD DEBUG: 
datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:45:34.904980Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:45:34.905048Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:34.905156Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:45:34.905330Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:45:34.905375Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:45:34.905424Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:45:34.905487Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:45:34.905547Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:45:34.905588Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:45:34.905624Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:34.905980Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:34.906022Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:45:34.906056Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:45:34.906087Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:34.906139Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:45:34.906183Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:45:34.906217Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:45:34.906263Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:34.906296Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:34.918381Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:45:34.918460Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit 
StoreSchemeTx 2026-01-04T02:45:34.918495Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:34.918544Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:45:34.918614Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:34.919155Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.919230Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.919294Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:45:34.919437Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:45:34.919478Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:45:34.919646Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:34.919694Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:45:34.919744Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:45:34.919812Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:45:34.923610Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:45:34.923681Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:34.923941Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.923984Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.924053Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:34.924091Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:34.924150Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:34.924204Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:45:34.924240Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... 
ent [32:349:2316]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2026-01-04T02:46:37.336607Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.336626Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2026-01-04T02:46:37.336672Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2026-01-04T02:46:37.336692Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.336712Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2026-01-04T02:46:37.336758Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2026-01-04T02:46:37.336777Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.336798Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2026-01-04T02:46:37.336861Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2026-01-04T02:46:37.336882Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.336902Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2026-01-04T02:46:37.336952Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2026-01-04T02:46:37.336975Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.337001Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2026-01-04T02:46:37.337090Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2026-01-04T02:46:37.337111Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.337132Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2026-01-04T02:46:37.337178Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender 
[32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2026-01-04T02:46:37.337197Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.337215Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2026-01-04T02:46:37.337249Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2026-01-04T02:46:37.337268Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.337286Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2026-01-04T02:46:37.337331Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2026-01-04T02:46:37.337350Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.337369Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2026-01-04T02:46:37.337420Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2026-01-04T02:46:37.337449Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.337469Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2026-01-04T02:46:37.337514Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2026-01-04T02:46:37.337534Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.337553Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2026-01-04T02:46:37.337596Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2026-01-04T02:46:37.337615Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.337634Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2026-01-04T02:46:37.337678Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 
269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2026-01-04T02:46:37.337697Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.337717Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2026-01-04T02:46:37.337761Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2026-01-04T02:46:37.337780Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.337799Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2026-01-04T02:46:37.337859Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2026-01-04T02:46:37.337878Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.337898Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2026-01-04T02:46:37.337952Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2026-01-04T02:46:37.337979Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.337998Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2026-01-04T02:46:37.338059Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2026-01-04T02:46:37.338085Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.338104Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2026-01-04T02:46:37.338164Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2026-01-04T02:46:37.338181Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.338200Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2026-01-04T02:46:37.338243Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, 
received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2026-01-04T02:46:37.338262Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.338281Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2026-01-04T02:46:37.338324Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:238:2230], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-01-04T02:46:37.338342Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:46:37.338361Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 28 25 27 28 30 29 31 30 28 24 28 31 29 18 31 31 31 15 21 31 31 29 18 24 2 31 9 - - 3 3 - actual 28 25 27 28 30 29 31 30 28 24 28 31 29 18 31 31 31 15 21 31 31 29 18 24 2 31 9 - - 3 3 - interm 2 4 2 6 0 6 4 4 4 4 6 - 6 2 4 - - - - - - - - - 2 3 4 - - 3 3 - |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |94.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow-isSink-isOlap >> KqpCost::ScanScriptingRangeFullScan-SourceRead >> KqpCost::WriteRow+isSink-isOlap >> KqpCost::IndexLookupAndTake+useSink >> KqpCost::IndexLookup+useSink >> KqpCost::OlapWriteRow >> KqpCost::CTASWithRetry+isOlap >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink >> KqpCost::WriteRowInsertFails-isSink-isOlap >> KqpCost::ScanQueryRangeFullScan+SourceRead |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |94.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeAggregate [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:46:04.721057Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:04.816064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:04.824139Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:04.824533Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:04.824679Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:05.209115Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:05.286412Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:05.286536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:05.334096Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:05.440204Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:06.015411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:06.015804Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:06.015833Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:06.015865Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:06.016265Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:06.081514Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:06.593615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:09.153735Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:09.159281Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:09.161958Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:09.185680Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:09.185782Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:09.213659Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:09.215202Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:09.381364Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:09.381499Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:09.383009Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.383791Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.384400Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.385635Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.385913Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.386052Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.386388Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.386574Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.386745Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.413242Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:09.623100Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:09.647617Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:09.647701Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:09.676615Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:09.677950Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:09.678171Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:09.678228Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:09.678280Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:09.678328Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:09.678393Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:09.678451Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:09.724077Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:09.724390Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:46:09.727021Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:09.736040Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:46:09.736114Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:46:09.736225Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:09.748304Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:09.765474Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:09.769884Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:09.773354Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:09.773461Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:09.802350Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:46:09.802735Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:46:09.917772Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:09.943956Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:10.196533Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:10.198414Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:10.201623Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:10.204250Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:10.204322Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... T02:46:36.560023Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2026-01-04T02:46:36.560053Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1767494796455329 2026-01-04T02:46:36.560079Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-01-04T02:46:36.560109Z node 2 :STATISTICS DEBUG: tx_init.cpp:89: [72075186224037894] Loaded global traversal round: 2 2026-01-04T02:46:36.560142Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-01-04T02:46:36.560214Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-01-04T02:46:36.560260Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:36.560324Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-01-04T02:46:36.560375Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:36.560417Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:36.560461Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:36.560589Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:46:36.561505Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:36.561796Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5570:4781] Owner: [2:5569:4780]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:46:36.561859Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5570:4781] Owner: [2:5569:4780]. 
Column diff is empty, finishing 2026-01-04T02:46:36.562262Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-04T02:46:36.562318Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-04T02:46:36.563214Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-04T02:46:36.563272Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-04T02:46:36.563916Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-04T02:46:36.580510Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5579:4788] 2026-01-04T02:46:36.580641Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5535:4760], server id = [2:5579:4788], tablet id = 72075186224037894, status = OK 2026-01-04T02:46:36.580820Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5579:4788], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-01-04T02:46:36.580982Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5580:4789] 2026-01-04T02:46:36.581050Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5580:4789], schemeshard id = 72075186224037897 2026-01-04T02:46:36.616299Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-04T02:46:36.616434Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2026-01-04T02:46:36.617265Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5585:4793], server id = [2:5589:4797], tablet id = 72075186224037899, status = OK 2026-01-04T02:46:36.617663Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5585:4793], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:36.617997Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5586:4794], server id = [2:5590:4798], tablet id = 72075186224037900, status = OK 2026-01-04T02:46:36.618042Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5586:4794], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:36.618645Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5587:4795], server id = [2:5591:4799], tablet id = 72075186224037901, status = OK 2026-01-04T02:46:36.618702Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5587:4795], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:36.619196Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5588:4796], server id = [2:5592:4800], tablet id = 72075186224037902, status = OK 2026-01-04T02:46:36.619233Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5588:4796], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:36.622827Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 
2026-01-04T02:46:36.623480Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5585:4793], server id = [2:5589:4797], tablet id = 72075186224037899 2026-01-04T02:46:36.623514Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:36.624043Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-04T02:46:36.624444Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5586:4794], server id = [2:5590:4798], tablet id = 72075186224037900 2026-01-04T02:46:36.624472Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:36.624875Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-04T02:46:36.625265Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5588:4796], server id = [2:5592:4800], tablet id = 72075186224037902 2026-01-04T02:46:36.625287Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:36.625535Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-04T02:46:36.625569Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-04T02:46:36.625741Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-04T02:46:36.625931Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-04T02:46:36.626298Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5605:4809], ActorId: [2:5606:4810], Starting query actor #1 [2:5607:4811] 2026-01-04T02:46:36.626347Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5606:4810], ActorId: [2:5607:4811], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:46:36.628594Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5587:4795], server id = [2:5591:4799], tablet id = 72075186224037901 2026-01-04T02:46:36.628620Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:36.629140Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5606:4810], ActorId: [2:5607:4811], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NTRhNGI1ZDUtNGZhMDA0YzEtNTdmZDc3ZDMtNTk5YWY5NTM=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:46:36.707459Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5616:4820]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:36.707694Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:46:36.707732Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5616:4820], StatRequests.size() = 1 2026-01-04T02:46:36.834071Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5606:4810], ActorId: [2:5607:4811], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTRhNGI1ZDUtNGZhMDA0YzEtNTdmZDc3ZDMtNTk5YWY5NTM=, TxId: 2026-01-04T02:46:36.834146Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5606:4810], ActorId: [2:5607:4811], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTRhNGI1ZDUtNGZhMDA0YzEtNTdmZDc3ZDMtNTk5YWY5NTM=, TxId: 2026-01-04T02:46:36.834448Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5605:4809], ActorId: [2:5606:4810], Got response [2:5607:4811] SUCCESS 2026-01-04T02:46:36.834755Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:46:36.870503Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:46:36.870565Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-01-04T02:46:36.927066Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5643:4828]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:36.927412Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:46:36.927458Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:46:36.927724Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:46:36.927790Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:46:36.927827Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:46:36.930922Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> KqpCost::CTAS+isOlap |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-anonymous >> TKeyValueTest::TestRenameWorksNewApi [GOOD] >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestRenameWorksNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! 
new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! 
new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! 
new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:87:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:89:2118] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:92:2057] recipient: [11:89:2118] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! 
new actor is[11:91:2119] Leader for TabletID 72057594037927937 is [11:91:2119] sender: [11:111:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:88:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:91:2057] recipient: [12:90:2119] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:93:2057] recipient: [12:90:2119] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:92:2120] Leader for TabletID 72057594037927937 is [12:92:2120] sender: [12:112:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13 ... recipient: [59:52:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:59:2057] recipient: [59:52:2097] Leader for TabletID 72057594037927937 is [59:58:2099] sender: [59:76:2057] recipient: [59:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [60:56:2057] recipient: [60:53:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:59:2057] recipient: [60:53:2097] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:76:2057] recipient: [60:14:2061] !Reboot 72057594037927937 (actor [60:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:78:2057] recipient: [60:39:2086] Leader for TabletID 72057594037927937 is [60:58:2099] sender: [60:81:2057] recipient: [60:80:2112] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:83:2057] recipient: [60:80:2112] !Reboot 72057594037927937 (actor [60:58:2099]) rebooted! !Reboot 72057594037927937 (actor [60:58:2099]) tablet resolver refreshed! new actor is[60:82:2113] Leader for TabletID 72057594037927937 is [60:82:2113] sender: [60:198:2057] recipient: [60:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [61:56:2057] recipient: [61:53:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:59:2057] recipient: [61:53:2097] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:76:2057] recipient: [61:14:2061] !Reboot 72057594037927937 (actor [61:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:78:2057] recipient: [61:39:2086] Leader for TabletID 72057594037927937 is [61:58:2099] sender: [61:81:2057] recipient: [61:80:2112] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:83:2057] recipient: [61:80:2112] !Reboot 72057594037927937 (actor [61:58:2099]) rebooted! !Reboot 72057594037927937 (actor [61:58:2099]) tablet resolver refreshed! new actor is[61:82:2113] Leader for TabletID 72057594037927937 is [61:82:2113] sender: [61:198:2057] recipient: [61:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [62:56:2057] recipient: [62:53:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:59:2057] recipient: [62:53:2097] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:76:2057] recipient: [62:14:2061] !Reboot 72057594037927937 (actor [62:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:79:2057] recipient: [62:39:2086] Leader for TabletID 72057594037927937 is [62:58:2099] sender: [62:82:2057] recipient: [62:81:2112] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:84:2057] recipient: [62:81:2112] !Reboot 72057594037927937 (actor [62:58:2099]) rebooted! !Reboot 72057594037927937 (actor [62:58:2099]) tablet resolver refreshed! new actor is[62:83:2113] Leader for TabletID 72057594037927937 is [62:83:2113] sender: [62:199:2057] recipient: [62:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [63:56:2057] recipient: [63:54:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:59:2057] recipient: [63:54:2097] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:76:2057] recipient: [63:14:2061] !Reboot 72057594037927937 (actor [63:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:82:2057] recipient: [63:39:2086] Leader for TabletID 72057594037927937 is [63:58:2099] sender: [63:85:2057] recipient: [63:84:2115] Leader for TabletID 72057594037927937 is [63:86:2116] sender: [63:87:2057] recipient: [63:84:2115] !Reboot 72057594037927937 (actor [63:58:2099]) rebooted! !Reboot 72057594037927937 (actor [63:58:2099]) tablet resolver refreshed! new actor is[63:86:2116] Leader for TabletID 72057594037927937 is [63:86:2116] sender: [63:202:2057] recipient: [63:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [64:56:2057] recipient: [64:53:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:59:2057] recipient: [64:53:2097] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:76:2057] recipient: [64:14:2061] !Reboot 72057594037927937 (actor [64:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:82:2057] recipient: [64:39:2086] Leader for TabletID 72057594037927937 is [64:58:2099] sender: [64:85:2057] recipient: [64:84:2115] Leader for TabletID 72057594037927937 is [64:86:2116] sender: [64:87:2057] recipient: [64:84:2115] !Reboot 72057594037927937 (actor [64:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [64:58:2099]) tablet resolver refreshed! new actor is[64:86:2116] Leader for TabletID 72057594037927937 is [64:86:2116] sender: [64:202:2057] recipient: [64:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [65:56:2057] recipient: [65:52:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:59:2057] recipient: [65:52:2097] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:76:2057] recipient: [65:14:2061] !Reboot 72057594037927937 (actor [65:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:83:2057] recipient: [65:39:2086] Leader for TabletID 72057594037927937 is [65:58:2099] sender: [65:86:2057] recipient: [65:85:2115] Leader for TabletID 72057594037927937 is [65:87:2116] sender: [65:88:2057] recipient: [65:85:2115] !Reboot 72057594037927937 (actor [65:58:2099]) rebooted! !Reboot 72057594037927937 (actor [65:58:2099]) tablet resolver refreshed! new actor is[65:87:2116] Leader for TabletID 72057594037927937 is [65:87:2116] sender: [65:203:2057] recipient: [65:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [66:56:2057] recipient: [66:52:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:59:2057] recipient: [66:52:2097] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:76:2057] recipient: [66:14:2061] !Reboot 72057594037927937 (actor [66:58:2099]) on event NKikimr::TEvKeyValue::TEvCollect ! Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:84:2057] recipient: [66:39:2086] Leader for TabletID 72057594037927937 is [66:58:2099] sender: [66:87:2057] recipient: [66:86:2116] Leader for TabletID 72057594037927937 is [66:88:2117] sender: [66:89:2057] recipient: [66:86:2116] !Reboot 72057594037927937 (actor [66:58:2099]) rebooted! !Reboot 72057594037927937 (actor [66:58:2099]) tablet resolver refreshed! new actor is[66:88:2117] Leader for TabletID 72057594037927937 is [66:88:2117] sender: [66:108:2057] recipient: [66:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [67:56:2057] recipient: [67:53:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:59:2057] recipient: [67:53:2097] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:76:2057] recipient: [67:14:2061] !Reboot 72057594037927937 (actor [67:58:2099]) on event NKikimr::TEvKeyValue::TEvCompleteGC ! Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:85:2057] recipient: [67:39:2086] Leader for TabletID 72057594037927937 is [67:58:2099] sender: [67:88:2057] recipient: [67:87:2117] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:90:2057] recipient: [67:87:2117] !Reboot 72057594037927937 (actor [67:58:2099]) rebooted! !Reboot 72057594037927937 (actor [67:58:2099]) tablet resolver refreshed! 
new actor is[67:89:2118] Leader for TabletID 72057594037927937 is [67:89:2118] sender: [67:109:2057] recipient: [67:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [68:56:2057] recipient: [68:53:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:59:2057] recipient: [68:53:2097] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:76:2057] recipient: [68:14:2061] !Reboot 72057594037927937 (actor [68:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:88:2057] recipient: [68:39:2086] Leader for TabletID 72057594037927937 is [68:58:2099] sender: [68:91:2057] recipient: [68:90:2120] Leader for TabletID 72057594037927937 is [68:92:2121] sender: [68:93:2057] recipient: [68:90:2120] !Reboot 72057594037927937 (actor [68:58:2099]) rebooted! !Reboot 72057594037927937 (actor [68:58:2099]) tablet resolver refreshed! new actor is[68:92:2121] Leader for TabletID 72057594037927937 is [68:92:2121] sender: [68:208:2057] recipient: [68:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [69:56:2057] recipient: [69:53:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:59:2057] recipient: [69:53:2097] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:76:2057] recipient: [69:14:2061] !Reboot 72057594037927937 (actor [69:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:88:2057] recipient: [69:39:2086] Leader for TabletID 72057594037927937 is [69:58:2099] sender: [69:91:2057] recipient: [69:90:2120] Leader for TabletID 72057594037927937 is [69:92:2121] sender: [69:93:2057] recipient: [69:90:2120] !Reboot 72057594037927937 (actor [69:58:2099]) rebooted! !Reboot 72057594037927937 (actor [69:58:2099]) tablet resolver refreshed! new actor is[69:92:2121] Leader for TabletID 72057594037927937 is [69:92:2121] sender: [69:208:2057] recipient: [69:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [70:56:2057] recipient: [70:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [70:56:2057] recipient: [70:53:2097] Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:59:2057] recipient: [70:53:2097] Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:76:2057] recipient: [70:14:2061] !Reboot 72057594037927937 (actor [70:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:89:2057] recipient: [70:39:2086] Leader for TabletID 72057594037927937 is [70:58:2099] sender: [70:92:2057] recipient: [70:91:2120] Leader for TabletID 72057594037927937 is [70:93:2121] sender: [70:94:2057] recipient: [70:91:2120] !Reboot 72057594037927937 (actor [70:58:2099]) rebooted! !Reboot 72057594037927937 (actor [70:58:2099]) tablet resolver refreshed! 
new actor is[70:93:2121] Leader for TabletID 72057594037927937 is [0:0:0] sender: [71:56:2057] recipient: [71:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [71:56:2057] recipient: [71:52:2097] Leader for TabletID 72057594037927937 is [71:58:2099] sender: [71:59:2057] recipient: [71:52:2097] Leader for TabletID 72057594037927937 is [71:58:2099] sender: [71:76:2057] recipient: [71:14:2061] |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> Initializer::Simple [GOOD] >> KqpCost::ScanQueryRangeFullScan-SourceRead >> KqpCost::IndexLookupJoin-StreamLookupJoin >> KqpCost::WriteRow-isSink+isOlap [GOOD] >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails+isSink-isOlap >> KqpCost::ScanScriptingRangeFullScan+SourceRead >> KqpCost::CTAS-isOlap >> KqpCost::OlapPointLookup >> KqpCost::WriteRowInsertFails+isSink+isOlap [GOOD] >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow-isSink+isOlap [GOOD] |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails+isSink+isOlap [GOOD] |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/initializer/ut/unittest >> Initializer::Simple [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:31.927951Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:32.092607Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:32.093164Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:32.093218Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:32.403462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:32.403567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:32.465427Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494729890415 != 1767494729890418 2026-01-04T02:45:32.475120Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:32.519362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:32.623698Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:33.333771Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:45:33.333844Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:45:33.333907Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:45:33.339493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:33.339921Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:45:33.363001Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:43.551860Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:898:2764], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:43.551986Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:908:2769], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:43.552066Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:43.555761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:912:2772], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:43.555960Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:45:43.561758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:45:43.641460Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:913:2773], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-04T02:45:43.710508Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:969:2810] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:45:44.084868Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:979:2819], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:45:44.091440Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NThjM2I4ZmQtOThiYWZjNjMtZjBmMGE0MzAtM2NjYTNhNTI=, ActorId: [1:894:2761], ActorState: ExecuteState, LegacyTraceId: 01ke3eaxfnafrsry7je91td8ep, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/test]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/test]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=0 2026-01-04T02:45:44.162727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:45.489558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:45:45.869743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:46.453551Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) Initialization finished REQUEST=SELECT * FROM `/Root/.metadata/test`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/test`;EXPECTATION=1 REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 2026-01-04T02:46:08.422214Z node 1 :TX_PROXY ERROR: schemereq.cpp:1228: Actor# [1:1541:3224] txid# 281474976715678, Access denied for root@builtin on path /Root/.metadata/test, with access RemoveSchema 2026-01-04T02:46:08.422422Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1541:3224] txid# 281474976715678, issues: { message: "Access denied for root@builtin on path /Root/.metadata/test" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/test`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/test, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/test`;EXPECTATION=0;WAITING=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;RESULT=
: Fatal: ydb/core/kqp/host/kqp_host.cpp:946 ExecuteDataQuery(): requirement false failed, message: Unexpected query type for execute script action: Ddl, code: 1 ;EXPECTATION=0 FINISHED_REQUEST=DELETE FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 2026-01-04T02:46:40.008654Z node 1 :TX_PROXY ERROR: schemereq.cpp:1228: Actor# [1:1714:3347] txid# 281474976715686, Access denied for root@builtin on path /Root/.metadata/initialization/migrations, with access RemoveSchema 2026-01-04T02:46:40.008854Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1714:3347] txid# 281474976715686, issues: { message: "Access denied for root@builtin on path /Root/.metadata/initialization/migrations" issue_code: 200000 severity: 1 } REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;RESULT=
: Error: Execution, code: 1060
:1:12: Error: Executing DROP TABLE
: Error: Access denied., code: 2018
: Error: Access denied for root@builtin on path /Root/.metadata/initialization/migrations, code: 200000 ;EXPECTATION=0 FINISHED_REQUEST=DROP TABLE `/Root/.metadata/initialization/migrations`;EXPECTATION=0;WAITING=1 E0104 02:46:40.908911745 774802 backup_poller.cc:113] run_poller: UNKNOWN:Timer list shutdown {created_time:"2026-01-04T02:46:40.908655434+00:00"} |94.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/initializer/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatUnavailableNode [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:46:03.804287Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:03.916608Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:03.923669Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:03.924128Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:03.924293Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:04.350433Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:04.457391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:04.457480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:04.504013Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:04.560708Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:05.213064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:05.213555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:05.213594Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:05.213627Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:05.214103Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:05.277106Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:05.798124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:08.616297Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:08.622971Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:08.626123Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:08.650002Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:08.650094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:08.677937Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:08.680055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:08.843958Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:08.844089Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:08.845574Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:08.846296Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:08.846818Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:08.847949Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:08.848246Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:08.848470Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:08.848712Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:08.848863Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:08.849001Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:08.875574Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:09.090788Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:09.108653Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:09.108724Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:09.128488Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:09.129435Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:09.129676Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:09.129733Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:09.129768Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:09.129823Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:09.129863Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:09.129906Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:09.130467Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:09.130811Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:46:09.133317Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:09.142338Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:46:09.142418Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:46:09.142518Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:09.156095Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:09.168230Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:09.172787Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:09.176366Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:09.176483Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:09.206001Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:46:09.206388Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:46:09.320891Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:09.359282Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:09.632322Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:09.633884Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:09.636643Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:09.639188Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:09.639272Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... TVkMjA=, TxId: 2026-01-04T02:46:37.707637Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5444:4708], ActorId: [2:5445:4709], Got response [2:5446:4710] SUCCESS 2026-01-04T02:46:37.707893Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:46:37.721052Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-01-04T02:46:37.721115Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2026-01-04T02:46:37.829288Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-04T02:46:37.829373Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:46:37.955009Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5457:4721], schemeshard count = 1 2026-01-04T02:46:38.965216Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-04T02:46:38.965298Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-01-04T02:46:38.965351Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:46:38.972150Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-04T02:46:39.004387Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-04T02:46:39.005096Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-04T02:46:39.005182Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-04T02:46:39.006531Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2026-01-04T02:46:39.030707Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-04T02:46:39.030945Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-04T02:46:39.031630Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5541:4766], server id = [2:5545:4770], tablet id = 72075186224037899, status = OK 2026-01-04T02:46:39.032116Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5541:4766], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:39.032328Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5542:4767], server id = [2:5546:4771], tablet id = 72075186224037900, status = OK 2026-01-04T02:46:39.032367Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5542:4767], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:39.033580Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5543:4768], server id = [2:5548:4773], tablet id = 72075186224037901, status = OK 2026-01-04T02:46:39.033627Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5543:4768], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:39.034088Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5544:4769], server id = [2:5547:4772], tablet id = 72075186224037902, status = OK 2026-01-04T02:46:39.034129Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5544:4769], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:39.037267Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-04T02:46:39.037534Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5541:4766], server id = [2:5545:4770], tablet id = 72075186224037899 2026-01-04T02:46:39.037575Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:39.038806Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-04T02:46:39.039136Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5542:4767], server id = [2:5546:4771], tablet id = 72075186224037900 2026-01-04T02:46:39.039175Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:39.039702Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-04T02:46:39.040047Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5543:4768], server id = [2:5548:4773], tablet id = 72075186224037901 2026-01-04T02:46:39.040069Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:39.040411Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-04T02:46:39.040454Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-04T02:46:39.040673Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5544:4769], server id = 
[2:5547:4772], tablet id = 72075186224037902 2026-01-04T02:46:39.040695Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:39.040760Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-04T02:46:39.040850Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-04T02:46:39.041309Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-04T02:46:39.079957Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-04T02:46:39.080230Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2026-01-04T02:46:39.080943Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5565:4784], server id = [2:5566:4785], tablet id = 72075186224037900, status = OK 2026-01-04T02:46:39.081043Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5565:4784], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:39.082610Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-04T02:46:39.082705Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-04T02:46:39.082858Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-04T02:46:39.083047Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-04T02:46:39.083441Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5568:4787], ActorId: [2:5569:4788], Starting query actor #1 [2:5570:4789] 2026-01-04T02:46:39.083501Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5569:4788], ActorId: [2:5570:4789], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:46:39.085792Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5565:4784], server id = [2:5566:4785], tablet id = 72075186224037900 2026-01-04T02:46:39.085872Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:39.086658Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5569:4788], ActorId: [2:5570:4789], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZDhjZDg4ODEtNjZiMWRlODEtNzAyZjM4ZDYtMzkxYTQzZg==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:46:39.202879Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5579:4798]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:39.203211Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:46:39.203288Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5579:4798], StatRequests.size() = 1 2026-01-04T02:46:39.379830Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5569:4788], ActorId: [2:5570:4789], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDhjZDg4ODEtNjZiMWRlODEtNzAyZjM4ZDYtMzkxYTQzZg==, TxId: 2026-01-04T02:46:39.379904Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5569:4788], ActorId: [2:5570:4789], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDhjZDg4ODEtNjZiMWRlODEtNzAyZjM4ZDYtMzkxYTQzZg==, TxId: 2026-01-04T02:46:39.380285Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5568:4787], ActorId: [2:5569:4788], Got response [2:5570:4789] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-04T02:46:39.380724Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5593:4804]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:39.380962Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:46:39.381470Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:46:39.381527Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:46:39.383430Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:46:39.383502Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:46:39.383570Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:46:39.387910Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 probe = 4 |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionZeroNodes [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:46:04.507051Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:04.611266Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:04.618960Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:04.619323Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:04.619476Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:04.951542Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:05.033109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:05.033197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:05.079576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:05.104390Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:05.782088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:05.782601Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:05.782663Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:05.782700Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:05.783254Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:05.848778Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:06.418778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:09.248682Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:09.254972Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:09.284382Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:09.284473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-04T02:46:09.312457Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:09.314439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:09.501438Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.502033Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.502538Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.503150Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.503365Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.503445Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.503589Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.503697Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.503804Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.667236Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:09.691482Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:09.691626Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:09.705174Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:09.879287Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:09.918211Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:09.918315Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:09.943833Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:09.944765Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:09.944936Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:09.944986Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:09.945027Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:09.945064Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:09.945119Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: 
[72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:09.945163Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:09.945498Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:10.068443Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2229:2621] 2026-01-04T02:46:10.070133Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:10.073084Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:10.073191Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2249:2632], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:10.085678Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2237:2628] Owner: [2:2236:2627]. Describe result: PathErrorUnknown 2026-01-04T02:46:10.085733Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2237:2628] Owner: [2:2236:2627]. Creating table 2026-01-04T02:46:10.085816Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2237:2628] Owner: [2:2236:2627]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:10.094218Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2305:2674] 2026-01-04T02:46:10.095257Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2305:2674], schemeshard id = 72075186224037897 2026-01-04T02:46:10.120484Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2345:2689], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:10.129985Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:10.141157Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2237:2628] Owner: [2:2236:2627]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:10.141247Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2237:2628] Owner: [2:2236:2627]. Subscribe on create table tx: 281474976720657 2026-01-04T02:46:10.153049Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2237:2628] Owner: [2:2236:2627]. Subscribe on tx: 281474976720657 registered 2026-01-04T02:46:10.181764Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:10.654963Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2237:2628] Owner: [2:2236:2627]. Request: create. Transaction completed: 281474976720657. 
Doublechecking... 2026-01-04T02:46:10.766734Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2237:2628] Owner: [2:2236:2627]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:46:10.766831Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2237:2628] Owner: [2:2236:2627]. Column diff is empty, finishing 2026-01-04T02:46:11.252425Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:11.485416Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2796:3382], Data ... ed schemeshards count = 1 2026-01-04T02:46:37.801262Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:46:37.801449Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:46:37.801547Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:5534:4743], StatRequests.size() = 1 2026-01-04T02:46:37.801726Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:46:37.917109Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5524:4733], ActorId: [2:5525:4734], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDE5NjQwZTYtOGM4NTc3OS0yNzYxYjNjZS01YWMyYmRkYQ==, TxId: 2026-01-04T02:46:37.917185Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5524:4733], ActorId: [2:5525:4734], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDE5NjQwZTYtOGM4NTc3OS0yNzYxYjNjZS01YWMyYmRkYQ==, TxId: 2026-01-04T02:46:37.917557Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5523:4732], ActorId: [2:5524:4733], Got response [2:5525:4734] SUCCESS 2026-01-04T02:46:37.917799Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:46:37.931817Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-01-04T02:46:37.931870Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2026-01-04T02:46:37.985321Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-04T02:46:37.985389Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:46:38.039343Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5536:4745], schemeshard count = 1 2026-01-04T02:46:38.992164Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-04T02:46:38.992259Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 
2026-01-04T02:46:38.992302Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:46:38.997111Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-04T02:46:39.028544Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-04T02:46:39.029190Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-04T02:46:39.029289Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-04T02:46:39.030690Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 2 2026-01-04T02:46:39.030757Z node 2 :STATISTICS WARN: tx_response_tablet_distribution.cpp:63: [72075186224037894] TTxResponseTabletDistribution::Execute. Some tablets are probably in Hive boot queue 2026-01-04T02:46:39.030824Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-04T02:46:39.991212Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-04T02:46:40.017104Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-04T02:46:40.017347Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-04T02:46:40.018118Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5649:4800], server id = [2:5653:4804], tablet id = 72075186224037899, status = OK 2026-01-04T02:46:40.018509Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5649:4800], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:40.020098Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5650:4801], server id = [2:5654:4805], tablet id = 72075186224037900, status = OK 2026-01-04T02:46:40.020178Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5650:4801], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:40.021138Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5651:4802], server id = [2:5655:4806], tablet id = 72075186224037901, status = OK 2026-01-04T02:46:40.021188Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5651:4802], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:40.021742Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5652:4803], server id = [2:5657:4808], tablet id = 72075186224037902, status = OK 2026-01-04T02:46:40.021844Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5652:4803], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:40.026630Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-04T02:46:40.026973Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5649:4800], server id = [2:5653:4804], tablet id = 72075186224037899 2026-01-04T02:46:40.027014Z node 2 
:STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:40.027551Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-04T02:46:40.027926Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5650:4801], server id = [2:5654:4805], tablet id = 72075186224037900 2026-01-04T02:46:40.027950Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:40.028340Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-04T02:46:40.028499Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-04T02:46:40.028534Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-04T02:46:40.028670Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-04T02:46:40.028836Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-04T02:46:40.029181Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5669:4816], ActorId: [2:5670:4817], Starting query actor #1 [2:5671:4818] 2026-01-04T02:46:40.029245Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5670:4817], ActorId: [2:5671:4818], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:46:40.031527Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5651:4802], server id = [2:5655:4806], tablet id = 72075186224037901 2026-01-04T02:46:40.031556Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:40.032159Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5670:4817], ActorId: [2:5671:4818], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MjU2ODY3ZjItYWRhNmVkOTctNGRiMTZlNmMtNDI1MDUyNzk=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:46:40.032540Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5652:4803], server id = [2:5657:4808], tablet id = 72075186224037902 2026-01-04T02:46:40.032561Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:40.143903Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5680:4827]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:40.144209Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:46:40.144263Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5680:4827], StatRequests.size() = 1 2026-01-04T02:46:40.310265Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5670:4817], ActorId: [2:5671:4818], DataQuery 
#1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjU2ODY3ZjItYWRhNmVkOTctNGRiMTZlNmMtNDI1MDUyNzk=, TxId: 2026-01-04T02:46:40.310347Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5670:4817], ActorId: [2:5671:4818], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjU2ODY3ZjItYWRhNmVkOTctNGRiMTZlNmMtNDI1MDUyNzk=, TxId: 2026-01-04T02:46:40.310762Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5669:4816], ActorId: [2:5670:4817], Got response [2:5671:4818] SUCCESS ... waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-04T02:46:40.311219Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5694:4833]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:40.311478Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:46:40.312016Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:46:40.312077Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:46:40.312895Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:46:40.312957Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:46:40.313021Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:46:40.318884Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> KqpCost::VectorIndexLookup+useSink |94.2%| [TA] $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.2%| [TA] {RESULT} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpCost::QuerySeviceRangeFullScan >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-anonymous [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-ordinaryuser |94.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/initializer/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableHiveDistributionAbsentNodes [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:58.041236Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:58.167479Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:45:58.180598Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:58.180927Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:58.181024Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:58.594150Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:58.701773Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:58.701901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:58.750075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:58.830591Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:59.762411Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:59.763531Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:45:59.763732Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:45:59.763787Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:45:59.764247Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:45:59.833039Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:00.373629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:02.977876Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:02.984220Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:02.987433Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:03.025865Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:03.025992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:03.057002Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:03.058913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:03.249226Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:03.252005Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:03.253167Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:03.254059Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:03.254403Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:03.254597Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:03.254801Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:03.254976Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:03.255063Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:03.429102Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:03.440973Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:03.441097Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:03.454551Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:03.610847Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:03.710965Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:03.711073Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:03.739203Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:03.739510Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:03.739733Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:03.739813Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:03.739867Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:03.739948Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:03.740011Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:03.740066Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:03.740789Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:03.883398Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2223:2620] 2026-01-04T02:46:03.883689Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:03.886584Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Describe result: PathErrorUnknown 2026-01-04T02:46:03.886628Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Creating table 2026-01-04T02:46:03.886692Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:03.890140Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2242:2627], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:03.893739Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2234:2625] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:03.896936Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:03.959428Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:03.976189Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:03.976272Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2302:2660], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:03.986813Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2328:2683] 2026-01-04T02:46:03.988990Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2328:2683], schemeshard id = 72075186224037897 2026-01-04T02:46:04.303464Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:04.305602Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2594:2886], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:04.310550Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:04.314595Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:04.314683Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2225:2622] Owner: [2:2224:2621]. Subscribe on create table tx: 28 ... 843Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5503:4741], server id = [2:5504:4742], tablet id = 72075186224037894, status = OK 2026-01-04T02:46:38.709899Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5504:4742] 2026-01-04T02:46:38.709998Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5504:4742], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:46:38.710063Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:46:38.710221Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:46:38.710281Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:5501:4739], StatRequests.size() = 1 2026-01-04T02:46:38.710342Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:46:38.826134Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5491:4729], ActorId: [2:5492:4730], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZTc2YTQxMzctMTE4NTA0NDUtY2MxZDU1Y2EtYThmYjg0NGM=, TxId: 2026-01-04T02:46:38.826194Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5491:4729], ActorId: [2:5492:4730], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTc2YTQxMzctMTE4NTA0NDUtY2MxZDU1Y2EtYThmYjg0NGM=, TxId: 2026-01-04T02:46:38.826474Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5490:4728], ActorId: [2:5491:4729], Got response [2:5492:4730] SUCCESS 2026-01-04T02:46:38.827108Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:46:38.840780Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-01-04T02:46:38.840826Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. 
No ActorId to send reply. 2026-01-04T02:46:38.941749Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-04T02:46:38.941865Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:46:39.055494Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:5503:4741], schemeshard count = 1 2026-01-04T02:46:40.091881Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-04T02:46:40.091965Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-01-04T02:46:40.092007Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:46:40.095753Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-04T02:46:40.125705Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-04T02:46:40.126280Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-04T02:46:40.126354Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-04T02:46:40.127456Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-04T02:46:40.164440Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-04T02:46:40.164820Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-04T02:46:40.165943Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5587:4786], server id = [2:5591:4790], tablet id = 72075186224037899, status = OK 2026-01-04T02:46:40.166463Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5587:4786], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:40.166866Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5588:4787], server id = [2:5592:4791], tablet id = 72075186224037900, status = OK 2026-01-04T02:46:40.166934Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5588:4787], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:40.168160Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5589:4788], server id = [2:5593:4792], tablet id = 72075186224037901, status = OK 2026-01-04T02:46:40.168223Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5589:4788], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:40.168720Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5590:4789], server id = [2:5594:4793], tablet id = 72075186224037902, status = OK 2026-01-04T02:46:40.168774Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5590:4789], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:40.174950Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse 
TabletId: 72075186224037899 2026-01-04T02:46:40.175439Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5587:4786], server id = [2:5591:4790], tablet id = 72075186224037899 2026-01-04T02:46:40.175490Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:40.175870Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-04T02:46:40.177009Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5588:4787], server id = [2:5592:4791], tablet id = 72075186224037900 2026-01-04T02:46:40.177044Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:40.177475Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-04T02:46:40.178039Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5589:4788], server id = [2:5593:4792], tablet id = 72075186224037901 2026-01-04T02:46:40.178071Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:40.178485Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-04T02:46:40.178562Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-04T02:46:40.178733Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-04T02:46:40.178886Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-04T02:46:40.179381Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5607:4802], ActorId: [2:5608:4803], Starting query actor #1 [2:5609:4804] 2026-01-04T02:46:40.179446Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5608:4803], ActorId: [2:5609:4804], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:46:40.181233Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5590:4789], server id = [2:5594:4793], tablet id = 72075186224037902 2026-01-04T02:46:40.181258Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:40.181875Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5608:4803], ActorId: [2:5609:4804], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZDEzZjA4OTUtNDE2YTgxNjEtM2E1NDJhNjctZmU2ZWNjM2Q=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:46:40.299090Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5618:4813]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:40.299463Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:46:40.299521Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5618:4813], StatRequests.size() = 1 2026-01-04T02:46:40.469767Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5608:4803], ActorId: [2:5609:4804], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDEzZjA4OTUtNDE2YTgxNjEtM2E1NDJhNjctZmU2ZWNjM2Q=, TxId: 2026-01-04T02:46:40.469855Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5608:4803], ActorId: [2:5609:4804], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDEzZjA4OTUtNDE2YTgxNjEtM2E1NDJhNjctZmU2ZWNjM2Q=, TxId: 2026-01-04T02:46:40.470250Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5607:4802], ActorId: [2:5608:4803], Got response [2:5609:4804] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-04T02:46:40.470693Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5632:4819]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:40.470905Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:46:40.471842Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:46:40.471903Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:46:40.472277Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:46:40.472331Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:46:40.472393Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:46:40.476147Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 >> KqpCost::VectorIndexLookup-useSink |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin+StreamLookupJoin >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] >> KqpCost::WriteRowInsertFails-isSink+isOlap [GOOD] >> KqpCost::OlapRangeFullScan >> KqpCost::AAARangeFullScan |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails-isSink+isOlap [GOOD] |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::Range >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::CancelAfterRoTxWithFollowerStreamLookupDepededRead [GOOD] Test command err: Trying to start YDB, gRPC: 9675, MsgBus: 6138 2026-01-04T02:41:20.062039Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330991066118819:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:20.062236Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:20.106325Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:41:20.413517Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:20.413638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:20.418788Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2026-01-04T02:41:20.435043Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:20.501051Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:20.674975Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:20.768985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:20.769029Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:20.769054Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:20.769131Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:21.070882Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:21.513627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:21.522658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:21.572225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.719658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.866636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.958503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.585873Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: 
[1:7591331003951022548:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.585972Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.586373Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331003951022558:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.586427Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.918481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.948000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.975574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.002239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.032202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.067991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.102634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.158374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.242860Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331008245990723:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.242964Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.243217Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331008245990728:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.243222Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331008245990729:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.243280Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.246488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:24.259894Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331008245990732:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:24.340936Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331008245990783:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:25.062052Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330991066118819:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:25.062116Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:25.870616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself ... "Cancelling after 453ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:27.693985Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ec856181mts365amqydkn, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 455ms" severity: 1 }{ message: "Cancelling after 455ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:28.223478Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ec8nn522wfazwb2r0f8a2, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 457ms" severity: 1 }{ message: "Cancelling after 457ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:28.797371Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ec97hd2csy2xne1sdey3w, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 459ms" severity: 1 }{ message: "Cancelling after 459ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:29.326952Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ec9r19zgqv7gbv9me2vk1, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 461ms" severity: 1 }{ message: "Cancelling after 461ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:29.865910Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3eca8t4fp5mfxnz8gcq8s4, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 463ms" severity: 1 }{ message: "Cancelling after 462ms during compilation" severity: 1 } 
trace_id# 2026-01-04T02:46:30.442946Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ecats08nnhvhjkybmt6br, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 465ms" severity: 1 }{ message: "Cancelling after 465ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:30.970320Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ecbb68d30qcrs8th85f6h, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 467ms" severity: 1 }{ message: "Cancelling after 467ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:31.577119Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ecby3cp9q9199yhmrbbed, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 469ms" severity: 1 }{ message: "Cancelling after 469ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:32.231871Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3eccjgfve9x01g4kj9h3br, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 471ms" severity: 1 }{ message: "Cancelling after 471ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:32.831963Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ecd569j7sxh5450pgjhm7, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 473ms" severity: 1 }{ message: "Cancelling after 473ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:33.402931Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ecdpz25cg82k7t5tzh91k, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 475ms" severity: 1 }{ message: "Cancelling after 475ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:34.027004Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3eceadf6cq3d7m5b7sp7sx, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 477ms" severity: 1 }{ message: "Cancelling after 477ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:34.655779Z node 5 :KQP_SESSION WARN: 
{KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ecey05vcx3gxkbc7kn5z2, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 479ms" severity: 1 }{ message: "Cancelling after 478ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:35.229242Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ecffv7qprje7kv11ktwtn, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 481ms" severity: 1 }{ message: "Cancelling after 481ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:35.970627Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ecg6y4y9yj9db2kaq30y1, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 483ms" severity: 1 }{ message: "Cancelling after 483ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:36.671413Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ecgwsb1bgnns527dgzg4p, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 485ms" severity: 1 }{ message: "Cancelling after 485ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:37.780882Z node 5 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [5:7591332350979022842:2533] TxId: 281474976710836. Ctx: { TraceId: 01ke3echzcd1zfzz4j7yds0fxr, Database: /Root, SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# CANCELLED Issues# [ {
: Error: Request canceled after 488ms } {
: Error: Cancelling after 488ms during execution } ] trace_id# 2026-01-04T02:46:37.781180Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3echzcd1zfzz4j7yds0fxr, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 488ms" severity: 1 }{ message: "Cancelling after 488ms during execution" severity: 1 } trace_id# 2026-01-04T02:46:38.277603Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ecjev0nvyfdem5nxampze, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 489ms" severity: 1 }{ message: "Cancelling after 489ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:38.864451Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3eck1478afy7yx9yj71jyj, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 491ms" severity: 1 }{ message: "Cancelling after 491ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:39.474811Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3eckm27ee8aam7h5evtkr4, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 493ms" severity: 1 }{ message: "Cancelling after 495ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:40.112651Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ecm808pxqeftgb0ap0jft, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 495ms" severity: 1 }{ message: "Cancelling after 495ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:40.814133Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ecmxw9ybatnzycr63nwaj, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 497ms" severity: 1 }{ message: "Cancelling after 497ms during compilation" severity: 1 } trace_id# 2026-01-04T02:46:41.498228Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=MTNiNTVmNTMtM2U4MzQ5YjItOWRmODYwNDAtNDljMjM0ZjM=, ActorId: [5:7591331925777254829:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ecnk698x828zsnz1akqzr, Create QueryResponse for error on request, msg: status# CANCELLED issues# { message: "Request canceled after 499ms" severity: 1 }{ message: "Cancelling after 499ms during compilation" severity: 1 } trace_id# |94.2%| [TM] {BAZEL_UPLOAD} 
ydb/core/kqp/ut/query/unittest >> KqpCost::WriteRow+isSink-isOlap [GOOD] >> KqpCost::IndexLookup+useSink [GOOD] >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink [GOOD] >> KqpCost::IndexLookupAndTake+useSink [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-ordinaryuser >> KqpCost::WriteRow-isSink-isOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 7091, MsgBus: 25091 2026-01-04T02:46:39.074317Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332361367006348:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:39.074500Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:39.304165Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:39.316441Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:39.316531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:39.338716Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:39.401606Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:39.416407Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332361367006320:2081] 1767494799069803 != 1767494799069806 2026-01-04T02:46:39.509688Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:39.603959Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:39.603983Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:39.603998Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:39.604096Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:40.080686Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:40.255015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:40.315010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.480540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.615893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.693191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.254415Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332374251910081:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.254512Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.254836Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332374251910091:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.254880Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.609014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.638392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.672764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.703764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.737373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.773430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.809144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.859112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.933372Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332374251910960:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.933450Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.933644Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332374251910965:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.933712Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332374251910966:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.933753Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.937568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:42.947806Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332374251910969:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:43.034916Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332378546878316:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:44.074783Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332361367006348:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:44.074843Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:45.046010Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494805055, txId: 281474976710673] shutting down ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 23583, MsgBus: 27908 2026-01-04T02:46:39.194602Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332359586887150:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:39.195162Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:39.444456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:39.444546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:39.484501Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:39.500179Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:39.505417Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:39.603946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:39.603973Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:39.603988Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:39.604099Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:39.742133Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:40.187339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:40.192839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:46:40.196444Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:40.264344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.412396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.563902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.639945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.220634Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332372471790800:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.220751Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.221140Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332372471790810:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.221293Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.576005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.605091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.631257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.661592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.693158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.767007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.801842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.856604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.936806Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332372471791681:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.936895Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.937095Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332372471791687:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.937139Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332372471791686:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.937164Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.940673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:42.951077Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332372471791690:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:43.021229Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332376766759037:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:44.189438Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332359586887150:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:44.189488Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:44.616752Z node 1 :KQP_GATEWAY DEBUG: kqp_metadata_loader.cpp:922: Load table metadata from cache by path, request Path: /Root/Test 2026-01-04T02:46:44.870079Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_snapshot_manager.cpp:37: Start KqpSnapshotManager at [1:7591332381061726612:2521] 2026-01-04T02:46:44.870107Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_snap ... ffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-04T02:46:44.900920Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7591332381061726627:2528], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3ecs1cdr0e93ze29cek198. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NjIwNDRhMjItODNkZTc0NzgtYjY3YTI0NTAtYTJmYjY0NDE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:46:44.900923Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7591332381061726628:2529], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3ecs1cdr0e93ze29cek198. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=NjIwNDRhMjItODNkZTc0NzgtYjY3YTI0NTAtYTJmYjY0NDE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646923 2026-01-04T02:46:44.900967Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976710674, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2026-01-04T02:46:44.901004Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976710674, task: 2. Finish input channelId: 1, from: [1:7591332381061726627:2528] 2026-01-04T02:46:44.901081Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7591332381061726628:2529], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3ecs1cdr0e93ze29cek198. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=NjIwNDRhMjItODNkZTc0NzgtYjY3YTI0NTAtYTJmYjY0NDE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:46:44.901082Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7591332381061726627:2528], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3ecs1cdr0e93ze29cek198. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NjIwNDRhMjItODNkZTc0NzgtYjY3YTI0NTAtYTJmYjY0NDE=. CurrentExecutionId : . Database : /Root. 
DatabaseId : /Root. }. CA StateFunc 271646927 2026-01-04T02:46:44.901116Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7591332381061726627:2528], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3ecs1cdr0e93ze29cek198. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NjIwNDRhMjItODNkZTc0NzgtYjY3YTI0NTAtYTJmYjY0NDE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:46:44.901171Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710674, task: 1. Tasks execution finished 2026-01-04T02:46:44.901188Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [1:7591332381061726627:2528], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3ecs1cdr0e93ze29cek198. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NjIwNDRhMjItODNkZTc0NzgtYjY3YTI0NTAtYTJmYjY0NDE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-04T02:46:44.901315Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [1:7591332381061726628:2529], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3ecs1cdr0e93ze29cek198. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=NjIwNDRhMjItODNkZTc0NzgtYjY3YTI0NTAtYTJmYjY0NDE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-04T02:46:44.901315Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710674, task: 1. pass away 2026-01-04T02:46:44.901427Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710674;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-04T02:46:44.901561Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [1:7591332381061726623:2521] TxId: 281474976710674. Ctx: { TraceId: 01ke3ecs1cdr0e93ze29cek198, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjIwNDRhMjItODNkZTc0NzgtYjY3YTI0NTAtYTJmYjY0NDE=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7591332381061726627:2528] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 17752 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 9368 FinishTimeMs: 1767494804901 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 1 ReadBytes: 20 AffectedPartitions: 1 } IngressRows: 3 ComputeCpuTimeUs: 943 BuildCpuTimeUs: 8425 HostName: "ghrun-46vtxmsa7i" NodeId: 1 StartTimeMs: 1767494804900 CreateTimeMs: 1767494804882 UpdateTimeMs: 1767494804901 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:46:44.901576Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710674, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-04T02:46:44.901649Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710674. Ctx: { TraceId: 01ke3ecs1cdr0e93ze29cek198, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjIwNDRhMjItODNkZTc0NzgtYjY3YTI0NTAtYTJmYjY0NDE=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7591332381061726627:2528] 2026-01-04T02:46:44.901726Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [1:7591332381061726623:2521] TxId: 281474976710674. Ctx: { TraceId: 01ke3ecs1cdr0e93ze29cek198, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjIwNDRhMjItODNkZTc0NzgtYjY3YTI0NTAtYTJmYjY0NDE=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7591332381061726628:2529], trace_id# 2026-01-04T02:46:44.901857Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767494804 AvailableComputeActors: 9999 UsedMemory: 0 TotalMemory: 10737418240 Memory { Pool: 1 Available: 10737418240 } ExecutionUnits: 9999 KqpProxyNodeResources { NodeId: 1 DataCenterNumId: 49 ActiveWorkersCount: 1 DataCenterId: "1" } 2026-01-04T02:46:44.902003Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:334} ActorId: [1:7591332381061726623:2521] TxId: 281474976710674. Ctx: { TraceId: 01ke3ecs1cdr0e93ze29cek198, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjIwNDRhMjItODNkZTc0NzgtYjY3YTI0NTAtYTJmYjY0NDE=, PoolId: default, IsStreamingQuery: 0}. Send TEvStreamData Recipient# [1:7591332381061726598:2521] SeqNo# 1 Rows# 1 trace_id# 2026-01-04T02:46:44.905605Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:549} ActorId: [1:7591332381061726623:2521] TxId: 281474976710674. Ctx: { TraceId: 01ke3ecs1cdr0e93ze29cek198, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjIwNDRhMjItODNkZTc0NzgtYjY3YTI0NTAtYTJmYjY0NDE=, PoolId: default, IsStreamingQuery: 0}. Send ack ChannelId# 2 SeqNo# 1 Enough# false FreeSpace# 8388468 Recipient# [1:7591332381061726630:2529] trace_id# 2026-01-04T02:46:44.905697Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7591332381061726628:2529], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3ecs1cdr0e93ze29cek198. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. 
SessionId : ydb://session/3?node_id=1&id=NjIwNDRhMjItODNkZTc0NzgtYjY3YTI0NTAtYTJmYjY0NDE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:46:44.905766Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710674, task: 2. Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2026-01-04T02:46:44.905785Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710674, task: 2. Tasks execution finished 2026-01-04T02:46:44.905797Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [1:7591332381061726628:2529], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3ecs1cdr0e93ze29cek198. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=NjIwNDRhMjItODNkZTc0NzgtYjY3YTI0NTAtYTJmYjY0NDE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-04T02:46:44.905910Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710674, task: 2. pass away 2026-01-04T02:46:44.906001Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710674;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-04T02:46:44.906164Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710674, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-04T02:46:44.906402Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [1:7591332381061726623:2521] TxId: 281474976710674. Ctx: { TraceId: 01ke3ecs1cdr0e93ze29cek198, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjIwNDRhMjItODNkZTc0NzgtYjY3YTI0NTAtYTJmYjY0NDE=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7591332381061726628:2529] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 3128 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 1825 FinishTimeMs: 1767494804905 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 228 BuildCpuTimeUs: 1597 HostName: "ghrun-46vtxmsa7i" NodeId: 1 CreateTimeMs: 1767494804892 UpdateTimeMs: 1767494804905 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:46:44.906458Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710674. Ctx: { TraceId: 01ke3ecs1cdr0e93ze29cek198, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjIwNDRhMjItODNkZTc0NzgtYjY3YTI0NTAtYTJmYjY0NDE=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7591332381061726628:2529] 2026-01-04T02:46:44.906635Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [1:7591332381061726623:2521] TxId: 281474976710674. Ctx: { TraceId: 01ke3ecs1cdr0e93ze29cek198, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjIwNDRhMjItODNkZTc0NzgtYjY3YTI0NTAtYTJmYjY0NDE=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-04T02:46:44.906701Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [1:7591332381061726623:2521] TxId: 281474976710674. Ctx: { TraceId: 01ke3ecs1cdr0e93ze29cek198, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NjIwNDRhMjItODNkZTc0NzgtYjY3YTI0NTAtYTJmYjY0NDE=, PoolId: default, IsStreamingQuery: 0}. 
Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.020880s ReadRows: 1 ReadBytes: 20 RequestUnits# 13 ForceFlag# true trace_id# 2026-01-04T02:46:44.907555Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494804922, txId: 281474976710673] shutting down 2026-01-04T02:46:44.907648Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 2026-01-04T02:46:46.901588Z, after 1.994368s |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookup+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 62418, MsgBus: 2071 2026-01-04T02:46:39.070109Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332360598208771:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:39.070358Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:39.298432Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:39.341527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:39.341637Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:39.404926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:39.421048Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:39.438820Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332360598208743:2081] 1767494799066320 != 1767494799066323 2026-01-04T02:46:39.522315Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:39.603926Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:39.603960Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:39.603986Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:39.604072Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:40.078662Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:40.218846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:40.268539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.393010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.539197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.596500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.068712Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332373483112501:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.068841Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.069125Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332373483112511:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.069168Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.555058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.587376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.616399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.642942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.674169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.709757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.743328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.794923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.898135Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332373483113380:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.898209Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.898516Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332373483113385:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.898573Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332373483113386:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.898837Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.903918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:42.912799Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332373483113389:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:42.985965Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332373483113440:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:44.073897Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332360598208771:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:44.073960Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:44.490981Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow+isSink-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 14864, MsgBus: 30105 2026-01-04T02:46:39.070488Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332358969537671:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:39.070884Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:39.331569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:39.331658Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:39.389209Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:39.424885Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:39.433494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:39.603913Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:39.603941Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:39.603977Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:39.604063Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:39.642624Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:40.078071Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:40.254312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:40.274376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:46:40.326600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.482785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.626690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.691183Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.243035Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371854441393:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.243148Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.243500Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371854441403:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.243618Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.556028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.591768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.621734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.649127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.678544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.717043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.749034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.823599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.908976Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371854442273:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.909039Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.909136Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371854442278:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.909192Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371854442279:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.909230Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.912557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:42.923789Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332371854442282:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:43.019144Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332376149409629:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:44.070696Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332358969537671:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:44.070758Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:44.594951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) query_phases { duration_us: 3754 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1491 affected_shards: 1 } compilation { duration_us: 59903 cpu_time_us: 54837 } process_cpu_time_us: 604 total_duration_us: 65823 total_cpu_time_us: 56932 query_phases { duration_us: 3693 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1175 affected_shards: 1 } compilation { duration_us: 57709 cpu_time_us: 52715 } process_cpu_time_us: 692 total_duration_us: 63258 total_cpu_time_us: 54582 2026-01-04T02:46:44.951015Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=5; 2026-01-04T02:46:44.961733Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 5 at tablet 72075186224037927 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-04T02:46:44.961923Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 5 at tablet 72075186224037927 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-04T02:46:44.962164Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:7591332380444377387:2531], Table: `/Root/TestTable` ([72057594046644480:54:1]), SessionActorId: [1:7591332380444377233:2531]Got CONSTRAINT VIOLATION for table `/Root/TestTable`. ShardID=72075186224037927, Sink=[1:7591332380444377387:2531].{
: Error: Conflict with existing key., code: 2012 } 2026-01-04T02:46:44.962691Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7591332380444377380:2531], SessionActorId: [1:7591332380444377233:2531], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7591332380444377233:2531]. 2026-01-04T02:46:44.962963Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=NGY5YzFiM2UtOTZiYjNlYmItMWQxMTI4M2UtNTdlNjJiNTg=, ActorId: [1:7591332380444377233:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3ecsceerqqsyeyta0b5k9z, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7591332380444377381:2531] from: [1:7591332380444377380:2531] trace_id# 2026-01-04T02:46:44.963104Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7591332380444377381:2531] TxId: 281474976710677. Ctx: { TraceId: 01ke3ecsceerqqsyeyta0b5k9z, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NGY5YzFiM2UtOTZiYjNlYmItMWQxMTI4M2UtNTdlNjJiNTg=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-04T02:46:44.963483Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NGY5YzFiM2UtOTZiYjNlYmItMWQxMTI4M2UtNTdlNjJiNTg=, ActorId: [1:7591332380444377233:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3ecsceerqqsyeyta0b5k9z, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 24747 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 1264 } compilation { duration_us: 58228 cpu_time_us: 53550 } process_cpu_time_us: 799 total_duration_us: 84875 total_cpu_time_us: 55613 query_phases { duration_us: 3730 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1327 affected_shards: 1 } compilation { duration_us: 50085 cpu_time_us: 45472 } process_cpu_time_us: 562 total_duration_us: 55877 total_cpu_time_us: 47361 query_phases { duration_us: 2873 cpu_time_us: 1500 affected_shards: 1 } compilation { duration_us: 91974 cpu_time_us: 86891 } process_cpu_time_us: 727 total_duration_us: 96672 total_cpu_time_us: 89118 query_phases { duration_us: 3920 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1535 affected_shards: 1 } compilation { duration_us: 70292 cpu_time_us: 65683 } process_cpu_time_us: 665 total_duration_us: 75828 total_cpu_time_us: 67883 query_phases { duration_us: 4015 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1676 affected_shards: 1 } compilation { duration_us: 73556 cpu_time_us: 68508 } process_cpu_time_us: 583 total_duration_us: 79285 total_cpu_time_us: 70767 query_phases { duration_us: 3918 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1208 affected_shards: 1 } compilation { duration_us: 51072 cpu_time_us: 46424 } process_cpu_time_us: 518 total_duration_us: 56403 total_cpu_time_us: 48150 >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 6424, MsgBus: 63896 2026-01-04T02:46:39.113959Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332360456567932:2157];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:39.114133Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:39.330761Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:39.330872Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:39.348331Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:39.394131Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:39.446124Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332360456567806:2081] 1767494799107812 != 1767494799107815 2026-01-04T02:46:39.448473Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:39.603921Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:39.603940Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:39.603957Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:39.604049Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:39.663326Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:40.116340Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:40.216630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:40.230400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:46:40.277329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.413819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.558332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.623026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.315389Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7591332373341471589:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.315500Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.316132Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332373341471599:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.316178Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.616505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.645763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.682003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.715454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.751126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.801580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.843934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.891046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.963712Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332373341472470:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.963818Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.964160Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332373341472475:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.964223Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332373341472476:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.964283Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.967793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:42.978321Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332373341472479:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:43.081370Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332377636439826:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:44.112761Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332360456567932:2157];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:44.112837Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:44.565488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 22094, MsgBus: 6253 2026-01-04T02:46:39.070110Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332358856584255:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:39.070242Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:39.313894Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:39.325159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:39.325291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:39.354644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:39.402220Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:39.405790Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332358856584228:2081] 1767494799068595 != 1767494799068598 2026-01-04T02:46:39.603986Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:39.604026Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:39.604048Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:39.604146Z node 1 :NET_CLASSIFIER 
ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:39.609845Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:40.077579Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:40.253914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:40.324088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.471703Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.628326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.694918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.062433Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371741487986:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.062550Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.062871Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371741487996:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.062929Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.554912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.586262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.613158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.639769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.670136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.708972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.745053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.794849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.897111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371741488874:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.897208Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.897342Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371741488880:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.897376Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371741488879:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.897413Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.903972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:42.914051Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332371741488883:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:42.978367Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332371741488934:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:44.070129Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332358856584255:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:44.070213Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:44.587980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 2 16 /Root/SecondaryKeys 1 8 >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow [GOOD] >> KqpCost::CTAS-isOlap [GOOD] >> TSchemeShardSubDomainTest::SimultaneousDeclare >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] >> KqpCost::WriteRowInsertFails+isSink-isOlap [GOOD] >> AnalyzeDatashard::AnalyzeOneTable [GOOD] >> KqpCost::OlapPointLookup [GOOD] >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow-isSink-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 8339, MsgBus: 4356 2026-01-04T02:46:39.070121Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332359129188259:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:39.070736Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:39.316817Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:39.344752Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:39.348299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:39.385864Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:39.441429Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332359129188234:2081] 1767494799066218 != 1767494799066221 2026-01-04T02:46:39.459764Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:39.517829Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:39.603912Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:39.603935Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:39.604014Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:39.604083Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:40.080708Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:40.223701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:40.273733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.423516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.568641Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.640755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.199654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332372014091994:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.199801Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.200225Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332372014092004:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.200305Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.554962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.584060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.611223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.636774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.668122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.705210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.782223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.866109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.929984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332372014092881:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.930089Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.930113Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332372014092886:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.930338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332372014092888:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.930376Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.933494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:42.942197Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332372014092889:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:43.000968Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332372014092941:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:44.069193Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332359129188259:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:44.069256Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:44.704359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) query_phases { duration_us: 648 cpu_time_us: 648 } query_phases { duration_us: 3366 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1023 affected_shards: 1 } compilation { duration_us: 76674 cpu_time_us: 71305 } process_cpu_time_us: 1112 total_duration_us: 82412 total_cpu_time_us: 74088 query_phases { duration_us: 628 cpu_time_us: 628 } query_phases { duration_us: 2833 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1030 affected_shards: 1 } compilation { duration_us: 75872 cpu_time_us: 69964 } process_cpu_time_us: 1044 total_duration_us: 80846 total_cpu_time_us: 72666 2026-01-04T02:46:45.406116Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7591332384898995255:2561], TxId: 281474976710678, task: 1. Ctx: { TraceId : 01ke3ecsk9brhv9vtwmgwt5ha2. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=ZmVjMjIzYi01Y2NmZTU1ZC00ZDEwOWE3LTRmNTUxZDFi. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-04T02:46:45.406525Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7591332384898995257:2562], TxId: 281474976710678, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3ecsk9brhv9vtwmgwt5ha2. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=ZmVjMjIzYi01Y2NmZTU1ZC00ZDEwOWE3LTRmNTUxZDFi. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7591332384898995252:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-04T02:46:45.407107Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZmVjMjIzYi01Y2NmZTU1ZC00ZDEwOWE3LTRmNTUxZDFi, ActorId: [1:7591332380604027795:2521], ActorState: ExecuteState, LegacyTraceId: 01ke3ecsk9brhv9vtwmgwt5ha2, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 925 cpu_time_us: 925 } query_phases { duration_us: 5122 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 7239 affected_shards: 1 } query_phases { duration_us: 28746 cpu_time_us: 29234 } compilation { duration_us: 266104 cpu_time_us: 255841 } process_cpu_time_us: 2095 total_duration_us: 309318 total_cpu_time_us: 295334 query_phases { duration_us: 757 cpu_time_us: 757 } query_phases { duration_us: 3291 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 2635 affected_shards: 1 } query_phases { duration_us: 1318 cpu_time_us: 1711 } query_phases { duration_us: 4451 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1184 affected_shards: 1 } compilation { duration_us: 240925 cpu_time_us: 234736 } process_cpu_time_us: 1684 total_duration_us: 253921 total_cpu_time_us: 242707 query_phases { duration_us: 752 cpu_time_us: 752 } query_phases { duration_us: 5634 table_access { name: "/Root/TestTable" partitions_count: 1 } cpu_time_us: 3990 affected_shards: 1 } query_phases { duration_us: 1329 cpu_time_us: 564 affected_shards: 1 } compilation { duration_us: 253911 cpu_time_us: 244824 } process_cpu_time_us: 1670 total_duration_us: 267928 total_cpu_time_us: 251800 query_phases { duration_us: 707 cpu_time_us: 707 } query_phases { duration_us: 3546 table_access { name: "/Root/TestTable" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 3617 affected_shards: 1 } query_phases { duration_us: 3705 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1300 affected_shards: 1 } compilation { duration_us: 201991 cpu_time_us: 195262 } process_cpu_time_us: 1581 total_duration_us: 214181 total_cpu_time_us: 202467 query_phases { duration_us: 563 cpu_time_us: 563 } query_phases { duration_us: 3105 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1015 affected_shards: 1 } compilation { duration_us: 78709 cpu_time_us: 72411 } process_cpu_time_us: 978 total_duration_us: 84782 total_cpu_time_us: 74967 query_phases { duration_us: 500 cpu_time_us: 500 } query_phases { duration_us: 2919 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 871 affected_shards: 1 } compilation { duration_us: 73739 cpu_time_us: 67472 } process_cpu_time_us: 906 total_duration_us: 78658 total_cpu_time_us: 69749 >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-anonymous [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanScriptingRangeFullScan+SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 30401, MsgBus: 15102 2026-01-04T02:46:41.155346Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332369206385773:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:41.155444Z node 1 :METADATA_PROVIDER ERROR: 
log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:41.385311Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:41.389583Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:41.389664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:41.413289Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:41.500236Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:41.505897Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332369206385744:2081] 1767494801152944 != 1767494801152947 2026-01-04T02:46:41.556417Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:41.556438Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:41.556445Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:41.556531Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:41.653911Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:41.963770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:42.026874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.162052Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:42.166292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.315284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.385337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.348855Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332382091289510:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.348984Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.349284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332382091289520:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.349340Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.669216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.704151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.738727Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.767393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.798254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.828762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.890067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.931124Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.999589Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332382091290388:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.999665Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.999898Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332382091290393:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.999917Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332382091290394:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.999956Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:45.004045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:45.013003Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332382091290397:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:45.081056Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332386386257744:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:46.155614Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332369206385773:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:46.155681Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:46.998785Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494807029, txId: 281474976710673] shutting down |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::ScanQueryRangeFullScan-SourceRead [GOOD] Test command err: Trying to start YDB, gRPC: 12851, MsgBus: 19648 2026-01-04T02:46:40.975433Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332366044628516:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:40.975526Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:41.168739Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:41.195290Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:41.195372Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:41.250882Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:41.251299Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332366044628475:2081] 1767494800972530 != 1767494800972533 2026-01-04T02:46:41.264212Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:41.348063Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:41.382408Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:41.382434Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:41.382440Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-04T02:46:41.382525Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:41.830696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:41.893537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:41.983515Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:42.049360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.182844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.252217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:43.913184Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332378929532228:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:43.913310Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:43.913610Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332378929532238:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:43.913674Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.259332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.289992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.316888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.348567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.379639Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.417729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.482790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.533576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.598118Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332383224500403:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.598189Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.598301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332383224500408:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.598345Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332383224500409:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.598390Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.602818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:44.613254Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332383224500412:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:44.694306Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332383224500463:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:45.975790Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332366044628516:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:45.975865Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:46.503543Z node 1 :KQP_GATEWAY DEBUG: kqp_metadata_loader.cpp:922: Load table metadata from cache by path, request Path: /Root/Test 2026-01-04T02:46:46.659882Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_snapshot_manager.cpp:37: Start KqpSnapshotManager at [1:7591332391814435334:2521] 2026-01-04T02:46:46.659907Z node 1 :KQP_RESOURCE_MANAGER D ... kqp_scan_fetcher_actor.cpp:594: SelfId: [1:7591332391814435352:2530]. Scheduled table scans, in flight: 0 shards. pending shards to read: 0, pending resolve shards: 0, average read rows: 3, average read bytes: 0, 2026-01-04T02:46:46.688677Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [1:7591332391814435350:2528], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3ectx875etgeb6y6f63x87. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NmM4NDEzMzUtYzczMTMzMzEtZDg3NDIwNmEtYTY3N2I3M2Y=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-04T02:46:46.688679Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: self_id=[1:7591332391814435352:2530];scan_id=1;tx_id=281474976710674;fline=kqp_scan_compute_manager.h:441;event=wait_all_scanner_finished;scans=0; 2026-01-04T02:46:46.688730Z node 1 :KQP_COMPUTE DEBUG: log.h:466: kqp_scan_compute_actor.cpp:216 :TEvFetcherFinished: [1:7591332391814435352:2530] 2026-01-04T02:46:46.688744Z node 1 :KQP_COMPUTE DEBUG: kqp_scan_fetcher_actor.cpp:685: SelfId: [1:7591332391814435352:2530]. EVLOGKQP(max_in_flight:1) InFlightScans:InFlightShards:;wScans=0;wShards=0; {SHARD(72075186224037914):CHUNKS=1;D=0.000000s;PacksCount=1;RowsCount=3;BytesCount=0;MinPackSize=3;MaxPackSize=3;CAVG=0.000000s;CMIN=0.000000s;CMAX=0.000000s;}; 2026-01-04T02:46:46.688790Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:669: TxId: 281474976710674, task: 1. Tasks execution finished, waiting for chunk delivery in output channelId: 1, seqNo: [1] 2026-01-04T02:46:46.688817Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7591332391814435351:2529], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3ectx875etgeb6y6f63x87. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NmM4NDEzMzUtYzczMTMzMzEtZDg3NDIwNmEtYTY3N2I3M2Y=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646923 2026-01-04T02:46:46.688865Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:163: TxId: 281474976710674, task: 2. 
Finish input channelId: 1, from: [1:7591332391814435350:2528] 2026-01-04T02:46:46.688914Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7591332391814435351:2529], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3ectx875etgeb6y6f63x87. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NmM4NDEzMzUtYzczMTMzMzEtZDg3NDIwNmEtYTY3N2I3M2Y=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:46:46.689131Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710674, task: 1. Tasks execution finished 2026-01-04T02:46:46.689131Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:526: SelfId: [1:7591332391814435351:2529], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3ectx875etgeb6y6f63x87. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NmM4NDEzMzUtYzczMTMzMzEtZDg3NDIwNmEtYTY3N2I3M2Y=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Continue execution, either output buffers are not empty or not all channels are ready, hasDataToSend: 1, channelsReady: 1 2026-01-04T02:46:46.689148Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [1:7591332391814435350:2528], TxId: 281474976710674, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3ectx875etgeb6y6f63x87. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NmM4NDEzMzUtYzczMTMzMzEtZDg3NDIwNmEtYTY3N2I3M2Y=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-04T02:46:46.689291Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710674, task: 1. pass away 2026-01-04T02:46:46.689394Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:334} ActorId: [1:7591332391814435345:2521] TxId: 281474976710674. Ctx: { TraceId: 01ke3ectx875etgeb6y6f63x87, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmM4NDEzMzUtYzczMTMzMzEtZDg3NDIwNmEtYTY3N2I3M2Y=, PoolId: default, IsStreamingQuery: 0}. Send TEvStreamData Recipient# [1:7591332391814435320:2521] SeqNo# 1 Rows# 1 trace_id# 2026-01-04T02:46:46.689397Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710674;task_id=1;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-04T02:46:46.689619Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [1:7591332391814435345:2521] TxId: 281474976710674. Ctx: { TraceId: 01ke3ectx875etgeb6y6f63x87, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmM4NDEzMzUtYzczMTMzMzEtZDg3NDIwNmEtYTY3N2I3M2Y=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7591332391814435350:2528] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 10807 DurationUs: 1000 Tasks { TaskId: 1 CpuTimeUs: 915 FinishTimeMs: 1767494806689 OutputRows: 1 OutputBytes: 19 Tables { TablePath: "/Root/Test" ReadRows: 3 ReadBytes: 96 } ComputeCpuTimeUs: 115 BuildCpuTimeUs: 800 Sources { IngressName: "CS" Ingress { } } HostName: "ghrun-46vtxmsa7i" NodeId: 1 StartTimeMs: 1767494806688 CreateTimeMs: 1767494806673 UpdateTimeMs: 1767494806689 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:46:46.689624Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710674, taskId: 1. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-04T02:46:46.689725Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710674. Ctx: { TraceId: 01ke3ectx875etgeb6y6f63x87, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmM4NDEzMzUtYzczMTMzMzEtZDg3NDIwNmEtYTY3N2I3M2Y=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7591332391814435350:2528] 2026-01-04T02:46:46.689796Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [1:7591332391814435345:2521] TxId: 281474976710674. Ctx: { TraceId: 01ke3ectx875etgeb6y6f63x87, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmM4NDEzMzUtYzczMTMzMzEtZDg3NDIwNmEtYTY3N2I3M2Y=, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [1:7591332391814435351:2529], trace_id# 2026-01-04T02:46:46.689889Z node 1 :KQP_RESOURCE_MANAGER INFO: kqp_rm_service.cpp:939: Send to publish resource usage for reason: alloc, payload: NodeId: 1 ResourceManagerActorId { RawX1: 7886758914357752171 RawX2: 9223372041149771361 } Timestamp: 1767494806 AvailableComputeActors: 9999 UsedMemory: 0 TotalMemory: 10737418240 Memory { Pool: 1 Available: 10737418240 } ExecutionUnits: 9999 KqpProxyNodeResources { NodeId: 1 DataCenterNumId: 49 ActiveWorkersCount: 1 DataCenterId: "1" } 2026-01-04T02:46:46.692531Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:549} ActorId: [1:7591332391814435345:2521] TxId: 281474976710674. Ctx: { TraceId: 01ke3ectx875etgeb6y6f63x87, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmM4NDEzMzUtYzczMTMzMzEtZDg3NDIwNmEtYTY3N2I3M2Y=, PoolId: default, IsStreamingQuery: 0}. Send ack ChannelId# 2 SeqNo# 1 Enough# false FreeSpace# 8388468 Recipient# [1:7591332391814435354:2529] trace_id# 2026-01-04T02:46:46.692608Z node 1 :KQP_COMPUTE DEBUG: kqp_pure_compute_actor.cpp:160: SelfId: [1:7591332391814435351:2529], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3ectx875etgeb6y6f63x87. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NmM4NDEzMzUtYzczMTMzMzEtZDg3NDIwNmEtYTY3N2I3M2Y=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. CA StateFunc 271646922 2026-01-04T02:46:46.692678Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:657: TxId: 281474976710674, task: 2. 
Tasks execution finished, don't wait for ack delivery in input channelId: 1, seqNo: [1] 2026-01-04T02:46:46.692697Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:674: TxId: 281474976710674, task: 2. Tasks execution finished 2026-01-04T02:46:46.692710Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_impl.h:546: SelfId: [1:7591332391814435351:2529], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3ectx875etgeb6y6f63x87. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=NmM4NDEzMzUtYzczMTMzMzEtZDg3NDIwNmEtYTY3N2I3M2Y=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Compute state finished. All channels and sinks finished 2026-01-04T02:46:46.692814Z node 1 :KQP_COMPUTE DEBUG: dq_compute_actor_channels.cpp:494: TxId: 281474976710674, task: 2. pass away 2026-01-04T02:46:46.692908Z node 1 :KQP_COMPUTE DEBUG: log.cpp:841: fline=kqp_compute_actor_factory.cpp:60;problem=finish_compute_actor;tx_id=281474976710674;task_id=2;success=1;message={
: Error: COMPUTE_STATE_FINISHED }; 2026-01-04T02:46:46.692955Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [1:7591332391814435345:2521] TxId: 281474976710674. Ctx: { TraceId: 01ke3ectx875etgeb6y6f63x87, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmM4NDEzMzUtYzczMTMzMzEtZDg3NDIwNmEtYTY3N2I3M2Y=, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [1:7591332391814435351:2529] TaskId# 2 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 1882 Tasks { TaskId: 2 StageId: 1 CpuTimeUs: 858 FinishTimeMs: 1767494806692 InputRows: 1 InputBytes: 19 OutputRows: 1 OutputBytes: 19 ResultRows: 1 ResultBytes: 19 ComputeCpuTimeUs: 168 BuildCpuTimeUs: 690 HostName: "ghrun-46vtxmsa7i" NodeId: 1 CreateTimeMs: 1767494806683 UpdateTimeMs: 1767494806692 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:46:46.693006Z node 1 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710674. Ctx: { TraceId: 01ke3ectx875etgeb6y6f63x87, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmM4NDEzMzUtYzczMTMzMzEtZDg3NDIwNmEtYTY3N2I3M2Y=, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [1:7591332391814435351:2529] 2026-01-04T02:46:46.693081Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:404: TxId: 281474976710674, taskId: 2. Released resources, Memory: 0, Free Tier: 1048576, ExecutionUnits: 1. 2026-01-04T02:46:46.693175Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [1:7591332391814435345:2521] TxId: 281474976710674. Ctx: { TraceId: 01ke3ectx875etgeb6y6f63x87, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmM4NDEzMzUtYzczMTMzMzEtZDg3NDIwNmEtYTY3N2I3M2Y=, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-04T02:46:46.693219Z node 1 :KQP_RESOURCE_MANAGER DEBUG: kqp_rm_service.cpp:903: Schedule publish at 2026-01-04T02:46:48.689401Z, after 1.996316s 2026-01-04T02:46:46.693238Z node 1 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [1:7591332391814435345:2521] TxId: 281474976710674. Ctx: { TraceId: 01ke3ectx875etgeb6y6f63x87, Database: /Root, SessionId: ydb://session/3?node_id=1&id=NmM4NDEzMzUtYzczMTMzMzEtZDg3NDIwNmEtYTY3N2I3M2Y=, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.012689s ReadRows: 3 ReadBytes: 96 RequestUnits# 8 ForceFlag# true trace_id# 2026-01-04T02:46:46.694119Z node 1 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 1767494806707, txId: 281474976710673] shutting down |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails-isSink-isOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableRebootSaTabletBeforeSave [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:46:07.686621Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:07.757866Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:07.763150Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:07.763401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:07.763522Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:08.132780Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:08.222244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:08.222323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:08.268038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:08.329574Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:08.939248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:08.939821Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:08.939877Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:08.939922Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:08.940439Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:09.004117Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:09.514340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:11.872059Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:11.878133Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:11.880990Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:11.904073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:11.904150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:11.930456Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:11.932069Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:12.104588Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:12.104727Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:12.106460Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.107211Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.107806Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.109099Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.109378Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.109492Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.109764Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.109945Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.110056Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.136028Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:12.319239Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:12.338517Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:12.338608Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:12.369185Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:12.370740Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:12.371013Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:12.371087Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:12.371152Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:12.371195Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:12.371250Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:12.371309Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:12.371994Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:12.372382Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:46:12.375413Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:12.385859Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:46:12.385935Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:46:12.386035Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:12.398934Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:12.436174Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:12.445281Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:12.448563Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:12.448730Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:12.478378Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:46:12.478571Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:46:12.581536Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:12.608860Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:12.862326Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:12.864251Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2553:2875], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:12.867292Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:12.870006Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:12.870084Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... T02:46:46.144122Z node 2 :STATISTICS DEBUG: tx_init.cpp:74: [72075186224037894] Loaded traversal table local path id: 34 2026-01-04T02:46:46.144160Z node 2 :STATISTICS DEBUG: tx_init.cpp:79: [72075186224037894] Loaded traversal start time: 1767494805991902 2026-01-04T02:46:46.144222Z node 2 :STATISTICS DEBUG: tx_init.cpp:84: [72075186224037894] Loaded traversal IsColumnTable: 1 2026-01-04T02:46:46.144265Z node 2 :STATISTICS DEBUG: tx_init.cpp:89: [72075186224037894] Loaded global traversal round: 2 2026-01-04T02:46:46.144307Z node 2 :STATISTICS DEBUG: tx_init.cpp:64: [72075186224037894] Loaded traversal table database: 2026-01-04T02:46:46.144382Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 1 2026-01-04T02:46:46.144434Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:46.144522Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-01-04T02:46:46.144586Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:46.144671Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:46.144738Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:46.144861Z node 2 :STATISTICS DEBUG: tx_init.cpp:306: [72075186224037894] TTxInit::Complete. Start navigate. PathId [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:46:46.145923Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:46.147241Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-04T02:46:46.147320Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-04T02:46:46.147452Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:5601:4792] Owner: [2:5600:4791]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:46:46.147518Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:5601:4792] Owner: [2:5600:4791]. 
Column diff is empty, finishing 2026-01-04T02:46:46.149277Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-04T02:46:46.149363Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-04T02:46:46.151103Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-04T02:46:46.180788Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5610:4799] 2026-01-04T02:46:46.181095Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:5610:4799], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-01-04T02:46:46.181242Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5564:4771], server id = [2:5610:4799], tablet id = 72075186224037894, status = OK 2026-01-04T02:46:46.181457Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:5611:4800] 2026-01-04T02:46:46.181577Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:5611:4800], schemeshard id = 72075186224037897 2026-01-04T02:46:46.244725Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-04T02:46:46.244870Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2026-01-04T02:46:46.245715Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5616:4805], server id = [2:5620:4809], tablet id = 72075186224037899, status = OK 2026-01-04T02:46:46.245858Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5616:4805], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:46.246226Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5617:4806], server id = [2:5621:4810], tablet id = 72075186224037900, status = OK 2026-01-04T02:46:46.246290Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5617:4806], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:46.247166Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5618:4807], server id = [2:5622:4811], tablet id = 72075186224037901, status = OK 2026-01-04T02:46:46.247229Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5618:4807], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:46.247877Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5619:4808], server id = [2:5623:4812], tablet id = 72075186224037902, status = OK 2026-01-04T02:46:46.247946Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5619:4808], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:46.249187Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-04T02:46:46.249732Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5616:4805], server id = [2:5620:4809], tablet id = 72075186224037899 2026-01-04T02:46:46.249782Z node 2 
:STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:46.250090Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-04T02:46:46.250466Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-04T02:46:46.250616Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5617:4806], server id = [2:5621:4810], tablet id = 72075186224037900 2026-01-04T02:46:46.250638Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:46.250821Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-04T02:46:46.250855Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-04T02:46:46.251022Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-04T02:46:46.251211Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-04T02:46:46.251440Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5628:4817], ActorId: [2:5629:4818], Starting query actor #1 [2:5630:4819] 2026-01-04T02:46:46.251491Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5629:4818], ActorId: [2:5630:4819], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:46:46.254386Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5618:4807], server id = [2:5622:4811], tablet id = 72075186224037901 2026-01-04T02:46:46.254417Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:46.254764Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5619:4808], server id = [2:5623:4812], tablet id = 72075186224037902 2026-01-04T02:46:46.254791Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:46.255331Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5629:4818], ActorId: [2:5630:4819], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZDMxZmNjY2MtZjdlNzVhZGYtNDY3YzEzZGItMzA3MzYzZTg=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:46:46.355358Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5639:4828]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:46.355674Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:46:46.355728Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5639:4828], StatRequests.size() = 1 2026-01-04T02:46:46.524225Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5629:4818], ActorId: [2:5630:4819], DataQuery 
#1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDMxZmNjY2MtZjdlNzVhZGYtNDY3YzEzZGItMzA3MzYzZTg=, TxId: 2026-01-04T02:46:46.524352Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5629:4818], ActorId: [2:5630:4819], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDMxZmNjY2MtZjdlNzVhZGYtNDY3YzEzZGItMzA3MzYzZTg=, TxId: 2026-01-04T02:46:46.524885Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5628:4817], ActorId: [2:5629:4818], Got response [2:5630:4819] SUCCESS 2026-01-04T02:46:46.525341Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:46:46.542995Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:46:46.543078Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 2026-01-04T02:46:46.639008Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5666:4836]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:46.639454Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:46:46.639526Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:46:46.639883Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:46:46.639972Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:46:46.640037Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:46:46.643930Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::CTAS-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 26958, MsgBus: 4282 2026-01-04T02:46:41.342049Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332369308365118:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:41.342745Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:41.569908Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:41.597436Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:41.597500Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:41.664586Z node 1 
:IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:41.673110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:41.754573Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:41.754608Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:41.754620Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:41.754720Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:41.804298Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:42.161377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:42.235042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.356666Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:42.375052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.520864Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.579367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.326231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332382193268827:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.326322Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.326549Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332382193268837:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.326613Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.668317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.702603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.733170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.763019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.793542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.829042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.864259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.913087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.989480Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332382193269708:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.989574Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.989760Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332382193269713:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.989839Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332382193269714:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.989886Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.993631Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:45.010772Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332382193269717:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:45.079917Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332386488237064:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:46.342550Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332369308365118:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:46.342614Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:46.599548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.866704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) query_phases { duration_us: 6090 table_access { name: "/Root/.tmp/sessions/b81fd11d-4698-f963-aee0-cab26852dc93/Root/TestTable2_f75102a4-47a9-d564-99ce-8b9648833f96" updates { rows: 4 bytes: 80 } partitions_count: 1 } table_access { name: "/Root/TestTable" reads { rows: 4 bytes: 80 } partitions_count: 1 } cpu_time_us: 4604 affected_shards: 1 } compilation { duration_us: 11959 cpu_time_us: 8243 } process_cpu_time_us: 980 total_duration_us: 287922 total_cpu_time_us: 13827 2026-01-04T02:46:47.038753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:46:47.044533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapWriteRow [GOOD] Test command err: Trying to start YDB, gRPC: 26420, MsgBus: 12014 2026-01-04T02:46:39.070107Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332358463180797:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:39.070187Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:39.310048Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:39.363877Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:39.363958Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:39.414675Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332358463180765:2081] 1767494799066638 != 1767494799066641 2026-01-04T02:46:39.422439Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:39.424075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:39.569841Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:39.603918Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:39.603954Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:39.603970Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:39.604035Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:40.079673Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:40.200962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:40.266023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.393219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.543790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.606516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.030968Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371348084532:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.031126Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.031514Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371348084541:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.031563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.554930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.584104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.611482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.638645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.668101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.707443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.780181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.853098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.919489Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371348085423:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.919563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.919614Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371348085428:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.919761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371348085430:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.919804Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.922721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:42.931798Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332371348085431:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:46:43.032070Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332375643052781:3778] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:44.072031Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332358463180797:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:44.072100Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:44.548345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/creat ... 54;result=not_found; 2026-01-04T02:46:44.941111Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:44.941120Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:44.948930Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:44.948985Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:44.948998Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:44.950040Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:44.950091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:44.950106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:44.956869Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 
2026-01-04T02:46:44.956924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:44.956939Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; query_phases { duration_us: 6764 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 2004 affected_shards: 1 } query_phases { duration_us: 7851 cpu_time_us: 260 affected_shards: 1 } compilation { duration_us: 58610 cpu_time_us: 53942 } process_cpu_time_us: 754 total_duration_us: 75255 total_cpu_time_us: 56960 query_phases { duration_us: 6483 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 1606 affected_shards: 1 } query_phases { duration_us: 6084 cpu_time_us: 242 affected_shards: 1 } compilation { duration_us: 49819 cpu_time_us: 45783 } process_cpu_time_us: 709 total_duration_us: 64265 total_cpu_time_us: 48340 query_phases { duration_us: 21016 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 1925 affected_shards: 1 } query_phases { duration_us: 6868 cpu_time_us: 272 affected_shards: 1 } compilation { duration_us: 51556 cpu_time_us: 47178 } process_cpu_time_us: 758 total_duration_us: 83695 total_cpu_time_us: 50133 query_phases { duration_us: 18093 table_access { name: "/Root/TestTable" updates { rows: 2 bytes: 744 } partitions_count: 2 } cpu_time_us: 7172 affected_shards: 2 } query_phases { duration_us: 9496 cpu_time_us: 241 affected_shards: 2 } compilation { duration_us: 75702 cpu_time_us: 70073 } process_cpu_time_us: 802 total_duration_us: 105355 total_cpu_time_us: 78288 query_phases { duration_us: 21633 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 1614 affected_shards: 1 } query_phases { duration_us: 5777 cpu_time_us: 209 affected_shards: 1 } compilation { duration_us: 78334 cpu_time_us: 73411 } process_cpu_time_us: 751 total_duration_us: 107749 total_cpu_time_us: 75985 query_phases { duration_us: 10461 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 2243 affected_shards: 1 } query_phases { duration_us: 14179 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 1829 affected_shards: 2 } query_phases { duration_us: 7599 cpu_time_us: 234 affected_shards: 2 } compilation { duration_us: 109049 cpu_time_us: 104058 } process_cpu_time_us: 1254 total_duration_us: 144614 total_cpu_time_us: 109618 2026-01-04T02:46:45.892627Z node 1 :TX_COLUMNSHARD_RESTORE WARN: log.cpp:841: tablet_id=72075186224037935;tablet_actor_id=[1:7591332379938020518:2546];this=137413713394368;activity=1;task_id=9bf78cce-e91711f0-8a902c22-9fa11e1d::4;fline=restore.cpp:28;event=merge_data_problems;write_id=4;tablet_id=72075186224037935;message=Conflict with existing key. 
{"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]}; 2026-01-04T02:46:45.893026Z node 1 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037935;self_id=[1:7591332379938020518:2546];ev=NKikimr::NColumnShard::TEvPrivate::TEvWriteBlobsResult;tablet_id=72075186224037935;event=TEvWriteBlobsResult;fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]};tx_id=281474976715689; 2026-01-04T02:46:45.893455Z node 1 :TX_COLUMNSHARD_SCAN WARN: actor.cpp:152: Scan [1:7591332384232988634:2821] got AbortExecution txId: 281474976715689 scanId: 1 gen: 1 tablet: 72075186224037935 code: ABORTED reason: {
: Error: task finished: Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]} } 2026-01-04T02:46:45.907155Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:7591332384232988631:2819], Table: `/Root/TestTable` ([72057594046644480:54:1]), SessionActorId: [0:0:0]Got CONSTRAINT VIOLATION for table `/Root/TestTable`. ShardID=72075186224037935, Sink=[1:7591332384232988631:2819].{
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]}, code: 2012 } 2026-01-04T02:46:45.907252Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2088: SelfId: [1:7591332384232988628:2819], TxId: 281474976715689, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3ect9z76rgpq754k7vzx8e. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=OWQxZWVjNDctMWIyY2JhNjktYmViYjQ3NDQtYWJlNWMxMTM=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Sink[0] fatal error: {
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012 subissue: {
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]}, code: 2012 } } 2026-01-04T02:46:45.907330Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7591332384232988628:2819], TxId: 281474976715689, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3ect9z76rgpq754k7vzx8e. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=OWQxZWVjNDctMWIyY2JhNjktYmViYjQ3NDQtYWJlNWMxMTM=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Constraint violated. Table: `/Root/TestTable`., code: 2012 subissue: {
: Error: Conflict with existing key. {"sorting_columns":[{"name":"Group","value":"1"},{"name":"Name","value":"Anna"}],"fields":["Group: Uint32","Name: String"]}, code: 2012 } }. query_phases { duration_us: 42514 cpu_time_us: 3418 } compilation { duration_us: 42396 cpu_time_us: 38760 } process_cpu_time_us: 860 total_duration_us: 87045 total_cpu_time_us: 43038 2026-01-04T02:46:45.910368Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OWQxZWVjNDctMWIyY2JhNjktYmViYjQ3NDQtYWJlNWMxMTM=, ActorId: [1:7591332379938020382:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3ect9z76rgpq754k7vzx8e, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key. {\"sorting_columns\":[{\"name\":\"Group\",\"value\":\"1\"},{\"name\":\"Name\",\"value\":\"Anna\"}],\"fields\":[\"Group: Uint32\",\"Name: String\"]}" issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 10689 cpu_time_us: 2134 affected_shards: 1 } query_phases { duration_us: 5103 cpu_time_us: 206 affected_shards: 1 } compilation { duration_us: 69262 cpu_time_us: 64110 } process_cpu_time_us: 832 total_duration_us: 87134 total_cpu_time_us: 67282 query_phases { duration_us: 5478 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1542 affected_shards: 1 } query_phases { duration_us: 7111 cpu_time_us: 237 affected_shards: 1 } compilation { duration_us: 52625 cpu_time_us: 48370 } process_cpu_time_us: 729 total_duration_us: 67286 total_cpu_time_us: 50878 query_phases { duration_us: 7056 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 2069 affected_shards: 1 } query_phases { duration_us: 5114 cpu_time_us: 259 affected_shards: 1 } compilation { duration_us: 57503 cpu_time_us: 52462 } process_cpu_time_us: 828 total_duration_us: 72360 total_cpu_time_us: 55618 query_phases { duration_us: 6024 table_access { name: "/Root/TestTable" deletes { rows: 1 } partitions_count: 1 } cpu_time_us: 1635 affected_shards: 1 } query_phases { duration_us: 10063 table_access { name: "/Root/TestTable" updates { rows: 1 bytes: 368 } partitions_count: 1 } cpu_time_us: 1748 affected_shards: 2 } query_phases { duration_us: 8597 cpu_time_us: 215 affected_shards: 2 } compilation { duration_us: 85848 cpu_time_us: 80221 } process_cpu_time_us: 1144 total_duration_us: 113265 total_cpu_time_us: 84963 query_phases { duration_us: 870 cpu_time_us: 870 } query_phases { duration_us: 187860 table_access { name: "/Root/TestTable" reads { rows: 2 bytes: 40 } deletes { rows: 2 } partitions_count: 2 } cpu_time_us: 77796 affected_shards: 10 } query_phases { duration_us: 11901 cpu_time_us: 668 affected_shards: 10 } compilation { duration_us: 396148 cpu_time_us: 382848 } process_cpu_time_us: 1971 total_duration_us: 601542 total_cpu_time_us: 464153 |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching >> KqpCost::QuerySeviceRangeFullScan [GOOD] >> KqpCost::CTAS+isOlap [GOOD] |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |94.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-false 
|94.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-ordinaryuser [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-dbadmin >> BasicUsage::WriteAndReadSomeMessagesWithNoCompression [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin-StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 15956, MsgBus: 21210 2026-01-04T02:46:41.049242Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332367112774604:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:41.049387Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:41.244283Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:41.244425Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:41.295504Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:41.302451Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:41.323904Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332367112774499:2081] 1767494801044445 != 1767494801044448 2026-01-04T02:46:41.339654Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:41.429663Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:41.429688Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:41.429701Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:41.429797Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:41.459803Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:41.780226Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:41.785761Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:46:41.848951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:41.989223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.062236Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:42.127391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.191323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:43.834469Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332375702710961:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:43.834544Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:43.834791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332375702710971:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:43.834848Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.094875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.126847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.155846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.187131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.217726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.250281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.308447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.348168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.429444Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332379997679139:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.429531Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.429600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332379997679144:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.429786Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332379997679146:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.429868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.433204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:44.443355Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332379997679147:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:46:44.548892Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332379997679199:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:46.047928Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332367112774604:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:46.048003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:46.384223Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.442257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.468883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/Join1_2 1 19 /Root/Join1_1 8 136 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAtLeast8BytesInStorage+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 22780, MsgBus: 22340 2026-01-04T02:46:41.140110Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332367264012266:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:41.141732Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:41.330025Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:41.372607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:41.372714Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:41.429941Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332367264012235:2081] 1767494801137709 != 1767494801137712 2026-01-04T02:46:41.431345Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-04T02:46:41.433774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:41.522432Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:41.522461Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:41.522468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:41.522547Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:41.557909Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:41.939609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:41.945246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:46:42.010716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.128332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.174615Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:42.273910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.343451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.407198Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332380148915992:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.407336Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.407707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332380148916002:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.407793Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.687371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.717383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.789489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.819776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.851085Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.884079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.914080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.959196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:45.030963Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332384443884167:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:45.031047Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:45.031325Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332384443884172:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:45.031337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332384443884173:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:45.031407Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:45.035120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:45.044936Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332384443884176:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:46:45.105053Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332384443884227:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:46.139453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332367264012266:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:46.139527Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:46.708660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/SecondaryKeys/Index/indexImplTable 1 8 /Root/SecondaryKeys 1 8 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails+isSink-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 25448, MsgBus: 25044 2026-01-04T02:46:41.185665Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332370007160897:2121];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:41.187798Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:41.218201Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:41.472342Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:41.472450Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:41.476017Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:41.481839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:41.577962Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:41.645702Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:41.661632Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:41.661669Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:41.661683Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:41.661850Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:42.113282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:42.161155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.211277Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:42.281991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.428308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.490599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.166607Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332382892064562:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.166759Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.167705Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332382892064572:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.167796Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.513988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.547274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.579543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.614685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.642403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.677233Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.711008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.781414Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.857332Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332382892065447:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.857397Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.857481Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332382892065452:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.857552Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332382892065454:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.857595Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.860978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:44.871516Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332382892065456:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:46:44.972153Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332382892065507:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:46.185589Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332370007160897:2121];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:46.185653Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:46.527247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemesha ... equest, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable2`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 3253 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 1857 } compilation { duration_us: 59714 cpu_time_us: 55465 } process_cpu_time_us: 1024 total_duration_us: 65196 total_cpu_time_us: 58346 2026-01-04T02:46:47.445207Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=6; 2026-01-04T02:46:47.445551Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:7591332395776968076:2531], Table: `/Root/TestTable2` ([72057594046644480:55:1]), SessionActorId: [1:7591332391482000406:2531]Got CONSTRAINT VIOLATION for table `/Root/TestTable2`. ShardID=72075186224037928, Sink=[1:7591332395776968076:2531].{
: Error: Conflict with existing key., code: 2012 } 2026-01-04T02:46:47.445648Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7591332395776968069:2531], SessionActorId: [1:7591332391482000406:2531], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7591332391482000406:2531]. 2026-01-04T02:46:47.445927Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=OTc1ZTE0YWQtNzllN2ZmMDItMTY3ZmM3MS1mNjVlM2RlYw==, ActorId: [1:7591332391482000406:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3ecvtb89eyewqqs89nsgaq, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7591332395776968070:2531] from: [1:7591332395776968069:2531] trace_id# 2026-01-04T02:46:47.446033Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7591332395776968070:2531] TxId: 281474976715687. Ctx: { TraceId: 01ke3ecvtb89eyewqqs89nsgaq, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTc1ZTE0YWQtNzllN2ZmMDItMTY3ZmM3MS1mNjVlM2RlYw==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-04T02:46:47.446345Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OTc1ZTE0YWQtNzllN2ZmMDItMTY3ZmM3MS1mNjVlM2RlYw==, ActorId: [1:7591332391482000406:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3ecvtb89eyewqqs89nsgaq, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable2`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 4170 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1744 } compilation { duration_us: 65476 cpu_time_us: 60231 } process_cpu_time_us: 755 total_duration_us: 74180 total_cpu_time_us: 62730 2026-01-04T02:46:47.518164Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=7; 2026-01-04T02:46:47.518460Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:7591332395776968097:2531], Table: `/Root/TestTable2` ([72057594046644480:55:1]), SessionActorId: [1:7591332391482000406:2531]Got CONSTRAINT VIOLATION for table `/Root/TestTable2`. ShardID=72075186224037928, Sink=[1:7591332395776968097:2531].{
: Error: Conflict with existing key., code: 2012 } 2026-01-04T02:46:47.518539Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7591332395776968090:2531], SessionActorId: [1:7591332391482000406:2531], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7591332391482000406:2531]. 2026-01-04T02:46:47.518744Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=OTc1ZTE0YWQtNzllN2ZmMDItMTY3ZmM3MS1mNjVlM2RlYw==, ActorId: [1:7591332391482000406:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3ecvww9a3e10br6w21fa05, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7591332395776968091:2531] from: [1:7591332395776968090:2531] trace_id# 2026-01-04T02:46:47.518827Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7591332395776968091:2531] TxId: 281474976715689. Ctx: { TraceId: 01ke3ecvww9a3e10br6w21fa05, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTc1ZTE0YWQtNzllN2ZmMDItMTY3ZmM3MS1mNjVlM2RlYw==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-04T02:46:47.519072Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OTc1ZTE0YWQtNzllN2ZmMDItMTY3ZmM3MS1mNjVlM2RlYw==, ActorId: [1:7591332391482000406:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3ecvww9a3e10br6w21fa05, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable2`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 3646 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } updates { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 1591 } compilation { duration_us: 60829 cpu_time_us: 55273 } process_cpu_time_us: 812 total_duration_us: 66378 total_cpu_time_us: 57676 2026-01-04T02:46:47.597422Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=8; 2026-01-04T02:46:47.597678Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:7591332395776968115:2531], Table: `/Root/TestTable2` ([72057594046644480:55:1]), SessionActorId: [1:7591332391482000406:2531]Got CONSTRAINT VIOLATION for table `/Root/TestTable2`. ShardID=72075186224037928, Sink=[1:7591332395776968115:2531].{
: Error: Conflict with existing key., code: 2012 } 2026-01-04T02:46:47.597750Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7591332395776968108:2531], SessionActorId: [1:7591332391482000406:2531], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7591332391482000406:2531]. 2026-01-04T02:46:47.597948Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=OTc1ZTE0YWQtNzllN2ZmMDItMTY3ZmM3MS1mNjVlM2RlYw==, ActorId: [1:7591332391482000406:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3ecvz4besbpcc5h772z9cc, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7591332395776968109:2531] from: [1:7591332395776968108:2531] trace_id# 2026-01-04T02:46:47.598036Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7591332395776968109:2531] TxId: 281474976715691. Ctx: { TraceId: 01ke3ecvz4besbpcc5h772z9cc, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTc1ZTE0YWQtNzllN2ZmMDItMTY3ZmM3MS1mNjVlM2RlYw==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-04T02:46:47.598318Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OTc1ZTE0YWQtNzllN2ZmMDItMTY3ZmM3MS1mNjVlM2RlYw==, ActorId: [1:7591332391482000406:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3ecvz4besbpcc5h772z9cc, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable2`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 3284 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } updates { rows: 2 bytes: 40 } partitions_count: 1 } cpu_time_us: 1347 } compilation { duration_us: 68927 cpu_time_us: 62933 } process_cpu_time_us: 742 total_duration_us: 73897 total_cpu_time_us: 65022 2026-01-04T02:46:47.673236Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=9; 2026-01-04T02:46:47.673436Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:7591332395776968143:2531], Table: `/Root/TestTable2` ([72057594046644480:55:1]), SessionActorId: [1:7591332391482000406:2531]Got CONSTRAINT VIOLATION for table `/Root/TestTable2`. ShardID=72075186224037928, Sink=[1:7591332395776968143:2531].{
: Error: Conflict with existing key., code: 2012 } 2026-01-04T02:46:47.673516Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7591332395776968136:2531], SessionActorId: [1:7591332391482000406:2531], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:7591332391482000406:2531]. 2026-01-04T02:46:47.673649Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=OTc1ZTE0YWQtNzllN2ZmMDItMTY3ZmM3MS1mNjVlM2RlYw==, ActorId: [1:7591332391482000406:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3ecw1mf7qdg6hqwnd0wwh3, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:7591332395776968137:2531] from: [1:7591332395776968136:2531] trace_id# 2026-01-04T02:46:47.673709Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7591332395776968137:2531] TxId: 281474976715693. Ctx: { TraceId: 01ke3ecw1mf7qdg6hqwnd0wwh3, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OTc1ZTE0YWQtNzllN2ZmMDItMTY3ZmM3MS1mNjVlM2RlYw==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TestTable2`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-04T02:46:47.673946Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OTc1ZTE0YWQtNzllN2ZmMDItMTY3ZmM3MS1mNjVlM2RlYw==, ActorId: [1:7591332391482000406:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3ecw1mf7qdg6hqwnd0wwh3, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestTable2`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# query_phases { duration_us: 3262 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } updates { rows: 3 bytes: 60 } partitions_count: 1 } cpu_time_us: 1603 } compilation { duration_us: 64086 cpu_time_us: 59041 } process_cpu_time_us: 678 total_duration_us: 69221 total_cpu_time_us: 61322 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapPointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 12329, MsgBus: 28814 2026-01-04T02:46:41.327237Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332367822279115:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:41.327626Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:41.583016Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:41.614337Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:41.614418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:41.658831Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:41.680679Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332367822279091:2081] 1767494801324828 != 1767494801324831 2026-01-04T02:46:41.693502Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:41.750636Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:41.750664Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:41.750670Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:41.750797Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:41.804112Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:42.167369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:42.180373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:46:42.238590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.334533Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:42.384613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.537769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.612339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.219518Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332380707182853:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.219644Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.220014Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332380707182863:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.220072Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.547077Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.574488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.603689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.633557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.662902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.696173Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.744992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.784615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.852319Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332380707183737:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.852382Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.852531Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332380707183742:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.852604Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332380707183744:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.852678Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.856158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:44.867606Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332380707183746:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:46:44.951064Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332380707183797:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:46.327160Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332367822279115:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:46.327254Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:46.637907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... p:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:46:46.997356Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:46:46.997387Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:46:46.997419Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=SyncPortionFromChunks;id=11; 2026-01-04T02:46:46.997449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV1Chunks_V2;id=RestoreV1Chunks_V2; 2026-01-04T02:46:46.997621Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV1Chunks_V2;id=15; 2026-01-04T02:46:46.997661Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV2Chunks;id=RestoreV2Chunks; 2026-01-04T02:46:46.997770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV2Chunks;id=16; 2026-01-04T02:46:46.997823Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanDeprecatedSnapshot;id=CleanDeprecatedSnapshot; 2026-01-04T02:46:46.997880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanDeprecatedSnapshot;id=17; 2026-01-04T02:46:46.997920Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreV0ChunksMeta;id=RestoreV0ChunksMeta; 2026-01-04T02:46:46.997961Z 
node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreV0ChunksMeta;id=18; 2026-01-04T02:46:46.997992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CopyBlobIdsToV2;id=CopyBlobIdsToV2; 2026-01-04T02:46:46.998116Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CopyBlobIdsToV2;id=19; 2026-01-04T02:46:46.998155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2026-01-04T02:46:46.998245Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2026-01-04T02:46:46.998315Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2026-01-04T02:46:47.001580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7591332389297118867:2539];ev=NActors::IEventHandle;tablet_id=72075186224037931;tx_id=281474976715673;this=137163460596544;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1767494807001;max=18446744073709551615;plan=0;src=[1:7591332367822279439:2145];cookie=442:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.005466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.005466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.005552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.005554Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.005589Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.005588Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.038055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.038124Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.038138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.041067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.041126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.041142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.046081Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.046153Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.046175Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.048954Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.049010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.049024Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.056050Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.056121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.056135Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.063320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.063378Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.063392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.070316Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.070415Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.070431Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.077379Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.077434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:47.077448Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2 >> KqpCost::PointLookup >> KqpCost::OlapRange |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeOneTable [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:46:04.695171Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:04.763765Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:04.769063Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:04.769299Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:04.769393Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:05.059642Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:05.158423Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:05.158537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:05.204713Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:05.284529Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:05.895314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:05.895686Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:05.895726Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:05.895765Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:05.896230Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:05.959811Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:06.476632Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:09.279550Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:09.285160Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:09.287989Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:09.316375Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:09.316480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:09.344228Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:09.346026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:09.527615Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:09.527725Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:09.529125Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.529763Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.530318Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.531371Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.531699Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.531881Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.532116Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.532259Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.532436Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.558399Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:09.780076Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:09.796037Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:09.796097Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:09.815254Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:09.816162Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:09.816310Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:09.816349Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:09.816387Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:09.816420Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:09.816457Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:09.816488Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:09.816952Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:09.817150Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:46:09.818926Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:09.825985Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:46:09.826041Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:46:09.826131Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:09.834547Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:09.843383Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:09.847556Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:09.850276Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:09.850359Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:09.871897Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:46:09.872272Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:46:09.972691Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:10.109067Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:10.262552Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:10.264265Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:10.267248Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:10.270180Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:10.270275Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... atabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:36.361335Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:3995:3535], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:36.364591Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720659:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:36.417877Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:3977:3529], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720659 completed, doublechecking } 2026-01-04T02:46:36.621349Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4064:3577], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:36.666792Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4063:3576] txid# 281474976720660, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:36.867709Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4085:3590]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:36.867940Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:46:36.868029Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4087:3592] 2026-01-04T02:46:36.868082Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4087:3592] 2026-01-04T02:46:36.868479Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4088:3593] 2026-01-04T02:46:36.868618Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4087:3592], server id = [2:4088:3593], tablet id = 72075186224037894, status = OK 2026-01-04T02:46:36.868689Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4088:3593], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:46:36.868755Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:46:36.868907Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:46:36.868994Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4085:3590], StatRequests.size() = 1 2026-01-04T02:46:36.869133Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:46:42.627845Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:3940:2462], ActorId: [2:3950:3515], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:46:42.628105Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:3940:2462], ActorId: [2:3950:3515], Start read next stream part 2026-01-04T02:46:42.629699Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:3940:2462], ActorId: [2:4169:3651], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:46:42.633241Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33160, txId: 281474976720661] shutting down 2026-01-04T02:46:42.633378Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:3940:2462], ActorId: [2:4169:3651], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-04T02:46:42.633492Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:3940:2462], ActorId: [2:4169:3651], Start read next stream part 2026-01-04T02:46:42.636824Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:3940:2462], ActorId: [2:3950:3515], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:46:42.636906Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:3940:2462], ActorId: [2:3950:3515], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODU5YjdlYWYtZDIzNTNmYTMtZGFkMWJiNDktNDQ4ZTk2Y2U=, TxId: 2026-01-04T02:46:42.730777Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4194:3672]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:42.731105Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:46:42.731163Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4194:3672], StatRequests.size() = 1 2026-01-04T02:46:47.057240Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:3940:2462], ActorId: [2:4169:3651], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:46:47.057400Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:3940:2462], ActorId: [2:4169:3651], Start read next stream part 2026-01-04T02:46:47.059956Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4265:3721], ActorId: [2:4266:3722], Starting query actor #1 [2:4267:3723] 2026-01-04T02:46:47.060050Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4266:3722], ActorId: [2:4267:3723], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:46:47.060509Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33170, txId: 281474976720664] shutting down 2026-01-04T02:46:47.063507Z node 2 :SYSTEM_VIEWS WARN: tx_interval_summary.cpp:204: [72075186224037891] TEvIntervalQuerySummary, wrong stage: node id# 2 2026-01-04T02:46:47.064192Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4266:3722], ActorId: [2:4267:3723], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YTA3MzRkNjktZjVjZGIxMTktNWVhYTgyZTktYmY1YWMyMjc=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:46:47.065203Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:3940:2462], ActorId: [2:4169:3651], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:46:47.065265Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:3940:2462], ActorId: [2:4169:3651], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Mzc3MTgyZmEtZWNkNDIzODMtYzNkZGE1MmEtOWJkOTNkMzk=, TxId: 2026-01-04T02:46:47.126422Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4284:3737]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:47.126651Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:46:47.126688Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4284:3737], StatRequests.size() = 1 2026-01-04T02:46:47.267683Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4266:3722], ActorId: [2:4267:3723], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YTA3MzRkNjktZjVjZGIxMTktNWVhYTgyZTktYmY1YWMyMjc=, TxId: 2026-01-04T02:46:47.267803Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4266:3722], ActorId: [2:4267:3723], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YTA3MzRkNjktZjVjZGIxMTktNWVhYTgyZTktYmY1YWMyMjc=, TxId: 2026-01-04T02:46:47.268190Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4265:3721], ActorId: [2:4266:3722], Got response [2:4267:3723] SUCCESS 2026-01-04T02:46:47.268659Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:46:47.284399Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:46:47.284504Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3050:3424] 2026-01-04T02:46:47.285203Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4308:3752]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-04T02:46:47.285611Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-04T02:46:47.285674Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:46:47.286013Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-04T02:46:47.286074Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 4 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:46:47.286144Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:46:47.286439Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-04T02:46:47.291487Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-04T02:46:47.292966Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 2026-01-04T02:46:47.293058Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 4 >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] >> KqpCost::CTASWithRetry-isOlap |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclare [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:46:48.506477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:46:48.506580Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:48.506629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:46:48.506666Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:46:48.506709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:46:48.506741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, 
limit 10000 2026-01-04T02:46:48.506804Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:48.506936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:46:48.507911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:46:48.508231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:46:48.593002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:46:48.593071Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:48.608906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:46:48.609162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:46:48.609350Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:46:48.619202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:46:48.619453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:46:48.620261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:46:48.620555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:46:48.624006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:48.624203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:46:48.625370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:46:48.625438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:48.625556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:46:48.625603Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:46:48.625716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:46:48.625960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:46:48.790629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:48.791638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:48.791786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:48.791867Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:48.791935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:48.792024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:48.792095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:48.792160Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: 
ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:48.792266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:48.792343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:48.792409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:48.792513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:48.792584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:48.792689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:48.792774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
oordinatorID: 72057594046316545 2026-01-04T02:46:49.493175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 100:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:100 msg type: 269090816 2026-01-04T02:46:49.493330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 100, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 100 at step: 5000038 FAKE_COORDINATOR: advance: minStep5000038 State->FrontStep: 5000037 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 100 at step: 5000038 2026-01-04T02:46:49.493690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000038, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:46:49.493832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 100 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:46:49.493884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2026-01-04T02:46:49.494186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 100:0 128 -> 240 2026-01-04T02:46:49.494269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2026-01-04T02:46:49.494465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:46:49.494533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:46:49.494597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:46:49.496598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:46:49.496643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:46:49.496816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:46:49.497033Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:49.497098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-04T02:46:49.497157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 100, path id: 38 FAKE_COORDINATOR: Erasing txId 100 2026-01-04T02:46:49.497467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-04T02:46:49.497512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-04T02:46:49.497611Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-04T02:46:49.497648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:46:49.497725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-04T02:46:49.497761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:46:49.497822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready parts: 1/1, is published: false 2026-01-04T02:46:49.497885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:46:49.497936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-04T02:46:49.497987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-04T02:46:49.498072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:46:49.498124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 1 2026-01-04T02:46:49.498287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-04T02:46:49.498329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-04T02:46:49.499623Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:46:49.499712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:46:49.499780Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-04T02:46:49.499844Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-04T02:46:49.499890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:46:49.500724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:46:49.500803Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:46:49.500862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-04T02:46:49.500895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:46:49.500941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:46:49.501019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 1 2026-01-04T02:46:49.501062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:683:2673] 2026-01-04T02:46:49.504615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-04T02:46:49.504943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-04T02:46:49.505018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-04T02:46:49.505056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:684:2674] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 100 2026-01-04T02:46:49.505601Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:46:49.505830Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 225us result status StatusSuccess 2026-01-04T02:46:49.506362Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: 
EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRowInsertFails-isSink-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 9628, MsgBus: 4996 2026-01-04T02:46:39.071754Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332359341512701:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:39.071838Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:39.385822Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:39.406878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:39.407005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:39.407992Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:39.412210Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332359341512672:2081] 1767494799070191 != 1767494799070194 2026-01-04T02:46:39.432344Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:39.587538Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:39.603923Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:39.603946Z 
node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:39.603968Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:39.604115Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:40.079443Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:40.279329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:40.327288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.461193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.603612Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.674048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.283575Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332372226416440:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.283698Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.284272Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332372226416450:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.284359Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.576045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.604948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.633218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.663107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.697069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.737476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.768861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.812156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.897128Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332372226417318:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.897225Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.897323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332372226417323:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.897568Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332372226417325:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.897665Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.903909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:42.913039Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332372226417326:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:43.018403Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332376521384674:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:44.072019Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332359341512701:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:44.072121Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:44.626779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create ... /Root. }. Handle abort execution event from: [1:7591332389406287247:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-04T02:46:46.874869Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7591332389406287254:2624], TxId: 281474976710691, task: 4. Ctx: { CheckpointId : . TraceId : 01ke3ectza2gdpw56fr86ecae0. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YjJjMDQ0MGUtNWM0NmIyMGYtMTUyZDI2MTgtYTJlMDFkMDI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7591332389406287247:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-04T02:46:46.875371Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YjJjMDQ0MGUtNWM0NmIyMGYtMTUyZDI2MTgtYTJlMDFkMDI=, ActorId: [1:7591332380816352234:2521], ActorState: ExecuteState, LegacyTraceId: 01ke3ectza2gdpw56fr86ecae0, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 1227 cpu_time_us: 1227 } query_phases { duration_us: 4468 table_access { name: "/Root/TestTable" partitions_count: 1 } table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 6047 affected_shards: 2 } query_phases { duration_us: 2660 cpu_time_us: 4106 } compilation { duration_us: 356754 cpu_time_us: 348971 } process_cpu_time_us: 2501 total_duration_us: 368751 total_cpu_time_us: 362852 2026-01-04T02:46:47.202572Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7591332393701254596:2639], TxId: 281474976710694, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3ecvb3af7r3bqhj1xt7d68. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YjJjMDQ0MGUtNWM0NmIyMGYtMTUyZDI2MTgtYTJlMDFkMDI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-04T02:46:47.202814Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7591332393701254598:2640], TxId: 281474976710694, task: 2. Ctx: { TraceId : 01ke3ecvb3af7r3bqhj1xt7d68. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YjJjMDQ0MGUtNWM0NmIyMGYtMTUyZDI2MTgtYTJlMDFkMDI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7591332393701254593:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-04T02:46:47.203188Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YjJjMDQ0MGUtNWM0NmIyMGYtMTUyZDI2MTgtYTJlMDFkMDI=, ActorId: [1:7591332380816352234:2521], ActorState: ExecuteState, LegacyTraceId: 01ke3ecvb3af7r3bqhj1xt7d68, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 1095 cpu_time_us: 1095 } query_phases { duration_us: 3299 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 3337 affected_shards: 1 } query_phases { duration_us: 1553 cpu_time_us: 2036 } compilation { duration_us: 309957 cpu_time_us: 301422 } process_cpu_time_us: 2094 total_duration_us: 319445 total_cpu_time_us: 309984 2026-01-04T02:46:47.493875Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7591332393701254634:2651], TxId: 281474976710697, task: 1. Ctx: { TraceId : 01ke3ecvna5bgk97031jv8szbr. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YjJjMDQ0MGUtNWM0NmIyMGYtMTUyZDI2MTgtYTJlMDFkMDI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-04T02:46:47.508381Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7591332393701254636:2652], TxId: 281474976710697, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3ecvna5bgk97031jv8szbr. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YjJjMDQ0MGUtNWM0NmIyMGYtMTUyZDI2MTgtYTJlMDFkMDI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7591332393701254631:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-04T02:46:47.508882Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YjJjMDQ0MGUtNWM0NmIyMGYtMTUyZDI2MTgtYTJlMDFkMDI=, ActorId: [1:7591332380816352234:2521], ActorState: ExecuteState, LegacyTraceId: 01ke3ecvna5bgk97031jv8szbr, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 805 cpu_time_us: 805 } query_phases { duration_us: 30936 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 31625 affected_shards: 1 } query_phases { duration_us: 15768 cpu_time_us: 16186 } compilation { duration_us: 247543 cpu_time_us: 210494 } process_cpu_time_us: 1657 total_duration_us: 297879 total_cpu_time_us: 260767 2026-01-04T02:46:47.742518Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7591332393701254676:2662], TxId: 281474976710700, task: 1. Ctx: { TraceId : 01ke3ecvyvc1e6b8w6mrprhhxk. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YjJjMDQ0MGUtNWM0NmIyMGYtMTUyZDI2MTgtYTJlMDFkMDI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-04T02:46:47.742833Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7591332393701254678:2663], TxId: 281474976710700, task: 2. Ctx: { TraceId : 01ke3ecvyvc1e6b8w6mrprhhxk. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YjJjMDQ0MGUtNWM0NmIyMGYtMTUyZDI2MTgtYTJlMDFkMDI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7591332393701254673:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-04T02:46:47.743270Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YjJjMDQ0MGUtNWM0NmIyMGYtMTUyZDI2MTgtYTJlMDFkMDI=, ActorId: [1:7591332380816352234:2521], ActorState: ExecuteState, LegacyTraceId: 01ke3ecvyvc1e6b8w6mrprhhxk, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 730 cpu_time_us: 730 } query_phases { duration_us: 3155 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 3003 affected_shards: 1 } query_phases { duration_us: 1567 cpu_time_us: 2002 } compilation { duration_us: 218361 cpu_time_us: 210607 } process_cpu_time_us: 1672 total_duration_us: 227165 total_cpu_time_us: 218014 2026-01-04T02:46:48.026513Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7591332397996222018:2673], TxId: 281474976710703, task: 1. Ctx: { TraceId : 01ke3ecw669eby51y8ap2gxedj. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=YjJjMDQ0MGUtNWM0NmIyMGYtMTUyZDI2MTgtYTJlMDFkMDI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-04T02:46:48.026743Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7591332397996222020:2674], TxId: 281474976710703, task: 2. Ctx: { TraceId : 01ke3ecw669eby51y8ap2gxedj. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YjJjMDQ0MGUtNWM0NmIyMGYtMTUyZDI2MTgtYTJlMDFkMDI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7591332397996222015:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-04T02:46:48.027119Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YjJjMDQ0MGUtNWM0NmIyMGYtMTUyZDI2MTgtYTJlMDFkMDI=, ActorId: [1:7591332380816352234:2521], ActorState: ExecuteState, LegacyTraceId: 01ke3ecw669eby51y8ap2gxedj, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 642 cpu_time_us: 642 } query_phases { duration_us: 3025 table_access { name: "/Root/TestTable2" reads { rows: 1 bytes: 8 } partitions_count: 1 } cpu_time_us: 2402 affected_shards: 1 } query_phases { duration_us: 1432 cpu_time_us: 1854 } compilation { duration_us: 268225 cpu_time_us: 261661 } process_cpu_time_us: 1499 total_duration_us: 276319 total_cpu_time_us: 268058 2026-01-04T02:46:48.254368Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7591332397996222058:2686], TxId: 281474976710706, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3ecwf0d2tp2rpfwfb1x4eh. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YjJjMDQ0MGUtNWM0NmIyMGYtMTUyZDI2MTgtYTJlMDFkMDI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2026-01-04T02:46:48.254610Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [1:7591332397996222060:2687], TxId: 281474976710706, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3ecwf0d2tp2rpfwfb1x4eh. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=1&id=YjJjMDQ0MGUtNWM0NmIyMGYtMTUyZDI2MTgtYTJlMDFkMDI=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [1:7591332397996222055:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-04T02:46:48.254935Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YjJjMDQ0MGUtNWM0NmIyMGYtMTUyZDI2MTgtYTJlMDFkMDI=, ActorId: [1:7591332380816352234:2521], ActorState: ExecuteState, LegacyTraceId: 01ke3ecwf0d2tp2rpfwfb1x4eh, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Duplicated keys found." issue_code: 2012 severity: 1 } trace_id# query_phases { duration_us: 872 cpu_time_us: 872 } query_phases { duration_us: 3943 table_access { name: "/Root/TestTable2" partitions_count: 1 } cpu_time_us: 2959 affected_shards: 1 } query_phases { duration_us: 1694 cpu_time_us: 2078 } compilation { duration_us: 212112 cpu_time_us: 205151 } process_cpu_time_us: 1810 total_duration_us: 221549 total_cpu_time_us: 212870 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::QuerySeviceRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 3259, MsgBus: 21111 2026-01-04T02:46:43.259825Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332377597985817:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:43.261442Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:43.456004Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:43.501103Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:43.501226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:43.512209Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:43.562511Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:43.591419Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:43.591438Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:43.591449Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:43.591536Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:43.731182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:43.943762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:46:44.014271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.134791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.264371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.266627Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:44.328067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.126485Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332390482889551:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.126575Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.126845Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332390482889561:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.126898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.454726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.489464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.521547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.556030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.586570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.624728Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.689535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.737819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.810408Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332390482890433:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.810475Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.810532Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332390482890438:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.810690Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332390482890440:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.810751Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.815368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:46.826988Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332390482890442:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:46.909043Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332390482890493:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:48.258686Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332377597985817:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:48.258762Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::CTAS+isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 14029, MsgBus: 9687 2026-01-04T02:46:39.104479Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332358676035247:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:39.104561Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:39.372796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:39.372876Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:39.374971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:39.443270Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:39.444729Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332358676035219:2081] 1767494799103223 != 1767494799103226 2026-01-04T02:46:39.444868Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:39.603917Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:39.603946Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:39.603962Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:39.604028Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:39.717954Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:40.112690Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:40.254179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:40.264002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:46:40.318328Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.447848Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.601010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:40.674739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.296240Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371560938981:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.296358Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.296670Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371560938991:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.296719Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.639273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.675097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.709201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.740610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.772501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.813227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.847397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.912429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:42.976267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371560939863:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.976340Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.976419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371560939868:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.976565Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332371560939870:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.976617Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:42.979897Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:42.988208Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332371560939872:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:43.044377Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332375855907219:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:44.104734Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332358676035247:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:44.104807Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:44.756041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... :56: TColumnShard.StateWork at 72075186224037970 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.371937Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037968 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.371944Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037989 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.371965Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037964 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.371981Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037987 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.371992Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037966 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372017Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037983 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372018Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037960 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372047Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037937 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372063Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037985 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 
281474976710680 2026-01-04T02:46:48.372088Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037956 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372111Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037979 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372133Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037954 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372163Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037981 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372166Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037952 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372194Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037950 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372205Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037942 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372232Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037948 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372241Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037973 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372259Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037946 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372279Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037969 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372287Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037944 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372314Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037976 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372315Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224038000 unhandled event type: 
NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372343Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037974 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372353Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037996 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372371Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037971 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372392Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037994 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372402Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037965 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372429Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037992 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372431Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037967 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372459Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037961 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372464Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037939 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372486Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037963 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372500Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037959 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372514Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037990 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372535Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037986 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372543Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: 
TColumnShard.StateWork at 72075186224037988 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372569Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037982 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372578Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037984 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372611Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037980 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 2026-01-04T02:46:48.372636Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037978 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976710680 query_phases { duration_us: 205992 table_access { name: "/Root/.tmp/sessions/167aeef8-4015-2c72-0581-27bda64dafe1/Root/TestTable2_43840615-4be1-9f94-a33e-168afd778816" updates { rows: 4 bytes: 1472 } partitions_count: 4 } table_access { name: "/Root/TestTable" reads { rows: 4 bytes: 144 } } cpu_time_us: 37030 } compilation { duration_us: 11554 cpu_time_us: 7237 } process_cpu_time_us: 1236 total_duration_us: 3021430 total_cpu_time_us: 45503 2026-01-04T02:46:48.383868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) 2026-01-04T02:46:48.389655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpRmDir, opId: 281474976710682:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_rmdir.cpp:66) |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow+isSink+isOlap [GOOD] >> KqpCost::AAARangeFullScan [GOOD] >> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateColumnTable-NoProtect-NoDbAdmin-anonymous [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-04T02:44:09.962726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:09.962814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, 
WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:09.962862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:09.962900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:09.962936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:09.962964Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:09.963064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:09.963137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:09.963933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:09.965026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:10.065392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:44:10.065473Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:10.066242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:10.077874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:10.081897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:10.083441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:10.089491Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:10.089745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:10.092398Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.094794Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:10.103205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:10.103392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:10.108373Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:10.108439Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:10.108541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:10.108588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:10.108634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:10.108821Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.114757Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:239:2058] recipient: [1:15:2062] 2026-01-04T02:44:10.226747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:10.227949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.228952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:44:10.229014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:44:10.230354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:44:10.230434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:10.233615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.234906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:44:10.235273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.235353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:44:10.235392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:44:10.235446Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:44:10.237782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.237876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:44:10.237938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:44:10.239807Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.239854Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.239903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.239958Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:44:10.244661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:10.246544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:44:10.247690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:44:10.248847Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.248967Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:10.249021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.249321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:44:10.249378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 
72057594046678944 2026-01-04T02:44:10.250254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:10.251257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:44:10.254006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:47.991989Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 139 Version: 5 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:47.992018Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 190 2026-01-04T02:46:47.992044Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 139], version: 5 2026-01-04T02:46:47.992074Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 3 2026-01-04T02:46:47.992453Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 190:2 from tablet: 72057594046678944 to tablet: 72075186233409584 cookie: 72057594046678944:39 msg type: 275382275 2026-01-04T02:46:47.992988Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 4 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:47.993057Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 140 Version: 4 PathOwnerId: 72057594046678944, cookie: 190 2026-01-04T02:46:47.993083Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 190 2026-01-04T02:46:47.993109Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 190, pathId: [OwnerId: 72057594046678944, LocalPathId: 140], version: 4 2026-01-04T02:46:47.993139Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 140] was 4 2026-01-04T02:46:47.993198Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 190, ready parts: 2/3, is published: true 2026-01-04T02:46:47.996770Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-04T02:46:47.996877Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, 
cookie: 190 2026-01-04T02:46:47.998739Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-04T02:46:47.998860Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 190 2026-01-04T02:46:48.011824Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409584 TxId: 190 2026-01-04T02:46:48.011880Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 190, tablet: 72075186233409584, partId: 2 2026-01-04T02:46:48.011968Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 190:2, at schemeshard: 72057594046678944, message: Origin: 72075186233409584 TxId: 190 FAKE_COORDINATOR: Erasing txId 190 2026-01-04T02:46:48.014372Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 190:2, at schemeshard: 72057594046678944 2026-01-04T02:46:48.014597Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 190:2, at schemeshard: 72057594046678944 2026-01-04T02:46:48.014643Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 190:2 ProgressState 2026-01-04T02:46:48.014717Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#190:2 progress is 3/3 2026-01-04T02:46:48.014746Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 190 ready parts: 3/3 2026-01-04T02:46:48.014780Z node 31 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#190:2 progress is 3/3 2026-01-04T02:46:48.014813Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 190 ready parts: 3/3 2026-01-04T02:46:48.014853Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 190, ready parts: 3/3, is published: true 2026-01-04T02:46:48.014914Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [31:304:2294] message: TxId: 190 2026-01-04T02:46:48.014957Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 190 ready parts: 3/3 2026-01-04T02:46:48.014995Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 190:0 2026-01-04T02:46:48.015024Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 190:0 2026-01-04T02:46:48.015096Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 138] was 2 2026-01-04T02:46:48.015123Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 190:1 2026-01-04T02:46:48.015141Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 190:1 2026-01-04T02:46:48.015168Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 139] was 2 2026-01-04T02:46:48.015188Z node 31 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 190:2 2026-01-04T02:46:48.015207Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 190:2 2026-01-04T02:46:48.015275Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 140] was 3 2026-01-04T02:46:48.018011Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 190: got EvNotifyTxCompletionResult 2026-01-04T02:46:48.018058Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 190: satisfy waiter [31:5646:7152] TestWaitNotification: OK eventTxId 190 TestWaitNotification wait txId: 185 2026-01-04T02:46:48.019518Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 185: send EvNotifyTxCompletion 2026-01-04T02:46:48.019559Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 185 TestWaitNotification wait txId: 186 2026-01-04T02:46:48.019657Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 186: send EvNotifyTxCompletion 2026-01-04T02:46:48.019678Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 186 TestWaitNotification wait txId: 187 2026-01-04T02:46:48.019725Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 187: send EvNotifyTxCompletion 2026-01-04T02:46:48.019760Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 187 TestWaitNotification wait txId: 188 2026-01-04T02:46:48.019820Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 188: send EvNotifyTxCompletion 2026-01-04T02:46:48.019849Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 188 TestWaitNotification wait txId: 189 2026-01-04T02:46:48.019911Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 189: send EvNotifyTxCompletion 2026-01-04T02:46:48.019938Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 189 2026-01-04T02:46:48.021951Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 185, at schemeshard: 72057594046678944 2026-01-04T02:46:48.022187Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 186, at schemeshard: 72057594046678944 2026-01-04T02:46:48.022287Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 185: got EvNotifyTxCompletionResult 2026-01-04T02:46:48.022315Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 185: satisfy waiter [31:5689:7195] 2026-01-04T02:46:48.022393Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 187, at schemeshard: 72057594046678944 
2026-01-04T02:46:48.022504Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 186: got EvNotifyTxCompletionResult 2026-01-04T02:46:48.022526Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 186: satisfy waiter [31:5689:7195] 2026-01-04T02:46:48.022645Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 188, at schemeshard: 72057594046678944 2026-01-04T02:46:48.022705Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 187: got EvNotifyTxCompletionResult 2026-01-04T02:46:48.022727Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 187: satisfy waiter [31:5689:7195] 2026-01-04T02:46:48.022838Z node 31 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 189, at schemeshard: 72057594046678944 2026-01-04T02:46:48.022925Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 188: got EvNotifyTxCompletionResult 2026-01-04T02:46:48.022947Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 188: satisfy waiter [31:5689:7195] 2026-01-04T02:46:48.023078Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 189: got EvNotifyTxCompletionResult 2026-01-04T02:46:48.023109Z node 31 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 189: satisfy waiter [31:5689:7195] TestWaitNotification: OK eventTxId 185 TestWaitNotification: OK eventTxId 186 TestWaitNotification: OK eventTxId 187 TestWaitNotification: OK eventTxId 188 TestWaitNotification: OK eventTxId 189 >> KqpCost::Range [GOOD] |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::WriteRow+isSink+isOlap [GOOD] |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseColumnTableAggrStatNonLocalTablet [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:46:04.898708Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:04.975477Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:04.982255Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:04.982609Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:04.982734Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:05.376768Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:05.470765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:05.470883Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:05.518318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:05.617535Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:06.229092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:06.229631Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:06.229678Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:06.229746Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:06.230362Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:06.294743Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:06.837441Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:09.591395Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:09.597141Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:09.600474Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:09.633522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:09.633651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:09.661136Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:09.663099Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:09.842713Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:09.842829Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:09.844327Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.844965Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.845524Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.846651Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.846938Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.847082Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.847312Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.847466Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.847578Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.874510Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:10.034643Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:10.082401Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:10.082470Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:10.102504Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:10.103475Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:10.103643Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:10.103691Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:10.103729Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:10.103782Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:10.103825Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:10.103860Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:10.104326Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:10.104541Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:46:10.106510Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:10.114003Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:46:10.114087Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:46:10.114194Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:10.124260Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:10.132203Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:10.134944Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:10.137430Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:10.137545Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:10.158624Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:46:10.158889Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:46:10.258231Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:10.330344Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:10.540567Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:10.542749Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:10.546278Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:10.549513Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:10.549598Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... rd count = 1 2026-01-04T02:46:45.308807Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-04T02:46:45.308893Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037897, LocalPathId: 34] is column table. 2026-01-04T02:46:45.308946Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:46:45.315449Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-04T02:46:45.348066Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-04T02:46:45.348630Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-04T02:46:45.348710Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-04T02:46:45.349685Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2026-01-04T02:46:45.367636Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-04T02:46:45.367943Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-04T02:46:45.368911Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5537:4762], server id = [2:5541:4766], tablet id = 72075186224037899, status = OK 2026-01-04T02:46:45.369495Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5537:4762], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:45.370127Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5538:4763], server id = [2:5542:4767], tablet id = 72075186224037900, status = OK 2026-01-04T02:46:45.370197Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5538:4763], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:45.370421Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5539:4764], server id = [2:5543:4768], tablet id = 72075186224037901, status = OK 2026-01-04T02:46:45.370474Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5539:4764], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:45.372262Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5540:4765], server id = [2:5544:4769], tablet id = 72075186224037902, status = OK 2026-01-04T02:46:45.372329Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5540:4765], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:45.377123Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037899 2026-01-04T02:46:45.377628Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5537:4762], server id = [2:5541:4766], tablet id = 72075186224037899 2026-01-04T02:46:45.377679Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:45.379902Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-04T02:46:45.380719Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5538:4763], server id = [2:5542:4767], tablet id = 72075186224037900 2026-01-04T02:46:45.380758Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:45.380971Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037901 2026-01-04T02:46:45.381430Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5539:4764], server id = [2:5543:4768], tablet id = 72075186224037901 2026-01-04T02:46:45.381464Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:45.381624Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037902 2026-01-04T02:46:45.381672Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-04T02:46:45.381880Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2026-01-04T02:46:45.382291Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5540:4765], server id = [2:5544:4769], tablet id = 72075186224037902 2026-01-04T02:46:45.382324Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:45.410649Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-04T02:46:45.410901Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 3, current Round: 0 2026-01-04T02:46:45.972606Z node 2 :STATISTICS DEBUG: service_impl.cpp:254: Event round 2 is different from the current 3 2026-01-04T02:46:45.972677Z node 2 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-04T02:46:48.698203Z node 2 :STATISTICS INFO: service_impl.cpp:418: Node 3 is unavailable 2026-01-04T02:46:48.698320Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-04T02:46:48.698431Z node 2 :STATISTICS DEBUG: service_impl.cpp:254: Event round 3 is different from the current 0 2026-01-04T02:46:48.698462Z node 2 :STATISTICS DEBUG: service_impl.cpp:381: Skip TEvDispatchKeepAlive 2026-01-04T02:46:48.698548Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-04T02:46:48.698620Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-04T02:46:48.699243Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. Node count = 1 2026-01-04T02:46:48.713946Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-04T02:46:48.714154Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 4, current Round: 0 2026-01-04T02:46:48.714740Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:5661:4823], server id = [2:5662:4824], tablet id = 72075186224037900, status = OK 2026-01-04T02:46:48.714838Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:5661:4823], path = { OwnerId: 72075186224037897 LocalId: 34 } 2026-01-04T02:46:48.716164Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037900 2026-01-04T02:46:48.716236Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-04T02:46:48.716369Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 2026-01-04T02:46:48.716512Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-04T02:46:48.716938Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5664:4826], ActorId: [2:5665:4827], Starting query actor #1 [2:5666:4828] 2026-01-04T02:46:48.717003Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5665:4827], ActorId: [2:5666:4828], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:46:48.719617Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:5661:4823], server id = [2:5662:4824], tablet id = 72075186224037900 2026-01-04T02:46:48.719661Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:48.720448Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5665:4827], ActorId: [2:5666:4828], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YjQ4NDM4NmYtNGI4ZWEwYTgtYWRlMzBhZTItODMwYTliMDU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:46:48.833870Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5675:4837]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:48.834173Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:46:48.834217Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5675:4837], StatRequests.size() = 1 2026-01-04T02:46:48.973617Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5665:4827], ActorId: [2:5666:4828], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjQ4NDM4NmYtNGI4ZWEwYTgtYWRlMzBhZTItODMwYTliMDU=, TxId: 2026-01-04T02:46:48.973693Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5665:4827], ActorId: [2:5666:4828], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjQ4NDM4NmYtNGI4ZWEwYTgtYWRlMzBhZTItODMwYTliMDU=, TxId: 2026-01-04T02:46:48.974088Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5664:4826], ActorId: [2:5665:4827], Got response [2:5666:4828] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-04T02:46:48.974420Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5689:4843]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:48.974893Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:46:48.975310Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:46:48.975363Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:46:48.976116Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:46:48.976168Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:46:48.976233Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:46:48.981438Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 probe = 4 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> KqpCost::OlapRangeFullScan [GOOD] >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-dbadmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::AAARangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 15564, MsgBus: 25545 2026-01-04T02:46:45.318770Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332383348128291:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:45.319488Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:45.516230Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:45.528341Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:45.528474Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:45.581536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:45.626990Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:45.628263Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332383348128263:2081] 1767494805316993 != 1767494805316996 2026-01-04T02:46:45.673607Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-01-04T02:46:45.673636Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:45.673644Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:45.673736Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:45.791791Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:46.086054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:46.140804Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.294357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.385023Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:46.434795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.498585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.093726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332396233032031:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:48.093901Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:48.094273Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332396233032041:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:48.094353Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:48.367476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.393758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.420688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.447666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.473541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.503073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.562438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.605529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.675399Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332396233032912:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:48.675495Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:48.675778Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332396233032918:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:48.675803Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332396233032917:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:48.675833Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:48.678830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:48.687902Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332396233032921:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:48.769898Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332396233032972:3779] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:50.319436Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332383348128291:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:50.319532Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; {"Plan":{"Plans":[{"PlanNodeId":5,"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["Test"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Reverse":false,"Name":"TableFullScan","Inputs":[],"Path":"\/Root\/Test","ReadRangesPointPrefixLen":"0","E-Rows":"0","Table":"Test","ReadColumns":["Group (-∞, + ... deType":"Query"}} query_phases { duration_us: 5678 table_access { name: "/Root/Test" reads { rows: 1 bytes: 20 } partitions_count: 1 } cpu_time_us: 4909 affected_shards: 1 } compilation { duration_us: 232713 cpu_time_us: 228488 } process_cpu_time_us: 351 query_plan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"Test\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Reverse\":false,\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/Test\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"Test\",\"ReadColumns\":[\"Group (-\342\210\236, +\342\210\236)\",\"Name (-\342\210\236, +\342\210\236)\",\"Amount\",\"Comment\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Inputs\":[{\"InternalOperatorId\":1}],\"Name\":\"Limit\",\"Limit\":\"1\"},{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"E-Rows\":\"0\",\"Predicate\":\"item.Amount \\u003C 5000\",\"Name\":\"Filter\",\"E-Size\":\"0\",\"E-Cost\":\"0\"}],\"Node Type\":\"Limit-Filter\",\"Stats\":{\"UseLlvm\":\"undefined\",\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[3,19]}},\"Name\":\"4\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1513,\"Max\":1513,\"Min\":1513,\"History\":[3,1513]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":2,\"Min\":1}}}],\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[3,1048576]},\"Introspections\":[\"1 tasks default for source 
scan\"],\"Tasks\":1,\"OutputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"FinishedTasks\":1,\"IngressRows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"PhysicalStageId\":0,\"Mkql\":{},\"StageDurationUs\":0,\"Table\":[{\"Path\":\"\\/Root\\/Test\",\"ReadRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ReadBytes\":{\"Count\":1,\"Sum\":20,\"Max\":20,\"Min\":20}}],\"BaseTimeMs\":1767494810528,\"OutputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"CpuTimeUs\":{\"Count\":1,\"Sum\":1048,\"Max\":1048,\"Min\":1048,\"History\":[2,1048]},\"Ingress\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":192,\"Max\":192,\"Min\":192,\"History\":[3,192]}},\"External\":{},\"Name\":\"KqpReadRangesSource\",\"Ingress\":{},\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":3,\"Max\":3,\"Min\":3},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":192,\"Max\":192,\"Min\":192,\"History\":[3,192]},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1576,\"Max\":1576,\"Min\":1576,\"History\":[3,1576]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"UpdateTimeMs\":2}}],\"Node Type\":\"Merge\",\"SortColumns\":[\"Group (Asc)\"],\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\",\"Stats\":{\"UseLlvm\":\"undefined\",\"OutputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"PhysicalStageId\":1,\"FinishedTasks\":1,\"InputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"Introspections\":[\"1 minimum tasks for 
compute\"],\"DurationUs\":{\"Count\":1,\"Sum\":1000,\"Max\":1000,\"Min\":1000},\"Mkql\":{},\"MaxMemoryUsage\":{\"Count\":1,\"Sum\":1048576,\"Max\":1048576,\"Min\":1048576,\"History\":[3,1048576]},\"BaseTimeMs\":1767494810528,\"Output\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[3,19]}},\"Name\":\"RESULT\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitTimeUs\":{\"Count\":1,\"Sum\":1001,\"Max\":1001,\"Min\":1001,\"History\":[3,1001]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":2,\"Min\":1}}}],\"CpuTimeUs\":{\"Count\":1,\"Sum\":626,\"Max\":626,\"Min\":626,\"History\":[3,626]},\"StageDurationUs\":1000,\"ResultRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResultBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"OutputBytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19},\"Input\":[{\"Pop\":{\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[3,19]}},\"Name\":\"2\",\"Push\":{\"LastMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Rows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"Chunks\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"ResumeMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"FirstMessageMs\":{\"Count\":1,\"Sum\":2,\"Max\":2,\"Min\":2},\"Bytes\":{\"Count\":1,\"Sum\":19,\"Max\":19,\"Min\":19,\"History\":[3,19]},\"PauseMessageMs\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitTimeUs\":{\"Count\":1,\"Sum\":934,\"Max\":934,\"Min\":934,\"History\":[3,934]},\"WaitPeriods\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1},\"WaitMessageMs\":{\"Count\":1,\"Max\":2,\"Min\":1}}}],\"UpdateTimeMs\":3,\"Tasks\":1,\"InputRows\":{\"Count\":1,\"Sum\":1,\"Max\":1,\"Min\":1}}}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"Compilation\":{\"FromCache\":false,\"DurationUs\":232713,\"CpuTimeUs\":228488},\"ProcessCpuTimeUs\":351,\"TotalDurationUs\":244424,\"ResourcePoolId\":\"default\",\"QueuedTimeUs\":384},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":6,\"Operators\":[{\"E-Size\":\"0\",\"Reverse\":false,\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"Test\",\"ReadColumns\":[\"Group (-\342\210\236, +\342\210\236)\",\"Name (-\342\210\236, +\342\210\236)\",\"Amount\",\"Comment\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"E-Rows\":\"0\",\"Predicate\":\"item.Amount \\u003C 5000\",\"Name\":\"Filter\",\"E-Size\":\"0\",\"E-Cost\":\"0\"}],\"Node 
Type\":\"Filter\"}],\"Operators\":[{\"A-Rows\":1,\"A-SelfCpu\":1.048,\"A-Cpu\":1.048,\"A-Size\":19,\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"A-Rows\":1,\"A-SelfCpu\":0.626,\"A-Cpu\":1.674,\"A-Size\":19,\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"PlanNodeType\":\"Query\"}}" query_ast: "(\n(let $1 (KqpTable \'\"/Root/Test\" \'\"72057594046644480:45\" \'\"\" \'1))\n(let $2 \'(\'\"Amount\" \'\"Comment\" \'\"Group\" \'\"Name\"))\n(let $3 (KqpRowsSourceSettings $1 $2 \'(\'(\'\"Sorted\")) (Void) \'()))\n(let $4 (Uint64 \'1))\n(let $5 (OptionalType (DataType \'String)))\n(let $6 (StructType \'(\'\"Amount\" (OptionalType (DataType \'Uint64))) \'(\'\"Comment\" $5) \'(\'\"Group\" (OptionalType (DataType \'Uint32))) \'(\'\"Name\" $5)))\n(let $7 \'(\'(\'\"_logical_id\" \'560) \'(\'\"_id\" \'\"3fb91232-6f0ba0a2-6d1d2e20-baabc95e\") \'(\'\"_wide_channels\" $6)))\n(let $8 (DqPhyStage \'((DqSource (DataSource \'\"KqpReadRangesSource\") $3)) (lambda \'($12) (block \'(\n (let $13 (lambda \'($16) (block \'(\n (let $17 (Member $16 \'\"Amount\"))\n (return $17 (Member $16 \'\"Comment\") (Member $16 \'\"Group\") (Member $16 \'\"Name\") (Coalesce (< $17 (Uint64 \'\"5000\")) (Bool \'false)))\n ))))\n (let $14 (WideFilter (ExpandMap (ToFlow $12) $13) (lambda \'($18 $19 $20 $21 $22) $22) $4))\n (let $15 (lambda \'($23 $24 $25 $26 $27) $23 $24 $25 $26))\n (return (FromFlow (WideMap $14 $15)))\n))) $7))\n(let $9 (DqCnMerge (TDqOutput $8 \'0) \'(\'(\'\"2\" \'\"Asc\"))))\n(let $10 (DqPhyStage \'($9) (lambda \'($28) (FromFlow (NarrowMap (Take (ToFlow $28) $4) (lambda \'($29 $30 $31 $32) (AsStruct \'(\'\"Amount\" $29) \'(\'\"Comment\" $30) \'(\'\"Group\" $31) \'(\'\"Name\" $32)))))) \'(\'(\'\"_logical_id\" \'573) \'(\'\"_id\" \'\"46e33409-33debeb3-58bf4255-f2d4892\"))))\n(let $11 (DqCnResult (TDqOutput $10 \'0) \'()))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($8 $10) \'($11) \'() \'(\'(\'\"type\" \'\"data\")))) \'((KqpTxResultBinding (ListType $6) \'0 \'0)) \'(\'(\'\"type\" \'\"data_query\"))))\n)\n" total_duration_us: 244424 total_cpu_time_us: 233748 query_meta: 
"{\"query_database\":\"/Root\",\"query_parameter_types\":{},\"table_metadata\":[\"{\\\"DoesExist\\\":true,\\\"Cluster\\\":\\\"db\\\",\\\"Name\\\":\\\"/Root/Test\\\",\\\"SysView\\\":\\\"\\\",\\\"PathId\\\":{\\\"OwnerId\\\":72057594046644480,\\\"TableId\\\":45},\\\"SchemaVersion\\\":1,\\\"Kind\\\":1,\\\"Columns\\\":[{\\\"Name\\\":\\\"Amount\\\",\\\"Id\\\":3,\\\"Type\\\":\\\"Uint64\\\",\\\"TypeId\\\":4,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Comment\\\",\\\"Id\\\":4,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Group\\\",\\\"Id\\\":1,\\\"Type\\\":\\\"Uint32\\\",\\\"TypeId\\\":2,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}},{\\\"Name\\\":\\\"Name\\\",\\\"Id\\\":2,\\\"Type\\\":\\\"String\\\",\\\"TypeId\\\":4097,\\\"NotNull\\\":false,\\\"DefaultFromSequence\\\":\\\"\\\",\\\"DefaultKind\\\":0,\\\"DefaultFromLiteral\\\":{},\\\"IsBuildInProgress\\\":false,\\\"DefaultFromSequencePathId\\\":{\\\"OwnerId\\\":18446744073709551615,\\\"TableId\\\":18446744073709551615}}],\\\"KeyColunmNames\\\":[\\\"Group\\\",\\\"Name\\\"],\\\"RecordsCount\\\":0,\\\"DataSize\\\":0,\\\"StatsLoaded\\\":false}\"],\"table_meta_serialization_type\":2,\"created_at\":\"1767494810\",\"query_type\":\"QUERY_TYPE_SQL_DML\",\"query_syntax\":\"1\",\"query_cluster\":\"db\",\"query_id\":\"ad7c21ed-ebbff439-3ae28f31-b204f70c\",\"version\":\"1.0\"}" ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupJoin+StreamLookupJoin [GOOD] Test command err: Trying to start YDB, gRPC: 6370, MsgBus: 4146 2026-01-04T02:46:44.274181Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332380761402175:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:44.274512Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:44.493689Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:44.500470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:44.500578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:44.568420Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:44.571367Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:44.572935Z node 1 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332380761402063:2081] 1767494804253949 != 1767494804253952 2026-01-04T02:46:44.673018Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:44.673051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:44.673058Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:44.673148Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:44.728399Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:45.056763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:45.106280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:45.242779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:45.287585Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:45.375648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:45.436995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:47.376904Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332393646305825:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:47.377104Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:47.377516Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332393646305835:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:47.377623Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:47.617568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:47.647878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:47.677299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:47.706208Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:47.734707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:47.764928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:47.793160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:47.842420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:47.908027Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332393646306700:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:47.908091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:47.908096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332393646306705:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:47.908254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332393646306707:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:47.908297Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:47.911144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:47.920015Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332393646306708:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:48.019291Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332397941274056:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:49.272988Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332380761402175:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:49.273081Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:49.649098Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:49.679222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:49.733650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) /Root/Join1_2 1 19 /Root/Join1_1 8 136 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::Range [GOOD] Test command err: Trying to start YDB, gRPC: 29038, MsgBus: 18698 2026-01-04T02:46:45.677412Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332387323831273:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:45.677889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:45.893016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:45.893158Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:45.941630Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:45.952934Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:45.953200Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded 2026-01-04T02:46:45.954647Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332387323831169:2081] 1767494805672920 != 1767494805672923 2026-01-04T02:46:46.038890Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:46.038911Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:46.038918Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:46.039018Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:46.156378Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:46.415449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:46.420797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:46:46.466963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.617744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.726796Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:46.769599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.831627Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.495541Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332400208734931:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:48.495614Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:48.495932Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332400208734941:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:48.495962Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:48.821790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.852253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.879702Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.906978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.936071Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.967219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.998419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:49.039900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:49.102952Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332404503703109:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:49.103026Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:49.103218Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332404503703114:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:49.103310Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332404503703115:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:49.103367Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:49.107278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:49.117843Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332404503703118:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:49.203351Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332404503703169:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:50.676035Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332387323831273:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:50.676108Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRangeFullScan [GOOD] Test command err: Trying to start YDB, gRPC: 65105, MsgBus: 64454 2026-01-04T02:46:44.831389Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332380667280895:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:44.831466Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:45.072803Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:45.076511Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:45.076617Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:45.143811Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:45.178798Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332380667280866:2081] 1767494804829826 != 1767494804829829 2026-01-04T02:46:45.183840Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:45.230517Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:45.230570Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:45.230587Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:45.230682Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:45.238010Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:45.605430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:45.672428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:45.795148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:45.908147Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:45.953630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.018294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:47.907292Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332393552184621:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:47.907462Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:47.907873Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332393552184631:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:47.907948Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:48.270330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.299459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.328522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.356623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.383426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.415862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.471974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.517307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.590210Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332397847152802:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:48.590306Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:48.590489Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332397847152807:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:48.590522Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332397847152809:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:48.590556Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:48.594820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:48.605562Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332397847152811:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:46:48.710221Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332397847152862:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:49.831640Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332380667280895:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:49.831716Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:50.160764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/creat ... =abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=RestoreAppearanceSnapshot;id=RestoreAppearanceSnapshot; 2026-01-04T02:46:50.483154Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=RestoreAppearanceSnapshot;id=20; 2026-01-04T02:46:50.483173Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;self_id=[1:7591332406437087869:2537];ev=NActors::IEventHandle;tablet_id=72075186224037933;tx_id=281474976715673;this=136833142869696;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1767494810482;max=18446744073709551615;plan=0;src=[1:7591332380667281213:2143];cookie=462:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.483183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;process=TTxUpdateSchema::Execute;fline=abstract.cpp:45;event=normalization_finished; 2026-01-04T02:46:50.485562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;self_id=[1:7591332406437087848:2533];ev=NActors::IEventHandle;tablet_id=72075186224037930;tx_id=281474976715673;this=136833142871488;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1767494810485;max=18446744073709551615;plan=0;src=[1:7591332380667281213:2143];cookie=432:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.490964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;self_id=[1:7591332406437087850:2535];ev=NActors::IEventHandle;tablet_id=72075186224037935;tx_id=281474976715673;this=136833142874624;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1767494810490;max=18446744073709551615;plan=0;src=[1:7591332380667281213:2143];cookie=482:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.493625Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037928;self_id=[1:7591332406437087846:2531];ev=NActors::IEventHandle;tablet_id=72075186224037928;tx_id=281474976715673;this=136833142879552;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1767494810493;max=18446744073709551615;plan=0;src=[1:7591332380667281213:2143];cookie=412:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.494474Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7591332406437087877:2538];ev=NActors::IEventHandle;tablet_id=72075186224037931;tx_id=281474976715673;this=136833142887616;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1767494810494;max=18446744073709551615;plan=0;src=[1:7591332380667281213:2143];cookie=442:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.497564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;self_id=[1:7591332406437087882:2539];ev=NActors::IEventHandle;tablet_id=72075186224037929;tx_id=281474976715673;this=136833143001184;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1767494810497;max=18446744073709551615;plan=0;src=[1:7591332380667281213:2143];cookie=422:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.514004Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;self_id=[1:7591332406437087868:2536];ev=NActors::IEventHandle;tablet_id=72075186224037932;tx_id=281474976715673;this=136833142883136;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1767494810513;max=18446744073709551615;plan=0;src=[1:7591332380667281213:2143];cookie=452:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.517926Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.517931Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.518009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.518036Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.518038Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.518051Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.538134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.538195Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.538209Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.538274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.538318Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.538340Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.543974Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.544030Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.544042Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.545777Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.545846Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.545862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.549900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.549957Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.549971Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.553203Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.553260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.553274Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.555614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.555663Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.555677Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.561060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.561122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:50.561137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; query_phases { duration_us: 245112 table_access { name: "/Root/TestTable" reads { rows: 3 bytes: 108 } } cpu_time_us: 118225 } compilation { duration_us: 376743 cpu_time_us: 369300 } process_cpu_time_us: 390 total_duration_us: 624319 total_cpu_time_us: 487915 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> ColumnBuildTest::ValidDefaultValue >> ColumnBuildTest::BaseCase >> ColumnBuildTest::Applying_Failed >> ColumnBuildTest::DoNotRestoreDeletedRows >> ColumnBuildTest::Rejecting >> ColumnBuildTest::AlterMainTable_Failed >> ColumnBuildTest::Cancelling >> ColumnBuildTest::Filling_Failed >> ColumnBuildTest::Locking_Failed >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> TraverseColumnShard::TraverseServerlessColumnTable [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:46:08.087902Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:08.205529Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:08.213480Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:08.213833Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:08.213977Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:08.579613Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:08.670409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:08.670535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:08.716915Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:08.787664Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:09.415087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:09.415633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:09.415679Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:09.415709Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:09.416080Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:09.478556Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:10.040095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:12.410424Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:12.415464Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:12.418757Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:12.445000Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:12.445089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:12.472221Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:12.474320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:12.681236Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:12.681358Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:12.682971Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.683647Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.684401Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.685457Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.685714Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.685892Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.686189Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.686339Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.686455Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.702313Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:12.919923Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:12.952986Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:12.953082Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:12.981597Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:13.018714Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:13.018892Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:13.018940Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:13.018990Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:13.019039Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:13.019076Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:13.019116Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:13.019689Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:13.048497Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:13.048608Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2122:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:13.068047Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2169:2624] 2026-01-04T02:46:13.068975Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2169:2624], schemeshard id = 72075186224037897 2026-01-04T02:46:13.121676Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2227:2653] 2026-01-04T02:46:13.125872Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-04T02:46:13.145034Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2246:2665] Owner: [2:2245:2664]. Describe result: PathErrorUnknown 2026-01-04T02:46:13.145086Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2246:2665] Owner: [2:2245:2664]. Creating table 2026-01-04T02:46:13.145155Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2246:2665] Owner: [2:2245:2664]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:46:13.150343Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2311:2694], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:13.153316Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:13.158657Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2246:2665] Owner: [2:2245:2664]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:13.158761Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2246:2665] Owner: [2:2245:2664]. Subscribe on create table tx: 281474976720657 2026-01-04T02:46:13.167322Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2246:2665] Owner: [2:2245:2664]. 
Subscribe on tx: 281474976720657 registered 2026-01-04T02:46:13.322241Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-04T02:46:13.335342Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:13.656754Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2246:2665] Owner: [2:2245:2664]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-04T02:46:13.765517Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2246:2665] Owner: [2:2245:2664]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:46:13.765598Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2246:2665] Owner: [2:2245:2664]. Column diff is empty, finishing 2026-01-04T02:46:14.297356Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026- ... = 1 2026-01-04T02:46:49.746637Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:46:49.746729Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:6209:5137], StatRequests.size() = 1 2026-01-04T02:46:49.746841Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:46:49.918112Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:6199:5127], ActorId: [2:6200:5128], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZDViMjM5NGMtZTAyOTMwNmUtNGYyZWI5MjYtZDBiNGVlNDY=, TxId: 2026-01-04T02:46:49.918212Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6199:5127], ActorId: [2:6200:5128], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDViMjM5NGMtZTAyOTMwNmUtNGYyZWI5MjYtZDBiNGVlNDY=, TxId: 2026-01-04T02:46:49.918711Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:6198:5126], ActorId: [2:6199:5127], Got response [2:6200:5128] SUCCESS 2026-01-04T02:46:49.919092Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:46:49.935393Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete background traversal for path [OwnerId: 72075186224037897, LocalPathId: 33] 2026-01-04T02:46:49.935471Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:41: [72075186224037894] TTxFinishTraversal::Complete. No ActorId to send reply. 
2026-01-04T02:46:50.027688Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-04T02:46:50.027815Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:46:50.121885Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:6211:5139], schemeshard count = 1 2026-01-04T02:46:50.581417Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 1, at schemeshard: 72075186224037899 2026-01-04T02:46:50.581496Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.511000s, at schemeshard: 72075186224037899 2026-01-04T02:46:50.581863Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 25, entries count: 1, are all stats full: 0 2026-01-04T02:46:50.597679Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:46:51.240869Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:695: [72075186224037894] ScheduleNextBackgroundTraversal 2026-01-04T02:46:51.240972Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:826: [72075186224037894] IsColumnTable. Path [OwnerId: 72075186224037899, LocalPathId: 32] is column table. 2026-01-04T02:46:51.241019Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:738: [72075186224037894] Start background traversal navigate for path [OwnerId: 72075186224037899, LocalPathId: 32] 2026-01-04T02:46:51.245973Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:19: [72075186224037894] TTxNavigate::Execute 2026-01-04T02:46:51.277762Z node 2 :STATISTICS DEBUG: tx_navigate.cpp:72: [72075186224037894] TTxNavigate::Complete 2026-01-04T02:46:51.278365Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:21: [72075186224037894] TTxResolve::Execute 2026-01-04T02:46:51.278434Z node 2 :STATISTICS DEBUG: tx_resolve.cpp:71: [72075186224037894] TTxResolve::Complete 2026-01-04T02:46:51.279286Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:56: [72075186224037894] TTxResponseTabletDistribution::Execute. 
Node count = 1 2026-01-04T02:46:51.304699Z node 2 :STATISTICS DEBUG: tx_response_tablet_distribution.cpp:91: [72075186224037894] TTxResponseTabletDistribution::Complete 2026-01-04T02:46:51.304936Z node 2 :STATISTICS DEBUG: service_impl.cpp:590: Received TEvAggregateStatistics from node: 2, Round: 2, current Round: 0 2026-01-04T02:46:51.305566Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6296:5184], server id = [2:6300:5188], tablet id = 72075186224037905, status = OK 2026-01-04T02:46:51.305906Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:6296:5184], path = { OwnerId: 72075186224037899 LocalId: 32 } 2026-01-04T02:46:51.307145Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6297:5185], server id = [2:6301:5189], tablet id = 72075186224037906, status = OK 2026-01-04T02:46:51.307211Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:6297:5185], path = { OwnerId: 72075186224037899 LocalId: 32 } 2026-01-04T02:46:51.307509Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6298:5186], server id = [2:6303:5191], tablet id = 72075186224037907, status = OK 2026-01-04T02:46:51.307556Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:6298:5186], path = { OwnerId: 72075186224037899 LocalId: 32 } 2026-01-04T02:46:51.308351Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:6299:5187], server id = [2:6302:5190], tablet id = 72075186224037908, status = OK 2026-01-04T02:46:51.308404Z node 2 :STATISTICS DEBUG: service_impl.cpp:1061: TEvStatisticsRequest send, client id = [2:6299:5187], path = { OwnerId: 72075186224037899 LocalId: 32 } 2026-01-04T02:46:51.313302Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037905 2026-01-04T02:46:51.313873Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:6296:5184], server id = [2:6300:5188], tablet id = 72075186224037905 2026-01-04T02:46:51.313923Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:51.314480Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037906 2026-01-04T02:46:51.314884Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:6297:5185], server id = [2:6301:5189], tablet id = 72075186224037906 2026-01-04T02:46:51.314916Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:51.315418Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037907 2026-01-04T02:46:51.315621Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:6298:5186], server id = [2:6303:5191], tablet id = 72075186224037907 2026-01-04T02:46:51.315646Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:51.316020Z node 2 :STATISTICS DEBUG: service_impl.cpp:319: Received TEvStatisticsResponse TabletId: 72075186224037908 2026-01-04T02:46:51.316057Z node 2 :STATISTICS DEBUG: service_impl.cpp:504: Send aggregate statistics response to node: 2 2026-01-04T02:46:51.316187Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:27: [72075186224037894] TTxAggregateStatisticsResponse::Execute 
2026-01-04T02:46:51.316303Z node 2 :STATISTICS DEBUG: tx_aggr_stat_response.cpp:119: [72075186224037894] TTxAggregateStatisticsResponse::Complete 2026-01-04T02:46:51.316687Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:6316:5200], ActorId: [2:6317:5201], Starting query actor #1 [2:6318:5202] 2026-01-04T02:46:51.316747Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:6317:5201], ActorId: [2:6318:5202], Bootstrap. Database: /Root/Shared, IsSystemUser: 1, run create session 2026-01-04T02:46:51.318542Z node 2 :STATISTICS DEBUG: service_impl.cpp:1127: EvClientDestroyed, node id = 2, client id = [2:6299:5187], server id = [2:6302:5190], tablet id = 72075186224037908 2026-01-04T02:46:51.318580Z node 2 :STATISTICS DEBUG: service_impl.cpp:1145: Skip EvClientDestroyed 2026-01-04T02:46:51.319232Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:6317:5201], ActorId: [2:6318:5202], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=M2ExZmEzYzEtMWFmODU1NjYtZDgyYjU4MDUtNDczMTQyMjg=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:46:51.416079Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:6327:5211]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:51.416399Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:46:51.416446Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:6327:5211], StatRequests.size() = 1 2026-01-04T02:46:51.563995Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:6317:5201], ActorId: [2:6318:5202], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=M2ExZmEzYzEtMWFmODU1NjYtZDgyYjU4MDUtNDczMTQyMjg=, TxId: 2026-01-04T02:46:51.564076Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:6317:5201], ActorId: [2:6318:5202], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=M2ExZmEzYzEtMWFmODU1NjYtZDgyYjU4MDUtNDczMTQyMjg=, TxId: 2026-01-04T02:46:51.564485Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:6316:5200], ActorId: [2:6317:5201], Got response [2:6318:5202] SUCCESS ... 
waiting for NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse (done) 2026-01-04T02:46:51.564954Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:6341:5217]], StatType[ 2 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:51.565179Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:46:51.565765Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:46:51.565849Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:46:51.593312Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:46:51.593380Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 3 ], Database[ Root/Shared ], TablePath[ /Root/Shared/.metadata/_statistics ] 2026-01-04T02:46:51.593429Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037899, LocalPathId: 32] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:46:51.598050Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 3 >>> failedEstimatesCount = 0 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithPersistedLeftovers-DisableStatsBatching-false [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-EnablePersistentPartitionStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:46:49.872914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:46:49.873029Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:49.873072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:46:49.873103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:46:49.873139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:46:49.873165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:46:49.873211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 
1.000000s, InflightLimit# 10 2026-01-04T02:46:49.873275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:46:49.874064Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:46:49.874345Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:46:49.955362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:46:49.955422Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:49.966290Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:46:49.966488Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:46:49.966645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:46:49.973237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:46:49.973431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:46:49.974249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:46:49.974444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:46:49.976889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:49.977073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:46:49.977898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:46:49.977955Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:49.978040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:46:49.978082Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:46:49.978162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:46:49.978299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:46:50.106840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: 
ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:50.107760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:50.107926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:50.107997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:50.108064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:50.108129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:50.108192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:50.108249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:50.108334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:50.108446Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:50.108512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:50.108575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:50.108650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:50.108741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:50.108822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
1-04T02:46:53.719325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-01-04T02:46:53.719378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-04T02:46:53.719572Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 32] 2026-01-04T02:46:53.719745Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-04T02:46:53.719787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:854:2808], at schemeshard: 72075186233409546, txId: 104, path id: 1 2026-01-04T02:46:53.719830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:854:2808], at schemeshard: 72075186233409546, txId: 104, path id: 32 2026-01-04T02:46:53.720231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-04T02:46:53.720283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2026-01-04T02:46:53.720376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-04T02:46:53.720427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2026-01-04T02:46:53.720482Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2026-01-04T02:46:53.721485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2026-01-04T02:46:53.721578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 12 PathOwnerId: 72075186233409546, cookie: 104 2026-01-04T02:46:53.721615Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-01-04T02:46:53.721653Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 12 2026-01-04T02:46:53.721693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 6 2026-01-04T02:46:53.722514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 
LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-01-04T02:46:53.722593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-01-04T02:46:53.722620Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-01-04T02:46:53.722647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 18446744073709551615 2026-01-04T02:46:53.722677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 2026-01-04T02:46:53.722742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-01-04T02:46:53.725911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-04T02:46:53.725972Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2026-01-04T02:46:53.726279Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 3 2026-01-04T02:46:53.726461Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:46:53.726499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:46:53.726554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:46:53.726589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:46:53.726627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-01-04T02:46:53.726686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:999:2940] message: TxId: 104 2026-01-04T02:46:53.726733Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:46:53.726767Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-04T02:46:53.726801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-04T02:46:53.726931Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 2 2026-01-04T02:46:53.727298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 
72075186233409546 2026-01-04T02:46:53.727336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-04T02:46:53.728302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-01-04T02:46:53.728496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-01-04T02:46:53.729512Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-04T02:46:53.729555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:854:2808], at schemeshard: 72075186233409546, txId: 0, path id: 1 2026-01-04T02:46:53.729607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:46:53.729637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1425:3344] 2026-01-04T02:46:53.730228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 13 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2026-01-04T02:46:53.731583Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2026-01-04T02:46:53.731761Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 181us result status StatusSuccess 2026-01-04T02:46:53.732081Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 13 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 13 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 2 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976720657 CreateStep: 250 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" 
} PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |94.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::RetryableError |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::SystemObjects >> TargetDiscoverer::Negative >> TargetDiscoverer::InvalidCredentials >> TargetDiscoverer::Transfer >> TargetDiscoverer::Basic >> TargetDiscoverer::IndexedTable >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-dbadmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-clusteradmin >> KqpCost::PointLookup [GOOD] >> TargetDiscoverer::Dirs >> TVPatchTests::PatchPartFastXorDiffDisorder >> TVPatchTests::FindingPartsWhenSeveralPartsExist >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob >> TOosLogicTests::RenderHtml [GOOD] >> TVPatchTests::FindingPartsWhenError >> TVPatchTests::PatchPartFastXorDiffBeyoundBlob [GOOD] >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] >> TVPatchTests::FindingPartsWhenError [GOOD] >> ColumnBuildTest::AlterMainTable_Failed [GOOD] >> ColumnBuildTest::AlterMainTable_DisableFlag >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] >> TVPatchTests::FindingPartsWhenSeveralPartsExist [GOOD] >> TVPatchTests::FindingPartsWithTimeout >> TVPatchTests::PatchPartOk >> ColumnBuildTest::Locking_Failed [GOOD] >> ColumnBuildTest::Locking_DisableFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::PointLookup [GOOD] Test command err: Trying to start YDB, gRPC: 29824, MsgBus: 10419 2026-01-04T02:46:49.981209Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332401671638925:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:49.981310Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot 
detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:50.261198Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:50.261310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:50.262359Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:50.296618Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:50.303306Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332401671638896:2081] 1767494809979632 != 1767494809979635 2026-01-04T02:46:50.304558Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:50.379586Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:50.379642Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:50.379660Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:50.379808Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:50.491353Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:50.803048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:50.859093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:50.981887Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:50.989939Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:51.137011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:51.204224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:52.701119Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332414556542656:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:52.701267Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:52.701577Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332414556542666:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:52.701651Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:52.937074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:52.965543Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:52.993939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:53.021014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:53.047148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:53.075225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:53.125750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:53.163796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:53.222748Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332418851510831:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:53.222823Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:53.222888Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332418851510836:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:53.222927Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332418851510838:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:53.222971Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:53.226340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:53.236863Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332418851510840:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:53.305576Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332418851510891:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:54.981503Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332401671638925:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:54.981586Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> TVPatchTests::PatchPartGetError >> TVPatchTests::FindingPartsWithTimeout [GOOD] >> KqpCost::OlapRange [GOOD] |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TVPatchTests::PatchPartOk [GOOD] >> TVPatchTests::PatchPartGetError [GOOD] >> ColumnBuildTest::Filling_Failed [GOOD] >> ColumnBuildTest::Filling_DisableFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FullPatchTestXorDiffFasterVGetResult [GOOD] Test command err: Recv 65537 2026-01-04T02:46:56.412000Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-04T02:46:56.414355Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2026-01-04T02:46:56.414432Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2026-01-04T02:46:56.415547Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:675} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2026-01-04T02:46:56.416323Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-04T02:46:56.416523Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# The diff at index 0 went beyound the blob part; DiffStart# 100 DiffEnd# 96 BlobPartSize# 32 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartFastXorDiffDisorder [GOOD] Test command err: Recv 65537 2026-01-04T02:46:56.411974Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:100:0] Deadline# 1970-01-01T00:00:01.000000Z Send 
NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-04T02:46:56.414352Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:100:0] Status# OK ResultSize# 1 2026-01-04T02:46:56.414427Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:100:0] FoundParts# [5] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchXorDiff 2026-01-04T02:46:56.415541Z node 1 :BS_VDISK_PATCH INFO: {BSVSP13@skeleton_vpatch_actor.cpp:675} [0:1:0:0:0] TEvVPatch: received xor diff; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] FromPart# 4 ToPart# 0 HasBuffer# no ReceivedXorDiffCount# 1/0 Send NKikimr::TEvBlobStorage::TEvVPatchXorDiffResult 2026-01-04T02:46:56.416317Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-04T02:46:56.416504Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:100:0] PatchedBlobId# [1:3:3:4:6:100:0] OriginalPartId# 0 PatchedPartId# 0 Status# ERROR ErrorReason# [XorDiff from datapart] the start of the diff at index 0 righter than the start of the diff at index 1; PrevDiffStart# 2 DiffStart# 0 Send NKikimr::TEvBlobStorage::TEvVPatchResult Recv NKikimr::TEvVPatchDyingConfirm |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> ColumnBuildTest::Applying_Failed [GOOD] >> ColumnBuildTest::Applying_DisableFlag >> KqpCost::CTASWithRetry+isOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenError [GOOD] Test command err: Recv 65537 2026-01-04T02:46:56.438593Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-04T02:46:56.439537Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# ERROR ResultSize# 1 2026-01-04T02:46:56.439598Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2026-01-04T02:46:56.439680Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching-EnablePersistentPartitionStats ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWithTimeout [GOOD] Test command err: Recv 65537 2026-01-04T02:46:56.411984Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; 
OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-04T02:46:56.414356Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-01-04T02:46:56.414431Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1 2] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-04T02:46:56.415536Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2026-01-04T02:46:56.416271Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2026-01-04T02:46:56.416388Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm Recv 65537 2026-01-04T02:46:56.680284Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NActors::TEvents::TEvWakeup 2026-01-04T02:46:56.690828Z node 2 :BS_VDISK_PATCH ERROR: {BSVSP11@skeleton_vpatch_actor.cpp:735} [0:1:0:0:0] TEvVPatch: the vpatch actor died due to a deadline, before receiving diff; 2026-01-04T02:46:56.690960Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2026-01-04T02:46:56.691070Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-EnablePersistentPartitionStats [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartOk [GOOD] Test command err: Recv 65537 2026-01-04T02:46:56.835246Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-04T02:46:56.836112Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-01-04T02:46:56.836157Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-04T02:46:56.836294Z node 1 
:BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 2026-01-04T02:46:56.836365Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-04T02:46:56.836561Z node 1 :BS_VDISK_PATCH INFO: {BSVSP08@skeleton_vpatch_actor.cpp:383} [0:1:0:0:0] TEvVPatch: received part data; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 DataParts# 4 ReceivedBlobId# [1:2:3:4:6:10:1] Status# OK ResultSize# 1 ParityPart# no 2026-01-04T02:46:56.836625Z node 1 :BS_VDISK_PATCH INFO: {BSVSP14@skeleton_vpatch_actor.cpp:462} [0:1:0:0:0] TEvVPatch: send xor diffs; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorDiffCount# 0 2026-01-04T02:46:56.837868Z node 1 :BS_VDISK_PATCH INFO: {BSVSP15@skeleton_vpatch_actor.cpp:502} [0:1:0:0:0] TEvVPatch: send vPut; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 ReceivedXorDiffs# 0 ExpectedXorDiffs# 0 Send NKikimr::TEvBlobStorage::TEvVPut Recv NKikimr::TEvBlobStorage::TEvVPutResult 2026-01-04T02:46:56.838967Z node 1 :BS_VDISK_PATCH INFO: {BSVSP10@skeleton_vpatch_actor.cpp:628} [0:1:0:0:0] TEvVPatch: received put result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK 2026-01-04T02:46:56.839027Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2026-01-04T02:46:56.839092Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::PatchPartGetError [GOOD] Test command err: Recv 65537 2026-01-04T02:46:56.950384Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-04T02:46:56.951497Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-01-04T02:46:56.951567Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-04T02:46:56.951790Z node 1 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# no 
2026-01-04T02:46:56.951848Z node 1 :BS_VDISK_PATCH INFO: {BSVSP05@skeleton_vpatch_actor.cpp:246} [0:1:0:0:0] TEvVPatch: send vGet for pulling part data; OriginalBlobId# [1:2:3:4:6:10:0] PullingPart# 1 Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-04T02:46:56.952084Z node 1 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: send patch result; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# ERROR ErrorReason# Recieve not OK status from VGetResult, received status# ERROR Send NKikimr::TEvBlobStorage::TEvVPatchResult 2026-01-04T02:46:56.952152Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenPartsAreDontExist >> KqpCost::IndexLookupAndTake-useSink [GOOD] >> TVPatchTests::FindingPartsWhenPartsAreDontExist [GOOD] >> TVPatchTests::FindingPartsWhenOnlyOnePartExists >> TargetDiscoverer::Negative [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007D46774AF900 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:46:49.678264Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:46:49.678332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:49.678363Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:46:49.678390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:46:49.678418Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:46:49.678456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:46:49.678501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:49.678555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:46:49.679309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: 
HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:46:49.679548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:46:49.751871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:46:49.751934Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:49.762913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:46:49.763089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:46:49.763258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:46:49.770423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:46:49.770665Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:46:49.771178Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:46:49.771439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:46:49.773952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:49.774139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:46:49.775094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:46:49.775146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:49.775222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:46:49.775255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:46:49.775337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:46:49.775499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:46:49.897450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.898453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView 
Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.898577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.898639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.898722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.898821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.898901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.898962Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.899020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:46:49.899085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.899165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.899257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.899321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.899400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.899518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by ... 
ten 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 Leader for TabletID 72057594046678944 is [2:1018:2977] sender: [2:1077:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 2026-01-04T02:46:57.101251Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 
TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 2026-01-04T02:46:57.143031Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 0 2026-01-04T02:46:57.143195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-01-04T02:46:57.143246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Store, is column=0, is olap=1, RowCount 100000, DataSize 1263320 2026-01-04T02:46:57.143319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:451: OLAP store contains 1 tables. 2026-01-04T02:46:57.143370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:473: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007D46775E0900, stats written 1 2026-01-04T02:46:57.143848Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Store" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:46:57.144044Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Store" took 251us result status StatusSuccess 2026-01-04T02:46:57.144656Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Store" PathDescription { Self { Name: "Store" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } ChildrenExist: true } Children { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 165 LastUpdateTime: 165 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "Store" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::OlapRange [GOOD] Test command err: Trying to start YDB, gRPC: 24244, MsgBus: 7224 2026-01-04T02:46:50.049032Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332405737753572:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:50.051538Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:50.291599Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:50.303109Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:50.303214Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:50.372544Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:50.379710Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:50.380937Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332405737753544:2081] 1767494810045296 != 1767494810045299 2026-01-04T02:46:50.457727Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:50.457747Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:50.457752Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:50.457836Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:50.488696Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:50.866878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:50.909447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:51.044689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:51.099756Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:51.173910Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:51.227577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:52.725472Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332414327690006:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:52.725621Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:52.725984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332414327690016:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:52.726060Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:52.963178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:52.989776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:53.016548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:53.043456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:53.071483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:53.103043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:53.154073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:53.192007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:53.257866Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332418622658189:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:53.257950Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:53.258043Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332418622658194:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:53.258107Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332418622658195:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:53.258188Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:53.261562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:53.270567Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332418622658198:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:46:53.355613Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332418622658249:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:55.049095Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332405737753572:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:55.049184Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:55.084311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create ... d=72075186224037935;tx_id=281474976715673;this=136501449454112;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1767494815373;max=18446744073709551615;plan=0;src=[1:7591332405737753892:2145];cookie=482:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.376594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;self_id=[1:7591332427212593262:2536];ev=NActors::IEventHandle;tablet_id=72075186224037936;tx_id=281474976715673;this=136501449459040;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1767494815376;max=18446744073709551615;plan=0;src=[1:7591332405737753892:2145];cookie=492:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.377060Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;self_id=[1:7591332427212593242:2532];ev=NActors::IEventHandle;tablet_id=72075186224037928;tx_id=281474976715673;this=136501449541024;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1767494815376;max=18446744073709551615;plan=0;src=[1:7591332405737753892:2145];cookie=412:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.380287Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;self_id=[1:7591332427212593282:2538];ev=NActors::IEventHandle;tablet_id=72075186224037931;tx_id=281474976715673;this=136501449533408;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1767494815379;max=18446744073709551615;plan=0;src=[1:7591332405737753892:2145];cookie=442:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.380967Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037930;self_id=[1:7591332427212593243:2533];ev=NActors::IEventHandle;tablet_id=72075186224037930;tx_id=281474976715673;this=136501449549984;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1767494815380;max=18446744073709551615;plan=0;src=[1:7591332405737753892:2145];cookie=432:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.381599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;self_id=[1:7591332427212593241:2531];ev=NActors::IEventHandle;tablet_id=72075186224037927;tx_id=281474976715673;this=136501449539680;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1767494815381;max=18446744073709551615;plan=0;src=[1:7591332405737753892:2145];cookie=402:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.384973Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;self_id=[1:7591332427212593283:2539];ev=NActors::IEventHandle;tablet_id=72075186224037929;tx_id=281474976715673;this=136501449550432;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1767494815384;max=18446744073709551615;plan=0;src=[1:7591332405737753892:2145];cookie=422:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.385564Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;self_id=[1:7591332427212593250:2534];ev=NActors::IEventHandle;tablet_id=72075186224037934;tx_id=281474976715673;this=136501449553792;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1767494815385;max=18446744073709551615;plan=0;src=[1:7591332405737753892:2145];cookie=472:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.412683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;self_id=[1:7591332427212593269:2537];ev=NActors::IEventHandle;tablet_id=72075186224037933;tx_id=281474976715673;this=136501456762112;method=TTxController::StartProposeOnExecute;tx_info=281474976715673:TX_KIND_SCHEMA;min=1767494815412;max=18446744073709551615;plan=0;src=[1:7591332405737753892:2145];cookie=462:13;;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.417366Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.417439Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.417443Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.417464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037928;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.417486Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.417499Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.444263Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.444311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.444324Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037934;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.444493Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.444524Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.444532Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.451645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.451696Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.451711Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.453740Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.453770Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.453780Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037929;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.459064Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.459109Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.459122Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037933;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.460383Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.460445Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.460464Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.466333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.466377Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.466392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037932;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.467241Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.467286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; 2026-01-04T02:46:55.467299Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037931;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715673;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=54;result=not_found; |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TargetDiscoverer::RetryableError [GOOD] >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TargetDiscoverer::Transfer [GOOD] >> TargetDiscoverer::InvalidCredentials [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::CTASWithRetry+isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 2522, MsgBus: 22416 ... 
waiting for SysViewsRoster update finished 2026-01-04T02:46:41.653775Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:41.787330Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:41.787953Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:41.788013Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:42.092515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:42.092638Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:42.176421Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494799065770 != 1767494799065773 2026-01-04T02:46:42.190148Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:42.237478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:42.371788Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:42.708127Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:42.708193Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:42.708236Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:42.708432Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:42.720748Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:43.029190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:43.128993Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:43.375508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:43.720159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.011506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.769721Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1902:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.769902Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.770775Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1975:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.770868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:44.802598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:45.005003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:45.250439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:45.530207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:45.773261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.037122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.325450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.623830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.987283Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2787:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.987409Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.987831Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2791:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.987898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.987968Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.992846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:47.144892Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2796:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:46:47.193996Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2856:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:48.972154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:46:49.161903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037927;self_id=[1:3283:4551];tablet_id=720751862240379 ... nt type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.192003Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037995 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.192083Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037994 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.192153Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037993 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.192214Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037992 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.192274Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037991 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.192333Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037990 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.192392Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037989 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.192451Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037988 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.192509Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037987 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.192582Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037986 unhandled event type: 
NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.192649Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037985 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.192712Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037984 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.192772Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037983 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.192832Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037982 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.192891Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037981 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.192953Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037980 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.193012Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037979 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.193087Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037978 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.193155Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037977 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.193216Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037976 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.193277Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037975 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.193339Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037974 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.193401Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037973 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.193465Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: 
TColumnShard.StateWork at 72075186224037971 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.193525Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037970 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.193605Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037969 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.193674Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037968 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.193732Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037967 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.193796Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037966 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.193877Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037965 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.193939Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037964 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.193998Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037963 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.194059Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037962 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.194130Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037961 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.194197Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037960 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.194257Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037959 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.194318Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037958 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 
2026-01-04T02:46:56.194379Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037957 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.194437Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037955 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.194498Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037954 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.194558Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037953 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.194632Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037952 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.194702Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037951 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 2026-01-04T02:46:56.194765Z node 1 :TX_COLUMNSHARD WARN: ctor_logger.h:56: TColumnShard.StateWork at 72075186224037950 unhandled event type: NKikimr::TEvDataShard::TEvSchemaChangedResult event: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 281474976715680 query_phases { duration_us: 1767494804778134 table_access { name: "/Root/.tmp/sessions/844340f5-49ba-2219-13c7-a4aeee813264/Root/TestTable2_f8164771-4843-8939-7b9e-2e84f29974be" updates { rows: 4 bytes: 1472 } partitions_count: 4 } table_access { name: "/Root/TestTable" reads { rows: 4 bytes: 144 } } cpu_time_us: 34613 } compilation { duration_us: 11658 cpu_time_us: 6358 } process_cpu_time_us: 1168 total_duration_us: 5834762 total_cpu_time_us: 42139 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TargetDiscoverer::SystemObjects [GOOD] >> TargetDiscoverer::Basic [GOOD] >> ColumnBuildTest::AlterMainTable_DisableFlag [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TVPatchTests::FindingPartsWhenOnlyOnePartExists [GOOD] Test command err: Recv 65537 2026-01-04T02:46:57.837625Z node 1 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-04T02:46:57.838788Z node 1 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-01-04T02:46:57.838860Z node 1 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts 2026-01-04T02:46:57.838954Z node 1 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv 
NKikimr::TEvVPatchDyingConfirm Recv 65537 2026-01-04T02:46:58.097890Z node 2 :BS_VDISK_PATCH INFO: {BSVSP03@skeleton_vpatch_actor.cpp:190} [0:1:0:0:0] TEvVPatch: bootstrapped; OriginalBlobId# [1:2:3:4:6:10:0] Deadline# 1970-01-01T00:00:01.000000Z Send NKikimr::TEvBlobStorage::TEvVGet Recv NKikimr::TEvBlobStorage::TEvVGetResult 2026-01-04T02:46:58.098144Z node 2 :BS_VDISK_PATCH INFO: {BSVSP06@skeleton_vpatch_actor.cpp:266} [0:1:0:0:0] TEvVPatch: received parts index; OriginalBlobId# [1:2:3:4:6:10:0] Status# OK ResultSize# 1 2026-01-04T02:46:58.098194Z node 2 :BS_VDISK_PATCH INFO: {BSVSP04@skeleton_vpatch_actor.cpp:226} [0:1:0:0:0] TEvVPatch: sended found parts; OriginalBlobId# [1:2:3:4:6:10:0] FoundParts# [1] Status# OK Send NKikimr::TEvBlobStorage::TEvVPatchFoundParts Recv NKikimr::TEvBlobStorage::TEvVPatchDiff 2026-01-04T02:46:58.098331Z node 2 :BS_VDISK_PATCH INFO: {BSVSP09@skeleton_vpatch_actor.cpp:578} [0:1:0:0:0] TEvVPatch: received diff; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 XorReceiver# no ParityPart# no ForceEnd# yes 2026-01-04T02:46:58.098378Z node 2 :BS_VDISK_PATCH INFO: {BSVSP07@skeleton_vpatch_actor.cpp:315} [0:1:0:0:0] TEvVPatch: received force end; OriginalBlobId# [1:2:3:4:6:10:0] PatchedBlobId# [1:3:3:4:6:10:0] OriginalPartId# 1 PatchedPartId# 1 Status# OK ErrorReason# Send NKikimr::TEvBlobStorage::TEvVPatchResult 2026-01-04T02:46:58.098447Z node 2 :BS_VDISK_PATCH DEBUG: {BSVSP17@skeleton_vpatch_actor.cpp:728} [0:1:0:0:0] NotifySkeletonAboutDying; Send NKikimr::TEvVPatchDyingRequest Recv NKikimr::TEvVPatchDyingConfirm |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/vdisk/skeleton/ut/unittest >> TargetDiscoverer::IndexedTable [GOOD] >> ColumnBuildTest::Locking_DisableFlag [GOOD] >> TargetDiscoverer::Dirs [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Negative [GOOD] Test command err: 2026-01-04T02:46:55.280081Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332426559260523:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:55.280158Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:55.573879Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:55.594343Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:55.594443Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:55.667608Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:55.674798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:55.821836Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:56.008291Z node 1 :NET_CLASSIFIER 
WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:56.008325Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:56.008334Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:56.008454Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:56.291052Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:56.380546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:56.435910Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: {
: Error: Path not found } } } 2026-01-04T02:46:56.435963Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root/Table, status# SCHEME_ERROR, issues# {
: Error: Path not found }, iteration# 0 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::IndexLookupAndTake-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 29546, MsgBus: 3666 2026-01-04T02:46:51.262313Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332409214168200:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:51.262397Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:51.493168Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:51.501513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:51.501619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:51.526821Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:51.567914Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332409214168173:2081] 1767494811260593 != 1767494811260596 2026-01-04T02:46:51.580080Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:51.628288Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:51.628319Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:51.628327Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:51.628453Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:51.689842Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:52.043681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:52.096054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:52.227650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:52.334677Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:52.379842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:52.441402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:53.706821Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332417804104638:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:53.706949Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:53.707148Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332417804104648:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:53.707207Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:53.961732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:53.991801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:54.021886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:54.048357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:54.071721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:54.102043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:54.132240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:54.175439Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:54.264192Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332422099072815:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:54.264276Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:54.264656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332422099072819:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:54.264704Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332422099072821:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:54.264747Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:54.269106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:54.280475Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332422099072824:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:54.367136Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332422099072877:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:56.180459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:56.262458Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332409214168200:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:56.262507Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; /Root/SecondaryKeys/Index/indexImplTable 2 16 /Root/SecondaryKeys 1 8 |94.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |94.3%| [TA] $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::RetryableError [GOOD] Test command err: 2026-01-04T02:46:55.280081Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332427069625711:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:55.280159Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:55.534108Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:55.552462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:55.552551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:55.626163Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:55.635670Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:55.831894Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:56.010156Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:56.010191Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:56.010200Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:56.010349Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:56.289731Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:56.385083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:56.390008Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-0 } } } 2026-01-04T02:46:56.390049Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-0 }, iteration# 0 2026-01-04T02:46:56.390110Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-1 } } } 2026-01-04T02:46:56.390131Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-1 }, iteration# 1 2026-01-04T02:46:56.390208Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-2 } } } 2026-01-04T02:46:56.390230Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-2 }, iteration# 2 2026-01-04T02:46:56.390286Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-3 } } } 2026-01-04T02:46:56.390301Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-3 }, iteration# 3 2026-01-04T02:46:56.390326Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-4 } } } 2026-01-04T02:46:56.390340Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-4 }, iteration# 4 2026-01-04T02:46:56.390399Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: UNAVAILABLE, issues: {
: Error: iteration-5 } } } 2026-01-04T02:46:56.390444Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# UNAVAILABLE, issues# {
: Error: iteration-5 }, iteration# 5 |94.3%| [TA] {RESULT} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Transfer [GOOD] Test command err: 2026-01-04T02:46:55.297585Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332430066500797:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:55.297961Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:55.600325Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:55.615456Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:55.615597Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:55.644177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:55.719660Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:55.725057Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332430066500676:2081] 1767494815293311 != 1767494815293314 2026-01-04T02:46:55.834278Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:56.008008Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:56.008089Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:56.008096Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:56.008177Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:56.302023Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:56.380859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:56.523710Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Topic, owner: root@builtin, type: Topic, size_bytes: 0, created_at: { plan_step: 1767494816521, tx_id: 281474976710658 } } } 2026-01-04T02:46:56.523751Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: 
[TargetDiscoverer][rid 1] Describe path succeeded: path# /Root/Topic 2026-01-04T02:46:56.549613Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:168: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTopicResponse { Result: { status: SUCCESS, issues: } } 2026-01-04T02:46:56.549667Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:183: [TargetDiscoverer][rid 1] Describe topic succeeded: path# /Root/Topic 2026-01-04T02:46:56.549704Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:193: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Topic, dstPath# /Root/Replicated/Table, kind# Transfer |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |94.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/vdisk/skeleton/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::InvalidCredentials [GOOD] Test command err: 2026-01-04T02:46:55.326023Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332426742124066:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:55.326096Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:55.613667Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:55.613753Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:55.643523Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:55.667561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:55.679046Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:55.680232Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332426742124033:2081] 1767494815324623 != 1767494815324626 2026-01-04T02:46:55.811698Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:56.008122Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:56.008162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:56.008167Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:56.008244Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:56.332904Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:56.387050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:56.396655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:56.642729Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: CLIENT_UNAUTHENTICATED, issues: {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:219: Cannot find user: user } } } 2026-01-04T02:46:56.642802Z node 1 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /Root, status# CLIENT_UNAUTHENTICATED, issues# {
: Error: Can't get Authentication info from CredentialsProvider. ydb/public/sdk/cpp/src/client/types/credentials/login/login.cpp:219: Cannot find user: user }, iteration# 0 |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::AlterMainTable_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:46:54.180618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:46:54.180738Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:54.180783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:46:54.180814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:46:54.180866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:46:54.180901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:46:54.180960Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:54.181059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:46:54.181879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:46:54.183334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:46:54.270592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:46:54.270651Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:54.283820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:46:54.284338Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:46:54.284531Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:46:54.291177Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:46:54.291340Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:46:54.291841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:46:54.292082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:46:54.299981Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:54.300618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:46:54.305921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:46:54.305984Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:54.306037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:46:54.306123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:46:54.306246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:46:54.307196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:46:54.432368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.434250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.434416Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.434540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false 
CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.434617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.434673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.434735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.434814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.434927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.434995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.435078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.435140Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.435220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.435292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.435377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
57 ready parts: 1/1 2026-01-04T02:46:58.613071Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2026-01-04T02:46:58.613100Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-04T02:46:58.613134Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730757, ready parts: 1/1, is published: true 2026-01-04T02:46:58.613197Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1703:3569] message: TxId: 281474976730757 2026-01-04T02:46:58.613246Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-04T02:46:58.613283Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730757:0 2026-01-04T02:46:58.613315Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730757:0 2026-01-04T02:46:58.613376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-04T02:46:58.616314Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730757 2026-01-04T02:46:58.616394Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730757 2026-01-04T02:46:58.616469Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730757 2026-01-04T02:46:58.616587Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730757 2026-01-04T02:46:58.617857Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 2026-01-04T02:46:58.617939Z node 2 :BUILD_INDEX DEBUG: 
schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:46:58.617984Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-01-04T02:46:58.619224Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2026-01-04T02:46:58.619322Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:46:58.619355Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-04T02:46:58.619437Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:46:58.619470Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1758:3613] TestWaitNotification: OK eventTxId 105 
2026-01-04T02:46:58.619996Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-04T02:46:58.620209Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-01-04T02:46:58.620864Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-04T02:46:58.621040Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 203us result status StatusSuccess 2026-01-04T02:46:58.621372Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } 
TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 >> ColumnBuildTest::Filling_DisableFlag [GOOD] >> ColumnBuildTest::ValidDefaultValue [GOOD] >> ColumnBuildTest::Unlocking_Failed >> ColumnBuildTest::BaseCase [GOOD] >> ColumnBuildTest::Cancellation_Applying_DisableFlag >> ColumnBuildTest::Cancelling [GOOD] >> ColumnBuildTest::Cancellation_DroppingColumns_DisableFlag >> ColumnBuildTest::Applying_DisableFlag [GOOD] |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Basic [GOOD] Test command err: 2026-01-04T02:46:55.303168Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332426814063767:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:55.303486Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:55.337435Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:55.663654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:55.663757Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:55.667508Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:55.669110Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:55.724557Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332426814063730:2081] 
1767494815301650 != 1767494815301653 2026-01-04T02:46:55.735590Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:55.924318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:56.008553Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:56.008590Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:56.008597Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:56.008680Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:56.312417Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:56.380434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:56.390918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:56.527172Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1767494816430, tx_id: 1 } } } 2026-01-04T02:46:56.527201Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2026-01-04T02:46:56.534860Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1767494815737, tx_id: 281474976710657 } }, { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767494816493, tx_id: 281474976715658 } }] } } 2026-01-04T02:46:56.534889Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2026-01-04T02:46:58.434967Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767494816493, tx_id: 281474976715658 } } } 2026-01-04T02:46:58.435011Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2026-01-04T02:46:58.435042Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: 
[TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Locking_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:46:54.180585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:46:54.180669Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:54.180714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:46:54.180748Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:46:54.180785Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:46:54.180822Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:46:54.180881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:54.180947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:46:54.181662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:46:54.183308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:46:54.259931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:46:54.259995Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:54.272759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:46:54.273170Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:46:54.274889Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:46:54.283449Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:46:54.283706Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 
2026-01-04T02:46:54.288162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:46:54.290879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:46:54.300152Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:54.300616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:46:54.306046Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:46:54.306114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:54.306194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:46:54.306249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:46:54.306307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:46:54.307228Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:46:54.448984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.449629Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.449718Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.449782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.449862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.449897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.449935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.449974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.450055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.450100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.450144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.450193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.450247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.450296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.450364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-04T02:46:58.740235Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2026-01-04T02:46:58.740262Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-04T02:46:58.740301Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730758, ready parts: 1/1, is published: true 2026-01-04T02:46:58.740360Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1560:3427] message: TxId: 281474976730758 2026-01-04T02:46:58.740403Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-04T02:46:58.740438Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730758:0 2026-01-04T02:46:58.740466Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730758:0 2026-01-04T02:46:58.740531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-04T02:46:58.742930Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730758 2026-01-04T02:46:58.742985Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730758 2026-01-04T02:46:58.743031Z node 2 :BUILD_INDEX INFO: 
schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730758 2026-01-04T02:46:58.743133Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976730757, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730758 2026-01-04T02:46:58.744768Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 2026-01-04T02:46:58.744896Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976730757, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:46:58.744945Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-01-04T02:46:58.746494Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2026-01-04T02:46:58.746634Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976730757, AlterMainTableTxStatus: StatusInvalidParameter, AlterMainTableTxDone: 0, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 0, InitiateTxStatus: StatusSuccess, InitiateTxDone: 0, SnapshotStepId: 0, ApplyTxId: 0, ApplyTxStatus: StatusSuccess, ApplyTxDone: 0, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:46:58.746672Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-04T02:46:58.746782Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:46:58.746825Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1751:3605] TestWaitNotification: OK eventTxId 105 2026-01-04T02:46:58.747512Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-04T02:46:58.747882Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "At AlterMainTable state got unsuccess propose result, status: StatusInvalidParameter, reason: Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-01-04T02:46:58.748736Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false 
ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-04T02:46:58.748983Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 283us result status StatusSuccess 2026-01-04T02:46:58.749523Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::SystemObjects [GOOD] Test command err: 2026-01-04T02:46:55.280346Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332428987176917:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:55.280401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:55.555841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:55.556183Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:55.611785Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:55.623921Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:55.659071Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:55.913860Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:56.008023Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:56.008059Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:56.008077Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:56.008150Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:56.289312Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:56.393193Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:56.401893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:56.513471Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:56.571195Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1767494816444, tx_id: 1 } } } 2026-01-04T02:46:56.571222Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2026-01-04T02:46:56.578629Z node 
1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1767494815695, tx_id: 281474976715657 } }, { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767494816493, tx_id: 281474976710658 } }, { name: export-100500, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1767494816535, tx_id: 281474976710659 } }] } } 2026-01-04T02:46:56.578662Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2026-01-04T02:46:58.317442Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767494816493, tx_id: 281474976710658 } } } 2026-01-04T02:46:58.317482Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2026-01-04T02:46:58.317536Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::Dirs [GOOD] Test command err: 2026-01-04T02:46:55.999449Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332428703860727:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:56.000539Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:56.232247Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:56.232627Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:56.232732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:56.295484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:56.327654Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:56.447433Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:56.541676Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:56.541711Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize 
from file: (empty maybe) 2026-01-04T02:46:56.541719Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:56.541849Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:56.738679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:56.784991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:56.910635Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1767494816787, tx_id: 1 } } } 2026-01-04T02:46:56.910679Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2026-01-04T02:46:56.919468Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1767494816360, tx_id: 281474976715657 } }, { name: Dir, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1767494816801, tx_id: 281474976710658 } }] } } 2026-01-04T02:46:56.919497Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2026-01-04T02:46:56.926939Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767494816878, tx_id: 281474976710659 } }] } } 2026-01-04T02:46:56.926966Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root/Dir 2026-01-04T02:46:57.006388Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:58.704781Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767494816878, tx_id: 281474976710659 } } } 2026-01-04T02:46:58.704835Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Dir/Table 2026-01-04T02:46:58.704963Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Dir/Table, dstPath# /Root/Replicated/Dir/Table, kind# Table >> ColumnBuildTest::Rejecting [GOOD] >> 
ColumnBuildTest::Unlocking_DisableFlag >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-clusteradmin ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/replication/controller/ut_target_discoverer/unittest >> TargetDiscoverer::IndexedTable [GOOD] Test command err: 2026-01-04T02:46:55.296372Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332428477249366:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:55.296460Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:55.553910Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:55.587058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:55.587179Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:55.633600Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:55.683609Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:55.783164Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:56.008177Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:56.008206Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:56.008226Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:56.008316Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:56.302554Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:56.406681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:56.419872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:56.766720Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { name: 
Root, owner: root@builtin, type: Directory, size_bytes: 0, created_at: { plan_step: 1767494816458, tx_id: 1 } } } 2026-01-04T02:46:56.766749Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:43: [TargetDiscoverer][rid 1] Describe path succeeded: path# /Root 2026-01-04T02:46:56.775951Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:250: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvListDirectoryResponse { Result: { children [{ name: .sys, owner: metadata@system, type: Directory, size_bytes: 0, created_at: { plan_step: 1767494815702, tx_id: 281474976715657 } }, { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767494816731, tx_id: 281474976710658 } }] } } 2026-01-04T02:46:56.775991Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:263: [TargetDiscoverer][rid 1] Listing succeeded: path# /Root 2026-01-04T02:46:58.636068Z node 1 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:100: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribeTableResponse { Result: { name: Table, owner: root@builtin, type: Table, size_bytes: 0, created_at: { plan_step: 1767494816731, tx_id: 281474976710658 } } } 2026-01-04T02:46:58.636118Z node 1 :REPLICATION_CONTROLLER DEBUG: target_discoverer.cpp:115: [TargetDiscoverer][rid 1] Describe table succeeded: path# /Root/Table 2026-01-04T02:46:58.636172Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:122: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table, dstPath# /Root/Replicated/Table, kind# Table 2026-01-04T02:46:58.636252Z node 1 :REPLICATION_CONTROLLER INFO: target_discoverer.cpp:142: [TargetDiscoverer][rid 1] Add target: srcPath# /Root/Table/Index, dstPath# /Root/Replicated/Table/Index/indexImplTable, kind# IndexTable |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/replication/controller/ut_target_discoverer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Filling_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:46:54.180559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:46:54.180651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:54.180689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:46:54.180720Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:46:54.180752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:46:54.180802Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:46:54.180868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:54.180939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:46:54.181665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:46:54.183286Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:46:54.260890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:46:54.260952Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:54.276537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:46:54.277120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:46:54.277290Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:46:54.284914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:46:54.285157Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:46:54.288164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:46:54.290860Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:46:54.299721Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:54.300629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:46:54.306067Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:46:54.306141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:54.306255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:46:54.306307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:46:54.306364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:46:54.307266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at 
schemeshard: 72057594046678944 2026-01-04T02:46:54.474918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.475918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.476054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.476162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.476244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.476312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.476382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.476465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView 
Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.476596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.476677Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.476756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.476853Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.476973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.477055Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.477145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
59.555313Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409549] TDone opId# 281474976730759:0 ProgressState 2026-01-04T02:46:59.555380Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730759:0 progress is 1/1 2026-01-04T02:46:59.555404Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730759 ready parts: 1/1 2026-01-04T02:46:59.555431Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730759:0 progress is 1/1 2026-01-04T02:46:59.555453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730759 ready parts: 1/1 2026-01-04T02:46:59.555477Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730759, ready parts: 1/1, is published: true 2026-01-04T02:46:59.555519Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1817:3682] message: TxId: 281474976730759 2026-01-04T02:46:59.555550Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730759 ready parts: 1/1 2026-01-04T02:46:59.555586Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730759:0 2026-01-04T02:46:59.555609Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730759:0 2026-01-04T02:46:59.555653Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-04T02:46:59.557223Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730759 2026-01-04T02:46:59.557267Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730759 2026-01-04T02:46:59.557323Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730759 2026-01-04T02:46:59.557422Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730758, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730759, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 
CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730759 2026-01-04T02:46:59.558480Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking 2026-01-04T02:46:59.558564Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejection_Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730758, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730759, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:46:59.558643Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-01-04T02:46:59.559764Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected 2026-01-04T02:46:59.559843Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Rejected TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: Adding columns with defaults is disabled, SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730758, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976730757, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730759, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:46:59.559878Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-04T02:46:59.559969Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for 
txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:46:59.560008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1964:3817] TestWaitNotification: OK eventTxId 105 2026-01-04T02:46:59.560523Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-04T02:46:59.560830Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "Adding columns with defaults is disabled" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-01-04T02:46:59.561731Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-04T02:46:59.561937Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 222us result status StatusSuccess 2026-01-04T02:46:59.562266Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 
ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest |94.4%| [TA] $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} |94.4%| [TA] {RESULT} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpLimits::TooBigQuery-useSink [GOOD] >> KqpLimits::TooBigKey+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Applying_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:46:54.180612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:46:54.180712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:54.180760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:46:54.180802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:46:54.180855Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:46:54.180904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:46:54.180970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:54.181058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:46:54.182034Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:46:54.183354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:46:54.278729Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:46:54.278799Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:54.293823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:46:54.294400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:46:54.294608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:46:54.302815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:46:54.303058Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:46:54.303881Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:46:54.304153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:46:54.307371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:54.307563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:46:54.308807Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:46:54.308869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:54.308951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:46:54.308997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:46:54.309059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:46:54.309224Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:46:54.479981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.481018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.481169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.481264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.481348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.481410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.481507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.481588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.481686Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.481764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.481854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.481945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.482032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.482112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.482200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
BUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-04T02:46:59.583913Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2026-01-04T02:46:59.583940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-04T02:46:59.583972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730757, ready parts: 1/1, is published: true 2026-01-04T02:46:59.584042Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1831:3695] message: TxId: 281474976730757 2026-01-04T02:46:59.584102Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-04T02:46:59.584139Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730757:0 2026-01-04T02:46:59.584168Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730757:0 2026-01-04T02:46:59.584241Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-04T02:46:59.586981Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730757 2026-01-04T02:46:59.587068Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730757 2026-01-04T02:46:59.587139Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730757 2026-01-04T02:46:59.587260Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730757 2026-01-04T02:46:59.589108Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking 2026-01-04T02:46:59.589266Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: 
TTxBuildProgress: Execute: 105 Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:46:59.589359Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-04T02:46:59.591068Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done 2026-01-04T02:46:59.591208Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:46:59.591270Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-04T02:46:59.591421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:46:59.591474Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1889:3742] TestWaitNotification: OK eventTxId 105 2026-01-04T02:46:59.592182Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-04T02:46:59.592526Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 
105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } 2026-01-04T02:46:59.593394Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-04T02:46:59.593667Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 282us result status StatusSuccess 2026-01-04T02:46:59.594196Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "default_value" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 
MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |94.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/replication/controller/ut_target_discoverer/test-results/unittest/{meta.json ... results_accumulator.log} |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::DoNotRestoreDeletedRows [GOOD] >> ColumnBuildTest::DisabledAndEnabledFlag >> TKesusTest::TestQuoterHDRRParametersValidation |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackWithScanQuery >> KqpCompileFallback::FallbackWithPreparedQuery >> TKesusTest::TestQuoterHDRRParametersValidation [GOOD] >> TKesusTest::TestQuoterAccountResourcesOnDemand >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessages [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackToVersion1Success |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> ColumnBuildTest::Unlocking_Failed [GOOD] >> KqpCost::CTASWithRetry-isOlap [GOOD] |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> ColumnBuildTest::Unlocking_DisableFlag [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Unlocking_Failed [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:46:54.180624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:46:54.180715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:54.180770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:46:54.180805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:46:54.180843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:46:54.180887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:46:54.180961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:54.181042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:46:54.181931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:46:54.183350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:46:54.259948Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:46:54.260025Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:54.276183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:46:54.276698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:46:54.276845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:46:54.283811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:46:54.284071Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:46:54.288197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:46:54.290843Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:46:54.300161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:54.300621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:46:54.306045Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:46:54.306122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:54.306201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:46:54.306254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:46:54.306307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:46:54.307240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:46:54.440517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.441431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:46:54.441549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.441634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.441714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.441781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.441858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.441916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.442029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.442096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.442162Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.442238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.442317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.442386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.442463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
2.176509Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976725761, at schemeshard: 72075186233409549 2026-01-04T02:47:02.176583Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2026-01-04T02:47:02.176627Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976725761 2026-01-04T02:47:02.176678Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 106, txId# 281474976725761 2026-01-04T02:47:02.176779Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/', error: path hasn't been resolved, nearest resolved path: '/MyRoot/ServerLessDB' (id: [OwnerId: 72075186233409549, LocalPathId: 1]), SubscribersCount: 1, CreateSender: [2:1548:3418], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725762, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976725761 2026-01-04T02:47:02.178227Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejection_Unlocking 2026-01-04T02:47:02.178345Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejection_Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejection_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/', error: path hasn't been resolved, nearest resolved path: '/MyRoot/ServerLessDB' (id: [OwnerId: 72075186233409549, LocalPathId: 1]), SubscribersCount: 1, CreateSender: [2:1548:3418], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, 
ApplyTxDone: 1, DropColumnsTxId: 281474976725762, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:47:02.178393Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Rejection_Unlocking to Rejected 2026-01-04T02:47:02.179892Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejected 2026-01-04T02:47:02.180011Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Rejected TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Rejected, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: '/MyRoot/ServerLessDB/', error: path hasn't been resolved, nearest resolved path: '/MyRoot/ServerLessDB' (id: [OwnerId: 72075186233409549, LocalPathId: 1]), SubscribersCount: 1, CreateSender: [2:1548:3418], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 450, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725762, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976725761, UnlockTxStatus: StatusPathDoesNotExist, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:47:02.180051Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2026-01-04T02:47:02.180205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-04T02:47:02.180259Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:1564:3434] TestWaitNotification: OK eventTxId 106 2026-01-04T02:47:02.180913Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2026-01-04T02:47:02.181291Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 Issues { message: "At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/ServerLessDB\' (id: [OwnerId: 72075186233409549, LocalPathId: 1])" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 
72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 Issues { message: "At Unlocking state got unsuccess propose result, status: StatusPathDoesNotExist, reason: Check failed: path: \'/MyRoot/ServerLessDB/\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/ServerLessDB\' (id: [OwnerId: 72075186233409549, LocalPathId: 1])" severity: 1 } Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_REJECTED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 0 StartTime { } EndTime { } } 2026-01-04T02:47:02.182118Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-04T02:47:02.182367Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 276us result status StatusSuccess 2026-01-04T02:47:02.182839Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 
MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 >> ColumnBuildTest::Cancellation_Applying_DisableFlag [GOOD] >> ColumnBuildTest::Cancellation_DroppingColumns_DisableFlag [GOOD] |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::CTASWithRetry-isOlap [GOOD] Test command err: Trying to start YDB, gRPC: 23378, MsgBus: 7292 ... waiting for SysViewsRoster update finished 2026-01-04T02:46:53.141410Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:53.231260Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:53.231624Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:53.231669Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:53.457512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:53.457612Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:53.524056Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494810158167 != 1767494810158170 2026-01-04T02:46:53.533591Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:53.577138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:53.710201Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:54.046121Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:54.046191Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:54.046225Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:54.046416Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:54.058600Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:54.352806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:54.457933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:54.704806Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:55.079182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:55.393360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:56.209339Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1903:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:56.209526Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:56.210741Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1976:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:56.210846Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:56.245042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:56.441635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:56.685019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:56.959903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:57.200584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:57.449988Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:57.729976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:58.022348Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:58.378389Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2788:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:58.378536Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:58.378994Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2792:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:58.379093Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2795:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:58.379168Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:58.385061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:58.554743Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2797:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:46:58.604835Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2857:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:00.441353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:01.351246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) query_phases { duration_us: 1767494811262610 table_access { name: "/Root/.tmp/sessions/844340f5-49ba-2219-13c7-a4aeee813264/Root/TestTable2_2861ff7c-459e-032f-24e6-a28dfd6c4e70" updates { rows: 4 bytes: 80 } partitions_count: 1 } table_access { name: "/Root/TestTable" reads { rows: 4 bytes: 80 } partitions_count: 1 } cpu_time_us: 4994 affected_shards: 1 } compilation { duration_us: 37640 cpu_time_us: 6734 } process_cpu_time_us: 1341 total_duration_us: 1075477 total_cpu_time_us: 13069 |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Unlocking_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:46:54.180572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:46:54.180648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:54.180685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:46:54.180715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing 
config: using default configuration 2026-01-04T02:46:54.180754Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:46:54.180790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:46:54.180845Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:54.180904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:46:54.181651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:46:54.183294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:46:54.274070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:46:54.274142Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:54.287493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:46:54.287986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:46:54.288157Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:46:54.295493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:46:54.295776Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:46:54.296451Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:46:54.296657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:46:54.299320Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:54.300582Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:46:54.305989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:46:54.306053Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:54.306138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:46:54.306183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:46:54.306251Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:46:54.307190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:46:54.480839Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.481969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.482127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.482249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.482339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.482405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.482481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.482554Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.482675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.482785Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.482877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.482976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.483076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.483161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.483268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... BUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2026-01-04T02:47:02.689472Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976725761:0 progress is 1/1 2026-01-04T02:47:02.689526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2026-01-04T02:47:02.689567Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976725761, ready parts: 1/1, is published: true 2026-01-04T02:47:02.689675Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1887:3751] message: TxId: 281474976725761 2026-01-04T02:47:02.689728Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976725761 ready parts: 1/1 2026-01-04T02:47:02.689764Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976725761:0 2026-01-04T02:47:02.689791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976725761:0 2026-01-04T02:47:02.689889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-04T02:47:02.693350Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976725761 2026-01-04T02:47:02.693442Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976725761 2026-01-04T02:47:02.693539Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976725761 2026-01-04T02:47:02.693727Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976725761 2026-01-04T02:47:02.695949Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: 
TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking 2026-01-04T02:47:02.696093Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:47:02.696167Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-04T02:47:02.697827Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done 2026-01-04T02:47:02.697983Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976725761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:47:02.698030Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-04T02:47:02.698162Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:47:02.698213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1944:3797] TestWaitNotification: OK eventTxId 105 2026-01-04T02:47:02.698971Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: 
DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-04T02:47:02.699378Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 0 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 100 EndTime { } } 2026-01-04T02:47:02.700282Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-04T02:47:02.700551Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 297us result status StatusSuccess 2026-01-04T02:47:02.701074Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "default_value" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 
MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::NoFallbackWhenSqlVersion1 >> TKesusTest::TestQuoterAccountResourcesOnDemand [GOOD] >> TKesusTest::TestQuoterAccountResourcesPaced >> KqpCompileFallback::NoFallbackWhenSqlVersionNotSet |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> ColumnBuildTest::DisabledAndEnabledFlag [GOOD] |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackMechanismWorksEnforceSqlVersionV1False >> KqpCompileFallback::FallbackMechanismWorks |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Cancellation_Applying_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:46:54.180617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:46:54.180719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:54.180774Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:46:54.180820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:46:54.180878Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:46:54.180927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:46:54.181012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:54.181106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:46:54.181982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:46:54.183349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:46:54.259951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:46:54.260011Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:54.275764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:46:54.276386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:46:54.276607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:46:54.284659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:46:54.284883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:46:54.288205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:46:54.290833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:46:54.299865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:54.300616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:46:54.306023Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:46:54.306114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:54.306185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:46:54.306241Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:46:54.306302Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:46:54.307230Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:46:54.467116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.468207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.468366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.468493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.468587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.468649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.468719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.468792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.468908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.468994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.469117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.469204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.469291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.469371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.469468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... UG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976730757:0, at schemeshard: 72075186233409549 2026-01-04T02:47:03.289094Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409549] TDone opId# 281474976730757:0 ProgressState 2026-01-04T02:47:03.289178Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2026-01-04T02:47:03.289205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-04T02:47:03.289231Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730757:0 progress is 1/1 2026-01-04T02:47:03.289252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-04T02:47:03.289277Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730757, ready parts: 1/1, is published: true 2026-01-04T02:47:03.289339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:2593:4352] message: TxId: 281474976730757 2026-01-04T02:47:03.289373Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730757 ready parts: 1/1 2026-01-04T02:47:03.289396Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730757:0 2026-01-04T02:47:03.289415Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730757:0 2026-01-04T02:47:03.289487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 12 2026-01-04T02:47:03.291981Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730757 2026-01-04T02:47:03.292035Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730757 2026-01-04T02:47:03.292079Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 106, txId# 281474976730757 2026-01-04T02:47:03.292175Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: 
Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730757 2026-01-04T02:47:03.293844Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking 2026-01-04T02:47:03.293967Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:47:03.294031Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2026-01-04T02:47:03.295366Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled 2026-01-04T02:47:03.295483Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976725762, ApplyTxStatus: StatusAccepted, 
ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730757, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:47:03.295530Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2026-01-04T02:47:03.295644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-04T02:47:03.295677Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:2664:4412] TestWaitNotification: OK eventTxId 106 2026-01-04T02:47:03.296244Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2026-01-04T02:47:03.296495Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-01-04T02:47:03.297188Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-04T02:47:03.297410Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 238us result status StatusSuccess 2026-01-04T02:47:03.297872Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 350 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { 
Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 13 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::Cancellation_DroppingColumns_DisableFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:46:54.180598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:46:54.180693Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:54.180735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:46:54.180773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: 
using default configuration 2026-01-04T02:46:54.180830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:46:54.180862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:46:54.180936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:54.181011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:46:54.181900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:46:54.183325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:46:54.268553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:46:54.268599Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:54.279412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:46:54.279934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:46:54.280122Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:46:54.287392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:46:54.287667Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:46:54.288335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:46:54.290828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:46:54.299939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:54.300605Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:46:54.306075Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:46:54.306160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:54.306232Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:46:54.306286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:46:54.306358Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:46:54.307243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:46:54.469828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.470850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.470976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.471075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.471158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.471217Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.471283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.471342Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.471435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.471519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.471635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.471711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.471811Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.471889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.471982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... UG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976730758:0, at schemeshard: 72075186233409549 2026-01-04T02:47:03.219397Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409549] TDone opId# 281474976730758:0 ProgressState 2026-01-04T02:47:03.219482Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2026-01-04T02:47:03.219515Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-04T02:47:03.219553Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730758:0 progress is 1/1 2026-01-04T02:47:03.219579Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-04T02:47:03.219611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730758, ready parts: 1/1, is published: true 2026-01-04T02:47:03.219679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:2454:4213] message: TxId: 281474976730758 2026-01-04T02:47:03.219745Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730758 ready parts: 1/1 2026-01-04T02:47:03.219781Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730758:0 2026-01-04T02:47:03.219809Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730758:0 2026-01-04T02:47:03.219883Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 12 2026-01-04T02:47:03.223134Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730758 2026-01-04T02:47:03.223214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730758 2026-01-04T02:47:03.223280Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 106, txId# 281474976730758 2026-01-04T02:47:03.223398Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 
281474976730757, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730758 2026-01-04T02:47:03.225413Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking 2026-01-04T02:47:03.225554Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancellation_Unlocking TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancellation_Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730757, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:47:03.225610Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Cancellation_Unlocking to Cancelled 2026-01-04T02:47:03.227130Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled 2026-01-04T02:47:03.227266Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 106 Cancelled TBuildInfo{ IndexBuildId: 106, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Cancelled, SubState: None, IsBroken: 0, IsCancellationRequested: 1, Issue: , SubscribersCount: 1, CreateSender: [0:0:0], AlterMainTableTxId: 281474976725759, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976725757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976725760, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 0, ApplyTxId: 281474976730757, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 281474976725761, DropColumnsTxStatus: StatusAccepted, DropColumnsTxDone: 1, UnlockTxId: 281474976730758, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:47:03.227307Z node 2 :BUILD_INDEX TRACE: 
schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 106, subscribers count# 1 2026-01-04T02:47:03.227438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-04T02:47:03.227487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [2:2524:4272] TestWaitNotification: OK eventTxId 106 2026-01-04T02:47:03.228164Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 106 2026-01-04T02:47:03.228521Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 106 State: STATE_CANCELLED Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 MaxBatchBytes: 8388608 MaxBatchRetries: 50 } } Progress: 0 EndTime { } } 2026-01-04T02:47:03.229311Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-04T02:47:03.229552Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 254us result status StatusSuccess 2026-01-04T02:47:03.230151Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 350 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 5 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "index" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 3 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 5 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 
RowReads: 0 RangeReads: 0 PartCount: 10 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 13 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-clusteradmin [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-system ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_column_build/unittest >> ColumnBuildTest::DisabledAndEnabledFlag [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:46:54.180585Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:46:54.180656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:54.180697Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:46:54.180740Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:46:54.180772Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 
10000 2026-01-04T02:46:54.180810Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:46:54.180854Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:54.180915Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:46:54.181637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:46:54.183322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:46:54.263933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:46:54.263993Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:54.276342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:46:54.276730Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:46:54.276871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:46:54.283649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:46:54.283838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:46:54.288183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:46:54.290818Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:46:54.300202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:54.300610Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:46:54.306034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:46:54.306104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:54.306159Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:46:54.306211Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:46:54.306263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:46:54.307225Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:46:54.478208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.479252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.479394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.479481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.479578Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.479649Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.479719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.479800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.479923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.480021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.480098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.480176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.480265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.480342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:54.480431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 64Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730761 ready parts: 1/1 2026-01-04T02:47:03.929310Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976730761:0 progress is 1/1 2026-01-04T02:47:03.929341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730761 ready parts: 1/1 2026-01-04T02:47:03.929379Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976730761, ready parts: 1/1, is published: true 2026-01-04T02:47:03.929452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:1558:3425] message: TxId: 281474976730761 2026-01-04T02:47:03.929499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976730761 ready parts: 1/1 2026-01-04T02:47:03.929536Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976730761:0 2026-01-04T02:47:03.929570Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976730761:0 2026-01-04T02:47:03.929647Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409549, LocalPathId: 32] was 3 2026-01-04T02:47:03.932122Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7196: Handle: TEvNotifyTxCompletionResult: txId# 281474976730761 2026-01-04T02:47:03.932209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:7198: Message: TxId: 281474976730761 2026-01-04T02:47:03.932275Z node 2 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:3019: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, id# 105, txId# 281474976730761 2026-01-04T02:47:03.932400Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:3022: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TEvNotifyTxCompletionResult, TIndexBuildInfo: TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1749:3603], AlterMainTableTxId: 281474976730758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976730757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976730759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 500, ApplyTxId: 281474976730760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 0, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0}, txId# 281474976730761 2026-01-04T02:47:03.934200Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking 2026-01-04T02:47:03.934341Z node 2 
:BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Unlocking TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Unlocking, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1749:3603], AlterMainTableTxId: 281474976730758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976730757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976730759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 500, ApplyTxId: 281474976730760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:47:03.934409Z node 2 :BUILD_INDEX INFO: schemeshard_build_index_tx_base.cpp:24: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: Change state from Unlocking to Done 2026-01-04T02:47:03.936149Z node 2 :BUILD_INDEX NOTICE: schemeshard_build_index__progress.cpp:1950: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done 2026-01-04T02:47:03.936297Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__progress.cpp:1951: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxBuildProgress: Execute: 105 Done TBuildInfo{ IndexBuildId: 105, Uid: , DomainPathId: [OwnerId: 72075186233409549, LocalPathId: 1], TablePathId: [OwnerId: 72075186233409549, LocalPathId: 32], IndexType: EIndexTypeInvalid, IndexName: , State: Done, SubState: None, IsBroken: 0, IsCancellationRequested: 0, Issue: , SubscribersCount: 1, CreateSender: [2:1749:3603], AlterMainTableTxId: 281474976730758, AlterMainTableTxStatus: StatusAccepted, AlterMainTableTxDone: 1, LockTxId: 281474976730757, LockTxStatus: StatusAccepted, LockTxDone: 1, InitiateTxId: 281474976730759, InitiateTxStatus: StatusAccepted, InitiateTxDone: 1, SnapshotStepId: 500, ApplyTxId: 281474976730760, ApplyTxStatus: StatusAccepted, ApplyTxDone: 1, DropColumnsTxId: 0, DropColumnsTxStatus: StatusSuccess, DropColumnsTxDone: 0, UnlockTxId: 281474976730761, UnlockTxStatus: StatusAccepted, UnlockTxDone: 1, ToUploadShards: 0, DoneShards: 0, Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0, Billed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0} 2026-01-04T02:47:03.936343Z node 2 :BUILD_INDEX TRACE: schemeshard_build_index_tx_base.cpp:339: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TIndexBuildInfo SendNotifications: : id# 105, subscribers count# 1 2026-01-04T02:47:03.936494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 105: got EvNotifyTxCompletionResult 2026-01-04T02:47:03.936563Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 105: satisfy waiter [2:1765:3619] TestWaitNotification: OK eventTxId 105 2026-01-04T02:47:03.937228Z node 2 :BUILD_INDEX DEBUG: schemeshard_build_index__get.cpp:19: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: DoExecute DatabaseName: "/MyRoot/ServerLessDB" IndexBuildId: 105 2026-01-04T02:47:03.937597Z node 2 :BUILD_INDEX 
DEBUG: schemeshard_build_index_tx_base.h:104: TIndexBuilder::TXTYPE_GET_INDEX_BUILD: Reply Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } BUILDINDEX RESPONSE Get: NKikimrIndexBuilder.TEvGetResponse Status: SUCCESS IndexBuild { Id: 105 Issues { message: "TShardStatus { ShardIdx: 72075186233409549:4 Status: DONE UploadStatus: STATUS_CODE_UNSPECIFIED DebugMessage:
: Error: Shard or requested range is empty\n SeqNoRound: 1 Processed: UploadRows: 0 UploadBytes: 0 ReadRows: 0 ReadBytes: 0 CpuTimeUs: 0 }" severity: 1 } State: STATE_DONE Settings { source_path: "/MyRoot/ServerLessDB/Table" max_shards_in_flight: 2 column_build_operation { column { ColumnName: "default_value" default_from_literal { type { type_id: UINT64 } value { uint64_value: 10 } } } } ScanSettings { MaxBatchRows: 1 } } Progress: 100 StartTime { } EndTime { } } 2026-01-04T02:47:03.938407Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLessDB/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409549 2026-01-04T02:47:03.938645Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409549 describe path "/MyRoot/ServerLessDB/Table" took 256us result status StatusSuccess 2026-01-04T02:47:03.939117Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/ServerLessDB/Table" PathDescription { Self { Name: "Table" PathId: 32 SchemeshardId: 72075186233409549 PathType: EPathTypeTable CreateFinished: true CreateTxId: 105 CreateStep: 300 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 4 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } Columns { Name: "default_value" Type: "Uint64" TypeId: 4 Id: 3 NotNull: false DefaultFromLiteral { type { type_id: UINT64 } value { uint64_value: 10 } } IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 4 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 39 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409550 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409551 SchemeShard: 72075186233409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 39 } PathsInside: 31 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SharedHive: 72057594037968897 ServerlessComputeResourcesMode: EServerlessComputeResourcesModeShared SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 
MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 32 PathOwnerId: 72075186233409549, at schemeshard: 72075186233409549 |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_column_build/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage-DisableStatsBatching-EnablePersistentPartitionStats [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007D885D39F500 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:46:49.676377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:46:49.676484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:49.676524Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:46:49.676562Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:46:49.676604Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:46:49.676635Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:46:49.676689Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:46:49.676778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:46:49.677656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:46:49.678010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:46:49.762560Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:46:49.762614Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:49.772788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:46:49.772963Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:46:49.773101Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:46:49.781145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:46:49.781393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:46:49.782172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:46:49.782438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:46:49.785618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:49.785819Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:46:49.787039Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:46:49.787097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:46:49.787204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:46:49.787251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:46:49.787394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:46:49.787588Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:46:49.953630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.954753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.954884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: 
EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.954961Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.955030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.955116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.955201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.955272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.955344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.955437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.955505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.955594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.955670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.955758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:46:49.955846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... 
_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0.0554 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 0 2026-01-04T02:47:04.550755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-01-04T02:47:04.550820Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table1, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-04T02:47:04.551034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, 
schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 2026-01-04T02:47:04.565670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 2026-01-04T02:47:04.588138Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 1 2026-01-04T02:47:04.588370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 584 row count 1 2026-01-04T02:47:04.588425Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from 
datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-04T02:47:04.588594Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 2 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 2 2026-01-04T02:47:04.588896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 2 2026-01-04T02:47:04.588972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:3 data size 584 row count 1 2026-01-04T02:47:04.589009Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-04T02:47:04.589101Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D885D3F4500, stats written 3 2026-01-04T02:47:04.589669Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:47:04.592381Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 282us result status StatusSuccess 2026-01-04T02:47:04.592947Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 
ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 38 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> TKesusTest::TestQuoterAccountResourcesPaced [GOOD] >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] |94.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayRS_Reboot [GOOD] Test command err: 2026-01-04T02:45:35.780612Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:35.782524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:35.782581Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:35.792248Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:35.792584Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:45:35.792868Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:35.832434Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:35.838731Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:35.838795Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:35.840138Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:45:35.840207Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:45:35.840260Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:45:35.840575Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:35.841635Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:35.841700Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:45:35.906563Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:35.936229Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:45:35.936404Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:35.936512Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:45:35.936554Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:45:35.936627Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:45:35.936669Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:35.936870Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:35.936913Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event 
TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:35.937127Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:45:35.937206Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:45:35.937259Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:35.937289Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:35.937316Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:45:35.937355Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:45:35.937383Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:45:35.937418Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:45:35.937455Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:35.937517Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:35.937547Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:35.937601Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:45:35.942754Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nI\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\010Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:45:35.942809Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:35.942892Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:45:35.943024Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:45:35.943061Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:45:35.943103Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:45:35.943167Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 2026-01-04T02:45:35.943201Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:45:35.943233Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:45:35.943258Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying 
to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:35.943490Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:35.943515Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:45:35.943542Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:45:35.943565Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:35.943600Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:45:35.943633Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:45:35.943661Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:45:35.943699Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:35.943717Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:35.955325Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:45:35.955378Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:35.955409Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:35.955449Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:45:35.955494Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:35.955919Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:35.955962Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:35.956000Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:45:35.956108Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:45:35.956135Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:45:35.956272Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:35.956310Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:45:35.956349Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:45:35.956383Z node 1 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:45:35.959019Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:45:35.959077Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:35.959279Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:35.959312Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:35.959355Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:35.959394Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:35.959438Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:35.959473Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:45:35.959499Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100000 ... t [32:349:2316]: {TEvReadSet step# 1000004 txid# 17 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 13} 2026-01-04T02:47:05.287237Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.287269Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 17 2026-01-04T02:47:05.287351Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 18 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 14} 2026-01-04T02:47:05.287381Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.287412Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 18 2026-01-04T02:47:05.287495Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 19 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 15} 2026-01-04T02:47:05.287528Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.287557Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 19 2026-01-04T02:47:05.287645Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 20 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2026-01-04T02:47:05.287676Z node 32 :TX_DATASHARD TRACE: 
datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.287707Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 20 2026-01-04T02:47:05.287801Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 21 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2026-01-04T02:47:05.287834Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.287863Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 21 2026-01-04T02:47:05.287953Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 22 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2026-01-04T02:47:05.287987Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.288020Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 22 2026-01-04T02:47:05.288099Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 23 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2026-01-04T02:47:05.288132Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.288162Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 23 2026-01-04T02:47:05.288249Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 24 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2026-01-04T02:47:05.288281Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.288313Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 24 2026-01-04T02:47:05.288400Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 25 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2026-01-04T02:47:05.288433Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.288466Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 25 2026-01-04T02:47:05.288551Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 26 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2026-01-04T02:47:05.288582Z node 32 
:TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.288615Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 26 2026-01-04T02:47:05.288718Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 27 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2026-01-04T02:47:05.288754Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.288798Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 27 2026-01-04T02:47:05.288860Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 28 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2026-01-04T02:47:05.288891Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.288923Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 28 2026-01-04T02:47:05.289011Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 29 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2026-01-04T02:47:05.289045Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.289076Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 29 2026-01-04T02:47:05.289162Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 30 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2026-01-04T02:47:05.289196Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.289227Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 30 2026-01-04T02:47:05.289310Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 31 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2026-01-04T02:47:05.289342Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.289371Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 31 2026-01-04T02:47:05.289454Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 32 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 
2026-01-04T02:47:05.289489Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.289521Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 32 2026-01-04T02:47:05.289599Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 33 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2026-01-04T02:47:05.289631Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.289663Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 33 2026-01-04T02:47:05.289743Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 34 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2026-01-04T02:47:05.289775Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.289823Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 34 2026-01-04T02:47:05.289910Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 35 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2026-01-04T02:47:05.289944Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.289976Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 35 2026-01-04T02:47:05.290062Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [32:805:2731], Recipient [32:349:2316]: {TEvReadSet step# 1000004 txid# 36 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-01-04T02:47:05.290096Z node 32 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:05.290126Z node 32 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 36 expect 17 31 31 25 31 22 25 13 28 24 27 23 29 28 29 29 25 28 28 4 28 25 2 25 27 27 27 2 2 22 - - actual 17 31 31 25 31 22 25 13 28 24 27 23 29 28 29 29 25 28 28 4 28 25 2 25 27 27 27 2 2 22 - - interm - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - |94.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_column_build/test-results/unittest/{meta.json ... results_accumulator.log} |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeDatashard::AnalyzeTwoTables [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:46:04.549583Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:04.655082Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:04.662294Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:04.662617Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:04.662749Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:05.044058Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:05.130777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:05.130908Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:05.177523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:05.256602Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:05.867402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:05.867856Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:05.867896Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:05.867927Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:05.868285Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:05.932342Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:06.434438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:09.061914Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:09.067545Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:09.070434Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:09.092313Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:09.092391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:09.118787Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:09.120482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:09.288434Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:09.288534Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:09.289825Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.290456Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.290967Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.292003Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.292275Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.292376Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.292584Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.292736Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.292826Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.318067Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:09.510928Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:09.530910Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:09.531008Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:09.555931Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:09.557290Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:09.557601Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:09.557670Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:09.557724Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:09.557798Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:09.557889Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:09.557946Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:09.558537Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:09.558879Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:46:09.561614Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:09.570837Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:46:09.570891Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:46:09.570976Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:09.583052Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:09.590720Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:09.593323Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:09.595951Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:09.596031Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:09.616950Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:46:09.617184Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:46:09.715732Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:09.743775Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:10.015239Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:10.016948Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:10.019612Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:10.022105Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:10.022170Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... ItY2Q1MzI2ZDUtZjQwN2RmOTYtM2YwNWU2MDY=, TxId: 2026-01-04T02:46:53.156615Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4579:3946], ActorId: [2:4580:3947], Got response [2:4581:3948] SUCCESS 2026-01-04T02:46:53.156814Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:46:53.170985Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:46:53.171064Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:49: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3369:3492] 2026-01-04T02:46:53.227391Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-04T02:46:53.227483Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:46:53.304075Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:4398:3816], schemeshard count = 1 2026-01-04T02:46:54.193352Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-04T02:46:54.197490Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4639:2462], ActorId: [2:4651:3989], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:46:54.201053Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4639:2462], ActorId: [2:4651:3989], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table2` 2026-01-04T02:46:54.201170Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4639:2462], ActorId: [2:4651:3989], Start read next stream part 2026-01-04T02:46:54.236918Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4668:3999]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:54.237264Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-04T02:46:54.237320Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:4668:3999], StatRequests.size() = 1 2026-01-04T02:47:00.858900Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4639:2462], ActorId: [2:4651:3989], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:47:00.859031Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4639:2462], ActorId: [2:4651:3989], Start read next stream part 2026-01-04T02:47:00.859280Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4639:2462], ActorId: [2:4749:4057], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:47:00.862352Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4639:2462], ActorId: [2:4651:3989], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:47:00.862409Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4639:2462], ActorId: [2:4651:3989], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTcwYjkxNGItNWVmMjg3NTctZTgwOWJiMWQtNGRhMDg5Ng==, TxId: 2026-01-04T02:47:00.862492Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33060, txId: 281474976720667] shutting down 2026-01-04T02:47:00.862554Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4639:2462], ActorId: [2:4749:4057], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table2` 2026-01-04T02:47:00.862628Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4639:2462], ActorId: [2:4749:4057], Start read next stream part 2026-01-04T02:47:00.907255Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:4768:4072]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:47:00.907445Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2026-01-04T02:47:00.907480Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 5, ReplyToActorId = [2:4768:4072], StatRequests.size() = 1 2026-01-04T02:47:05.077881Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: 
[2:4639:2462], ActorId: [2:4749:4057], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:47:05.078008Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4639:2462], ActorId: [2:4749:4057], Start read next stream part 2026-01-04T02:47:05.080021Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4838:4120], ActorId: [2:4839:4121], Starting query actor #1 [2:4840:4122] 2026-01-04T02:47:05.080085Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4839:4121], ActorId: [2:4840:4122], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:47:05.082503Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4639:2462], ActorId: [2:4749:4057], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:47:05.082563Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4639:2462], ActorId: [2:4749:4057], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MzhmZWU0MGEtZTBkNTcwNWUtZmRkMDg5MDItOTZlMGYwYTc=, TxId: 2026-01-04T02:47:05.082664Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33080, txId: 281474976720670] shutting down 2026-01-04T02:47:05.083446Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4839:4121], ActorId: [2:4840:4122], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NTdiYzlkZTMtYWY3NDEyMDQtMjNlOTM0YzItZTc0YjAyZGY=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:47:05.167263Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4839:4121], ActorId: [2:4840:4122], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTdiYzlkZTMtYWY3NDEyMDQtMjNlOTM0YzItZTc0YjAyZGY=, TxId: 2026-01-04T02:47:05.167335Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4839:4121], ActorId: [2:4840:4122], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTdiYzlkZTMtYWY3NDEyMDQtMjNlOTM0YzItZTc0YjAyZGY=, TxId: 2026-01-04T02:47:05.167633Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4838:4120], ActorId: [2:4839:4121], Got response [2:4840:4122] SUCCESS 2026-01-04T02:47:05.167906Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:47:05.194210Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 35] 2026-01-04T02:47:05.194286Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3369:3492] 2026-01-04T02:47:05.194961Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 6 ], ReplyToActorId[ [2:4872:4143]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-04T02:47:05.195300Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] 2026-01-04T02:47:05.195351Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:47:05.195617Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 6 ] 2026-01-04T02:47:05.195682Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 6 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:47:05.195759Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 1 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:47:05.196065Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 2 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 34] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-04T02:47:05.201271Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 1 ], RowsCount[ 0 ] 2026-01-04T02:47:05.202686Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 6 2026-01-04T02:47:05.202813Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 6 2026-01-04T02:47:05.203503Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 7 ], ReplyToActorId[ [2:4886:4157]], StatType[ 2 ], StatRequestsCount[ 2 ] 2026-01-04T02:47:05.203906Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 7 ] 2026-01-04T02:47:05.203969Z node 2 :STATISTICS DEBUG: service_impl.cpp:815: [TStatService::TEvNavigateKeySetResult] RequestId[ 7 ] resolve DatabasePath[ [OwnerId: 72057594046644480, LocalPathId: 38] ] 2026-01-04T02:47:05.204209Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 7 ] 2026-01-04T02:47:05.204263Z node 2 :STATISTICS DEBUG: service_impl.cpp:717: [TStatService::QueryStatistics] RequestId[ 7 ], Database[ Root/Database ], TablePath[ /Root/Database/.metadata/_statistics ] 2026-01-04T02:47:05.204326Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 3 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 1 ] 2026-01-04T02:47:05.204580Z node 2 :STATISTICS DEBUG: service_impl.cpp:658: [TStatService::LoadStatistics] QueryId[ 4 ], PathId[ [OwnerId: 72075186224037897, LocalPathId: 35] ], StatType[ 2 ], ColumnTag[ 2 ] 2026-01-04T02:47:05.207866Z node 2 :STATISTICS ERROR: service_impl.cpp:693: [TStatService::ReadRowsResponse] QueryId[ 3 ], RowsCount[ 0 ] 2026-01-04T02:47:05.208871Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 7 2026-01-04T02:47:05.210259Z node 2 :STATISTICS DEBUG: service_impl.cpp:1158: TEvLoadStatisticsQueryResponse, request id = 7 |94.5%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TInterconnectTest::TestBlobEvent220BytesPreSerialized >> 
KqpCompileFallback::FallbackToVersion1Success [GOOD] >> KqpCost::VectorIndexLookup+useSink [GOOD] >> KqpCompileFallback::FallbackWithScanQuery [GOOD] >> KqpCompileFallback::FallbackWithPreparedQuery [GOOD] >> TInterconnectTest::TestBlobEvent220BytesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizes >> TKesusTest::TestQuoterAccountResourcesDeduplicateClient [GOOD] >> TKesusTest::TestQuoterAccountResourcesForgetClient >> TInterconnectTest::TestBlobEventDifferentSizes [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized >> TTransferTests::Create_Disabled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackToVersion1Success [GOOD] Test command err: Trying to start YDB, gRPC: 7247, MsgBus: 19689 2026-01-04T02:47:02.189338Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332457339935003:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:02.189823Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:02.476980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:02.477106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:02.522314Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:02.528630Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:02.551053Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:02.552708Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332457339934899:2081] 1767494822184928 != 1767494822184931 2026-01-04T02:47:02.764094Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:02.764128Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:02.764134Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:02.764215Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:02.805504Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:03.194507Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:03.357559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:03.425990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:03.564929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:03.707387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:03.771898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:04.880264Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332465929871365:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:04.880393Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:04.880773Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332465929871374:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:04.880832Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:05.474604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.506112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.530907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.560176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.588762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.618504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.652105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.714029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.796536Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332470224839544:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:05.796654Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:05.796765Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332470224839549:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:05.796889Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332470224839551:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:05.796923Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:05.801987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:05.812558Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332470224839553:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:05.886158Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332470224839606:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:07.188624Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332457339935003:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:07.188689Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:07.385769Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-04T02:47:07.387869Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C713A2C0198 2026-01-04T02:47:07.387927Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7591332478814774502:2531], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", keepInCache: 0, split: 0{ TraceId: 01ke3edfbrebkjwj4bn4p7cyw2, Database: /Root, SessionId: ydb://session/3?node_id=1&id=N2E5NGM3NDUtODliNzYxOTMtZTE4Y2IyNC1lZTljOTJmNw==, PoolId: default, IsStreamingQuery: 0} 2026-01-04T02:47:07.388072Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-04T02:47:07.388135Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7591332478814774502:2531], queueSize: 1 2026-01-04T02:47:07.388548Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:109: Enforced SQL version 1, current sql version: 0 queryText: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n 2026-01-04T02:47:07.388596Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7591332478814774502:2531], compileActor: [1:7591332478814774510:2536] 2026-01-04T02:47:07.388640Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-04T02:47:07.388703Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7591332478814774510:2536], cluster: db, database: /Root, text: "\n 
SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", startTime: 2026-01-04T02:47:07.388608Z 2026-01-04T02:47:07.552695Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7591332478814774510:2536]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1767494827","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"c05b04c2-7263f649-7dd74057-ca2fd810","version":"1.0"} 2026-01-04T02:47:07.553171Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7591332478814774510:2536], duration: 0.164538s 2026-01-04T02:47:07.553210Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7591332478814774510:2536], owner: [1:7591332465929871326:2384], status: SUCCESS, issues: , uid: c05b04c2-7263f649-7dd74057-ca2fd810 2026-01-04T02:47:07.553316Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7591332478814774502:2531], status: SUCCESS, compileActor: [1:7591332478814774510:2536] 2026-01-04T02:47:07.553376Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7591332478814774502:2531], queryUid: c05b04c2-7263f649-7dd74057-ca2fd810, status:SUCCESS |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> TTransferTests::Create >> TInterconnectTest::TestBlobEventDifferentSizesPreSerialized [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw >> KqpSinkLocks::TInvalidate >> KqpSinkMvcc::WriteSkewInsert+IsOlap >> KqpSinkMvcc::TxReadsCommitted-IsOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackWithScanQuery [GOOD] 
Test command err: Trying to start YDB, gRPC: 25488, MsgBus: 15901 2026-01-04T02:47:02.166128Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332459098124976:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:02.166221Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:02.441582Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:02.454913Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:02.455056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:02.488161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:02.524452Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:02.699553Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:02.763358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:02.763450Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:02.763457Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:02.763554Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:03.175358Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:03.344444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:03.413629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:03.571088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:03.700574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:03.769556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.513424Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332471983028707:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:05.513529Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:05.513854Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332471983028717:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:05.513927Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:05.778779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.806788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.834753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.861997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.889712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.919718Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.952195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.999497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:06.073247Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332476277996881:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:06.073335Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:06.073457Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332476277996886:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:06.073562Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332476277996888:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:06.073608Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:06.076759Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:06.085744Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332476277996890:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:06.147023Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332476277996941:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:07.166343Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332459098124976:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:07.166455Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:07.399797Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-04T02:47:07.399914Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C41ED374568 2026-01-04T02:47:07.399959Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7591332480572964542:2530], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n ", keepInCache: 1, split: 0{ TraceId: 01ke3edfc6125s77bvd1h75f26, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODIwY2VjZi1iZTFiMDExMi0yZDgyODY1OS1jZjFlY2RiZQ==, PoolId: default, IsStreamingQuery: 0} 2026-01-04T02:47:07.400121Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-04T02:47:07.400157Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7591332480572964542:2530], queueSize: 1 2026-01-04T02:47:07.400524Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:109: Enforced SQL version 1, current sql version: 0 queryText: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n 2026-01-04T02:47:07.400563Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7591332480572964542:2530], compileActor: [1:7591332480572964551:2536] 2026-01-04T02:47:07.400615Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-04T02:47:07.400674Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: 
[1:7591332480572964551:2536], cluster: db, database: /Root, text: "\n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n ", startTime: 2026-01-04T02:47:07.400582Z 2026-01-04T02:47:07.547354Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7591332480572964551:2536]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1767494827","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\\n ","query_type":"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (0, +∞)\"],\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"columns\":[\"Key\",\"Value\"],\"scan_by\":[\"Key (0, +∞)\"],\"type\":\"Scan\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (0, +∞)\"],\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"82b990a7-2664be50-b08ba4ed-9d19dfef","version":"1.0"} 2026-01-04T02:47:07.547711Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7591332480572964551:2536], duration: 0.147110s 2026-01-04T02:47:07.547746Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7591332480572964551:2536], owner: [1:7591332471983028666:2383], status: SUCCESS, issues: , uid: 82b990a7-2664be50-b08ba4ed-9d19dfef 2026-01-04T02:47:07.547857Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7591332480572964542:2530], status: SUCCESS, compileActor: [1:7591332480572964551:2536] 2026-01-04T02:47:07.548037Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:719: Insert query into compile cache, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key > 0;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" 
}, "rollback_settings": { } } }} 2026-01-04T02:47:07.548218Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7591332480572964542:2530], queryUid: 82b990a7-2664be50-b08ba4ed-9d19dfef, status:SUCCESS |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink >> KqpSinkTx::ExplicitTcl >> KqpSinkTx::OlapLocksAbortOnCommit >> KqpSinkTx::TIsolationSettingTest+IsOlap-UsePragma >> KqpSinkMvcc::DirtyReads-IsOlap >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 >> KqpSinkLocks::InvalidateOlapOnCommit >> KqpTx::RollbackManyTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::VectorIndexLookup+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 28495, MsgBus: 61828 2026-01-04T02:46:43.086968Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332376099462151:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:43.087370Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:43.282874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:43.282970Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:43.310683Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:43.317028Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:43.317527Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:43.318292Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332376099462046:2081] 1767494803083024 != 1767494803083027 2026-01-04T02:46:43.407030Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:43.407089Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:43.407102Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:43.407211Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:43.553629Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:43.822102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:43.896816Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.020697Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.123611Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:44.163855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.226853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.131792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332388984365812:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.131916Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.132248Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332388984365822:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.132269Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.396478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.430516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.460507Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.489832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.522038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.559581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.621861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.667661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.750766Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332388984366693:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.750870Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.751177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332388984366698:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.751221Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332388984366699:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.751348Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.755361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:46.766014Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332388984366702:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:46:46.863075Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332388984366753:3776] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:48.053956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:48.086227Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332376099462151:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:48.086265Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migratio ... ";9223372036854775822u;6u]] /Root/Vectors/vector_idx_covered/indexImplPostingTable: [[["bR\2"];[1];[10];9223372036854775815u];[["eQ\2"];[4];[40];9223372036854775815u];[["jX\2"];[9];[90];9223372036854775815u];[["mW\2"];[12];[120];9223372036854775815u];[["bR\2"];[27];[270];9223372036854775815u];[["eQ\2"];[30];[300];9223372036854775815u];[["jX\2"];[35];[350];9223372036854775815u];[["mW\2"];[38];[380];9223372036854775815u];[["bR\2"];[53];[530];9223372036854775815u];[["eQ\2"];[56];[560];9223372036854775815u];[["jX\2"];[61];[610];9223372036854775815u];[["mW\2"];[64];[640];9223372036854775815u];[["bR\2"];[79];[790];9223372036854775815u];[["eQ\2"];[82];[820];9223372036854775815u];[["jX\2"];[87];[870];9223372036854775815u];[["mW\2"];[90];[900];9223372036854775815u];[["dZ\2"];[3];[30];9223372036854775816u];[["gY\2"];[6];[60];9223372036854775816u];[["dZ\2"];[29];[290];9223372036854775816u];[["gY\2"];[32];[320];9223372036854775816u];[["dZ\2"];[55];[550];9223372036854775816u];[["gY\2"];[58];[580];9223372036854775816u];[["dZ\2"];[81];[810];9223372036854775816u];[["gY\2"];[84];[840];9223372036854775816u];[["hP\2"];[7];[70];9223372036854775817u];[["pV\2"];[15];[150];9223372036854775817u];[["hP\2"];[33];[330];9223372036854775817u];[["pV\2"];[41];[410];9223372036854775817u];[["hP\2"];[59];[590];9223372036854775817u];[["pV\2"];[67];[670];9223372036854775817u];[["hP\2"];[85];[850];9223372036854775817u];[["pV\2"];[93];[930];9223372036854775817u];[["cI\2"];[2];[20];9223372036854775818u];[["kO\2"];[10];[100];9223372036854775818u];[["sU\2"];[18];[180];9223372036854775818u];[["cI\2"];[28];[280];9223372036854775818u];[["kO\2"];[36];[360];9223372036854775818u];[["sU\2"];[44];[440];9223372036854775818u];[["cI\2"];[54];[540];9223372036854775818u];[["kO\2"];[62];[620];9223372036854775818u];[["sU\2"];[70];[700];9223372036854775818u];[["cI\2"];[80];[800];9223372036854775818u];[["kO\2"];[88];[880];9223372036854775818u];[["sU\2"];[96];[960];9223372036854775818u];[["aA\2"];[0];[0];9223372036854775819u];[["iG\2"];[8];[80];9223372036854775819u];[["lF\2"];[11];[110];9223372036854775819u];[["qM\2"];[16];[160];9223372036854775819u];[["tL\2"];[19];[190];9223372036854775819u];[["wK\2"];[22];[220];9223372036854775819u];[["yS\2"];[24];[240];9223372036854775819u];[["aA\2"];[26];[260];9223372036854775819u];[["iG\2"];[34];[340
];9223372036854775819u];[["lF\2"];[37];[370];9223372036854775819u];[["qM\2"];[42];[420];9223372036854775819u];[["tL\2"];[45];[450];9223372036854775819u];[["wK\2"];[48];[480];9223372036854775819u];[["yS\2"];[50];[500];9223372036854775819u];[["aA\2"];[52];[520];9223372036854775819u];[["iG\2"];[60];[600];9223372036854775819u];[["lF\2"];[63];[630];9223372036854775819u];[["qM\2"];[68];[680];9223372036854775819u];[["tL\2"];[71];[710];9223372036854775819u];[["wK\2"];[74];[740];9223372036854775819u];[["yS\2"];[76];[760];9223372036854775819u];[["aA\2"];[78];[780];9223372036854775819u];[["iG\2"];[86];[860];9223372036854775819u];[["lF\2"];[89];[890];9223372036854775819u];[["qM\2"];[94];[940];9223372036854775819u];[["tL\2"];[97];[970];9223372036854775819u];[["fH\2"];[5];[50];9223372036854775820u];[["nN\2"];[13];[130];9223372036854775820u];[["vT\2"];[21];[210];9223372036854775820u];[["fH\2"];[31];[310];9223372036854775820u];[["nN\2"];[39];[390];9223372036854775820u];[["vT\2"];[47];[470];9223372036854775820u];[["fH\2"];[57];[570];9223372036854775820u];[["nN\2"];[65];[650];9223372036854775820u];[["vT\2"];[73];[730];9223372036854775820u];[["fH\2"];[83];[830];9223372036854775820u];[["nN\2"];[91];[910];9223372036854775820u];[["vT\2"];[99];[990];9223372036854775820u];[["uC\2"];[20];[200];9223372036854775821u];[["xB\2"];[23];[230];9223372036854775821u];[["uC\2"];[46];[460];9223372036854775821u];[["xB\2"];[49];[490];9223372036854775821u];[["uC\2"];[72];[720];9223372036854775821u];[["xB\2"];[75];[750];9223372036854775821u];[["uC\2"];[98];[980];9223372036854775821u];[["oE\2"];[14];[140];9223372036854775822u];[["rD\2"];[17];[170];9223372036854775822u];[["zJ\2"];[25];[250];9223372036854775822u];[["oE\2"];[40];[400];9223372036854775822u];[["rD\2"];[43];[430];9223372036854775822u];[["zJ\2"];[51];[510];9223372036854775822u];[["oE\2"];[66];[660];9223372036854775822u];[["rD\2"];[69];[690];9223372036854775822u];[["zJ\2"];[77];[770];9223372036854775822u];[["oE\2"];[92];[920];9223372036854775822u];[["rD\2"];[95];[950];9223372036854775822u]] 2026-01-04T02:47:05.832276Z node 1 :BUILD_INDEX INFO: schemeshard_build_index__progress.cpp:2408: TIndexBuilder::TXTYPE_PROGRESS_INDEX_BUILD: TTxReply : TTxBilling, id# 281474976715776 /Root/Vectors: 
[[["aA\2"];[0];[0];[0]];[["bR\2"];[1];[1];[10]];[["cI\2"];[2];[2];[20]];[["dZ\2"];[3];[3];[30]];[["eQ\2"];[4];[4];[40]];[["fH\2"];[5];[5];[50]];[["gY\2"];[6];[6];[60]];[["hP\2"];[7];[7];[70]];[["iG\2"];[8];[8];[80]];[["jX\2"];[9];[9];[90]];[["kO\2"];[10];[0];[100]];[["lF\2"];[11];[1];[110]];[["mW\2"];[12];[2];[120]];[["nN\2"];[13];[3];[130]];[["oE\2"];[14];[4];[140]];[["pV\2"];[15];[5];[150]];[["qM\2"];[16];[6];[160]];[["rD\2"];[17];[7];[170]];[["sU\2"];[18];[8];[180]];[["tL\2"];[19];[9];[190]];[["uC\2"];[20];[0];[200]];[["vT\2"];[21];[1];[210]];[["wK\2"];[22];[2];[220]];[["xB\2"];[23];[3];[230]];[["yS\2"];[24];[4];[240]];[["zJ\2"];[25];[5];[250]];[["aA\2"];[26];[6];[260]];[["bR\2"];[27];[7];[270]];[["cI\2"];[28];[8];[280]];[["dZ\2"];[29];[9];[290]];[["eQ\2"];[30];[0];[300]];[["fH\2"];[31];[1];[310]];[["gY\2"];[32];[2];[320]];[["hP\2"];[33];[3];[330]];[["iG\2"];[34];[4];[340]];[["jX\2"];[35];[5];[350]];[["kO\2"];[36];[6];[360]];[["lF\2"];[37];[7];[370]];[["mW\2"];[38];[8];[380]];[["nN\2"];[39];[9];[390]];[["oE\2"];[40];[0];[400]];[["pV\2"];[41];[1];[410]];[["qM\2"];[42];[2];[420]];[["rD\2"];[43];[3];[430]];[["sU\2"];[44];[4];[440]];[["tL\2"];[45];[5];[450]];[["uC\2"];[46];[6];[460]];[["vT\2"];[47];[7];[470]];[["wK\2"];[48];[8];[480]];[["xB\2"];[49];[9];[490]];[["yS\2"];[50];[0];[500]];[["zJ\2"];[51];[1];[510]];[["aA\2"];[52];[2];[520]];[["bR\2"];[53];[3];[530]];[["cI\2"];[54];[4];[540]];[["dZ\2"];[55];[5];[550]];[["eQ\2"];[56];[6];[560]];[["fH\2"];[57];[7];[570]];[["gY\2"];[58];[8];[580]];[["hP\2"];[59];[9];[590]];[["iG\2"];[60];[0];[600]];[["jX\2"];[61];[1];[610]];[["kO\2"];[62];[2];[620]];[["lF\2"];[63];[3];[630]];[["mW\2"];[64];[4];[640]];[["nN\2"];[65];[5];[650]];[["oE\2"];[66];[6];[660]];[["pV\2"];[67];[7];[670]];[["qM\2"];[68];[8];[680]];[["rD\2"];[69];[9];[690]];[["sU\2"];[70];[0];[700]];[["tL\2"];[71];[1];[710]];[["uC\2"];[72];[2];[720]];[["vT\2"];[73];[3];[730]];[["wK\2"];[74];[4];[740]];[["xB\2"];[75];[5];[750]];[["yS\2"];[76];[6];[760]];[["zJ\2"];[77];[7];[770]];[["aA\2"];[78];[8];[780]];[["bR\2"];[79];[9];[790]];[["cI\2"];[80];[0];[800]];[["dZ\2"];[81];[1];[810]];[["eQ\2"];[82];[2];[820]];[["fH\2"];[83];[3];[830]];[["gY\2"];[84];[4];[840]];[["hP\2"];[85];[5];[850]];[["iG\2"];[86];[6];[860]];[["jX\2"];[87];[7];[870]];[["kO\2"];[88];[8];[880]];[["lF\2"];[89];[9];[890]];[["mW\2"];[90];[0];[900]];[["nN\2"];[91];[1];[910]];[["oE\2"];[92];[2];[920]];[["pV\2"];[93];[3];[930]];[["qM\2"];[94];[4];[940]];[["rD\2"];[95];[5];[950]];[["sU\2"];[96];[6];[960]];[["tL\2"];[97];[7];[970]];[["uC\2"];[98];[8];[980]];[["vT\2"];[99];[9];[990]]] /Root/Vectors/vector_idx_prefixed/indexImplLevelTable: 
[["nG\2";202u;201u];["jQ\2";203u;201u];["rD\2";9223372036854776411u;202u];["kI\2";9223372036854776412u;202u];["kO\2";9223372036854776413u;203u];["iT\2";9223372036854776414u;203u];["hV\2";205u;204u];["pK\2";206u;204u];["cV\2";9223372036854776417u;205u];["mW\2";9223372036854776418u;205u];["nN\2";9223372036854776419u;206u];["sI\2";9223372036854776420u;206u];["gQ\2";208u;207u];["oF\2";209u;207u];["gL\2";9223372036854776423u;208u];["hU\2";9223372036854776424u;208u];["mH\2";9223372036854776425u;209u];["rD\2";9223372036854776426u;209u];["rD\2";211u;210u];["jQ\2";212u;210u];["lF\2";9223372036854776429u;211u];["uC\2";9223372036854776430u;211u];["cV\2";9223372036854776431u;212u];["mP\2";9223372036854776432u;212u];["iS\2";214u;213u];["qK\2";215u;213u];["hU\2";9223372036854776435u;214u];["kO\2";9223372036854776436u;214u];["qM\2";9223372036854776437u;215u];["sH\2";9223372036854776438u;215u];["iV\2";217u;216u];["rH\2";218u;216u];["dZ\2";9223372036854776441u;217u];["kT\2";9223372036854776442u;217u];["mK\2";9223372036854776443u;218u];["vE\2";9223372036854776444u;218u];["nH\2";220u;219u];["jS\2";221u;219u];["mJ\2";9223372036854776447u;220u];["rD\2";9223372036854776448u;220u];["fU\2";9223372036854776449u;221u];["oR\2";9223372036854776450u;221u];["jR\2";223u;222u];["sH\2";224u;222u];["mP\2";9223372036854776453u;223u];["fU\2";9223372036854776454u;223u];["vG\2";9223372036854776455u;224u];["pI\2";9223372036854776456u;224u];["nG\2";226u;225u];["jR\2";227u;225u];["uC\2";9223372036854776459u;226u];["lH\2";9223372036854776460u;226u];["gY\2";9223372036854776461u;227u];["kQ\2";9223372036854776462u;227u];["rF\2";229u;228u];["jS\2";230u;228u];["pG\2";9223372036854776465u;229u];["xB\2";9223372036854776466u;229u];["nP\2";9223372036854776467u;230u];["eV\2";9223372036854776468u;230u]] /Root/Vectors/vector_idx_prefixed/indexImplPostingTable: 
[[[20];9223372036854776411u];[[40];9223372036854776411u];[[0];9223372036854776412u];[[50];9223372036854776412u];[[60];9223372036854776412u];[[10];9223372036854776413u];[[70];9223372036854776413u];[[80];9223372036854776413u];[[30];9223372036854776414u];[[90];9223372036854776414u];[[1];9223372036854776417u];[[81];9223372036854776417u];[[41];9223372036854776418u];[[61];9223372036854776418u];[[21];9223372036854776419u];[[31];9223372036854776419u];[[91];9223372036854776419u];[[11];9223372036854776420u];[[51];9223372036854776420u];[[71];9223372036854776420u];[[2];9223372036854776423u];[[62];9223372036854776423u];[[12];9223372036854776424u];[[32];9223372036854776424u];[[82];9223372036854776424u];[[22];9223372036854776425u];[[42];9223372036854776425u];[[52];9223372036854776425u];[[72];9223372036854776426u];[[92];9223372036854776426u];[[63];9223372036854776429u];[[23];9223372036854776430u];[[43];9223372036854776430u];[[3];9223372036854776431u];[[53];9223372036854776431u];[[13];9223372036854776432u];[[33];9223372036854776432u];[[73];9223372036854776432u];[[83];9223372036854776432u];[[93];9223372036854776432u];[[4];9223372036854776435u];[[64];9223372036854776435u];[[84];9223372036854776435u];[[44];9223372036854776436u];[[54];9223372036854776436u];[[24];9223372036854776437u];[[34];9223372036854776437u];[[94];9223372036854776437u];[[14];9223372036854776438u];[[74];9223372036854776438u];[[55];9223372036854776441u];[[15];9223372036854776442u];[[35];9223372036854776442u];[[85];9223372036854776442u];[[5];9223372036854776443u];[[45];9223372036854776443u];[[65];9223372036854776443u];[[25];9223372036854776444u];[[75];9223372036854776444u];[[95];9223372036854776444u];[[16];9223372036854776447u];[[26];9223372036854776447u];[[76];9223372036854776447u];[[86];9223372036854776447u];[[46];9223372036854776448u];[[66];9223372036854776448u];[[6];9223372036854776449u];[[56];9223372036854776449u];[[36];9223372036854776450u];[[96];9223372036854776450u];[[7];9223372036854776453u];[[47];9223372036854776453u];[[57];9223372036854776453u];[[67];9223372036854776453u];[[27];9223372036854776454u];[[87];9223372036854776454u];[[17];9223372036854776455u];[[77];9223372036854776455u];[[37];9223372036854776456u];[[97];9223372036854776456u];[[98];9223372036854776459u];[[8];9223372036854776460u];[[48];9223372036854776460u];[[68];9223372036854776460u];[[78];9223372036854776460u];[[58];9223372036854776461u];[[18];9223372036854776462u];[[28];9223372036854776462u];[[38];9223372036854776462u];[[88];9223372036854776462u];[[19];9223372036854776465u];[[69];9223372036854776465u];[[89];9223372036854776465u];[[49];9223372036854776466u];[[39];9223372036854776467u];[[59];9223372036854776467u];[[99];9223372036854776467u];[[9];9223372036854776468u];[[29];9223372036854776468u];[[79];9223372036854776468u]] /Root/Vectors/vector_idx_prefixed/indexImplPrefixTable: [[[0];201u];[[1];204u];[[2];207u];[[3];210u];[[4];213u];[[5];216u];[[6];219u];[[7];222u];[[8];225u];[[9];228u]] >> KqpTx::TooManyTx >> KqpSinkMvcc::TxReadsCommitted+IsOlap >> KqpSinkTx::TIsolationSettingTest-IsOlap-UsePragma >> KqpSnapshotIsolation::TSnapshotTwoUpdateOltp+UpdateAfterInsert >> KqpTx::ExplicitTcl >> KqpCompileFallback::FallbackMechanismWorks [GOOD] >> KqpSinkMvcc::LostUpdate+IsOlap >> KqpSnapshotIsolation::TSimpleOltp >> KqpTx::CommitRequired >> KqpTx::DeferredEffects >> KqpCompileFallback::FallbackMechanismWorksEnforceSqlVersionV1False [GOOD] >> KqpCompileFallback::NoFallbackWhenSqlVersion1 [GOOD] |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackWithPreparedQuery [GOOD] Test command err: Trying to start YDB, gRPC: 20544, MsgBus: 13730 2026-01-04T02:47:02.166828Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332456590937760:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:02.166915Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:02.439267Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:02.458772Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:02.458933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:02.496359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:02.577524Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:02.611419Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:02.763682Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:02.763719Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:02.763725Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:02.763805Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:03.175229Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:03.341237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:03.405152Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:03.563879Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-04T02:47:03.720979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:03.778165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.147294Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332469475841485:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:05.147417Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:05.147694Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332469475841495:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:05.147749Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:05.474613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.505730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.534005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.567458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.597554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.630402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.664466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.703437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.793879Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332469475842374:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:05.793964Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:05.794056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332469475842379:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:05.794121Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332469475842381:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:05.794150Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:05.801554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:05.811083Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332469475842383:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:05.873733Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332469475842434:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:07.167265Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332456590937760:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:07.167371Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:07.392746Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-04T02:47:07.392830Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C8DE29ABC48 2026-01-04T02:47:07.392866Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7591332478065777333:2531], queryUid: , queryText: "\n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n ", keepInCache: 1, split: 0{ TraceId: 01ke3edfbz6qgg8pcn9renejzb, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODdkZTY0MGUtOWZlZWJmYi0zZGQ2Y2VmZC04OGQzMzM4Mw==, PoolId: default, IsStreamingQuery: 0} 2026-01-04T02:47:07.392984Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-04T02:47:07.393024Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7591332478065777333:2531], queueSize: 1 2026-01-04T02:47:07.393407Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:109: Enforced SQL version 1, current sql version: 0 queryText: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n 2026-01-04T02:47:07.393439Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7591332478065777333:2531], compileActor: [1:7591332478065777341:2536] 2026-01-04T02:47:07.393464Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-04T02:47:07.393509Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7591332478065777341:2536], cluster: db, database: /Root, text: "\n 
SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n ", startTime: 2026-01-04T02:47:07.393443Z 2026-01-04T02:47:07.422980Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:662: Compilation with SqlVersion = 1 failed, retrying with SqlVersion = 0, self: [1:7591332478065777341:2536], database: /Root, text: "\n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n " 2026-01-04T02:47:07.617477Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7591332478065777341:2536]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1767494827","query_text":"\\n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"0","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"9dbb668e-246c020-c8fe3fcd-418f2ff0","version":"1.0"} 2026-01-04T02:47:07.618012Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7591332478065777341:2536], duration: 0.224545s 2026-01-04T02:47:07.618051Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7591332478065777341:2536], owner: [1:7591332469475841446:2384], status: SUCCESS, issues: , uid: 9dbb668e-246c020-c8fe3fcd-418f2ff0 2026-01-04T02:47:07.618170Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7591332478065777333:2531], status: SUCCESS, compileActor: [1:7591332478065777341:2536] 2026-01-04T02:47:07.618348Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:719: Insert query into compile cache, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, 
QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-04T02:47:07.618491Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7591332478065777333:2531], queryUid: 9dbb668e-246c020-c8fe3fcd-418f2ff0, status:SUCCESS 2026-01-04T02:47:07.628656Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1223: Served query from cache by uid, sender: [1:7591332478065777333:2531], queryUid: 9dbb668e-246c020-c8fe3fcd-418f2ff0 |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::NoFallbackWhenSqlVersionNotSet [GOOD] >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-NoDbAdmin-system [GOOD] >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-system >> KqpLocksTricky::TestNoLocksIssue+withSink >> KqpLimits::TooBigKey+useSink [GOOD] >> KqpLimits::TooBigKey-useSink |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestBlobEventDifferentSizesPreSerializedAndRaw [GOOD] |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackMechanismWorks [GOOD] Test command err: Trying to start YDB, gRPC: 22457, MsgBus: 28604 2026-01-04T02:47:04.579098Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332465302230002:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:04.579152Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:04.825954Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:04.852412Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:04.852535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:04.926351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:04.930412Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:05.012797Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:05.020587Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:05.020611Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:05.020622Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:05.020708Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-01-04T02:47:05.405717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:05.453734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.587438Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:05.610377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.742978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.800307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.280109Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332478187133721:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.280246Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.280614Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332478187133731:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.280697Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.509488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.534122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.557112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.580293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.609398Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.640180Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.688903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.725882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.781859Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332478187134600:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.781943Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.781993Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332478187134605:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.782155Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332478187134607:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.782204Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.785625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:07.795751Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332478187134608:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:47:07.887794Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332478187134660:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:08.976535Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-04T02:47:08.976971Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007CB58B5E1078 2026-01-04T02:47:08.977011Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7591332482482102256:2529], queryUid: , queryText: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n ", keepInCache: 0, split: 0{ TraceId: 01ke3edgxf182qg228faxj9bdm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTllOGYyZWQtMmQ2MWEzMDctNWY4YWUxZDgtZjIzYjY2MWI=, PoolId: default, IsStreamingQuery: 0} 2026-01-04T02:47:08.977158Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-04T02:47:08.977209Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7591332482482102256:2529], queueSize: 1 2026-01-04T02:47:08.977717Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:109: Enforced SQL version 1, current sql version: 0 queryText: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n 2026-01-04T02:47:08.977767Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7591332482482102256:2529], compileActor: [1:7591332482482102264:2534] 2026-01-04T02:47:08.977783Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-04T02:47:08.977839Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7591332482482102264:2534], cluster: db, database: /Root, text: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n ", startTime: 2026-01-04T02:47:08.977759Z 2026-01-04T02:47:08.997112Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:662: Compilation with SqlVersion = 1 failed, retrying with SqlVersion = 0, self: [1:7591332482482102264:2534], database: /Root, text: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n " 2026-01-04T02:47:09.171601Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: 
[[1:7591332482482102264:2534]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1767494829","query_text":"\\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"0","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key (-∞, +∞)\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"columns\":[\"Key\",\"Value\"],\"scan_by\":[\"Key (-∞, +∞)\"],\"limit\":\"1\",\"type\":\"FullScan\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1\",\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key (-∞, +∞)\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"2555ed2e-5d7dce95-df277a69-efe5e53e","version":"1.0"} 2026-01-04T02:47:09.172132Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7591332482482102264:2534], duration: 0.194347s 2026-01-04T02:47:09.172165Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7591332482482102264:2534], owner: [1:7591332478187133681:2383], status: SUCCESS, issues: , uid: 2555ed2e-5d7dce95-df277a69-efe5e53e 2026-01-04T02:47:09.172284Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7591332482482102256:2529], status: SUCCESS, compileActor: [1:7591332482482102264:2534] 2026-01-04T02:47:09.172368Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7591332482482102256:2529], queryUid: 2555ed2e-5d7dce95-df277a69-efe5e53e, status:SUCCESS |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest ------- 
[TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::FallbackMechanismWorksEnforceSqlVersionV1False [GOOD] Test command err: Trying to start YDB, gRPC: 24438, MsgBus: 4281 2026-01-04T02:47:04.481210Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332468040610296:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:04.481859Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:04.688507Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:04.709699Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:04.709857Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:04.764733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:04.773840Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:04.775776Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332468040610192:2081] 1767494824469702 != 1767494824469705 2026-01-04T02:47:04.853827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:04.879202Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:04.879226Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:04.879232Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:04.879323Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:05.283274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:05.290359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:05.325732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.437989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.485624Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:05.592008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.651784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.207252Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332480925513948:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.207333Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.207557Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332480925513958:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.207605Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.447477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.473177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.499694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.523757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.546942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.574365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.604931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.646480Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.704912Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332480925514827:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.705014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.705218Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332480925514833:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.705253Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332480925514832:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.705270Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.708868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:07.717329Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332480925514836:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:47:07.803042Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332480925514887:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:08.797470Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-04T02:47:08.797656Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007CB2E4629A08 2026-01-04T02:47:08.797702Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7591332485220482485:2529], queryUid: , queryText: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n ", keepInCache: 0, split: 0{ TraceId: 01ke3edgqwbyz5vb4vc55t0pr4, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZTJkODRiMmQtNzI3ODRmMmYtNmFmMDMzMDQtNTg1NzdjOTI=, PoolId: default, IsStreamingQuery: 0} 2026-01-04T02:47:08.798007Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-04T02:47:08.798068Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7591332485220482485:2529], queueSize: 1 2026-01-04T02:47:08.798673Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7591332485220482485:2529], compileActor: [1:7591332485220482493:2534] 2026-01-04T02:47:08.798698Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-04T02:47:08.798733Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7591332485220482493:2534], cluster: db, database: /Root, text: "\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\n ", startTime: 2026-01-04T02:47:08.798671Z 2026-01-04T02:47:08.917396Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7591332485220482493:2534]]: Built the replay message 
{"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1767494828","query_text":"\\n SELECT * FROM [/Root/KeyValue] LIMIT 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"0","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1\",\"Name\":\"TableFullScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"ReadRangesPointPrefixLen\":\"0\",\"E-Rows\":\"0\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key (-∞, +∞)\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"columns\":[\"Key\",\"Value\"],\"scan_by\":[\"Key (-∞, +∞)\"],\"limit\":\"1\",\"type\":\"FullScan\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":5,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1\",\"Name\":\"TableFullScan\",\"E-Rows\":\"0\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key (-∞, +∞)\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TableFullScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"d3afe492-965e57c4-a7aaa3f4-35eed114","version":"1.0"} 2026-01-04T02:47:08.918096Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7591332485220482493:2534], duration: 0.119392s 2026-01-04T02:47:08.918161Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7591332485220482493:2534], owner: [1:7591332480925513908:2383], status: SUCCESS, issues: , uid: d3afe492-965e57c4-a7aaa3f4-35eed114 2026-01-04T02:47:08.918283Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7591332485220482485:2529], status: SUCCESS, compileActor: [1:7591332485220482493:2534] 2026-01-04T02:47:08.918360Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7591332485220482485:2529], queryUid: d3afe492-965e57c4-a7aaa3f4-35eed114, status:SUCCESS ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> 
KqpCompileFallback::NoFallbackWhenSqlVersion1 [GOOD] Test command err: Trying to start YDB, gRPC: 61747, MsgBus: 26241 2026-01-04T02:47:04.330228Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332465456817938:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:04.330505Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:04.523903Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:04.543523Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:04.543644Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:04.586422Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:04.624366Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:04.628423Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332465456817834:2081] 1767494824326404 != 1767494824326407 2026-01-04T02:47:04.744686Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:04.752242Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:04.752262Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:04.752272Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:04.752451Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:05.203553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:05.258447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.334416Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:05.388044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.531261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.596813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.147613Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332478341721600:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.147726Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.148053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332478341721610:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.148094Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.442615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.469584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.496459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.519975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.545198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.571082Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.600429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.659394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.714983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332478341722485:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.715035Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.715152Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332478341722490:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.715204Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332478341722492:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.715232Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.718174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:07.726773Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332478341722494:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:07.804182Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332478341722545:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:08.918446Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-04T02:47:08.918549Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C3CCF6C48D8 2026-01-04T02:47:08.918585Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7591332482636690142:2529], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", keepInCache: 0, split: 0{ TraceId: 01ke3edgvn4r775dt5q7xbvk1s, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZWQ1YmU5Yy0zOTYzZDI4ZC01YmVlNWI1OS05ZjRhMWM0NA==, PoolId: default, IsStreamingQuery: 0} 2026-01-04T02:47:08.918694Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-04T02:47:08.918740Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7591332482636690142:2529], queueSize: 1 2026-01-04T02:47:08.919171Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7591332482636690142:2529], compileActor: [1:7591332482636690150:2534] 2026-01-04T02:47:08.919212Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-04T02:47:08.919263Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7591332482636690150:2534], cluster: db, database: /Root, text: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", startTime: 2026-01-04T02:47:08.919186Z 2026-01-04T02:47:09.039634Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7591332482636690150:2534]]: Built the replay message 
{"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1767494829","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"b594ff4-7f06e00d-3c318a7f-9a0b2a66","version":"1.0"} 2026-01-04T02:47:09.040151Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7591332482636690150:2534], duration: 0.120925s 2026-01-04T02:47:09.040192Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7591332482636690150:2534], owner: [1:7591332478341721561:2384], status: SUCCESS, issues: , uid: b594ff4-7f06e00d-3c318a7f-9a0b2a66 2026-01-04T02:47:09.040273Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7591332482636690142:2529], status: SUCCESS, compileActor: [1:7591332482636690150:2534] 2026-01-04T02:47:09.040322Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7591332482636690142:2529], queryUid: b594ff4-7f06e00d-3c318a7f-9a0b2a66, status:SUCCESS 2026-01-04T02:47:09.329563Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332465456817938:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:09.329661Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> TTransferTests::Create_Disabled [GOOD] >> TTransferTests::CreateWithoutCredentials >> TTransferTests::Create [GOOD] >> TTransferTests::CreateSequential 
------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest >> KqpCompileFallback::NoFallbackWhenSqlVersionNotSet [GOOD] Test command err: Trying to start YDB, gRPC: 10789, MsgBus: 29250 2026-01-04T02:47:04.390789Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332466786995913:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:04.390847Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:04.615664Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:04.626030Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:04.626147Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:04.726664Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332466786995889:2081] 1767494824388453 != 1767494824388456 2026-01-04T02:47:04.740480Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:04.745493Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:04.827989Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:04.855066Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:04.855090Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:04.855098Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:04.855188Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:05.268531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:05.343684Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.408758Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:05.450880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.610418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:05.677061Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.324182Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332479671899652:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.324268Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.324574Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332479671899662:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.324632Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.638301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.662066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.684204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.705502Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.728381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.755870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.784698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.839084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:07.890895Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332479671900531:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.891017Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.891076Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332479671900536:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.891196Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332479671900538:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.891242Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:07.893880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:07.902004Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332479671900540:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:07.983582Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332479671900591:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:09.064734Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-04T02:47:09.064873Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:405: Perform request, TraceId.SpanIdPtr: 0x00007C1054AEBB18 2026-01-04T02:47:09.064918Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:415: Received compile request, sender: [1:7591332488261835484:2529], queryUid: , queryText: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", keepInCache: 0, split: 0{ TraceId: 01ke3edh077wbchbbhahhwa716, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YzI0MWUyYTctNTA2NDZhYmQtNTY3ODJjZWQtNzMxZGQ2NDg=, PoolId: default, IsStreamingQuery: 0} 2026-01-04T02:47:09.065078Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:1247: Try to find query by queryId, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_DML, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-04T02:47:09.065132Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:513: Added request to queue, sender: [1:7591332488261835484:2529], queueSize: 1 2026-01-04T02:47:09.065625Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:878: Created compile actor, sender: [1:7591332488261835484:2529], compileActor: [1:7591332488261835492:2534] 2026-01-04T02:47:09.065668Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:269: traceId: verbosity = 0, trace_id = 0 2026-01-04T02:47:09.065712Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:276: Start compilation, self: [1:7591332488261835492:2534], cluster: db, database: /Root, text: "\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\n ", startTime: 2026-01-04T02:47:09.065632Z 2026-01-04T02:47:09.188790Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7591332488261835492:2534]]: Built the replay message 
{"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGg4vUm9vdC9LZXlWYWx1ZSIAKgwIgIKUhICAgIABECowATgBSjMKA0tleRABGgZVaW50NjQgBDAAQgBIAFIAWABiFgj///////////8BEP///////////wFKNgoFVmFsdWUQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1767494829","query_text":"\\n SELECT * FROM `/Root/KeyValue` WHERE Key = 1;\\n ","query_type":"QUERY_TYPE_SQL_DML","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"KeyValue\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"Inputs\":[],\"Path\":\"\\/Root\\/KeyValue\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/KeyValue\",\"reads\":[{\"lookup_by\":[\"Key (1)\"],\"columns\":[\"Key\",\"Value\"],\"type\":\"Lookup\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Plans\":[{\"PlanNodeId\":3,\"Operators\":[{\"ReadRange\":[\"Key (1)\"],\"E-Size\":\"0\",\"Name\":\"TablePointLookup\",\"E-Rows\":\"1\",\"Table\":\"KeyValue\",\"ReadColumns\":[\"Key\",\"Value\"],\"E-Cost\":\"0\"}],\"Node Type\":\"TablePointLookup\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"d5360b90-6478609b-62d1b762-149f8e56","version":"1.0"} 2026-01-04T02:47:09.189278Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7591332488261835492:2534], duration: 0.123623s 2026-01-04T02:47:09.189314Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7591332488261835492:2534], owner: [1:7591332479671899612:2383], status: SUCCESS, issues: , uid: d5360b90-6478609b-62d1b762-149f8e56 2026-01-04T02:47:09.189441Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7591332488261835484:2529], status: SUCCESS, compileActor: [1:7591332488261835492:2534] 2026-01-04T02:47:09.189524Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7591332488261835484:2529], queryUid: d5360b90-6478609b-62d1b762-149f8e56, status:SUCCESS 2026-01-04T02:47:09.389212Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332466786995913:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:09.389300Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest |94.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/persqueue_v1/ut/describes_ut/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/services/persqueue_v1/ut/describes_ut/unittest >> KqpSinkMvcc::WriteSkewUpsert+IsOlap >> 
KqpLocks::Invalidate >> KqpSnapshotIsolation::ConflictWrite-IsOlap-FillTables >> KqpSinkTx::SnapshotRO |94.6%| [TA] $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TA] {RESULT} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/persqueue_v1/ut/describes_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpCost::VectorIndexLookup-useSink [GOOD] >> TTransferTests::CreateWithoutCredentials [GOOD] >> TTransferTests::CreateWrongConfig >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite >> TTransferTests::CreateSequential [GOOD] >> TTransferTests::CreateInParallel >> KqpSinkLocks::OlapVisibleUncommittedRows >> KqpSnapshotIsolation::TSnapshotTwoInsertOltp >> KqpSinkMvcc::WriteSkewInsert-IsOlap >> KqpSnapshotRead::TestSnapshotExpiration-withSink >> KqpSinkMvcc::SnapshotExpiration ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest >> KqpCost::VectorIndexLookup-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 10472, MsgBus: 2875 2026-01-04T02:46:43.356834Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332376564442277:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:43.356940Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:43.575139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:43.575254Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:43.580798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:43.656810Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:43.657277Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:43.658120Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332376564442247:2081] 1767494803355287 != 1767494803355290 2026-01-04T02:46:43.701069Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:43.701093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:43.701100Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:43.701189Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:43.813168Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:44.071463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:44.118890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.267185Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.364346Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:44.409674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:44.486585Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.279779Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332389449346012:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.279904Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.280282Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332389449346022:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.280361Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.582178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.613201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.645020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.675359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.709088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.745392Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.780671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.832067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:46.896024Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332389449346893:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.896143Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.896272Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332389449346898:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.896566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332389449346900:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.896619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:46.900356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:46.911023Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332389449346901:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:46:46.976727Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332389449346953:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:48.356970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332376564442277:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:48.357044Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:46:48.674107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_creat ... 54775816u;3u];["lS\2";9223372036854775817u;4u];["kO\2";9223372036854775818u;4u];["nI\2";9223372036854775819u;5u];["nN\2";9223372036854775820u;5u];["vB\2";9223372036854775821u;6u];["sF\2";9223372036854775822u;6u]] /Root/Vectors/vector_idx_covered/indexImplPostingTable: [[["bR\2"];[1];[10];9223372036854775815u];[["eQ\2"];[4];[40];9223372036854775815u];[["jX\2"];[9];[90];9223372036854775815u];[["mW\2"];[12];[120];9223372036854775815u];[["bR\2"];[27];[270];9223372036854775815u];[["eQ\2"];[30];[300];9223372036854775815u];[["jX\2"];[35];[350];9223372036854775815u];[["mW\2"];[38];[380];9223372036854775815u];[["bR\2"];[53];[530];9223372036854775815u];[["eQ\2"];[56];[560];9223372036854775815u];[["jX\2"];[61];[610];9223372036854775815u];[["mW\2"];[64];[640];9223372036854775815u];[["bR\2"];[79];[790];9223372036854775815u];[["eQ\2"];[82];[820];9223372036854775815u];[["jX\2"];[87];[870];9223372036854775815u];[["mW\2"];[90];[900];9223372036854775815u];[["dZ\2"];[3];[30];9223372036854775816u];[["gY\2"];[6];[60];9223372036854775816u];[["dZ\2"];[29];[290];9223372036854775816u];[["gY\2"];[32];[320];9223372036854775816u];[["dZ\2"];[55];[550];9223372036854775816u];[["gY\2"];[58];[580];9223372036854775816u];[["dZ\2"];[81];[810];9223372036854775816u];[["gY\2"];[84];[840];9223372036854775816u];[["hP\2"];[7];[70];9223372036854775817u];[["pV\2"];[15];[150];9223372036854775817u];[["hP\2"];[33];[330];9223372036854775817u];[["pV\2"];[41];[410];9223372036854775817u];[["hP\2"];[59];[590];9223372036854775817u];[["pV\2"];[67];[670];9223372036854775817u];[["hP\2"];[85];[850];9223372036854775817u];[["pV\2"];[93];[930];9223372036854775817u];[["cI\2"];[2];[20];9223372036854775818u];[["kO\2"];[10];[100];9223372036854775818u];[["sU\2"];[18];[180];9223372036854775818u];[["cI\2"];[28];[280];9223372036854775818u];[["kO\2"];[36];[360];9223372036854775818u];[["sU\2"];[44];[440];9223372036854775818u];[["cI\2"];[54];[540];9223372036854775818u];[["kO\2"];[62];[620];9223372036854775818u];[["sU\2"];[70];[700];9223372036854775818u];[["cI\2"];[80];[800];9223372036854775818u];[["kO\2"];[88];[880];9223372036854775818u];[["sU\2"];[96];[960];9223372036854775818u];[["aA\2"];[0];[0];9223372036854775819u];[["iG\2"];[8];[80];9223372036854775819u];[["lF\2"];[11];[110];9223372036854775819u];[["qM\2"];[16];[160];9223372036854775819u];[["tL
\2"];[19];[190];9223372036854775819u];[["wK\2"];[22];[220];9223372036854775819u];[["yS\2"];[24];[240];9223372036854775819u];[["aA\2"];[26];[260];9223372036854775819u];[["iG\2"];[34];[340];9223372036854775819u];[["lF\2"];[37];[370];9223372036854775819u];[["qM\2"];[42];[420];9223372036854775819u];[["tL\2"];[45];[450];9223372036854775819u];[["wK\2"];[48];[480];9223372036854775819u];[["yS\2"];[50];[500];9223372036854775819u];[["aA\2"];[52];[520];9223372036854775819u];[["iG\2"];[60];[600];9223372036854775819u];[["lF\2"];[63];[630];9223372036854775819u];[["qM\2"];[68];[680];9223372036854775819u];[["tL\2"];[71];[710];9223372036854775819u];[["wK\2"];[74];[740];9223372036854775819u];[["yS\2"];[76];[760];9223372036854775819u];[["aA\2"];[78];[780];9223372036854775819u];[["iG\2"];[86];[860];9223372036854775819u];[["lF\2"];[89];[890];9223372036854775819u];[["qM\2"];[94];[940];9223372036854775819u];[["tL\2"];[97];[970];9223372036854775819u];[["fH\2"];[5];[50];9223372036854775820u];[["nN\2"];[13];[130];9223372036854775820u];[["vT\2"];[21];[210];9223372036854775820u];[["fH\2"];[31];[310];9223372036854775820u];[["nN\2"];[39];[390];9223372036854775820u];[["vT\2"];[47];[470];9223372036854775820u];[["fH\2"];[57];[570];9223372036854775820u];[["nN\2"];[65];[650];9223372036854775820u];[["vT\2"];[73];[730];9223372036854775820u];[["fH\2"];[83];[830];9223372036854775820u];[["nN\2"];[91];[910];9223372036854775820u];[["vT\2"];[99];[990];9223372036854775820u];[["uC\2"];[20];[200];9223372036854775821u];[["xB\2"];[23];[230];9223372036854775821u];[["uC\2"];[46];[460];9223372036854775821u];[["xB\2"];[49];[490];9223372036854775821u];[["uC\2"];[72];[720];9223372036854775821u];[["xB\2"];[75];[750];9223372036854775821u];[["uC\2"];[98];[980];9223372036854775821u];[["oE\2"];[14];[140];9223372036854775822u];[["rD\2"];[17];[170];9223372036854775822u];[["zJ\2"];[25];[250];9223372036854775822u];[["oE\2"];[40];[400];9223372036854775822u];[["rD\2"];[43];[430];9223372036854775822u];[["zJ\2"];[51];[510];9223372036854775822u];[["oE\2"];[66];[660];9223372036854775822u];[["rD\2"];[69];[690];9223372036854775822u];[["zJ\2"];[77];[770];9223372036854775822u];[["oE\2"];[92];[920];9223372036854775822u];[["rD\2"];[95];[950];9223372036854775822u]] /Root/Vectors: 
[[["aA\2"];[0];[0];[0]];[["bR\2"];[1];[1];[10]];[["cI\2"];[2];[2];[20]];[["dZ\2"];[3];[3];[30]];[["eQ\2"];[4];[4];[40]];[["fH\2"];[5];[5];[50]];[["gY\2"];[6];[6];[60]];[["hP\2"];[7];[7];[70]];[["iG\2"];[8];[8];[80]];[["jX\2"];[9];[9];[90]];[["kO\2"];[10];[0];[100]];[["lF\2"];[11];[1];[110]];[["mW\2"];[12];[2];[120]];[["nN\2"];[13];[3];[130]];[["oE\2"];[14];[4];[140]];[["pV\2"];[15];[5];[150]];[["qM\2"];[16];[6];[160]];[["rD\2"];[17];[7];[170]];[["sU\2"];[18];[8];[180]];[["tL\2"];[19];[9];[190]];[["uC\2"];[20];[0];[200]];[["vT\2"];[21];[1];[210]];[["wK\2"];[22];[2];[220]];[["xB\2"];[23];[3];[230]];[["yS\2"];[24];[4];[240]];[["zJ\2"];[25];[5];[250]];[["aA\2"];[26];[6];[260]];[["bR\2"];[27];[7];[270]];[["cI\2"];[28];[8];[280]];[["dZ\2"];[29];[9];[290]];[["eQ\2"];[30];[0];[300]];[["fH\2"];[31];[1];[310]];[["gY\2"];[32];[2];[320]];[["hP\2"];[33];[3];[330]];[["iG\2"];[34];[4];[340]];[["jX\2"];[35];[5];[350]];[["kO\2"];[36];[6];[360]];[["lF\2"];[37];[7];[370]];[["mW\2"];[38];[8];[380]];[["nN\2"];[39];[9];[390]];[["oE\2"];[40];[0];[400]];[["pV\2"];[41];[1];[410]];[["qM\2"];[42];[2];[420]];[["rD\2"];[43];[3];[430]];[["sU\2"];[44];[4];[440]];[["tL\2"];[45];[5];[450]];[["uC\2"];[46];[6];[460]];[["vT\2"];[47];[7];[470]];[["wK\2"];[48];[8];[480]];[["xB\2"];[49];[9];[490]];[["yS\2"];[50];[0];[500]];[["zJ\2"];[51];[1];[510]];[["aA\2"];[52];[2];[520]];[["bR\2"];[53];[3];[530]];[["cI\2"];[54];[4];[540]];[["dZ\2"];[55];[5];[550]];[["eQ\2"];[56];[6];[560]];[["fH\2"];[57];[7];[570]];[["gY\2"];[58];[8];[580]];[["hP\2"];[59];[9];[590]];[["iG\2"];[60];[0];[600]];[["jX\2"];[61];[1];[610]];[["kO\2"];[62];[2];[620]];[["lF\2"];[63];[3];[630]];[["mW\2"];[64];[4];[640]];[["nN\2"];[65];[5];[650]];[["oE\2"];[66];[6];[660]];[["pV\2"];[67];[7];[670]];[["qM\2"];[68];[8];[680]];[["rD\2"];[69];[9];[690]];[["sU\2"];[70];[0];[700]];[["tL\2"];[71];[1];[710]];[["uC\2"];[72];[2];[720]];[["vT\2"];[73];[3];[730]];[["wK\2"];[74];[4];[740]];[["xB\2"];[75];[5];[750]];[["yS\2"];[76];[6];[760]];[["zJ\2"];[77];[7];[770]];[["aA\2"];[78];[8];[780]];[["bR\2"];[79];[9];[790]];[["cI\2"];[80];[0];[800]];[["dZ\2"];[81];[1];[810]];[["eQ\2"];[82];[2];[820]];[["fH\2"];[83];[3];[830]];[["gY\2"];[84];[4];[840]];[["hP\2"];[85];[5];[850]];[["iG\2"];[86];[6];[860]];[["jX\2"];[87];[7];[870]];[["kO\2"];[88];[8];[880]];[["lF\2"];[89];[9];[890]];[["mW\2"];[90];[0];[900]];[["nN\2"];[91];[1];[910]];[["oE\2"];[92];[2];[920]];[["pV\2"];[93];[3];[930]];[["qM\2"];[94];[4];[940]];[["rD\2"];[95];[5];[950]];[["sU\2"];[96];[6];[960]];[["tL\2"];[97];[7];[970]];[["uC\2"];[98];[8];[980]];[["vT\2"];[99];[9];[990]]] /Root/Vectors/vector_idx_prefixed/indexImplLevelTable: 
[["nG\2";202u;201u];["jQ\2";203u;201u];["rD\2";9223372036854776411u;202u];["kI\2";9223372036854776412u;202u];["kO\2";9223372036854776413u;203u];["iT\2";9223372036854776414u;203u];["hV\2";205u;204u];["pK\2";206u;204u];["cV\2";9223372036854776417u;205u];["mW\2";9223372036854776418u;205u];["nN\2";9223372036854776419u;206u];["sI\2";9223372036854776420u;206u];["gQ\2";208u;207u];["oF\2";209u;207u];["gL\2";9223372036854776423u;208u];["hU\2";9223372036854776424u;208u];["mH\2";9223372036854776425u;209u];["rD\2";9223372036854776426u;209u];["rD\2";211u;210u];["jQ\2";212u;210u];["lF\2";9223372036854776429u;211u];["uC\2";9223372036854776430u;211u];["cV\2";9223372036854776431u;212u];["mP\2";9223372036854776432u;212u];["iS\2";214u;213u];["qK\2";215u;213u];["hU\2";9223372036854776435u;214u];["kO\2";9223372036854776436u;214u];["qM\2";9223372036854776437u;215u];["sH\2";9223372036854776438u;215u];["iV\2";217u;216u];["rH\2";218u;216u];["dZ\2";9223372036854776441u;217u];["kT\2";9223372036854776442u;217u];["mK\2";9223372036854776443u;218u];["vE\2";9223372036854776444u;218u];["nH\2";220u;219u];["jS\2";221u;219u];["mJ\2";9223372036854776447u;220u];["rD\2";9223372036854776448u;220u];["fU\2";9223372036854776449u;221u];["oR\2";9223372036854776450u;221u];["jR\2";223u;222u];["sH\2";224u;222u];["mP\2";9223372036854776453u;223u];["fU\2";9223372036854776454u;223u];["vG\2";9223372036854776455u;224u];["pI\2";9223372036854776456u;224u];["nG\2";226u;225u];["jR\2";227u;225u];["uC\2";9223372036854776459u;226u];["lH\2";9223372036854776460u;226u];["gY\2";9223372036854776461u;227u];["kQ\2";9223372036854776462u;227u];["rF\2";229u;228u];["jS\2";230u;228u];["pG\2";9223372036854776465u;229u];["xB\2";9223372036854776466u;229u];["nP\2";9223372036854776467u;230u];["eV\2";9223372036854776468u;230u]] /Root/Vectors/vector_idx_prefixed/indexImplPostingTable: 
[[[20];9223372036854776411u];[[40];9223372036854776411u];[[0];9223372036854776412u];[[50];9223372036854776412u];[[60];9223372036854776412u];[[10];9223372036854776413u];[[70];9223372036854776413u];[[80];9223372036854776413u];[[30];9223372036854776414u];[[90];9223372036854776414u];[[1];9223372036854776417u];[[81];9223372036854776417u];[[41];9223372036854776418u];[[61];9223372036854776418u];[[21];9223372036854776419u];[[31];9223372036854776419u];[[91];9223372036854776419u];[[11];9223372036854776420u];[[51];9223372036854776420u];[[71];9223372036854776420u];[[2];9223372036854776423u];[[62];9223372036854776423u];[[12];9223372036854776424u];[[32];9223372036854776424u];[[82];9223372036854776424u];[[22];9223372036854776425u];[[42];9223372036854776425u];[[52];9223372036854776425u];[[72];9223372036854776426u];[[92];9223372036854776426u];[[63];9223372036854776429u];[[23];9223372036854776430u];[[43];9223372036854776430u];[[3];9223372036854776431u];[[53];9223372036854776431u];[[13];9223372036854776432u];[[33];9223372036854776432u];[[73];9223372036854776432u];[[83];9223372036854776432u];[[93];9223372036854776432u];[[4];9223372036854776435u];[[64];9223372036854776435u];[[84];9223372036854776435u];[[44];9223372036854776436u];[[54];9223372036854776436u];[[24];9223372036854776437u];[[34];9223372036854776437u];[[94];9223372036854776437u];[[14];9223372036854776438u];[[74];9223372036854776438u];[[55];9223372036854776441u];[[15];9223372036854776442u];[[35];9223372036854776442u];[[85];9223372036854776442u];[[5];9223372036854776443u];[[45];9223372036854776443u];[[65];9223372036854776443u];[[25];9223372036854776444u];[[75];9223372036854776444u];[[95];9223372036854776444u];[[16];9223372036854776447u];[[26];9223372036854776447u];[[76];9223372036854776447u];[[86];9223372036854776447u];[[46];9223372036854776448u];[[66];9223372036854776448u];[[6];9223372036854776449u];[[56];9223372036854776449u];[[36];9223372036854776450u];[[96];9223372036854776450u];[[7];9223372036854776453u];[[47];9223372036854776453u];[[57];9223372036854776453u];[[67];9223372036854776453u];[[27];9223372036854776454u];[[87];9223372036854776454u];[[17];9223372036854776455u];[[77];9223372036854776455u];[[37];9223372036854776456u];[[97];9223372036854776456u];[[98];9223372036854776459u];[[8];9223372036854776460u];[[48];9223372036854776460u];[[68];9223372036854776460u];[[78];9223372036854776460u];[[58];9223372036854776461u];[[18];9223372036854776462u];[[28];9223372036854776462u];[[38];9223372036854776462u];[[88];9223372036854776462u];[[19];9223372036854776465u];[[69];9223372036854776465u];[[89];9223372036854776465u];[[49];9223372036854776466u];[[39];9223372036854776467u];[[59];9223372036854776467u];[[99];9223372036854776467u];[[9];9223372036854776468u];[[29];9223372036854776468u];[[79];9223372036854776468u]] /Root/Vectors/vector_idx_prefixed/indexImplPrefixTable: [[[0];201u];[[1];204u];[[2];207u];[[3];210u];[[4];213u];[[5];216u];[[6];219u];[[7];222u];[[8];225u];[[9];228u]] |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> TTransferTests::CreateWrongConfig [GOOD] >> TTransferTests::CreateWrongBatchSize >> TTransferTests::CreateInParallel [GOOD] >> TTransferTests::CreateDropRecreate >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] >> TTransferTests::CreateWrongBatchSize [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsSmall >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-system [GOOD] >> BasicUsage::TWriteSession_WriteAndReadAndCommitRandomMessagesNoClusterDiscovery [GOOD] >> 
BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage >> TTransferTests::CreateDropRecreate [GOOD] >> TTransferTests::ConsistencyLevel >> KqpTx::TooManyTx [GOOD] >> KqpTx::SnapshotROInteractive1 >> KqpTx::CommitRequired [GOOD] >> KqpTx::CommitRoTx ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestQuoterAccountResourcesForgetClient [GOOD] Test command err: 2026-01-04T02:47:01.783883Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:47:01.784009Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:47:01.803285Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:47:01.803406Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:47:01.828810Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:47:01.837514Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:134:2159], cookie=2253984994573305311, path="/Res", config={ MaxUnitsPerSecond: -100 }) 2026-01-04T02:47:01.837782Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:134:2159], cookie=2253984994573305311) 2026-01-04T02:47:01.838422Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:141:2164], cookie=10210187606117856442, path="/ResWithoutMaxUnitsPerSecond", config={ }) 2026-01-04T02:47:01.838564Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:141:2164], cookie=10210187606117856442) 2026-01-04T02:47:01.839108Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:144:2167], cookie=6334729447206447879, path="/ResWithMaxUnitsPerSecond", config={ MaxUnitsPerSecond: 1 }) 2026-01-04T02:47:01.839397Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "ResWithMaxUnitsPerSecond" 2026-01-04T02:47:01.862573Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:144:2167], cookie=6334729447206447879) 2026-01-04T02:47:01.863331Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:150:2172], cookie=17884935738631584757, path="/ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond", config={ }) 2026-01-04T02:47:01.863596Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "ResWithMaxUnitsPerSecond/ChildWithoutMaxUnitsPerSecond" 2026-01-04T02:47:01.876026Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:150:2172], cookie=17884935738631584757) 2026-01-04T02:47:02.160339Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:47:02.160465Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:47:02.178229Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:47:02.178377Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] 
TTxInit::Execute 2026-01-04T02:47:02.218723Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:47:02.219297Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:136:2161], cookie=17588030965823116547, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2026-01-04T02:47:02.220597Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-04T02:47:02.232924Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:136:2161], cookie=17588030965823116547) 2026-01-04T02:47:02.233548Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:147:2169], cookie=4209290498497179144, path="/Root/Res", config={ }) 2026-01-04T02:47:02.233789Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-01-04T02:47:02.246194Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:147:2169], cookie=4209290498497179144) 2026-01-04T02:47:02.249495Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:152:2174]. Cookie: 9110079788643199218. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 2 Version: "version" Schema: "schema" CloudId: "cloud" FolderId: "folder" ResourceId: "resource" SourceId: "source" Tags { key: "key" value: "value" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-04T02:47:02.249599Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:152:2174], cookie=9110079788643199218) 2026-01-04T02:47:02.250282Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:152:2174]. Cookie: 8533508743656418256. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 29000 } } 2026-01-04T02:47:02.250351Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:152:2174], cookie=8533508743656418256) 2026-01-04T02:47:04.380852Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:47:04.380970Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:47:04.400596Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:47:04.401145Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:47:04.438066Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:47:04.438625Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:136:2161], cookie=8759403774657907836, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-01-04T02:47:04.438999Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-04T02:47:04.451364Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:136:2161], cookie=8759403774657907836) 2026-01-04T02:47:04.452045Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:147:2169], cookie=14868542803955173192, path="/Root/Res", config={ }) 2026-01-04T02:47:04.452296Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-01-04T02:47:04.464694Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:147:2169], cookie=14868542803955173192) 2026-01-04T02:47:04.465514Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:152:2174]. Cookie: 5093787087088105792. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-04T02:47:04.465571Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:152:2174], cookie=5093787087088105792) 2026-01-04T02:47:04.466070Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [3:152:2174]. Cookie: 8242604741378416217. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1019000 } } 2026-01-04T02:47:04.466127Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[3:152:2174], cookie=8242604741378416217) 2026-01-04T02:47:06.585973Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:47:06.586095Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:47:06.606020Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:47:06.606553Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:47:06.630993Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:47:06.631503Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:137:2161], cookie=13747560975163808039, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-01-04T02:47:06.631853Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-04T02:47:06.644106Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:137:2161], cookie=13747560975163808039) 2026-01-04T02:47:06.645017Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:147:2169]. Cookie: 15410718872105455650. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-04T02:47:06.645085Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:147:2169], cookie=15410718872105455650) 2026-01-04T02:47:06.645594Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:147:2169]. Cookie: 6535812782541944359. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2026-01-04T02:47:06.645646Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:147:2169], cookie=6535812782541944359) 2026-01-04T02:47:06.646093Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:147:2169]. Cookie: 9710305049334258961. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2026-01-04T02:47:06.646142Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:147:2169], cookie=9710305049334258961) 2026-01-04T02:47:08.780755Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:47:08.780870Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:47:08.795294Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:47:08.795437Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:47:08.809097Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:47:08.809440Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:135:2159], cookie=12991820032123090316, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-01-04T02:47:08.809705Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-04T02:47:08.831934Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:135:2159], cookie=12991820032123090316) 2026-01-04T02:47:08.832554Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:145:2167]. Cookie: 16018861753381070797. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-04T02:47:08.832593Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:145:2167], cookie=16018861753381070797) 2026-01-04T02:47:08.832966Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:145:2167]. Cookie: 3687440301138196193. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 3000000 } } 2026-01-04T02:47:08.833003Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:145:2167], cookie=3687440301138196193) 2026-01-04T02:47:11.423251Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:184:2191]. Cookie: 18079979966260720023. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-04T02:47:11.423305Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:184:2191], cookie=18079979966260720023) 2026-01-04T02:47:11.423683Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:184:2191]. Cookie: 14639285287597093831. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 9000000 } } 2026-01-04T02:47:11.423716Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:184:2191], cookie=14639285287597093831) 2026-01-04T02:47:13.571289Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:217:2217]. Cookie: 7934643307491380390. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-04T02:47:13.571362Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:217:2217], cookie=7934643307491380390) 2026-01-04T02:47:13.571870Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [5:217:2217]. Cookie: 14082097665186554608. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 15000000 } } 2026-01-04T02:47:13.571917Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[5:217:2217], cookie=14082097665186554608) |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> KqpSinkLocks::VisibleUncommittedRowsUpdate >> KqpTx::ExplicitTcl [GOOD] >> KqpTx::InteractiveTx >> KqpSnapshotIsolation::TSimpleOltp [GOOD] >> KqpSnapshotIsolation::TSimpleOlap >> KqpTx::DeferredEffects [GOOD] >> KqpTx::EmptyTxOnCommit >> KqpSinkTx::ExplicitTcl [GOOD] >> KqpSinkTx::InvalidateOnError >> KqpSinkMvcc::TxReadsCommitted-IsOlap [GOOD] >> KqpSinkMvcc::TxReadsItsOwnWrites+IsOlap >> KqpSinkLocks::TInvalidate [GOOD] >> KqpSinkLocks::TInvalidateOlap >> TTransferTests::CreateWrongFlushIntervalIsSmall [GOOD] >> TTransferTests::CreateWrongFlushIntervalIsBig >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite-withSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink >> TTransferTests::ConsistencyLevel [GOOD] >> TTransferTests::Alter >> KqpSinkMvcc::DirtyReads-IsOlap [GOOD] >> KqpSinkMvcc::InsertConflictingKey+IsOlap-CommitOnInsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_system_names/unittest >> TSchemeShardSysNames::ESchemeOpCreateFileStore-Protect-DbAdmin-system [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:119:2058] recipient: [1:113:2144] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:131:2058] recipient: [1:113:2144] 2026-01-04T02:44:09.962728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:44:09.962813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:09.962851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:44:09.962894Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:44:09.962939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:44:09.962971Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:44:09.963033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:44:09.963116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:44:09.963932Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 
2026-01-04T02:44:09.965005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:44:10.069238Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: QueryServiceConfig { AllExternalDataSourcesAreAvailable: true } 2026-01-04T02:44:10.069335Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:10.070194Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:44:10.081504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:44:10.082007Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:44:10.083427Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:44:10.097101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:44:10.097364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:44:10.098233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.098645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:44:10.102023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:10.102920Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:44:10.108122Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:44:10.108219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:44:10.108336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:44:10.108390Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:44:10.108484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:44:10.108683Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.116605Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:245:2058] recipient: [1:15:2062] 2026-01-04T02:44:10.291236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: 
"pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:44:10.291474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.291696Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:44:10.291755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:44:10.292081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:44:10.292176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:10.294476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.294706Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:44:10.294978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.295073Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:44:10.295118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:44:10.295155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:44:10.297315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.297376Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:44:10.297419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:44:10.299308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.299374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:44:10.299424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.299485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:44:10.312538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:44:10.314486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:44:10.314694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:44:10.315956Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:44:10.316119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 139 RawX2: 4294969457 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:44:10.316170Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.316467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:44:10.316530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:44:10.316717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:44:10.316829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:44:10.319079Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board ... 
as 4 2026-01-04T02:47:15.655092Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 202, publications: 4, subscribers: 0 2026-01-04T02:47:15.655127Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 127], 6 2026-01-04T02:47:15.655161Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 128], 6 2026-01-04T02:47:15.655191Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 129], 5 2026-01-04T02:47:15.655220Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 202, [OwnerId: 72057594046678944, LocalPathId: 130], 2 2026-01-04T02:47:15.657420Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:47:15.657536Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 127 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:47:15.657580Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 4, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:47:15.657638Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 127], version: 6 2026-01-04T02:47:15.657681Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 127] was 2 2026-01-04T02:47:15.659074Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:47:15.659179Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 128 Version: 6 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:47:15.659215Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 3, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:47:15.659254Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 128], version: 6 2026-01-04T02:47:15.659307Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 128] was 2 2026-01-04T02:47:15.660742Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 5 PathOwnerId: 
72057594046678944, cookie: 202 2026-01-04T02:47:15.660840Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 129 Version: 5 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:47:15.660880Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:47:15.660917Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 129], version: 5 2026-01-04T02:47:15.660958Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 129] was 2 2026-01-04T02:47:15.662546Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:47:15.662649Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 130 Version: 2 PathOwnerId: 72057594046678944, cookie: 202 2026-01-04T02:47:15.662686Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 202 2026-01-04T02:47:15.662722Z node 32 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 202, pathId: [OwnerId: 72057594046678944, LocalPathId: 130], version: 2 2026-01-04T02:47:15.662761Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 130] was 3 2026-01-04T02:47:15.662844Z node 32 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 202, subscribers: 0 2026-01-04T02:47:15.666396Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-04T02:47:15.666588Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-04T02:47:15.668919Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 2026-01-04T02:47:15.669069Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 202 TestModificationResult got TxId: 202, wait until txId: 202 TestWaitNotification wait txId: 202 2026-01-04T02:47:15.670887Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 202: send EvNotifyTxCompletion 2026-01-04T02:47:15.670939Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 202 2026-01-04T02:47:15.672943Z node 32 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 202, at schemeshard: 72057594046678944 2026-01-04T02:47:15.673085Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 202: got EvNotifyTxCompletionResult 2026-01-04T02:47:15.673137Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 202: satisfy waiter [32:3901:5618] TestWaitNotification: OK eventTxId 202 TestWaitNotification wait txId: 197 2026-01-04T02:47:15.674753Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 197: send EvNotifyTxCompletion 2026-01-04T02:47:15.674809Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 197 TestWaitNotification wait txId: 198 2026-01-04T02:47:15.674934Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 198: send EvNotifyTxCompletion 2026-01-04T02:47:15.674966Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 198 TestWaitNotification wait txId: 199 2026-01-04T02:47:15.675043Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 199: send EvNotifyTxCompletion 2026-01-04T02:47:15.675075Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 199 TestWaitNotification wait txId: 200 2026-01-04T02:47:15.675146Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 200: send EvNotifyTxCompletion 2026-01-04T02:47:15.675179Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 200 TestWaitNotification wait txId: 201 2026-01-04T02:47:15.675248Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 201: send EvNotifyTxCompletion 2026-01-04T02:47:15.675280Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 201 2026-01-04T02:47:15.677586Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 197, at schemeshard: 72057594046678944 2026-01-04T02:47:15.677938Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 197: got EvNotifyTxCompletionResult 2026-01-04T02:47:15.677988Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 197: satisfy waiter [32:3904:5621] 2026-01-04T02:47:15.678280Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 198, at schemeshard: 72057594046678944 2026-01-04T02:47:15.678586Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 199, at schemeshard: 72057594046678944 2026-01-04T02:47:15.678651Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 198: got EvNotifyTxCompletionResult 2026-01-04T02:47:15.678685Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 198: satisfy waiter [32:3904:5621] 2026-01-04T02:47:15.678862Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 200, at schemeshard: 
72057594046678944 2026-01-04T02:47:15.678948Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 199: got EvNotifyTxCompletionResult 2026-01-04T02:47:15.678997Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 199: satisfy waiter [32:3904:5621] 2026-01-04T02:47:15.679170Z node 32 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 201, at schemeshard: 72057594046678944 2026-01-04T02:47:15.679248Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 200: got EvNotifyTxCompletionResult 2026-01-04T02:47:15.679287Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 200: satisfy waiter [32:3904:5621] 2026-01-04T02:47:15.679452Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 201: got EvNotifyTxCompletionResult 2026-01-04T02:47:15.679488Z node 32 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 201: satisfy waiter [32:3904:5621] TestWaitNotification: OK eventTxId 197 TestWaitNotification: OK eventTxId 198 TestWaitNotification: OK eventTxId 199 TestWaitNotification: OK eventTxId 200 TestWaitNotification: OK eventTxId 201 |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_system_names/unittest >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] >> KqpLocks::Invalidate [GOOD] >> KqpLocks::DifferentKeyUpdate >> KqpSinkTx::OlapSnapshotRO >> TTransferTests::Alter [GOOD] >> KqpSinkTx::SnapshotRO [GOOD] >> KqpSinkTx::SnapshotROInteractive1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::CreateWrongFlushIntervalIsBig [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:47:09.633351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:47:09.633452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:47:09.633492Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:47:09.633542Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:47:09.633591Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:47:09.633623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:47:09.633692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-04T02:47:09.633769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:47:09.634651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:47:09.635953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:47:09.733103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:47:09.733161Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:09.749020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:47:09.749509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:47:09.749701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:47:09.759038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:47:09.760347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:47:09.762864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:47:09.767925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:47:09.781400Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:47:09.783412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:47:09.791267Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:47:09.791344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:47:09.791412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:47:09.791485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:47:09.791609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:47:09.792717Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:47:09.969002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: 
".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.970114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.970243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.970357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.970439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.970506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.970573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.970658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.970770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.970850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.970936Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.971014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.971116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.971233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.971309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
712:2698], Recipient [6:128:2153]: NKikimrTxColumnShard.TEvNotifyTxCompletionResult Origin: 72075186233409546 TxId: 101 2026-01-04T02:47:19.438816Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5291: StateWork, processing event TEvColumnShard::TEvNotifyTxCompletionResult 2026-01-04T02:47:19.438878Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2026-01-04T02:47:19.438918Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409546, partId: 0 2026-01-04T02:47:19.439023Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409546 TxId: 101 2026-01-04T02:47:19.439164Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2026-01-04T02:47:19.442772Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:47:19.442840Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:47:19.442899Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 101:0 2026-01-04T02:47:19.443070Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [6:128:2153], Recipient [6:128:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T02:47:19.443113Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T02:47:19.443177Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:47:19.443227Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:47:19.443366Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:47:19.443409Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:47:19.443455Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:47:19.443510Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:47:19.443556Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:47:19.443604Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-04T02:47:19.443684Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [6:745:2722] message: TxId: 101 2026-01-04T02:47:19.443767Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready 
parts: 1/1 2026-01-04T02:47:19.443819Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:47:19.443862Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:47:19.444023Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:47:19.447821Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:47:19.447945Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [6:745:2722] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 101 at schemeshard: 72057594046678944 2026-01-04T02:47:19.448159Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:47:19.448222Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [6:746:2723] 2026-01-04T02:47:19.448472Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [6:748:2725], Recipient [6:128:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:47:19.448520Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:47:19.448562Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 101 TestModificationResults wait txId: 102 2026-01-04T02:47:19.449295Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [6:791:2760], Recipient [6:128:2153]: {TEvModifySchemeTransaction txid# 102 TabletId# 72057594046678944} 2026-01-04T02:47:19.449358Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-04T02:47:19.452359Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateTransfer Replication { Name: "Transfer" Config { TransferSpecific { Target { SrcPath: "/MyRoot1/Table" DstPath: "/MyRoot/Table" } Batching { FlushIntervalMilliSeconds: 86400001 } } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:47:19.452723Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_replication.cpp:361: [72057594046678944] TCreateReplication Propose: opId# 102:0, path# /MyRoot/Transfer 2026-01-04T02:47:19.452832Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, at schemeshard: 72057594046678944 2026-01-04T02:47:19.453086Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:47:19.455442Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Flush interval must be less than or equal to 24 
hours" TxId: 102 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:47:19.455761Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Flush interval must be less than or equal to 24 hours, operation: CREATE TRANSFER, path: /MyRoot/Transfer 2026-01-04T02:47:19.455837Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T02:47:19.456176Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:47:19.456234Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:47:19.456619Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [6:797:2766], Recipient [6:128:2153]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:47:19.456681Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:47:19.456740Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-04T02:47:19.456880Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [6:745:2722], Recipient [6:128:2153]: NKikimrScheme.TEvNotifyTxCompletion TxId: 102 2026-01-04T02:47:19.456917Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-04T02:47:19.456996Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:47:19.457080Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:47:19.457122Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [6:795:2764] 2026-01-04T02:47:19.457289Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [6:797:2766], Recipient [6:128:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:47:19.457326Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:47:19.457370Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2026-01-04T02:47:19.457720Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [6:798:2767], Recipient [6:128:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-04T02:47:19.457782Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:47:19.457911Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: 
"/MyRoot/Transfer" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:47:19.458109Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Transfer" took 203us result status StatusPathDoesNotExist 2026-01-04T02:47:19.458270Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Transfer\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/Transfer" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_transfer/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_transfer/unittest >> TTransferTests::Alter [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:47:09.633332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:47:09.633432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:47:09.633471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:47:09.633530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:47:09.633567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:47:09.633597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:47:09.633648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:47:09.633708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:47:09.634555Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources 
configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:47:09.635982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:47:09.731406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:47:09.731484Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:09.748399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:47:09.748631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:47:09.749459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:47:09.760077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:47:09.760344Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:47:09.762864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:47:09.767727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:47:09.784798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:47:09.785008Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:47:09.790716Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:47:09.790803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:47:09.790955Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:47:09.791013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:47:09.791208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:47:09.792293Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:47:09.962005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.968031Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.968266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.968383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.968480Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.968560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.968622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.968687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.968770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:47:09.968851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.968923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.969026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.969104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.969222Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:47:09.969324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
vProgressOperation 2026-01-04T02:47:20.251507Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:47:20.251572Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_replication.cpp:196: [72057594046678944] TAlterReplication TPropose opId# 104:0 ProgressState 2026-01-04T02:47:20.251633Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:47:20.251705Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 104 ready parts: 1/1 2026-01-04T02:47:20.251874Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 104 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:47:20.253101Z node 6 :TX_PROXY INFO: describe.cpp:354: Actor# [6:959:2902] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 1 2026-01-04T02:47:20.253750Z node 6 :REPLICATION_CONTROLLER TRACE: target_discoverer.cpp:28: [TargetDiscoverer][rid 1] Handle NKikimr::NReplication::TEvYdbProxy::TEvDescribePathResponse { Result: { status: SCHEME_ERROR, issues: } } 2026-01-04T02:47:20.253828Z node 6 :REPLICATION_CONTROLLER ERROR: target_discoverer.cpp:80: [TargetDiscoverer][rid 1] Describe path failed: path# /MyRoot1/Table, status# SCHEME_ERROR, issues# , iteration# 0 2026-01-04T02:47:20.254036Z node 6 :REPLICATION_CONTROLLER TRACE: controller.cpp:201: [controller 72075186233409547] Handle NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/MyRoot1/Table: SCHEME_ERROR ()] } 2026-01-04T02:47:20.254164Z node 6 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:24: [controller 72075186233409547][TxDiscoveryTargetsResult] Execute: NKikimr::NReplication::NController::TEvPrivate::TEvDiscoveryTargetsResult { ReplicationId: 1 ToAdd [] ToDelete [] Failed [/MyRoot1/Table: SCHEME_ERROR ()] } 2026-01-04T02:47:20.254236Z node 6 :REPLICATION_CONTROLLER ERROR: tx_discovery_targets_result.cpp:79: [controller 72075186233409547][TxDiscoveryTargetsResult] Discovery error: rid# 1, error# /MyRoot1/Table: SCHEME_ERROR () 2026-01-04T02:47:20.255406Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:47:20.255460Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 msg type: 269090816 2026-01-04T02:47:20.255565Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 104, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 104 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 104 at step: 5000041 2026-01-04T02:47:20.256496Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269287424, Sender [6:138:2160], Recipient [6:277:2276] 2026-01-04T02:47:20.256555Z node 6 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5268: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:47:20.256651Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:47:20.256781Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 104 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 25769805936 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:47:20.256853Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_replication.cpp:210: [72057594046678944] TAlterReplication TPropose opId# 104:0 HandleReply TEvOperationPlan: step# 5000041 2026-01-04T02:47:20.256998Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-04T02:47:20.257229Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:47:20.257332Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:47:20.257413Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:706: Ack tablet strongly msg opId: 104:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:104 2026-01-04T02:47:20.258381Z node 6 :REPLICATION_CONTROLLER DEBUG: tx_discovery_targets_result.cpp:92: [controller 72075186233409547][TxDiscoveryTargetsResult] Complete 2026-01-04T02:47:20.259464Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:47:20.259514Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:394: Ack coordinator stepId#5000041 first txId#104 countTxs#1 2026-01-04T02:47:20.259582Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:364: Ack mediator stepId#5000041 2026-01-04T02:47:20.259632Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 104:0 2026-01-04T02:47:20.259875Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [6:128:2153], Recipient [6:128:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T02:47:20.259913Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T02:47:20.259995Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:47:20.260042Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:47:20.260325Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:47:20.260384Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [6:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 39 FAKE_COORDINATOR: Erasing txId 104 
2026-01-04T02:47:20.260774Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:47:20.260831Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-04T02:47:20.260962Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:47:20.261009Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:47:20.261055Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:47:20.261106Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:47:20.261149Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:47:20.261206Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-04T02:47:20.261257Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:47:20.261306Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-04T02:47:20.261346Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-04T02:47:20.261489Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:47:20.261541Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 1, subscribers: 0 2026-01-04T02:47:20.261584Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 39], 4 2026-01-04T02:47:20.262353Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274137603, Sender [6:211:2212], Recipient [6:128:2153]: NKikimrSchemeBoard.TEvUpdateAck { Owner: 72057594046678944 Generation: 2 PathId: [OwnerId: 72057594046678944, LocalPathId: 39] Version: 4 } 2026-01-04T02:47:20.262408Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5332: StateWork, processing event NSchemeBoard::NSchemeshardEvents::TEvUpdateAck 2026-01-04T02:47:20.262521Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:47:20.262636Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:47:20.262682Z node 6 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:47:20.262729Z node 6 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 4 2026-01-04T02:47:20.262778Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:47:20.262878Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-04T02:47:20.262923Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:47:20.265411Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:47:20.265790Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:47:20.265873Z node 6 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 TestModificationResult got TxId: 104, wait until txId: 104 |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_transfer/unittest >> KqpSnapshotIsolation::TSnapshotTwoUpdateOltp+UpdateAfterInsert [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoUpdateOltp-UpdateAfterInsert >> KqpLocksTricky::TestSnapshotWithDependentReads+UseSink |94.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_transfer/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpLimits::TooBigKey-useSink [GOOD] >> KqpLimits::TooBigColumn-useSink >> KqpTx::RollbackManyTx [GOOD] >> KqpTx::RollbackRoTx >> KqpSinkMvcc::WriteSkewInsert-IsOlap [GOOD] >> KqpSinkMvcc::WriteSkewReplace+IsOlap >> KqpSnapshotIsolation::TReadOnlyOlap >> KqpLocksTricky::TestNoLocksIssue+withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssue-withSink >> KqpLocksTricky::TestNoWrite >> KqpTx::CommitRoTx [GOOD] >> KqpTx::CommitRoTx_TLI >> KqpTx::SnapshotROInteractive1 [GOOD] >> KqpTx::SnapshotROInteractive2 >> KqpTx::EmptyTxOnCommit [GOOD] >> KqpTx::CommitStats >> KqpTx::InteractiveTx [GOOD] >> KqpTx::InvalidateOnError >> KqpSinkTx::InvalidateOnError [GOOD] >> KqpSinkTx::Interactive >> KqpSinkLocks::InvalidateOlapOnCommit [GOOD] >> KqpSinkLocks::EmptyRangeOlap >> KqpSinkLocks::VisibleUncommittedRowsUpdate [GOOD] >> KqpSinkMvcc::DirtyReads+IsOlap >> KqpSinkMvcc::TxReadsCommitted+IsOlap [GOOD] >> KqpSinkMvcc::TxDeleteOwnUncommitted+IsOlap [GOOD] >> KqpSinkMvcc::TxDeleteOwnUncommitted-IsOlap >> KqpSnapshotIsolation::TSnapshotTwoInsertOltp [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap+UpdateAfterInsert >> KqpSinkMvcc::WriteSkewInsert+IsOlap [GOOD] >> KqpSinkMvcc::UpdateColumns+IsOlap >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite3 [GOOD] >> KqpSinkMvcc::OltpNamedStatement >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowByCount [GOOD] >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize >> KqpLocks::DifferentKeyUpdate [GOOD] >> KqpLocks::EmptyRange >> KqpSinkTx::OlapLocksAbortOnCommit [GOOD] >> KqpSinkTx::OlapInvalidateOnError >> KqpSinkTx::SnapshotROInteractive1 [GOOD] >> KqpSinkTx::SnapshotROInteractive2 >> KqpSinkMvcc::WriteSkewUpsert+IsOlap [GOOD] >> KqpSinkMvcc::WriteSkewUpsert-IsOlap >> KqpSinkMvcc::LostUpdate+IsOlap [GOOD] >> KqpSinkMvcc::LostUpdate-IsOlap >> KqpSinkLocks::OlapVisibleUncommittedRows [GOOD] >> KqpSinkLocks::OlapUpdateLocksTwoShards >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite+withSink [GOOD] >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink >> KqpTx::RollbackRoTx [GOOD] >> KqpTx::RollbackInvalidated >> KqpSinkMvcc::OlapReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 |94.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSnapshotRead::TestSnapshotExpiration-withSink [GOOD] >> KqpTx::BeginTransactionBadMode >> KqpTx::CommitRoTx_TLI [GOOD] >> KqpTx::InvalidateOnError [GOOD] >> KqpTx::CommitStats [GOOD] >> KqpSnapshotIsolation::TSimpleOlap [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoInsertOlap >> BasicUsage::TSimpleWriteSession_AutoSeqNo_BasicUsage [GOOD] >> BasicUsage::TWriteSession_AutoBatching [GOOD] >> BasicUsage::TWriteSession_WriteEncoded >> KqpSinkTx::Interactive [GOOD] >> KqpTx::SnapshotROInteractive2 [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoUpdateOltp-UpdateAfterInsert [GOOD] >> KqpSinkLocks::TInvalidateOlap [GOOD] >> KqpSnapshotIsolation::TUniqueSecondaryIndexOltp+EnableIndexStreamWrite >> KqpSinkLocks::UncommittedRead >> KqpLocksTricky::TestSnapshotWithDependentReads+UseSink [GOOD] >> KqpSinkMvcc::TxReadsItsOwnWrites+IsOlap [GOOD] >> KqpSinkMvcc::TxDeleteOwnUncommitted-IsOlap [GOOD] >> KqpLocks::EmptyRange [GOOD] >> KqpLocksTricky::TestSnapshotWithDependentReads-UseSink >> KqpLocks::EmptyRangeAlreadyBroken >> KqpSinkMvcc::TxReadsItsOwnWrites-IsOlap >> KqpSinkMvcc::SnapshotExpiration [GOOD] >> KqpSinkMvcc::TransactionFailsAsSoonAsItIsClearItCannotCommit+IsOlap ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::InvalidateOnError [GOOD] Test command err: Trying to start YDB, gRPC: 2689, MsgBus: 21927 2026-01-04T02:47:09.842649Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332489067994144:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:09.842690Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:10.249867Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:10.255866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:10.256150Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:10.288065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:10.356583Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:10.461951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:10.461970Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:10.461987Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:10.462063Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:10.519399Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:10.856962Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:11.155682Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:11.201287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:11.353847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:11.500832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:11.572924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.347896Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332506247865165:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.348029Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.348328Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332506247865175:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.348390Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.649781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.684199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.712816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.749381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.785262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.833186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.910970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.998264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:14.078254Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332510542833346:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.078351Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.082064Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332510542833351:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.082148Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332510542833352:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.082208Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.088220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:14.101915Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332510542833355:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:14.205027Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332510542833406:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:14.842715Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332489067994144:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:14.842779Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:16.521236Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NTg0NjBhMmMtODZkM2JkYjUtNTdmYjg3OTItNGE5NzQ1YTA=, ActorId: [1:7591332514837801011:2533], ActorState: ReadyState, LegacyTraceId: 01ke3edr8z3rr8wvmr2hq1s379, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01ke3edqqv0nqpfnzx1rf3eana" issue_code: 2015 severity: 1 } trace_id# Trying to start YDB, gRPC: 24330, MsgBus: 24862 2026-01-04T02:47:17.432587Z node 2 :METADATA_PRO ... /core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:26.636761Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:26.785630Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:26.869596Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.546859Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332572324916538:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.546952Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.547220Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332572324916548:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.547283Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.628955Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.665886Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.702085Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.748605Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.787400Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.830023Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.865092Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.911197Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.990153Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332572324917426:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.990236Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.990575Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332572324917431:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.990622Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332572324917432:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.990659Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.993973Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:30.004652Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332572324917435:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:30.088687Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332576619884782:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:30.309891Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332555145045610:2170];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:30.309971Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:31.969199Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2026-01-04T02:47:31.969428Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037911 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-04T02:47:31.969570Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037911 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-04T02:47:31.969790Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [3:7591332580914852426:2533], Table: `/Root/KeyValue` ([72057594046644480:42:1]), SessionActorId: [3:7591332580914852400:2533]Got CONSTRAINT VIOLATION for table `/Root/KeyValue`. ShardID=72075186224037911, Sink=[3:7591332580914852426:2533].{
: Error: Conflict with existing key., code: 2012 } 2026-01-04T02:47:31.970404Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7591332580914852419:2533], SessionActorId: [3:7591332580914852400:2533], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[3:7591332580914852400:2533]. 2026-01-04T02:47:31.970678Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=NWNhMWI0YTItMzM5NjQ2ZTctZGUwM2FmNDMtZTYyYWVlMmY=, ActorId: [3:7591332580914852400:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ee79mbcmd2zhn01bfdz3j, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7591332580914852420:2533] from: [3:7591332580914852419:2533] trace_id# 2026-01-04T02:47:31.970788Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7591332580914852420:2533] TxId: 281474976710673. Ctx: { TraceId: 01ke3ee79mbcmd2zhn01bfdz3j, Database: /Root, SessionId: ydb://session/3?node_id=3&id=NWNhMWI0YTItMzM5NjQ2ZTctZGUwM2FmNDMtZTYyYWVlMmY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/KeyValue`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-04T02:47:31.971128Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=NWNhMWI0YTItMzM5NjQ2ZTctZGUwM2FmNDMtZTYyYWVlMmY=, ActorId: [3:7591332580914852400:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ee79mbcmd2zhn01bfdz3j, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/KeyValue`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# 2026-01-04T02:47:32.066282Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=NWNhMWI0YTItMzM5NjQ2ZTctZGUwM2FmNDMtZTYyYWVlMmY=, ActorId: [3:7591332580914852400:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ee7ck5h01hfrxcg85267k, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01ke3ee7970pvg8522p0e43qtj" issue_code: 2015 severity: 1 } trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 29436, MsgBus: 1391 2026-01-04T02:47:09.801361Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332488119842460:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:09.801499Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:10.174976Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:10.262447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:10.262533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:10.270792Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:10.279930Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:10.441105Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:10.444997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:10.445020Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:10.445037Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:10.445105Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:10.812576Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:11.117308Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:11.124468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:11.175896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:11.306429Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:11.448847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:11.500651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.173187Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505299713457:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.173339Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.173770Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505299713467:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.173861Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.478399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.511449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.541558Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.571487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.603244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.643217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.683159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.726243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.839424Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505299714336:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.839522Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.839915Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505299714342:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.840120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505299714341:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.840177Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.843489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:13.855706Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332505299714345:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:13.936264Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332505299714396:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:14.802007Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332488119842460:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:14.802098Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:15.983087Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=N2I4OTlmMDItZmRlMTc5ODYtOWNlMGYzMDItZWMwMDk0Njc=, ActorId: [1:7591332513889649296:2533], ActorState: ReadyState, LegacyTraceId: 01ke3edqr162ttsctmrq3kzy3r, Create QueryResponse for error on request, msg: ydb/core/kqp/session_actor/kqp_session_ ... migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:24.961923Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:25.061552Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:25.063187Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332554272491622:2081] 1767494844800530 != 1767494844800533 2026-01-04T02:47:25.071878Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:25.077983Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:25.115819Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:25.282414Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:25.305722Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:25.305746Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:25.305753Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:25.305869Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:25.847132Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:25.940028Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:25.945940Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:25.956324Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:26.050863Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:26.234920Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:26.329063Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.040281Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332575747329972:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.040401Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.041006Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332575747329982:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.041072Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.041477Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332575747329984:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.041529Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.144296Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.182897Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.218757Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.252698Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.286913Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.332963Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.377600Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.430465Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.520839Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332575747330854:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.520935Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.522499Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332575747330859:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.522558Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332575747330860:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.522601Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.526683Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:29.544583Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332575747330863:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:47:29.612379Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332575747330914:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:29.815103Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332554272491660:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:29.815171Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::Interactive [GOOD] Test command err: Trying to start YDB, gRPC: 4406, MsgBus: 10798 2026-01-04T02:47:09.721851Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332488718829246:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:09.722009Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:10.028127Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:10.028266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:10.032367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:10.156417Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:10.165955Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332488718829217:2081] 1767494829719491 != 1767494829719494 2026-01-04T02:47:10.212088Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:10.410035Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:10.444750Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:10.444771Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:10.444783Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:10.444837Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:10.738005Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:11.126952Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:13.114524Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505898699294:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.114537Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505898699304:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.114638Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.115251Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505898699309:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.115316Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.117831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:13.127137Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332505898699308:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:13.238493Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332505898699361:2540] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:13.514660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.610367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:14.603359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:14.722394Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332488718829246:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:14.722475Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:16.386990Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NTZmNGQ5ZTctMzRlZTU5NTYtZDgyNDUwOGEtNjJjMTBjYWI=, ActorId: [1:7591332514488641836:2963], ActorState: ReadyState, LegacyTraceId: 01ke3edr45319pwdqz6ngsnkg1, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01ke3edqrp9g2pvfp2zpb94a2r" issue_code: 2015 severity: 1 } trace_id# Trying to start YDB, gRPC: 12332, MsgBus: 22831 2026-01-04T02:47:17.764879Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:17.765771Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332523368047011:2271];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:17.774014Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:17.893882Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:17.914031Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie 
mismatch for subscription [2:7591332523368046766:2081] 1767494837726943 != 1767494837726946 2026-01-04T02:47:17.938370Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:17.938466Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:17.940489Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:17.967758Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:18.051822Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:18.051842Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:18.051849Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:18.051931Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:18.087082Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:18.595189Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:18.607380Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:18.711466Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:21.515644Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332540547916828:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:21.515760Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:21.516204Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332540547916841:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:21.516250Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions ... 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:21.765226Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:22.808103Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591332523368047011:2271];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:22.809505Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:22.850036Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:24.357012Z node 2 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2026-01-04T02:47:24.357244Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037889 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-04T02:47:24.357385Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037889 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-04T02:47:24.357595Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [2:7591332553432826745:2964], Table: `/Root/KV` ([72057594046644480:43:1]), SessionActorId: [2:7591332553432826721:2964]Got CONSTRAINT VIOLATION for table `/Root/KV`. ShardID=72075186224037889, Sink=[2:7591332553432826745:2964].{
: Error: Conflict with existing key., code: 2012 } 2026-01-04T02:47:24.358125Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [2:7591332553432826738:2964], SessionActorId: [2:7591332553432826721:2964], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/KV`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[2:7591332553432826721:2964]. 2026-01-04T02:47:24.358434Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=2&id=N2M5NjY3ZDMtNTAzNWY4Zi1mZjgyYmVlLTJmMTVmNGU5, ActorId: [2:7591332553432826721:2964], ActorState: ExecuteState, LegacyTraceId: 01ke3edzvp6qv57fbfff69bj0a, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [2:7591332553432826739:2964] from: [2:7591332553432826738:2964] trace_id# 2026-01-04T02:47:24.358564Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [2:7591332553432826739:2964] TxId: 281474976710664. Ctx: { TraceId: 01ke3edzvp6qv57fbfff69bj0a, Database: /Root, SessionId: ydb://session/3?node_id=2&id=N2M5NjY3ZDMtNTAzNWY4Zi1mZjgyYmVlLTJmMTVmNGU5, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/KV`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-04T02:47:24.358951Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=N2M5NjY3ZDMtNTAzNWY4Zi1mZjgyYmVlLTJmMTVmNGU5, ActorId: [2:7591332553432826721:2964], ActorState: ExecuteState, LegacyTraceId: 01ke3edzvp6qv57fbfff69bj0a, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/KV`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id#
: Error: Constraint violated. Table: `/Root/KV`., code: 2012
: Error: Conflict with existing key., code: 2012 2026-01-04T02:47:24.440272Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=N2M5NjY3ZDMtNTAzNWY4Zi1mZjgyYmVlLTJmMTVmNGU5, ActorId: [2:7591332553432826721:2964], ActorState: ExecuteState, LegacyTraceId: 01ke3edzyh7kp6bznef3qeyfwh, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01ke3edzvf76pwxsc25jfqzskk" issue_code: 2015 severity: 1 } trace_id#
: Error: Transaction not found: 01ke3edzvf76pwxsc25jfqzskk, code: 2015 Trying to start YDB, gRPC: 4872, MsgBus: 25491 2026-01-04T02:47:25.779067Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332558223019549:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:25.779223Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:25.813762Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:25.896166Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:25.939810Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:25.939904Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:25.945757Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:26.025524Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:26.042380Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:26.042408Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:26.042417Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:26.042510Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:26.643026Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:26.654645Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:26.801466Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:29.703663Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332575402889484:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.703748Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332575402889497:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.703789Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.704020Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332575402889501:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.704063Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.707811Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:29.718063Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332575402889500:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:29.782961Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332575402889555:2535] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:29.836134Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.898550Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:30.888583Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332558223019549:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:30.890800Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:30.963936Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitStats [GOOD] Test command err: Trying to start YDB, gRPC: 17268, MsgBus: 25368 2026-01-04T02:47:09.851526Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332489477594168:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:09.854048Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:10.156491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:10.156624Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:10.192192Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:10.225973Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332489477594143:2081] 1767494829850123 != 1767494829850126 2026-01-04T02:47:10.234825Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:10.236516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:10.439172Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:10.444610Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:10.444629Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:10.444660Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:10.444751Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:10.863871Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:11.086985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:11.161922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:11.301418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:11.456870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:11.525667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:12.941183Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332502362497899:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:12.941278Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:12.941521Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332502362497909:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:12.941561Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.433529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.464716Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.492321Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.516867Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.553342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.600000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.659810Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.767625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.861440Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332506657466083:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.861532Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.861907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332506657466088:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.861951Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332506657466089:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.862058Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.865923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:13.878675Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332506657466092:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:13.966322Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332506657466147:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:14.855250Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332489477594168:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:14.855308Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 32257, MsgBus: 24310 2026-01-04T02:47:17.665853Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:17.666037Z node 2 :METADATA_PROVIDER E ... :47:22.272515Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591332544539413900:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 15371, MsgBus: 19793 2026-01-04T02:47:25.227688Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332555951611075:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:25.227817Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:25.289918Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:25.355118Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:25.355221Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:25.356601Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:25.362267Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:25.507057Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:25.507087Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:25.507097Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:25.507181Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:25.512766Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:26.071759Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:26.079576Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:26.083748Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:26.146949Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:26.254896Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:26.314707Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:26.390292Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.087404Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332573131481990:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.087501Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.090057Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332573131482000:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.090123Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.202492Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.262721Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.392298Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.480789Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.533254Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.575334Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.617114Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.671895Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.775410Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332573131482874:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.775536Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.775838Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332573131482879:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.775882Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332573131482880:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.775909Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.780058Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:29.791335Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332573131482883:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:29.863649Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332573131482934:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:30.229039Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332555951611075:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:30.229111Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitRoTx_TLI [GOOD] Test command err: Trying to start YDB, gRPC: 10321, MsgBus: 29135 2026-01-04T02:47:09.907594Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332488831425655:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:09.907820Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:09.984730Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:10.323976Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:10.325418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:10.325530Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:10.410025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:10.489179Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:10.521381Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:10.569853Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:10.569874Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:10.569893Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:10.569984Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:10.890678Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:11.133999Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:11.145930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:11.194733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:11.355458Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:11.517646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:11.580020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.360177Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332506011296453:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.360270Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.360516Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332506011296463:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.360557Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.707772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.739674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.777614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.812741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.846644Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.889982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.925666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.978670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:14.057218Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332510306264631:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.057304Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.057792Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332510306264636:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.057845Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332510306264637:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.057893Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.062510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:14.073739Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332510306264640:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:47:14.159434Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332510306264691:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:14.894364Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332488831425655:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:14.894442Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 15505, MsgBus: 63149 2026-01-04T02:47:16.934335Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exist ... 24.703612Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332554351711543:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:24.704051Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:24.737728Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:24.832304Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:24.832394Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:24.869986Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332554351711395:2081] 1767494844663394 != 1767494844663397 2026-01-04T02:47:24.901710Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:24.903825Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:24.904763Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:25.117910Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:25.201868Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:25.201892Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:25.201900Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:25.201980Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:25.701925Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:25.835349Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:25.842706Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:25.857267Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:25.934423Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:26.152346Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:26.236716Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:28.727095Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332571531582453:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:28.727200Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:28.727501Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332571531582465:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:28.727549Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:28.795419Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:28.841689Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:28.920675Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:28.969194Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.005169Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.049288Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.107745Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.173251Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.276305Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332575826550628:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.276389Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.276809Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332575826550634:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.276852Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.276893Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332575826550633:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.281465Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:29.302602Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332575826550637:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:29.376734Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332575826550688:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:29.695978Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332554351711543:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:29.696051Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpLimits::TooBigColumn-useSink [GOOD] |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::InsertConflictingKey+IsOlap-CommitOnInsert [GOOD] >> KqpSinkMvcc::InsertConflictingKey-IsOlap+CommitOnInsert >> KqpSinkMvcc::OltpNamedStatement [GOOD] >> KqpSinkMvcc::OltpMultiSinksNoSinks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::TxDeleteOwnUncommitted-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 20594, MsgBus: 27143 2026-01-04T02:47:09.784030Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332486964187987:2139];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:09.784125Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:09.824527Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:10.155595Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:10.155693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:10.168418Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:10.268512Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:10.276430Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332486964187886:2081] 1767494829780201 != 1767494829780204 2026-01-04T02:47:10.301282Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:10.448042Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:10.448063Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:10.448075Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: 
failed to initialize from file: (empty maybe) 2026-01-04T02:47:10.448164Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:10.507065Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:10.796366Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:11.087800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:13.019389Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332504144057940:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.019497Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.019791Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332504144057951:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.019922Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332504144057969:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.020061Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.023802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:13.034279Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332504144057971:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:13.166928Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332504144058022:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:13.456387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:47:13.617902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7591332504144058165:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:13.617892Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332504144058163:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:13.618148Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332504144058163:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:13.618370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332504144058163:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:13.618472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332504144058163:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:13.618580Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332504144058163:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:13.618678Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332504144058163:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:13.618826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332504144058163:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:13.618860Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7591332504144058165:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:13.618968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037891;self_id=[1:7591332504144058163:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:13.619073Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7591332504144058165:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:13.619077Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332504144058163:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:47:13.619170Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7591332504144058165:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:13.619194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332504144058163:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:47:13.619286Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7591332504144058165:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:13.619347Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332504144058163:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:47:13.619400Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7591332504144058165:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:13.619452Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332504144058163:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:47:13.619480Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7591332504144058165:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:13.619573Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332504144058163:2339];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:47:13.619592Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:7591332504144058165:2341];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:13.619 ... 
41: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:21.767911Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:21.767971Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:21.767987Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:21.776552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:21.776619Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:21.776638Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:21.777910Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:21.777964Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:21.777978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:21.786311Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:21.786370Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:21.786392Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:21.787372Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038055;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:21.787415Z node 1 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038055;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:21.787453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038055;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:21.794651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038021;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:21.794743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038021;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:21.794761Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038021;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; Trying to start YDB, gRPC: 30006, MsgBus: 26627 2026-01-04T02:47:26.466764Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332561169951985:2251];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:26.473906Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:26.566026Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:26.700945Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:26.701030Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:26.702884Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:26.707395Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332561169951763:2081] 1767494846409864 != 1767494846409867 2026-01-04T02:47:26.755404Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:26.811010Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:26.960516Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:26.960543Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:26.960552Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:26.960633Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:27.409790Z 
node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:27.547074Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:27.566321Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:30.278836Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332578349821831:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:30.278893Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332578349821839:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:30.278945Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:30.293990Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:30.294109Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332578349821846:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:30.294227Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:30.315549Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591332578349821845:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:30.392190Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591332578349821898:2537] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:30.451050Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:30.579600Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:31.498289Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591332561169951985:2251];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:31.500651Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:31.561345Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpSinkTx::OlapSnapshotRO [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive1 |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::SnapshotROInteractive2 [GOOD] >> KqpLocksTricky::TestNoLocksIssue-withSink [GOOD] >> KqpLocksTricky::TestNoWrite [GOOD] >> KqpSinkMvcc::WriteSkewUpsert-IsOlap [GOOD] >> KqpSinkMvcc::LostUpdate-IsOlap [GOOD] >> KqpLocksTricky::TestSnapshotIfInsertRead >> KqpTx::RollbackTx >> KqpTx::RollbackInvalidated [GOOD] >> KqpSinkTx::OlapDeferredEffects >> KqpSinkMvcc::OlapMultiSinks >> KqpSinkTx::DeferredEffects >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink >> KqpTx::LocksAbortOnCommit >> KqpSinkLocks::InvalidateOnCommit >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite >> KqpSnapshotIsolation::TReadOnlyOlap [GOOD] >> KqpTx::BeginTransactionBadMode [GOOD] >> KqpTx::CommitPrepared >> KqpSnapshotIsolation::TPragmaSettingOltp+IsSnapshotIsolation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/query/unittest >> KqpLimits::TooBigColumn-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 2271, MsgBus: 21399 2026-01-04T02:41:20.062407Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591330991232060074:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:20.062453Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:41:20.109618Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:41:20.501031Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:41:20.506577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:41:20.506715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:41:20.526954Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:41:20.625023Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:41:20.682808Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:41:20.767447Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:41:20.767482Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:41:20.767489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:41:20.767550Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:41:21.086176Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:41:21.579978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:41:21.586600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:41:21.652157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.818277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:21.996942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is 
undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:22.071382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.566726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331004116963797:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.566872Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.567200Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331004116963807:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.567256Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:23.918698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.946875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:23.978885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.005130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.032306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.068153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.141408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.186619Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:41:24.258972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331008411931977:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.259065Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.259242Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331008411931982:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.259276Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331008411931983:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.259298Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:41:24.262305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:41:24.270991Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591331008411931986:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:41:24.337533Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591331008411932037:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:41:25.062844Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591330991232060074:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:41:25.062889Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:41:25.708689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itsel ... ofiles were not loaded 2026-01-04T02:47:23.234131Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591332543192845184:2081] 1767494842977828 != 1767494842977831 2026-01-04T02:47:23.265121Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:23.438560Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:23.438585Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:23.438595Z node 5 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:23.438713Z node 5 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:23.517914Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:24.069994Z node 5 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:24.267492Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:24.279788Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:24.293350Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:24.410917Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:24.691696Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:24.802809Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.149012Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591332573257618141:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.149147Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.149527Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591332573257618151:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.149583Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.281210Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.345872Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.408784Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.456023Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.509229Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.563938Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.629529Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.719004Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.908068Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591332573257619045:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.908159Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.908227Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591332573257619050:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.910072Z node 5 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [5:7591332573257619052:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.910157Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.912957Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:29.929772Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [5:7591332573257619053:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:30.031005Z node 5 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [5:7591332577552586404:3784] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:34.819142Z node 5 :TX_DATASHARD ERROR: check_data_tx_unit.cpp:186: Transaction write column value of 20971522 bytes is larger than the allowed threshold 2026-01-04T02:47:34.819294Z node 5 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710673 at tablet 72075186224037911 status: EXEC_ERROR errors: BAD_ARGUMENT (Transaction write column value of 20971522 bytes is larger than the allowed threshold) | 2026-01-04T02:47:34.823241Z node 5 :KQP_EXECUTER ERROR: {KQPDATA@kqp_data_executer.cpp:888} ActorId: [5:7591332594732455897:2528] TxId: 281474976710673. Ctx: { TraceId: 01ke3ee9b85yz2kwss2p7xqsp7, Database: /Root, SessionId: ydb://session/3?node_id=5&id=NzI3ZmYyZWQtYzMyNTNjNWItMTc5ZWZlMTEtZjRjNWQyNWI=, PoolId: default, IsStreamingQuery: 0}. EXEC_ERROR: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold; trace_id# 2026-01-04T02:47:34.823810Z node 5 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=5&id=NzI3ZmYyZWQtYzMyNTNjNWItMTc5ZWZlMTEtZjRjNWQyNWI=, ActorId: [5:7591332590437488573:2528], ActorState: ExecuteState, LegacyTraceId: 01ke3ee9b85yz2kwss2p7xqsp7, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Error executing transaction (ExecError): Execution failed" severity: 1 issues { message: "[BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold" severity: 1 } } trace_id#
: Error: Error executing transaction (ExecError): Execution failed
: Error: [BAD_ARGUMENT] Transaction write column value of 20971522 bytes is larger than the allowed threshold ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::SnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 24192, MsgBus: 13779 2026-01-04T02:47:12.390653Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332501622760269:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:12.392662Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:12.648678Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:12.648836Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:12.655319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:12.702054Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:12.747193Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:12.748222Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332501622760241:2081] 1767494832388624 != 1767494832388627 2026-01-04T02:47:12.808752Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:12.808781Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:12.808790Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:12.808881Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:12.861253Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:13.216026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:13.400146Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:15.579053Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332514507663011:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:15.579205Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:15.580016Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332514507663033:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:15.580066Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332514507663034:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:15.580558Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:15.584944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:15.601983Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332514507663037:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:15.738021Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332514507663088:2539] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:16.052076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:16.157031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:17.130523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:17.874262Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332501622760269:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:17.964913Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:19.571505Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NDlmYWFhMWEtNDU4NjdmZTMtNmZlOTE2Y2MtMmNhZDM3ZDY=, ActorId: [1:7591332527392572931:2962], ActorState: ExecuteState, LegacyTraceId: 01ke3edv5wey0wtj4zrg470jkc, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { position { row: 3 column: 29 } message: "Operation \'Upsert\' can\'t be performed in read only transaction" end_position { row: 3 column: 29 } issue_code: 2008 severity: 1 } trace_id#
:3:29: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 Trying to start YDB, gRPC: 62866, MsgBus: 27820 2026-01-04T02:47:20.887391Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332534621300665:2177];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:20.887541Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:20.940443Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:21.069091Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:21.073189Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332534621300525:2081] 1767494840848036 != 1767494840848039 2026-01-04T02:47:21.081606Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:21.092567Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:21.092652Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:21.116089Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:21.270533Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:21.270555Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:21.270563Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:21.270656Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:21.300011Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:21.757704Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:21.763437Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:21.913958Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:24.466356Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332551801170588:2323], DatabaseId: /Root, PoolId: 
default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:24.466428Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332551801170603:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:24.466464Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:24.468034Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332551801170610:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:24.468101Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:24.469479Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:24.483543Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591332551801170609:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:24.572741Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591332551801170662:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:24.636726Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:24.705894Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:25.789074Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:26.214161Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591332534621300665:2177];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:26.623757Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 9708, MsgBus: 12337 2026-01-04T02:47:28.981141Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:28.981261Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:29.089300Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:29.089382Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:29.113912Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332572125123994:2081] 1767494848894352 != 1767494848894355 2026-01-04T02:47:29.115040Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:29.123815Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:29.271608Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-04T02:47:29.287043Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:29.287066Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:29.287074Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:29.287149Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:29.809190Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:29.814838Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:29.933947Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:32.687758Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332589304994066:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:32.687898Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:32.688266Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332589304994078:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:32.688309Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332589304994079:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:32.688348Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:32.692342Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:32.706200Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332589304994082:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:32.781026Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332589304994133:2539] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:32.844392Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:32.894374Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:34.072929Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/query/unittest |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::RollbackInvalidated [GOOD] Test command err: Trying to start YDB, gRPC: 19230, MsgBus: 17256 2026-01-04T02:47:09.722568Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332488640889785:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:09.722702Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:10.044136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:10.044264Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:10.048855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:10.126457Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:10.141842Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332488640889754:2081] 1767494829717645 != 1767494829717648 2026-01-04T02:47:10.150127Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:10.390488Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:10.445961Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:10.445987Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:10.446006Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:10.446101Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:10.742164Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:11.140657Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:11.152060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:11.187839Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:11.311498Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:11.477196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:11.535408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.157368Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505820760811:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.157453Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.157669Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505820760821:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.157729Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.454327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.480629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.506766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.534512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.563765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.598999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.684599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.780483Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.860599Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505820761696:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.860666Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.860897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505820761702:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.860943Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505820761701:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.860968Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.864555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:13.876159Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332505820761705:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:13.969948Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332505820761756:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:14.725962Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332488640889785:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:14.726032Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 32727, MsgBus: 61562 2026-01-04T02:47:23.305467Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinaliz ... 7 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:30.986806Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:30.994605Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:30.994625Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:30.994633Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:30.994713Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:31.475583Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:31.504312Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:31.585075Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:31.728294Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:31.771126Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:31.853789Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:34.765969Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332594677709382:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:34.766111Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:34.766530Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332594677709392:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:34.766617Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:34.856527Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:34.900462Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:34.948135Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:34.997410Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:35.045837Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:35.117121Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:35.170647Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:35.226741Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:35.324514Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332598972677559:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:35.324593Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:35.324947Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332598972677564:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:35.325002Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332598972677565:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:35.325124Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:35.330579Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:35.349393Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332598972677568:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:47:35.412914Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332598972677619:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:35.722147Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332577497838476:2161];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:35.722219Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:37.361735Z node 3 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [3:7591332607562612566:2543], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:2:13: Error: At function: KiReadTable!
:2:13: Error: Cannot find table 'db.[/Root/BadTable]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:47:37.363336Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=3&id=YzVmNTQ1ODgtNTc1NGZjNWItNmFlZjJkYi1mMjdlMDNiMg==, ActorId: [3:7591332607562612544:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3eeckj4qztp0ncgh3aerc1, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 2 column: 13 } message: "At function: KiReadTable!" end_position { row: 2 column: 13 } severity: 1 issues { position { row: 2 column: 13 } message: "Cannot find table \'db.[/Root/BadTable]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 2 column: 13 } issue_code: 2003 severity: 1 } } } tx_id# 01ke3eecjq17me9xy1sxyvevfh trace_id# 2026-01-04T02:47:37.384704Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YzVmNTQ1ODgtNTc1NGZjNWItNmFlZjJkYi1mMjdlMDNiMg==, ActorId: [3:7591332607562612544:2533], ActorState: ReadyState, LegacyTraceId: 01ke3eecn83ppwmwe0rabdvvbw, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01ke3eecjq17me9xy1sxyvevfh" issue_code: 2015 severity: 1 } trace_id# |94.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_system_names/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::WriteSkewReplace+IsOlap [GOOD] >> KqpSinkMvcc::WriteSkewReplace-IsOlap >> DataShardTxOrder::RandomPoints_DelayData [GOOD] |94.6%| [TA] $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... results_accumulator.log} |94.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/query/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSinkMvcc::UpdateColumns+IsOlap [GOOD] >> KqpSinkMvcc::UpdateColumns-IsOlap >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink >> KqpSinkLocks::VisibleUncommittedRows >> KqpSnapshotIsolation::TConflictReadWriteOltp >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD] >> KqpSnapshotIsolation::TUniqueSecondaryIndexOltp+EnableIndexStreamWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_order/unittest >> DataShardTxOrder::RandomPoints_DelayData [GOOD] Test command err: 2026-01-04T02:45:34.734238Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:45:34.735569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:45:34.735614Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:34.742550Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:45:34.742763Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:133:2156] 2026-01-04T02:45:34.742928Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:34.774214Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:112:2143], Recipient [1:133:2156]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:45:34.780107Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:34.780186Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:34.781679Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T02:45:34.781744Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T02:45:34.781829Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T02:45:34.782198Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:34.783383Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:34.783455Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:202:2156] in generation 2 2026-01-04T02:45:34.841645Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:34.865235Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T02:45:34.865364Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:34.866280Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:219:2215] 2026-01-04T02:45:34.866321Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T02:45:34.866366Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T02:45:34.866407Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] 
Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:34.866580Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.866614Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.866813Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T02:45:34.866895Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T02:45:34.866971Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:34.867003Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:34.867032Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T02:45:34.867075Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T02:45:34.867116Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T02:45:34.867157Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T02:45:34.867201Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:45:34.867278Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:214:2212], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.867305Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.867341Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:212:2211], serverId# [1:214:2212], sessionId# [0:0:0] 2026-01-04T02:45:34.873975Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:103:2137], Recipient [1:133:2156]: NKikimrTxDataShard.TEvProposeTransaction TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 103 RawX2: 4294969433 } TxBody: "\nK\n\006table1\020\r\032\t\n\003key\030\002 \"\032\014\n\005value\030\200$ 8\032\n\n\004uint\030\002 9(\":\n \000Z\006\010\010\030\001(\000J\014/Root/table1" TxId: 1 ExecLevel: 0 Flags: 0 SchemeShardId: 4200 ProcessingParams { } 2026-01-04T02:45:34.874028Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:45:34.874108Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 9437184 2026-01-04T02:45:34.874226Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit CheckSchemeTx 2026-01-04T02:45:34.874255Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 9437184 txId 1 ssId 4200 seqNo 0:0 2026-01-04T02:45:34.874292Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 1 at tablet 9437184 2026-01-04T02:45:34.874356Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is ExecutedNoMoreRestarts 
2026-01-04T02:45:34.874401Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit CheckSchemeTx 2026-01-04T02:45:34.874425Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit StoreSchemeTx 2026-01-04T02:45:34.874449Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:34.874637Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayCompleteNoMoreRestarts 2026-01-04T02:45:34.874662Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit StoreSchemeTx 2026-01-04T02:45:34.874684Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit FinishPropose 2026-01-04T02:45:34.874705Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:34.874735Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 9437184 is DelayComplete 2026-01-04T02:45:34.874766Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 9437184 executing on unit FinishPropose 2026-01-04T02:45:34.874793Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 9437184 to execution unit WaitForPlan 2026-01-04T02:45:34.874823Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:34.874847Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:1] at 9437184 is not ready to execute on unit WaitForPlan 2026-01-04T02:45:34.886627Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 9437184 2026-01-04T02:45:34.886700Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit StoreSchemeTx 2026-01-04T02:45:34.886738Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:1] at 9437184 on unit FinishPropose 2026-01-04T02:45:34.886799Z node 1 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 1 at tablet 9437184 send to client, exec latency: 0 ms, propose latency: 1 ms, status: PREPARED 2026-01-04T02:45:34.886869Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 9437184 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:34.887370Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:225:2221], Recipient [1:133:2156]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.887414Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:45:34.887446Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 9437184, clientId# [1:224:2220], serverId# [1:225:2221], sessionId# [0:0:0] 2026-01-04T02:45:34.887547Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287424, Sender [1:103:2137], Recipient [1:133:2156]: {TEvPlanStep step# 1000001 MediatorId# 0 TabletID 9437184} 2026-01-04T02:45:34.887569Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3171: StateWork, processing event TEvTxProcessing::TEvPlanStep 2026-01-04T02:45:34.887697Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1811: Trying to 
execute [1000001:1] at 9437184 on unit WaitForPlan 2026-01-04T02:45:34.887736Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1826: Execution status for [1000001:1] at 9437184 is Executed 2026-01-04T02:45:34.887789Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [1000001:1] at 9437184 executing on unit WaitForPlan 2026-01-04T02:45:34.887841Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [1000001:1] at 9437184 to execution unit PlanQueue 2026-01-04T02:45:34.894232Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 1 at step 1000001 at tablet 9437184 { Transactions { TxId: 1 AckTo { RawX1: 103 RawX2: 4294969433 } } Step: 1000001 MediatorID: 0 TabletID: 9437184 } 2026-01-04T02:45:34.894290Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:45:34.894513Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:133:2156], Recipient [1:133:2156]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.894547Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:45:34.894586Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T02:45:34.894621Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:34.894676Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 9437184 2026-01-04T02:45:34.894723Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1000001:1] in PlanQueue unit at 9437184 2026-01-04T02:45:34.894756Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [100 ... 
9437184 consumer 9437184 txId 521 2026-01-04T02:47:40.375848Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 522 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 16} 2026-01-04T02:47:40.375882Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:40.375911Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 522 2026-01-04T02:47:40.376080Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 523 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 17} 2026-01-04T02:47:40.376117Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:40.376146Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 523 2026-01-04T02:47:40.376229Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 524 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 18} 2026-01-04T02:47:40.376258Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:40.376286Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 524 2026-01-04T02:47:40.376440Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 525 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 19} 2026-01-04T02:47:40.376475Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:40.376503Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 525 2026-01-04T02:47:40.376705Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 526 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 20} 2026-01-04T02:47:40.376743Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:40.376775Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 526 2026-01-04T02:47:40.376964Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 527 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 21} 2026-01-04T02:47:40.377000Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:40.377030Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 
9437185 source 9437185 dest 9437184 consumer 9437184 txId 527 2026-01-04T02:47:40.377183Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 528 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 22} 2026-01-04T02:47:40.377217Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:40.377247Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 528 2026-01-04T02:47:40.377356Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 529 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 23} 2026-01-04T02:47:40.377410Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:40.377449Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 529 2026-01-04T02:47:40.377571Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 530 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 24} 2026-01-04T02:47:40.377605Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:40.377633Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 530 2026-01-04T02:47:40.377839Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 531 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 25} 2026-01-04T02:47:40.377876Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:40.377904Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 531 2026-01-04T02:47:40.378058Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 532 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 26} 2026-01-04T02:47:40.378096Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:40.378124Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 532 2026-01-04T02:47:40.378265Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 533 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 27} 2026-01-04T02:47:40.378301Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:40.378332Z node 10 :TX_DATASHARD DEBUG: 
datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 533 2026-01-04T02:47:40.378447Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 534 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 28} 2026-01-04T02:47:40.378494Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:40.378527Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 534 2026-01-04T02:47:40.378683Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 535 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 29} 2026-01-04T02:47:40.378719Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:40.378749Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 535 2026-01-04T02:47:40.378905Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 536 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 30} 2026-01-04T02:47:40.378938Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:40.378966Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 536 2026-01-04T02:47:40.379077Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 537 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 31} 2026-01-04T02:47:40.379113Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:40.379143Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 537 2026-01-04T02:47:40.407238Z node 10 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:47:40.407326Z node 10 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:538] at 9437184 on unit CompleteOperation 2026-01-04T02:47:40.407406Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 538] from 9437184 at tablet 9437184 send result to client [10:103:2137], exec latency: 1 ms, propose latency: 2 ms 2026-01-04T02:47:40.407494Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:564: Send delayed Ack RS Ack at 9437184 {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-01-04T02:47:40.407566Z node 10 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:47:40.407874Z node 10 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T02:47:40.407936Z node 10 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [1000005:539] at 9437184 on unit CompleteOperation 2026-01-04T02:47:40.407987Z node 10 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1000005 : 539] from 9437184 at tablet 9437184 send result to client [10:103:2137], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T02:47:40.408028Z node 10 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T02:47:40.408272Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [10:239:2231], Recipient [10:350:2317]: {TEvReadSet step# 1000005 txid# 538 TabletSource# 9437185 TabletDest# 9437184 SetTabletConsumer# 9437184 Flags# 0 Seqno# 32} 2026-01-04T02:47:40.408321Z node 10 :TX_DATASHARD TRACE: datashard_impl.h:3173: StateWork, processing event TEvTxProcessing::TEvReadSetAck 2026-01-04T02:47:40.408362Z node 10 :TX_DATASHARD DEBUG: datashard_outreadset.cpp:150: Receive RS Ack at 9437185 source 9437185 dest 9437184 consumer 9437184 txId 538 expect 29 31 31 31 31 20 21 29 31 29 29 21 27 27 19 12 27 29 20 27 21 29 24 20 29 27 29 24 19 - - - actual 29 31 31 31 31 20 21 29 31 29 29 21 27 27 19 12 27 29 20 27 21 29 24 20 29 27 29 24 19 - - - interm 29 29 21 18 27 20 21 29 20 29 29 21 27 27 19 12 27 29 20 27 21 29 24 20 29 27 29 24 19 - - - |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_order/unittest >> KqpSinkLocks::UncommittedRead [GOOD] >> KqpSinkLocks::OlapVisibleUncommittedRowsUpdate >> KqpSinkMvcc::TxReadsItsOwnWrites-IsOlap [GOOD] >> KqpSinkLocks::EmptyRangeOlap [GOOD] >> KqpSinkLocks::InsertWithBulkUpsert+UseBulkUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxWithIndexCommitsOnConcurrentWrite-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 16122, MsgBus: 13343 2026-01-04T02:47:09.721493Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332489762275295:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:09.721536Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:10.009896Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:10.053199Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:10.053333Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:10.135363Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332489762275259:2081] 1767494829717688 != 1767494829717691 2026-01-04T02:47:10.156097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:10.160058Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:10.354588Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:10.444942Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:10.444967Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:10.444994Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:10.445078Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:10.746786Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:11.105869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:11.146629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:11.196961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:11.346794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:11.500532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:11.585283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.274730Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332506942146316:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.274901Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.275218Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332506942146326:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.275286Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.554762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.586019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.619358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.649796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.700027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.768090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.833195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.899195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:14.001862Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332506942147203:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.002012Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.002975Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332511237114504:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.003012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332511237114505:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.003181Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.006802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:14.028544Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332511237114508:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:14.126965Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332511237114559:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:14.721965Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332489762275295:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:14.722038Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 14220, MsgBus: 3573 2026-01-04T02:47:18.344849Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActo ... 2026-01-04T02:47:30.282086Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:30.286060Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:30.286144Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:30.289917Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:30.382322Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:30.382416Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:30.382423Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:30.382429Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:30.382486Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:30.923363Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:30.930210Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:30.941381Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:31.064949Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:31.102259Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:31.292632Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:31.430482Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:34.260141Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332597793351276:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:34.260238Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:34.260522Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332597793351285:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:34.260573Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:34.349302Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:34.399167Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:34.441897Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:34.481381Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:34.523747Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:34.570356Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:34.620948Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:34.690253Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:34.798221Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332597793352161:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:34.798363Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:34.798868Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332597793352166:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:34.798934Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332597793352167:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:34.798978Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:34.802661Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:34.815232Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332597793352170:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:34.871711Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332597793352221:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:35.096101Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332580613480251:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:35.096176Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:36.956754Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:37.025628Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:37.091519Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInvalidateOnError [GOOD] >> KqpSinkTx::OlapInteractive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TUniqueSecondaryIndexOltp+EnableIndexStreamWrite [GOOD] Test command err: Trying to start YDB, gRPC: 5573, MsgBus: 21886 ... waiting for SysViewsRoster update finished 2026-01-04T02:47:12.614050Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:12.742557Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:47:12.743059Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:12.743117Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:47:13.028841Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:13.029009Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:13.092215Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494829828422 != 1767494829828425 2026-01-04T02:47:13.102447Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:13.147253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:13.283376Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:47:13.670760Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:13.670813Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:13.670865Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:13.671083Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:13.683288Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:14.064816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:14.078251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:14.105879Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:912:2777], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.106040Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:923:2782], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.106130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.106954Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:926:2785], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.107033Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.112295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:14.206790Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:927:2786], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:14.268054Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:983:2823] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:14.540072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:14.885634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:17.306996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 20600, MsgBus: 18575 ... waiting for SysViewsRoster update finished 2026-01-04T02:47:25.331503Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:25.338568Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:47:25.342954Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:47:25.343255Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:25.343433Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:47:25.625588Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:25.625720Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:25.652572Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767494841911450 != 1767494841911454 2026-01-04T02:47:25.657543Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:25.706720Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:25.831546Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:47:26.280130Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:26.280188Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:26.280223Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:26.280883Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:26.293439Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:26.778009Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:26.828954Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:915:2780], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:26.829079Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:926:2785], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:26.829169Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:26.836484Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:931:2790], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:26.837161Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:26.845129Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:26.923150Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:929:2788], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:26.961643Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:986:2826] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:27.048324Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:27.287146Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.884666Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 62461, MsgBus: 17727 2026-01-04T02:47:34.865531Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:34.882062Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:34.956010Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332595761525480:2081] 1767494854805318 != 1767494854805321 2026-01-04T02:47:34.992472Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:35.056985Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:35.057081Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:35.068426Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:35.250558Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:35.250590Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:35.250599Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:35.250753Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:35.254236Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:35.707074Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:35.858616Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:38.905140Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332612941395546:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:38.905275Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:38.905679Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332612941395558:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:38.910744Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:38.927379Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:47:38.928851Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332612941395560:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:38.979980Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332612941395622:2538] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:39.056882Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:39.143918Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.213254Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:41.426313Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:41.943588Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7591332625826305617:2962], SessionActorId: [3:7591332625826305473:2962], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestTable/IndexUniq/indexImplTable`., code: 2001 . sessionActorId=[3:7591332625826305473:2962]. 2026-01-04T02:47:41.944490Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=Y2U4MGVjZTctYjA1YTUwMmEtYzYwMThhM2MtNDM0YzI2NTY=, ActorId: [3:7591332625826305473:2962], ActorState: ExecuteState, LegacyTraceId: 01ke3eeh03byr4tj3szrq2q6k9, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7591332625826305618:2962] from: [3:7591332625826305617:2962] trace_id# 2026-01-04T02:47:41.944656Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7591332625826305618:2962] TxId: 281474976715666. Ctx: { TraceId: 01ke3eeh03byr4tj3szrq2q6k9, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Y2U4MGVjZTctYjA1YTUwMmEtYzYwMThhM2MtNDM0YzI2NTY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/TestTable/IndexUniq/indexImplTable`., code: 2001 } trace_id# 2026-01-04T02:47:41.945058Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=Y2U4MGVjZTctYjA1YTUwMmEtYzYwMThhM2MtNDM0YzI2NTY=, ActorId: [3:7591332625826305473:2962], ActorState: ExecuteState, LegacyTraceId: 01ke3eeh03byr4tj3szrq2q6k9, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/TestTable/IndexUniq/indexImplTable`." issue_code: 2001 severity: 1 } trace_id# |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] >> KqpSinkMvcc::OltpMultiSinksNoSinks [GOOD] >> KqpSinkMvcc::OltpMultiSinks >> KqpTx::RollbackTx [GOOD] >> KqpTx::RollbackTx2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::TxReadsItsOwnWrites-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 8236, MsgBus: 5746 2026-01-04T02:47:09.731563Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332488488961308:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:09.731666Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:10.022634Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:10.107758Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:10.107857Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:10.184108Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332488488961204:2081] 1767494829724142 != 1767494829724145 2026-01-04T02:47:10.196294Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:10.199138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:10.332755Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:10.449500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:10.449537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:10.449555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:10.449650Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:10.746949Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:11.120826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation 
part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:11.131238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:13.113968Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505668831269:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.113984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505668831289:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.114068Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.114278Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505668831292:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.114339Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.117708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:13.127677Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332505668831291:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:13.225070Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332505668831344:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:13.461881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.555834Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:14.640493Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:14.730002Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332488488961308:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:14.730072Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 17098, MsgBus: 27308 2026-01-04T02:47:17.752290Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332522239288715:2088];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:17.854596Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:17.913954Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:17.941986Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332522239288651:2081] 1767494837741283 != 1767494837741286 2026-01-04T02:47:17.951573Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:17.951651Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:17.959865Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:17.972849Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:18.087140Z node 2 :NET_CLASSIFIER 
WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:18.087162Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:18.087172Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:18.087243Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:18.116238Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:18.614154Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:18.620634Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:18.853953Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:21.383048Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332539419158684:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:21.383176Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:21.383579Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332539419158721:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:21.386961Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:21.403388Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:47:21.403640Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591332539419158723:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for err ... x_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:30.164293Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:30.164355Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:30.164369Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:30.166452Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:30.166513Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:30.166529Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:30.174971Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:30.175047Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:30.175091Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:30.177682Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:30.177743Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:30.177758Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:30.183533Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:30.183598Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:30.183614Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:30.186964Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:30.187018Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:30.187044Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038008;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:32.902247Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:47:32.902275Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded Trying to start YDB, gRPC: 2692, MsgBus: 4452 2026-01-04T02:47:34.961883Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332593957006701:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:34.962693Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:35.014775Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:35.132075Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:35.132159Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:35.132670Z node 3 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:35.149057Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:35.149917Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332593957006652:2081] 1767494854942902 != 1767494854942905 2026-01-04T02:47:35.181885Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:35.240088Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:35.240113Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:35.240122Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:35.240209Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:35.835100Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:35.972003Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:39.040393Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332615431844014:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:39.040485Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:39.041136Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332615431844026:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:39.041302Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332615431844028:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:39.041368Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:39.046106Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:39.063327Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332615431844031:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:39.137798Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332615431844082:2532] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:39.214187Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:39.292643Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.191418Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332593957006701:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:40.200830Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:40.511393Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::InsertConflictingKey-IsOlap+CommitOnInsert [GOOD] >> KqpSinkMvcc::InsertConflictingKey+IsOlap+CommitOnInsert >> KqpSnapshotRead::TestReadOnly+withSink >> KqpSinkMvcc::DirtyReads+IsOlap [GOOD] >> KqpSinkMvcc::ChangeFromTheFuture+IsOlap >> RetryPolicy::TWriteSession_TestPolicy [GOOD] >> RetryPolicy::TWriteSession_TestBrokenPolicy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::EmptyRangeAlreadyBroken [GOOD] Test command err: Trying to start YDB, gRPC: 22457, MsgBus: 3757 2026-01-04T02:47:12.026440Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332503239439638:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:12.029026Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:12.293494Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:12.300494Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:12.300599Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-04T02:47:12.326953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:12.389656Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332503239439533:2081] 1767494832018583 != 1767494832018586 2026-01-04T02:47:12.410905Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:12.499112Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:12.499136Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:12.499147Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:12.499247Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:12.587891Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:12.900036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:12.957744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.038931Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:13.080920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.229250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.290878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:15.308511Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332516124343295:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool 
info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:15.308656Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:15.309475Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332516124343305:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:15.309546Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:15.603996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:15.640402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:15.672962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:15.712120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:15.790841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:15.825353Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:15.867216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:15.937894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:16.018587Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332520419311474:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:16.018703Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:16.018722Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332520419311479:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:16.018888Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332520419311481:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:16.018943Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:16.022030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:16.036834Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332520419311482:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:16.134097Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332520419311536:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:17.025934Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332503239439638:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:17.026002Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:18.417878Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7591332529009246501:2531], SessionActorId: [1:7591332524714279136:2531], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 . sessionActorId=[1:7591332524714279136:2531]. 2026-01-04T02:47:18.420822Z node 1 :KQP_SE ... delivered_message; 2026-01-04T02:47:35.915330Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:36.010912Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591332601160713491:2081] 1767494855887465 != 1767494855887468 2026-01-04T02:47:36.060031Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:36.073198Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:36.073304Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:36.080469Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:36.205175Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:36.234444Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:36.234468Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:36.234485Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:36.234584Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:36.865826Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:36.876332Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:36.888515Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:36.984448Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:37.014965Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:37.184500Z node 4 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:37.270190Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.326703Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332622635551843:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:40.326806Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:40.327253Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332622635551853:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:40.327301Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:40.421352Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.470003Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.546739Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.590371Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.635145Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.685606Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.741796Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.810259Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.889790Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591332601160713524:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:40.890079Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:40.953970Z node 4 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332622635552728:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:40.954106Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:40.954526Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332622635552733:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:40.954584Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332622635552734:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:40.954625Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:40.961686Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:40.976109Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591332622635552737:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:47:41.060672Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591332626930520084:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:43.838670Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=MmNlZjM2Yy1iYjM3ZjhlZC03OTA2NTI2NC1iYTE2ODFiYQ==, ActorId: [4:7591332635520455008:2535], ActorState: ExecuteState, LegacyTraceId: 01ke3eejntd9awyvckesd8t93v, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`" issue_code: 2001 severity: 1 } trace_id#
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001
: Error: tx has deferred effects, but locks are broken |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::DeferredEffects [GOOD] >> KqpSnapshotIsolation::TUniqueSecondaryIndexOltp-EnableIndexStreamWrite >> KqpTx::LocksAbortOnCommit [GOOD] >> KqpTx::RollbackByIdle >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink >> KqpTx::CommitPrepared [GOOD] >> KqpSinkLocks::DifferentKeyUpdateOlap >> KqpSinkLocks::OlapUpdateLocksTwoShards [GOOD] >> KqpSinkLocks::OlapUpdateLocksOneShard >> KqpSinkLocks::InvalidateOnCommit [GOOD] >> KqpSinkLocks::OlapUncommittedRead >> KqpSinkMvcc::ReadOnlyTxCommitsOnConcurrentWrite [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 >> KqpLocksTricky::TestSnapshotWithDependentReads-UseSink [GOOD] >> KqpRollback::DoubleUpdate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::CommitPrepared [GOOD] Test command err: Trying to start YDB, gRPC: 17860, MsgBus: 21653 2026-01-04T02:47:13.883631Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332504391373938:2157];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:13.884372Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:14.235544Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:14.370114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:14.370241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:14.385471Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:14.390195Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:14.543592Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:14.574295Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:14.574325Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:14.574332Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:14.574421Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:14.886367Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:15.221800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:15.233974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:15.327284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:15.493050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:15.673339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:15.761895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:17.736014Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332521571244857:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:17.736131Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:17.736496Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332521571244867:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:17.736563Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:18.417036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:18.469562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:18.561714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:18.647544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:18.696000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:18.754444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:18.845166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:18.884956Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332504391373938:2157];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:18.885202Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:18.906932Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:19.026151Z node 1 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332530161180338:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:19.026307Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:19.030411Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332530161180344:2489], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:19.030435Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332530161180343:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:19.030496Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:19.034912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:19.051075Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332530161180347:2490], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:47:19.143661Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332530161180398:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:29.173901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:47:29.173936Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:31.626226Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [1:7591332581700788628:2665], TxId: 281474976715682, t ... ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591332585652401170:2137];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:37.650376Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 10129, MsgBus: 61675 2026-01-04T02:47:39.750010Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332617493595660:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:39.750232Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:39.795316Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:39.867462Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:39.895789Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:39.895872Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:39.908826Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:40.002432Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:40.002457Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:40.002465Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:40.002545Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:40.067985Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:40.442880Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:40.454907Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:40.468098Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.550699Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.735917Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.777937Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:40.809693Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.418187Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332634673466579:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.418280Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.418573Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332634673466589:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.418641Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.519664Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.564768Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.641922Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.678314Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.711805Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.754464Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.806880Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.860189Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.965453Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332634673467461:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.965594Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.966030Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332634673467466:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.966079Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332634673467467:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.966343Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.983552Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:44.000556Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332634673467470:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:44.072754Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332638968434817:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:44.753907Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332617493595660:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:44.753989Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::DeferredEffects [GOOD] Test command err: Trying to start YDB, gRPC: 16067, MsgBus: 5288 2026-01-04T02:47:11.833331Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332495319052778:2112];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:11.833540Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:12.101446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:12.101566Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:12.152754Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:12.167373Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:12.204634Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:12.205940Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332495319052695:2081] 1767494831830064 != 1767494831830067 2026-01-04T02:47:12.314437Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:12.314465Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:12.314472Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:12.314545Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:12.426795Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:12.719153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:12.844600Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:15.048065Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332512498922751:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:15.048191Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:15.048641Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332512498922780:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:15.048696Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332512498922779:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:15.048978Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:15.056767Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:15.079898Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332512498922783:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:15.231305Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332512498922834:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:15.513261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:47:15.701260Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332512498922975:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:15.701257Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332512498922969:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:15.701451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332512498922969:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:15.701530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332512498922975:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:15.701838Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332512498922975:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:15.701862Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332512498922969:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:15.701990Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332512498922969:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:15.702035Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332512498922975:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:15.702093Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332512498922969:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:15.702138Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037893;self_id=[1:7591332512498922975:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:15.702206Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332512498922969:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:15.702267Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332512498922975:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:15.702320Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332512498922969:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:15.702353Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332512498922975:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:15.702465Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332512498922975:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:15.702466Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332512498922969:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:15.702631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332512498922969:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:47:15.702690Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332512498922975:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:47:15.702747Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332512498922969:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:47:15.702794Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332512498922975:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:47:15.702865Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332512498922969:2337];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:47:15.702929Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037893;self_id=[1:7591332512498922975:2341];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS ... 2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.190867Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.190879Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.190890Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.190901Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.190914Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.190924Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.190948Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.190960Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.190971Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.190982Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.190992Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.191012Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.191029Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.191040Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.191052Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.191077Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.191089Z node 
2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.191101Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.191113Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.191123Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.191134Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.191144Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.191168Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:36.191190Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [2:7591332606157514126:2967], SessionActorId: [2:7591332601862546205:2967], StateRollback: unknown message 278003713 Trying to start YDB, gRPC: 13381, MsgBus: 29783 2026-01-04T02:47:38.010869Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332611732198548:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:38.010935Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:38.061689Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:38.151056Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:38.166002Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332607437231196:2081] 1767494858008807 != 1767494858008810 2026-01-04T02:47:38.192859Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:38.192960Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:38.201564Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:38.282567Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:38.282593Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:38.282601Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-04T02:47:38.282696Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:38.283889Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:38.814600Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:39.022140Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:42.046861Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332628912068564:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.046923Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332628912068573:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.046982Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.049913Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332628912068579:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.050008Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.051531Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:42.064635Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332628912068578:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:42.125274Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332628912068631:2538] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:42.180618Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:42.224852Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.197197Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332611732198548:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:43.197659Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:43.489015Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 >> KqpLocks::InvalidateOnCommit >> KqpSinkMvcc::TransactionFailsAsSoonAsItIsClearItCannotCommit+IsOlap [GOOD] >> KqpSinkMvcc::TransactionFailsAsSoonAsItIsClearItCannotCommit-IsOlap >> KqpSinkLocks::VisibleUncommittedRows [GOOD] >> KqpSinkLocks::UpdateLocksTwoShards >> KqpSinkMvcc::WriteSkewReplace-IsOlap [GOOD] >> KqpLocksTricky::TestSnapshotIfInsertRead [GOOD] >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot+StreamIndex |94.7%| [TA] $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpSnapshotIsolation::TConflictReadWriteOltp [GOOD] >> KqpSnapshotIsolation::TPragmaSettingOlap+IsSnapshotIsolation >> KqpSinkMvcc::UpdateColumns-IsOlap [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink >> TNebiusAccessServiceTest::Authenticate |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::WriteSkewReplace-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 18003, MsgBus: 21438 2026-01-04T02:47:13.876587Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332505802669155:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:13.876653Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:14.211940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:14.212062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:14.276387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:14.280835Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:14.298120Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332505802669047:2081] 1767494833859568 != 1767494833859571 2026-01-04T02:47:14.300301Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:14.521919Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:14.557172Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:14.557204Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:14.557211Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:14.557305Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:14.895076Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:15.253295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:15.265336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, 
unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:17.549922Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332522982539095:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:17.549934Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332522982539107:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:17.550064Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:17.550579Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332522982539134:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:17.550653Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:17.554880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:17.570312Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332522982539133:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:17.672046Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332522982539186:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:18.002721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:18.183840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:19.200659Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332505802669155:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:19.207505Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:19.520455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:21.625362Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710670; 2026-01-04T02:47:21.656139Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [1:7591332540162416669:2967], Table: `/Root/KV2` ([72057594046644480:44:1]), SessionActorId: [1:7591332535867449046:2967]Got LOCKS BROKEN for table `/Root/KV2`. ShardID=72075186224037989, Sink=[1:7591332540162416669:2967].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-04T02:47:21.656716Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7591332540162416662:2967], SessionActorId: [1:7591332535867449046:2967], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[1:7591332535867449046:2967]. 2026-01-04T02:47:21.656870Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7591332540162416662:2967], SessionActorId: [1:7591332535867449046:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:21.656884Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7591332540162416662:2967], SessionActorId: [1:7591332535867449046:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:21.656896Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7591332540162416662:2967], SessionActorId: [1:7591332535867449046:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:21.656907Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7591332540162416662:2967], SessionActorId: [1:7591332535867449046:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:21.656918Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7591332540162416662:2967], SessionActorId: [1:7591332535867449046:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:21.656929Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7591332540162416662:2967], SessionActorId: [1:7591332535867449046:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:21.656941Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7591332540162416662:2967], SessionActorId: [1:7591332535867449046:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:21.656952Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7591332540162416662:2967], SessionActorId: [1:7591332535867449046:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:21.656963Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7591332540162416662:2967], SessionActorId: [1:7591332535867449046:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:21.656974Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7591332540162416662:2967], SessionActorId: [1:7591332535867449046:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:21.656985Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7591332540162416662:2967], SessionActorId: [1:7591332535867449046:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:21.656997Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7591332540162416662:2967], SessionActorId: [1:7591332535867449046:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:21.657009Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7591332540162416662:2967], SessionActorId: [1:7591332535867449046:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:21.657020Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7591332540162416662:2967], SessionActorId: [1:7591332535867449046:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:21.657030Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7591332540162416662:2967], SessionActorId: [1:7591332535867449046:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:21.657044Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7591332540162416662:2967], SessionActorId: [1:7591332535867449046:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:21.657055Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7591332540162416662:2967], SessionActorId: 
[1:7591332535867449046:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:21.657066Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1:7591332540162416662:2967], SessionActorId: [1:7591332535867449046:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:21.657078Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [1: ... 1-04T02:47:41.330429Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:41.330445Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:41.330524Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:41.386259Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:41.962127Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:41.979810Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:41.986588Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:44.989771Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332638479479612:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:44.989774Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332638479479623:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:44.989866Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:44.990364Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332638479479627:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:44.990437Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:45.006635Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:45.025949Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332638479479626:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:45.116610Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332642774446977:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:45.191166Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:45.244879Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:46.214045Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332621299609711:2195];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:46.227616Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:46.538217Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:48.979036Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976715668; 2026-01-04T02:47:49.001088Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [3:7591332655659357533:2967], Table: `/Root/KV2` ([72057594046644480:44:1]), SessionActorId: [3:7591332655659356899:2967]Got LOCKS BROKEN for table `/Root/KV2`. ShardID=72075186224037989, Sink=[3:7591332655659357533:2967].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-04T02:47:49.001201Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:7591332655659356899:2967]. 2026-01-04T02:47:49.001366Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:49.001379Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:49.001392Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:49.001407Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:49.001416Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:49.001425Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:49.001434Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:49.001487Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=YWU1ZDA2NWQtODc0ODE4YzAtM2NjN2JmOGMtYjk5ZDJmZmM=, ActorId: [3:7591332655659356899:2967], ActorState: ExecuteState, LegacyTraceId: 01ke3eeqz0d41ecqrqy3ykkvy9, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7591332655659357527:2967] from: [3:7591332655659357526:2967] trace_id# 2026-01-04T02:47:49.001513Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:49.001532Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:49.001550Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:49.001667Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7591332655659357527:2967] TxId: 281474976715668. Ctx: { TraceId: 01ke3eeqz0d41ecqrqy3ykkvy9, Database: /Root, SessionId: ydb://session/3?node_id=3&id=YWU1ZDA2NWQtODc0ODE4YzAtM2NjN2JmOGMtYjk5ZDJmZmM=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-04T02:47:49.001922Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:49.001957Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:49.002137Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YWU1ZDA2NWQtODc0ODE4YzAtM2NjN2JmOGMtYjk5ZDJmZmM=, ActorId: [3:7591332655659356899:2967], ActorState: ExecuteState, LegacyTraceId: 01ke3eeqz0d41ecqrqy3ykkvy9, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV2`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-04T02:47:49.002405Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:49.002422Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:49.002432Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:49.002441Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:49.002450Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:49.002477Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], StateRollback: unknown message 278003713 2026-01-04T02:47:49.002486Z node 3 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [3:7591332655659357526:2967], SessionActorId: [3:7591332655659356899:2967], StateRollback: unknown message 278003713 |94.7%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authenticate [GOOD] >> KqpTx::RollbackTx2 [GOOD] >> KqpTx::SnapshotRO >> KqpSnapshotIsolation::TPragmaSettingOltp+IsSnapshotIsolation [GOOD] >> KqpSnapshotIsolation::TPragmaSettingOltp-IsSnapshotIsolation |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TNebiusAccessServiceTest::Authorize >> TNebiusAccessServiceTest::Authorize [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::UpdateColumns-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 21105, MsgBus: 9723 2026-01-04T02:47:09.722871Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332489648674166:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:09.726206Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:10.058057Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:10.091489Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:10.091589Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:10.180323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:10.258405Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:10.265889Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332489648674139:2081] 1767494829720591 != 1767494829720594 2026-01-04T02:47:10.277373Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:10.446489Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:10.446510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:10.446526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:10.446604Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:10.754281Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:11.109261Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:11.117239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:13.056731Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332506828544212:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.056736Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332506828544224:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.056844Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.057982Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332506828544227:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.058067Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.060413Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:13.072797Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332506828544226:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:13.188164Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332506828544279:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:13.453791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:47:13.619418Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332506828544422:2342];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:13.619631Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332506828544422:2342];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:13.619850Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332506828544422:2342];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:13.619981Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332506828544422:2342];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:13.620068Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332506828544422:2342];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:13.620171Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332506828544422:2342];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:13.620262Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332506828544422:2342];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:13.620346Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332506828544422:2342];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:13.620440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332506828544422:2342];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:47:13.620542Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037891;self_id=[1:7591332506828544422:2342];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:47:13.620645Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332506828544422:2342];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:47:13.620735Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332506828544422:2342];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:47:13.620822Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332506828544422:2342];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:47:13.622548Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506828544417:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:13.622596Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506828544417:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:13.622723Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506828544417:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:13.622801Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506828544417:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:13.622900Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506828544417:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:13.622980Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506828544417:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:13.623061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506828544417:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:13.623142Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506828544417:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:13.623258Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... 
; 2026-01-04T02:47:39.547635Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.551884Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.551941Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.551958Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.555749Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.555806Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.555821Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.565440Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.565503Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.565517Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.572208Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.572273Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.572290Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.577263Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.577326Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.577341Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.580235Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.580291Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.580307Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.585373Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.585425Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:39.585439Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038068;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; Trying to start YDB, gRPC: 5582, MsgBus: 9492 2026-01-04T02:47:42.174741Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:42.178815Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:42.266275Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:42.266367Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:42.267690Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:42.270239Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332631813526956:2081] 1767494862131657 != 1767494862131660 2026-01-04T02:47:42.299306Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:42.394556Z node 3 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:42.394588Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:42.394600Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:42.394706Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:42.461147Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:42.854096Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:43.169997Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:46.345921Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332648993397000:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:46.345988Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332648993397012:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:46.346082Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:46.346708Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332648993397039:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:46.346778Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:46.350903Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:46.366906Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332648993397038:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:46.439577Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332648993397091:2533] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:46.513563Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:46.592752Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:47.814162Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpSinkLocks::InsertWithBulkUpsert+UseBulkUpsert [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap+UpdateAfterInsert [GOOD] >> KqpSinkTx::OlapDeferredEffects [GOOD] >> KqpSinkLocks::InsertWithBulkUpsert-UseBulkUpsert >> KqpSinkTx::OlapExplicitTcl >> KqpSnapshotRead::TestReadOnly+withSink [GOOD] >> KqpSinkMvcc::OltpMultiSinks [GOOD] >> KqpSinkTx::OlapSnapshotROInteractive1 [GOOD] >> KqpTx::RollbackByIdle [GOOD] >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap-UpdateAfterInsert >> KqpSinkTx::OlapSnapshotROInteractive2 >> KqpTx::MixEnginesOldNew >> KqpSnapshotRead::TestReadOnly-withSink ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authenticate [GOOD] Test command err: 2026-01-04T02:47:52.947947Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cadd7de2ed0] Connect to grpc://localhost:16668 2026-01-04T02:47:52.961273Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cadd7de2ed0] Request AuthenticateRequest { iam_token: "**** (3C4833B6)" } 2026-01-04T02:47:53.010750Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cadd7de2ed0] Status 7 Permission Denied 2026-01-04T02:47:53.014122Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cadd7de2ed0] Request AuthenticateRequest { iam_token: "**** (86DDB286)" } 2026-01-04T02:47:53.027412Z node 1 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cadd7de2ed0] Response AuthenticateResponse { account { user_account { id: "1234" } } } ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::Authorize [GOOD] Test command err: 2026-01-04T02:47:53.151414Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7cf707ee4750] Connect to grpc://localhost:63478 2026-01-04T02:47:53.166007Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf707ee4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" 
} } iam_token: "**** (717F937C)" } } } 2026-01-04T02:47:53.180373Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7cf707ee4750] Response AuthorizeResponse { results { key: 0 value { account { user_account { id: "user_id" } } } } } 2026-01-04T02:47:53.186312Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf707ee4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "path_id" } } iam_token: "**** (79225CA9)" } } } 2026-01-04T02:47:53.188529Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cf707ee4750] Status 7 Permission Denied 2026-01-04T02:47:53.194374Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf707ee4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "denied" } resource_path { path { id: "path_id" } } iam_token: "**** (717F937C)" } } } 2026-01-04T02:47:53.196296Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cf707ee4750] Status 7 Permission Denied 2026-01-04T02:47:53.202404Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7cf707ee4750] Request AuthorizeRequest { checks { key: 0 value { permission { name: "perm" } resource_path { path { id: "p" } } iam_token: "**** (717F937C)" } } } 2026-01-04T02:47:53.204697Z node 3 :GRPC_CLIENT DEBUG: grpc_service_client.h:111: [7cf707ee4750] Status 7 Permission Denied |94.7%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... results_accumulator.log} |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest >> KqpLocksTricky::TestNoLocksIssueInteractiveTx+withSink [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite1-withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink |94.7%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest |94.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_order/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TNebiusAccessServiceTest::PassRequestId [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite1 [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 >> BasicUsage::TWriteSession_WriteEncoded [GOOD] >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] >> KqpSnapshotIsolation::TUniqueSecondaryIndexOltp-EnableIndexStreamWrite [GOOD] >> KqpSnapshotIsolation::TUniqueSecondaryWriteIndexOltp ------- [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest >> TNebiusAccessServiceTest::PassRequestId [GOOD] Test command err: 2026-01-04T02:47:55.860833Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:83: [7ca8bb2e3cd0]{reqId} Connect to grpc://localhost:13125 2026-01-04T02:47:55.889234Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:122: [7ca8bb2e3cd0]{reqId} Request AuthenticateRequest { iam_token: "**** (717F937C)" } 2026-01-04T02:47:55.913307Z node 2 :GRPC_CLIENT DEBUG: grpc_service_client.h:109: [7ca8bb2e3cd0]{reqId} Response AuthenticateResponse { account { user_account { id: "1234" } } } |94.7%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpMultiSinks [GOOD] Test command err: Trying to start YDB, gRPC: 17178, MsgBus: 19828 2026-01-04T02:47:09.754269Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332488163520778:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:09.754364Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:10.221917Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:10.240961Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:10.241055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:10.333501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:10.359797Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:10.363094Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332488163520755:2081] 1767494829751676 != 1767494829751679 2026-01-04T02:47:10.412491Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:10.501269Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:10.501300Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:10.501322Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:10.501439Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-01-04T02:47:10.768018Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:11.135853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:13.261480Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505343390841:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.261502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505343390829:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.261609Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.262473Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505343390844:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.262559Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.264609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:13.273749Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332505343390843:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:13.395827Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332505343390896:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:13.699606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:47:13.888697Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505343391065:2338];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:13.889009Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505343391065:2338];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:13.889261Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505343391065:2338];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:13.889458Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505343391065:2338];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:13.889572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505343391065:2338];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:13.889683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505343391065:2338];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:13.889827Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505343391065:2338];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:13.889935Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505343391065:2338];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:13.890028Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505343391065:2338];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:47:13.890134Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037893;self_id=[1:7591332505343391065:2338];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:47:13.890223Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505343391065:2338];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:47:13.890329Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505343391065:2338];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:47:13.890437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505343391065:2338];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:47:13.896627Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332505343391078:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:13.896693Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332505343391078:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:13.896881Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332505343391078:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:13.896991Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332505343391078:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:13.897121Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332505343391078:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:13.897243Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332505343391078:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:13.897361Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332505343391078:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:13.897472Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332505343391078:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:13.897594Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037894;self_id=[1:7591332505343391078:2345];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CL ... pFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:37.098427Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:37.098451Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:37.098459Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:37.098553Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:37.648802Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:37.662641Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:37.796906Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:40.832015Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332620217920623:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:40.832163Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:40.832614Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332620217920658:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:40.851812Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:40.894866Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332620217920660:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:40.968741Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332620217920712:2532] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:41.038440Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:41.083409Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:42.002329Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332603038050735:2173];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:42.040720Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:42.316627Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 3452, MsgBus: 9509 2026-01-04T02:47:45.643953Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332641719043084:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:45.644248Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:45.661970Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:45.760319Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:45.768123Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:45.768219Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:45.797509Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:45.890753Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:45.890780Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to 
initialize from file: (empty maybe) 2026-01-04T02:47:45.890788Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:45.890880Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:45.925007Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:46.556211Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:46.564032Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:46.651338Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:49.939958Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332658898913023:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:49.940038Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332658898913012:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:49.940430Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:49.940796Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332658898913029:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:49.940900Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:49.945158Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:49.957382Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591332658898913028:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:50.030654Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591332663193880378:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:50.096818Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:50.150747Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:50.950778Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591332641719043084:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:50.951227Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:51.383931Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> KqpSinkMvcc::OlapMultiSinks [GOOD] >> KqpSinkMvcc::InsertConflictingKey-IsOlap-CommitOnInsert |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpLocks::InvalidateOnCommit [GOOD] >> KqpLocks::TwoPhaseTx ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> BasicUsage::TWriteSession_BatchingProducesContinueTokens [GOOD] Test command err: 2026-01-04T02:46:27.843994Z :WriteAndReadSomeMessagesWithAsyncCompression INFO: Random seed for debugging is 1767494787843969 2026-01-04T02:46:28.006108Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332310901282149:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:28.006200Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:28.032688Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332312667365000:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:46:28.032870Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:46:28.033325Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct 
read cache: : Created 2026-01-04T02:46:28.040454Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:46:28.180389Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:28.202267Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:46:28.226609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:28.226695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:28.227286Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:28.227346Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:28.252861Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:28.253038Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:28.255097Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:28.258146Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:28.355211Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/002ea3/r3tmp/yandexrA9VCY.tmp 2026-01-04T02:46:28.355233Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/002ea3/r3tmp/yandexrA9VCY.tmp 2026-01-04T02:46:28.355470Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/002ea3/r3tmp/yandexrA9VCY.tmp 2026-01-04T02:46:28.355591Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:28.361881Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:46:28.385470Z INFO: TTestServer started on Port 30697 GrpcPort 25388 2026-01-04T02:46:28.423687Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions PQClient connected to localhost:25388 2026-01-04T02:46:28.519537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:29.013151Z node 1 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:29.037252Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:30.185716Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332321257299835:2304], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:30.185716Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332321257299824:2301], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:30.185787Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:30.186002Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332321257299839:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:30.186053Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:30.191141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:30.204305Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591332321257299838:2305], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720657 completed, doublechecking } 2026-01-04T02:46:30.307719Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591332321257299868:2137] txid# 281474976720658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:30.531513Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591332321257299883:2310], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:46:30.531939Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=ZjhmNDZiMWYtZjliNTRlMWUtYjFkN2U3MjctYjc3YTA3MmI=, ActorId: [2:7591332321257299822:2300], ActorState: ExecuteState, LegacyTraceId: 01ke3ecb18c7c1j8fsfp64yvv7, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:46:30.531971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:30.534080Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591332319491218104:2335], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:46:30.534242Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:46:30.534364Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MzEyMjY5NTUtNTRkNjQ1MDAtNmFkMzkxODAtZjY2MzBjNmU=, ActorId: [1:7591332319491218062:2327], ActorState: ExecuteState, LegacyTraceId: 01ke3ecb4j12083qhkh99rmvn3, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:46:30.534591Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2 ... 
ie 1 consumer shared/user session shared/user_11_1_6788364337015016902_v1 TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 4 2026-01-04T02:47:54.243851Z node 11 :PQ_READ_PROXY DEBUG: partition_actor.cpp:902: session cookie 1 consumer shared/user session shared/user_11_1_6788364337015016902_v1 after read state TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 59af5500-437e317a-aa7d401e-1c139976 has messages 1 2026-01-04T02:47:54.243998Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1951: session cookie 1 consumer shared/user session shared/user_11_1_6788364337015016902_v1 read done: guid# 59af5500-437e317a-aa7d401e-1c139976, partition# TopicId: Topic rt3.dc1--test-topic in dc dc1 in database: Root, partition 0(assignId:1), size# 432 2026-01-04T02:47:54.244034Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2113: session cookie 1 consumer shared/user session shared/user_11_1_6788364337015016902_v1 response to read: guid# 59af5500-437e317a-aa7d401e-1c139976 2026-01-04T02:47:54.244317Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_11_1_6788364337015016902_v1 Process answer. Aval parts: 0 2026-01-04T02:47:54.248365Z :DEBUG: [/Root] [/Root] [d09f71f9-2549cae2-e23cc687-6e2b7c55] [null] After sending read request: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:47:54.249898Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_11_1_6788364337015016902_v1 grpc read done: success# 1, data# { read { } } 2026-01-04T02:47:54.250050Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_11_1_6788364337015016902_v1 got read request: guid# d993f85b-2f074d56-680663cc-f7ad7a0c 2026-01-04T02:47:54.254079Z :DEBUG: [/Root] Decompression task done. Partition/PartitionSessionId: 0 (0-3) 2026-01-04T02:47:54.255268Z :DEBUG: [/Root] Take Data. Partition 0. Read: {0, 0} (0-0) 2026-01-04T02:47:54.255336Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 0} (1-1) 2026-01-04T02:47:54.255370Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 1} (2-2) 2026-01-04T02:47:54.255397Z :DEBUG: [/Root] Take Data. Partition 0. Read: {1, 2} (3-3) 2026-01-04T02:47:54.255449Z :DEBUG: [/Root] [/Root] [d09f71f9-2549cae2-e23cc687-6e2b7c55] [null] The application data is transferred to the client. Number of messages 4, size 14 bytes 2026-01-04T02:47:54.255667Z :INFO: [/Root] [/Root] [d09f71f9-2549cae2-e23cc687-6e2b7c55] Closing read session. Close timeout: 0.000000s 2026-01-04T02:47:54.255720Z :INFO: [/Root] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): null:test-topic:0:1:3:0 2026-01-04T02:47:54.255795Z :INFO: [/Root] [/Root] [d09f71f9-2549cae2-e23cc687-6e2b7c55] Counters: { Errors: 0 CurrentSessionLifetimeMs: 41 BytesRead: 14 MessagesRead: 4 BytesReadCompressed: 74 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T02:47:54.255907Z :NOTICE: [/Root] [/Root] [d09f71f9-2549cae2-e23cc687-6e2b7c55] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-04T02:47:54.255956Z :DEBUG: [/Root] [/Root] [d09f71f9-2549cae2-e23cc687-6e2b7c55] [null] Abort session to cluster 2026-01-04T02:47:54.256557Z :NOTICE: [/Root] [/Root] [d09f71f9-2549cae2-e23cc687-6e2b7c55] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T02:47:54.256769Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|24570897-b0adedfb-1d218fe5-ef9439f2_0] Write session: close. Timeout = 0 ms 2026-01-04T02:47:54.256812Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|24570897-b0adedfb-1d218fe5-ef9439f2_0] Write session will now close 2026-01-04T02:47:54.256869Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|24570897-b0adedfb-1d218fe5-ef9439f2_0] Write session: aborting 2026-01-04T02:47:54.257243Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|24570897-b0adedfb-1d218fe5-ef9439f2_0] Write session: gracefully shut down, all writes complete 2026-01-04T02:47:54.257287Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|24570897-b0adedfb-1d218fe5-ef9439f2_0] Write session: destroy 2026-01-04T02:47:54.259038Z node 11 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_11_1_6788364337015016902_v1 grpc read done: success# 0, data# { } 2026-01-04T02:47:54.259068Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_11_1_6788364337015016902_v1 grpc read failed 2026-01-04T02:47:54.259097Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_11_1_6788364337015016902_v1 grpc closed 2026-01-04T02:47:54.259133Z node 11 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_11_1_6788364337015016902_v1 is DEAD 2026-01-04T02:47:54.259945Z node 11 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 2 sessionId: test-message-group-id|24570897-b0adedfb-1d218fe5-ef9439f2_0 grpc read done: success: 0 data: 2026-01-04T02:47:54.259962Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 2 sessionId: test-message-group-id|24570897-b0adedfb-1d218fe5-ef9439f2_0 grpc read failed 2026-01-04T02:47:54.259990Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:232: session v1 cookie: 2 sessionId: test-message-group-id|24570897-b0adedfb-1d218fe5-ef9439f2_0 grpc closed 2026-01-04T02:47:54.260009Z node 11 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 2 sessionId: test-message-group-id|24570897-b0adedfb-1d218fe5-ef9439f2_0 is DEAD 2026-01-04T02:47:54.261037Z node 11 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:47:54.261266Z node 11 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037893][rt3.dc1--test-topic] pipe [11:7591332683967947181:2544] disconnected. 
2026-01-04T02:47:54.261292Z node 11 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037893][rt3.dc1--test-topic] pipe [11:7591332683967947181:2544] disconnected; active server actors: 1 2026-01-04T02:47:54.261315Z node 11 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037893][rt3.dc1--test-topic] pipe [11:7591332683967947181:2544] client user disconnected session shared/user_11_1_6788364337015016902_v1 2026-01-04T02:47:54.262188Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2321: [PQ: 72075186224037892] Destroy direct read session shared/user_11_1_6788364337015016902_v1 2026-01-04T02:47:54.262242Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [11:7591332683967947184:2547] destroyed 2026-01-04T02:47:54.262274Z node 12 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [11:7591332658198143099:2481] destroyed 2026-01-04T02:47:54.262308Z node 12 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 2026-01-04T02:47:54.262345Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:47:54.262380Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:47:54.262398Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:47:54.262426Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:47:54.262443Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:47:54.262525Z node 12 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_11_1_6788364337015016902_v1 2026-01-04T02:47:54.301914Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:47:54.301960Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:47:54.301977Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:47:54.302002Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:47:54.302019Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:47:54.402114Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:47:54.402157Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:47:54.402177Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:47:54.402207Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:47:54.402234Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: 
[72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:47:54.502383Z node 12 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:47:54.502427Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:47:54.502444Z node 12 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:47:54.502468Z node 12 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:47:54.502484Z node 12 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:47:54.642242Z node 11 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1259} ActorId: [11:7591332683967947205:2538] TxId: 281474976710683. Ctx: { TraceId: 01ke3eewzt9t730m7z95x7bd5n, Database: /Root, SessionId: ydb://session/3?node_id=11&id=MmUxODEwZGQtMzI0MTRiZDgtN2JlNDVhYi1kMzcyZDVkZQ==, PoolId: default, IsStreamingQuery: 0}. UNAVAILABLE: Failed to send EvStartKqpTasksRequest because node is unavailable: 12 trace_id# 2026-01-04T02:47:54.642412Z node 11 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [11:7591332683967947209:2538], TxId: 281474976710683, task: 3. Ctx: { CheckpointId : . TraceId : 01ke3eewzt9t730m7z95x7bd5n. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=11&id=MmUxODEwZGQtMzI0MTRiZDgtN2JlNDVhYi1kMzcyZDVkZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Handle abort execution event from: [11:7591332683967947205:2538], status: UNAVAILABLE, reason: {
: Error: Terminate execution } |94.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/library/ncloud/impl/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/library/ncloud/impl/ut/unittest >> KqpSinkLocks::UpdateLocksTwoShards [GOOD] >> KqpSinkLocks::UpdateLocksOneShard >> KqpRollback::DoubleUpdate [GOOD] >> KqpSinkLocks::DifferentKeyUpdate |94.7%| [TA] $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.7%| [TA] {RESULT} $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/library/ncloud/impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSinkMvcc::TransactionFailsAsSoonAsItIsClearItCannotCommit-IsOlap [GOOD] >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3+withSink [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink >> TSchemeShardExtSubDomainTest::Fake [GOOD] >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx >> KqpTx::SnapshotRO [GOOD] >> TSchemeShardExtSubDomainTest::Create >> TSchemeShardExtSubDomainTest::CreateAndWait >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain >> KqpSinkTx::TIsolationSettingTest-IsOlap-UsePragma [GOOD] >> KqpSnapshotIsolation::ConflictWrite+IsOlap-FillTables >> KqpSnapshotRead::TestReadOnly-withSink [GOOD] >> KqpSnapshotRead::TestSnapshotExpiration+withSink >> KqpTx::MixEnginesOldNew [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndWait [GOOD] >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive >> KqpSinkLocks::DifferentKeyUpdateOlap [GOOD] >> KqpSinkLocks::EmptyRange >> KqpSnapshotIsolation::TSnapshotTwoInsertOlap [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::SnapshotRO [GOOD] Test command err: Trying to start YDB, gRPC: 4209, MsgBus: 32383 2026-01-04T02:47:38.684791Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332614822446413:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:38.684867Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:38.943921Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:38.949056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:38.949174Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:38.976044Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:39.021753Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription 
[1:7591332614822446308:2081] 1767494858671134 != 1767494858671137 2026-01-04T02:47:39.033404Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:39.184647Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:39.241205Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:39.241235Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:39.241243Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:39.241328Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:39.701179Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:39.830489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:39.842366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:39.951633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.128688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.277034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.349197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:42.174842Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332632002317356:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.174996Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.179803Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332632002317366:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.179917Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.504462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:42.549002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:42.593101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:42.629126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:42.666705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:42.713721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:42.789674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:42.844650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:42.923865Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332632002318239:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.923961Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.924243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332632002318245:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.924265Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332632002318244:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.924355Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.928288Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:42.944281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-04T02:47:42.945758Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332632002318248:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:43.005927Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332636297285595:3761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:43.683658Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332614822446413:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:43.683713Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrat ... 4T02:47:53.209930Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:53.315962Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:53.329905Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:53.329982Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:53.330204Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:53.349042Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:53.406385Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:53.406403Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:53.406414Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:53.406488Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:53.619471Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:53.953102Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:53.966206Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:53.983433Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:54.075986Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:54.197915Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:54.202135Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:54.304425Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:57.299212Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332696159925811:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:57.299318Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:57.299615Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332696159925821:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:57.299682Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:57.368287Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:57.399582Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:57.428414Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:57.457634Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:57.489114Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:57.526096Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:57.568428Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:57.614108Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:57.705168Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332696159926691:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:57.705261Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:57.705524Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332696159926696:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:57.705572Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332696159926697:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:57.705708Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:57.709767Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:57.722110Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332696159926700:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:57.792512Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332696159926751:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:58.186464Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332678980054811:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:58.186520Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:00.190492Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YjIzZjVlYi01ZTMyNDA5NC05YjFkN2ZkMC03NWY4NDk1OA==, ActorId: [3:7591332704749861668:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ef2ve4xdrb6zyx8kk24ky, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { position { row: 3 column: 25 } message: "Operation \'Upsert\' can\'t be performed in read only transaction" end_position { row: 3 column: 25 } issue_code: 2008 severity: 1 } trace_id#
:3:25: Error: Operation 'Upsert' can't be performed in read only transaction, code: 2008 |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TSchemeShardExtSubDomainTest::Create [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst >> KqpSinkLocks::OlapUncommittedRead [GOOD] >> KqpSinkLocks::OlapInsertWithBulkUpsert+UseBulkUpsert >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive >> TSchemeShardExtSubDomainTest::NothingInsideGSS-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::Drop-ExternalHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpTx::MixEnginesOldNew [GOOD] Test command err: Trying to start YDB, gRPC: 31730, MsgBus: 65316 2026-01-04T02:47:38.878083Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332613423383564:2158];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:38.878267Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:38.912883Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:39.267756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:39.267871Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:39.370791Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:39.382221Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332613423383440:2081] 1767494858871443 != 1767494858871446 2026-01-04T02:47:39.391765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:39.393490Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:39.618593Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:39.618616Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:39.618624Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:39.618699Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:39.640952Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:39.891176Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: 
fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:40.205072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:40.215362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:40.265479Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.469688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.648500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.730424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:42.734834Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332630603254490:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.735003Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.740081Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332630603254500:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.740203Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.095346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.134836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.172446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.243672Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.315768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.363309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.406900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.477084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.588643Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332634898222667:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.588748Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.589270Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332634898222672:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.589317Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332634898222673:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.589345Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.593329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:43.607043Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332634898222676:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:43.683151Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332634898222727:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:43.878165Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332613423383564:2158];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:43.905095Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existenc ... Response for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01ke3eewfz7y33jj0s9nk8hbhv" issue_code: 2015 severity: 1 } trace_id# Trying to start YDB, gRPC: 10620, MsgBus: 28966 2026-01-04T02:47:54.895625Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332681877397741:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:54.918176Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:54.945915Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:55.093354Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332681877397701:2081] 1767494874875809 != 1767494874875812 2026-01-04T02:47:55.111957Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:55.122976Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:55.123067Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:55.127532Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:55.183374Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:55.318413Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:55.318438Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:55.318445Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:55.318528Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:55.840710Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:55.866689Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:55.880745Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:55.928170Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:55.967509Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:56.137642Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:56.220706Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:58.762949Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332699057268745:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:58.763037Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:58.763417Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332699057268755:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:58.763463Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:58.829883Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:58.867461Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:58.901494Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:58.948230Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:58.986879Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:59.023047Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:59.063150Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:59.114195Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:59.233933Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332703352236923:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:59.234051Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:59.234227Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332703352236928:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:59.234280Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332703352236929:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:59.234537Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:59.239629Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:59.255974Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332703352236932:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:59.348618Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332703352236983:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:59.899099Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332681877397741:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:59.899168Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TSchemeShardExtSubDomainTest::CreateItemsInsideExtSubdomainAtGSSwithoutTSS [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive >> KqpSinkMvcc::ChangeFromTheFuture+IsOlap [GOOD] >> KqpSinkMvcc::ChangeFromTheFuture-IsOlap >> KqpSinkLocks::InsertWithBulkUpsert-UseBulkUpsert [GOOD] >> KqpLocks::TwoPhaseTx [GOOD] >> KqpLocks::MixedTxFail+useSink >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot+StreamIndex [GOOD] >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot-StreamIndex >> KqpSinkLocks::OlapVisibleUncommittedRowsUpdate [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithoutEnablingTx-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice >> TSchemeShardServerLess::Fake [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst >> TSchemeShardServerLess::StorageBillingLabels >> RetryPolicy::TWriteSession_TestBrokenPolicy [GOOD] >> RetryPolicy::TWriteSession_RetryOnTargetCluster >> KqpSinkTx::OlapInteractive [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSnapshotTwoInsertOlap [GOOD] Test command err: Trying to start YDB, gRPC: 14050, MsgBus: 8622 2026-01-04T02:47:09.857734Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332487013141020:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:09.864370Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:10.214003Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:10.333613Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:10.333756Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:10.338710Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:10.410231Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332487013140964:2081] 1767494829838690 != 1767494829838693 2026-01-04T02:47:10.410949Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:10.452019Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:10.487995Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:10.488010Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:10.488021Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:10.488084Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:10.864168Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:11.134913Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:11.140670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:13.133513Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332504193011042:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.133513Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332504193011025:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.133618Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.133854Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332504193011053:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.133932Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.137110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:13.146916Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332504193011052:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:13.260872Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332504193011105:2537] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:13.521472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.636025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:14.697361Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:14.853624Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332487013141020:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:14.942323Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8168, MsgBus: 8943 2026-01-04T02:47:17.548158Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332522191811177:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:17.548332Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:17.579301Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:17.732633Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:17.733901Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332522191811148:2081] 1767494837542932 != 1767494837542935 2026-01-04T02:47:17.741882Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:17.776311Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:17.776393Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-04T02:47:17.787311Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:17.919989Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:17.950385Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:17.950407Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:17.950424Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:17.950518Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:18.496277Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:18.575905Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:21.385885Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332539371681192:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:21.386019Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:21.386665Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332539371681220:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:21.386730Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332539371681221:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:21.387023Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:21.391237Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propo ... e 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.568899Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.568994Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.569032Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.579032Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.579119Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.579156Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.587749Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.587833Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.587870Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.596485Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.596571Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.596609Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.604987Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.605080Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.605117Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.613957Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.614043Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.614095Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.622261Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.622347Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.622381Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.629888Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.629974Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.630007Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.637793Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.637902Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.637941Z node 3 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:58.747577Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3eedz97dkxfqb679z0cy5w", SessionId: ydb://session/3?node_id=3&id=MzZkN2VhM2ItMmFjMzJmMzMtNjFkOWJkY2MtNDMyNjM1NGQ=, Slow query, duration: 20.012350s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=FO::FETCHING;event=free;usage=0;delta=444; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=444; FALLBACK_ACTOR_LOGGING;priority=INFO;component=334;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=Key;);;ff=(column_ids=1,2;column_names=Key,Value;);;program_input=(column_ids=1,2;column_names=Key,Value;);;; 2026-01-04T02:48:00.828210Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;commit_tx_id=281474976715667;commit_lock_id=281474976715666;fline=manager.cpp:80;broken_lock_id=281474976715665; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=FO::FETCHING;event=free;usage=0;delta=444; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=2700;fline=stage_features.cpp:89;name=GLOBAL;event=free;usage=0;delta=444; FALLBACK_ACTOR_LOGGING;priority=INFO;component=334;fline=context.h:99;fetching=ef=(column_ids=;column_names=;);;sharding=(column_ids=;column_names=;);;pk=(column_ids=1;column_names=Key;);;ff=(column_ids=1,2;column_names=Key,Value;);;program_input=(column_ids=1,2;column_names=Key,Value;);;; 2026-01-04T02:48:00.918504Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2088: SelfId: [3:9744:9143], TxId: 281474976715665, task: 2. Ctx: { TraceId : 01ke3ef2y7fp9r3kr8gf3my74w. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZDg5ZTU2ZDktMTZmY2E3M2ItNDVkOTY2MGEtMWFlMzRmMGU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Sink[0] fatal error: {
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 } 2026-01-04T02:48:00.919450Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:9744:9143], TxId: 281474976715665, task: 2. Ctx: { TraceId : 01ke3ef2y7fp9r3kr8gf3my74w. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZDg5ZTU2ZDktMTZmY2E3M2ItNDVkOTY2MGEtMWFlMzRmMGU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED KIKIMR_LOCKS_INVALIDATED: {
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 }. 2026-01-04T02:48:00.921357Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZDg5ZTU2ZDktMTZmY2E3M2ItNDVkOTY2MGEtMWFlMzRmMGU=, ActorId: [3:9515:8919], ActorState: ExecuteState, LegacyTraceId: 01ke3ef2y7fp9r3kr8gf3my74w, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV2`" issue_code: 2001 severity: 1 } trace_id# FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=14;stage=FO::FETCHING; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2700;fline=allocation.cpp:62;event=destroy;allocation_id=15;stage=FO::FETCHING; |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardExtSubDomainTest::Drop-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst >> TSchemeShardExtSubDomainTest::CreateAndAlter [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::Fake [GOOD] |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst >> KqpSnapshotIsolation::TUniqueSecondaryWriteIndexOltp [GOOD] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> KqpSinkMvcc::InsertConflictingKey+IsOlap+CommitOnInsert [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false >> KqpSinkLocks::UpdateLocksOneShard [GOOD] >> KqpSinkLocks::UpdateLocksOneShardRowExists >> KqpSinkMvcc::InsertConflictingKey-IsOlap-CommitOnInsert [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> KqpSinkMvcc::OltpNamedStatementNoSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite2+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 29156, MsgBus: 27945 2026-01-04T02:47:38.961941Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332615099902401:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:38.962233Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:39.347413Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:39.352577Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:39.352669Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:39.399965Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:39.608178Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:39.614237Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:39.634796Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:39.634815Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:39.634823Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:39.634907Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:39.970310Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:40.376669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:40.410323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:40.478878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.738819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:40.954651Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:41.043289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.095348Z node 1 
:KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332636574740652:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.095447Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.095785Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332636574740662:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.095816Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.435127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.474319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.514205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.565020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.593860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.636582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.688596Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.765724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.866187Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332636574741536:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.866293Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.866830Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332636574741542:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.866869Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.866909Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332636574741541:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.871678Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:43.884081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710670, at schemeshard: 72057594046644480 2026-01-04T02:47:43.884306Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332636574741545:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:43.965909Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332615099902401:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:43.965994Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:43.966795Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332636574741596:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:46.215095Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not v ... node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:55.700501Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:55.700509Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:55.700603Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:55.756537Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:56.268087Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:56.285588Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:56.301219Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:56.368869Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:56.436668Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:56.606667Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:56.682957Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:59.039289Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332703243983654:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:59.039368Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:59.039618Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332703243983664:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:59.039646Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:59.110607Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:59.147939Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:59.187093Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:59.220034Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:59.254674Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:59.330421Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:59.377562Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:59.455165Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:59.550716Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332703243984537:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:59.550797Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:59.551191Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332703243984542:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:59.551242Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332703243984543:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:59.551361Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:59.555166Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:59.568058Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332703243984546:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:47:59.640680Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332703243984597:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:48:00.327314Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332686064112673:2094];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:00.327376Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:04.084925Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7591332724718821490:2532], SessionActorId: [3:7591332711833919496:2532], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/EightShard`, code: 2001 . sessionActorId=[3:7591332711833919496:2532]. 2026-01-04T02:48:04.085148Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=ODljMjRiNmMtNzY4Nzk3OTUtZDI3MTNlYzctZDkxNmQ2MDA=, ActorId: [3:7591332711833919496:2532], ActorState: ExecuteState, LegacyTraceId: 01ke3ef6es9fdjnsm56abyes83, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7591332724718821491:2532] from: [3:7591332724718821490:2532] trace_id# 2026-01-04T02:48:04.085217Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7591332724718821491:2532] TxId: 281474976715675. Ctx: { TraceId: 01ke3ef6es9fdjnsm56abyes83, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ODljMjRiNmMtNzY4Nzk3OTUtZDI3MTNlYzctZDkxNmQ2MDA=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/EightShard`, code: 2001 } trace_id# 2026-01-04T02:48:04.085516Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ODljMjRiNmMtNzY4Nzk3OTUtZDI3MTNlYzctZDkxNmQ2MDA=, ActorId: [3:7591332711833919496:2532], ActorState: ExecuteState, LegacyTraceId: 01ke3ef6es9fdjnsm56abyes83, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/EightShard`" issue_code: 2001 severity: 1 } trace_id# |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::InsertWithBulkUpsert-UseBulkUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 14563, MsgBus: 22790 2026-01-04T02:47:09.780604Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332488049842668:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:09.780664Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:09.814817Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:10.102517Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:10.102627Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:10.106458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:10.159348Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:10.201995Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332488049842639:2081] 1767494829779210 != 1767494829779213 2026-01-04T02:47:10.208566Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:10.328367Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:10.446987Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:10.447004Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:10.447036Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:10.447177Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:10.793137Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:11.091759Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:13.046874Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505229712722:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.046908Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505229712710:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.047000Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.047741Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332505229712725:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.047848Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.051748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:13.063306Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332505229712724:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:13.177509Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332505229712777:2539] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:13.449854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:47:13.621649Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505229712927:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:13.622150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505229712927:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:13.622412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505229712927:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:13.622521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505229712927:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:13.622664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505229712927:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:13.622767Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505229712927:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:13.622859Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505229712927:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:13.622961Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505229712927:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:13.623106Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505229712927:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:47:13.623231Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037893;self_id=[1:7591332505229712927:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:47:13.623371Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505229712927:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:47:13.623489Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505229712927:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:47:13.623617Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332505229712927:2343];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:47:13.625291Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332505229712919:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:13.625332Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332505229712919:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:13.625491Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332505229712919:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:13.625591Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332505229712919:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:13.625686Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332505229712919:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:13.625796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332505229712919:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:13.626039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332505229712919:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:13.626139Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332505229712919:2340];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:13.626 ... 963].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-04T02:47:52.390101Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7591332673226693792:2963], SessionActorId: [3:7591332668931726449:2963], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:7591332668931726449:2963]. 2026-01-04T02:47:52.390310Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=ZWJiNTkzYmUtOTQ2MDc3NTMtNDQwMmRiM2YtYTE2ZWVmYjk=, ActorId: [3:7591332668931726449:2963], ActorState: ExecuteState, LegacyTraceId: 01ke3eev721y1xq1rtapkdn4dm, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7591332673226693793:2963] from: [3:7591332673226693792:2963] trace_id# 2026-01-04T02:47:52.390442Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7591332673226693793:2963] TxId: 281474976710665. Ctx: { TraceId: 01ke3eev721y1xq1rtapkdn4dm, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ZWJiNTkzYmUtOTQ2MDc3NTMtNDQwMmRiM2YtYTE2ZWVmYjk=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-04T02:47:52.390817Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZWJiNTkzYmUtOTQ2MDc3NTMtNDQwMmRiM2YtYTE2ZWVmYjk=, ActorId: [3:7591332668931726449:2963], ActorState: ExecuteState, LegacyTraceId: 01ke3eev721y1xq1rtapkdn4dm, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-04T02:47:52.392042Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710665; 2026-01-04T02:47:52.392221Z node 3 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [1767494872430 : 281474976710665] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } Trying to start YDB, gRPC: 10386, MsgBus: 6844 2026-01-04T02:47:54.226734Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332683318550843:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:54.226793Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:54.319073Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:54.402593Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:54.473138Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:54.473241Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:54.485891Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:54.490600Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:54.608049Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:54.608075Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:54.608085Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:54.608182Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:55.241971Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:55.579301Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:55.598701Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:59.052111Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332704793388163:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:59.052260Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:59.052681Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332704793388175:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:59.057838Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:59.078649Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591332704793388177:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:59.172727Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591332704793388239:2540] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:59.232561Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591332683318550843:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:59.232620Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:59.238864Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:59.318492Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:00.556807Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:02.777685Z node 4 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710666; 2026-01-04T02:48:02.779067Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4391: SelfId: [4:7591332717678298143:2965], SessionActorId: [4:7591332717678298084:2965], Got LOCKS BROKEN for table. ShardID=72075186224037888, Sink=[4:7591332717678298143:2965].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-04T02:48:02.779174Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [4:7591332717678298143:2965], SessionActorId: [4:7591332717678298084:2965], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[4:7591332717678298084:2965]. 2026-01-04T02:48:02.779387Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=4&id=YjU3MDczNS04MjhlNjg4YS0xOTUwNjA3MC1mNjlhOGVj, ActorId: [4:7591332717678298084:2965], ActorState: ExecuteState, LegacyTraceId: 01ke3ef5bwbfj7gnbgv5t41hty, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7591332717678298144:2965] from: [4:7591332717678298143:2965] trace_id# 2026-01-04T02:48:02.779529Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [4:7591332717678298144:2965] TxId: 281474976710666. Ctx: { TraceId: 01ke3ef5bwbfj7gnbgv5t41hty, Database: /Root, SessionId: ydb://session/3?node_id=4&id=YjU3MDczNS04MjhlNjg4YS0xOTUwNjA3MC1mNjlhOGVj, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-04T02:48:02.779939Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=YjU3MDczNS04MjhlNjg4YS0xOTUwNjA3MC1mNjlhOGVj, ActorId: [4:7591332717678298084:2965], ActorState: ExecuteState, LegacyTraceId: 01ke3ef5bwbfj7gnbgv5t41hty, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-04T02:48:02.780405Z node 4 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_ABORTED;details=Distributed transaction aborted due to commit failure;tx_id=281474976710666; 2026-01-04T02:48:02.780533Z node 4 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [1767494882825 : 281474976710666] from 72075186224037889 at tablet 72075186224037889, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapVisibleUncommittedRowsUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 27941, MsgBus: 4931 2026-01-04T02:47:09.726397Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332487624585958:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:09.726455Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:09.970027Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:10.046482Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:10.046605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:10.105441Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:10.159316Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:10.249926Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:10.446857Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:10.446877Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:10.446904Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:10.446999Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:10.755317Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:11.094594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:11.107877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:13.029295Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332504804456003:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.029306Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332504804455995:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.029454Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.029903Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332504804456010:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.029988Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.032990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:13.044356Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332504804456009:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:13.151392Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332504804456062:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:13.442634Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.551669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:14.630552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:14.944617Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332487624585958:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:15.382683Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:16.471306Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7591332517689365928:2962], SessionActorId: [1:7591332513394398584:2962], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 . sessionActorId=[1:7591332513394398584:2962]. 2026-01-04T02:47:16.471591Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=MzQyMGM3MGMtNTM5NjExM2ItYTc1M2NlNWUtZTY1N2RmZGM=, ActorId: [1:7591332513394398584:2962], ActorState: ExecuteState, LegacyTraceId: 01ke3edr5a24sragzvfva9azqm, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7591332517689365929:2962] from: [1:7591332517689365928:2962] trace_id# 2026-01-04T02:47:16.471691Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7591332517689365929:2962] TxId: 281474976710665. Ctx: { TraceId: 01ke3edr5a24sragzvfva9azqm, Database: /Root, SessionId: ydb://session/3?node_id=1&id=MzQyMGM3MGMtNTM5NjExM2ItYTc1M2NlNWUtZTY1N2RmZGM=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 } trace_id# 2026-01-04T02:47:16.472095Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=MzQyMGM3MGMtNTM5NjExM2ItYTc1M2NlNWUtZTY1N2RmZGM=, ActorId: [1:7591332513394398584:2962], ActorState: ExecuteState, LegacyTraceId: 01ke3edr5a24sragzvfva9azqm, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`" issue_code: 2001 severity: 1 } trace_id#
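
The ABORTED results with issue code 2001 above are the optimistic-lock behaviour these KqpSinkLocks/KqpSinkMvcc tests exercise: a serializable transaction that has read a key can no longer commit once another transaction commits a conflicting write to that key. A minimal YQL interleaving that reproduces the error against the `/Root/Test` table (its DDL is quoted in the KQP_SLOW_LOG entry below; the key values used here are illustrative, not taken from the test):

    -- Session A, interactive transaction (SerializableReadWrite): read the row.
    SELECT Amount FROM `/Root/Test` WHERE `Group` = 1u AND Name = "Paul";

    -- Session B, separate transaction: write the same key and commit.
    UPSERT INTO `/Root/Test` (`Group`, Name, Amount) VALUES (1u, "Paul", 350ul);
    -- Session B commits successfully; this invalidates Session A's read lock.

    -- Session A: write based on its now-stale read and try to commit.
    UPSERT INTO `/Root/Test` (`Group`, Name, Amount) VALUES (1u, "Paul", 300ul);
    -- Session A's commit fails with ABORTED:
    --   "Transaction locks invalidated. Table: `/Root/Test`", issue code 2001;
    -- on the datashard side the same failure surfaces as issue code 2011,
    --   "Distributed transaction aborted due to commit failure".
    -- The client is expected to retry the whole transaction.
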
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 18347, MsgBus: 12798 2026-01-04T02:47:18.012481Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:18.012605Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:18.076442Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:18.085990Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332524363344133:2081] 1767494837919692 != 1767494837919695 2026-01-04T02:47:18.088513Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:18.088585Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:18.115877Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:18.229262Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:18.276836Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:18.276861Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:18.276868Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:18.276938Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:18.814864Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:18.835822Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:18.989946Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:21.802988Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332541543214204:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:21.803101Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_ ... 10662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.670160Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038054;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.674130Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038064;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.674194Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038064;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.674215Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038064;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.679741Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.679811Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.679831Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.683733Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.683801Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.683821Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.689956Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038062;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.690019Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038062;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.690040Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038062;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.694382Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.694454Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.694476Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.700878Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038066;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.700942Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038066;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.700966Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038066;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.708369Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038060;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.708440Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038060;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.708463Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038060;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.711648Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038056;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.711709Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038056;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.711757Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038056;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.718406Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038044;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.718504Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038044;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.718526Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038044;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.721920Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038048;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.721982Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038048;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.722002Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038048;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.727119Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.727176Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.727196Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.731905Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038078;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.731981Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038078;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.732001Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038078;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.737237Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038022;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.737307Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038022;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.737328Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038022;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.763313Z node 4 
:KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3eem366ger04jav22a2r73", SessionId: ydb://session/3?node_id=4&id=OWM3OGE5MDMtOGI4OWJkMTAtYTc4OTlmYjQtNGY1NmRkMmE=, Slow query, duration: 11.012576s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlter-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst >> KqpSinkLocks::OlapUpdateLocksOneShard [GOOD] >> KqpSinkLocks::OlapUpdateLocksOneShardRowExists ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapInteractive [GOOD] Test command err: Trying to start YDB, gRPC: 4478, MsgBus: 3089 2026-01-04T02:47:09.734034Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332489642738935:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:09.735306Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:09.989213Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:10.061037Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:10.061134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:10.067068Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:10.141392Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:10.267275Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:10.444344Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:10.444368Z node 1 :NET_CLASSIFIER WARN: 
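
For readability, this is the DDL quoted (with \n escapes) in the KQP_SLOW_LOG entry above; the same three statements are quoted again in the KqpSinkTx::OlapInteractive slow-query entry further down. The reported ~11 s duration covers creating these three column-store (STORE = COLUMN) tables with 10 + 100 + 100 minimum partitions under ASan:

    CREATE TABLE `/Root/Test` (
        Group Uint32 not null,
        Name String not null,
        Amount Uint64,
        Comment String,
        PRIMARY KEY (Group, Name)
    ) WITH (
        STORE = COLUMN,
        AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10
    );

    CREATE TABLE `/Root/KV` (
        Key Uint32 not null,
        Value String,
        PRIMARY KEY (Key)
    ) WITH (
        STORE = COLUMN,
        AUTO_PARTITIONING_BY_SIZE = DISABLED,
        AUTO_PARTITIONING_BY_LOAD = DISABLED,
        AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,
        UNIFORM_PARTITIONS = 100
    );

    CREATE TABLE `/Root/KV2` (
        Key Uint32 not null,
        Value String,
        PRIMARY KEY (Key)
    ) WITH (
        STORE = COLUMN,
        AUTO_PARTITIONING_BY_SIZE = DISABLED,
        AUTO_PARTITIONING_BY_LOAD = DISABLED,
        AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,
        UNIFORM_PARTITIONS = 100
    );
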
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:10.444478Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:10.444571Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:10.739956Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:11.126533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:12.918973Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332502527641595:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:12.919091Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:12.919311Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332502527641604:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:12.919389Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:12.919870Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332502527641609:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:12.926365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:12.935948Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332502527641611:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:13.045858Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332502527641662:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:13.448962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:47:13.611878Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332506822609106:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:13.611879Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506822609098:2340];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:13.612091Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506822609098:2340];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:13.612328Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506822609098:2340];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:13.612410Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506822609098:2340];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:13.612473Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506822609098:2340];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:13.612552Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506822609098:2340];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:13.612632Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506822609098:2340];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:13.612684Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332506822609106:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:13.612688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037890;self_id=[1:7591332506822609098:2340];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:13.612743Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506822609098:2340];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:47:13.612800Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506822609098:2340];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:47:13.612832Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332506822609106:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:13.612882Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506822609098:2340];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:47:13.612937Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332506822609106:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:13.612949Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506822609098:2340];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:47:13.613049Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332506822609098:2340];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:47:13.613114Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332506822609106:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:13.613230Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332506822609106:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:13.613322Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332506822609106:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:13.613470Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332506822609106:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:13.613572Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037894;self_id=[1:7591332506822609106:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:47:13.613703Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332506822609106:2342];tablet_id=72075186224037894;process=TTxInitS ... e;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.672013Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.672019Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038054;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.672076Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038054;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.672080Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.672098Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038054;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.672099Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038012;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.682085Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038050;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.682158Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038050;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.682178Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038050;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.682784Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038062;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.682830Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038062;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.682847Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038062;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.691918Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038058;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.691986Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038058;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.692008Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038058;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.692161Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038078;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.692210Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038078;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.692228Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038078;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.701553Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.701622Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.701644Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038052;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.701784Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038056;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.702159Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038056;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.702202Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038056;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.711416Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038066;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.711491Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038066;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.711513Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038066;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.716321Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.716386Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.716406Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.721312Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.721382Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.721403Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.726258Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.726349Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.726375Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.731228Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038064;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.731291Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038064;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.731312Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038064;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:47:59.765657Z node 3 
:KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3eemrr4h8wyfejkek3q4se", SessionId: ydb://session/3?node_id=3&id=ZDcwZjQ0YjItOTE2MGMzNTgtZTZiZGM2MWYtYWRiMjE1MzE=, Slow query, duration: 10.465860s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-04T02:47:59.919545Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:47:59.919563Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::DifferentKeyUpdate [GOOD] >> TSchemeShardExtSubDomainTest::AlterWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain >> TSchemeShardExtSubDomainTest::Drop-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-false >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true >> IcbAsActorTests::TestHttpPostReaction >> IcbAsActorTests::TestHttpGetResponse >> IcbAsActorTests::TestHttpGetResponse [GOOD] >> KqpSnapshotIsolation::TPragmaSettingOltp-IsSnapshotIsolation [GOOD] >> IcbAsActorTests::TestHttpPostReaction [GOOD] >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::InsertConflictingKey+IsOlap+CommitOnInsert [GOOD] Test command err: Trying to start YDB, gRPC: 15593, MsgBus: 18867 2026-01-04T02:47:09.721282Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332490001225859:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:09.721382Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:10.068669Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:10.101478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:10.101560Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:10.197952Z node 1 
:CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332490001225825:2081] 1767494829716816 != 1767494829716819 2026-01-04T02:47:10.198356Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:10.209359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:10.327209Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:10.447203Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:10.447221Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:10.447233Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:10.447356Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:10.752851Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:11.111036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:13.164889Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332507181095908:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.164888Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332507181095897:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.165003Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.165317Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332507181095912:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.165402Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.168762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:13.182984Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332507181095911:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:13.309882Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332507181095964:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:13.558659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:13.693158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:14.722071Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332490001225859:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:14.722160Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:14.756868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 32419, MsgBus: 5090 2026-01-04T02:47:18.911195Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332528361587246:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:18.911591Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:18.990057Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:19.153948Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332528361587141:2081] 1767494838876163 != 1767494838876166 2026-01-04T02:47:19.170562Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:19.209951Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:19.246176Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:19.246261Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Disconnected -> Connecting 2026-01-04T02:47:19.263846Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:19.346786Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:19.346810Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:19.346817Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:19.346883Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:19.355153Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:19.876545Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:19.946960Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:19.952864Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:22.725399Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332545541457206:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:22.725523Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:22.725999Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332545541457218:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:22.726049Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332545541457219:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:22.726164Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:22.730632Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but pro ... cal=44;result=not_found; 2026-01-04T02:48:01.090143Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.090163Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038002;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.091951Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038039;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.092002Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038039;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.092017Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038039;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.100829Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037998;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.100912Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037998;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.100931Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037998;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.101527Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.101575Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.101594Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.111831Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.111864Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038054;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 
2026-01-04T02:48:01.111909Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.111916Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038054;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.111927Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038061;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.111934Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038054;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.121754Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038048;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.121937Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038048;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.121962Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038048;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.124915Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.124969Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.124987Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038000;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.131688Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.131778Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.131796Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.134822Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038026;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.134894Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038026;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.134910Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038026;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.141759Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038050;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.142175Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038050;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.142202Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038050;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:01.162549Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:48:01.162585Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:01.176432Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3eep7e1z2f464g4y5k491x", SessionId: ydb://session/3?node_id=4&id=MjY5NTFkODctNTE2ZGE2ZjEtMzI3ZGM4MjMtOWY2Y2Q0MjM=, Slow query, duration: 10.714205s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-04T02:48:03.584824Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037930;tx_state=TTxProgressTx::Execute;tx_current=281474976715668;tx_id=281474976715668;commit_tx_id=281474976715668;commit_lock_id=281474976715665;fline=manager.cpp:80;broken_lock_id=281474976715666; 2026-01-04T02:48:03.614793Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2088: SelfId: [4:7591332721401671205:4629], TxId: 281474976715669, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3ef68ce06kvsnqtdxf6agj. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTc0YWVhMjktOThjZWVmZmQtY2Y5YzEyMDktOTFkMjYxYzE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. 
Sink[0] fatal error: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 } 2026-01-04T02:48:03.614932Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [4:7591332721401671205:4629], TxId: 281474976715669, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3ef68ce06kvsnqtdxf6agj. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=MTc0YWVhMjktOThjZWVmZmQtY2Y5YzEyMDktOTFkMjYxYzE=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED KIKIMR_LOCKS_INVALIDATED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 }. 2026-01-04T02:48:03.615635Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=MTc0YWVhMjktOThjZWVmZmQtY2Y5YzEyMDktOTFkMjYxYzE=, ActorId: [4:7591332712811734790:3635], ActorState: ExecuteState, LegacyTraceId: 01ke3ef68ce06kvsnqtdxf6agj, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`" issue_code: 2001 severity: 1 } trace_id# |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpGetResponse [GOOD] |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> IcbAsActorTests::TestHttpPostReaction [GOOD] >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true >> TSchemeShardExtSubDomainTest::CreateAndSameAlterTwice-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::InsertConflictingKey-IsOlap-CommitOnInsert [GOOD] Test command err: Trying to start YDB, gRPC: 30591, MsgBus: 27528 2026-01-04T02:47:09.918716Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332486449585873:2261];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:09.918769Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:10.329134Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:10.329233Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:10.365456Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:10.381797Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:10.383967Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:10.510010Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:10.510038Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:10.510057Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:10.510156Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:10.596923Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-04T02:47:10.905995Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:11.089108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:11.097115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:13.287096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332503629455717:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.287096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332503629455726:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.287240Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.287500Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332503629455732:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.287567Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:13.290614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:13.299426Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332503629455731:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:13.415154Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332503629455784:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:13.687990Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:47:13.873079Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332503629455953:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:13.873411Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332503629455953:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:13.873651Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332503629455953:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:13.873775Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332503629455953:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:13.874083Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332503629455953:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:13.874200Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332503629455953:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:13.874314Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332503629455953:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:13.874412Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332503629455953:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:13.874540Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332503629455953:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:47:13.874666Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037895;self_id=[1:7591332503629455953:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:47:13.874796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332503629455953:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:47:13.874903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332503629455953:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:47:13.875048Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;self_id=[1:7591332503629455953:2342];tablet_id=72075186224037895;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:47:13.875395Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332503629455944:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:13.875451Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332503629455944:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:13.875620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332503629455944:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:13.875773Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332503629455944:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:13.875875Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332503629455944:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:13.875995Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332503629455944:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:13.876137Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332503629455944:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:13.876251Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037890;self_id=[1:7591332503629455944:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:13.876362Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037890;self_id=[1:7591332503629455944:2337];tablet_id=72075186224037890;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Resto ... subdomain.cpp:311) 2026-01-04T02:47:57.938604Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:58.100584Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:01.428314Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332713934358822:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:01.428473Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:01.429058Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332713934358858:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:01.429114Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332713934358859:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:01.429167Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:01.434491Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:01.447849Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:01.448887Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591332713934358862:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:48:01.557131Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591332713934358913:2540] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:48:01.626182Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:01.675699Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:02.928176Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:04.857376Z node 4 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710669; 2026-01-04T02:48:04.877835Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [4:7591332726819269072:2966], Table: `/Root/KV` ([72057594046644480:43:1]), SessionActorId: [4:7591332726819268809:2966]Got LOCKS BROKEN for table `/Root/KV`. ShardID=72075186224037889, Sink=[4:7591332726819269072:2966].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-04T02:48:04.877988Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[4:7591332726819268809:2966]. 2026-01-04T02:48:04.878225Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.878241Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.878253Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.878268Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.878286Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.878300Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.878315Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.878343Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.878360Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.878373Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.878389Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.878469Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=4&id=YWI5MzAzMDAtZDQwZGY0ODQtYWI5M2RkNGMtYjBhMGQ3Nzg=, ActorId: [4:7591332726819268809:2966], ActorState: ExecuteState, LegacyTraceId: 01ke3ef7f3b7w26tzfzdecv7b3, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7591332726819269383:2966] from: [4:7591332726819269065:2966] trace_id# 2026-01-04T02:48:04.878489Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.878506Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.878520Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.878559Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: 
[4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.878668Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [4:7591332726819269383:2966] TxId: 281474976710669. Ctx: { TraceId: 01ke3ef7f3b7w26tzfzdecv7b3, Database: /Root, SessionId: ydb://session/3?node_id=4&id=YWI5MzAzMDAtZDQwZGY0ODQtYWI5M2RkNGMtYjBhMGQ3Nzg=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-04T02:48:04.878807Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.878824Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.878841Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.878856Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.879075Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=YWI5MzAzMDAtZDQwZGY0ODQtYWI5M2RkNGMtYjBhMGQ3Nzg=, ActorId: [4:7591332726819268809:2966], ActorState: ExecuteState, LegacyTraceId: 01ke3ef7f3b7w26tzfzdecv7b3, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# 2026-01-04T02:48:04.879315Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.879332Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.879346Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.879361Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.879375Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 2026-01-04T02:48:04.879416Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332726819269065:2966], SessionActorId: [4:7591332726819268809:2966], StateRollback: unknown message 278003713 |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.7%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest |94.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:48:06.591709Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:48:06.591841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:48:06.591883Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:48:06.591913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:48:06.591938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:48:06.591962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:48:06.592033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:48:06.592098Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:48:06.592905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:48:06.593223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:48:06.681893Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:48:06.681955Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:06.695892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:48:06.696091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:48:06.696288Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:48:06.704300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:48:06.704538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:48:06.705238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:06.705540Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:48:06.708447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:06.708652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:48:06.709857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:48:06.709929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:06.710041Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:48:06.710086Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:48:06.710183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:48:06.710370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:48:06.717176Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:48:06.839542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:48:06.839859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:06.840085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:48:06.840131Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:48:06.840351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:48:06.840432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:06.842798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 
72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:06.843006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:48:06.843237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:06.843332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:48:06.843383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:48:06.843414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:48:06.845492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:06.845559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:48:06.845619Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:48:06.847543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:06.847589Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:06.847642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:48:06.847694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:48:06.851525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:48:06.853478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:48:06.853680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:48:06.854834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:06.854981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 
72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:48:06.855035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:48:06.855325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:48:06.855385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:48:06.855559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:48:06.855651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:48:06.857781Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:48:06.857844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 5, at schemeshard: 72057594046678944 2026-01-04T02:48:07.788692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2026-01-04T02:48:07.790403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 Forgetting tablet 72075186234409546 2026-01-04T02:48:07.794263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2026-01-04T02:48:07.794564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186234409548 2026-01-04T02:48:07.796319Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409547 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409547 Forgetting tablet 72075186234409547 2026-01-04T02:48:07.798392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-04T02:48:07.799112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-01-04T02:48:07.799371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 2026-01-04T02:48:07.800968Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:48:07.801099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2026-01-04T02:48:07.801248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2026-01-04T02:48:07.801924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:48:07.802058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2026-01-04T02:48:07.802136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:48:07.806342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-04T02:48:07.806405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186234409546 2026-01-04T02:48:07.806508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2026-01-04T02:48:07.806558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409548 2026-01-04T02:48:07.807573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-04T02:48:07.807627Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409547 2026-01-04T02:48:07.808286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:48:07.808423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-04T02:48:07.808805Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-04T02:48:07.808855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-04T02:48:07.809382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-04T02:48:07.809492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-04T02:48:07.809540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:937:2797] TestWaitNotification: 
OK eventTxId 106 2026-01-04T02:48:07.810201Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:48:07.810499Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 260us result status StatusPathDoesNotExist 2026-01-04T02:48:07.810712Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:48:07.811303Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:48:07.811498Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 193us result status StatusPathDoesNotExist 2026-01-04T02:48:07.811671Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:48:07.812296Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:48:07.812515Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 229us result status StatusSuccess 2026-01-04T02:48:07.812993Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: 
"/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186233409550 is deleted wait until 72075186233409551 is deleted wait until 72075186233409552 is deleted wait until 72075186233409553 is deleted 2026-01-04T02:48:07.820667Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409550 2026-01-04T02:48:07.820770Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409551 2026-01-04T02:48:07.820892Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409552 2026-01-04T02:48:07.820964Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186233409553 Deleted tabletId 72075186233409550 Deleted tabletId 72075186233409551 Deleted tabletId 72075186233409552 Deleted tabletId 72075186233409553 |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::DifferentKeyUpdate [GOOD] Test command err: Trying to start YDB, gRPC: 25107, MsgBus: 19559 ... 
waiting for SysViewsRoster update finished 2026-01-04T02:47:25.305943Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:25.432826Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:47:25.433279Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:25.433362Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:47:25.728362Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:25.728522Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:25.804464Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494842264309 != 1767494842264312 2026-01-04T02:47:25.821072Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:25.874941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:26.020642Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:47:26.414284Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:26.414335Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:26.414364Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:26.414539Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:26.427062Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:26.871615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:26.982577Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:27.247533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:27.641762Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:27.934319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:28.800873Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1908:3514], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:28.801334Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:28.802584Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1981:3533], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:28.802706Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:28.836746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.023127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.358592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.619009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.887270Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:30.170671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:30.450313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:30.763271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:31.243346Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2797:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:31.243466Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:31.245284Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2801:4183], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:31.245427Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:31.245534Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2804:4186], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:31.259252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:31.425546Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2806:4188], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:47:31.496069Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2861:4224] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:34.355275Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NWZhNDBlNGQtMmQzNjAwNmMtMTM1N2Y1YTYtODQ3MTQ3NjI=, ActorId: [1:3156:4457], ActorState: ExecuteState, LegacyTraceId: 01ke3ee8qv5y8mc8k6xqzwtsym, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KeyValue2`" issue_code: 2001 severity: 1 } trace_id# Trying to start Y ... 8,"}}]}}; 2026-01-04T02:47:57.604797Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={"1000000915":{"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"64,"}}]}}; 2026-01-04T02:47:57.604840Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={}; 2026-01-04T02:47:57.604909Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={}; 2026-01-04T02:47:57.605112Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={"1000000939":{"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"38,"}},{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"61,"}}]}}; 2026-01-04T02:47:57.605291Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={"1000000939":{"intervals":[{"i":{"txs":[],"starts":[{"inc":{"count_include":1},"id":281474976710663}],"finishes":[{"inc":{"count_include":1},"id":281474976710663}]},"p":{"include":0,"pk":"61,"}}]}}; 2026-01-04T02:47:57.605424Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tx_id=0;lock_id=281474976710663;fline=interaction.h:464;event=remove_interval;interactions_info={}; 2026-01-04T02:47:57.610683Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037930;self_id=[3:7591332673486856076:2337];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037930; 2026-01-04T02:47:57.610687Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037905;self_id=[3:7591332673486856226:2370];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037905; 2026-01-04T02:47:57.610726Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037906;self_id=[3:7591332673486856199:2358];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037906; 
2026-01-04T02:47:57.610737Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037929;self_id=[3:7591332673486856195:2354];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037929; 2026-01-04T02:47:57.613151Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037899;self_id=[3:7591332673486856223:2369];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037899; 2026-01-04T02:47:57.621055Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037907;self_id=[3:7591332673486856282:2378];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037907; 2026-01-04T02:47:57.621119Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037909;self_id=[3:7591332673486856317:2386];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037909; 2026-01-04T02:47:57.625548Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037942;self_id=[3:7591332673486856085:2346];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037942; 2026-01-04T02:47:57.629622Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037931;self_id=[3:7591332673486856081:2342];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037931; 2026-01-04T02:47:57.641410Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037927;self_id=[3:7591332673486856196:2355];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037927; 2026-01-04T02:47:57.641473Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037908;self_id=[3:7591332673486856531:2392];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037908; 2026-01-04T02:47:57.646533Z node 3 :TX_COLUMNSHARD DEBUG: log.cpp:841: tablet_id=72075186224037893;self_id=[3:7591332673486856308:2382];ev=NActors::TEvents::TEvWakeup;fline=columnshard.cpp:269;event=TEvPrivate::TEvPeriodicWakeup::MANUAL;tablet_id=72075186224037893; Trying to start YDB, gRPC: 4110, MsgBus: 9986 2026-01-04T02:47:58.781722Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332698398693153:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:58.781759Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:58.934318Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:58.941064Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:58.966300Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:58.966396Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-04T02:47:58.992142Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:59.078496Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:59.078520Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:59.078528Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:59.078629Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:59.141401Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:59.723609Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:59.731414Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:59.791844Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:02.974284Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332715578563157:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:02.974367Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:02.974736Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332715578563184:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:02.974744Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332715578563185:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:02.974807Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:02.979458Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:02.992706Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591332715578563188:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:48:03.094775Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591332719873530535:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:48:03.166694Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:03.215713Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:04.049310Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591332698398693153:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:04.070472Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:04.359770Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterWithExternalHive-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TPragmaSettingOltp-IsSnapshotIsolation [GOOD] Test command err: Trying to start YDB, gRPC: 16061, MsgBus: 9503 2026-01-04T02:47:23.925940Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:23.926086Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:23.991025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:23.991137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:24.049490Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332549453953056:2081] 1767494843439831 != 1767494843439834 
2026-01-04T02:47:24.063527Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:24.073166Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:24.104063Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:24.166500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:24.166521Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:24.166528Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:24.166603Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:24.507129Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:24.617194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:24.624461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:27.046706Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332566633823115:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:27.046898Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:27.047428Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332566633823150:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:27.047466Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332566633823151:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:27.047795Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:27.064140Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:27.080173Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332566633823154:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:27.210697Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332566633823205:2544] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:27.503685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:47:27.690968Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332566633823346:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:27.690970Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332566633823352:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:27.691236Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332566633823352:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:27.691247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332566633823346:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:27.691641Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332566633823352:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:27.691757Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332566633823352:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:27.691845Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332566633823352:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:27.691978Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332566633823352:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:27.692075Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332566633823352:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:27.692126Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;self_id=[1:7591332566633823346:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:27.692178Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332566633823352:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:27.692300Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332566633823352:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:47:27.692333Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332566633823346:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:27.692430Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332566633823352:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:47:27.692450Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332566633823346:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:27.692526Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332566633823352:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:47:27.692549Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332566633823346:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:27.692620Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332566633823352:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:47:27.692664Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332566633823346:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:27.692722Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037894;self_id=[1:7591332566633823352:2342];tablet_id=72075186224037894;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:47:27.692766Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332566633823346:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:27.692902Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;self_id=[1:7591332566633823346:2340];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 202 ... on type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:44.617490Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:929:2788], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:44.657353Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:986:2826] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:44.726979Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:45.024182Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:47.836135Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:51.653528Z node 2 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Write conflict with concurrent transaction.;tx_id=281474976715669; 2026-01-04T02:47:51.666536Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 281474976715669 at tablet 72075186224037989 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2026-01-04T02:47:51.666815Z node 2 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 281474976715669 at tablet 72075186224037989 Status: STATUS_LOCKS_BROKEN Issues: { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2026-01-04T02:47:51.667231Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [2:9049:8419], Table: `/Root/KV2` ([72057594046644480:44:1]), SessionActorId: [2:8813:8419]Got LOCKS BROKEN for table `/Root/KV2`. ShardID=72075186224037989, Sink=[2:9049:8419].{
: Error: Write conflict with concurrent transaction., code: 2001 } 2026-01-04T02:47:51.667972Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [2:8838:8419], SessionActorId: [2:8813:8419], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001
: Error: Write conflict with concurrent transaction., code: 2001 . sessionActorId=[2:8813:8419]. 2026-01-04T02:47:51.668508Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=2&id=Y2U2MWRkYWQtYTJkMjMzNGEtY2RkMzJkNDYtNzdmNDY3MjQ=, ActorId: [2:8813:8419], ActorState: ExecuteState, LegacyTraceId: 01ke3eet4b0vkyp8wpsrs0f5va, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [2:9043:8419] from: [2:8838:8419] trace_id# 2026-01-04T02:47:51.668721Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [2:9043:8419] TxId: 281474976715665. Ctx: { TraceId: 01ke3eet4b0vkyp8wpsrs0f5va, Database: /Root, SessionId: ydb://session/3?node_id=2&id=Y2U2MWRkYWQtYTJkMjMzNGEtY2RkMzJkNDYtNzdmNDY3MjQ=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001 subissue: {
: Error: Write conflict with concurrent transaction., code: 2001 } } trace_id# 2026-01-04T02:47:51.669177Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=Y2U2MWRkYWQtYTJkMjMzNGEtY2RkMzJkNDYtNzdmNDY3MjQ=, ActorId: [2:8813:8419], ActorState: ExecuteState, LegacyTraceId: 01ke3eet4b0vkyp8wpsrs0f5va, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV2`." issue_code: 2001 severity: 1 issues { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } } trace_id# Trying to start YDB, gRPC: 4483, MsgBus: 27804 ... waiting for SysViewsRoster update finished 2026-01-04T02:47:57.148909Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:57.157165Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:47:57.162523Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:114:2161], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:47:57.162881Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:47:57.163201Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:57.453796Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:57.453954Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:57.480605Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:34:2081] 1767494873162940 != 1767494873162944 2026-01-04T02:47:57.489047Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:57.547232Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:57.707999Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:47:58.079661Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:58.079735Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:58.079776Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:58.080353Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:58.092183Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:58.558949Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:58.676183Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:928:2787], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:58.676320Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:917:2782], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:58.676418Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:58.677710Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:933:2792], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:58.678434Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:58.683791Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:58.809618Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:931:2790], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:58.847132Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:988:2828] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:58.917451Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:59.159143Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:02.296229Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-false [GOOD] >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadWriteTxFailsOnConcurrentWrite3-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 22140, MsgBus: 21039 2026-01-04T02:47:42.668112Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332628949563414:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:42.668169Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:43.024979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:43.025095Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:43.031288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:43.089907Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:43.131578Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:43.290289Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:43.290313Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 
2026-01-04T02:47:43.290319Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:43.290427Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:43.334560Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:43.685940Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:43.881878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:43.888243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:43.964869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:44.153701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:44.322466Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:44.418174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:46.379612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332646129434436:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:46.379718Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:46.380055Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332646129434446:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:46.380109Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:46.718111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:46.754257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:46.800087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:46.835186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:46.897949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:46.944172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:46.985227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:47.036525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:47.182576Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332650424402622:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:47.182641Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332650424402627:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:47.182677Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:47.183274Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332650424402630:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:47.183314Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:47.186995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:47.198436Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332650424402629:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:47.294953Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332650424402682:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:47.669153Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332628949563414:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:47.669213Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:52.031247Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NDEyM2RhOWItMTQyOTMxZTctODQ4MjNlYzMtZGRmOTQ4NDE=, ActorId: [1:7591332659014337542:2523], ActorState: ExecuteState, LegacyTraceId: 01ke3eetmz2q0tsjybpd0nnnws, Create QueryResponse for error on request, msg: tx has deferred effects, but locks a ... accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:00.852917Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:00.960426Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:00.966266Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332708687743129:2081] 1767494880835299 != 1767494880835302 2026-01-04T02:48:00.987971Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:00.988065Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:01.007081Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:01.033963Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:01.082365Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:01.082394Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:01.082401Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:01.082492Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:01.593901Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:01.599859Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:01.608928Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:01.670013Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:01.826636Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:01.869456Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:01.892439Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:04.491704Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332725867614192:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:04.491830Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:04.492155Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332725867614202:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:04.492209Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:04.566403Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:04.598002Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:04.632775Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:04.665425Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:04.695490Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:04.734034Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:04.778846Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:04.830293Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:04.915393Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332725867615075:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:04.915496Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:04.915543Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332725867615080:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:04.915701Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332725867615082:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:04.915757Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:04.919587Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:04.932053Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332725867615084:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:48:04.989193Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332725867615135:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:48:05.836809Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332708687743165:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:05.836889Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:07.662864Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=NjE0ZWRkZTgtM2NmNjVhZTktNzk1NzYxZGMtNTQ4MTU1ODE=, ActorId: [3:7591332734457549990:2521], ActorState: ExecuteState, LegacyTraceId: 01ke3efa4q37eyr9sgm060mm6n, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/TwoShard`" issue_code: 2001 severity: 1 } trace_id# |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD] >> KqpSinkLocks::EmptyRange [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBroken |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::DropWithDeadTenantHive-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:48:01.957072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:48:01.957187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:48:01.957251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:48:01.957478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:48:01.957528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:48:01.957559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:48:01.957639Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:48:01.957737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:48:01.958700Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:48:01.960427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:48:02.061770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:48:02.061861Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:02.081103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:48:02.081673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:48:02.081930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:48:02.092617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:48:02.092921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:48:02.093651Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:02.093991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:48:02.096909Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:02.106007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:48:02.123547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:48:02.123632Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:02.123690Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:48:02.123770Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:48:02.123837Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: 
TTxServerlessStorageBilling.Complete 2026-01-04T02:48:02.125744Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:48:02.312464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.313547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.313692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.313798Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.313930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.314021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.314115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.314227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.314319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.314429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.314507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.314585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.314709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.314804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.314897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... xId: 103 ready parts: 1/1 2026-01-04T02:48:09.404015Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:48:09.404056Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:48:09.404243Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2026-01-04T02:48:09.405239Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:48:09.406496Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186234409547 2026-01-04T02:48:09.406787Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-04T02:48:09.406977Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-04T02:48:09.407292Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-04T02:48:09.407628Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:09.407858Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2026-01-04T02:48:09.408168Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186234409548 2026-01-04T02:48:09.408305Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-04T02:48:09.408459Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-04T02:48:09.408638Z node 6 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 FAKEHIVE 72057594037968897 TEvDeleteTablet 
ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186234409546 2026-01-04T02:48:09.408788Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-04T02:48:09.408945Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-04T02:48:09.409161Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:48:09.409582Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:48:09.409659Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:48:09.410082Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-04T02:48:09.410416Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:48:09.410490Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:48:09.410588Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:48:09.414920Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-04T02:48:09.415020Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186234409547 2026-01-04T02:48:09.415151Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:48:09.415182Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-04T02:48:09.415297Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-04T02:48:09.415324Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186234409548 2026-01-04T02:48:09.415551Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:48:09.415628Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186234409546 2026-01-04T02:48:09.418092Z node 6 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:48:09.418528Z node 6 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-04T02:48:09.418949Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:48:09.419022Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:48:09.419523Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:48:09.419639Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:48:09.419695Z node 6 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [6:590:2532] TestWaitNotification: OK eventTxId 103 2026-01-04T02:48:09.420398Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:48:09.420612Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 244us result status StatusPathDoesNotExist 2026-01-04T02:48:09.420785Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:48:09.421350Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:48:09.421552Z node 6 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 230us result status StatusSuccess 2026-01-04T02:48:09.422226Z node 6 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:48:09.423122Z node 6 :HIVE INFO: tablet_helpers.cpp:1586: [72057594037968897] TEvRequestHiveInfo, msg: ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ReadWriteTxFailsOnConcurrentWrite3 [GOOD] Test command err: Trying to start YDB, gRPC: 17666, MsgBus: 26214 2026-01-04T02:47:14.359093Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332507909217975:2173];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:14.359832Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:14.677870Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:14.692749Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:14.692852Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:14.772206Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:14.864211Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:14.957893Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:15.009123Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:15.009148Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:15.009157Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:15.009257Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:15.365950Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:15.693359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:15.711130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:18.007655Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332525089087901:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:18.007656Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332525089087912:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:18.007769Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:18.008203Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332525089087916:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:18.008272Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:18.011345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:18.020315Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332525089087915:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:18.119295Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332525089087968:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:18.420294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:18.559139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:19.415357Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332507909217975:2173];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:19.416731Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:19.643873Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.612293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:47:29.612320Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:34.866463Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [1:7591332593808573566:2964], TxId: 281474976710678, task: 1. Ctx: { TraceId : 01ke3eea688c6dz0086efsxk2n. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=ODFiNTZiMGYtZmQ4OTgyZDAtYTJmMzY2YzItODc4MDZkZDk=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 43 has no snapshot at v1767494841049/18446744073709551615 shard 72075186224037889 with lowWatermark v1767494841350/18446744073709551615 (node# 1 state# Ready) } } 2026-01-04T02:47:34.866984Z node 1 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [1:7591332593808573566:2964], TxId: 281474976710678, task: 1. Ctx: { TraceId : 01ke3eea688c6dz0086efsxk2n. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=1&id=ODFiNTZiMGYtZmQ4OTgyZDAtYTJmMzY2YzItODc4MDZkZDk=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 43 has no snapshot at v1767494841049/18446744073709551615 shard 72075186224037889 with lowWatermark v1767494841350/18446744073709551615 (node# 1 state# Ready) } }. 2026-01-04T02:47:34.867814Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ODFiNTZiMGYtZmQ4OTgyZDAtYTJmMzY2YzItODc4MDZkZDk=, ActorId: [1:7591332537973997770:2964], ActorState: ExecuteState, LegacyTraceId: 01ke3eea688c6dz0086efsxk2n, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Read request aborted" severity: 1 issues { message: "Table id 43 has no snapshot at v1767494841049/18446744073709551615 shard 72075186224037889 with lowWatermark v1767494841350/18446744073709551615 (node# 1 state# Ready)" severity: 1 } } trace_id# Trying to start YDB, gRPC: 62626, MsgBus: 1574 2026-01-04T02:47:36.026120Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332603125952018:2217];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:36.026495Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:36.041019Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:36.116439Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:36.149097Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:36.149183Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:36.160950Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:36.211137Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:36.211160Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:36.211168Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:36.211249Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:36.300246Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:36.686790Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:36.695190Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:37.034313Z node 2 :T ... 
kloadService] [TPoolFetcherActor] ActorId: [3:7591332680019499657:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:54.280281Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:54.280580Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332680019499661:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:54.280628Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:54.285081Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:54.299302Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332680019499660:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:54.386036Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332680019499713:2538] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:54.452133Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:54.563423Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:55.517047Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332662839629639:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:55.520836Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:55.825010Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:57.746116Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YjBkZDJjY2MtYjQ2MWQ1NTEtZmMyYjAyMmYtYjM0YzEyNzM=, ActorId: [3:7591332692904409548:2964], ActorState: ExecuteState, LegacyTraceId: 01ke3ef0f6aejz9bkmd309qpve, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. 
Table: `/Root/KV2`" issue_code: 2001 severity: 1 } trace_id# Trying to start YDB, gRPC: 10040, MsgBus: 20422 2026-01-04T02:47:59.208732Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332702534645936:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:59.208801Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:59.237198Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:59.332460Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591332702534645908:2081] 1767494879201329 != 1767494879201332 2026-01-04T02:47:59.332741Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:59.348384Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:59.348468Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:59.351614Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:59.502490Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:59.502509Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:59.502514Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:59.502592Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:59.521910Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:59.977682Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:59.984939Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:00.218742Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:03.463624Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332719714515955:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:03.463698Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332719714515985:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:03.463751Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:03.464091Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332719714515994:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:03.464144Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:03.468372Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:03.578997Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591332719714515993:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:48:03.655076Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591332719714516046:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:48:03.720420Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:03.782552Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:04.644563Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591332702534645936:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:04.747314Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:05.042529Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:07.247934Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=YzQwZGRhOGQtZjk4ODJjMTYtZDRkMTkyMC1mZWYwNTc3YQ==, ActorId: [4:7591332732599426056:2964], ActorState: ExecuteState, LegacyTraceId: 01ke3ef9qqcqjfmgsrads021xh, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. 
Table: `/Root/KV`" issue_code: 2001 severity: 1 } trace_id# |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::BaseCase-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:48:08.825304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:48:08.825417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:48:08.825455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:48:08.825493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:48:08.825528Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:48:08.825556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:48:08.825632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:48:08.825703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:48:08.826543Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:48:08.826819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:48:08.917778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:48:08.917866Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:08.932966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:48:08.933195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:48:08.933396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:48:08.944301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:48:08.944543Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:48:08.945235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:08.945519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:48:08.948679Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:08.948873Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:48:08.950042Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:48:08.950108Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:08.950223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:48:08.950265Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:48:08.950373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:48:08.950577Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:48:08.957959Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:48:09.095761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:48:09.096077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:09.096303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:48:09.096343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:48:09.096567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:48:09.096650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:09.106968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:09.107231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:48:09.107507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:09.107598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:48:09.107644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:48:09.107687Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:48:09.110291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:09.110362Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:48:09.110414Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:48:09.113075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:09.113133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:09.113197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:48:09.113291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:48:09.117281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:48:09.120256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:48:09.120473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 
72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:48:09.121723Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:09.121906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:48:09.121961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:48:09.122248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:48:09.122309Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:48:09.122510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:48:09.122597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:48:09.125134Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:48:09.125189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
AT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 4 2026-01-04T02:48:09.979514Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 0 TabletID: 72075186234409551 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 7 TxId_Deprecated: 0 TabletID: 72075186234409551 2026-01-04T02:48:09.980545Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72075186233409546] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409550 FAKEHIVE 72075186233409546 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 6 TxId_Deprecated: 0 TabletID: 72075186234409550 2026-01-04T02:48:09.980890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 7, at schemeshard: 72057594046678944 2026-01-04T02:48:09.981171Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 Forgetting tablet 72075186234409551 2026-01-04T02:48:09.982614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72075186233409546 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 6, at schemeshard: 72057594046678944 2026-01-04T02:48:09.982866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 2 Forgetting tablet 72075186234409550 2026-01-04T02:48:09.983797Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:48:09.983857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2026-01-04T02:48:09.983984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 1 2026-01-04T02:48:09.984419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 106 2026-01-04T02:48:09.984612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:48:09.984669Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 3], at schemeshard: 72057594046678944 2026-01-04T02:48:09.984744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:48:09.988910Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-04T02:48:09.988986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 
tabletId 72075186234409549 2026-01-04T02:48:09.991136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2026-01-04T02:48:09.991220Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186234409551 2026-01-04T02:48:09.991327Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-04T02:48:09.991366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186234409550 2026-01-04T02:48:09.991577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:48:09.991659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 106, wait until txId: 106 TestWaitNotification wait txId: 106 2026-01-04T02:48:09.992078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 106: send EvNotifyTxCompletion 2026-01-04T02:48:09.992130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 106 2026-01-04T02:48:09.992674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 106, at schemeshard: 72057594046678944 2026-01-04T02:48:09.992847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 106: got EvNotifyTxCompletionResult 2026-01-04T02:48:09.992890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 106: satisfy waiter [1:942:2802] TestWaitNotification: OK eventTxId 106 2026-01-04T02:48:09.993573Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0/dir/table0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:48:09.993853Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0/dir/table0" took 284us result status StatusPathDoesNotExist 2026-01-04T02:48:09.994057Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0/dir/table0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0/dir/table0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 
2026-01-04T02:48:09.994690Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/ServerLess0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:48:09.994916Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/ServerLess0" took 231us result status StatusPathDoesNotExist 2026-01-04T02:48:09.995080Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/ServerLess0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/ServerLess0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:48:09.995709Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:48:09.995986Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 242us result status StatusSuccess 2026-01-04T02:48:09.996547Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 7 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "SharedDB" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 
MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 wait until 72075186234409549 is deleted wait until 72075186234409550 is deleted wait until 72075186234409551 is deleted wait until 72075186234409552 is deleted 2026-01-04T02:48:09.997473Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409549 2026-01-04T02:48:09.997581Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409550 2026-01-04T02:48:09.997639Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409551 2026-01-04T02:48:09.997681Z node 1 :HIVE INFO: tablet_helpers.cpp:1658: [72075186233409546] TEvSubscribeToTabletDeletion, 72075186234409552 Deleted tabletId 72075186234409549 Deleted tabletId 72075186234409550 Deleted tabletId 72075186234409551 Deleted tabletId 72075186234409552 |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterThenDropChangesParent-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:48:01.957042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:48:01.957156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:48:01.957200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:48:01.957241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:48:01.957292Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:48:01.957322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:48:01.957390Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:48:01.957493Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, 
ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:48:01.958434Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:48:01.960413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:48:02.062506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:48:02.062573Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:02.079093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:48:02.079301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:48:02.079527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:48:02.088316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:48:02.088571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:48:02.089414Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:02.089667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:48:02.104619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:02.104853Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:48:02.123394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:48:02.123485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:02.123600Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:48:02.123674Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:48:02.123815Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:48:02.125596Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:48:02.135342Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:48:02.291386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: 
"pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:48:02.292719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:02.293940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:48:02.294017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:48:02.295021Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:48:02.295116Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:02.298440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:02.300124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:48:02.301343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:02.301516Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:48:02.301582Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:48:02.301622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:48:02.304349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:02.304422Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:48:02.304478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:48:02.307254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:02.307318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:02.307381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 
2026-01-04T02:48:02.307436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:48:02.312342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:48:02.317489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:48:02.318777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:48:02.320033Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:02.320181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:48:02.320236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:48:02.320596Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:48:02.320659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:48:02.320854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:48:02.321819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:48:02.324840Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:48:02.324896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
30906Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-04T02:48:10.032766Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 4, to hive 72057594037968897, at schemeshard 72057594046678944 2026-01-04T02:48:10.032895Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:3 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:48:10.032939Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:1 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:48:10.032969Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:4 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:48:10.032997Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046678944:2 hive 72057594037968897 at ss 72057594046678944 2026-01-04T02:48:10.033093Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:48:10.033134Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-04T02:48:10.033266Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:48:10.033319Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:48:10.033373Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:48:10.033419Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:48:10.033466Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-04T02:48:10.033519Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:48:10.033572Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:48:10.033612Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:48:10.033784Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 6 2026-01-04T02:48:10.034696Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:48:10.035627Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 TabletID: 72075186233409548 Forgetting tablet 72075186233409548 2026-01-04T02:48:10.036021Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 
ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-04T02:48:10.036301Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 5 2026-01-04T02:48:10.037564Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 1 TxId_Deprecated: 0 TabletID: 72075186233409546 2026-01-04T02:48:10.037747Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:48:10.038168Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 TabletID: 72075186233409549 2026-01-04T02:48:10.038594Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:10.038801Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 4 Forgetting tablet 72075186233409546 2026-01-04T02:48:10.039551Z node 8 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 2 TxId_Deprecated: 0 TabletID: 72075186233409547 2026-01-04T02:48:10.039715Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-04T02:48:10.039877Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409549 2026-01-04T02:48:10.041595Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:48:10.045397Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 2, at schemeshard: 72057594046678944 2026-01-04T02:48:10.045578Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 Forgetting tablet 72075186233409547 2026-01-04T02:48:10.047227Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:48:10.047290Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 
72057594046678944 2026-01-04T02:48:10.047420Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-04T02:48:10.048233Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:48:10.048289Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:48:10.048377Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:48:10.051025Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-04T02:48:10.051092Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-04T02:48:10.051432Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:48:10.051467Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-04T02:48:10.051585Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-04T02:48:10.051608Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-04T02:48:10.051848Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:48:10.051897Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-04T02:48:10.052892Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:48:10.053013Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-04T02:48:10.053323Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:48:10.053379Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:48:10.053778Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:48:10.053911Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:48:10.053954Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:572:2514] TestWaitNotification: OK eventTxId 103 2026-01-04T02:48:10.054439Z node 8 
:SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:48:10.054628Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 222us result status StatusPathDoesNotExist 2026-01-04T02:48:10.054796Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 >> TSchemeShardExtSubDomainTest::CreateAndAlter-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] >> KqpSinkTx::OlapExplicitTcl [GOOD] >> KqpSinkTx::LocksAbortOnCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateNameConflicts-AlterDatabaseCreateHiveFirst-true [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:48:01.957053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:48:01.957165Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:48:01.957212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:48:01.957254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:48:01.957297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:48:01.957343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:48:01.957411Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:48:01.957500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:48:01.958412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:48:01.960423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:48:02.049154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:48:02.049208Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:02.064701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:48:02.065320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:48:02.067299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:48:02.078178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:48:02.080958Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:48:02.085625Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:02.088940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:48:02.097192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:02.104949Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:48:02.125481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:48:02.125555Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:02.125617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:48:02.125664Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:48:02.125758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:48:02.125957Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:48:02.136321Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 
0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:48:02.291377Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:48:02.292760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:02.293964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:48:02.294029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:48:02.295619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:48:02.295706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:02.298724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:02.300193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:48:02.301357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:02.301462Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:48:02.301522Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:48:02.301560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:48:02.307334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:02.307423Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:48:02.307469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:48:02.313528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:02.313598Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:02.313672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:48:02.313723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:48:02.317103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:48:02.327100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:48:02.327355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:48:02.328626Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:02.328775Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:48:02.328831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:48:02.329186Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:48:02.329256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:48:02.329484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:48:02.329585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:48:02.332869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:48:02.332934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
6545 cookie: 0:108 msg type: 269090816 2026-01-04T02:48:11.319271Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 108, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 108 at step: 5000041 FAKE_COORDINATOR: advance: minStep5000041 State->FrontStep: 5000040 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 108 at step: 5000041 2026-01-04T02:48:11.320056Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000041, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:11.320171Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 138 RawX2: 30064773232 } } Step: 5000041 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:48:11.320217Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2026-01-04T02:48:11.320383Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 128 -> 240 2026-01-04T02:48:11.320438Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 108:0, at tablet# 72057594046678944 2026-01-04T02:48:11.320666Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 5 2026-01-04T02:48:11.320727Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-04T02:48:11.320783Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:425: DoUpdateTenant no IsActiveChild, pathId: : [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-04T02:48:11.321485Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-01-04T02:48:11.322763Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-01-04T02:48:11.323650Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:48:11.323683Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:48:11.323828Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 41] 2026-01-04T02:48:11.323898Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:11.323925Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:211:2212], at 
schemeshard: 72057594046678944, txId: 108, path id: 1 2026-01-04T02:48:11.323957Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [7:211:2212], at schemeshard: 72057594046678944, txId: 108, path id: 41 FAKE_COORDINATOR: Erasing txId 108 2026-01-04T02:48:11.324025Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-04T02:48:11.324057Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 108:0 ProgressState 2026-01-04T02:48:11.324193Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-04T02:48:11.324233Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-04T02:48:11.324276Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-04T02:48:11.324319Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-04T02:48:11.324365Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: false 2026-01-04T02:48:11.324404Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-04T02:48:11.324443Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2026-01-04T02:48:11.324477Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 108:0 2026-01-04T02:48:11.324539Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-01-04T02:48:11.324577Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 108, publications: 2, subscribers: 0 2026-01-04T02:48:11.324615Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 1], 14 2026-01-04T02:48:11.324656Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 108, [OwnerId: 72057594046678944, LocalPathId: 41], 3 2026-01-04T02:48:11.325875Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 108 2026-01-04T02:48:11.325947Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72057594046678944, cookie: 108 2026-01-04T02:48:11.325997Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 108 2026-01-04T02:48:11.326057Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 
72057594046678944, LocalPathId: 1], version: 14 2026-01-04T02:48:11.326108Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 6 2026-01-04T02:48:11.327078Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 108 2026-01-04T02:48:11.327161Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 41 Version: 3 PathOwnerId: 72057594046678944, cookie: 108 2026-01-04T02:48:11.327193Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 108 2026-01-04T02:48:11.327235Z node 7 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 41], version: 3 2026-01-04T02:48:11.327262Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-04T02:48:11.327326Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 108, subscribers: 0 2026-01-04T02:48:11.329331Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-01-04T02:48:11.330234Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 TestModificationResult got TxId: 108, wait until txId: 108 TestWaitNotification wait txId: 108 2026-01-04T02:48:11.330508Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 108: send EvNotifyTxCompletion 2026-01-04T02:48:11.330559Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 108 2026-01-04T02:48:11.331036Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 108, at schemeshard: 72057594046678944 2026-01-04T02:48:11.331127Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-04T02:48:11.331171Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [7:883:2853] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2026-01-04T02:48:11.333898Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpCreateExtSubDomain SubDomain { Name: "extSubdomain" } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:48:11.334148Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_extsubdomain.cpp:58: TCreateExtSubDomain Propose, path/MyRoot/extSubdomain, opId: 109:0, at schemeshard: 72057594046678944 2026-01-04T02:48:11.334287Z 
node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/extSubdomain', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeExtSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-04T02:48:11.336372Z node 7 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/extSubdomain\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeExtSubDomain, state: EPathStateNoChanges)" TxId: 109 SchemeshardId: 72057594046678944 PathId: 41 PathCreateTxId: 108, at schemeshard: 72057594046678944 2026-01-04T02:48:11.336642Z node 7 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/extSubdomain', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeExtSubDomain, state: EPathStateNoChanges), operation: CREATE DATABASE, path: /MyRoot/extSubdomain TestModificationResult got TxId: 109, wait until txId: 109 |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst >> KqpLocks::MixedTxFail+useSink [GOOD] >> KqpLocks::MixedTxFail-useSink >> KqpSinkMvcc::OlapReadWriteTxFailsOnConcurrentWrite2 [GOOD] >> KqpSinkMvcc::OlapNamedStatement ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestNoLocksIssueInteractiveTx-withSink [GOOD] Test command err: Trying to start YDB, gRPC: 14331, MsgBus: 65046 ... waiting for SysViewsRoster update finished 2026-01-04T02:47:12.799114Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:12.906349Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:47:12.906805Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:12.906865Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:47:13.187873Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:13.188005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:13.260983Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494830105209 != 1767494830105212 2026-01-04T02:47:13.276525Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:13.321979Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:13.447675Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:47:13.882920Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:13.882983Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:13.883029Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:13.883256Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:13.898269Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:14.204171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:14.290338Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:14.593157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:14.998859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:15.323115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:16.215918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1903:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:16.216098Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:16.217065Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1976:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:16.217256Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:16.249450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:16.447377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:16.713648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:17.027221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:17.302920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:17.613383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:17.933349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:18.292955Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:18.783239Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2789:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:18.783351Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:18.783843Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:18.783914Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:18.783983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2796:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:18.789721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:19.031175Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2798:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:47:19.088401Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2858:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:20.942389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:21.188003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is ... 50700Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:59.450777Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:47:59.822321Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:59.822511Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:59.857487Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:34:2081] 1767494875191158 != 1767494875191161 2026-01-04T02:47:59.863189Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:59.912456Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:00.028679Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:48:00.410078Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:00.410151Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:00.410194Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:00.411074Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:00.423130Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:00.947893Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:00.982965Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:01.254456Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:01.650218Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:01.946412Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:02.518099Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1904:3512], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:02.518300Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:02.519584Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1978:3531], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:02.519677Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:02.548953Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:02.728265Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:03.028032Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:03.285793Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:03.531041Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:03.798464Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:04.089389Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:04.409771Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:04.809653Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2790:4172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:04.809760Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:04.810372Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2794:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:04.810463Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:04.810542Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2797:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:04.817008Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:04.978246Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:2799:4181], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:48:05.040205Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:2859:4222] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:48:07.673635Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:07.955488Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:08.308780Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ChangeFromTheFuture-IsOlap [GOOD] >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst [GOOD] >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> QueryStats::Ranges [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/control/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/control/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest >> 
KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> QueryStats::Ranges [GOOD] |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/sys_view/query_stats/ut/unittest >> KqpSinkLocks::UpdateLocksOneShardRowExists [GOOD] |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/sys_view/query_stats/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::AlterTwiceAndWithPlainAlterSubdomain-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:48:01.957569Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:48:01.957690Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:48:01.957751Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:48:01.957833Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:48:01.957885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:48:01.957927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:48:01.958011Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:48:01.958085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:48:01.958887Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:48:01.960562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:48:02.059440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:48:02.059521Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:02.077899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:48:02.078524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:48:02.078765Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:48:02.106640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:48:02.106967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:48:02.107708Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:02.108006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:48:02.114991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:02.115226Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:48:02.123643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:48:02.123765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:02.123832Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:48:02.123879Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:48:02.123930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:48:02.126885Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:48:02.317430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.318503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.318647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.318742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.318852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.318940Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.319016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.319095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.319168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.319283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.319379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.319471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.319554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.319639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.319711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
State leave, operationId 103:0, at tablet# 72057594046678944 2026-01-04T02:48:13.599820Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 103 ready parts: 1/1 2026-01-04T02:48:13.599977Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 103 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:48:13.601249Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:103 msg type: 269090816 2026-01-04T02:48:13.601390Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 103 at step: 5000040 FAKE_COORDINATOR: advance: minStep5000040 State->FrontStep: 5000039 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 103 at step: 5000040 2026-01-04T02:48:13.601658Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000040, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:13.601755Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 103 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 34359740527 } } Step: 5000040 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:48:13.601795Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2026-01-04T02:48:13.602103Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 128 -> 240 2026-01-04T02:48:13.602170Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 103:0, at tablet# 72057594046678944 2026-01-04T02:48:13.602289Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-04T02:48:13.602438Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:588: Send TEvUpdateTenantSchemeShard, to actor: [8:794:2764], msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72057594046678944 2026-01-04T02:48:13.603951Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6262: Handle TEvUpdateTenantSchemeShard, at schemeshard: 72075186234409546, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4 2026-01-04T02:48:13.604080Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:79: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 StoragePools { Name: "pool-1" Kind: "hdd" } SubdomainVersion: 4, at schemeshard: 72075186234409546 2026-01-04T02:48:13.604219Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:601: Cannot publish paths for unknown operation id#0 FAKE_COORDINATOR: 
Erasing txId 103 2026-01-04T02:48:13.604587Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:48:13.604633Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:48:13.604824Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:13.604865Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2212], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-04T02:48:13.605224Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:48:13.605280Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 103:0, ProgressState, NeedSyncHive: 0 2026-01-04T02:48:13.605324Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 240 -> 240 2026-01-04T02:48:13.606379Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:48:13.606505Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:48:13.606552Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:48:13.606595Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 6 2026-01-04T02:48:13.606641Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2026-01-04T02:48:13.606740Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-04T02:48:13.608874Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6222: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 38 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 1 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2026-01-04T02:48:13.608954Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:48:13.609033Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: 
[OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 2, ActorId:[8:794:2764], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:48:13.609114Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2026-01-04T02:48:13.609136Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2026-01-04T02:48:13.609258Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2026-01-04T02:48:13.609293Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:885:2829], at schemeshard: 72075186234409546, txId: 0, path id: 1 2026-01-04T02:48:13.610287Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:48:13.610329Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-04T02:48:13.610458Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:48:13.610509Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:48:13.610555Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:48:13.610589Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:48:13.610637Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: true 2026-01-04T02:48:13.610689Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:48:13.610732Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:48:13.610763Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:48:13.610828Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-04T02:48:13.611248Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72075186234409546, cookie: 0 2026-01-04T02:48:13.611596Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:48:13.611674Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 TestModificationResult got TxId: 103, wait until txId: 104 TestModificationResults wait txId: 104 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 103 2026-01-04T02:48:13.613113Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:48:13.613161Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:48:13.613534Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:48:13.613619Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:48:13.613667Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [8:1010:2952] TestWaitNotification: OK eventTxId 103 |94.8%| [TA] $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |94.8%| [TA] {RESULT} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::ReadOnlyTxCommitsOnConcurrentWrite+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 25195, MsgBus: 14887 2026-01-04T02:47:47.256327Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332649680357735:2185];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:47.260785Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:47.678924Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:47.692098Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:47.692200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:47.735063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:47.826198Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:47.857903Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332649680357588:2081] 1767494867242147 != 1767494867242150 2026-01-04T02:47:48.038956Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:48.057000Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:48.057029Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:48.057035Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:48.057112Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:48.285933Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:48.644540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:48.660104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:50.910865Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332662565260362:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:50.911056Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:50.913931Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332662565260374:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:50.913997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332662565260375:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:50.914360Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:50.920166Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:50.930800Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332662565260378:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:51.051164Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332662565260429:2533] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:51.376284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:51.479262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:52.270445Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332649680357735:2185];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:52.279623Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:52.470592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:53.537332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:55.508540Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=N2EwNDBlZTYtN2RlNjM5ODktNDFlNGQ1NjItYTIzMjUzMWY=, ActorId: [1:7591332675450170285:2963], ActorState: ExecuteState, LegacyTraceId: 01ke3eexntfx637kckkv33z1a0, Create QueryResponse for error on request, msg: tx has deferred effects, but locks are broken status# ABORTED issues# { message: "Transaction locks invalidated. 
Table: `/Root/TestTable/IndexUniq/indexImplTable`" issue_code: 2001 severity: 1 } trace_id# Trying to start YDB, gRPC: 61458, MsgBus: 11718 2026-01-04T02:47:56.713771Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332689936678739:2248];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:56.714271Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:56.739085Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:56.833061Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:56.850053Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332689936678530:2081] 1767494876706597 != 1767494876706600 2026-01-04T02:47:56.864519Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:56.864596Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:56.882209Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:56.953219Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:56.953841Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:56.953849Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:56.953857Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:56.953945Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:57.467357Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:57.709940Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:00.184818Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332707116548594:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:00.184885Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332707116548572:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:00.185050Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:00.185572Z node 2 :KQP_WORKLOAD_SERVICE WARN: ... mit failure;tx_id=281474976715671; 2026-01-04T02:48:04.195906Z node 2 :TX_DATASHARD ERROR: datashard.cpp:760: Complete volatile write [1767494884239 : 281474976715671] from 72075186224038089 at tablet 72075186224038089, error: Status: STATUS_ABORTED Issues: { message: "Distributed transaction aborted due to commit failure" issue_code: 2011 severity: 1 } Trying to start YDB, gRPC: 21921, MsgBus: 25793 2026-01-04T02:48:05.609265Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332730973429156:2137];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:05.612231Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:05.632178Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:05.719848Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:05.719938Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:05.730269Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332730973429057:2081] 1767494885603673 != 1767494885603676 2026-01-04T02:48:05.746807Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:05.760731Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:05.838141Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:05.838166Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:05.838176Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:05.838260Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:05.899999Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:06.286698Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:06.305569Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:06.375875Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:06.527901Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:06.609719Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:06.629098Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:09.276249Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332748153300108:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:09.276362Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:09.276813Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332748153300118:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:09.276869Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:09.388013Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:09.422028Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:09.456377Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:09.498254Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:09.536071Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:09.578520Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:09.656775Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:09.716195Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:09.801423Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332748153300986:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:09.801527Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:09.801629Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332748153300991:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:09.801842Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332748153300993:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:09.801932Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:09.805791Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:09.817555Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332748153300995:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:48:09.892463Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332748153301046:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:48:10.613920Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332730973429156:2137];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:10.614746Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |94.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/control/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.8%| [TA] $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TA] {RESULT} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} |94.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/sys_view/query_stats/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::ChangeFromTheFuture-IsOlap [GOOD] Test command err: Trying to start YDB, gRPC: 2807, MsgBus: 6721 2026-01-04T02:47:17.198594Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332524275406655:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:17.199874Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:17.541934Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:17.582919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:17.583058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:17.617201Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:17.794882Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332524275406611:2081] 1767494837154238 != 1767494837154241 2026-01-04T02:47:17.811458Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:17.830589Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:17.905245Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken 
or outdated, will use file: (empty maybe) 2026-01-04T02:47:17.905269Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:17.905278Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:17.905378Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:18.215075Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:18.613958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:21.061183Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332541455276677:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:21.061309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:21.061694Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332541455276690:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:21.061780Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332541455276689:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:21.061953Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:21.067850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:21.089613Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332541455276695:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:21.226194Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332541455276753:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:21.520813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:21.634738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:22.550278Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332524275406655:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:22.556920Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:22.851422Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 15941, MsgBus: 15457 2026-01-04T02:47:26.130982Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:26.138311Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:26.239568Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:26.239661Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:26.248146Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:26.254030Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332562546285637:2081] 1767494846032269 != 1767494846032272 2026-01-04T02:47:26.285329Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:26.397886Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-04T02:47:26.482906Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:26.482933Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:26.482940Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:26.483022Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:26.959121Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:26.964815Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:27.092870Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:29.677996Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332575431188399:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.678129Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.678484Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332575431188415:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.678518Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332575431188416:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.678551Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:29.681781Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:29.695083Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591332575431188419:2328], DatabaseId: /Root, PoolId: default, ... id=281474976710671; 2026-01-04T02:48:01.861557Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.861633Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.861740Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.861900Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.861969Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.862141Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.862178Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.862335Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.862393Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.862473Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.862631Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.862659Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.862766Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.862874Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.862902Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.863038Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.863081Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.863234Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.863322Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.863383Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.863570Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: 
fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; 2026-01-04T02:48:01.866542Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710671; Trying to start YDB, gRPC: 5903, MsgBus: 1928 2026-01-04T02:48:04.178616Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332724901454682:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:04.178737Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:04.208248Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:04.304666Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:04.307743Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591332724901454644:2081] 1767494884165020 != 1767494884165023 2026-01-04T02:48:04.338338Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:04.338442Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:04.350264Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:04.365211Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:04.462453Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:04.462478Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:04.462489Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:04.462588Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:05.089898Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:05.101829Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:05.184988Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:08.572083Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332742081324721:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:08.572737Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332742081324693:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:08.572972Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:08.573873Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332742081324728:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:08.573993Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:08.577932Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:08.589465Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591332742081324727:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:48:08.667895Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591332742081324780:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:48:08.736963Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:08.786750Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:09.771501Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591332724901454682:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:09.848890Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:10.150649Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:11.960153Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [4:7591332754966234790:2967], SessionActorId: [4:7591332754966234721:2967], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 . sessionActorId=[4:7591332754966234721:2967]. 2026-01-04T02:48:11.960375Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=4&id=NjdmNzYxMDEtYTIwZDU2ZTctYzFhZDQwMzctNDk1MmQ1NjA=, ActorId: [4:7591332754966234721:2967], ActorState: ExecuteState, LegacyTraceId: 01ke3efedq0v8kgt7hqwn7722p, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7591332754966234811:2967] from: [4:7591332754966234790:2967] trace_id# 2026-01-04T02:48:11.960470Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [4:7591332754966234811:2967] TxId: 281474976710668. Ctx: { TraceId: 01ke3efedq0v8kgt7hqwn7722p, Database: /Root, SessionId: ydb://session/3?node_id=4&id=NjdmNzYxMDEtYTIwZDU2ZTctYzFhZDQwMzctNDk1MmQ1NjA=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 } trace_id# 2026-01-04T02:48:11.960726Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=NjdmNzYxMDEtYTIwZDU2ZTctYzFhZDQwMzctNDk1MmQ1NjA=, ActorId: [4:7591332754966234721:2967], ActorState: ExecuteState, LegacyTraceId: 01ke3efedq0v8kgt7hqwn7722p, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV2`" issue_code: 2001 severity: 1 } trace_id# |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpSinkMvcc::OltpNamedStatementNoSink [GOOD] |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |94.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |94.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull-UploadNull |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_extsubdomain/unittest >> TSchemeShardExtSubDomainTest::CreateAndAlterAlterAddStoragePool-AlterDatabaseCreateHiveFirst-ExternalHive [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:48:01.957049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:48:01.957156Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:48:01.957198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:48:01.957240Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:48:01.957282Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:48:01.957313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:48:01.957370Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:48:01.957441Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:48:01.958423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:48:01.960422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:48:02.065781Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:48:02.065894Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:02.084739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:48:02.085654Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:48:02.085898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:48:02.117188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:48:02.117510Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:48:02.118402Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:02.118712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:48:02.123891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:02.124079Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:48:02.128016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:48:02.128077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:02.128115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:48:02.128155Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:48:02.128193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:48:02.128303Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:48:02.304719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.307558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.307711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.307823Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.307894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.307956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.308060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.308175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.308285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.308368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.308441Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.308520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.308588Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.308661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:02.308709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
r__attr_1" Value: "value" } UserAttributesVersion: 2 2026-01-04T02:48:15.068519Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:79: TTxUpdateTenant DoExecute, msg: TabletId: 72057594046678944 Generation: 2 UserAttributes { Key: "user__attr_1" Value: "value" } UserAttributesVersion: 2, at schemeshard: 72075186234409546 2026-01-04T02:48:15.068761Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:601: Cannot publish paths for unknown operation id#0 2026-01-04T02:48:15.069054Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:48:15.069130Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:48:15.069401Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:15.069459Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:211:2212], at schemeshard: 72057594046678944, txId: 104, path id: 38 FAKE_COORDINATOR: Erasing txId 104 2026-01-04T02:48:15.070757Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:48:15.070897Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:48:15.070951Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:48:15.071013Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 7 2026-01-04T02:48:15.071077Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 6 2026-01-04T02:48:15.071202Z node 8 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-04T02:48:15.074766Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6222: Handle TEvSyncTenantSchemeShard, at schemeshard: 72057594046678944, msg: DomainSchemeShard: 72057594046678944 DomainPathId: 38 TabletID: 72075186234409546 Generation: 2 EffectiveACLVersion: 0 SubdomainVersion: 4 UserAttributesVersion: 2 TenantHive: 72075186233409546 TenantSysViewProcessor: 18446744073709551615 TenantRootACL: "" TenantStatisticsAggregator: 18446744073709551615 TenantGraphShard: 18446744073709551615 2026-01-04T02:48:15.074892Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:26: TTxSyncTenant DoExecute, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:48:15.075022Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 2, ActorId:[8:794:2764], EffectiveACLVersion: 0, SubdomainVersion: 4, UserAttributesVersion: 2, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 18446744073709551615, TenantRootACL: }, subDomain->GetVersion(): 4, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 2, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:48:15.075166Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186234409546 2026-01-04T02:48:15.075202Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186234409546, txId: 0, path id: [OwnerId: 72075186234409546, LocalPathId: 1] 2026-01-04T02:48:15.075366Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186234409546 2026-01-04T02:48:15.075403Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [8:885:2829], at schemeshard: 72075186234409546, txId: 0, path id: 1 2026-01-04T02:48:15.076036Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:48:15.076136Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__sync_update_tenants.cpp:36: TTxSyncTenant DoComplete, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:48:15.076360Z node 8 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186234409546, msg: Owner: 72075186234409546 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72075186234409546, cookie: 0 TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-04T02:48:15.076729Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-04T02:48:15.076793Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-04T02:48:15.077418Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-04T02:48:15.077543Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:48:15.077599Z node 8 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [8:1026:2968] TestWaitNotification: OK eventTxId 104 2026-01-04T02:48:15.078369Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:48:15.078640Z node 8 :SCHEMESHARD_DESCRIBE INFO: 
schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 308us result status StatusSuccess 2026-01-04T02:48:15.079224Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 1 SubDomainVersion: 4 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:48:15.080069Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186234409546 2026-01-04T02:48:15.080304Z node 8 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186234409546 describe path "/MyRoot/USER_0" took 264us result status StatusSuccess 2026-01-04T02:48:15.080871Z node 8 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "MyRoot/USER_0" PathId: 1 SchemeshardId: 72075186234409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 2 ChildrenVersion: 3 SubDomainVersion: 4 SecurityStateVersion: 0 } } Children { Name: ".sys" PathId: 2 SchemeshardId: 72075186234409546 PathType: EPathTypeDir CreateFinished: false CreateTxId: 281474976720657 ParentPathId: 1 PathState: EPathStateCreate Owner: "metadata@system" ACL: "" ChildrenExist: true } 
DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 4 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "hdd" } StoragePools { Name: "pool-2" Kind: "hdd-1" } PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot/USER_0" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } UserAttributes { Key: "user__attr_1" Value: "value" } } PathId: 1 PathOwnerId: 72075186234409546, at schemeshard: 72075186234409546 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::UpdateLocksOneShardRowExists [GOOD] Test command err: Trying to start YDB, gRPC: 27103, MsgBus: 3014 2026-01-04T02:47:42.807270Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332629006206903:2139];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:42.807683Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:43.117596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:43.118733Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:43.206414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:43.223479Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:43.241511Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:43.244791Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332629006206802:2081] 1767494862771907 != 1767494862771910 2026-01-04T02:47:43.454759Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:43.454794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:43.454804Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try 
to initialize from file: (empty maybe) 2026-01-04T02:47:43.454817Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:43.454900Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:43.806276Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:44.019687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:44.028246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:46.177018Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332646186076875:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:46.177181Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:46.177468Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332646186076887:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:46.177497Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332646186076888:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:46.177547Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:46.181629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:46.195254Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332646186076891:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:46.334043Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332646186076942:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:46.597467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:46.690436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:47.778310Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332629006206903:2139];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:47.778834Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:47.794446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 7128, MsgBus: 20449 2026-01-04T02:47:50.804634Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:50.804726Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:50.871496Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:50.872888Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332665855887414:2081] 1767494870731162 != 1767494870731165 2026-01-04T02:47:50.907897Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:50.907979Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:50.912812Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:50.977457Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-04T02:47:51.029891Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:51.029912Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:51.029918Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:51.029984Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:51.493420Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:51.769064Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:54.204273Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332683035757460:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:54.204381Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:54.204924Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332683035757497:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:54.205278Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332683035757496:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:54.205349Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:54.209685Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:54.257136Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591332683035757500:2328], DatabaseId: /Root, PoolId: default, ... 82181:2962], SessionActorId: [3:7591332726743482163:2962], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:7591332726743482163:2962]. 2026-01-04T02:48:04.954496Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=ODdhNDc1ZGEtOWU5MTNiOWEtYmRhN2FmNWQtZTZhYmViMWQ=, ActorId: [3:7591332726743482163:2962], ActorState: ExecuteState, LegacyTraceId: 01ke3ef7g7f0vccjj91839jdp0, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7591332726743482219:2962] from: [3:7591332726743482181:2962] trace_id# 2026-01-04T02:48:04.954600Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7591332726743482219:2962] TxId: 281474976715666. Ctx: { TraceId: 01ke3ef7g7f0vccjj91839jdp0, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ODdhNDc1ZGEtOWU5MTNiOWEtYmRhN2FmNWQtZTZhYmViMWQ=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-04T02:48:04.954940Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ODdhNDc1ZGEtOWU5MTNiOWEtYmRhN2FmNWQtZTZhYmViMWQ=, ActorId: [3:7591332726743482163:2962], ActorState: ExecuteState, LegacyTraceId: 01ke3ef7g7f0vccjj91839jdp0, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# Trying to start YDB, gRPC: 2204, MsgBus: 6261 2026-01-04T02:48:06.408292Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332734488445276:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:06.408387Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:06.439522Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:06.509882Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591332734488445227:2081] 1767494886406013 != 1767494886406016 2026-01-04T02:48:06.531762Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:06.541417Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:06.541520Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:06.567334Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:06.624181Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:06.624206Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:06.624215Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:06.624302Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:06.707348Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:07.114794Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:07.418191Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:10.225497Z node 4 :KQP_WORKLOAD_SERVICE 
WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332751668315294:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:10.225506Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332751668315303:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:10.225578Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:10.225858Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332751668315309:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:10.225935Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:10.228210Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:10.235620Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591332751668315308:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:48:10.290432Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591332751668315361:2534] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:48:10.343252Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:10.385600Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:11.499442Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[4:7591332734488445276:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:11.504584Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:11.551309Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:13.147495Z node 4 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=5; 2026-01-04T02:48:13.147807Z node 4 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 5 at tablet 72075186224037889 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-01-04T02:48:13.149026Z node 4 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 5 at tablet 72075186224037889 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } 2026-01-04T02:48:13.149221Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [4:7591332760258257960:2964], Table: `/Root/KV` ([72057594046644480:43:1]), SessionActorId: [4:7591332760258257935:2964]Got LOCKS BROKEN for table `/Root/KV`. ShardID=72075186224037889, Sink=[4:7591332760258257960:2964].{
: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-04T02:48:13.149338Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [4:7591332760258257953:2964], SessionActorId: [4:7591332760258257935:2964], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[4:7591332760258257935:2964]. 2026-01-04T02:48:13.149586Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=4&id=NzljMzM1MzMtMmI3YTI2NDItZGQzMDk1NzAtZDYxZjFiOTI=, ActorId: [4:7591332760258257935:2964], ActorState: ExecuteState, LegacyTraceId: 01ke3effe8d7yn2kn80t2mpjne, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7591332764553225289:2964] from: [4:7591332760258257953:2964] trace_id# 2026-01-04T02:48:13.149715Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [4:7591332764553225289:2964] TxId: 281474976710666. Ctx: { TraceId: 01ke3effe8d7yn2kn80t2mpjne, Database: /Root, SessionId: ydb://session/3?node_id=4&id=NzljMzM1MzMtMmI3YTI2NDItZGQzMDk1NzAtZDYxZjFiOTI=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-04T02:48:13.150110Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=NzljMzM1MzMtMmI3YTI2NDItZGQzMDk1NzAtZDYxZjFiOTI=, ActorId: [4:7591332760258257935:2964], ActorState: ExecuteState, LegacyTraceId: 01ke3effe8d7yn2kn80t2mpjne, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# >> JsonProtoConversion::JsonToProtoArray [GOOD] >> JsonProtoConversion::JsonToProtoMap [GOOD] >> JsonProtoConversion::JsonToProtoSingleValue [GOOD] >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_extsubdomain/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::OlapSnapshotROInteractive2 [GOOD] Test command err: Trying to start YDB, gRPC: 21345, MsgBus: 26577 2026-01-04T02:47:20.014773Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332531770219276:2255];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:20.014828Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:20.435823Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:20.444485Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:20.444585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:20.556402Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:20.647048Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:20.647232Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332531770219051:2081] 1767494839970734 != 1767494839970737 2026-01-04T02:47:20.691179Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:20.778401Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:20.778420Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:20.778428Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:20.778514Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:20.973070Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:21.373164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:21.381058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:23.799523Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332548950089109:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:23.799651Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:23.800010Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332548950089137:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:23.803652Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332548950089138:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:23.803746Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:23.805202Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:23.831260Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332548950089140:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:23.942414Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332548950089192:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:24.277691Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:47:24.411575Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332553245056626:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:24.411778Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332553245056626:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:24.411982Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332553245056626:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:24.412064Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332553245056626:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:24.412158Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332553245056626:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:24.412248Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332553245056626:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:24.412334Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332553245056626:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:24.412463Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332553245056626:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:24.412579Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332553245056626:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:47:24.412683Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7591332553245056626:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:47:24.412748Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332553245056626:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:47:24.412825Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332553245056626:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:47:24.412944Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332553245056626:2338];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:47:24.427423Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7591332553245056652:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:24.427484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7591332553245056652:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:24.427656Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7591332553245056652:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:24.427769Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7591332553245056652:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:24.427909Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7591332553245056652:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:24.428005Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7591332553245056652:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:24.428135Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7591332553245056652:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:24.428247Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037892;self_id=[1:7591332553245056652:2344];tablet_id=72075186224037892;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:24.428358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:84 ... 
e;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.325406Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.325470Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.325493Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.334710Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.334784Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.334807Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038063;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.336574Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.336635Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.336655Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.344924Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.344991Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.345010Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.347530Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.347622Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.347646Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.357649Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.357655Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.357715Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.357720Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.357737Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.357740Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.368940Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.368940Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.369002Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.369009Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.369026Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.369028Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.380424Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.380424Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.380489Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.380497Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.380512Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038009;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.380517Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038067;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.391756Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.391757Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038017;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.391819Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038017;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.391825Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.391839Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038017;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.391844Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.432327Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3eeyvy52efm6v585tfw6vv", SessionId: ydb://session/3?node_id=3&id=NzFhMTc5ZTEtOWZmODk0MTUtYWQ1MjVjODUtYzYwOGI4YTI=, Slow query, duration: 10.469918s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n 
);\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-04T02:48:10.062234Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:48:10.062267Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest >> TSchemeShardTest::CreateTable >> TestMalformedRequest::CompressedGzipContentLengthLower |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::JsonToProtoMap [GOOD] |94.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> JsonProtoConversion::NlohmannJsonToProtoArray [GOOD] |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OltpNamedStatementNoSink [GOOD] Test command err: Trying to start YDB, gRPC: 29221, MsgBus: 15855 2026-01-04T02:47:39.755984Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332617650403247:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:39.761368Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:40.113994Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:40.137390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:40.137504Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:40.188776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:40.261577Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:40.263228Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332617650403208:2081] 1767494859754550 != 1767494859754553 2026-01-04T02:47:40.305141Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:40.319063Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 
2026-01-04T02:47:40.319082Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:40.319088Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:40.319176Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:40.780834Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:40.852516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:40.861835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:43.098612Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332634830273282:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.098756Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.099315Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332634830273294:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.099352Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332634830273295:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.099427Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.102129Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332634830273299:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.102198Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:43.103958Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:43.117097Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332634830273298:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:43.268723Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332634830273351:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:43.536951Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.667690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:44.733470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:44.757546Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332617650403247:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:44.757618Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 31795, MsgBus: 7952 2026-01-04T02:47:48.206007Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:48.214554Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:48.292221Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332657375656119:2081] 1767494868124519 != 1767494868124522 2026-01-04T02:47:48.296309Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:48.298214Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:48.298279Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:48.310363Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:48.449943Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-04T02:47:48.510411Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:48.510449Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:48.510457Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:48.510536Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:49.035906Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:49.178594Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:51.817400Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332670260558876:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:51.817466Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:51.817508Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332670260558903:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:51.817938Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332670260558905:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:51.818018Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found ... Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:00.029339Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332706140631090:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:00.029463Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:00.030543Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332706140631112:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:00.030601Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:00.032923Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:00.043177Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332706140631111:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:48:00.105159Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332706140631164:2535] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:48:00.158460Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:00.196523Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:01.225772Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:01.976901Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332688960761054:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:02.072707Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:05.212909Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7591332727615475643:2962], SessionActorId: [3:7591332714730573668:2962], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 . sessionActorId=[3:7591332714730573668:2962]. 2026-01-04T02:48:05.213140Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=NzMxMDY2NTEtYjJjZmZmY2YtZjNhNTdjYzMtMWNiYmNkZTk=, ActorId: [3:7591332714730573668:2962], ActorState: ExecuteState, LegacyTraceId: 01ke3ef7hyfve2zje8h6zbce04, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7591332727615475644:2962] from: [3:7591332727615475643:2962] trace_id# 2026-01-04T02:48:05.213226Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7591332727615475644:2962] TxId: 281474976710666. Ctx: { TraceId: 01ke3ef7hyfve2zje8h6zbce04, Database: /Root, SessionId: ydb://session/3?node_id=3&id=NzMxMDY2NTEtYjJjZmZmY2YtZjNhNTdjYzMtMWNiYmNkZTk=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV2`, code: 2001 } trace_id# 2026-01-04T02:48:05.213542Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=NzMxMDY2NTEtYjJjZmZmY2YtZjNhNTdjYzMtMWNiYmNkZTk=, ActorId: [3:7591332714730573668:2962], ActorState: ExecuteState, LegacyTraceId: 01ke3ef7hyfve2zje8h6zbce04, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV2`" issue_code: 2001 severity: 1 } trace_id# Trying to start YDB, gRPC: 18243, MsgBus: 14558 2026-01-04T02:48:06.641281Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:48:06.642659Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:06.726030Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:06.727748Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591332735380722176:2081] 1767494886606655 != 1767494886606658 2026-01-04T02:48:06.774947Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:06.775049Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:06.779956Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:06.861189Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:06.877634Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:06.877658Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:06.877666Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:06.877753Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:07.395453Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:07.636218Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:10.468306Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332752560592242:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:10.468308Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332752560592250:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:10.468406Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:10.468818Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:7591332752560592257:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:10.468891Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:10.472410Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:10.482978Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:7591332752560592256:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:48:10.586100Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:7591332752560592309:2533] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:48:10.656411Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:10.697016Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:11.884136Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot-StreamIndex [GOOD] |94.9%| [TA] $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpSinkLocks::EmptyRangeAlreadyBroken [GOOD] >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap |94.9%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_extsubdomain/test-results/unittest/{meta.json ... results_accumulator.log} >> TSchemeShardTest::CreateTable [GOOD] >> TSchemeShardTest::CreateTableWithDate ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocksTricky::TestSecondaryIndexWithoutSnapshot-StreamIndex [GOOD] Test command err: Trying to start YDB, gRPC: 5716, MsgBus: 1929 ... waiting for SysViewsRoster update finished 2026-01-04T02:47:27.504208Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:27.625079Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:47:27.625496Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:27.625579Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:47:27.912001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:27.912138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:27.989292Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494844545275 != 1767494844545278 2026-01-04T02:47:28.005553Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:28.052849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:28.196326Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:47:28.637713Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:28.637751Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:28.637772Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:28.637997Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:28.649893Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:29.007215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:29.081172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.402043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:29.814198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:30.114695Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:31.016461Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1903:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:31.016624Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:31.017574Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1976:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:31.017784Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:31.054428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:31.261040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:31.548352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:31.879710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:32.180485Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:32.460823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:32.771052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:33.092376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:33.499226Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:33.499354Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:33.499897Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2798:4178], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:33.499980Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:33.500048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2801:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:33.505583Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:33.687405Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2803:4183], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:47:33.743845Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2858:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:35.495201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:35.745938Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is u ... 12298Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:48:08.212374Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:48:08.539444Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:08.539589Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:08.568744Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:34:2081] 1767494884640188 != 1767494884640191 2026-01-04T02:48:08.573904Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:08.620253Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:08.721574Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... 
waiting for SysViewsRoster update finished (done) 2026-01-04T02:48:09.100462Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:09.100540Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:09.100592Z node 4 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:09.102443Z node 4 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:09.114901Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:09.652078Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:09.695353Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:09.966855Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:10.362509Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:10.667338Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:11.229989Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1904:3512], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:11.230475Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:11.231578Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1978:3531], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:11.231735Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:11.263790Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:11.454387Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:11.758055Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:11.999412Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:12.232794Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:12.499874Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:12.759706Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:13.040162Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:13.428841Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2789:4172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:13.428947Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:13.429440Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2793:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:13.429550Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:2796:4179], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:13.429650Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:13.436476Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:13.598231Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:2798:4181], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:48:13.656457Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:2858:4222] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:48:15.697544Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:15.966050Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:16.313713Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TFlatTest::WriteMergeAndRead >> TFlatTest::SelectRangeReverseItemsLimit >> TFlatTest::SplitInvalidPath >> TFlatTest::SplitEmptyToMany >> TLocksTest::Range_IncorrectDot1 >> TObjectStorageListingTest::MaxKeysAndSharding >> TFlatTest::SelectRangeReverse >> TLocksTest::Range_IncorrectNullDot1 >> TLocksFatTest::RangeSetRemove >> TObjectStorageListingTest::Listing >> TFlatTest::AutoSplitBySize >> TFlatTest::SelectBigRangePerf >> TLocksTest::BrokenSameKeyLock >> TLocksTest::Range_BrokenLockMax >> TLocksTest::BrokenLockErase >> TFlatTest::ShardFreezeRejectBadProtobuf >> TLocksTest::UpdateLockedKey >> TFlatTest::CrossRW >> TLocksTest::CK_Range_BrokenLock >> TObjectStorageListingTest::Split >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] >> TLocksTest::Range_CorrectNullDot >> TFlatTest::RejectByPerShardReadSize >> KqpSinkTx::LocksAbortOnCommit [GOOD] >> KqpSnapshotIsolation::ConflictWrite-IsOlap-FillTables [GOOD] >> KqpSnapshotIsolation::ConflictWrite-IsOlap+FillTables >> KqpSnapshotIsolation::TPragmaSettingOlap+IsSnapshotIsolation [GOOD] >> KqpSnapshotIsolation::TPragmaSettingOlap-IsSnapshotIsolation >> TSchemeShardTest::CreateTableWithDate [GOOD] >> TSchemeShardTest::CreateIndexedTable >> KqpLocks::MixedTxFail-useSink [GOOD] >> KqpSinkLocks::OlapInsertWithBulkUpsert+UseBulkUpsert [GOOD] >> KqpSinkLocks::OlapInsertWithBulkUpsert-UseBulkUpsert ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotRead::TestSnapshotExpiration+withSink [GOOD] Test command err: Trying to start YDB, gRPC: 7006, MsgBus: 12971 2026-01-04T02:47:46.232522Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332647697608458:2108];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:46.232714Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:46.274664Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:46.560476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:46.560607Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:46.567042Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:46.760574Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:46.760809Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332647697608388:2081] 1767494866225290 != 1767494866225293 2026-01-04T02:47:46.762580Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:46.781254Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:46.781277Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:46.781303Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:46.781383Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:47.044416Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:47.249792Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:47.292523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:47.362159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:47.556731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:47.728057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:47.829687Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:49.932995Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332660582512148:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:49.933131Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:49.933571Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332660582512158:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:49.933662Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:50.280298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:50.313283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:50.347942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:50.410390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:50.446259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:50.485646Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:50.529606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:50.585755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:50.685600Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332664877480332:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:50.685680Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:50.686168Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332664877480337:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:50.686212Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332664877480338:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:50.686244Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:50.690956Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:50.708596Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332664877480341:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:47:50.775501Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332664877480392:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:51.232214Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332647697608458:2108];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:51.232284Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 61329, MsgBus: 15177 2026-01-04T02:47:54.518396Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: flin ... n, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:03.052256Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:03.106495Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:03.240847Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:03.312469Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:03.447506Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:06.021701Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332732544342713:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:06.021823Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:06.022334Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332732544342722:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:06.022387Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:06.075938Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:06.118115Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:06.162185Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:06.201114Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:06.235865Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:06.273795Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:06.311771Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:06.372213Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:06.452058Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332732544343589:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:06.452166Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:06.452456Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332732544343595:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:06.452508Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:06.452519Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332732544343594:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:06.456155Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:06.466971Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332732544343598:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:48:06.552707Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332732544343651:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:48:07.342435Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332715364471789:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:07.342513Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:17.426059Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:48:17.426094Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:18.981068Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [3:7591332784083951928:2670], TxId: 281474976710682, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3efn0d4wd1sb1e650q0n76. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=M2Q2ZGI1YjMtOTIzZTNkNjgtZDNhMzM2MmYtNDlhZmU3MTc=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Table id 38 has no snapshot at v1767494888278/18446744073709551615 shard 72075186224037888 with lowWatermark v1767494888614/18446744073709551615 (node# 3 state# Ready) } } 2026-01-04T02:48:18.981614Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7591332784083951928:2670], TxId: 281474976710682, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3efn0d4wd1sb1e650q0n76. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=M2Q2ZGI1YjMtOTIzZTNkNjgtZDNhMzM2MmYtNDlhZmU3MTc=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Table id 38 has no snapshot at v1767494888278/18446744073709551615 shard 72075186224037888 with lowWatermark v1767494888614/18446744073709551615 (node# 3 state# Ready) } }. 2026-01-04T02:48:18.982475Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7591332784083951929:2671], TxId: 281474976710682, task: 2. Ctx: { TraceId : 01ke3efn0d4wd1sb1e650q0n76. CheckpointId : . RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=M2Q2ZGI1YjMtOTIzZTNkNjgtZDNhMzM2MmYtNDlhZmU3MTc=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7591332784083951924:2531], status: ABORTED, reason: {
: Error: Terminate execution } 2026-01-04T02:48:18.983075Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=M2Q2ZGI1YjMtOTIzZTNkNjgtZDNhMzM2MmYtNDlhZmU3MTc=, ActorId: [3:7591332741134278559:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3efn0d4wd1sb1e650q0n76, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Read request aborted" severity: 1 issues { message: "Table id 38 has no snapshot at v1767494888278/18446744073709551615 shard 72075186224037888 with lowWatermark v1767494888614/18446744073709551615 (node# 3 state# Ready)" severity: 1 } } trace_id# |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap-UpdateAfterInsert [GOOD] >> TLocksTest::NoLocksSet >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::LocksAbortOnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 27408, MsgBus: 65149 2026-01-04T02:47:38.764832Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332612805591263:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:38.764900Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:39.009554Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:39.095546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:39.095647Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:39.143679Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:39.170995Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:39.177543Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332612805591222:2081] 1767494858764128 != 1767494858764131 2026-01-04T02:47:39.212030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:39.413997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:39.414022Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:39.414036Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:39.414146Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:39.786206Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:40.084697Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:40.116264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:42.314404Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332629985461294:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.314501Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.314562Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332629985461302:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.314822Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332629985461308:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.314893Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.318256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:42.329502Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332629985461309:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:42.446470Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332629985461361:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:42.785586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:47:43.024710Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332629985461532:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:43.024903Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332629985461532:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:43.025088Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332629985461532:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:43.025183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332629985461532:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:43.025277Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332629985461532:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:43.025376Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332629985461532:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:43.025478Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332629985461532:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:43.025565Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332629985461532:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:43.025648Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037889;self_id=[1:7591332629985461532:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:47:43.025730Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037889;self_id=[1:7591332629985461532:2339];tablet_id=72075186224037889;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:47:43.027434Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332629985461531:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:43.027521Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332629985461531:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:43.027646Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332629985461531:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:43.027742Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332629985461531:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:43.027829Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332629985461531:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:43.027921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332629985461531:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:43.028021Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332629985461531:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:43.028107Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332629985461531:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:43.028182Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332629985461531:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:47:43.028253Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332629985461531:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:47:43.028335Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[1:7591332629985461531:2338];tablet_id=72075186224037891;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:47:43.028414Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: ... 
026-01-04T02:48:07.459293Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:07.459310Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:07.466863Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038080;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:07.466921Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038080;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:07.466939Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038080;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:07.467129Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:07.467175Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:07.467191Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:07.473409Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:07.473496Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:07.473516Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:09.461595Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:48:09.461625Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:10.035357Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=ODdhZGNmMDktN2Q0NGE5NzctNDg3MjUyOGUtOGRmOTBmMjA=, ActorId: [2:7591332735783839879:3544], ActorState: ReadyState, LegacyTraceId: 01ke3efch756e6c10gqsep53aw, Create QueryResponse for error on request, msg: status# NOT_FOUND issues# { message: "Transaction not found: 01ke3efabmfmttcj4rnpw8rwvd" issue_code: 2015 severity: 1 } trace_id# Trying to start YDB, gRPC: 26045, MsgBus: 19492 
2026-01-04T02:48:12.276094Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332757991463807:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:12.276152Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:12.291477Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:12.406512Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:12.408518Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:12.408608Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:12.426168Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:12.494926Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:12.494957Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:12.494966Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:12.495055Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:12.497821Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:12.925393Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:13.284240Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:16.003269Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332775171333819:2322], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:16.003292Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332775171333834:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:16.003360Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:16.003700Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591332775171333843:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:16.003793Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:16.007082Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:16.018312Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591332775171333842:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:48:16.110035Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591332775171333895:2534] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:48:16.171678Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:16.237601Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:17.172804Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:17.276780Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591332757991463807:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:17.276856Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:19.047191Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7591332788056243913:2963], SessionActorId: [3:7591332783761276333:2963], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 . sessionActorId=[3:7591332783761276333:2963]. 2026-01-04T02:48:19.047462Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=MjU3NDU3NC1lMWJkYzE1My1hMzM4NWFkNC0xZTc5ZjU4ZA==, ActorId: [3:7591332783761276333:2963], ActorState: ExecuteState, LegacyTraceId: 01ke3efnb2abgd503drq0tw4vs, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7591332788056243914:2963] from: [3:7591332788056243913:2963] trace_id# 2026-01-04T02:48:19.047574Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7591332788056243914:2963] TxId: 281474976715667. Ctx: { TraceId: 01ke3efnb2abgd503drq0tw4vs, Database: /Root, SessionId: ydb://session/3?node_id=3&id=MjU3NDU3NC1lMWJkYzE1My1hMzM4NWFkNC0xZTc5ZjU4ZA==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 } trace_id# 2026-01-04T02:48:19.047967Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=MjU3NDU3NC1lMWJkYzE1My1hMzM4NWFkNC0xZTc5ZjU4ZA==, ActorId: [3:7591332783761276333:2963], ActorState: ExecuteState, LegacyTraceId: 01ke3efnb2abgd503drq0tw4vs, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`" issue_code: 2001 severity: 1 } trace_id# |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TSchemeShardTest::CreateIndexedTable [GOOD] >> TSchemeShardTest::CreateIndexedTableRejects >> TestMalformedRequest::CompressedGzipContentLengthLower [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpLocks::MixedTxFail-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 31254, MsgBus: 13768 2026-01-04T02:47:49.581392Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332662486227572:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:49.582832Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:49.854655Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:49.879288Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:49.879388Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:49.909548Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:49.990559Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332662486227536:2081] 1767494869575281 != 1767494869575284 2026-01-04T02:47:49.994428Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:50.089905Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:50.089939Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:50.089949Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:50.090073Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:50.135347Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:50.581380Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:50.589695Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:50.590755Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:50.628898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:50.830721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:51.035320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:51.110142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:53.187807Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332679666098595:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:53.187927Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:53.188353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332679666098605:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:53.188397Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:53.557134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:53.600449Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:53.633629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:53.666137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:53.737972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:53.778170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:53.856194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:53.905823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:54.030289Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332683961066781:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:54.030371Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:54.030786Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332683961066786:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:54.030827Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332683961066787:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:54.030853Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:54.035476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:54.055057Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332683961066790:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:47:54.138861Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332683961066841:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:54.584443Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332662486227572:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:54.585642Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:56.351082Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7591332692551001809:2533], SessionActorId: [1:7591332688256034447:2533], sta ... thod=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.130954Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.130971Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037923;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.134741Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.134791Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.134804Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037924;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.140451Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.140530Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.140548Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037895;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.141449Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.141501Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.141514Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037949;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.148583Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.148684Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.148707Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037917;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.150382Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.150441Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.150457Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037937;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.156076Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.156137Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.156153Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037913;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.159610Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.159675Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.159692Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037935;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.163410Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.163466Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.163482Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037909;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.168988Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.169051Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.169071Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037939;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.171362Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.171419Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.171435Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.178522Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.178581Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.178596Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037941;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.179031Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.179085Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.179105Z node 4 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037943;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.184520Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.184574Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.184589Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037951;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.187226Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.187287Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:19.187303Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037945;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710661;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=43;result=not_found; 2026-01-04T02:48:20.040585Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=ZTQyY2MzMmYtODkzMmE2NmMtYjcwN2NkMzYtMmY4OTE3MGE=, ActorId: [4:7591332788939450163:2687], ActorState: ExecuteState, LegacyTraceId: 01ke3efp6h6j73jnc8wxsy4e3n, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/DataShard`" issue_code: 2001 severity: 1 } trace_id# 2026-01-04T02:48:20.046974Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037936;tx_state=TTxProgressTx::Complete;fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=lock invalidated;tx_id=281474976710670; |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TFlatTest::SelectRangeReverse [GOOD] >> TFlatTest::SelectRangeReverseExcludeKeys >> TFlatTest::SplitInvalidPath [GOOD] >> TFlatTest::SplitThenMerge >> TFlatTest::SelectRangeReverseItemsLimit [GOOD] >> TFlatTest::SelectRangeReverseIncludeKeys >> TFlatTest::WriteMergeAndRead [GOOD] >> TFlatTest::WriteSplitAndRead >> TFlatTest::SelectBigRangePerf [GOOD] >> TFlatTest::SelectRangeBothLimit >> TFlatTest::ShardFreezeRejectBadProtobuf [GOOD] >> TFlatTest::SelectRangeSkipNullKeys >> TFlatTest::CrossRW [GOOD] >> TFlatTest::GetTabletCounters >> TObjectStorageListingTest::Split [GOOD] >> TObjectStorageListingTest::SuffixColumns >> TestMalformedRequest::CompressedGzipContentLengthHigher ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/data/unittest >> KqpUserConstraint::KqpReadNull-UploadNull [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:48:18.774995Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:18.960543Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:48:18.961525Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:48:18.961614Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:48:19.351510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:19.351651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:19.445644Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494896149985 != 1767494896149988 2026-01-04T02:48:19.464121Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:19.512774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:19.615835Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:48:19.987855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:20.002009Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:20.152376Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.705106Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1070:2897], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:20.705252Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1081:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:20.705370Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:20.707640Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1085:2906], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:20.707836Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:20.715738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:20.850559Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1084:2905], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:48:20.918560Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1141:2943] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/data/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TSnapshotTwoUpdateOlap-UpdateAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 31910, MsgBus: 26079 ... waiting for SysViewsRoster update finished 2026-01-04T02:47:16.596485Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:16.722364Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:47:16.722834Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:16.722895Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:47:17.015591Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:17.015790Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:17.115958Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494833716490 != 1767494833716493 2026-01-04T02:47:17.137171Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:17.187407Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:17.340579Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:47:17.864803Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:17.864877Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:17.864931Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:17.865183Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:17.878584Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:18.386886Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:18.473316Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:917:2782], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:18.473440Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:928:2787], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:18.473543Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:18.483012Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:933:2792], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:18.483128Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:18.488192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:18.628781Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:931:2790], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:18.729635Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:988:2828] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:19.065713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:19.371655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:22.155073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:25.615073Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Write conflict with concurrent transaction.;tx_id=4; 2026-01-04T02:47:25.633643Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 4 at tablet 72075186224037989 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2026-01-04T02:47:25.654887Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 4 at tablet 72075186224037989 Status: STATUS_LOCKS_BROKEN Issues: { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2026-01-04T02:47:25.655231Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [1:8804:8421], Table: `/Root/KV2` ([72057594046644480:44:1]), SessionActorId: [1:8768:8421]Got LOCKS BROKEN for table `/Root/KV2`. ShardID=72075186224037989, Sink=[1:8804:8421].{
: Error: Write conflict with concurrent transaction., code: 2001 } 2026-01-04T02:47:25.655752Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:8793:8421], SessionActorId: [1:8768:8421], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001
: Error: Write conflict with concurrent transaction., code: 2001 . sessionActorId=[1:8768:8421]. 2026-01-04T02:47:25.656193Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=ZGZhOGYxYzgtYjg1NzIwNWYtNGQ5NjZiOWEtZDgxOWQ0MjA=, ActorId: [1:8768:8421], ActorState: ExecuteState, LegacyTraceId: 01ke3ee10a3aenq340msgqqfvx, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:8794:8421] from: [1:8793:8421] trace_id# 2026-01-04T02:47:25.656475Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:8794:8421] TxId: 281474976715664. Ctx: { TraceId: 01ke3ee10a3aenq340msgqqfvx, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZGZhOGYxYzgtYjg1NzIwNWYtNGQ5NjZiOWEtZDgxOWQ0MjA=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV2`., code: 2001 subissue: {
: Error: Write conflict with concurrent transaction., code: 2001 } } trace_id# 2026-01-04T02:47:25.656926Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZGZhOGYxYzgtYjg1NzIwNWYtNGQ5NjZiOWEtZDgxOWQ0MjA=, ActorId: [1:8768:8421], ActorState: ExecuteState, LegacyTraceId: 01ke3ee10a3aenq340msgqqfvx, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV2`." issue_code: 2001 severity: 1 issues { message: "Write conflict with concurrent transaction." issue_code: 2001 severity: 1 } } trace_id# Trying to start YDB, gRPC: 21554, MsgBus: 9553 ... waiting for SysViewsRoster update finished 2026-01-04T02:47:30.301352Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:30.308215Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:47:30.311954Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:47:30.312201Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:30.312327Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:47:30.577602Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:30.577728Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:30.606853Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767494846919463 != 1767494846919467 2026-01-04T02:47:30.612396Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:30.659906Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:30.778533Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErro ... node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.825787Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.831843Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.831911Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.831938Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.838647Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.838737Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.838770Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 
2026-01-04T02:48:17.844868Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.844943Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.844979Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.850901Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.850987Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.851014Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.858428Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.858523Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.858554Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.867004Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.867091Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.867124Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.874639Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.874722Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.874749Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.880476Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.880540Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.880563Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.886416Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.886493Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:17.886521Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:18.016979Z node 3 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3ef2fv14qxcd8kwxba2s49", SessionId: ydb://session/3?node_id=3&id=ZDg2MWU4YzYtZTJlZGQwNTEtZWFlY2ZmM2QtOTM5NmU0Yjk=, Slow query, duration: 18.270277s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-04T02:48:19.503613Z node 3 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037919;tx_state=TTxProgressTx::Execute;tx_current=281474976715667;tx_id=281474976715667;commit_tx_id=281474976715667;commit_lock_id=281474976715666;fline=manager.cpp:80;broken_lock_id=281474976715665; 2026-01-04T02:48:19.589015Z node 3 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: 
tablet_id=72075186224037919;self_id=[3:2153:3568];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037919;event=TEvWrite;fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=tablet lock have another internal generation counter: 18446744073709551615 != 0;tx_id=281474976715668; 2026-01-04T02:48:19.589284Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4391: SelfId: [3:9181:8031], SessionActorId: [3:8617:8031], Got LOCKS BROKEN for table. ShardID=72075186224037919, Sink=[3:9181:8031].{
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } 2026-01-04T02:48:19.589466Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:9181:8031], SessionActorId: [3:8617:8031], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 . sessionActorId=[3:8617:8031]. 2026-01-04T02:48:19.589705Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=ODQxNWI5NzYtZjA3YzAwNjctYzhkNTViOTQtYjVmOGYzZDU=, ActorId: [3:8617:8031], ActorState: ExecuteState, LegacyTraceId: 01ke3efn1x9pshk8brkga8v1b1, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:9756:8031] from: [3:9181:8031] trace_id# 2026-01-04T02:48:19.590264Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:9756:8031] TxId: 281474976715668. Ctx: { TraceId: 01ke3efn1x9pshk8brkga8v1b1, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ODQxNWI5NzYtZjA3YzAwNjctYzhkNTViOTQtYjVmOGYzZDU=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/KV`., code: 2001 subissue: {
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } } trace_id# 2026-01-04T02:48:19.590734Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ODQxNWI5NzYtZjA3YzAwNjctYzhkNTViOTQtYjVmOGYzZDU=, ActorId: [3:8617:8031], ActorState: ExecuteState, LegacyTraceId: 01ke3efn1x9pshk8brkga8v1b1, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`." issue_code: 2001 severity: 1 issues { message: "tablet lock have another internal generation counter: 18446744073709551615 != 0" issue_code: 2001 severity: 1 } } trace_id# |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest |94.9%| [TA] $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} >> AsyncIndexChangeExchange::ShouldRejectChangesOnQueueOverflowBySize [GOOD] >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace |94.9%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} |94.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/data/test-results/unittest/{meta.json ... results_accumulator.log} >> TLocksTest::GoodLock >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] >> TFlatTest::WriteSplitByPartialKeyAndRead >> TLocksFatTest::RangeSetBreak >> TObjectStorageListingTest::Listing [GOOD] >> TObjectStorageListingTest::ManyDeletes >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] >> TFlatTest::Ls >> TFlatTest::SelectRangeBothLimit [GOOD] >> TFlatTest::SplitThenMerge [GOOD] >> TFlatTest::WriteSplitAndRead [GOOD] >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] >> TLocksTest::GoodDupLock >> TFlatTest::GetTabletCounters [GOOD] >> TFlatTest::SelectRangeSkipNullKeys [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeAnalyzeOneColumnTableSpecificColumns [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:46:01.653962Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:01.725032Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:01.730589Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:01.730849Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:01.730946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:02.040982Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:02.137493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:02.137605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:02.185021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:02.252730Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:02.897271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:02.897719Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:02.897757Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:02.897789Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:02.898166Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:02.962747Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:03.505014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:06.163643Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:06.171666Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:06.175440Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:06.206398Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:06.206515Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:06.235186Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:06.237289Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:06.416855Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:06.416988Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:06.418448Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:06.419104Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:06.419673Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:06.420752Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:06.421015Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:06.421135Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:06.421404Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:06.421547Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:06.421667Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:06.448286Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:06.661867Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:06.730721Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:06.730813Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:06.759541Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:06.760763Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:06.760970Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:06.761026Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:06.761077Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:06.761130Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:06.761176Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:06.761222Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:06.761818Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:06.762120Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:46:06.764253Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:06.772236Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:46:06.772307Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:46:06.772398Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:06.783242Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:06.793734Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:06.797956Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:06.801144Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:06.801240Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:06.829337Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:46:06.829731Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:46:06.944249Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:06.972846Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:07.224662Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:07.226876Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:07.230549Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:07.234120Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:07.234217Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... ool default not found or you don't have access permissions } 2026-01-04T02:46:33.433313Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4132:3726], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:33.433441Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:33.438790Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4149:3731], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:33.441233Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720659:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:33.485646Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4131:3725], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720659 completed, doublechecking } 2026-01-04T02:46:33.677330Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4216:3772], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:33.703405Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4215:3771] txid# 281474976720660, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:33.878201Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4237:3785]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:33.878434Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:46:33.878513Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4239:3787] 2026-01-04T02:46:33.878561Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4239:3787] 2026-01-04T02:46:33.878798Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4240:3788] 2026-01-04T02:46:33.878933Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4240:3788], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:46:33.878987Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:46:33.879092Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4239:3787], server id = [2:4240:3788], tablet id = 72075186224037894, status = OK 2026-01-04T02:46:33.879177Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:46:33.879232Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4237:3785], StatRequests.size() = 1 2026-01-04T02:46:33.879395Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:47:30.866867Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4094:2462], ActorId: [2:4104:3711], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:47:30.867136Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4094:2462], ActorId: [2:4104:3711], Start read next stream part 2026-01-04T02:47:30.868011Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3ece5w5frmxrn3h67s4ef0", SessionId: ydb://session/3?node_id=2&id=Yjg3YjJhYTctODgwNjY0NzgtZTgzNjE2OS1iZTU3YTZhMw==, Slow query, duration: 57.439662s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:47:30.868645Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4094:2462], ActorId: [2:4415:3887], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:47:30.871940Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32100, txId: 18446744073709551615] shutting down 2026-01-04T02:47:30.872480Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4094:2462], ActorId: [2:4415:3887], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-04T02:47:30.872609Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4094:2462], ActorId: [2:4415:3887], Start read next stream part 2026-01-04T02:47:30.876136Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4094:2462], ActorId: [2:4104:3711], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:47:30.876212Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4094:2462], ActorId: [2:4104:3711], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTRkNjg1N2YtNTZkMWJjYmYtZTljNGIzYWUtOWFiMDgyMTk=, TxId: 2026-01-04T02:47:30.979285Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4440:3908]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:47:30.979612Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:47:30.979668Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4440:3908], StatRequests.size() = 1 2026-01-04T02:48:24.033463Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4094:2462], ActorId: [2:4415:3887], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:48:24.033614Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4094:2462], ActorId: [2:4415:3887], Start read next stream part 2026-01-04T02:48:24.034448Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3ee69r2k8wvh7fnj3pwdfe", SessionId: ydb://session/3?node_id=2&id=OTUxNTdlOS1hYjQxZDg0OC1lN2Q2ZTI3YS03OGVjYTY1, Slow query, duration: 53.156831s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:48:24.036688Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4594:3991], ActorId: [2:4595:3992], Starting query actor #1 [2:4596:3993] 2026-01-04T02:48:24.036784Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4595:3992], ActorId: [2:4596:3993], Bootstrap. 
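For readability, the RunStreamQuery text in the record above (also reported by KQP_SLOW_LOG, duration: 53.156831s) corresponds to the following YQL. This is only a reformatting of the statement captured in the log, not additional test code:

    $f0 = ($p0,$p1) -> { return AggregationFactory(
        "UDAF",
        ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },
        ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },
        StatisticsInternal::CMSMerge,
        StatisticsInternal::CMSFinalize,
        StatisticsInternal::CMSSerialize,
        StatisticsInternal::CMSDeserialize,
    ) };

    SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`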
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:48:24.039608Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32160, txId: 18446744073709551615] shutting down 2026-01-04T02:48:24.040468Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4595:3992], ActorId: [2:4596:3993], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NWYwY2QzMTgtNDBjMWU0YmMtMjQxODI2MmMtZmNjNTI4Zjk=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:48:24.042193Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4094:2462], ActorId: [2:4415:3887], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:48:24.042285Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4094:2462], ActorId: [2:4415:3887], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzczY2I4ZDgtYzVmYTRhNS0yYmQ0NGM3My03ZWFhMzhhZA==, TxId: 2026-01-04T02:48:24.118654Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4613:4007]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:48:24.118972Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:48:24.119025Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4613:4007], StatRequests.size() = 1 2026-01-04T02:48:24.383825Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4595:3992], ActorId: [2:4596:3993], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NWYwY2QzMTgtNDBjMWU0YmMtMjQxODI2MmMtZmNjNTI4Zjk=, TxId: 2026-01-04T02:48:24.383925Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4595:3992], ActorId: [2:4596:3993], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NWYwY2QzMTgtNDBjMWU0YmMtMjQxODI2MmMtZmNjNTI4Zjk=, TxId: 2026-01-04T02:48:24.384351Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4594:3991], ActorId: [2:4595:3992], Got response [2:4596:3993] SUCCESS 2026-01-04T02:48:24.384686Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:48:24.439895Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:48:24.440003Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
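For readability, the RunDataQuery text in the record above corresponds to the following YQL, reformatted from the log. The element types of the List parameters in the DECLARE statements were stripped in the captured log text, so they are shown as captured rather than guessed:

    DECLARE $owner_id AS Uint64;
    DECLARE $local_path_id AS Uint64;
    DECLARE $stat_types AS List;       -- element type lost in log capture
    DECLARE $column_tags AS List>;     -- element type lost in log capture
    DECLARE $data AS List;             -- element type lost in log capture

    $to_struct = ($t) -> {
        RETURN <|
            owner_id:$owner_id,
            local_path_id:$local_path_id,
            stat_type:$t.0,
            column_tag:$t.1,
            data:$t.2,
        |>;
    };

    UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data)
    SELECT owner_id, local_path_id, stat_type, column_tag, data
    FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct));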
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3159:3430] |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeServerless [GOOD] >> TObjectStorageListingTest::SuffixColumns [GOOD] >> TSchemeShardTest::CreateIndexedTableRejects [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDrop >> KqpSinkLocks::OlapUpdateLocksOneShardRowExists [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeBothLimit [GOOD] Test command err: 2026-01-04T02:48:19.743153Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332791081631519:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.748553Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:20.197901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.211618Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.211758Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.361374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.389675Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.389931Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332791081631415:2081] 1767494899737547 != 1767494899737550 2026-01-04T02:48:20.415519Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.709257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:20.720236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:20.730447Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:20.753176Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:20.766640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-04T02:48:20.854145Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) insert finished 11582 usec 9215 usec 9157 usec 8788 usec 9106 usec 8774 usec 9151 usec 9098 usec 12488 usec 12721 usec 2026-01-04T02:48:23.781904Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:23.782028Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:48:23.793923Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332806118297549:2081] 1767494903691346 != 1767494903691349 2026-01-04T02:48:23.822118Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:23.822184Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:23.822538Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:23.843774Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:24.068677Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.226792Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:24.238161Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:24.250116Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:24.260241Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.710417Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndRead [GOOD] Test command err: 2026-01-04T02:48:19.719307Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332787844243534:2168];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.719540Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:19.759104Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:20.076034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.076139Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.084469Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.152283Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.209489Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.441908Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.509590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:20.515922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:20.528221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:20.606725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.728352Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:20.985163Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1297 647 2154)b }, ecr=1.000 2026-01-04T02:48:20.998398Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.008s,wait=0.004s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1171 521 2626)b }, ecr=1.000 2026-01-04T02:48:21.026868Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1653 647 6413)b }, ecr=1.000 2026-01-04T02:48:21.063895Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put 
Spent{time=0.020s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2406 1432 5183)b }, ecr=1.000 2026-01-04T02:48:21.119324Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:21.122752Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:48:21.122820Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:48:21.124475Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 8r (max 9), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3362 2180 6413)b }, ecr=1.000 2026-01-04T02:48:21.125847Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-04T02:48:21.126174Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2026-01-04T02:48:21.129356Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:21.133616Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:48:21.133677Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:48:21.137124Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:21.138914Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-04T02:48:21.138958Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:48:21.146244Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:21.148230Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.21, eph 3} end=Done, 4 blobs 9r (max 9), put Spent{time=0.008s,wait=0.002s,interrupts=1} Part{ 2 pk, lobs 4 +0, (4201 2983 5183)b }, ecr=1.000 2026-01-04T02:48:21.149976Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-04T02:48:21.150024Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:48:21.150552Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037889, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-04T02:48:21.150584Z node 1 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037889, table# 1001, finished edge# 0, front# 0 2026-01-04T02:48:21.156891Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:21.158961Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-04T02:48:21.159139Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:21.160655Z node 
1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-04T02:48:21.161221Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:21.161885Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2026-01-04T02:48:21.162852Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-04T02:48:21.162992Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:21.163495Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037889 restored its data 2026-01-04T02:48:21.164264Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-04T02:48:21.164900Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:21.165403Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2026-01-04T02:48:21.166218Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-04T02:48:21.166319Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:21.166763Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037889 restored its data 2026-01-04T02:48:21.167574Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-04T02:48:21.168287Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:21.168779Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2026-01-04T02:48:21.169544Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-04T02:48:21.169641Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:21.170128Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037889 restored its data 2026-01-04T02:48:21.170889Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-04T02:48:21.171548Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:21.172013Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2026-01-04T02:48:21.172763Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-04T02:48:21.172874Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:21.173713Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037889 restored its data 2026-01-04T02:48:21.174530Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 
2026-01-04T02:48:21.174609Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:21.175056Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037888 restored its data 2026-01-04T02:48:21.175834Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-04T02:48:21.176396Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:21.176895Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976710680 at 72075186224037889 restored its data 2026-01-04T02:48:21.177706Z node 1 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976710680 released its data 2026-01-04T02:48:21.177828Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:21.178293Z node 1 :TX_DATASHARD DEBU ... 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:24.635412Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:24.635421Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:24.635457Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:24.635479Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:24.635528Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:24.635543Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2026-01-04T02:48:24.635544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:24.635553Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2026-01-04T02:48:24.635563Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-04T02:48:24.635574Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-04T02:48:24.635593Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2026-01-04T02:48:24.635605Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:24.635607Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2026-01-04T02:48:24.635614Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:24.635652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:24.635661Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:24.635664Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-01-04T02:48:24.636385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-01-04T02:48:24.636580Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-04T02:48:24.636667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-01-04T02:48:24.636892Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-01-04T02:48:24.637079Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-01-04T02:48:24.637087Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-04T02:48:24.637181Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-04T02:48:24.637240Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-04T02:48:24.637384Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-04T02:48:24.637854Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-04T02:48:24.638455Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-04T02:48:24.638487Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7591332812799682862:2815], serverId# [2:7591332812799682865:2818], sessionId# [0:0:0] 2026-01-04T02:48:24.642208Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-04T02:48:24.642233Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-01-04T02:48:24.642268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-04T02:48:24.642279Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-04T02:48:24.642285Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 
2026-01-04T02:48:24.642302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-04T02:48:24.642309Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-04T02:48:24.642379Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-04T02:48:24.642590Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-04T02:48:24.642766Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-01-04T02:48:24.642886Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-04T02:48:24.642975Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-01-04T02:48:24.643098Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-04T02:48:24.643223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-04T02:48:24.643347Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-04T02:48:24.643644Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-04T02:48:24.643703Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-04T02:48:24.648736Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-04T02:48:24.648819Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-04T02:48:24.650077Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-01-04T02:48:24.650111Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7591332812799682863:2816], serverId# [2:7591332812799682866:2819], sessionId# [0:0:0] 2026-01-04T02:48:24.650147Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037892 reason = ReasonStop 2026-01-04T02:48:24.650184Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-04T02:48:24.652423Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-04T02:48:24.652494Z node 2 
:TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-04T02:48:24.652964Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-04T02:48:24.652979Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-04T02:48:24.653010Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2026-01-04T02:48:24.653017Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2026-01-04T02:48:24.653034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2026-01-04T02:48:24.653051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-04T02:48:24.653063Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-04T02:48:24.653085Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-04T02:48:24.653962Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-01-04T02:48:24.653979Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037892 not found 2026-01-04T02:48:24.656995Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-04T02:48:24.657914Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2026-01-04T02:48:24.657989Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 2026-01-04T02:48:24.661869Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-04T02:48:24.661961Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseExcludeKeys [GOOD] Test command err: 2026-01-04T02:48:19.734366Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332789790335771:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.734412Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:20.133766Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.163894Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.163995Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.196849Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.343089Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.349921Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332789790335649:2081] 1767494899712433 != 1767494899712436 2026-01-04T02:48:20.380149Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.573237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:20.582633Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:20.685150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.782238Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:23.486662Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332807517094454:2061];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:23.486714Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:23.520907Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:23.588872Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:23.599364Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332807517094434:2081] 1767494903481885 != 1767494903481888 2026-01-04T02:48:23.628209Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:23.628311Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:23.633068Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:23.819665Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:23.842800Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:23.870354Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitThenMerge [GOOD] Test command err: 2026-01-04T02:48:19.718733Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332787748411958:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.718773Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:20.148800Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.148892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.186619Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.209239Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.300633Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.485906Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.621010Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:20.630323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:20.737982Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:20.788279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusNameConflict Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges), tableStr: /dc-1/Dir1, tableId: , 
opId: 281474976710659:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir1" SourceTabletId: 100500 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 42 } } } } 2026-01-04T02:48:20.815990Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332792043380023:2501] txid# 281474976710659, issues: { message: "Check failed: path: \'/dc-1/Dir1\', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 128: Check failed: path: '/dc-1/Dir1', error: path is not a table (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-04T02:48:23.569886Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332808298038909:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:23.575270Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:23.644585Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:23.861731Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:23.869885Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:23.912195Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:23.918155Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:23.927541Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:24.245275Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:24.251375Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:24.278256Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:24.286733Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.500402Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 
2026-01-04T02:48:24.513122Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.005s,wait=0.003s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-04T02:48:24.518222Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.550327Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-04T02:48:24.573946Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-04T02:48:24.595066Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:24.607210Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:24.609192Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:24.609354Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:24.610645Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:24.611230Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:24.611887Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-04T02:48:24.612747Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:24.612862Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:24.613282Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-04T02:48:24.614048Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:24.614779Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:24.615212Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-04T02:48:24.615919Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:24.616197Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:24.616607Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-04T02:48:24.617290Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:24.617918Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:24.618310Z node 
2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-04T02:48:24.618937Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:24.619023Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:24.619395Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-04T02:48:24.620038Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:24.620633Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:24.621018Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-04T02:48:24.621654Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:24.621737Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:24.622156Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-04T02:48:24.622796Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:24.623421Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:24.623849Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-04T02:48:24.624474Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:24.624555Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:24.624927Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-04T02:48:2 ... 
onReply execute, operationId: 281474976715693:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7591332812593007632 RawX2: 4503608217307467 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2026-01-04T02:48:25.171929Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 2026-01-04T02:48:25.171972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 281474976715693:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7591332812593007632 RawX2: 4503608217307467 } Origin: 72075186224037894 State: 5 TxId: 281474976715693 Step: 0 Generation: 1 2026-01-04T02:48:25.172000Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715693:0, shardIdx: 72057594046644480:7, shard: 72075186224037894, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2026-01-04T02:48:25.172021Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2026-01-04T02:48:25.172036Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976715693:0, datashard: 72075186224037889, at schemeshard: 72057594046644480 2026-01-04T02:48:25.172051Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976715693:0, datashard: 72075186224037894, at schemeshard: 72057594046644480 2026-01-04T02:48:25.172068Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715693:0 129 -> 240 2026-01-04T02:48:25.172398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2026-01-04T02:48:25.172499Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2026-01-04T02:48:25.172528Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:48:25.172811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-04T02:48:25.172934Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-01-04T02:48:25.172946Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2026-01-04T02:48:25.172967Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-01-04T02:48:25.172977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2026-01-04T02:48:25.172995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715693, ready parts: 1/1, is published: true 2026-01-04T02:48:25.173033Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7591332816887975156:2416] message: TxId: 281474976715693 2026-01-04T02:48:25.173052Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715693 ready parts: 1/1 2026-01-04T02:48:25.173066Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715693:0 2026-01-04T02:48:25.173099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976715693:0 2026-01-04T02:48:25.173200Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-04T02:48:25.173644Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037889 state PreOffline 2026-01-04T02:48:25.173685Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-04T02:48:25.173791Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715693 datashard 72075186224037894 state PreOffline 2026-01-04T02:48:25.173830Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2026-01-04T02:48:25.175709Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:48:25.175789Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-01-04T02:48:25.177036Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037894 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:48:25.177103Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037894 Initiating switch from PreOffline to Offline state 2026-01-04T02:48:25.178194Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:48:25.178372Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037894 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:48:25.178497Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591332812593006972 RawX2: 4503608217307374 } TabletId: 72075186224037889 State: 4 2026-01-04T02:48:25.178540Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:48:25.178688Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591332812593007632 RawX2: 
4503608217307467 } TabletId: 72075186224037894 State: 4 2026-01-04T02:48:25.178717Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:48:25.179082Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:25.179115Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:25.179187Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:25.179210Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:25.180241Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-01-04T02:48:25.180270Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037894 state Offline 2026-01-04T02:48:25.181371Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-04T02:48:25.181611Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-04T02:48:25.181783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2026-01-04T02:48:25.181977Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-04T02:48:25.182205Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-04T02:48:25.182227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-04T02:48:25.182278Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-04T02:48:25.182543Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-04T02:48:25.182605Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037894 reason = ReasonStop 2026-01-04T02:48:25.183165Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-04T02:48:25.183204Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2026-01-04T02:48:25.183363Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: 
OnTabletDead: 72075186224037889 2026-01-04T02:48:25.183433Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-04T02:48:25.183972Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-04T02:48:25.183995Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-04T02:48:25.184026Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:7 2026-01-04T02:48:25.184039Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2026-01-04T02:48:25.184067Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-04T02:48:25.184960Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037894 2026-01-04T02:48:25.185022Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037894 |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::GetTabletCounters [GOOD] Test command err: 2026-01-04T02:48:19.781663Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332789368964926:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.781733Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:20.205965Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.209080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.209173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.332801Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.338359Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.436515Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.629265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:20.637524Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:20.649928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:20.724505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.822644Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:23.737820Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332806666024198:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:23.751760Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:23.760970Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:23.900381Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:23.901796Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:23.932869Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:23.933331Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:23.949038Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:24.075840Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.279500Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:24.290333Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:24.298129Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:24.305104Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeReverseIncludeKeys [GOOD] Test command err: 2026-01-04T02:48:19.738044Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332787211367184:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.738121Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:20.058777Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.058884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.071583Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.140796Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.186954Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.190013Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332787211367126:2081] 1767494899710767 != 1767494899710770 2026-01-04T02:48:20.329890Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.476685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:20.587092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.754730Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:23.540077Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332805258090929:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:23.541350Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:23.697989Z node 2 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:23.704330Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:23.704403Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:23.736103Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:23.737129Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:23.741945Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332805258090893:2081] 1767494903537597 != 1767494903537600 2026-01-04T02:48:23.908011Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.023434Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:24.047089Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:24.051619Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> AnalyzeColumnshard::AnalyzeRebootColumnShard [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDrop [GOOD] >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously >> TFlatTest::WriteSplitByPartialKeyAndRead [GOOD] >> TFlatTest::WriteSplitAndReadFromFollower |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksFatTest::RangeSetRemove [GOOD] >> TLocksFatTest::ShardLocks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeServerless [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:59.817384Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:59.895148Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:45:59.900662Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:59.900906Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:59.900994Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:00.226235Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:00.320274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:00.320375Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:00.365944Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:00.436277Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:01.071318Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:01.071882Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:01.071952Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:01.071985Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:01.072525Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:01.138578Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:01.699055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:04.417944Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:04.426332Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:04.430222Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:04.455650Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:04.455833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:04.484919Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:04.487160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:04.647105Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:04.647219Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:04.648796Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.649430Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.650006Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.651062Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.651344Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.651471Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.651807Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.651985Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.652118Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.678670Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:04.885224Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:04.950065Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:04.950156Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:04.979012Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:04.980194Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:04.980413Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:04.980467Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:04.980512Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:04.980558Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:04.980624Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:04.980674Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:04.981244Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:04.981571Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:46:04.984240Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Shared 2026-01-04T02:46:04.991591Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:46:04.991648Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:46:04.991717Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:46:05.001071Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:05.009020Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:05.012020Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:05.014936Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:05.015007Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:05.037827Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:46:05.038102Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:46:05.142465Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:05.275642Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Shared/.metadata/script_executions 2026-01-04T02:46:05.437878Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Shared/.metadata/_statistics 2026-01-04T02:46:05.440050Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:05.443504Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:05.446712Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:05.446832Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 2814749767 ... ce traversal operations sent the requests. 2026-01-04T02:46:33.615142Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 1, paths with incomplete stats: 1, at schemeshard: 72075186224037899 2026-01-04T02:46:33.615215Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 4.731000s, at schemeshard: 72075186224037899 2026-01-04T02:46:33.615442Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037899, stats byte size: 25, entries count: 1, are all stats full: 0 2026-01-04T02:46:33.629320Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete 2026-01-04T02:46:33.683766Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-04T02:46:33.688216Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4851:2462], ActorId: [2:4861:4144], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:46:33.692173Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4851:2462], ActorId: [2:4861:4144], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-04T02:46:33.692348Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4851:2462], ActorId: [2:4861:4144], Start read next stream part 2026-01-04T02:46:33.898760Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4888:4158]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:33.899108Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:46:33.899413Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 18446744073709551615 ] 2026-01-04T02:46:33.899490Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4891:4161] 2026-01-04T02:46:33.899561Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4891:4161] 2026-01-04T02:46:33.900246Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4891:4161], server id = [2:4892:4162], tablet id = 72075186224037894, status = OK 2026-01-04T02:46:33.900334Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4892:4162] 2026-01-04T02:46:33.900470Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4892:4162], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:46:33.900555Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:46:33.900790Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:46:33.900886Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4888:4158], StatRequests.size() = 1 2026-01-04T02:46:33.900971Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:47:32.780937Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4851:2462], ActorId: [2:4861:4144], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:47:32.781203Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4851:2462], ActorId: [2:4861:4144], Start read next stream part 2026-01-04T02:47:32.782022Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3eceew065wnw274hdyxcdj", SessionId: ydb://session/3?node_id=2&id=YmRiMGY4OGUtNTg4YmYyZTAtNDBhZjQzZDAtNWRjNTcyMmY=, Slow query, duration: 59.084346s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:47:32.782639Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4851:2462], ActorId: [2:5067:4262], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:47:32.786227Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4851:2462], ActorId: [2:4861:4144], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:47:32.786302Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4851:2462], ActorId: [2:4861:4144], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZmNjMzNiOC0zOGQzMThjZi1iNmQzMWY3YS05Zjg3NzJkYg==, TxId: 2026-01-04T02:47:32.786450Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33000, txId: 18446744073709551615] shutting down 2026-01-04T02:47:32.787162Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4851:2462], ActorId: [2:5067:4262], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-04T02:47:32.787273Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4851:2462], ActorId: [2:5067:4262], Start read next stream part 2026-01-04T02:47:32.886105Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:5087:4278]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:47:32.886431Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:47:32.886488Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:5087:4278], StatRequests.size() = 1 2026-01-04T02:48:25.981958Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4851:2462], ActorId: [2:5067:4262], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:48:25.982127Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4851:2462], ActorId: [2:5067:4262], Start read next stream part 2026-01-04T02:48:25.983313Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3ee85k76c6z9vdsakvfmy1", SessionId: ydb://session/3?node_id=2&id=YWYyNDFlNTUtYjFlODBkNDgtNWNmYzg3MjktN2M1MjJlMg==, Slow query, duration: 53.191737s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:48:25.985575Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5243:4363], ActorId: [2:5245:4364], Starting query actor #1 [2:5246:4365] 2026-01-04T02:48:25.985650Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5245:4364], ActorId: [2:5246:4365], Bootstrap. 
Database: /Root/Shared, IsSystemUser: 1, run create session 2026-01-04T02:48:25.988958Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33590, txId: 18446744073709551615] shutting down 2026-01-04T02:48:25.989372Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4851:2462], ActorId: [2:5067:4262], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:48:25.989440Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4851:2462], ActorId: [2:5067:4262], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTdjMzNiNjUtNmFlOTg0MTMtMzIyZDAzOGMtMzZkOTVmZjU=, TxId: 2026-01-04T02:48:25.989968Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5245:4364], ActorId: [2:5246:4365], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ODk3Y2E4OGYtMTM3NmQ4YjQtYmEzZWIwODctZmNhNDAwMzk=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:48:26.075539Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:5262:4379]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:48:26.075949Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:48:26.076306Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:182: [72075186224037894] EvRequestStats, node id = 2, schemeshard count = 1, urgent = 0 2026-01-04T02:48:26.076387Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:48:26.076604Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:48:26.076698Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:5262:4379], StatRequests.size() = 1 2026-01-04T02:48:26.076874Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:48:26.284749Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5245:4364], ActorId: [2:5246:4365], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ODk3Y2E4OGYtMTM3NmQ4YjQtYmEzZWIwODctZmNhNDAwMzk=, TxId: 2026-01-04T02:48:26.284855Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5245:4364], ActorId: [2:5246:4365], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ODk3Y2E4OGYtMTM3NmQ4YjQtYmEzZWIwODctZmNhNDAwMzk=, TxId: 2026-01-04T02:48:26.285293Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5243:4363], ActorId: [2:5245:4364], Got response [2:5246:4365] SUCCESS 2026-01-04T02:48:26.285714Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:48:26.314901Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force 
traversal for path [OwnerId: 72075186224037899, LocalPathId: 32] 2026-01-04T02:48:26.314992Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3857:3638] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeSkipNullKeys [GOOD] Test command err: 2026-01-04T02:48:19.754808Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332788675557300:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.754886Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:20.238670Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.248414Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.248491Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.376908Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.415552Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.417945Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332788675557267:2081] 1767494899752051 != 1767494899752054 2026-01-04T02:48:20.450568Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.687954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:20.700522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:20.764710Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:20.772745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.945895Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332792970525480:2565] txid# 281474976710659, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2026-01-04T02:48:20.956170Z node 1 :TX_PROXY 
ERROR: schemereq.cpp:624: Actor# [1:7591332792970525493:2571] txid# 281474976710660, issues: { message: "Unexpected freeze state" severity: 1 } Error 128: Unexpected freeze state 2026-01-04T02:48:20.958635Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332792970525499:2576] txid# 281474976710661, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2026-01-04T02:48:20.960843Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332792970525505:2581] txid# 281474976710662, issues: { message: "Mix freeze cmd with other options is forbidden" severity: 1 } Error 128: Mix freeze cmd with other options is forbidden 2026-01-04T02:48:23.727783Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332808074437551:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:23.727844Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:23.777021Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:23.949880Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:23.959818Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:23.965929Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332808074437512:2081] 1767494903718185 != 1767494903718188 2026-01-04T02:48:23.984754Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:23.985019Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:24.008137Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:24.129170Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.386811Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:24.406308Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:24.422264Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:24.452140Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SuffixColumns [GOOD] Test command err: 2026-01-04T02:48:19.785479Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332789168328586:2247];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.785534Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:20.134009Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.141541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.141633Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.235554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.253924Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332789168328377:2081] 1767494899765387 != 1767494899765390 2026-01-04T02:48:20.265480Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.426607Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.750836Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:20.750860Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:20.750873Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:20.750953Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:20.766570Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:21.375164Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:21.382112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:21.478893Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:23.983712Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332806092609132:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:23.990939Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:23.993580Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:24.184220Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.242900Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:24.254209Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:24.254265Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:24.256634Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332806092608912:2081] 1767494903935027 != 1767494903935030 2026-01-04T02:48:24.263875Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:24.393255Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.447936Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:24.447962Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:24.447973Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:24.448055Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:24.714890Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:24.726614Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:24.736036Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at 
schemeshard: 72057594046644480 2026-01-04T02:48:24.746151Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.925420Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:25.304280Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553163, Sender [2:7591332814682545048:2472], Recipient [2:7591332810387577089:2299]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 39 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\002\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3\010\000\000\000B\000\000\000\000\000\000\000" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 6 MaxKeys: 10 2026-01-04T02:48:25.304319Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3169: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2026-01-04T02:48:25.304540Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3") (type:4, value:"B\0\0\0\0\0\0\0")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-04T02:48:25.304727Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 77, String : ) 2026-01-04T02:48:25.304773Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 88, String : ) 2026-01-04T02:48:25.304802Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Shoot to Thrill.mp3" -> (Utf8 : Music/AC DC/Shoot to Thrill.mp3, Uint64 : 666, String : ) 2026-01-04T02:48:25.304832Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, String : ) 2026-01-04T02:48:25.304858Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, String : ) 2026-01-04T02:48:25.304926Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 5 common prefixes: 0 2026-01-04T02:48:25.320495Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553163, Sender [2:7591332814682545052:2473], Recipient [2:7591332810387577089:2299]: NKikimrTxDataShard.TEvObjectStorageListingRequest TableId: 39 SerializedKeyPrefix: "\002\000\010\000\000\0002\000\000\000\000\000\000\000\010\000\000\000Bucket50" PathColumnPrefix: "Music/AC DC/" 
PathColumnDelimiter: "/" SerializedStartAfterKeySuffix: "\001\000\037\000\000\000Music/AC DC/Shoot to Thrill.mp3" ColumnsToReturn: 3 ColumnsToReturn: 4 ColumnsToReturn: 5 MaxKeys: 10 2026-01-04T02:48:25.320541Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3169: StateWork, processing event TEvDataShard::TEvObjectStorageListingRequest 2026-01-04T02:48:25.320702Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037888 S3 Listing: start at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC/Shoot to Thrill.mp3")), end at key ((type:4, value:"2\0\0\0\0\0\0\0") (type:4608, value:"Bucket50") (type:4608, value:"Music/AC DC0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-04T02:48:25.320893Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 1, Uint64 : 10) 2026-01-04T02:48:25.320947Z node 2 :TX_DATASHARD TRACE: datashard__object_storage_listing.cpp:240: 72075186224037888 S3 Listing: "Music/AC DC/Thunderstruck.mp3" -> (Utf8 : Music/AC DC/Thunderstruck.mp3, Uint64 : 66, Uint64 : 10) 2026-01-04T02:48:25.321022Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037888 S3 Listing: finished status: 0 description: "" contents: 2 common prefixes: 0 |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TSchemeShardTest::CreateIndexedTableAndForceDropSimultaneously [GOOD] >> TSchemeShardTest::CreateTableWithUniformPartitioning ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapUpdateLocksOneShardRowExists [GOOD] Test command err: Trying to start YDB, gRPC: 22117, MsgBus: 4558 2026-01-04T02:47:13.515207Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332503987680511:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:13.515272Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:13.822261Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:13.835890Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:13.836034Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:13.910424Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:13.911161Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332503987680483:2081] 1767494833513798 != 1767494833513801 2026-01-04T02:47:13.924922Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:14.062047Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:14.062070Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty 
maybe) 2026-01-04T02:47:14.062077Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:14.062155Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:14.094890Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:14.522881Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:14.617679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:16.911551Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332516872583251:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:16.912175Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332516872583235:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:16.912247Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:16.912574Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332516872583274:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:16.912656Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:16.915747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:16.936078Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332516872583273:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:17.038792Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332516872583326:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:17.335387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:47:17.609449Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332521167550786:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:17.609699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332521167550786:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:17.609789Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332521167550787:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:17.609849Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332521167550787:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:17.610017Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332521167550787:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:17.610146Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332521167550787:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:17.610246Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332521167550787:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:17.610330Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332521167550787:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:17.610417Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332521167550787:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:17.610513Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037896;self_id=[1:7591332521167550787:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:17.610599Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332521167550787:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:47:17.610688Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332521167550787:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:47:17.610826Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332521167550787:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:47:17.610921Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332521167550787:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:47:17.611007Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332521167550787:2345];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:47:17.611446Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332521167550786:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:17.611566Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332521167550786:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:17.611633Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332521167550786:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:17.611719Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332521167550786:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:17.611802Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332521167550786:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:17.611858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332521167550786:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:17.611901Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037897;self_id=[1:7591332521167550786:2344];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLA ... cal=44;result=not_found; 2026-01-04T02:48:22.310719Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.310740Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.313665Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.313758Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.313778Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038023;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.321987Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.322070Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.322094Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.325235Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038058;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.325301Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038058;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.325320Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038058;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.332586Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038030;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.332664Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038030;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 
2026-01-04T02:48:22.332683Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038030;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.334025Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.334084Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.334101Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.342765Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.342995Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.343022Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.343468Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038046;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.343531Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038046;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.343549Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038046;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.352484Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038025;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.352561Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038025;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.352581Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038025;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.353105Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038021;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.353159Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038021;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.353179Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038021;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.367801Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038032;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.367890Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038032;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.367914Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038032;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:22.412437Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:48:22.412469Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:22.414352Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3efangc3cgsdtagxmn4t86", SessionId: ydb://session/3?node_id=4&id=NGI5ODA2MjAtNTE2MzYyZDEtZDU1MGQ4YzQtZDdiNmI2Ng==, Slow query, duration: 10.749626s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-04T02:48:23.176001Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037993;tx_state=TTxProgressTx::Execute;tx_current=281474976710668;tx_id=281474976710668;commit_tx_id=281474976710668;commit_lock_id=281474976710666;fline=manager.cpp:80;broken_lock_id=281474976710665; 2026-01-04T02:48:23.302720Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2088: SelfId: [4:7591332805479287732:3678], TxId: 281474976710669, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3efsckbjbvvmvm7ja1yj7r. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzY4YmUzZDUtNTcyZGY4ZDQtZTViMzI4Y2ItOWUyZGExZWU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. 
Sink[0] fatal error: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 } 2026-01-04T02:48:23.302829Z node 4 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [4:7591332805479287732:3678], TxId: 281474976710669, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3efsckbjbvvmvm7ja1yj7r. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=4&id=YzY4YmUzZDUtNTcyZGY4ZDQtZTViMzI4Y2ItOWUyZGExZWU=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED KIKIMR_LOCKS_INVALIDATED: {
: Error: Transaction locks invalidated. Table: `/Root/KV`, code: 2001 }. 2026-01-04T02:48:23.303992Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=YzY4YmUzZDUtNTcyZGY4ZDQtZTViMzI4Y2ItOWUyZGExZWU=, ActorId: [4:7591332801184320335:3648], ActorState: ExecuteState, LegacyTraceId: 01ke3efsckbjbvvmvm7ja1yj7r, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/KV`" issue_code: 2001 severity: 1 } trace_id# |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TFlatTest::Ls [GOOD] >> TFlatTest::LsPathId >> TFlatTest::WriteSplitKillRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootColumnShard [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:59.905693Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:59.982097Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:45:59.987784Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:59.988046Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:59.988141Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:00.315378Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:00.396770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:00.396902Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:00.443708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:00.516055Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:01.129663Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:01.130103Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:01.130146Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:01.130168Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:01.130519Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:01.192962Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:01.724996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:04.697620Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:04.706072Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:04.710011Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:04.742807Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:04.742928Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:04.772153Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:04.774320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:04.934448Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:04.934571Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:04.936038Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.936705Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.937263Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.938345Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.938626Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.938736Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.939018Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.939156Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.939273Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:04.965598Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:05.142247Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:05.190712Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:05.190795Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:05.214245Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:05.215554Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:05.215767Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:05.215833Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:05.215876Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:05.215923Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:05.215974Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:05.216022Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:05.216482Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:05.216715Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:46:05.218494Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:05.226334Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:46:05.226413Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:46:05.226526Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:05.238243Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:05.249206Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:05.253728Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:05.257305Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:05.257403Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:05.281231Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:46:05.281604Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:46:05.393886Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:05.433327Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:05.707281Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:05.709372Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:05.712945Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:05.716412Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:05.716531Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... echecking } 2026-01-04T02:46:33.089632Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4209:3768], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:33.127958Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4208:3767] txid# 281474976720660, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:33.353222Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4230:3781]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:33.353421Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:46:33.353491Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4232:3783] 2026-01-04T02:46:33.353538Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4232:3783] 2026-01-04T02:46:33.353795Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4233:3784] 2026-01-04T02:46:33.353979Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4233:3784], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:46:33.354036Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:46:33.354140Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4232:3783], server id = [2:4233:3784], tablet id = 72075186224037894, status = OK 2026-01-04T02:46:33.354202Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:46:33.354261Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4230:3781], StatRequests.size() = 1 2026-01-04T02:46:33.354411Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] 
EvPropagateStatisticsResponse, cookie: 18446744073709551615 ... blocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to TX_COLUMNSHARD_ACTOR cookie 0 ... waiting for TEvKqpScan (done) 2026-01-04T02:47:34.415355Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:2807:3025];ev=NActors::IEventHandle;fline=columnshard_impl.cpp:984;event=tablet_die; 2026-01-04T02:47:34.432478Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:219: SelfId: [2:4319:3864]. Got EvDeliveryProblem, TabletId: 72075186224037899, NotDelivered: 0, Starting 2026-01-04T02:47:34.432589Z node 2 :KQP_COMPUTE WARN: kqp_scan_fetcher_actor.cpp:619: SelfId: [2:4319:3864]. TKqpScanFetcherActor: broken pipe with tablet 72075186224037899, restarting scan from last received key , attempt #1 (total 1) schedule after 0.000000s 2026-01-04T02:47:34.565781Z node 2 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037899;self_id=[2:4391:3869];tablet_id=72075186224037899;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=20; ... blocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to TX_COLUMNSHARD_ACTOR cookie 0 2026-01-04T02:47:34.894575Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-04T02:47:34.894674Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 ... unblocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to ... unblocking NKikimr::TEvDataShard::TEvKqpScan from KQP_SCAN_FETCH_ACTOR to TX_COLUMNSHARD_ACTOR 2026-01-04T02:47:35.789087Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4085:2462], ActorId: [2:4095:3706], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:47:35.789366Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4085:2462], ActorId: [2:4095:3706], Start read next stream part 2026-01-04T02:47:35.789951Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3ecdm90hj3dh9hf9tm7e6z", SessionId: ydb://session/3?node_id=2&id=M2Q1NzE3ZTctM2U0Y2U5ZmMtYTdiMDI2ZTQtMzBmMTA1NTI=, Slow query, duration: 62.945273s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:47:35.791017Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4085:2462], ActorId: [2:4524:3982], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:47:35.795523Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32100, txId: 18446744073709551615] shutting down 2026-01-04T02:47:35.795774Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4085:2462], ActorId: [2:4524:3982], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-04T02:47:35.795940Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4085:2462], ActorId: [2:4524:3982], Start read next stream part 2026-01-04T02:47:35.799982Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4085:2462], ActorId: [2:4095:3706], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:47:35.800052Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4085:2462], ActorId: [2:4095:3706], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGUxYzZlN2EtODI2OTE5ZDktOWZkNzFjZDMtOWNkZjEyOGI=, TxId: 2026-01-04T02:47:35.971378Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4551:4005]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:47:35.971701Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:47:35.971780Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4551:4005], StatRequests.size() = 1 2026-01-04T02:48:27.670724Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4085:2462], ActorId: [2:4524:3982], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:48:27.670888Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4085:2462], ActorId: [2:4524:3982], Start read next stream part 2026-01-04T02:48:27.671817Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3eeb3kd7bbrc7m0248mav2", SessionId: ydb://session/3?node_id=2&id=ZDcxYjM3ZDgtMzZmMDQ5ZmItODAyZDQxNC02ZTJiZmFiMQ==, Slow query, duration: 51.870422s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:48:27.674211Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4708:4090], ActorId: [2:4709:4091], Starting query actor #1 [2:4710:4092] 2026-01-04T02:48:27.674298Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4709:4091], ActorId: [2:4710:4092], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:48:27.677475Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32160, txId: 18446744073709551615] shutting down 2026-01-04T02:48:27.678559Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4709:4091], ActorId: [2:4710:4092], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MjVmNzJhZmQtYWNjMmQzM2EtYTlhMjlmNDUtNWUyNjU4Mzg=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:48:27.681303Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4085:2462], ActorId: [2:4524:3982], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:48:27.681370Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4085:2462], ActorId: [2:4524:3982], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjczOTk0OTUtOTllZjg3MjAtYjdmZTM0NzItZWExNjJmOTk=, TxId: 2026-01-04T02:48:27.794171Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4727:4106]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:48:27.794563Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:48:27.794621Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4727:4106], StatRequests.size() = 1 2026-01-04T02:48:27.986959Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4709:4091], ActorId: [2:4710:4092], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjVmNzJhZmQtYWNjMmQzM2EtYTlhMjlmNDUtNWUyNjU4Mzg=, TxId: 2026-01-04T02:48:27.987059Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4709:4091], ActorId: [2:4710:4092], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjVmNzJhZmQtYWNjMmQzM2EtYTlhMjlmNDUtNWUyNjU4Mzg=, TxId: 2026-01-04T02:48:27.987495Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4708:4090], ActorId: [2:4709:4091], Got response [2:4710:4092] SUCCESS 2026-01-04T02:48:27.987909Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:48:28.015515Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:48:28.015615Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3156:3429] |94.9%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TFlatTest::AutoSplitBySize [GOOD] >> TFlatTest::AutoMergeBySize >> TFlatTest::CopyTableAndCompareColumnsSchema >> TFlatTest::CopyCopiedTableAndRead >> TFlatTest::CopyTableAndRead >> TFlatTest::SelectRangeForbidNullArgs2 >> TFlatTest::ShardFreezeUnfreezeAlreadySet >> TFlatTest::SelectRangeBytesLimit >> TFlatTest::Mix_DML_DDL >> TLocksTest::Range_GoodLock0 >> TFlatTest::LargeProxyReply >> TSchemeShardTest::CreateTableWithUniformPartitioning [GOOD] >> TSchemeShardTest::CreateTableWithSplitBoundaries >> AsyncIndexChangeExchange::ShouldNotReorderChangesOnRace [GOOD] >> Cdc::AreJsonsEqualReturnsTrueOnEqual [GOOD] >> Cdc::AreJsonsEqualReturnsFalseOnDifferent [GOOD] >> Cdc::AreJsonsEqualFailsOnWildcardInArray [GOOD] >> Cdc::AlterViaTopicService >> TLocksTest::CK_GoodLock >> TFlatTest::LsPathId [GOOD] >> TFlatTest::CopyTableAndReturnPartAfterCompaction >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] >> TFlatTest::WriteSplitKillRead [GOOD] >> TFlatTest::WriteSplitWriteSplit >> TSchemeShardTest::CreateTableWithSplitBoundaries [GOOD] >> TSchemeShardTest::CreateTableWithConfig >> TLocksFatTest::RangeSetBreak [GOOD] >> TLocksFatTest::RangeSetNotBreak >> TFlatTest::ShardFreezeUnfreezeAlreadySet [GOOD] >> TFlatTest::ShardFreezeUnfreeze >> TFlatTest::SelectRangeForbidNullArgs2 [GOOD] >> TFlatTest::SelectRangeForbidNullArgs3 >> TFlatTest::CopyCopiedTableAndRead [GOOD] >> TFlatTest::CopyTableAndAddFollowers >> TFlatTest::CopyTableAndRead [GOOD] >> TFlatTest::CopyTableAndDropOriginal >> TFlatTest::Mix_DML_DDL [GOOD] >> TFlatTest::OutOfDiskSpace [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LsPathId [GOOD] Test command err: 2026-01-04T02:48:27.222514Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332825451038940:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:27.223919Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:27.456567Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:27.464796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:27.464879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:27.468424Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:27.605692Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332825451038909:2081] 1767494907219332 != 1767494907219335 2026-01-04T02:48:27.608060Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:27.630113Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 
2026-01-04T02:48:27.871748Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:27.882785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:27.911845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:27.981963Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332825451039751:2527] txid# 281474976710659, issues: { message: "Check failed: path: \'/dc-1/Berkanavt\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges)" severity: 1 } Error 1: Check failed: path: '/dc-1/Berkanavt', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 38], type: EPathTypeDir, state: EPathStateNoChanges) 2026-01-04T02:48:27.993645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:48:31.081690Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:31.081828Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:48:31.188720Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332839290211112:2081] 1767494911013726 != 1767494911013729 2026-01-04T02:48:31.189234Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:31.202332Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:31.202445Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:31.208581Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:31.297921Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:31.428851Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:31.460485Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 |94.9%| 
[TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeBytesLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs1 >> AnalyzeColumnshard::Analyze [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitAndReadFromFollower [GOOD] Test command err: 2026-01-04T02:48:26.109585Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332821401537155:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:26.109651Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:26.390677Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:26.408369Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:26.408470Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:26.451941Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:26.453298Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332821401537127:2081] 1767494906108164 != 1767494906108167 2026-01-04T02:48:26.466350Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:26.613900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:26.695801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:26.701779Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:26.718659Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:26.791092Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:26.975964Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:26.977537Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:48:26.977570Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: 
[CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:48:27.091118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } } } TxId: 281474976710668 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-04T02:48:27.091405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710668:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } 2026-01-04T02:48:27.091688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-04T02:48:27.091732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-04T02:48:27.092020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-04T02:48:27.092039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710668:0 type: TxSplitTablePartition target path: [OwnerId: 72057594046644480, LocalPathId: 39] source path: 2026-01-04T02:48:27.092271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710668:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000d\000\000\000\000\000\000\200" ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\004\000\000\000d\000\000\000\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } 2026-01-04T02:48:27.092329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710668:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:48:27.093685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710668, response: Status: StatusAccepted TxId: 281474976710668 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2026-01-04T02:48:27.093787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710668, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2026-01-04T02:48:27.093938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480 2026-01-04T02:48:27.093964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 
281474976710668:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-01-04T02:48:27.094272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-04T02:48:27.094431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710668:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } BindedChannels { StoragePoolName: "/dc-1:test" StoragePoolKind: "test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-04T02:48:27.095000Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:2 msg type: 268697601 2026-01-04T02:48:27.095065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710668:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2026-01-04T02:48:27.095126Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710668, partId: 0, tablet: 72057594037968897 2026-01-04T02:48:27.095144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0 2026-01-04T02:48:27.095154Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:3, partId: 0 2026-01-04T02:48:27.097004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2026-01-04T02:48:27.097061Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:2, partId: 0 2026-01-04T02:48:27.097223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 2026-01-04T02:48:27.097256Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-04T02:48:27.097337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 2 TabletID: 72075186224037889 Origin: 72057594037968897 
2026-01-04T02:48:27.097536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2026-01-04T02:48:27.097555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 281474976710668, shardIdx: 72057594046644480:3, partId: 0 2026-01-04T02:48:27.098285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976710668:0, at schemeshard: 72057594046644480, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2026-01-04T02:48:27.098304Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-04T02:48:27.098352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 281474976710668:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2026-01-04T02:48:27.098420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710668:0 2 -> 3 2026-01-04T02:48:27.098699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 28147497671066 ... 6-01-04T02:48:32.050207Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715706 datashard 72075186224037890 state PreOffline 2026-01-04T02:48:32.050230Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2026-01-04T02:48:32.050851Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715706 datashard 72075186224037889 state PreOffline 2026-01-04T02:48:32.050893Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-04T02:48:32.051009Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715706 datashard 72075186224037891 state PreOffline 2026-01-04T02:48:32.051032Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037891 Got TEvSchemaChangedResult from SS at 72075186224037891 2026-01-04T02:48:32.050988Z node 3 :TX_DATASHARD DEBUG: datashard_loans.cpp:91: 72075186224037888 ack parts [ [72075186224037888:1:32:1:12288:402:0] ] return to tablet 72075186224037890 2026-01-04T02:48:32.051032Z node 3 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:48:32.051093Z node 3 :TX_DATASHARD DEBUG: datashard_loans.cpp:91: 72075186224037888 ack parts [ [72075186224037888:1:44:1:12288:227:0] ] return to tablet 72075186224037892 2026-01-04T02:48:32.051108Z node 3 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037888 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:48:32.051151Z node 3 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 
Initiating switch from PreOffline to Offline state 2026-01-04T02:48:32.054037Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037889 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:48:32.054102Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-01-04T02:48:32.055255Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:48:32.055299Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037891 Initiating switch from PreOffline to Offline state 2026-01-04T02:48:32.061395Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:48:32.061468Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:48:32.064342Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591332834672698256 RawX2: 4503608217307379 } TabletId: 72075186224037889 State: 4 2026-01-04T02:48:32.064403Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:48:32.064619Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591332838967666108 RawX2: 4503608217307450 } TabletId: 72075186224037891 State: 4 2026-01-04T02:48:32.064640Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:48:32.066753Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:32.066849Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:32.066950Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:32.066965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:32.068123Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-01-04T02:48:32.068155Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-04T02:48:32.072496Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-04T02:48:32.072853Z 
node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-01-04T02:48:32.073092Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-04T02:48:32.073238Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-04T02:48:32.073254Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-04T02:48:32.073394Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-01-04T02:48:32.077901Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-04T02:48:32.077965Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7591332838967665785:3013], serverId# [2:7591332838967665786:3014], sessionId# [0:0:0] 2026-01-04T02:48:32.078316Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-04T02:48:32.078411Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2026-01-04T02:48:32.078437Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2026-01-04T02:48:32.078765Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-04T02:48:32.079247Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-04T02:48:32.079266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-04T02:48:32.079302Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-04T02:48:32.079317Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-04T02:48:32.081649Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-01-04T02:48:32.081705Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# [2:7591332838967666321:3436], serverId# [2:7591332838967666323:3438], sessionId# [0:0:0] 2026-01-04T02:48:32.082169Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-04T02:48:32.082512Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-01-04T02:48:32.083034Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7591332838967665654:2929], serverId# [2:7591332838967665655:2930], sessionId# [0:0:0] 2026-01-04T02:48:32.083058Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server 
disconnected at leader tablet# 72075186224037889, clientId# [2:7591332834672698241:2842], serverId# [2:7591332834672698245:2843], sessionId# [0:0:0] 2026-01-04T02:48:32.083478Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-04T02:48:32.083606Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-04T02:48:32.085147Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-04T02:48:32.085200Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-04T02:48:32.108555Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-04T02:48:32.110406Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-04T02:48:32.116956Z node 3 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:48:32.117053Z node 3 :TX_DATASHARD DEBUG: datashard_loans.cpp:128: 72075186224037892 parts [ [72075186224037888:1:44:1:12288:227:0] ] return ack processed 2026-01-04T02:48:32.117094Z node 3 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037892 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:48:32.117143Z node 3 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037892 Initiating switch from PreOffline to Offline state 2026-01-04T02:48:32.118646Z node 3 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:48:32.118691Z node 3 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:48:32.118747Z node 3 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:48:32.118808Z node 3 :TX_DATASHARD DEBUG: datashard_loans.cpp:128: 72075186224037890 parts [ [72075186224037888:1:32:1:12288:402:0] ] return ack processed 2026-01-04T02:48:32.118833Z node 3 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:48:32.118867Z node 3 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TSchemeShardTest::CreateTableWithConfig [GOOD] >> TSchemeShardTest::CreateTableWithNamedConfig >> KqpSinkMvcc::OlapNamedStatement [GOOD] >> TLocksFatTest::ShardLocks [GOOD] >> TFlatTest::CopyTableAndReturnPartAfterCompaction [GOOD] >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::OutOfDiskSpace [GOOD] Test command err: 2026-01-04T02:48:33.135277Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332850743119825:2162];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:33.135477Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:33.204131Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:33.506529Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:33.506621Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:33.508144Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:33.520012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:33.678289Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:33.679919Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332850743119701:2081] 1767494913108182 != 1767494913108185 2026-01-04T02:48:33.784256Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:33.870377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:33.890231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:33.974516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:34.131771Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:48:34.148995Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; proxy error code: Unknown error:
: Error: Resolve failed for table: /dc-1/Table, error: column 'value' not exist, code: 200400 2026-01-04T02:48:34.162378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:48:34.185283Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:48:34.213154Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) proxy error code: Unknown error:
:5:24: Error: At function: AsList
:5:32: Error: At function: SetResult
:4:27: Error: At function: SelectRow
:4:27: Error: Mismatch of key columns count for table [/dc-1/Table], expected: 2, but got 1., code: 2028 >> TFlatTest::RejectByPerShardReadSize [GOOD] >> TFlatTest::RejectByPerRequestSize |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> AnalyzeColumnshard::AnalyzeStatus [GOOD] >> TFlatTest::WriteSplitWriteSplit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::Analyze [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:46:10.676483Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:10.746015Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:10.752543Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:10.752807Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:10.752912Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:11.128944Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:11.222333Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:11.222448Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:11.268890Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:11.354264Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:11.931460Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:11.931860Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:11.931892Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:11.931924Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:11.932249Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:11.992787Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:12.489210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:14.903520Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:14.905997Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:14.912516Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:14.936142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:14.936240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:14.973566Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:14.975224Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:15.121252Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:15.121396Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:15.123189Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:15.124136Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:15.125648Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:15.125784Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:15.126003Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:15.126161Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:15.126431Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:15.126676Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:15.126809Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:15.152110Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:15.300820Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:15.334042Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:15.334120Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:15.358189Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:15.359426Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:15.359586Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:15.359634Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:15.359669Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:15.359722Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:15.359780Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:15.359818Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:15.360345Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:15.376150Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:15.376240Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2120:2587], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:15.419309Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2156:2616] 2026-01-04T02:46:15.419876Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2156:2616], schemeshard id = 72075186224037897 2026-01-04T02:46:15.471146Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2230:2655] 2026-01-04T02:46:15.473552Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:15.486504Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2246:2665] Owner: [2:2240:2663]. Describe result: PathErrorUnknown 2026-01-04T02:46:15.486572Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2246:2665] Owner: [2:2240:2663]. Creating table 2026-01-04T02:46:15.486644Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2246:2665] Owner: [2:2240:2663]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:15.496343Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2321:2700], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:15.503440Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:15.520320Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2246:2665] Owner: [2:2240:2663]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:15.520459Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2246:2665] Owner: [2:2240:2663]. Subscribe on create table tx: 281474976720657 2026-01-04T02:46:15.536200Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2246:2665] Owner: [2:2240:2663]. 
Subscribe on tx: 281474976720657 registered 2026-01-04T02:46:15.691060Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:15.717497Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:15.945828Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2246:2665] Owner: [2:2240:2663]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-04T02:46:16.063333Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2246:2665] Owner: [2:2240:2663]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:46:16.063410Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2246:2665] Owner: [2:2240:2663]. Column diff is empty, finishing 2026-01-04T02:46:16.633034Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... default not found or you don't have access permissions } 2026-01-04T02:46:39.870841Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:4125:3716], DatabaseId: /Root/Database, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:39.871195Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/Database, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:46:39.877979Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4141:3720], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:39.881600Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976720658:2, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:46:39.939029Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:4123:3714], DatabaseId: /Root/Database, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976720658 completed, doublechecking } 2026-01-04T02:46:40.251766Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:4210:3762], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:40.286874Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:4209:3761] txid# 281474976720659, issues: { message: "Check failed: path: \'/Root/Database/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 37], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:46:40.590279Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4231:3775]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:40.590645Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:46:40.590786Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4233:3777] 2026-01-04T02:46:40.590861Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4233:3777] 2026-01-04T02:46:40.591302Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4234:3778] 2026-01-04T02:46:40.591524Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4234:3778], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:46:40.591611Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:46:40.591860Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4233:3777], server id = [2:4234:3778], tablet id = 72075186224037894, status = OK 2026-01-04T02:46:40.591964Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:46:40.592087Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4231:3775], StatRequests.size() = 1 2026-01-04T02:46:40.592406Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:47:43.451574Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4086:2462], ActorId: [2:4096:3700], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:47:43.451865Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4086:2462], ActorId: [2:4096:3700], Start read next stream part 2026-01-04T02:47:43.452080Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3ecmfh7t53qdn1d3wq9yzc", SessionId: ydb://session/3?node_id=2&id=ODBhZDUzNzktODgyOTUxNjgtYjVlNDEzZTEtM2YzYTQ5MjA=, Slow query, duration: 63.587758s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:47:43.453398Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4086:2462], ActorId: [2:4409:3877], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:47:43.456687Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32100, txId: 18446744073709551615] shutting down 2026-01-04T02:47:43.457018Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4086:2462], ActorId: [2:4409:3877], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-04T02:47:43.457142Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4086:2462], ActorId: [2:4409:3877], Start read next stream part 2026-01-04T02:47:43.461036Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4086:2462], ActorId: [2:4096:3700], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:47:43.461103Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4086:2462], ActorId: [2:4096:3700], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Njc0ODRiODMtYWY5OTU1ZmEtZmMxYTAyZTItZjcyYTE5NzY=, TxId: 2026-01-04T02:47:43.649413Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4436:3900]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:47:43.649695Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:47:43.649744Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4436:3900], StatRequests.size() = 1 2026-01-04T02:48:35.147833Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4086:2462], ActorId: [2:4409:3877], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:48:35.147976Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4086:2462], ActorId: [2:4409:3877], Start read next stream part 2026-01-04T02:48:35.148811Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3eejk19an8gjc9tbbmk4fw", SessionId: ydb://session/3?node_id=2&id=ZDU4ZTVkYWEtZGJmMTA0NTItZjg1NTkzMDItNjI5ZmM5Y2Y=, Slow query, duration: 51.686026s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:48:35.151487Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4590:3983], ActorId: [2:4591:3984], Starting query actor #1 [2:4592:3985] 2026-01-04T02:48:35.151567Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4591:3984], ActorId: [2:4592:3985], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:48:35.154372Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32160, txId: 18446744073709551615] shutting down 2026-01-04T02:48:35.155369Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4591:3984], ActorId: [2:4592:3985], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MmMxZDA5YTItNTgzNTAxZTYtNWE5ZmNmOGQtYmE4MGE5OTE=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:48:35.157970Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4086:2462], ActorId: [2:4409:3877], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:48:35.158055Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4086:2462], ActorId: [2:4409:3877], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmJhNjhmMjYtZTNkM2FiM2MtOTFiNGEwNy0zNTI2NTM2Mg==, TxId: 2026-01-04T02:48:35.228655Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4609:3999]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:48:35.228995Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:48:35.229043Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4609:3999], StatRequests.size() = 1 2026-01-04T02:48:35.385967Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4591:3984], ActorId: [2:4592:3985], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MmMxZDA5YTItNTgzNTAxZTYtNWE5ZmNmOGQtYmE4MGE5OTE=, TxId: 2026-01-04T02:48:35.386070Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4591:3984], ActorId: [2:4592:3985], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MmMxZDA5YTItNTgzNTAxZTYtNWE5ZmNmOGQtYmE4MGE5OTE=, TxId: 2026-01-04T02:48:35.386470Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4590:3983], ActorId: [2:4591:3984], Got response [2:4592:3985] SUCCESS 2026-01-04T02:48:35.386793Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:48:35.423368Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:48:35.423467Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3157:3434] |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] >> Cdc::AlterViaTopicService [GOOD] >> Cdc::Alter >> TSchemeShardTest::CreateTableWithNamedConfig [GOOD] >> TSchemeShardTest::CreateIndexedTableAfterBackup >> TLocksTest::Range_IncorrectDot1 [GOOD] >> TLocksTest::Range_IncorrectDot2 >> TLocksTest::Range_BrokenLockMax [GOOD] >> TLocksTest::Range_CorrectDot >> TLocksTest::UpdateLockedKey [GOOD] >> TLocksTest::SetLockNothing >> TLocksTest::CK_Range_BrokenLock [GOOD] >> TLocksTest::CK_Range_BrokenLockInf >> TLocksTest::BrokenSameKeyLock [GOOD] >> TLocksTest::BrokenSameShardLock >> TFlatTest::LargeProxyReply [GOOD] >> TFlatTest::LargeProxyReplyRW >> TFlatTest::ShardUnfreezeNonFrozen >> TFlatTest::ShardFreezeUnfreeze [GOOD] >> TLocksTest::Range_IncorrectNullDot1 [GOOD] >> TLocksTest::Range_IncorrectNullDot2 >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] >> TLocksTest::BrokenLockErase [GOOD] >> TLocksTest::BrokenDupLock ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkMvcc::OlapNamedStatement [GOOD] Test command err: Trying to start YDB, gRPC: 5375, MsgBus: 28870 2026-01-04T02:47:13.088449Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332506444444012:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:13.088517Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:13.277889Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:13.279972Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:13.280072Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:13.300856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:13.375403Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:13.376677Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332506444443983:2081] 1767494833086904 != 1767494833086907 2026-01-04T02:47:13.426643Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:13.426681Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:13.426692Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:13.426809Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:13.576914Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:13.884388Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:13.910242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:14.096641Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:16.175961Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332519329346759:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:16.175979Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332519329346745:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:16.176109Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:16.176455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332519329346772:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:16.176524Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:16.178803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:16.188081Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332519329346771:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:16.323793Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332519329346824:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:16.605539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:47:16.767204Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332519329346996:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:16.767530Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332519329346996:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:16.767755Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332519329346996:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:16.767858Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332519329346996:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:16.767963Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332519329346996:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:16.768055Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332519329346996:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:16.768150Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332519329346996:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:16.768265Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332519329346996:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:16.768358Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332519329346996:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:47:16.768453Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037897;self_id=[1:7591332519329346996:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:47:16.768537Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332519329346996:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:47:16.768639Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332519329346996:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:47:16.768724Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332519329346996:2341];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:47:16.771382Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332519329346997:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:16.771440Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332519329346997:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:16.771574Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332519329346997:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:16.771665Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332519329346997:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:16.771784Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332519329346997:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:16.771883Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332519329346997:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:16.771979Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332519329346997:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:16.772067Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037896;self_id=[1:7591332519329346997:2342];tablet_id=72075186224037896;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:16.772155Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841 ... 
10662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.548031Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.548037Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.548057Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.548058Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037999;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.561270Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.561270Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038037;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.561336Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038037;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.561350Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.561400Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038037;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.561411Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038005;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.574442Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.574442Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.574540Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.574550Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.574570Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038069;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.574573Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.586949Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.586949Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.587011Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.587028Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.587036Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038027;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.587050Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.600119Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.600211Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.600237Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.601008Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.601072Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.601094Z node 4 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.613015Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.613102Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.613127Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.613779Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.613856Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.613902Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.626065Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.626153Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.626177Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038081;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.626268Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.626334Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.626376Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:29.695313Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3efgcg5sepyag5c5e30eme", SessionId: ydb://session/3?node_id=4&id=ZDg2Mzg4MzEtZTNjNGFkNjQtNTZiNjUyMGUtZDhhNDY3ZTg=, Slow query, duration: 11.822901s, status: STATUS_CODE_UNSPECIFIED, user: 
UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TFlatTest::CopyTableAndAddFollowers [GOOD] >> TFlatTest::CopyCopiedTableAndDropFirstCopy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::ShardLocks [GOOD] Test command err: 2026-01-04T02:48:19.718173Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332790249253684:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.718475Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:20.161133Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.167070Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.210810Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.255726Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.315955Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.495834Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.624892Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:20.646121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:20.674135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:20.738182Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:20.797481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.989107Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:21.040106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.719001Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332790249253684:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:24.719084Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:25.308209Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332815456017615:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:25.308242Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:25.341715Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:25.443068Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:25.444329Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332815456017592:2081] 1767494905284953 != 1767494905284956 2026-01-04T02:48:25.465922Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:25.465993Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:25.480312Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:25.543741Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:25.650486Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:25.654383Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:25.667619Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:25.717769Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:25.767325Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:26.317619Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:30.021775Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:48:30.041581Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:30.084433Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:30.084516Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:30.093986Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332830218653077:2081] 1767494909920717 != 1767494909920720 2026-01-04T02:48:30.099956Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:30.122716Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:30.280187Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:30.362988Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:30.374191Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:30.394240Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:30.413598Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:30.481143Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:30.534229Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:34.141868Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332854712712170:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:34.141934Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:34.160165Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:34.353147Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:34.353243Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:34.353877Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:34.355582Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:34.361917Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591332854712712039:2081] 1767494914119545 != 1767494914119548 2026-01-04T02:48:34.388543Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:34.612996Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:34.696680Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:34.708402Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:34.727048Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:34.738191Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:34.829179Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:34.880974Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksTest::Range_CorrectNullDot [GOOD] >> TLocksTest::Range_EmptyKey >> BasicUsage::PropagateSessionClosed >> TFlatTest::CopyTableAndDropOriginal [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeStatus [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:46:08.340452Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:08.421543Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:08.427760Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:08.428003Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:08.428100Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:08.767132Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:08.857517Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:08.857629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:08.905001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:08.984693Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:09.584199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:09.584864Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:09.584896Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:09.584919Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:09.585204Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:09.647594Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:10.170415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:12.871630Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:12.878554Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:12.880723Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:12.904726Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:12.904808Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:12.931931Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:12.932933Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:13.103690Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:13.103796Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:13.105014Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:13.105552Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:13.106019Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:13.107158Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:13.107374Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:13.107628Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:13.107787Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:13.107956Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:13.108128Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:13.123330Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:13.281575Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:13.315388Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:13.315487Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:13.344506Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:13.345023Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:13.345300Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:13.345379Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:13.345432Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:13.345489Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:13.345556Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:13.345643Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:13.346527Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:13.366256Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:13.366354Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2124:2590], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:13.387595Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2162:2622] 2026-01-04T02:46:13.388583Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2162:2622], schemeshard id = 72075186224037897 2026-01-04T02:46:13.456633Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2245:2663] 2026-01-04T02:46:13.458063Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:13.467225Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2252:2669] Owner: [2:2251:2668]. Describe result: PathErrorUnknown 2026-01-04T02:46:13.467277Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2252:2669] Owner: [2:2251:2668]. Creating table 2026-01-04T02:46:13.467352Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2252:2669] Owner: [2:2251:2668]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:13.473862Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2304:2692], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:13.476948Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:13.483063Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2252:2669] Owner: [2:2251:2668]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:13.483161Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2252:2669] Owner: [2:2251:2668]. Subscribe on create table tx: 281474976720657 2026-01-04T02:46:13.493010Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2252:2669] Owner: [2:2251:2668]. 
Subscribe on tx: 281474976720657 registered 2026-01-04T02:46:13.674137Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:13.710655Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:14.003583Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2252:2669] Owner: [2:2251:2668]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-04T02:46:14.139040Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2252:2669] Owner: [2:2251:2668]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:46:14.139133Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2252:2669] Owner: [2:2251:2668]. Column diff is empty, finishing 2026-01-04T02:46:14.887406Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... :1273: SyncNode(), pipe client id = [2:4238:3783] 2026-01-04T02:46:40.859474Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4239:3784] 2026-01-04T02:46:40.859677Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4239:3784], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:46:40.859771Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:46:40.859904Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4238:3783], server id = [2:4239:3784], tablet id = 72075186224037894, status = OK 2026-01-04T02:46:40.860016Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:46:40.860096Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4236:3781], StatRequests.size() = 1 2026-01-04T02:46:40.860329Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:47:43.827463Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4093:2464], ActorId: [2:4103:3707], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:47:43.827719Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4093:2464], ActorId: [2:4103:3707], Start read next stream part 2026-01-04T02:47:43.828360Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4093:2464], ActorId: [2:4414:3883], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:47:43.831058Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3ecmq77je9r9fey77pa03v", SessionId: ydb://session/3?node_id=2&id=NmMyMDJlZTctNDVlYzBlOWQtMTQxNmQxNzItNTIzNzJmZjY=, Slow query, duration: 63.720232s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:47:43.832525Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4093:2464], ActorId: [2:4414:3883], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-04T02:47:43.832654Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4093:2464], ActorId: [2:4414:3883], Start read next stream part 2026-01-04T02:47:43.832795Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32100, txId: 18446744073709551615] shutting down 2026-01-04T02:47:43.836029Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4093:2464], ActorId: [2:4103:3707], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:47:43.836100Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4093:2464], ActorId: [2:4103:3707], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NzdkM2U3MjMtMTY2YzE1ZTgtYjVmNzBjNDQtZWYwYTJhY2Q=, TxId: 2026-01-04T02:47:43.936915Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4441:3906]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:47:43.937187Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:47:43.937239Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4441:3906], StatRequests.size() = 1 2026-01-04T02:48:36.884609Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4093:2464], ActorId: [2:4414:3883], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:48:36.884764Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4093:2464], ActorId: [2:4414:3883], Start read next stream part 2026-01-04T02:48:36.884981Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3eejyrdar02vsqpe159h27", SessionId: ydb://session/3?node_id=2&id=M2JkYWU3MTktOWQ2ZDBkMDAtZWE3ZDQ5MTEtZDUzZWRlODg=, Slow query, duration: 53.047493s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM 
`/Root/Database/Table`", parameters: 0b 2026-01-04T02:48:36.887758Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4595:3989], ActorId: [2:4596:3990], Starting query actor #1 [2:4597:3991] 2026-01-04T02:48:36.887841Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4596:3990], ActorId: [2:4597:3991], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:48:36.890674Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32190, txId: 18446744073709551615] shutting down 2026-01-04T02:48:36.891840Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4596:3990], ActorId: [2:4597:3991], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=ZWYzOTE4NjktYTE3N2ZhMDktYWRmZDMxZGItNmRlMjdlMDE=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:48:36.892602Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4093:2464], ActorId: [2:4414:3883], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:48:36.892658Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4093:2464], ActorId: [2:4414:3883], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZDc5ZDhjODUtNDNhZmY0YWMtYjRlYWIwYzYtMzY3YWMxOTI=, TxId: 2026-01-04T02:48:36.974726Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4614:4005]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:48:36.975050Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:48:36.975099Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4614:4005], StatRequests.size() = 1 2026-01-04T02:48:37.143156Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4596:3990], ActorId: [2:4597:3991], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=ZWYzOTE4NjktYTE3N2ZhMDktYWRmZDMxZGItNmRlMjdlMDE=, TxId: 2026-01-04T02:48:37.143251Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4596:3990], ActorId: [2:4597:3991], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZWYzOTE4NjktYTE3N2ZhMDktYWRmZDMxZGItNmRlMjdlMDE=, TxId: ... blocking NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse from SQL_QUERY to NKikimr::TQueryRetryActor> const&, NKikimr::TPathId co... cookie 0 ... waiting for TEvSaveStatisticsQueryResponse (done) 2026-01-04T02:48:37.145270Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4631:4013] 2026-01-04T02:48:37.145986Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:505: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_IN_PROGRESS 2026-01-04T02:48:37.147393Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4633:4014]
---- StatisticsAggregator ----
Database: /Root/Database
BaseStatistics: 1
SchemeShards: 1
    72075186224037897
Nodes: 1
    2
RequestedSchemeShards: 1
    72075186224037897
FastCounter: 2
FastCheckInFlight: 1
FastSchemeShards: 0
FastNodes: 0
CurPropagationSeq: 0
PropagationInFlight: 0
PropagationSchemeShards: 0
PropagationNodes: 0
LastSSIndex: 0
PendingRequests: 0
ProcessUrgentInFlight: 0
Columns: 0
DatashardRanges: 0
CountMinSketches: 0
ScheduleTraversalsByTime: 2
  oldest table: [OwnerId: 72075186224037897, LocalPathId: 33], update time: 1970-01-01T00:00:00Z
ScheduleTraversalsBySchemeShard: 1
    72075186224037897
    [OwnerId: 72075186224037897, LocalPathId: 34], [OwnerId: 72075186224037897, LocalPathId: 33]
ForceTraversals: 1
    1970-01-01T00:00:04Z
NavigatePathId: 
ForceTraversalOperationId: operationId
  CreatedAt: 1970-01-01T00:00:04.039012Z
, ReplyToActorId: [1:1257:3030]
, Types: 1
, Tables size: 1
, Tables: 
    Table[0] PathId: [OwnerId: 72075186224037897, LocalPathId: 34]
        Status: AnalyzeStarted
        AnalyzedShards size: 0
        ColumnTags: 1,2
TraversalStartTime: 2026-01-04T02:46:40Z
TraversalDatabase: 
TraversalPathId: [OwnerId: 72075186224037897, LocalPathId: 34]
TraversalIsColumnTable: 0
TraversalStartKey: 
GlobalTraversalRound: 1
TraversalRound: 0
HiveRequestRound: 0
... unblocking NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse from to NKikimr::TQueryRetryActor> const&, NKikimr::TPathId co... 2026-01-04T02:48:37.148899Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4595:3989], ActorId: [2:4596:3990], Got response [2:4597:3991] SUCCESS 2026-01-04T02:48:37.149124Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:48:37.175866Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:48:37.175958Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:1257:3030] 2026-01-04T02:48:37.177602Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4637:4017] 2026-01-04T02:48:37.179019Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:505: [72075186224037894] Send TEvStatistics::TEvAnalyzeStatusResponse. Status STATUS_NO_OPERATION |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::WriteSplitWriteSplit [GOOD] Test command err: 2026-01-04T02:48:31.122078Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332840975437994:2215];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:31.122174Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:31.425925Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:31.490735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:31.490908Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:31.532911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:31.579330Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:31.637620Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:31.797030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:31.810041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:31.821723Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:31.925655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:32.125979Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:32.187207Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.013s,wait=0.011s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-04T02:48:32.202895Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.022s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-04T02:48:32.263089Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-04T02:48:32.272925Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-04T02:48:32.349002Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 8r (max 9), put Spent{time=0.011s,wait=0.005s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 2026-01-04T02:48:32.554616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } } } TxId: 281474976710680 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-04T02:48:32.555005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710680:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } 2026-01-04T02:48:32.555246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-04T02:48:32.555281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-04T02:48:32.555294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-01-04T02:48:32.555615Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 
72057594046644480, LocalPathId: 39] was 6 2026-01-04T02:48:32.555683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710680:0 type: TxSplitTablePartition target path: [OwnerId: 72057594046644480, LocalPathId: 39] source path: 2026-01-04T02:48:32.556041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976710680:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000d\000\000\000" ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000d\000\000\000" KeyRangeEnd: "\001\000\004\000\000\000\310\000\000\000" ShardIdx: 4 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000\310\000\000\000" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" ShardIdx: 5 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 100 } } } } SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 200 } } } } 2026-01-04T02:48:32.556084Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710680:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:48:32.557331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710680, response: Status: StatusAccepted TxId: 281474976710680 SchemeshardId: 72057594046644480, at schemeshard: 72057594046644480 2026-01-04T02:48:32.557563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710680, subject: , status: StatusAccepted, operation: ALTER TABLE PARTITIONS, path: /dc-1/Dir/TableOld 2026-01-04T02:48:32.557716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710680:0, at schemeshard: 72057594046644480 2026-01-04T02:48:32.557748Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710680:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-01-04T02:48:32.558137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-04T02:48:32.558297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-04T02:48:32.558411Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710680:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 5 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-04T02:48:32.558738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:3 msg type: 268697601 2026-01-04T02:48:32.558819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:4 msg type: 268697601 2026-01-04T02:48:32.558844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710680:0 from tablet: 72057594046644480 to tablet: 72057594037968897 cookie: 72057594046644480:5 msg type: 268697601 2026-01-04T02:48:32.558891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710680, partId: 0, tablet: 72057594037968897 2026-01-04T02:48:32.558930Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:3, partId: 0 2026-01-04T02:48:32.558942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:4, partId: 0 2026-01-04T02:48:32.558948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:5, partId: 0 2026-01-04T02:48:32.561946Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72057594046644480 message: Status: OK Owner: 72057594046644480 OwnerIdx: 3 TabletID: 72075186224037890 Origin: 72057594037968897 2026-01-04T02:48:32.561976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 281474976710680, shardIdx: 72057594046644480:3, partId: 0 2026-01-04T02:48:32.562145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply> KqpSinkLocks::EmptyRangeAlreadyBrokenOlap [GOOD] Test command err: Trying to start YDB, gRPC: 15157, MsgBus: 21394 2026-01-04T02:47:47.744267Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332650636372073:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:47.744387Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:47.780598Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:48.124754Z node 1 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:48.124845Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:48.166773Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:48.190765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:48.195751Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:48.384747Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:48.421674Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:48.421694Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:48.421701Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:48.421818Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:48.761969Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:48.973427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:51.180173Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332667816241980:2321], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:51.180346Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:51.180414Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332667816242016:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:51.184965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:51.185175Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332667816242019:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:51.185265Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:51.198997Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332667816242020:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:51.343428Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332667816242072:2533] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:51.656292Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:47:51.849799Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332667816242250:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:51.850039Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332667816242250:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:51.850288Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332667816242250:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:51.850437Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332667816242250:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:51.850528Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332667816242250:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:51.850614Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332667816242250:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:51.850698Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332667816242250:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:51.850795Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332667816242250:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:51.850880Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332667816242250:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:47:51.851010Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037897;self_id=[1:7591332667816242250:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:47:51.851100Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332667816242250:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:47:51.851183Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332667816242250:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:47:51.851271Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037897;self_id=[1:7591332667816242250:2345];tablet_id=72075186224037897;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:47:51.855429Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332667816242230:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:51.855492Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332667816242230:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:51.855672Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332667816242230:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:51.855796Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332667816242230:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:51.855894Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332667816242230:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:51.855972Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332667816242230:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:51.856061Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332667816242230:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:51.856160Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332667816242230:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:51.856275Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037893;self_id=[1:7591332667816242230:2337];tablet_id=72075186224037893;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=norm ... 
091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.516975Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.516976Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.527855Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038034;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.527935Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038034;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.527957Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038034;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.528003Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.528085Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.528123Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038083;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.539527Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.539630Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.539657Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038001;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.541120Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038019;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.541188Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038019;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.541224Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038019;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.550762Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.550842Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.550863Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.551305Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.551372Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.551393Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038035;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.562054Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.562128Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.562150Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:33.621003Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3efncve4xmtx14ed3ngqvv", SessionId: ydb://session/3?node_id=4&id=Y2Q3YTkwNzUtNWFjZDE3MWYtNjRkNmQyMjItNjI0M2FjN2E=, Slow query, duration: 11.376621s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n 
UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-04T02:48:36.114596Z node 4 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[4:7591332800980748091:2341];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=tablet lock have another internal generation counter: 18446744073709551615 != 0;tx_id=281474976715671; 2026-01-04T02:48:36.116965Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4391: SelfId: [4:7591332856815332670:3648], SessionActorId: [4:7591332852520364851:3648], Got LOCKS BROKEN for table. ShardID=72075186224037891, Sink=[4:7591332856815332670:3648].{
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } 2026-01-04T02:48:36.117111Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [4:7591332856815332670:3648], SessionActorId: [4:7591332852520364851:3648], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 . sessionActorId=[4:7591332852520364851:3648]. 2026-01-04T02:48:36.117306Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=4&id=OWYwNTliMjItZmE1ODYxNjAtNmZhMDEwMWQtMmJiN2RkOTY=, ActorId: [4:7591332852520364851:3648], ActorState: ExecuteState, LegacyTraceId: 01ke3eg5e5074j00tcxez5qxnw, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7591332861110300161:3648] from: [4:7591332856815332670:3648] trace_id# 2026-01-04T02:48:36.117394Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [4:7591332861110300161:3648] TxId: 281474976715671. Ctx: { TraceId: 01ke3eg5e5074j00tcxez5qxnw, Database: /Root, SessionId: ydb://session/3?node_id=4&id=OWYwNTliMjItZmE1ODYxNjAtNmZhMDEwMWQtMmJiN2RkOTY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } } trace_id# 2026-01-04T02:48:36.117676Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=OWYwNTliMjItZmE1ODYxNjAtNmZhMDEwMWQtMmJiN2RkOTY=, ActorId: [4:7591332852520364851:3648], ActorState: ExecuteState, LegacyTraceId: 01ke3eg5e5074j00tcxez5qxnw, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "tablet lock have another internal generation counter: 18446744073709551615 != 0" issue_code: 2001 severity: 1 } } trace_id#
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 2026-01-04T02:48:36.128855Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-04T02:48:36.129325Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-04T02:48:36.129746Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-04T02:48:36.130173Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-04T02:48:36.130564Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-04T02:48:36.130945Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-04T02:48:36.131323Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-04T02:48:36.132168Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; 2026-01-04T02:48:36.132605Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976715671; |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> RetryPolicy::TWriteSession_RetryOnTargetCluster [GOOD] >> TLocksTest::SetLockFail >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreeze [GOOD] Test command err: 2026-01-04T02:48:32.892609Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332845503744228:2208];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:32.893114Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:33.273880Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:33.285465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:33.285568Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:33.358176Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:33.373946Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332845503744057:2081] 1767494912842975 != 1767494912842978 2026-01-04T02:48:33.379029Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:33.506344Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:33.654304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:33.660715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:33.709462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:33.884715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:48:33.904493Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332849798712306:2594] txid# 281474976710660, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2026-01-04T02:48:33.906730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:48:33.921323Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332849798712344:2626] txid# 281474976710662, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2026-01-04T02:48:33.930039Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:36.500420Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:36.501370Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332862311633871:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:36.501439Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:36.641877Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:36.654405Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:36.654468Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:36.680893Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2026-01-04T02:48:36.685559Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:36.689338Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332862311633636:2081] 1767494916450895 != 1767494916450898 2026-01-04T02:48:36.968594Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:36.978761Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:36.983219Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:36.992593Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:37.050204Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:48:37.087746Z node 2 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710660: 2026-01-04T02:48:37.088090Z node 2 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [2:7591332866606601889:2596] txid# 281474976710660 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-01-04T02:48:37.088159Z node 2 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [2:7591332866606601889:2596] txid# 281474976710660 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-01-04T02:48:37.088179Z node 2 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [2:7591332866606601889:2596] txid# 281474976710660 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2026-01-04T02:48:37.091219Z node 2 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710661: 2026-01-04T02:48:37.091506Z node 2 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [2:7591332866606601897:2601] txid# 281474976710661 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-01-04T02:48:37.091556Z node 2 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [2:7591332866606601897:2601] txid# 281474976710661 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-01-04T02:48:37.091569Z node 2 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [2:7591332866606601897:2601] txid# 
281474976710661 invalidateDistCache: 0 DIE TDataReq MarkShardError TabletsLeft# 1 2026-01-04T02:48:37.098861Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs3 [GOOD] Test command err: 2026-01-04T02:48:32.778073Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332846056926442:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:32.778723Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:33.166304Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:33.166459Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:33.202110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:33.300751Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:33.314982Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:33.317935Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332846056926334:2081] 1767494912770334 != 1767494912770337 2026-01-04T02:48:33.483919Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:33.589609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:33.617952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:33.635621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:33.684745Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:33.790076Z node 1 :TX_CONVEYOR 
ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:36.445180Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332864073949960:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:36.445225Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:36.465273Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:36.636029Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:36.642085Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:36.642163Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:36.653749Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332864073949937:2081] 1767494916439847 != 1767494916439850 2026-01-04T02:48:36.667073Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:36.678548Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:36.933916Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:36.940206Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:36.954358Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:36.986142Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:36.996660Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TSchemeShardTest::CreateIndexedTableAfterBackup [GOOD] >> TSchemeShardTest::CreatePersQueueGroup >> TObjectStorageListingTest::TestFilter >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> 
TFlatTest::CopyTableAndDropOriginal [GOOD] Test command err: 2026-01-04T02:48:32.785067Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332843526480842:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:32.794237Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:32.817034Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:33.227678Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:33.227774Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:33.229669Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:33.240993Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:33.241472Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:33.527291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:33.532855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:33.540295Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:33.608123Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:33.799465Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:33.817385Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.003s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-04T02:48:33.825508Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-04T02:48:33.881750Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.012s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 
+0, (1573 647 6413)b }, ecr=1.000 2026-01-04T02:48:33.900658Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.026s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2026-01-04T02:48:34.026653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976715676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-04T02:48:34.027132Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_copy_table.cpp:431: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976715676:0, at schemeshard: 72057594046644480 2026-01-04T02:48:34.027674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 38], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-04T02:48:34.027728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 0 2026-01-04T02:48:34.027743Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-04T02:48:34.027760Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976715676:0 type: TxCopyTable target path: [OwnerId: 72057594046644480, LocalPathId: 40] source path: [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-04T02:48:34.028050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-04T02:48:34.028070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-04T02:48:34.028215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2026-01-04T02:48:34.028336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:48:34.029036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-04T02:48:34.029072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 4 2026-01-04T02:48:34.029683Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715676, response: Status: StatusAccepted TxId: 281474976715676 SchemeshardId: 72057594046644480 PathId: 40, at schemeshard: 72057594046644480 2026-01-04T02:48:34.029907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, 
path: /dc-1/Dir/Table 2026-01-04T02:48:34.030087Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T02:48:34.030100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-04T02:48:34.030228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715676, path id: [OwnerId: 72057594046644480, LocalPathId: 40] 2026-01-04T02:48:34.030313Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-04T02:48:34.030336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7591332847821448597:2249], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 38 2026-01-04T02:48:34.030350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7591332847821448597:2249], at schemeshard: 72057594046644480, txId: 281474976715676, path id: 40 2026-01-04T02:48:34.030388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715676:0, at schemeshard: 72057594046644480 2026-01-04T02:48:34.030416Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2026-01-04T02:48:34.030712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-04T02:48:34.030828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976715676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-04T02:48:34.034394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2026-01-04T02:48:34.034494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715676 2026-01-04T02:48:34.034509Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715676 2026-01-04T02:48:34.034527Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715676, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 6 2026-01-04T02:48:34.034543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 3 2026-01-04T02:48:34.034734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 40 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2026-01-04T02:48:34.034771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 40 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976715676 2026-01-04T02:48:34.034797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715676 2026-01-04T02:48:34.034842Z node 1 :FLAT_TX_SCHEMESHARD IN ... tate Offline 2026-01-04T02:48:37.792974Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591332868070433509 RawX2: 4503608217307432 } TabletId: 72075186224037890 State: 4 2026-01-04T02:48:37.793001Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:48:37.793074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591332868070433509 RawX2: 4503608217307432 } TabletId: 72075186224037890 State: 4 2026-01-04T02:48:37.793093Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:48:37.793273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:37.793294Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:37.793296Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-01-04T02:48:37.793343Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2026-01-04T02:48:37.793344Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:37.793355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:37.793597Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:37.793615Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:37.793617Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-04T02:48:37.793655Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-04T02:48:37.793675Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:37.793685Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:37.794209Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-04T02:48:37.794315Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-01-04T02:48:37.794492Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-04T02:48:37.794698Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-01-04T02:48:37.794715Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-04T02:48:37.794970Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-04T02:48:37.795081Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-04T02:48:37.795325Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-04T02:48:37.795341Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-04T02:48:37.795364Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-04T02:48:37.795497Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-04T02:48:37.795712Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-04T02:48:37.795857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-01-04T02:48:37.795973Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 
72057594046644480, LocalPathId: 39] was 1 2026-01-04T02:48:37.796099Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-04T02:48:37.796215Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-04T02:48:37.796309Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-04T02:48:37.796429Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3665: Client pipe to tablet 72075186224037891 from 72075186224037889 is reset 2026-01-04T02:48:37.796454Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-04T02:48:37.796473Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-04T02:48:37.796477Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7591332868070433331:2597], serverId# [2:7591332868070433332:2598], sessionId# [0:0:0] 2026-01-04T02:48:37.796489Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-04T02:48:37.796494Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-04T02:48:37.796504Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037888, clientId# [2:7591332868070433321:2590], serverId# [2:7591332868070433322:2591], sessionId# [0:0:0] 2026-01-04T02:48:37.796514Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-04T02:48:37.796544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-04T02:48:37.796561Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-04T02:48:37.796577Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-04T02:48:37.797310Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-04T02:48:37.797360Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-04T02:48:37.798771Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-04T02:48:37.798824Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-04T02:48:37.799695Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-04T02:48:37.799755Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 
tabletId 72075186224037889 2026-01-04T02:48:37.799776Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-04T02:48:37.799798Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-04T02:48:37.799810Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-04T02:48:37.800138Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-04T02:48:37.800173Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-04T02:48:37.800398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-04T02:48:37.800410Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-01-04T02:48:37.800432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-04T02:48:37.800461Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-04T02:48:37.800485Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-04T02:48:37.800508Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-04T02:48:37.921162Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:38.094711Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2026-01-04T02:48:38.095856Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2026-01-04T02:48:38.096321Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2026-01-04T02:48:38.096691Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksTest::NoLocksSet [GOOD] >> TLocksTest::MultipleLocks ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs1 [GOOD] Test command err: 2026-01-04T02:48:33.080209Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332848382016474:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:33.099112Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:33.140710Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:33.522620Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:33.529823Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:33.529975Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:33.545924Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:33.662377Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332848382016438:2081] 1767494913014136 != 1767494913014139 2026-01-04T02:48:33.676011Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:33.787899Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:33.909837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:33.915709Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:34.009569Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:34.084094Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:37.035967Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:37.036058Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:48:37.148585Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:37.162794Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:37.162874Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:37.166572Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-01-04T02:48:37.235279Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:37.370939Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:37.382144Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:37.401868Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksFatTest::PointSetBreak >> TFlatTest::RejectByPerRequestSize [GOOD] >> TFlatTest::ShardUnfreezeNonFrozen [GOOD] >> TFlatTest::ShardFreezeUnfreezeRejectScheme >> TLocksTest::GoodSameKeyLock >> TLocksFatTest::RangeSetNotBreak [GOOD] >> KqpSinkLocks::OlapInsertWithBulkUpsert-UseBulkUpsert [GOOD] >> TFlatTest::PathSorting >> TCancelTx::CrossShardReadOnly >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableDropOriginalAndReturnPartAfterCompaction [GOOD] Test command err: 2026-01-04T02:48:34.878616Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332855571873418:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:34.878694Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:35.142063Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:35.142178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:35.201886Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:35.211650Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:35.211700Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:35.459263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:35.465565Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:35.508350Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:35.551526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:35.778573Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-04T02:48:35.792437Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-04T02:48:35.858759Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.020s,wait=0.001s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-04T02:48:35.861375Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2026-01-04T02:48:35.907030Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:36.007502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 100000 InMemStepsToSnapshot: 2 InMemForceStepsToSnapshot: 3 InMemForceSizeToSnapshot: 1000000 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 200000 ReadAheadLoThreshold: 100000 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 10000 CountToCompact: 2 ForceCountToCompact: 2 ForceSizeToCompact: 20000 CompactionBrokerQueue: 1 KeepInCache: true } } ColumnFamilies { Id: 0 ColumnCache: ColumnCacheNone Storage: ColumnStorageTest_1_2_1k } } CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-04T02:48:36.007834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_copy_table.cpp:431: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2026-01-04T02:48:36.008311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 38], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-04T02:48:36.008343Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 0 
2026-01-04T02:48:36.008365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-04T02:48:36.008396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710676:0 type: TxCopyTable target path: [OwnerId: 72057594046644480, LocalPathId: 40] source path: [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-04T02:48:36.008422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-04T02:48:36.008434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-04T02:48:36.008766Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2026-01-04T02:48:36.008884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:48:36.009450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-04T02:48:36.009477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 4 2026-01-04T02:48:36.010085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 40, at schemeshard: 72057594046644480 2026-01-04T02:48:36.010269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2026-01-04T02:48:36.010437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T02:48:36.010463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-04T02:48:36.010587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 40] 2026-01-04T02:48:36.010660Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-04T02:48:36.010673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7591332859866841205:2245], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 38 2026-01-04T02:48:36.010707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7591332859866841205:2245], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 40 
2026-01-04T02:48:36.010738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2026-01-04T02:48:36.010766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2026-01-04T02:48:36.011038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-04T02:48:36.011149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-04T02:48:36.013647Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-01-04T02:48:36.013712Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-01-04T02:48:36.013720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2026-01-04T02:48:36.013732Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 6 2026-01-04T02:48:36.013747Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 3 2026-01-04T02:48:36.014861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 40 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-01-04T02:48:36.014922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 40 Version: ... 
wait to activation from: 2026-01-04T02:48:40.272392Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037891 Initiating switch from PreOffline to Offline state 2026-01-04T02:48:40.272993Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2026-01-04T02:48:40.274027Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037889, clientId# [2:7591332879593319247:2677], serverId# [2:7591332879593319257:3635], sessionId# [0:0:0] 2026-01-04T02:48:40.274090Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:48:40.274141Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2026-01-04T02:48:40.274505Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591332871003382953 RawX2: 4503608217307375 } TabletId: 72075186224037889 State: 4 2026-01-04T02:48:40.274545Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:48:40.274848Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:40.274878Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:40.275282Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:48:40.275315Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:48:40.275363Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037889 state Offline 2026-01-04T02:48:40.275457Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:48:40.275618Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591332875298350541 RawX2: 4503608217307431 } TabletId: 72075186224037891 State: 4 2026-01-04T02:48:40.275647Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:48:40.275800Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591332875298350541 RawX2: 4503608217307431 } TabletId: 72075186224037891 State: 4 2026-01-04T02:48:40.275819Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:48:40.275910Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591332875298350535 RawX2: 4503608217307430 } TabletId: 72075186224037890 State: 4 2026-01-04T02:48:40.275955Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:48:40.276180Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:40.276203Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:40.276258Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:40.276268Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:40.276303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:40.276313Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:40.276547Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-04T02:48:40.276563Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-04T02:48:40.276574Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-04T02:48:40.278253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-04T02:48:40.278500Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-04T02:48:40.278678Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-04T02:48:40.278811Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-04T02:48:40.278915Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-04T02:48:40.279034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-04T02:48:40.279158Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: 
DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-04T02:48:40.279252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 2 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-04T02:48:40.279273Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-04T02:48:40.279319Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-04T02:48:40.279339Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-04T02:48:40.279355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-04T02:48:40.281874Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-04T02:48:40.281913Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-01-04T02:48:40.281940Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-04T02:48:40.281969Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [2:7591332875298350630:2766], serverId# [2:7591332875298350632:2768], sessionId# [0:0:0] 2026-01-04T02:48:40.282349Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-04T02:48:40.282420Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-04T02:48:40.283831Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-04T02:48:40.283856Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-04T02:48:40.283889Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-04T02:48:40.283896Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-04T02:48:40.283912Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-04T02:48:40.283976Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-04T02:48:40.284048Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-04T02:48:40.284332Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 2026-01-04T02:48:40.284358Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-01-04T02:48:40.285402Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-04T02:48:40.285468Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change 
sender killed: at tablet: 72075186224037890 2026-01-04T02:48:40.285872Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-04T02:48:40.285906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-04T02:48:40.285947Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-04T02:48:40.285968Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-04T02:48:40.577680Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2026-01-04T02:48:40.579108Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2026-01-04T02:48:40.580379Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::Init >> TSchemeShardTest::CreatePersQueueGroup [GOOD] >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema >> TFlatTest::CopyTableAndCompareColumnsSchema [GOOD] >> TFlatTest::CopyTableAndDropCopy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByPerRequestSize [GOOD] Test command err: 2026-01-04T02:48:20.219355Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332793855798708:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:20.233435Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:20.260094Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:20.661974Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.670275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.670388Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.744008Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.876799Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332793855798651:2081] 1767494900210907 != 1767494900210910 2026-01-04T02:48:20.889906Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.965246Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: 
[ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:21.153255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:21.166343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:21.186555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:21.222779Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:21.263285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:25.217235Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332793855798708:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:25.217298Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:29.158013Z node 1 :TX_DATASHARD ERROR: check_data_tx_unit.cpp:133: Transaction read size 51002581 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760 2026-01-04T02:48:29.158311Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976715760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002581 exceeds limit 10000 at tablet 72075186224037888 txId 281474976715760) | 2026-01-04T02:48:29.159021Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7591332832510506193:3131] txid# 281474976715760 RESPONSE Status# WrongRequest marker# P13c 2026-01-04T02:48:29.835395Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332831798802528:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:29.835489Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:29.867014Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:29.986851Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:29.986956Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected 
-> Connecting 2026-01-04T02:48:29.992956Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:30.004790Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332831798802501:2081] 1767494909834190 != 1767494909834193 2026-01-04T02:48:30.013227Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:30.134088Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:30.238885Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:30.244479Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:30.264368Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:30.870657Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:34.835412Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591332831798802528:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:34.835478Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:37.860362Z node 2 :TX_DATASHARD ERROR: check_data_tx_unit.cpp:133: Transaction read size 51002469 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760 2026-01-04T02:48:37.860479Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710760 at tablet 72075186224037888 status: BAD_REQUEST errors: READ_SIZE_EXECEEDED (Transaction read size 51002469 exceeds limit 10000 at tablet 72075186224037888 txId 281474976710760) | 2026-01-04T02:48:37.860606Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7591332866158542736:3125] txid# 281474976710760 RESPONSE Status# WrongRequest marker# P13c 2026-01-04T02:48:38.726942Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332871617841664:2098];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:38.728312Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:38.742441Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:38.840360Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:38.841914Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332871617841578:2081] 1767494918703134 != 1767494918703137 2026-01-04T02:48:38.857645Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:38.857746Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:38.885983Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:38.937900Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:39.126074Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:39.134354Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:39.151641Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:39.751598Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:42.466952Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [3:7591332871617841616:2090] Handle TEvProposeTransaction 2026-01-04T02:48:42.466983Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [3:7591332871617841616:2090] TxId# 281474976710700 ProcessProposeTransaction 2026-01-04T02:48:42.467025Z node 3 :TX_PROXY DEBUG: proxy_impl.cpp:272: actor# [3:7591332871617841616:2090] Cookie# 0 userReqId# "" txid# 281474976710700 SEND to# [3:7591332888797712066:2800] DataReq marker# P0 2026-01-04T02:48:42.467076Z node 3 :TX_PROXY DEBUG: datareq.cpp:1330: Actor# [3:7591332888797712066:2800] Cookie# 0 txid# 281474976710700 HANDLE TDataReq marker# P1 2026-01-04T02:48:42.467656Z node 3 :TX_PROXY DEBUG: datareq.cpp:1245: Actor [3:7591332888797712066:2800] txid 281474976710700 disallow followers cause of operation 2 read target mode 0 2026-01-04T02:48:42.467671Z node 3 :TX_PROXY DEBUG: datareq.cpp:1245: Actor [3:7591332888797712066:2800] txid 281474976710700 disallow followers cause of operation 2 read target mode 0 2026-01-04T02:48:42.467710Z node 3 :TX_PROXY DEBUG: datareq.cpp:1453: Actor# [3:7591332888797712066:2800] txid# 281474976710700 SEND to# [3:7591332871617841803:2117] TSchemeCache with 2 scheme entries. 
DataReq marker# P2 2026-01-04T02:48:42.467837Z node 3 :TX_PROXY DEBUG: datareq.cpp:1620: Actor# [3:7591332888797712066:2800] txid# 281474976710700 HANDLE EvResolveKeySetResult TDataReq marker# P3 ErrorCount# 0 2026-01-04T02:48:42.469281Z node 3 :TX_PROXY DEBUG: datareq.cpp:1115: Actor# [3:7591332888797712066:2800] txid# 281474976710700 SEND TEvProposeTransaction to datashard 72075186224037888 with 734 bytes program affected shards 2 followers disallowed marker# P4 2026-01-04T02:48:42.469597Z node 3 :TX_PROXY DEBUG: datareq.cpp:1115: Actor# [3:7591332888797712066:2800] txid# 281474976710700 SEND TEvProposeTransaction to datashard 72075186224037889 with 734 bytes program affected shards 2 followers disallowed marker# P4 2026-01-04T02:48:42.470230Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:42.471484Z node 3 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976710700 at tablet 72075186224037888 2026-01-04T02:48:42.471789Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:42.472811Z node 3 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976710700 at tablet 72075186224037889 2026-01-04T02:48:42.473320Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:48:42.473505Z node 3 :TX_PROXY DEBUG: datareq.cpp:1873: Actor# [3:7591332888797712066:2800] txid# 281474976710700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037888 read size 17000855 out readset size 0 marker# P6 2026-01-04T02:48:42.473881Z node 3 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-04T02:48:42.474023Z node 3 :TX_PROXY DEBUG: datareq.cpp:1873: Actor# [3:7591332888797712066:2800] txid# 281474976710700 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# PREPARED shard id 72075186224037889 read size 9000463 out readset size 0 marker# P6 2026-01-04T02:48:42.474088Z node 3 :TX_PROXY ERROR: datareq.cpp:2829: Actor# [3:7591332888797712066:2800] txid# 281474976710700 FailProposedRequest: Transaction total read size 26001318 exceeded limit 10000 Status# ExecError 2026-01-04T02:48:42.474145Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7591332888797712066:2800] txid# 281474976710700 RESPONSE Status# ExecError marker# P13c 2026-01-04T02:48:42.474643Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:73: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037888 txId 281474976710700 2026-01-04T02:48:42.474695Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:44: Start TTxCancelTransactionProposal at tablet 72075186224037888 txId 281474976710700 2026-01-04T02:48:42.475139Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:73: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037889 txId 281474976710700 2026-01-04T02:48:42.475165Z node 3 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:44: Start TTxCancelTransactionProposal at tablet 72075186224037889 txId 281474976710700 |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeItemsLimit >> Cdc::Alter [GOOD] >> Cdc::DescribeStream >> TLocksTest::GoodLock [GOOD] >> TLocksTest::GoodNullLock >> 
TLocksTest::Range_Pinhole >> TLocksTest::SetLockFail [GOOD] >> TLocksTest::SetEraseSet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::RangeSetNotBreak [GOOD] Test command err: 2026-01-04T02:48:26.819580Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332817657829073:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:26.819634Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:27.087086Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:27.093465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:27.093546Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:27.172306Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:27.232808Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:27.356355Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:27.486977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:27.491794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:27.576019Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:27.736133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:27.791298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:27.834111Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:31.819709Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332817657829073:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:31.819817Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:35.651307Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332858623793323:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:35.651471Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:35.660222Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:35.742901Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:35.761978Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:35.762071Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:35.762130Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:35.786232Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:35.949556Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:35.995219Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:36.006092Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:36.034195Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:36.040613Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:36.127836Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:36.184774Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:36.654739Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:40.654686Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591332858623793323:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:40.655205Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::MiniKQLRanges >> TFlatTest::ReadOnlyMode >> TFlatTest::LargeProxyReplyRW [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkLocks::OlapInsertWithBulkUpsert-UseBulkUpsert [GOOD] Test command err: Trying to start YDB, gRPC: 3225, MsgBus: 25674 2026-01-04T02:47:38.958947Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332612602614820:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:38.959051Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:38.987654Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:39.303667Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:39.303804Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:39.308392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:39.356961Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:39.358587Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:39.572407Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:39.572424Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:39.572430Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:39.572498Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:39.575266Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:39.966563Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:40.236363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:40.282431Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:47:42.772425Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332629782484804:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.772557Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.772715Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332629782484825:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.777231Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:42.777396Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332629782484841:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.777491Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:42.791961Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332629782484842:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:42.900572Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332629782484894:2539] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:43.196801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:43.347013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:44.192114Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332612602614820:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:44.192211Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:44.474496Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:46.284537Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7591332646962362048:2964], SessionActorId: [1:7591332642667394712:2964], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 . sessionActorId=[1:7591332642667394712:2964]. 2026-01-04T02:47:46.284777Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=ODkxMjcwZWItZGYwMWZlY2EtYmZmOGMzOWUtYzM4ZmQ1NDE=, ActorId: [1:7591332642667394712:2964], ActorState: ExecuteState, LegacyTraceId: 01ke3eenak1x396ajg2d2r636r, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7591332646962362049:2964] from: [1:7591332646962362048:2964] trace_id# 2026-01-04T02:47:46.284873Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7591332646962362049:2964] TxId: 281474976710665. Ctx: { TraceId: 01ke3eenak1x396ajg2d2r636r, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ODkxMjcwZWItZGYwMWZlY2EtYmZmOGMzOWUtYzM4ZmQ1NDE=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 } trace_id# 2026-01-04T02:47:46.285255Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ODkxMjcwZWItZGYwMWZlY2EtYmZmOGMzOWUtYzM4ZmQ1NDE=, ActorId: [1:7591332642667394712:2964], ActorState: ExecuteState, LegacyTraceId: 01ke3eenak1x396ajg2d2r636r, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`" issue_code: 2001 severity: 1 } trace_id#
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 Trying to start YDB, gRPC: 5415, MsgBus: 5522 2026-01-04T02:47:47.963481Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332651776087005:2092];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:47.963622Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:48.157971Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:48.179168Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:48.179256Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:48.193914Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332651776086931:2081] 1767494867905430 != 1767494867905433 2026-01-04T02:47:48.221572Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:48.238754Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:48.358465Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:48.358497Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:48.358505Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:48.358583Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:48.375406Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:48.992204Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:49.000235Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:49.021868Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for tas ... 
COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:38.189595Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:38.189616Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:38.199773Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038017;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:38.199852Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038017;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:38.199874Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038017;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:38.201536Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:38.201585Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:38.201600Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:38.210485Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:38.210562Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:38.210584Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038073;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:38.211016Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:38.211061Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 
2026-01-04T02:48:38.211079Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:38.220848Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:38.220934Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:38.220957Z node 4 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:38.252267Z node 4 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3efrqjcyb7azphdxc9zt69", SessionId: ydb://session/3?node_id=4&id=NWNhYmM4OTYtYTgyODIxYWMtYzY2NDJjYzEtYjFhOWMxZDE=, Slow query, duration: 11.746624s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-04T02:48:39.614896Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: tablet_id=72075186224037891;tx_state=TTxProgressTx::Execute;tx_current=281474976710667;tx_id=281474976710667;commit_tx_id=281474976710667;commit_lock_id=281474976710666;fline=manager.cpp:80;broken_lock_id=281474976710665; 2026-01-04T02:48:39.733596Z node 4 :TX_COLUMNSHARD_WRITE WARN: log.cpp:841: tablet_id=72075186224037891;self_id=[4:7591332818143317259:2348];ev=NKikimr::NEvents::TDataEvents::TEvWrite;tablet_id=72075186224037891;event=TEvWrite;fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=tablet lock have another internal generation counter: 18446744073709551615 != 0;tx_id=281474976710669; 2026-01-04T02:48:39.734777Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4391: SelfId: [4:7591332873977901364:3650], SessionActorId: [4:7591332869682933790:3650], Got LOCKS BROKEN for table. ShardID=72075186224037891, Sink=[4:7591332873977901364:3650].{
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } 2026-01-04T02:48:39.734881Z node 4 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [4:7591332873977901364:3650], SessionActorId: [4:7591332869682933790:3650], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 . sessionActorId=[4:7591332869682933790:3650]. 2026-01-04T02:48:39.735063Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=4&id=ODVhMmMxMzktOWM5YTc3NjUtYTg1NDJlYzAtN2I4ZDk2ZmI=, ActorId: [4:7591332869682933790:3650], ActorState: ExecuteState, LegacyTraceId: 01ke3eg9e95k9j50gk5beth6nd, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [4:7591332873977901382:3650] from: [4:7591332873977901364:3650] trace_id# 2026-01-04T02:48:39.735150Z node 4 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [4:7591332873977901382:3650] TxId: 281474976710669. Ctx: { TraceId: 01ke3eg9e95k9j50gk5beth6nd, Database: /Root, SessionId: ydb://session/3?node_id=4&id=ODVhMmMxMzktOWM5YTc3NjUtYTg1NDJlYzAtN2I4ZDk2ZmI=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: tablet lock have another internal generation counter: 18446744073709551615 != 0, code: 2001 } } trace_id# 2026-01-04T02:48:39.735446Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=4&id=ODVhMmMxMzktOWM5YTc3NjUtYTg1NDJlYzAtN2I4ZDk2ZmI=, ActorId: [4:7591332869682933790:3650], ActorState: ExecuteState, LegacyTraceId: 01ke3eg9e95k9j50gk5beth6nd, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "tablet lock have another internal generation counter: 18446744073709551615 != 0" issue_code: 2001 severity: 1 } } trace_id# 2026-01-04T02:48:39.737112Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332873977901364:3650], SessionActorId: [4:7591332869682933790:3650], StateRollback: unknown message 278003713 2026-01-04T02:48:39.737142Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332873977901364:3650], SessionActorId: [4:7591332869682933790:3650], StateRollback: unknown message 278003713 2026-01-04T02:48:39.737153Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332873977901364:3650], SessionActorId: [4:7591332869682933790:3650], StateRollback: unknown message 278003713 2026-01-04T02:48:39.737164Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332873977901364:3650], SessionActorId: [4:7591332869682933790:3650], StateRollback: unknown message 278003713 2026-01-04T02:48:39.737175Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332873977901364:3650], SessionActorId: [4:7591332869682933790:3650], StateRollback: unknown message 278003713 2026-01-04T02:48:39.737184Z node 4 :KQP_COMPUTE WARN: kqp_write_actor.cpp:2875: SelfId: [4:7591332873977901364:3650], SessionActorId: [4:7591332869682933790:3650], StateRollback: unknown message 278003713 2026-01-04T02:48:39.739931Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; 2026-01-04T02:48:39.740065Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; 2026-01-04T02:48:39.740245Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; 2026-01-04T02:48:39.740398Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; 2026-01-04T02:48:39.740494Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; 2026-01-04T02:48:39.740593Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; 2026-01-04T02:48:39.740684Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; 2026-01-04T02:48:39.740852Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; 2026-01-04T02:48:39.740946Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; 2026-01-04T02:48:39.805290Z node 4 :TX_COLUMNSHARD_TX WARN: log.cpp:841: fline=tx_controller.cpp:161;event=cancel_tx;tx_id=281474976710669; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyCopiedTableAndDropFirstCopy [GOOD] Test command err: 2026-01-04T02:48:32.750670Z node 1 :METADATA_PROVIDER WARN: 
log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332842938378619:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:32.758836Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:33.109905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:33.116585Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:33.116678Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:33.132411Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:33.273321Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:33.277935Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332842938378580:2081] 1767494912722265 != 1767494912722268 2026-01-04T02:48:33.367698Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:33.524788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:33.534880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:33.636335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:33.758010Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:33.876187Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.017s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-04T02:48:33.886107Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-04T02:48:33.916752Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-04T02:48:33.938784Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r 
(max 6), put Spent{time=0.024s,wait=0.002s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 Copy TableOld to Table 2026-01-04T02:48:34.076298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1/Dir" OperationType: ESchemeOpCreateTable CreateTable { Name: "Table" CopyFromTable: "/dc-1/Dir/TableOld" } } TxId: 281474976710676 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-04T02:48:34.076559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_copy_table.cpp:431: TCopyTable Propose, path: /dc-1/Dir/Table, opId: 281474976710676:0, at schemeshard: 72057594046644480 2026-01-04T02:48:34.076954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046644480, LocalPathId: 38], parent name: Dir, child name: Table, child id: [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-04T02:48:34.076988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 0 2026-01-04T02:48:34.077009Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-04T02:48:34.077030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710676:0 type: TxCopyTable target path: [OwnerId: 72057594046644480, LocalPathId: 40] source path: [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-04T02:48:34.077062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-04T02:48:34.077077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-04T02:48:34.077219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2026-01-04T02:48:34.077329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710676:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:48:34.078400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-04T02:48:34.078437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 4 2026-01-04T02:48:34.078949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710676, response: Status: StatusAccepted TxId: 281474976710676 SchemeshardId: 72057594046644480 PathId: 40, at schemeshard: 72057594046644480 2026-01-04T02:48:34.079128Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710676, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /dc-1/Dir/Table 2026-01-04T02:48:34.079255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: 
TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T02:48:34.079267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-04T02:48:34.079405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710676, path id: [OwnerId: 72057594046644480, LocalPathId: 40] 2026-01-04T02:48:34.079468Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-04T02:48:34.079481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7591332847233346407:2249], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 38 2026-01-04T02:48:34.079494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7591332847233346407:2249], at schemeshard: 72057594046644480, txId: 281474976710676, path id: 40 2026-01-04T02:48:34.079536Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710676:0, at schemeshard: 72057594046644480 2026-01-04T02:48:34.079569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710676:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046644480 2026-01-04T02:48:34.080011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 3 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-04T02:48:34.080117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 281474976710676:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046644480 OwnerIdx: 4 TabletType: DataShard FollowerCount: 0 ObjectDomain { SchemeShard: 72057594046644480 PathId: 1 } ObjectId: 40 BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } BindedChannels { StoragePoolName: "/dc-1:test" } AllowedDomains { SchemeShard: 72057594046644480 PathId: 1 } 2026-01-04T02:48:34.082852Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-01-04T02:48:34.082965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-01-04T02:48:34.082978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2026-01-04T02:48:34.082998Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710676, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 6 2026-01-04T02:48:34.083017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 3 2026-01-04T02:48:34.083279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 40 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-01-04T02:48:34.083349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 40 Version: 1 PathOwnerId: 72057594046644480, cookie: 281474976710676 2026-01-04T02:48:34.083364Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710676 2026-01-04T02:48:34.083379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_sc ... andle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037889 not found 2026-01-04T02:48:41.421768Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 41], at schemeshard: 72057594046644480 2026-01-04T02:48:41.421784Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037892 not found 2026-01-04T02:48:41.421797Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2026-01-04T02:48:41.421845Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 3 2026-01-04T02:48:41.422084Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:6 2026-01-04T02:48:41.422104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2026-01-04T02:48:41.422166Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:41.422192Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:41.422247Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:41.422259Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:41.422262Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-04T02:48:41.422379Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-04T02:48:41.422471Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths 
Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-04T02:48:41.423829Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037888 state Offline 2026-01-04T02:48:41.424245Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2026-01-04T02:48:41.424322Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 2026-01-04T02:48:41.424929Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-04T02:48:41.425104Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-04T02:48:41.425239Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-01-04T02:48:41.425341Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-04T02:48:41.425419Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-04T02:48:41.425430Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-04T02:48:41.425466Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-04T02:48:41.425505Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-04T02:48:41.426232Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037893 2026-01-04T02:48:41.426282Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037893 2026-01-04T02:48:41.426623Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-04T02:48:41.426635Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-04T02:48:41.426635Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-01-04T02:48:41.426659Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-04T02:48:41.426669Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-01-04T02:48:41.426690Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-04T02:48:41.427428Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3665: Client pipe to tablet 72075186224037892 from 
72075186224037890 is reset 2026-01-04T02:48:41.427460Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3665: Client pipe to tablet 72075186224037893 from 72075186224037891 is reset 2026-01-04T02:48:41.427477Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-04T02:48:41.427493Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-01-04T02:48:41.427525Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037891, clientId# [3:7591332881619609101:2765], serverId# [3:7591332881619609103:2766], sessionId# [0:0:0] 2026-01-04T02:48:41.427633Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591332881619609027 RawX2: 4503612512274730 } TabletId: 72075186224037890 State: 4 2026-01-04T02:48:41.427682Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:48:41.427859Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-04T02:48:41.427902Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-04T02:48:41.428875Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037891 not found 2026-01-04T02:48:41.428977Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:41.429005Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:41.429401Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-04T02:48:41.429439Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-04T02:48:41.430117Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-04T02:48:41.430312Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-04T02:48:41.430592Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-04T02:48:41.430608Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-04T02:48:41.430646Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-04T02:48:41.430955Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-04T02:48:41.430972Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 
72057594046644480:3 tabletId 72075186224037890 2026-01-04T02:48:41.431000Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-04T02:48:41.432223Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-04T02:48:41.432257Z node 3 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-04T02:48:41.432289Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [3:7591332881619609110:2771], serverId# [3:7591332881619609111:2772], sessionId# [0:0:0] 2026-01-04T02:48:41.432472Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037890 not found 2026-01-04T02:48:41.432690Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [3:7591332881619609026:2713], serverId# [3:7591332881619609039:2720], sessionId# [0:0:0] 2026-01-04T02:48:41.433028Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-04T02:48:41.433073Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-04T02:48:41.710852Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted Check that tablet 72075186224037888 was deleted Check that tablet 72075186224037889 was deleted Check that tablet 72075186224037890 was deleted Check that tablet 72075186224037891 was deleted 2026-01-04T02:48:41.714141Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) 2026-01-04T02:48:41.714557Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2026-01-04T02:48:41.715005Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) 2026-01-04T02:48:41.715467Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) 2026-01-04T02:48:41.715931Z node 3 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TObjectStorageListingTest::TestFilter [GOOD] >> TObjectStorageListingTest::TestSkipShards |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> TSchemeShardTest::CreatePersQueueGroupWithKeySchema [GOOD] >> TSchemeShardTest::CreateFinishedInDescription >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] >> TObjectStorageListingTest::ManyDeletes [GOOD] >> TLocksTest::BrokenLockUpdate >> TFlatTest::PathSorting [GOOD] >> TFlatTest::PartBloomFilter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeProxyReplyRW [GOOD] Test command err: 2026-01-04T02:48:33.462675Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332848054506919:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:33.463359Z node 1 :METADATA_PROVIDER ERROR: 
log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:33.515227Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:33.868602Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:33.868708Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:33.920812Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:33.942356Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:33.944351Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332848054506807:2081] 1767494913406750 != 1767494913406753 2026-01-04T02:48:33.948163Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:34.177930Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:34.181509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:34.195638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:34.299170Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:34.470414Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:38.450458Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332848054506919:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:38.455440Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:39.266571Z node 1 :TX_PROXY ERROR: datareq.cpp:2703: Actor# [1:7591332869529347083:4334] txid# 281474976716010 MergeResult Result too large TDataReq marker# P18 2026-01-04T02:48:39.266640Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7591332869529347083:4334] txid# 281474976716010 RESPONSE Status# ExecResultUnavailable 
marker# P13c MiniKQLErrors: Query result size limit exceeded. (71692241 > 50331648) proxy error code: ExecResultUnavailable 2026-01-04T02:48:39.919991Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332876506173782:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:39.921095Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:39.978097Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:40.088212Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332876506173654:2081] 1767494919899076 != 1767494919899079 2026-01-04T02:48:40.089432Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:40.123028Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:40.123111Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:40.145881Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:40.147844Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:40.335470Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:40.342230Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:40.354435Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:40.584669Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:40.920867Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:44.905943Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591332876506173782:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:44.906023Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect 
path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:45.249977Z node 2 :TX_PROXY ERROR: datareq.cpp:2703: Actor# [2:7591332897981013933:4337] txid# 281474976716011 MergeResult Result too large TDataReq marker# P18 2026-01-04T02:48:45.250038Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7591332897981013933:4337] txid# 281474976716011 RESPONSE Status# ExecResultUnavailable marker# P13c MiniKQLErrors: Query result size limit exceeded. (71692241 > 50331648) proxy error code: ExecResultUnavailable |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TCancelTx::CrossShardReadOnly [GOOD] >> TCancelTx::CrossShardReadOnlyWithReadSets >> TLocksTest::GoodDupLock [GOOD] >> TLocksTest::CK_Range_GoodLock >> TFlatTest::Init [GOOD] >> TFlatTest::LargeDatashardReply >> TSchemeShardTest::CreateFinishedInDescription [GOOD] >> TSchemeShardTest::CreateDropSolomon >> TFlatTest::CopyTableAndDropCopy [GOOD] >> TLocksTest::SetLockNothing [GOOD] >> TFlatTest::SelectRangeItemsLimit [GOOD] >> TFlatTest::SelectRangeForbidNullArgs4 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::ShardFreezeUnfreezeRejectScheme [GOOD] Test command err: 2026-01-04T02:48:40.040695Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332880208842660:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:40.040829Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:40.285923Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:40.332377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:40.332464Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:40.335866Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:40.400215Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:40.538140Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:40.668232Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:40.723658Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:40.875405Z node 1 :TX_PROXY ERROR: 
schemereq.cpp:624: Actor# [1:7591332880208843496:2562] txid# 281474976710659, issues: { message: "Requested freeze state already set" severity: 1 } Error 1: Requested freeze state already set 2026-01-04T02:48:43.405351Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332891722999018:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:43.406666Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:43.440296Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:43.518872Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:43.518933Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:43.519894Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:43.522830Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332891722998993:2081] 1767494923398922 != 1767494923398925 2026-01-04T02:48:43.545576Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:43.741136Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:43.746161Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:43.749965Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:43.752913Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:43.799273Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:48:43.811920Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591332891722999937:2595] txid# 281474976710660, issues: { message: "Table is frozen. Only unfreeze alter is allowed" severity: 1 } Error 128: Table is frozen. 
Only unfreeze alter is allowed 2026-01-04T02:48:43.813916Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:48:43.828174Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::ManyDeletes [GOOD] Test command err: 2026-01-04T02:48:19.743750Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332790902140843:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.743957Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:20.146797Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.165226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.165352Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.293932Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332790902140741:2081] 1767494899740171 != 1767494899740174 2026-01-04T02:48:20.294113Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.298160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.565871Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.751976Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:20.758484Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:20.758516Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:20.758620Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:20.782152Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:21.392624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:21.398532Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:21.410210Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:21.475943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.744272Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332790902140843:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:24.747002Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:26.897204Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332820141845893:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:26.897246Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:26.969940Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:27.060968Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:27.061050Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:27.061692Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:27.064882Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:27.164056Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:27.187885Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:27.187908Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:27.187916Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:27.188007Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:27.427089Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:27.434984Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:27.455930Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:27.908797Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; .2026-01-04T02:48:31.898120Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591332820141845893:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:31.898190Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; . 2026-01-04T02:48:38.448705Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:38.448705Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037890 2026-01-04T02:48:38.449441Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037890 2026-01-04T02:48:38.449484Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037889 2026-01-04T02:48:38.449707Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037891 2026-01-04T02:48:38.449723Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037892 2026-01-04T02:48:38.450426Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037892 2026-01-04T02:48:38.452604Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-04T02:48:38.452703Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037890 2026-01-04T02:48:38.453076Z node 2 :TX_DATASHARD DEBUG: check_data_tx_unit.cpp:313: Prepared DataTx transaction txId 281474976711500 at tablet 72075186224037891 2026-01-04T02:48:38.453239Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037892 2026-01-04T02:48:38.455709Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037891 2026-01-04T02:48:38.464190Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976711500 at step 1767494918504 at tablet 72075186224037890 { Transactions { TxId: 281474976711500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767494918504 MediatorID: 72057594046382081 TabletID: 72075186224037890 } 
2026-01-04T02:48:38.464250Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-04T02:48:38.464409Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976711500 at step 1767494918504 at tablet 72075186224037892 { Transactions { TxId: 281474976711500 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767494918504 MediatorID: 72057594046382081 TabletID: 72075186224037892 } 2026-01-04T02:48:38.464431Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-01-04T02:48:38.464453Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-04T02:48:38.464475Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:48:38.464501Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1767494918504:281474976711500] in PlanQueue unit at 72075186224037890 2026-01-04T02:48:38.464554Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:595: LoadTxDetails at 72075186224037890 got data tx from cache 1767494918504:281474976711500 2026-01-04T02:48:38.464566Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-04T02:48:38.464581Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:48:38.464600Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1767494918504:281474976711500] in PlanQueue unit at 72075186224037892 2026-01-04T02:48:38.464631Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:595: LoadTxDetails at 72075186224037892 got data tx from cache 1767494918504:281474976711500 2026-01-04T02:48:38.466180Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976711500 released its data 2026-01-04T02:48:38.466228Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:48:38.466 ... 
2026-01-04T02:48:46.510116Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976711911 at step 1767494926554 at tablet 72075186224037892 { Transactions { TxId: 281474976711911 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767494926554 MediatorID: 72057594046382081 TabletID: 72075186224037892 } 2026-01-04T02:48:46.510130Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-01-04T02:48:46.510206Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-04T02:48:46.510219Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:48:46.510232Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1767494926554:281474976711911] in PlanQueue unit at 72075186224037892 2026-01-04T02:48:46.510254Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:595: LoadTxDetails at 72075186224037892 got data tx from cache 1767494926554:281474976711911 2026-01-04T02:48:46.511318Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976711911 released its data 2026-01-04T02:48:46.511349Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:48:46.512091Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 1767494926554} 2026-01-04T02:48:46.512172Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-04T02:48:46.512306Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976711911 released its data 2026-01-04T02:48:46.512330Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:48:46.512949Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976711911 at 72075186224037890 restored its data 2026-01-04T02:48:46.513501Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037891 step# 1767494926554} 2026-01-04T02:48:46.513536Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037891 2026-01-04T02:48:46.513603Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1767494926554 : 281474976711911] from 72075186224037891 at tablet 72075186224037891 send result to client [2:7591332906041205158:5637], exec latency: 0 ms, propose latency: 6 ms 2026-01-04T02:48:46.513626Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037891 2026-01-04T02:48:46.513876Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976711911 released its data 2026-01-04T02:48:46.513897Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:48:46.514041Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-04T02:48:46.514727Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976711911 at 72075186224037892 restored its data 
2026-01-04T02:48:46.515640Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976711911 released its data 2026-01-04T02:48:46.515665Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:48:46.517856Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037892 step# 1767494926554} 2026-01-04T02:48:46.517986Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037892 2026-01-04T02:48:46.518173Z node 2 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 1767494926554} 2026-01-04T02:48:46.518279Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-04T02:48:46.518818Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976711911 at 72075186224037892 restored its data 2026-01-04T02:48:46.519111Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976711911 at 72075186224037890 restored its data 2026-01-04T02:48:46.519629Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:48:46.519809Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:48:46.520117Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:48:46.520699Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976711911 at 72075186224037889 restored its data 2026-01-04T02:48:46.522423Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-04T02:48:46.522477Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1767494926554 : 281474976711911] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7591332906041205158:5637], exec latency: 10 ms, propose latency: 13 ms 2026-01-04T02:48:46.522501Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-04T02:48:46.522681Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-01-04T02:48:46.522707Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1767494926554 : 281474976711911] from 72075186224037892 at tablet 72075186224037892 send result to client [2:7591332906041205158:5637], exec latency: 9 ms, propose latency: 12 ms 2026-01-04T02:48:46.522720Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-01-04T02:48:46.524962Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976711911 released its data 2026-01-04T02:48:46.524987Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:48:46.535954Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:48:46.536844Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976711911 at 
72075186224037889 restored its data 2026-01-04T02:48:46.541918Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:48:46.543864Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:48:46.543920Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1767494926554 : 281474976711911] from 72075186224037889 at tablet 72075186224037889 send result to client [2:7591332906041205158:5637], exec latency: 34 ms, propose latency: 36 ms 2026-01-04T02:48:46.543949Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:48:46.563375Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037889 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-04T02:48:46.563594Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037889 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 1 2026-01-04T02:48:46.564012Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037891 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-04T02:48:46.564119Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037891 S3 Listing: finished status: 0 description: "" contents: 0 common prefixes: 0 2026-01-04T02:48:46.564469Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 0 last path: "" contents: 0 common prefixes: 0 2026-01-04T02:48:46.566842Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 1 last path: "" contents: 0 common prefixes: 0 2026-01-04T02:48:46.567601Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 2 last path: "" contents: 0 common prefixes: 0 2026-01-04T02:48:46.568140Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Godfather.avi") (type:0)), end at 
key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 3 last path: "/Videos/Godfather.avi" contents: 2 common prefixes: 0 2026-01-04T02:48:46.568942Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/House of Cards/Season 1/Chapter 1.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 4 last path: "/Videos/House of Cards/Season 1/Chapter 1.avi" contents: 3 common prefixes: 1 2026-01-04T02:48:46.569494Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:152: 72075186224037892 S3 Listing: start at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos/Terminator 2.avi") (type:0)), end at key ((type:4, value:"d\0\0\0\0\0\0\0") (type:4608, value:"Bucket100") (type:4608, value:"/Videos0") (type:0)) restarted: 5 last path: "/Videos/Terminator 2.avi" contents: 4 common prefixes: 1 2026-01-04T02:48:46.569783Z node 2 :TX_DATASHARD DEBUG: datashard__object_storage_listing.cpp:374: 72075186224037892 S3 Listing: finished status: 0 description: "" contents: 4 common prefixes: 1 >> TFlatTest::MiniKQLRanges [GOOD] >> TFlatTest::MergeEmptyAndWrite |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TFlatTest::ReadOnlyMode [GOOD] >> TFlatTest::RejectByIncomingReadSetSize >> TObjectStorageListingTest::TestSkipShards [GOOD] >> TSchemeShardTest::CreateDropSolomon [GOOD] >> TSchemeShardTest::CreateTableWithCompactionStrategies ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetLockNothing [GOOD] Test command err: 2026-01-04T02:48:19.748845Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332788144919590:2213];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.749020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:20.045887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.105536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.105636Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.131270Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.183557Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.212654Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.573285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself 
is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:20.581978Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:20.602602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:20.729315Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.758288Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:20.944168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:21.002749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:23.712201Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:23.712327Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:48:23.909585Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:23.909676Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:23.921918Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332807765160625:2081] 1767494903651662 != 1767494903651665 2026-01-04T02:48:23.931674Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:23.956580Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:23.964659Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:24.332587Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:24.346484Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:24.362414Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:24.370832Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.455476Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.463621Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.540460Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.698003Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:27.543004Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332822754174877:2247];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:27.543478Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:27.617925Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:27.683219Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:27.683289Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:27.697303Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332822754174668:2081] 1767494907522008 != 1767494907522011 2026-01-04T02:48:27.711535Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:27.723924Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:27.844023Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:28.073103Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:28.081586Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:28.098416Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:28.109291Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:28.205369Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:28.265313Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:28.521933Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:31.279651Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332838930353809:2233];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:31.284446Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:31.318518Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:31.377876Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:31.388864Z node 4 :CONFIGS_ ... 
: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:31.698421Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:31.772559Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:31.872949Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:35.632696Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591332856807521786:2181];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:35.632942Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:35.673637Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:35.787368Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:35.787478Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:35.799658Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591332856807521637:2081] 1767494915575108 != 1767494915575111 2026-01-04T02:48:35.817927Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:35.828128Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:35.953221Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:36.106935Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:36.118380Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:36.127426Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:36.137485Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:36.279384Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:36.330871Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:39.798382Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7591332875407242855:2175];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:39.798512Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:39.869533Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:39.957973Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7591332875407242716:2081] 1767494919761984 != 1767494919761987 2026-01-04T02:48:39.974366Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:39.977124Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:40.033498Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:40.033585Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:40.039476Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:40.152025Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:40.266045Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:48:40.274322Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:40.290686Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:40.298140Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:40.416490Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:40.513325Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:44.285993Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:44.286102Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:48:44.385952Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7591332898193185604:2081] 1767494924222794 != 1767494924222797 2026-01-04T02:48:44.429162Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:44.429268Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:44.431143Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:44.436202Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:44.485891Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:44.722750Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:44.730293Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 
72057594046644480 2026-01-04T02:48:44.739966Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:44.744777Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:44.857533Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:44.944970Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::CopyTableAndDropCopy [GOOD] Test command err: 2026-01-04T02:48:32.498241Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332843386565601:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:32.498335Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:32.920962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:32.921076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:32.986656Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:32.997992Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332843386565561:2081] 1767494912493082 != 1767494912493085 2026-01-04T02:48:33.012385Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:33.018413Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:33.233905Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:33.288304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:33.306218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:33.336635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:33.377417Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:33.538087Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:33.683109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:33.805387Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:33.905721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:34.037509Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:34.125150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:34.224307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:34.324057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:34.414888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:34.516942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:34.618870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:34.721578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:34.832156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:34.934438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:35.018692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:35.106613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:35.197360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:35.304674Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710693:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:35.406903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710695:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:35.532524Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710697:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:35.639175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:35.797137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710701:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:35.909345Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:36.029461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710705:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:36.177032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710707:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_cre ... 
thId [OwnerId: 72057594046644480, LocalPathId: 42] was 4 2026-01-04T02:48:45.985114Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710686 datashard 72075186224037895 state Ready 2026-01-04T02:48:45.985166Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037895 Got TEvSchemaChangedResult from SS at 72075186224037895 2026-01-04T02:48:45.985280Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710686 datashard 72075186224037894 state Ready 2026-01-04T02:48:45.985302Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037894 Got TEvSchemaChangedResult from SS at 72075186224037894 2026-01-04T02:48:45.992986Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:7591332900823634964:3217], serverId# [2:7591332900823634965:3218], sessionId# [0:0:0] 2026-01-04T02:48:45.993094Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-04T02:48:45.994497Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-04T02:48:45.994542Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-04T02:48:45.997468Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037895, clientId# [2:7591332900823634974:3224], serverId# [2:7591332900823634975:3225], sessionId# [0:0:0] 2026-01-04T02:48:45.997561Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-04T02:48:45.998797Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-04T02:48:45.998833Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-04T02:48:46.001737Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-04T02:48:46.002852Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-04T02:48:46.002891Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-04T02:48:46.005549Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-04T02:48:46.009143Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-04T02:48:46.009197Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-04T02:48:46.012164Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-04T02:48:46.013589Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-04T02:48:46.013639Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-04T02:48:46.015156Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put 
Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-04T02:48:46.015855Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-04T02:48:46.015898Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2026-01-04T02:48:46.017785Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-04T02:48:46.019184Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-04T02:48:46.019230Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-04T02:48:46.022721Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-04T02:48:46.025537Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-04T02:48:46.025578Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2026-01-04T02:48:46.025932Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-04T02:48:46.027137Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-04T02:48:46.027178Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-04T02:48:46.030431Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-04T02:48:46.033105Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-04T02:48:46.033161Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-04T02:48:46.036351Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-04T02:48:46.037586Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-04T02:48:46.037623Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-04T02:48:46.040728Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-04T02:48:46.041904Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-04T02:48:46.041945Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-04T02:48:46.044736Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-04T02:48:46.046113Z node 2 :TX_DATASHARD DEBUG: 
datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-04T02:48:46.046159Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-04T02:48:46.049086Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-04T02:48:46.050428Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-04T02:48:46.050471Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-04T02:48:46.053179Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-04T02:48:46.056232Z node 2 :OPS_COMPACT INFO: Compact{72075186224037894.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.008s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-04T02:48:46.058257Z node 2 :OPS_COMPACT INFO: Compact{72075186224037895.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.004s,wait=0.002s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-04T02:48:46.058292Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037894, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-04T02:48:46.058307Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037894, table# 1001, finished edge# 0, front# 0 2026-01-04T02:48:46.058681Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037895, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-04T02:48:46.058691Z node 2 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037895, table# 1001, finished edge# 0, front# 0 2026-01-04T02:48:46.059345Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-04T02:48:46.059397Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-04T02:48:46.062478Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-04T02:48:46.066215Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 2026-01-04T02:48:46.066270Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 2026-01-04T02:48:46.069261Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037894 2026-01-04T02:48:46.071927Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037894 2026-01-04T02:48:46.072008Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037894 2026-01-04T02:48:46.075090Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037895 2026-01-04T02:48:46.078302Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037895 
2026-01-04T02:48:46.078369Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037895 Check that tablet 72075186224037892 was deleted 2026-01-04T02:48:46.079493Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037892) Check that tablet 72075186224037893 was deleted 2026-01-04T02:48:46.080112Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037893) Check that tablet 72075186224037888 was deleted 2026-01-04T02:48:46.080549Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) Check that tablet 72075186224037889 was deleted 2026-01-04T02:48:46.080947Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037889) Check that tablet 72075186224037890 was deleted 2026-01-04T02:48:46.081379Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037890) Check that tablet 72075186224037891 was deleted 2026-01-04T02:48:46.081833Z node 2 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037891) |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksFatTest::PointSetBreak [GOOD] >> TLocksFatTest::LocksLimit >> TFlatTest::PartBloomFilter [GOOD] >> Cdc::DescribeStream [GOOD] >> Cdc::DecimalKey >> KqpPg::ReadPgArray >> KqpPg::EmptyQuery+useSink >> KqpPg::NoTableQuery+useSink >> KqpPg::TypeCoercionBulkUpsert >> KqpPg::InsertNoTargetColumns_Simple+useSink >> KqpPg::JoinWithQueryService+StreamLookup >> KqpPg::TypeCoercionInsert-useSink >> KqpPg::ReadPgArray [GOOD] >> KqpPg::TableArrayInsert+useSink >> TLocksTest::MultipleLocks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::TestSkipShards [GOOD] Test command err: 2026-01-04T02:48:42.281989Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332886797144558:2168];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:42.282226Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:42.593887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:42.602047Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:42.602175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:42.661485Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:42.707605Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:42.708332Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for 
subscription [1:7591332886797144418:2081] 1767494922267124 != 1767494922267127 2026-01-04T02:48:42.850434Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:42.850460Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:42.850468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:42.850566Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:42.871878Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:43.088918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:43.094970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:43.203606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:43.288295Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TSchemeShardTest::CreateTableWithCompactionStrategies [GOOD] >> TSchemeShardTest::CreateSystemColumn >> TObjectStorageListingTest::MaxKeysAndSharding [GOOD] >> TObjectStorageListingTest::SchemaChecks >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] >> TFlatTest::MergeEmptyAndWrite [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::PartBloomFilter [GOOD] Test command err: 2026-01-04T02:48:43.844607Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332891903202580:2191];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:43.845150Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:44.075536Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:44.105521Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:44.105652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:44.207107Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:44.207187Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:44.376412Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:44.477186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:44.483005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:44.518788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710659, at schemeshard: 72057594046644480 2026-01-04T02:48:44.528908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:48:47.421800Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332911458569805:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:47.421866Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:47.456301Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:47.562648Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:47.562742Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:47.577619Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332911458569777:2081] 1767494927420250 != 1767494927420253 2026-01-04T02:48:47.591894Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:47.603009Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:47.676039Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:47.799502Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:47.818183Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:47.872538Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:48.320098Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715719:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:48:48.428101Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> TLocksTest::SetEraseSet [GOOD] |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksTest::Range_GoodLock0 [GOOD] >> TLocksTest::Range_GoodLock1 >> KqpPg::CreateTableBulkUpsertAndRead >> KqpPg::InsertFromSelect_Simple+useSink >> TLocksTest::CK_GoodLock [GOOD] >> TLocksTest::CK_BrokenLock >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigNotificationRetries [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup >> TFlatTest::RejectByIncomingReadSetSize [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::MultipleLocks [GOOD] Test command err: 2026-01-04T02:48:22.691863Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332803970078652:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:22.692023Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:22.737858Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:22.965187Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:22.965262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:22.971497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:23.021590Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:23.063227Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:23.065025Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332803970078624:2081] 1767494902690840 != 1767494902690843 2026-01-04T02:48:23.290066Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:23.311950Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:23.370424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:23.484907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:23.535824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:23.701638Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:26.322977Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332820268419816:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:26.324843Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:26.343196Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:26.464563Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:26.476098Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:26.476174Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:26.503740Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:26.557743Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:26.715139Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:26.726878Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:26.742067Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:26.746820Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:26.813417Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:26.866392Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:30.208453Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332838102686243:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:30.208505Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:30.325624Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:30.337522Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:30.337607Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:30.340341Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:30.361268Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:30.584574Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:30.599454Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:30.614149Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, 
unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:30.615510Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:30.620791Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:30.686016Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:30.742570Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:34.346693Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332855147431543:2108];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:34.347079Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:34.445879Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:34.584547Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:34.584627Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:34.588692Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591332855147431462:2081] 1767494914316437 != 1767494914316440 2026-01-04T02:48:34.589173Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:34.647737Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:34.740097Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinali ... 
lled at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:34.913826Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:34.926608Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:34.934747Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:35.009694Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:35.105922Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:35.353198Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:38.436393Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591332871969842966:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:38.439045Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:38.464780Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:38.545424Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:38.548152Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591332871969842937:2081] 1767494918434115 != 1767494918434118 2026-01-04T02:48:38.580305Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:38.580403Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:38.591285Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:38.674133Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:38.813078Z node 5 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:38.820131Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:38.830500Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:38.836202Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:38.901824Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:38.961239Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:42.585843Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7591332889733336058:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:42.586472Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:42.617373Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:42.732707Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:42.733992Z node 6 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [6:7591332889733335951:2081] 1767494922567175 != 1767494922567178 2026-01-04T02:48:42.745315Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:42.745399Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:42.771909Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:42.802931Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 
2026-01-04T02:48:43.029648Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:43.038875Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:43.052179Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:43.057081Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:43.121954Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:43.175598Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:47.382583Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7591332907415370595:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:47.382751Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:47.408592Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:47.514609Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:47.528927Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:47.529018Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:47.584930Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:47.605900Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:47.815778Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:47.837258Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:47.940782Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:48.022455Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TSchemeShardTest::CreateSystemColumn [GOOD] >> KqpSnapshotIsolation::TPragmaSettingOlap-IsSnapshotIsolation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::MergeEmptyAndWrite [GOOD] Test command err: 2026-01-04T02:48:45.744187Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332901508299718:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:45.744403Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:45.785376Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:46.095982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:46.096124Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:46.129172Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:46.150321Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:46.166224Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:46.168637Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332901508299682:2081] 1767494925742926 != 1767494925742929 2026-01-04T02:48:46.428805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:46.435277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:46.440177Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:46.526254Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.184821Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332918897035098:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:49.185307Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:49.242285Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:49.346407Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332918897034983:2081] 1767494929172664 != 1767494929172667 2026-01-04T02:48:49.359248Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:49.391367Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:49.391443Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:49.399663Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:49.471658Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:49.585047Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:49.594177Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:49.610013Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:49.615644Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.731984Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-04T02:48:49.746908Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.005s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-04T02:48:49.778437Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.012s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-04T02:48:49.783890Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.017s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-04T02:48:49.822130Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:49.827073Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:49.827260Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:49.828765Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:49.833938Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:49.836910Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-04T02:48:49.838040Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:49.838197Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:49.838785Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-04T02:48:49.839530Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:49.841508Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:49.842089Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-04T02:48:49.842894Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:49.843010Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:49.843489Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-04T02:48:49.843957Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:49.844873Z node 2 :TX_DATASHARD DEBUG: 
datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:49.845192Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-04T02:48:49.845620Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:49.845685Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:49.845994Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-04T02:48:49.846474Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:49.846753Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:49.847050Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-04T02:48:49.847499Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:49.847566Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:49.847887Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-04T02:48:49.848323Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:49.848642Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:49.848892Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-04T02:48:49.849435Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:49.849521Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:48:49.849933Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037889 restored its data 2026-01-04T02:48:49.850570Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:572: tx 281474976715676 released its data 2026-01-04T02:48:49.850663Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:48:49.851072Z node 2 :TX_DATASHARD DEBUG: datashard_active_transaction.cpp:672: tx 281474976715676 at 72075186224037888 restored its data 2026-01-04T02:48:49.851681Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:48:49.851702Z node 2 :TX_DATASHARD DE ... 
de 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-04T02:48:50.225967Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1767494930264 : 281474976715687] from 72075186224037890 at tablet 72075186224037890 send result to client [2:7591332918897035350:2153], exec latency: 0 ms, propose latency: 2 ms 2026-01-04T02:48:50.225994Z node 2 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715687 state PreOffline TxInFly 0 2026-01-04T02:48:50.226023Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-04T02:48:50.226214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1767494930264 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 742 } } CommitVersion { Step: 1767494930264 TxId: 281474976715687 } 2026-01-04T02:48:50.226227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976715687, tablet: 72075186224037890, partId: 0 2026-01-04T02:48:50.226361Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480, message: TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1767494930264 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 742 } } CommitVersion { Step: 1767494930264 TxId: 281474976715687 } 2026-01-04T02:48:50.226447Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046644480, ev# TxKind: TX_KIND_SCHEME Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715687 Step: 1767494930264 OrderId: 281474976715687 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 742 } } CommitVersion { Step: 1767494930264 TxId: 281474976715687 } debug: NTableState::TProposedWaitParts operationId# 281474976715687:0 2026-01-04T02:48:50.226818Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2026-01-04T02:48:50.227080Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046644480, at schemeshard: 72057594046644480, message: Source { RawX1: 7591332923192003426 RawX2: 4503608217307433 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2026-01-04T02:48:50.227096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976715687, tablet: 72075186224037890, partId: 0 2026-01-04T02:48:50.227182Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480, message: Source { RawX1: 7591332923192003426 RawX2: 4503608217307433 } Origin: 
72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2026-01-04T02:48:50.227216Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 2026-01-04T02:48:50.227296Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 281474976715687:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046644480 message: Source { RawX1: 7591332923192003426 RawX2: 4503608217307433 } Origin: 72075186224037890 State: 5 TxId: 281474976715687 Step: 0 Generation: 1 2026-01-04T02:48:50.227340Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715687:0, shardIdx: 72057594046644480:3, shard: 72075186224037890, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046644480 2026-01-04T02:48:50.227356Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2026-01-04T02:48:50.227369Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976715687:0, datashard: 72075186224037890, at schemeshard: 72057594046644480 2026-01-04T02:48:50.227383Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715687:0 129 -> 240 2026-01-04T02:48:50.227667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2026-01-04T02:48:50.227750Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715687:0, at schemeshard: 72057594046644480 2026-01-04T02:48:50.227782Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 281474976715687:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:48:50.228054Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-04T02:48:50.228164Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715687:0 progress is 1/1 2026-01-04T02:48:50.228188Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2026-01-04T02:48:50.228207Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715687:0 progress is 1/1 2026-01-04T02:48:50.228213Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2026-01-04T02:48:50.228224Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715687, ready parts: 1/1, is published: true 2026-01-04T02:48:50.228253Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7591332923192003655:2382] message: TxId: 281474976715687 2026-01-04T02:48:50.228265Z node 2 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715687 ready parts: 1/1 2026-01-04T02:48:50.228278Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715687:0 2026-01-04T02:48:50.228284Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976715687:0 2026-01-04T02:48:50.228334Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-04T02:48:50.228576Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715687 datashard 72075186224037890 state PreOffline 2026-01-04T02:48:50.228600Z node 2 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2026-01-04T02:48:50.229449Z node 2 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037890 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:48:50.229487Z node 2 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2026-01-04T02:48:50.230755Z node 2 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:48:50.231106Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591332923192003426 RawX2: 4503608217307433 } TabletId: 72075186224037890 State: 4 2026-01-04T02:48:50.231174Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:48:50.231525Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:50.231601Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:50.232097Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-04T02:48:50.233097Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-04T02:48:50.233286Z node 2 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-04T02:48:50.233330Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-04T02:48:50.233544Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-04T02:48:50.233560Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-04T02:48:50.233580Z node 2 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-04T02:48:50.233628Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-04T02:48:50.233818Z node 2 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-04T02:48:50.233885Z node 2 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-04T02:48:50.234536Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-04T02:48:50.234562Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-04T02:48:50.234594Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeForbidNullArgs4 [GOOD] Test command err: 2026-01-04T02:48:45.034741Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332894673887628:2182];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:45.034822Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:45.453175Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:45.570025Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332894673887484:2081] 1767494924987446 != 1767494924987449 2026-01-04T02:48:45.572286Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:45.588632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:45.588720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:45.593355Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:45.669925Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:45.891188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:45.922735Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, 
txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:45.942533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:46.043836Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:46.058930Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:48.857862Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332915561303572:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:48.901868Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:48.952392Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:49.037521Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:49.037614Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:49.062366Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:49.067097Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332915561303545:2081] 1767494928852668 != 1767494928852671 2026-01-04T02:48:49.086289Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:49.151109Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:49.289897Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:49.307503Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:49.311951Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} 
ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetEraseSet [GOOD] Test command err: 2026-01-04T02:48:41.877269Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332883600041282:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:41.877336Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:42.126429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:42.126527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:42.175778Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:42.182487Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:42.225651Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:42.225948Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332883600041171:2081] 1767494921849173 != 1767494921849176 2026-01-04T02:48:42.360136Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:42.451699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:42.462494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:42.478738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:42.541797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:42.681622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:42.740789Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:42.788448Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710662: Validate (783): Key validation status: 3 2026-01-04T02:48:42.788697Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7591332887895009573:2696] txid# 281474976710662 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-01-04T02:48:42.788771Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7591332887895009573:2696] txid# 281474976710662 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-01-04T02:48:42.788819Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7591332887895009573:2696] txid# 281474976710662 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2026-01-04T02:48:42.792168Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710663: Validate (783): Key validation status: 3 2026-01-04T02:48:42.792371Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7591332887895009595:2703] txid# 281474976710663 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-01-04T02:48:42.792423Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7591332887895009595:2703] txid# 281474976710663 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-01-04T02:48:42.792444Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7591332887895009595:2703] txid# 281474976710663 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2026-01-04T02:48:42.795467Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710664: Validate (783): Key validation status: 3 2026-01-04T02:48:42.795688Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7591332887895009602:2707] txid# 281474976710664 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-01-04T02:48:42.795795Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7591332887895009602:2707] txid# 281474976710664 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-01-04T02:48:42.795818Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7591332887895009602:2707] txid# 281474976710664 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2026-01-04T02:48:42.798426Z node 1 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037888 cannot parse tx 281474976710665: Validate (783): Key validation status: 3 DataShardErrors: [SCHEME_ERROR] Validate (783): Key validation status: 3 proxy error code: ProxyShardNotAvailable 2026-01-04T02:48:42.798704Z node 1 :TX_PROXY ERROR: datareq.cpp:1873: Actor# [1:7591332887895009608:2710] txid# 281474976710665 HANDLE Prepare TEvProposeTransactionResult TDataReq TabletStatus# StatusWait GetStatus# ERROR 
shard id 72075186224037888 read size 0 out readset size 0 marker# P6 2026-01-04T02:48:42.798758Z node 1 :TX_PROXY ERROR: datareq.cpp:2071: Actor# [1:7591332887895009608:2710] txid# 281474976710665 HANDLE PrepareErrors TEvProposeTransactionResult TDataReq TabletStatus# StatusWait shard id 72075186224037888 2026-01-04T02:48:42.798798Z node 1 :TX_PROXY ERROR: datareq.cpp:1274: Actor# [1:7591332887895009608:2710] txid# 281474976710665 invalidateDistCache: 1 DIE TDataReq MarkShardError TabletsLeft# 1 2026-01-04T02:48:45.535058Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332900406454752:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:45.535114Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:45.613440Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:45.764828Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:45.769961Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332900406454724:2081] 1767494925525588 != 1767494925525591 2026-01-04T02:48:45.774831Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:45.774906Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:45.801002Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:45.883705Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:46.044004Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:46.049192Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:46.065255Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:46.127439Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-04T02:48:46.179465Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.382420Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332917887998403:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:49.382470Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:49.425781Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:49.514793Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:49.527234Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:49.527318Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:49.530813Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:49.649892Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:49.711015Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:49.714312Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:49.734918Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.792253Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.844935Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TCancelTx::CrossShardReadOnlyWithReadSets [GOOD] >> TCancelTx::ImmediateReadOnly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_base/unittest >> TSchemeShardTest::CreateSystemColumn [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:48:17.306839Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:48:17.306941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:48:17.306982Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:48:17.307020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:48:17.307062Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:48:17.307095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:48:17.307177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:48:17.307284Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:48:17.308246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:48:17.308542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:48:17.404615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:48:17.404690Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:17.420840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:48:17.421102Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:48:17.421321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:48:17.430661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:48:17.430902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 
2026-01-04T02:48:17.431559Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:17.431796Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:48:17.434728Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:17.434933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:48:17.435995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:48:17.436061Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:17.436176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:48:17.436226Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:48:17.436272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:48:17.436531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:48:17.442745Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:48:17.551127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:48:17.551372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:17.551595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:48:17.551640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:48:17.551868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:48:17.551939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:17.554364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:17.554570Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:48:17.554764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:17.554812Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:48:17.554848Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:48:17.554884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:48:17.556660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:17.556727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:48:17.556763Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:48:17.558373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:17.558410Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:48:17.558459Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:48:17.558533Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:48:17.566203Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:48:17.570900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:48:17.571143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:48:17.572078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: 
TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:17.572223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:48:17.572283Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:48:17.572557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:48:17.572607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:48:17.572765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:48:17.572847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:48:17.575441Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:48:17.575507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... rget path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 0 2026-01-04T02:48:53.518094Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:48:53.518171Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 103:0 type: TxCopyTable target path: [OwnerId: 72057594046678944, LocalPathId: 39] source path: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:48:53.518266Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:48:53.518460Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:48:53.518715Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 103:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:48:53.519453Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:48:53.519552Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 3 2026-01-04T02:48:53.523168Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 103, response: Status: StatusAccepted TxId: 103 SchemeshardId: 72057594046678944 PathId: 39, at schemeshard: 72057594046678944 
2026-01-04T02:48:53.523593Z node 17 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 103, database: /MyRoot, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/SystemColumnInCopyAllowed 2026-01-04T02:48:53.524040Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:48:53.524116Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:48:53.524421Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 103, path id: [OwnerId: 72057594046678944, LocalPathId: 39] 2026-01-04T02:48:53.524562Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:53.524634Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 1 2026-01-04T02:48:53.524724Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [17:211:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-04T02:48:53.524847Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:48:53.524952Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 103:0 ProgressState, operation type: TxCopyTable, at tablet# 72057594046678944 2026-01-04T02:48:53.525326Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 103:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2026-01-04T02:48:53.529222Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:48:53.529431Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:48:53.529523Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:48:53.529599Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 9 2026-01-04T02:48:53.529676Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 
72057594046678944, LocalPathId: 1] was 4 2026-01-04T02:48:53.532021Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:48:53.532136Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 1 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:48:53.532177Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:48:53.532212Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 1 2026-01-04T02:48:53.532251Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 4 2026-01-04T02:48:53.532362Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 0/1, is published: true 2026-01-04T02:48:53.533945Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:2 msg type: 268697601 2026-01-04T02:48:53.534190Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72057594037968897 2026-01-04T02:48:53.534270Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2026-01-04T02:48:53.534825Z node 17 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 2 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 1 } ObjectId: 39 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 1 } 2026-01-04T02:48:53.535115Z node 17 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 2, type DataShard, boot OK, tablet id 72075186233409547 2026-01-04T02:48:53.535482Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2026-01-04T02:48:53.535554Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 103, shardIdx: 72057594046678944:2, partId: 0 2026-01-04T02:48:53.535753Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 103:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2026-01-04T02:48:53.535837Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2026-01-04T02:48:53.535975Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 103:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 2 TabletID: 72075186233409547 Origin: 72057594037968897 2026-01-04T02:48:53.536123Z node 17 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 103:0 2 -> 3 2026-01-04T02:48:53.539020Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:48:53.541349Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:48:53.544781Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:48:53.545084Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:48:53.545184Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:70: TCopyTable TConfigureParts operationId# 103:0 ProgressState at tablet# 72057594046678944 2026-01-04T02:48:53.545300Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_copy_table.cpp:102: TCopyTable TConfigureParts operationId# 103:0 Propose modify scheme on dstDatashard# 72075186233409547 idx# 72057594046678944:2 srcDatashard# 72075186233409546 idx# 72057594046678944:1 operationId# 103:0 seqNo# 2:2 at tablet# 72057594046678944 2026-01-04T02:48:53.550424Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409547 cookie: 72057594046678944:2 msg type: 269549568 2026-01-04T02:48:53.550606Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 103:0 from tablet: 72057594046678944 to tablet: 72075186233409546 cookie: 72057594046678944:1 msg type: 269549568 2026-01-04T02:48:53.550704Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409547 2026-01-04T02:48:53.550744Z node 17 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 103, partId: 0, tablet: 72075186233409546 TestModificationResult got TxId: 103, wait until txId: 103 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeDeadline [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:46:14.310619Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:14.385983Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:14.391647Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:14.391925Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:14.392027Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:14.733729Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:14.817173Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:14.817262Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:14.863033Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:14.930678Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:15.518265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:15.518644Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:15.518688Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:15.518711Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:15.519045Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:15.581120Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:16.067363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:18.444956Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:18.450545Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:18.453202Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:18.474550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:18.474664Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:18.502254Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:18.503820Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:18.666009Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:18.666089Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:18.667200Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:18.667707Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:18.668145Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:18.668952Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:18.669149Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:18.669242Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:18.669384Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:18.669519Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:18.669656Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:18.695293Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:18.866896Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:18.886163Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:18.886272Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:18.910552Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:18.911687Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:18.911875Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:18.911934Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:18.911980Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:18.912085Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:18.912145Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:18.912186Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:18.912710Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:18.912988Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:46:18.915660Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:18.923488Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:46:18.923540Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:46:18.923610Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:18.932282Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:18.970187Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:18.972931Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:18.975545Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:18.975626Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:19.000396Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:46:19.000769Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:46:19.106613Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:19.166242Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:19.370164Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:19.371635Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:19.374205Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:19.376808Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:19.376906Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... ty: 1 } 2026-01-04T02:46:48.168218Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 1 ], ReplyToActorId[ [2:4233:3782]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:46:48.168575Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 1 ] 2026-01-04T02:46:48.168690Z node 2 :STATISTICS DEBUG: service_impl.cpp:1244: ConnectToSA(), pipe client id = [2:4235:3784] 2026-01-04T02:46:48.168758Z node 2 :STATISTICS DEBUG: service_impl.cpp:1273: SyncNode(), pipe client id = [2:4235:3784] 2026-01-04T02:46:48.169249Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4236:3785] 2026-01-04T02:46:48.169361Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4235:3784], server id = [2:4236:3785], tablet id = 72075186224037894, status = OK 2026-01-04T02:46:48.169445Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4236:3785], node id = 2, have schemeshards count = 0, need schemeshards count = 1 2026-01-04T02:46:48.169516Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:338: [72075186224037894] SendStatisticsToNode(), node id = 2, schemeshard count = 1 2026-01-04T02:46:48.169696Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 18446744073709551615 2026-01-04T02:46:48.169793Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 1, ReplyToActorId = [2:4233:3782], StatRequests.size() = 1 2026-01-04T02:46:48.170089Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 18446744073709551615 2026-01-04T02:47:53.640551Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4088:2462], ActorId: [2:4098:3707], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:47:53.640809Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4088:2462], ActorId: [2:4098:3707], Start read next stream part 2026-01-04T02:47:53.641002Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3ecvtsa668kj8g14ygndfk", SessionId: ydb://session/3?node_id=2&id=ZTEwYjRkNGUtYmE0Njk1Y2MtZDQ2Nzk0M2QtNjI2YzUyNTg=, Slow query, duration: 
66.251123s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:47:53.642494Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4088:2462], ActorId: [2:4411:3884], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:47:53.645151Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32100, txId: 18446744073709551615] shutting down 2026-01-04T02:47:53.645959Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4088:2462], ActorId: [2:4411:3884], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-04T02:47:53.646091Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4088:2462], ActorId: [2:4411:3884], Start read next stream part 2026-01-04T02:47:53.646287Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4088:2462], ActorId: [2:4098:3707], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:47:53.646346Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4088:2462], ActorId: [2:4098:3707], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OWFjZDdhNWEtZTJhNDg4ZDctZTE4YTA3NWItZDFlMWE0N2U=, TxId: 2026-01-04T02:47:53.750912Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4438:3907]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:47:53.751225Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:47:53.751277Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4438:3907], StatRequests.size() = 1 2026-01-04T02:48:46.327893Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4088:2462], ActorId: [2:4411:3884], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:48:46.328056Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4088:2462], ActorId: [2:4411:3884], Start read next stream part 2026-01-04T02:48:46.329066Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3eewhe9ert0yb09jvf29dv", SessionId: ydb://session/3?node_id=2&id=NDdkMjNhZTctODAwZmJjZjgtOWY4NTRhODYtZDQzM2EyOA==, Slow query, duration: 52.678704s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:48:46.350385Z node 2 
:STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4592:3990], ActorId: [2:4593:3991], Starting query actor #1 [2:4594:3992] 2026-01-04T02:48:46.350480Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4593:3991], ActorId: [2:4594:3992], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:48:46.350990Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32190, txId: 18446744073709551615] shutting down 2026-01-04T02:48:46.354906Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4593:3991], ActorId: [2:4594:3992], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NGYzNDUwYTctOWQyMzEzNTMtODBkMDExMWMtMzMyMDQ4Nzk=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:48:46.355731Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4088:2462], ActorId: [2:4411:3884], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:48:46.355787Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4088:2462], ActorId: [2:4411:3884], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=N2VjOTFmMmQtODQyYzM3MmQtNTUzZDM4NTMtNzk1NmUzNmQ=, TxId: 2026-01-04T02:48:46.434285Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4611:4006]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:48:46.434536Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:48:46.434574Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4611:4006], StatRequests.size() = 1 2026-01-04T02:48:46.603968Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4593:3991], ActorId: [2:4594:3992], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NGYzNDUwYTctOWQyMzEzNTMtODBkMDExMWMtMzMyMDQ4Nzk=, TxId: 2026-01-04T02:48:46.604076Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4593:3991], ActorId: [2:4594:3992], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NGYzNDUwYTctOWQyMzEzNTMtODBkMDExMWMtMzMyMDQ4Nzk=, TxId: ... blocking NKikimr::NStat::TEvStatistics::TEvSaveStatisticsQueryResponse from SQL_QUERY to NKikimr::TQueryRetryActor> const&, NKikimr::TPathId co... cookie 0 ... 
waiting for TEvSaveStatisticsQueryResponse (done) 2026-01-04T02:48:46.680647Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-04T02:48:46.680760Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:48:46.715457Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:4235:3784], schemeshard count = 1 2026-01-04T02:48:46.964514Z node 2 :STATISTICS ERROR: tx_analyze_deadline.cpp:26: [72075186224037894] Delete long analyze operation, OperationId=operationId 2026-01-04T02:48:47.358173Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:48:47.358467Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 1 2026-01-04T02:48:47.358615Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2026-01-04T02:48:47.369733Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-04T02:48:47.369875Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:48:47.370277Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-04T02:48:48.255405Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=18446744073709551615, at schemeshard: 72057594046644480 2026-01-04T02:48:48.255477Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8343: ConnectToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:48:48.255516Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8374: SendBaseStatsToSA(), no StatisticsAggregatorId, at schemeshard: 72057594046644480 2026-01-04T02:48:48.255551Z node 1 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 30.000000s, at schemeshard: 72057594046644480 2026-01-04T02:48:52.122872Z node 2 :STATISTICS DEBUG: tx_analyze_deadline.cpp:44: [72075186224037894] TTxAnalyzeDeadline::Complete. 
Send TEvAnalyzeResponse for deleted operation, OperationId=operationId, ActorId=[1:3159:3431] 2026-01-04T02:48:52.123032Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_base/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::RejectByIncomingReadSetSize [GOOD] Test command err: 2026-01-04T02:48:45.867354Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332899045830944:2223];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:45.867556Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:45.901189Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:46.254416Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:46.300581Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:46.300673Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:46.345292Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:46.412340Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332899045830755:2081] 1767494925847991 != 1767494925847994 2026-01-04T02:48:46.425494Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:46.555083Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:46.679060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:46.856748Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/dc-1" OperationType: ESchemeOpMkDir MkDir { Name: "Dir1" } } TxId: 281474976710658 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-04T02:48:46.856950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /dc-1/Dir1, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2026-01-04T02:48:46.857072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 
72057594046644480, LocalPathId: 1], parent name: dc-1, child name: Dir1, child id: [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2026-01-04T02:48:46.857111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 0 2026-01-04T02:48:46.857151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710658:0 type: TxMkDir target path: [OwnerId: 72057594046644480, LocalPathId: 38] source path: 2026-01-04T02:48:46.857198Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:48:46.857415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-04T02:48:46.857451Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-04T02:48:46.859935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710658, response: Status: StatusAccepted TxId: 281474976710658 SchemeshardId: 72057594046644480 PathId: 38, at schemeshard: 72057594046644480 2026-01-04T02:48:46.860086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710658, database: /dc-1, subject: , status: StatusAccepted, operation: CREATE DIRECTORY, path: /dc-1/Dir1 2026-01-04T02:48:46.860246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T02:48:46.860263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:48:46.860380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710658, path id: [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-04T02:48:46.860489Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-04T02:48:46.860504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7591332903340798718:2374], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 1 2026-01-04T02:48:46.860521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:7591332903340798718:2374], at schemeshard: 72057594046644480, txId: 281474976710658, path id: 38 2026-01-04T02:48:46.860549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2026-01-04T02:48:46.860570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:63: MkDir::TPropose operationId# 281474976710658:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:48:46.860625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: 
TOperation IsReadyToPropose , TxId: 281474976710658 ready parts: 1/1 2026-01-04T02:48:46.864514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710658 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:48:46.868419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710658 2026-01-04T02:48:46.868507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046644480, cookie: 281474976710658 2026-01-04T02:48:46.868518Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2026-01-04T02:48:46.868534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 7 2026-01-04T02:48:46.868552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 3 2026-01-04T02:48:46.868766Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2026-01-04T02:48:46.868844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 2 PathOwnerId: 72057594046644480, cookie: 281474976710658 2026-01-04T02:48:46.868859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976710658 2026-01-04T02:48:46.868872Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710658, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 2 2026-01-04T02:48:46.868890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 2 2026-01-04T02:48:46.868927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710658, ready parts: 0/1, is published: true 2026-01-04T02:48:46.869176Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:46.870189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:46.870214Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710658, ready 
parts: 0/1, is published: true 2026-01-04T02:48:46.870237Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:46.870866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710658:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710658 msg type: 269090816 2026-01-04T02:48:46.870948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710658, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:46.872005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2026-01-04T02:48:46.872053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710658 2026-01-04T02:48:46.873138Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767494926918, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:48:46.873250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710658 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767494926918 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-04T02:48:46.873285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_mkdir.cpp:33: MkDir::TPropose operationId# 281474976710658:0 HandleReply TEvPrivate::TEvOperationPlan, step: 1767494926918, at schemesha ... 
ard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-04T02:48:47.234512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 281474976710661, publications: 2, subscribers: 1 2026-01-04T02:48:47.234521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710661, [OwnerId: 72057594046644480, LocalPathId: 1], 12 2026-01-04T02:48:47.234528Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 281474976710661, [OwnerId: 72057594046644480, LocalPathId: 40], 3 2026-01-04T02:48:47.234646Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:16} Tx{27, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} hope 1 -> done Change{222, redo 163b alter 0b annex 0, ~{ 42, 4 } -{ }, 0 gb} 2026-01-04T02:48:47.234694Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:16} Tx{27, NKikimr::NSchemeShard::TSchemeShard::TTxOperationProgress} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:48:47.235296Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046644480, cookie: 281474976710661 2026-01-04T02:48:47.235356Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2026-01-04T02:48:47.235376Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:48:47.235436Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 1 Version: 12 PathOwnerId: 72057594046644480, cookie: 281474976710661 2026-01-04T02:48:47.235447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046644480, txId: 281474976710661 2026-01-04T02:48:47.235460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 12 2026-01-04T02:48:47.235477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 5 2026-01-04T02:48:47.235585Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{223, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2026-01-04T02:48:47.235617Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:17} Tx{28, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:48:47.235707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046644480, cookie: 
281474976710661 2026-01-04T02:48:47.235750Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} queued, type NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard 2026-01-04T02:48:47.235789Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T02:48:47.235830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 4 LocalPathId: 40 Version: 3 PathOwnerId: 72057594046644480, cookie: 281474976710661 2026-01-04T02:48:47.235838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710661 2026-01-04T02:48:47.235847Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:19:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:48:47.235868Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710661, pathId: [OwnerId: 72057594046644480, LocalPathId: 40], version: 3 2026-01-04T02:48:47.235877Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046316545:2:19:1:24576:109:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:48:47.235896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-04T02:48:47.235942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710661, subscribers: 1 2026-01-04T02:48:47.235954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7591332907635766683:2291] 2026-01-04T02:48:47.235998Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} hope 1 -> done Change{224, redo 166b alter 0b annex 0, ~{ 48, 59 } -{ }, 0 gb} 2026-01-04T02:48:47.236025Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:18} Tx{29, NKikimr::NSchemeShard::TSchemeShard::TTxAckPublishToSchemeBoard} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T02:48:47.236208Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046316545:2:20} commited cookie 1 for step 19 2026-01-04T02:48:47.236485Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:16:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:48:47.236520Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:16:1:24576:133:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:48:47.236585Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 16 2026-01-04T02:48:47.238209Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# 
[72057594046644480:4:18:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:48:47.238241Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:18:1:24576:123:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:48:47.238258Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:17:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:48:47.238312Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046644480:4:17:1:24576:118:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:48:47.238398Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 17 2026-01-04T02:48:47.238440Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710661 2026-01-04T02:48:47.238503Z node 1 :TABLET_EXECUTOR DEBUG: Leader{72057594046644480:4:19} commited cookie 1 for step 18 2026-01-04T02:48:47.238524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710661 2026-01-04T02:48:47.238647Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:406: TClient[72057594046644480] received poison pill [1:7591332907635766686:2291] 2026-01-04T02:48:47.238672Z node 1 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:514: TClient[72057594046644480] notify reset [1:7591332907635766686:2291] 2026-01-04T02:48:47.238777Z node 1 :PIPE_SERVER DEBUG: tablet_pipe_server.cpp:182: [72057594046644480] Got PeerClosed from# [1:7591332907635766686:2291] 2026-01-04T02:48:49.593888Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332917711248042:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:49.594069Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:49.629881Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:49.714709Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:49.721885Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:49.721957Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:49.745230Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:49.881894Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:49.923835Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:49.928895Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:49.941327Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:50.606315Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:52.901184Z node 2 :TX_PROXY ERROR: datareq.cpp:2829: Actor# [2:7591332930596151216:2799] txid# 281474976715700 FailProposedRequest: Transaction incoming read set size 1000082 for tablet 72075186224037889 exceeded limit 1000 Status# ExecError 2026-01-04T02:48:52.901261Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7591332930596151216:2799] txid# 281474976715700 RESPONSE Status# ExecError marker# P13c >> KqpPg::CreateTableSerialColumns+useSink |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> KqpPg::EmptyQuery+useSink [GOOD] >> KqpPg::EmptyQuery-useSink >> KqpPg::NoTableQuery+useSink [GOOD] >> KqpPg::NoTableQuery-useSink >> KqpPg::InsertNoTargetColumns_Simple+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Simple-useSink >> TObjectStorageListingTest::SchemaChecks [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TPragmaSettingOlap-IsSnapshotIsolation [GOOD] Test command err: Trying to start YDB, gRPC: 17819, MsgBus: 28241 2026-01-04T02:47:43.005522Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332630688264199:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:43.005701Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:43.410263Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:43.410629Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:43.410722Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:43.436909Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:43.569959Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332630688264087:2081] 1767494862952305 != 1767494862952308 2026-01-04T02:47:43.574547Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:43.675805Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:43.766254Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:43.766279Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:43.766291Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:43.766391Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:44.036416Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:44.365343Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:44.401995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:47:46.583955Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332647868134145:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:46.583954Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332647868134170:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:46.584109Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:46.584344Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332647868134175:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:46.584404Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:46.587400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:46.599688Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332647868134174:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:46.719117Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332647868134227:2539] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:47.008696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:47.155666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:48.030634Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332630688264199:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:48.050598Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:48.286477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 20842, MsgBus: 21397 ... waiting for SysViewsRoster update finished 2026-01-04T02:47:54.448867Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:54.455252Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:47:54.459596Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:47:54.459902Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:54.460072Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:47:54.713441Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:54.713568Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:54.749027Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767494871253758 != 1767494871253762 2026-01-04T02:47:54.756593Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:54.814938Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:54.936142Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:47:55.434703Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:55.434768Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:55.434813Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:55.435572Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:55.450684Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:55.991451Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:56.082237Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:915:2780], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:56.082596Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:926:2785], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:56.082722Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:56.084367Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:931:2790], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:56.084966Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, st ... 15662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.393506Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038082;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.403104Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038080;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.403206Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038080;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.403250Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038080;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.414495Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.414599Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.414641Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.424124Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.424229Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.424269Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.433674Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.433778Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.433839Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.443799Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.443905Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.443946Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.453355Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.453463Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.453509Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.464265Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.464367Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.464409Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.475625Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.475753Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.475798Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.484977Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.485094Z node 3 :TX_COLUMNSHARD 
WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.485139Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.494968Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.495076Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.495119Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.504485Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.504599Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.504642Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.514740Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.514847Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.514888Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.524828Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.524940Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.524983Z node 3 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:48:46.638924Z node 3 
:KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3efwfr4nbjjagm7tgw9tx1", SessionId: ydb://session/3?node_id=3&id=Y2FiMGI0NTUtYWZmZThlMmQtYmU5YmVhYTctYjlhNzUyYmY=, Slow query, duration: 20.273411s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpPg::JoinWithQueryService+StreamLookup [GOOD] >> KqpPg::Insert_Serial+useSink >> Cdc::DecimalKey [GOOD] >> Cdc::AddColumn >> TLocksFatTest::LocksLimit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::SchemaChecks [GOOD] Test command err: 2026-01-04T02:48:19.717403Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332787498873928:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.718334Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:20.119018Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.119121Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.283932Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.305689Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.362796Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.365979Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332787498873866:2081] 1767494899709368 != 1767494899709371 2026-01-04T02:48:20.602386Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.726081Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:20.750813Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:20.750833Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:20.750893Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:20.751011Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:21.403643Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:21.410420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:21.496730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.720083Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332787498873928:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:24.720295Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:35.053870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:48:35.053897Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:52.256732Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332929082181453:2202];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:52.256799Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:52.361887Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:52.369006Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:52.404444Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:52.404550Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:52.417973Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:52.473146Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:52.473179Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:52.473189Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize 
from file: (empty maybe) 2026-01-04T02:48:52.473269Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:52.560104Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:52.723056Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:52.730353Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:52.752899Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:52.767788Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:53.262144Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |95.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |95.0%| [TA] $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TStorageTenantTest::LsLs >> TCancelTx::ImmediateReadOnly [GOOD] >> TStorageTenantTest::CreateSolomonInsideSubDomain >> TStorageTenantTest::GenericCases >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore >> TStorageTenantTest::CreateTableInsideSubDomain2 >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet >> TStorageTenantTest::DeclareAndDefine >> TFlatTest::LargeDatashardReply [GOOD] >> KqpPg::EmptyQuery-useSink [GOOD] >> KqpPg::DuplicatedColumns+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::LocksLimit [GOOD] Test command err: 2026-01-04T02:48:43.281556Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332891854341033:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:43.281948Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:43.509896Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:43.520315Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:43.520445Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:43.578843Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332891854340931:2081] 1767494923277878 != 1767494923277881 2026-01-04T02:48:43.594258Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:43.600052Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:43.735510Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:43.822227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:43.828136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:43.902086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:44.083959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, 
opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:44.156753Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:44.289372Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:48.281628Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332891854341033:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:48.281720Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:50.848044Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332921103769325:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:50.848074Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:50.871457Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:50.961139Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:50.962938Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332921103769303:2081] 1767494930847222 != 1767494930847225 2026-01-04T02:48:50.972736Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:50.972817Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:50.999736Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:51.130215Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:51.177186Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:51.183139Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:51.192886Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but 
propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:51.245074Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:51.285829Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.331887Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332938753625177:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:54.332074Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:54.390223Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:54.457153Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:54.461372Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332938753625072:2081] 1767494934327118 != 1767494934327121 2026-01-04T02:48:54.466531Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:54.466628Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:54.492920Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:54.599708Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:54.739168Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:54.746496Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:54.766490Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:54.777591Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.857610Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.948072Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:55.362118Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |95.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} |95.0%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_base/test-results/unittest/{meta.json ... results_accumulator.log} |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> KqpPg::CreateTableSerialColumns+useSink [GOOD] >> KqpPg::CreateTableSerialColumns-useSink >> KqpPg::NoTableQuery-useSink [GOOD] >> KqpPg::PgCreateTable >> KqpPg::InsertNoTargetColumns_Simple-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Serial-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TCancelTx::ImmediateReadOnly [GOOD] Test command err: 2026-01-04T02:48:43.990760Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332892295144900:2132];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:43.990811Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:44.283628Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:44.310603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:44.310703Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:44.364061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:44.491743Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:44.496205Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332892295144801:2081] 1767494923965225 != 1767494923965228 2026-01-04T02:48:44.575535Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables 
existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:44.737190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:44.742980Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:44.805031Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:45.003922Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:45.226618Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7591332900885080358:2585] txid# 281474976710660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-01-04T02:48:45.226708Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7591332900885080358:2585] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2026-01-04T02:48:45.241262Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7591332900885080371:2595] txid# 281474976710661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-01-04T02:48:45.241331Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7591332900885080371:2595] txid# 281474976710661 RESPONSE Status# ExecCancelled marker# P13c 2026-01-04T02:48:45.261312Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7591332900885080384:2605] txid# 281474976710662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-01-04T02:48:45.261395Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7591332900885080384:2605] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c 2026-01-04T02:48:45.344276Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7591332900885080410:2625] txid# 281474976710664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-01-04T02:48:45.344345Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7591332900885080410:2625] txid# 281474976710664 RESPONSE Status# ExecCancelled marker# P13c 2026-01-04T02:48:45.373379Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7591332900885080423:2635] txid# 281474976710665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-01-04T02:48:45.373435Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7591332900885080423:2635] txid# 281474976710665 RESPONSE Status# ExecCancelled marker# P13c 2026-01-04T02:48:45.418481Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7591332900885080436:2645] txid# 281474976710666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-01-04T02:48:45.418553Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# 
[1:7591332900885080436:2645] txid# 281474976710666 RESPONSE Status# ExecCancelled marker# P13c 2026-01-04T02:48:47.608129Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332907606618520:2062];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:47.608260Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:47.654278Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:47.744296Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:47.745599Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332907606618499:2081] 1767494927604216 != 1767494927604219 2026-01-04T02:48:47.791596Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:47.791704Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:47.798685Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:47.900007Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:47.977872Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:47.985344Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:51.291475Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332928503335899:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:51.291710Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:51.314143Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:51.408932Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:51.409029Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected 
-> Connecting 2026-01-04T02:48:51.414916Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:51.417876Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332928503335789:2081] 1767494931285129 != 1767494931285132 2026-01-04T02:48:51.439867Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:51.511493Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:51.621349Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:51.629943Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:51.964803Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7591332928503336736:2584] txid# 281474976710660 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-01-04T02:48:51.964867Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7591332928503336736:2584] txid# 281474976710660 RESPONSE Status# ExecCancelled marker# P13c 2026-01-04T02:48:51.985596Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7591332928503336751:2596] txid# 281474976710661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-01-04T02:48:51.985663Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7591332928503336751:2596] txid# 281474976710661 RESPONSE Status# ExecCancelled marker# P13c 2026-01-04T02:48:51.999571Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7591332928503336765:2607] txid# 281474976710662 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037888 marker# P12 2026-01-04T02:48:51.999643Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7591332928503336765:2607] txid# 281474976710662 RESPONSE Status# ExecCancelled marker# P13c 2026-01-04T02:48:52.033986Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7591332932798304090:2630] txid# 281474976710664 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-01-04T02:48:52.034061Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7591332932798304090:2630] txid# 281474976710664 RESPONSE Status# ExecCancelled marker# P13c 2026-01-04T02:48:52.046571Z node 3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7591332932798304104:2641] txid# 281474976710665 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-01-04T02:48:52.046644Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7591332932798304104:2641] txid# 281474976710665 RESPONSE Status# ExecCancelled marker# P13c 2026-01-04T02:48:52.063065Z node 
3 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [3:7591332932798304118:2652] txid# 281474976710666 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# CANCELLED shard id 72075186224037889 marker# P12 2026-01-04T02:48:52.063124Z node 3 :TX_PROXY ERROR: datareq.cpp:883: Actor# [3:7591332932798304118:2652] txid# 281474976710666 RESPONSE Status# ExecCancelled marker# P13c 2026-01-04T02:48:55.227952Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332944868901591:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:55.228032Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:55.252912Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:55.340962Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:55.341894Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591332944868901563:2081] 1767494935227317 != 1767494935227320 2026-01-04T02:48:55.373074Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:55.373170Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:55.385646Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:55.498498Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:55.602448Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:55.608257Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:55.615951Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:55.937593Z node 4 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976715660 at tablet 72075186224037888 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2026-01-04T02:48:55.937750Z node 4 :TX_PROXY ERROR: datareq.cpp:883: Actor# [4:7591332944868902516:2583] txid# 281474976715660 RESPONSE Status# ExecCancelled marker# P13c 2026-01-04T02:48:55.950740Z node 4 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing 
transaction txid 281474976715662 at tablet 72075186224037889 status: CANCELLED errors: EXECUTION_CANCELLED (Tx was cancelled) | 2026-01-04T02:48:55.950991Z node 4 :TX_PROXY ERROR: datareq.cpp:883: Actor# [4:7591332944868902530:2591] txid# 281474976715662 RESPONSE Status# ExecCancelled marker# P13c >> KqpPg::Insert_Serial+useSink [GOOD] >> KqpPg::Insert_Serial-useSink |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReply [GOOD] Test command err: 2026-01-04T02:48:44.607598Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332895536131528:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:44.608281Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:45.010980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:45.011089Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:45.018066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:45.189606Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:45.192193Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332895536131339:2081] 1767494924551763 != 1767494924551766 2026-01-04T02:48:45.209207Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:45.210959Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715692, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.211024Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715691:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715691 msg type: 269090816 2026-01-04T02:48:45.211070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715691, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.211105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715686:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715686 msg type: 269090816 2026-01-04T02:48:45.211161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715686, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.211192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715690:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715690 msg type: 269090816 2026-01-04T02:48:45.211216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715690, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.211258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715685:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715685 msg type: 269090816 2026-01-04T02:48:45.211339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715685, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.211371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715689:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715689 msg type: 269090816 2026-01-04T02:48:45.211399Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715689, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.211426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715684:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715684 msg type: 269090816 2026-01-04T02:48:45.211460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715684, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.211492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715688:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715688 msg type: 269090816 2026-01-04T02:48:45.211518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715688, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.211545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715683:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715683 msg type: 269090816 2026-01-04T02:48:45.211584Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715683, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.211614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715687:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715687 msg type: 269090816 2026-01-04T02:48:45.211634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715687, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.214579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715682:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715682 msg type: 269090816 2026-01-04T02:48:45.214657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715682, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.214760Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:213: SysViewsRosterUpdate# [1:7591332899831099173:2254] at schemeshard: 72057594046644480 Handle TEvNotifyTxCompletionResult, mkdir '/dc-1/.sys' 2026-01-04T02:48:45.214827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715681:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715681 msg type: 269090816 2026-01-04T02:48:45.214877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715681, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.214927Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715680:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715680 msg type: 269090816 2026-01-04T02:48:45.214994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715680, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.215048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715675:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715675 msg type: 269090816 2026-01-04T02:48:45.215082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715675, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.215115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715679:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715679 msg type: 269090816 2026-01-04T02:48:45.215148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715679, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.215191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715674:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715674 msg type: 269090816 2026-01-04T02:48:45.215227Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715674, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.215280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715678:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715678 msg type: 269090816 2026-01-04T02:48:45.215339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.215374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715673:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715673 msg type: 269090816 2026-01-04T02:48:45.215434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715673, partId: 
4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.215474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715677:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715677 msg type: 269090816 2026-01-04T02:48:45.215521Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715677, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.215552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715672:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715672 msg type: 269090816 2026-01-04T02:48:45.215573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715672, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.215596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715676:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715676 msg type: 269090816 2026-01-04T02:48:45.215639Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715676, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:48:45.218427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715671:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715671 msg type: 269090816 2026-01-04T02:48:45.218552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715671, ... 
, ready parts: 1/1, is published: true 2026-01-04T02:48:46.479951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:7591332904126067706:2362] message: TxId: 281474976710674 2026-01-04T02:48:46.479973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976710674 ready parts: 1/1 2026-01-04T02:48:46.479984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710674:0 2026-01-04T02:48:46.479990Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976710674:0 2026-01-04T02:48:46.480076Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 42] was 2 2026-01-04T02:48:46.480994Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037899 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:48:46.481047Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037899 Initiating switch from PreOffline to Offline state 2026-01-04T02:48:46.482549Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037899 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:48:46.485882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591332904126067295 RawX2: 4503603922340115 } TabletId: 72075186224037899 State: 4 2026-01-04T02:48:46.485945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037899, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:48:46.486316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:48:46.486357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:12 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:48:46.486552Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037899 state Offline 2026-01-04T02:48:46.486609Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:74: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 TxId_Deprecated: 0 TabletID: 72075186224037899 2026-01-04T02:48:46.486629Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:19: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute Tablet 72075186224037899 2026-01-04T02:48:46.486688Z node 1 :HIVE DEBUG: tablet_info.cpp:123: HIVE#72057594037968897 Tablet(DataShard.72075186224037899.Leader.1) VolatileState: Running -> Stopped (Node 1) 2026-01-04T02:48:46.486750Z node 1 :HIVE DEBUG: tablet_info.cpp:518: HIVE#72057594037968897 Sending TEvStopTablet(DataShard.72075186224037899.Leader.1 gen 1) to node 1 2026-01-04T02:48:46.487161Z node 1 :HIVE DEBUG: tx__delete_tablet.cpp:67: HIVE#72057594037968897 THive::TTxDeleteTablet::Execute() result Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 2026-01-04T02:48:46.488175Z node 1 
:HIVE DEBUG: tx__delete_tablet.cpp:136: HIVE#72057594037968897 THive::TTxDeleteTablet::Complete() SideEffects: {Notifications: 0x10080003 [1:7591332895536131399:2103] NKikimrLocal.TEvStopTablet TabletId: 72075186224037899 FollowerId: 0 Generation: 1,0x10040206 [1:7591332895536131704:2152] NKikimrHive.TEvDeleteTabletReply Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12 Actions: NKikimr::TTabletReqBlockBlobStorage} 2026-01-04T02:48:46.488352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 12, at schemeshard: 72057594046644480 2026-01-04T02:48:46.488597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 42] was 1 2026-01-04T02:48:46.488820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-04T02:48:46.488842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 42], at schemeshard: 72057594046644480 2026-01-04T02:48:46.488889Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-04T02:48:46.489520Z node 1 :HIVE DEBUG: tx__block_storage_result.cpp:23: HIVE#72057594037968897 THive::TTxBlockStorageResult::Execute(72075186224037899 OK) 2026-01-04T02:48:46.489618Z node 1 :HIVE DEBUG: tx__block_storage_result.cpp:64: HIVE#72057594037968897 THive::TTxBlockStorageResult::Complete(72075186224037899 OK) 2026-01-04T02:48:46.489681Z node 1 :HIVE DEBUG: hive_impl.cpp:925: HIVE#72057594037968897 THive::Handle::TEvInitiateDeleteStorage TabletId=72075186224037899 2026-01-04T02:48:46.489885Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037899 reason = ReasonStop 2026-01-04T02:48:46.489947Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037899, clientId# [1:7591332904126067458:2958], serverId# [1:7591332904126067459:2959], sessionId# [0:0:0] 2026-01-04T02:48:46.490435Z node 1 :HIVE DEBUG: tx__delete_tablet_result.cpp:26: HIVE#72057594037968897 THive::TTxDeleteTabletResult::Execute(72075186224037899 OK) 2026-01-04T02:48:46.490684Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:12 2026-01-04T02:48:46.490707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:12 tabletId 72075186224037899 2026-01-04T02:48:46.491968Z node 1 :HIVE DEBUG: hive_impl.cpp:508: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus, TabletId: 72075186224037899 2026-01-04T02:48:46.491991Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037899 not found 2026-01-04T02:48:46.492151Z node 1 :HIVE DEBUG: tx__delete_tablet_result.cpp:72: HIVE#72057594037968897 THive::TTxDeleteTabletResult(72075186224037899)::Complete SideEffects {} 2026-01-04T02:48:46.492408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, 
skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-04T02:48:46.494617Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037899 2026-01-04T02:48:46.494722Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037899 2026-01-04T02:48:48.426107Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332915363887356:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:48.426188Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:48.456026Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:48.529292Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:48.529379Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:48.532135Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:48.542679Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332915363887329:2081] 1767494928423055 != 1767494928423058 2026-01-04T02:48:48.561161Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:48.688609Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:48.753289Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:48.758532Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:48.769283Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:48.780519Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.478062Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:53.426460Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591332915363887356:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:53.426515Z 
node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:58.894995Z node 2 :MINIKQL_ENGINE ERROR: datashard__engine_host.cpp:518: Shard %72075186224037888, txid %281474976716360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2026-01-04T02:48:58.908627Z node 2 :TX_DATASHARD ERROR: execute_data_tx_unit.cpp:273: Datashard execution error for [0:281474976716360] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2026-01-04T02:48:58.909799Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976716360 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2026-01-04T02:48:58.932078Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7591332958313566924:6107] txid# 281474976716360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) proxy error code: ExecResultUnavailable |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TLocksTest::GoodSameKeyLock [GOOD] >> TLocksTest::GoodSameShardLock >> TLocksTest::Range_IncorrectNullDot2 [GOOD] >> TLocksTest::BrokenSameShardLock [GOOD] >> TLocksTest::Range_IncorrectDot2 [GOOD] >> TLocksTest::Range_CorrectDot [GOOD] >> TLocksTest::Range_EmptyKey [GOOD] >> TLocksTest::BrokenDupLock [GOOD] >> TLocksTest::CK_Range_BrokenLockInf [GOOD] >> TLocksTest::Range_Pinhole [GOOD] >> TLocksTest::SetBreakSetEraseBreak ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectNullDot2 [GOOD] Test command err: 2026-01-04T02:48:19.741722Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332787110318005:2142];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.741929Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:20.093938Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.105101Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.105235Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.116396Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.289993Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332787110317896:2081] 1767494899734861 != 1767494899734864 2026-01-04T02:48:20.301018Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.313881Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.658178Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:20.670269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:20.743300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.750791Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:20.998847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:21.105287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:23.879944Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332807031845018:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:23.894047Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:24.091853Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.103499Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:24.111211Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:24.111281Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:24.139609Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:24.381349Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.595137Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:24.621965Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:24.648788Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.737382Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.804706Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.918672Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:28.004835Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332822364895477:2211];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:28.028377Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:28.038036Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:28.119309Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:28.120787Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332822364895300:2081] 1767494907960133 != 1767494907960136 2026-01-04T02:48:28.152608Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:28.152686Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:28.157547Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:28.260153Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 
2026-01-04T02:48:28.403300Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:28.437998Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:28.474669Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:28.481784Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:28.549256Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:28.602208Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:31.826459Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332841268638758:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:31.826509Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:31.971080Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:31.972389Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:31.972563Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591332841268638729:2081] 1767494911817732 != 1767494911817735 2026-01-04T02:48:31.987181Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:31.987258Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:32.014403Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connec ... 
emeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:45.118353Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:45.139458Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:45.225296Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:45.289981Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:45.634790Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:49.195931Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7591332918997834765:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:49.195993Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:49.211944Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:49.309049Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:49.314107Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7591332918997834741:2081] 1767494929194383 != 1767494929194386 2026-01-04T02:48:49.325884Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:49.337064Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:49.337163Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:49.356823Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:49.531232Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:49.602408Z node 8 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:49.609183Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:49.624250Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.690495Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.790485Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:53.654927Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7591332934419915066:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:53.655229Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:53.695020Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:53.812684Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:53.826742Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:53.826844Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:53.848452Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:53.979142Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:54.129362Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:54.136689Z node 9 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:54.147507Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:54.160816Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.273633Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.357768Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:57.885088Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591332950587307144:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:57.885254Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:57.914542Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:58.000837Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:58.000937Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:58.030953Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:58.037384Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:58.128502Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:58.329616Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:58.336723Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 
72057594046644480 2026-01-04T02:48:58.348106Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:58.353577Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:58.422777Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:58.482682Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TStorageTenantTest::LsLs [GOOD] >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] >> KqpPg::DuplicatedColumns+useSink [GOOD] >> KqpPg::DuplicatedColumns-useSink >> TStorageTenantTest::DeclareAndDefine [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_IncorrectDot2 [GOOD] Test command err: 2026-01-04T02:48:19.747248Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332788083979078:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.747333Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:20.087291Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.156393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.156623Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.214196Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.360430Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.376413Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.627890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:20.646034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:20.667137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:20.756833Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:20.777909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.996339Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:21.064700Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:23.747523Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332807677475907:2155];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:23.747708Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:23.770734Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:23.806409Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332807677475780:2081] 1767494903744216 != 1767494903744219 2026-01-04T02:48:23.907495Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:23.910509Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:23.910574Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:23.957872Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:23.961288Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:24.346855Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:24.358276Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:24.372775Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:24.380530Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.477166Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.493655Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.535627Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.749368Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:27.757477Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332822604086914:2272];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:27.757775Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:27.766263Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:27.865996Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:27.867794Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:27.867874Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:27.909966Z node 3 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332822604086651:2081] 1767494907660883 != 1767494907660886 2026-01-04T02:48:27.920270Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:27.923054Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:28.069887Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:28.200180Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:28.211950Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:28.269823Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:28.274313Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:28.355580Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:28.412975Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:31.354873Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332839122240147:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:31.355768Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:31.430596Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeSc ... 
irst called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:44.855356Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:44.941387Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:45.178241Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:48.766467Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7591332911657594960:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:48.767057Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:48.781185Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:48.900866Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:48.929874Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7591332911657594922:2081] 1767494928764183 != 1767494928764186 2026-01-04T02:48:48.933198Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:48.951682Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:48.951804Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:48.954260Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:49.180518Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:49.186018Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:49.188946Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:49.201603Z node 8 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:49.207014Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.300746Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.368181Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:53.702103Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7591332934455126831:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:53.702452Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:53.785911Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:53.869492Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:53.869602Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:53.890042Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7591332934455126726:2081] 1767494933686030 != 1767494933686033 2026-01-04T02:48:53.916870Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:53.921369Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:54.041898Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:54.207206Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:54.214610Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 
72057594046644480 2026-01-04T02:48:54.221894Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:54.229874Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.313138Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.466386Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.739798Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:58.780912Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591332958744858211:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:58.781381Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:58.851210Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:58.981771Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:58.981993Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:59.023065Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:59.034588Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:59.051742Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:59.447355Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:59.455172Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 
281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:59.470744Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:59.540387Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:59.597137Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:59.781794Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_CorrectDot [GOOD] Test command err: 2026-01-04T02:48:19.721631Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332791022382241:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.724922Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:19.751558Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:20.103485Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.106458Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.168276Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.174964Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.199275Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.397873Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.546161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:20.554760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:20.671103Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.744037Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:20.853014Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.940456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:23.590356Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332807525310803:2259];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:23.590688Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:23.664599Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:23.766117Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332807525310572:2081] 1767494903582288 != 1767494903582291 2026-01-04T02:48:23.768330Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:23.770014Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:23.770075Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:23.782880Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:23.905876Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.094540Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:24.100107Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:24.114210Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.200260Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.336671Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.570389Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:27.323028Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332822463609669:2171];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:27.323561Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:27.358235Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:27.458954Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:27.460245Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332822463609520:2081] 1767494907315278 != 1767494907315281 2026-01-04T02:48:27.493957Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:27.494038Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:27.500291Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:27.627799Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:27.718408Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:27.726222Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:27.732530Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:27.736884Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:27.815139Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:27.891736Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:31.318281Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332839528873222:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:31.319730Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:31.331737Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:31.441964Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591332839528873175:2081] 1767494911314244 != 1767494911314247 2026-01-04T02:48:31.464069Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:31.476267Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:31.476356Z node 4 :HIVE WARN: node_info.cpp:25: ... 
1474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:44.622310Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:44.682515Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:44.749981Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:48.694329Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7591332912049017136:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:48.694441Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:48.761942Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:48.804973Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:48.806749Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7591332912049017012:2081] 1767494928690273 != 1767494928690276 2026-01-04T02:48:48.836970Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:48.837059Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:48.846980Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:48.990184Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:49.111845Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:49.118761Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:49.128681Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:49.134403Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.214175Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.285864Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:53.361214Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7591332934984593987:2084];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:53.369646Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:53.399290Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:53.508378Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:53.510016Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7591332934984593930:2081] 1767494933344390 != 1767494933344393 2026-01-04T02:48:53.525340Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:53.525451Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:53.558544Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:53.578274Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:53.835265Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:53.842872Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:53.853776Z node 9 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:53.859097Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:53.934030Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:53.994922Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:58.283552Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591332958738525350:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:58.283616Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:58.345587Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:58.470054Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7591332958738525243:2081] 1767494938265138 != 1767494938265141 2026-01-04T02:48:58.478307Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:58.483461Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:58.483562Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:58.514310Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:58.605983Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:58.821545Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:58.829000Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at 
schemeshard: 72057594046644480 2026-01-04T02:48:58.840944Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:58.847189Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:58.934183Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:59.013640Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenSameShardLock [GOOD] Test command err: 2026-01-04T02:48:19.772900Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332787391587658:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.772983Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:20.279739Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.279832Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.410535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.470332Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.473500Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.479819Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332787391587633:2081] 1767494899772107 != 1767494899772110 2026-01-04T02:48:20.704314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:20.723845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:20.777761Z node 1 :KQP_PROXY 
WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.781072Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:20.793715Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:21.009295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:21.088264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:23.996897Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:24.050071Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:48:24.055699Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.190328Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:24.190410Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:24.205998Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332805130598394:2081] 1767494903922140 != 1767494903922143 2026-01-04T02:48:24.241679Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:24.268200Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:24.317407Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.683110Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:24.690398Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:24.708596Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.806468Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.871526Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.971110Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:28.035693Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332828963170638:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:28.049581Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:28.165972Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:28.236969Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:28.246165Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332824668203282:2081] 1767494907960990 != 1767494907960993 2026-01-04T02:48:28.276777Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:28.276861Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:28.302898Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:28.392154Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:28.543027Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:28.554019Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:28.578296Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:28.583001Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:28.656538Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:28.737973Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:29.049138Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:31.873514Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332840294038738:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:31.900139Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:31.973891Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:32.003814Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:32.012670Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:32.012761Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4 ... 
eration.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:44.906591Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:44.930630Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:44.940195Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:45.033829Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:45.099880Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:45.254097Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:48.566759Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7591332915334516916:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:48.566814Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:48.633332Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:48.676557Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:48.680191Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:48.680288Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:48.696734Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:48.860681Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:49.026445Z node 8 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:49.036264Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:49.045943Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:49.052865Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.128732Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.188483Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:53.345573Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7591332937320701183:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:53.345737Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:53.403871Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:53.501577Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:53.519508Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:53.519608Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:53.548641Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:53.565901Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:53.850381Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:53.856836Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:53.872948Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:53.954025Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.020318Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:58.257157Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591332955859105782:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:58.265178Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:58.294998Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:58.426358Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:58.430289Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:58.430400Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:58.469652Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:58.477891Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:58.755331Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:58.762660Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 
72057594046644480 2026-01-04T02:48:58.773630Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:58.779753Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:58.867399Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:58.938995Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] >> KqpPg::CreateTableSerialColumns-useSink [GOOD] >> KqpPg::DropIndex >> TLocksTest::BrokenLockUpdate [GOOD] >> TLocksTest::BrokenNullLock >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] >> TStorageTenantTest::GenericCases [GOOD] >> KqpPg::Insert_Serial-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText+useSink >> KqpPg::TypeCoercionBulkUpsert [GOOD] >> KqpPg::TypeCoercionInsert+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_EmptyKey [GOOD] Test command err: 2026-01-04T02:48:20.146565Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332795064708753:2149];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:20.146937Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:20.661884Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.738636Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.738732Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.811859Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.814171Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332795064708633:2081] 1767494900088350 != 1767494900088353 2026-01-04T02:48:20.837677Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2026-01-04T02:48:20.964211Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:21.106263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:21.112139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:21.154074Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:21.234456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:21.415589Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:21.509137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.278118Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332808745794716:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:24.289955Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:24.397564Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.561953Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332808745794693:2081] 1767494904261991 != 1767494904261994 2026-01-04T02:48:24.587797Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:24.602781Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.607213Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:48:24.607296Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:24.630005Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:24.855604Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:24.861610Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:24.876175Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.944790Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.991135Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:25.153952Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:28.213429Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:28.213867Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332827500829456:2198];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:28.214928Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:28.354069Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:28.355500Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:28.357165Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332827500829282:2081] 1767494908148743 != 1767494908148746 2026-01-04T02:48:28.370018Z node 3 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:28.370098Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:28.418601Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:28.567185Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:28.658963Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:28.674421Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:28.681536Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:28.693111Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:28.781530Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:28.838199Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:32.084800Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:32.090526Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332846231228270:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:32.090868Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:32.226101Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metad ... 
5.234219Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:45.255193Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:45.341332Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:45.409356Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.346973Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7591332916223382958:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:49.347042Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:49.384145Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:49.494527Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:49.494618Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:49.504791Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:49.508370Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7591332916223382929:2081] 1767494929345681 != 1767494929345684 2026-01-04T02:48:49.531516Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:49.577903Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:49.767011Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:49.791284Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.851649Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.933558Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.252241Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7591332941175643395:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:54.252298Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:54.289009Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:54.409894Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:54.451277Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:54.451377Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:54.455034Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:54.473980Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7591332941175643368:2081] 1767494934251403 != 1767494934251406 2026-01-04T02:48:54.486993Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:54.613653Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:54.807282Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:54.839959Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:54.851626Z node 9 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.934243Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.998127Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:59.117123Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591332959171618428:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:59.117225Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:59.137641Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:59.342820Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7591332959171618403:2081] 1767494939115653 != 1767494939115656 2026-01-04T02:48:59.342932Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:59.357070Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:59.359284Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:59.359385Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:59.366199Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:59.635575Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:59.711842Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:59.807491Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 
281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:59.819250Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:59.900072Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:59.985596Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:00.133459Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> KqpSinkTx::TIsolationSettingTest+IsOlap-UsePragma [GOOD] >> KqpSinkTx::TIsolationSettingTest-IsOlap+UsePragma >> Cdc::AddColumn [GOOD] >> Cdc::AddColumn_TopicAutoPartitioning >> KqpPg::InsertNoTargetColumns_Serial-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenDupLock [GOOD] Test command err: 2026-01-04T02:48:19.767440Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332789573775051:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.767556Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:20.132136Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.171554Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.171669Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.220798Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.287533Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332789573774921:2081] 1767494899747427 != 1767494899747430 2026-01-04T02:48:20.306393Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.407794Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.592701Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:20.709111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.773724Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:20.863900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.949053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:23.618194Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332804924729583:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:23.618276Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:23.641311Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:23.751229Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:23.760174Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:23.766058Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:23.790727Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:23.811115Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.067266Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:24.086245Z node 2 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:24.099236Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:24.104964Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.213219Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.280554Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:27.718599Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332822939580854:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:27.718659Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:27.759876Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:27.919603Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:27.924751Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332822939580829:2081] 1767494907717515 != 1767494907717518 2026-01-04T02:48:27.967197Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:27.967308Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:27.972484Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:28.013902Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:28.335637Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:48:28.354489Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:28.374068Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:28.382654Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:28.460211Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:28.559984Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:32.034120Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332839183545706:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:32.035042Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:32.079544Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:32.160734Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:32.192333Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:32.192415Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:32.216326Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:32.308079Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:32.441646Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: ... 
lf is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:45.174319Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:45.249922Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:45.343461Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:45.572413Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:49.422192Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:49.422365Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:48:49.549928Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:49.551278Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7591332916221153944:2081] 1767494929323123 != 1767494929323126 2026-01-04T02:48:49.578447Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:49.578547Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:49.595872Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:49.652689Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:49.855506Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:49.862243Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 
281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:49.881883Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.952468Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:50.013625Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.599015Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7591332941467764986:2184];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:54.599311Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:54.725930Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:54.742696Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:54.742792Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:54.757941Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7591332941467764829:2081] 1767494934575644 != 1767494934575647 2026-01-04T02:48:54.785399Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:54.798619Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:54.948578Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:55.073693Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:55.102703Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB 
first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:55.188850Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:55.250296Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:59.335892Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591332960800394603:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:59.336081Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:59.386556Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:59.517264Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:59.517376Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:59.545951Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7591332960800394575:2081] 1767494939325617 != 1767494939325620 2026-01-04T02:48:59.573595Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:59.592681Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:59.594217Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:59.893973Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:59.902347Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:59.922352Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:59.938503Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at 
schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:00.030255Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:00.097553Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:00.129554Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.346232Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_BrokenLockInf [GOOD] Test command err: 2026-01-04T02:48:19.814676Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332790609500617:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.814777Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:19.872828Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:20.385170Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.385291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.402596Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.411512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.435382Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332790609500498:2081] 1767494899793727 != 1767494899793730 2026-01-04T02:48:20.442555Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.692556Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.830123Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task 
executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:20.864686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:20.878624Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:20.896222Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:21.015001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:21.203979Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:21.303437Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:23.871137Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332805154011576:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:23.871186Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:23.977355Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.167040Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:24.170115Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.171225Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:24.171308Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:24.219917Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:24.441657Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, 
but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:24.447044Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:24.464876Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.516317Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:24.555443Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:24.652306Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:27.951078Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332822324419913:2064];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:27.951136Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:28.133909Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:28.192992Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:28.196869Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:28.220153Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:28.225915Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332822324419889:2081] 1767494907949226 != 1767494907949229 2026-01-04T02:48:28.237117Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:28.363746Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 
2026-01-04T02:48:28.475128Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:28.494247Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:28.541967Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:28.555355Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:28.677429Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:28.747825Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:31.966202Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332838630876846:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:31.966254Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:31.980737Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:32.123892Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:32.124355Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Fa ... 
tion, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:45.122948Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:45.133694Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:45.259595Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:45.333326Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:45.590033Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:49.297749Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7591332918144451112:2172];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:49.302709Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:49.330928Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:49.444675Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:49.458631Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:49.458731Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:49.479861Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:49.553887Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:49.740980Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:49.761690Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:49.767107Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.836790Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.941856Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.033320Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7591332937962274828:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:54.033385Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:54.067433Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:54.222050Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:54.225290Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7591332937962274801:2081] 1767494934032155 != 1767494934032158 2026-01-04T02:48:54.241270Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:54.241378Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:54.241961Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:54.247222Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:54.450598Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:54.582730Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:54.598387Z node 9 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:54.616259Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.698089Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.778384Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:59.133769Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591332960057253802:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:59.134105Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:59.192460Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:59.389062Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:59.389169Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:59.393995Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:59.413204Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7591332960057253762:2081] 1767494939112556 != 1767494939112559 2026-01-04T02:48:59.416306Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:59.481976Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:59.789675Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:59.796185Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at 
schemeshard: 72057594046644480 2026-01-04T02:48:59.816310Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:59.925400Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:00.005437Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:00.153594Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; >> KqpPg::InsertValuesFromTableWithDefault+useSink |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::LsLs [GOOD] Test command err: 2026-01-04T02:48:59.735117Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332962965977888:2210];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:59.735189Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:59.760108Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:59.813722Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332961194947487:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:59.813830Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:00.164184Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.171669Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.294383Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:00.294506Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2026-01-04T02:49:00.302331Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:00.302432Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:00.315658Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:00.323177Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:49:00.327955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:00.406454Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:00.443488Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.453827Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.746158Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:00.833921Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:00.864045Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7591332961194947620:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:00.864138Z node 2 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [2:7591332961194947620:2107], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2026-01-04T02:49:00.864365Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][2:7591332965489914964:2119][/dc-1/.metadata/initialization/migrations] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:49:00.867911Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591332962965977607:2052] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [2:7591332965489914968:2119] 2026-01-04T02:49:00.867982Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591332962965977607:2052] Subscribe: subscriber# [2:7591332965489914968:2119], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:49:00.868060Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591332962965977613:2058] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# 
[2:7591332965489914970:2119] 2026-01-04T02:49:00.868078Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591332962965977613:2058] Subscribe: subscriber# [2:7591332965489914970:2119], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:49:00.868987Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591332962965977610:2055] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [2:7591332965489914969:2119] 2026-01-04T02:49:00.869063Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591332962965977610:2055] Subscribe: subscriber# [2:7591332965489914969:2119], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:49:00.870001Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591332962965977962:2145] Handle TEvNavigate describe path dc-1 2026-01-04T02:49:00.870067Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591332967260945748:2464] HANDLE EvNavigateScheme dc-1 2026-01-04T02:49:00.870155Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591332962965977982:2149], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:00.870238Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591332967260945478:2285][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591332962965977982:2149], cookie# 1 2026-01-04T02:49:00.870312Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332967260945510:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332967260945507:2285], cookie# 1 2026-01-04T02:49:00.870329Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332967260945511:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332967260945508:2285], cookie# 1 2026-01-04T02:49:00.870344Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332967260945512:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332967260945509:2285], cookie# 1 2026-01-04T02:49:00.870376Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332962965977607:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332967260945510:2285], cookie# 1 2026-01-04T02:49:00.870401Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332962965977610:2055] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332967260945511:2285], cookie# 1 2026-01-04T02:49:00.870437Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332962965977613:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332967260945512:2285], cookie# 1 2026-01-04T02:49:00.870486Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332967260945510:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332962965977607:2052], cookie# 1 
2026-01-04T02:49:00.870080Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:7591332965489914968:2119][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7591332962965977607:2052] 2026-01-04T02:49:00.870152Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:7591332965489914970:2119][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7591332962965977613:2058] 2026-01-04T02:49:00.870185Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][2:7591332965489914969:2119][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [1:7591332962965977610:2055] 2026-01-04T02:49:00.870261Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7591332965489914964:2119][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7591332965489914965:2119] 2026-01-04T02:49:00.870337Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7591332965489914964:2119][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7591332965489914967:2119] 2026-01-04T02:49:00.870386Z node 2 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][2:7591332965489914964:2119][/dc-1/.metadata/initialization/migrations] Set up state: owner# [2:7591332961194947620:2107], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:00.870409Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7591332965489914964:2119][/dc-1/.metadata/initialization/migrations] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/initialization/migrations Version: 0 }: sender# [2:7591332965489914966:2119] 2026-01-04T02:49:00.870505Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332967260945511:2285][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332962965977610:2055], cookie# 1 2026-01-04T02:49:00.870451Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7591332965489914964:2119][/dc-1/.metadata/initialization/migrations] Ignore empty state: owner# [2:7591332961194947620:2107], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:00.870544Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: ... 
RIBER DEBUG: subscriber.cpp:814: [main][2:7591332982669784179:2124][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [2:7591332982669784183:2124] 2026-01-04T02:49:04.801873Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7591332982669784179:2124][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [2:7591332961194947620:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:04.801896Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7591332982669784179:2124][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [2:7591332982669784184:2124] 2026-01-04T02:49:04.801924Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7591332982669784179:2124][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [2:7591332961194947620:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:04.802010Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7591332982669784181:2126][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7591332982669784194:2126] 2026-01-04T02:49:04.802067Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7591332982669784181:2126][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [2:7591332961194947620:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:04.802075Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7591332982669784180:2125][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [2:7591332982669784188:2125] 2026-01-04T02:49:04.802094Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7591332982669784181:2126][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7591332982669784195:2126] 2026-01-04T02:49:04.802105Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7591332982669784180:2125][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [2:7591332961194947620:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:04.802125Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7591332982669784180:2125][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify 
{ Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [2:7591332982669784189:2125] 2026-01-04T02:49:04.802153Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7591332982669784181:2126][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [2:7591332961194947620:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:04.802156Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7591332982669784180:2125][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [2:7591332961194947620:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:04.802181Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7591332982669784181:2126][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [2:7591332982669784196:2126] 2026-01-04T02:49:04.802187Z node 2 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][2:7591332982669784180:2125][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [2:7591332982669784190:2125] 2026-01-04T02:49:04.802206Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7591332982669784181:2126][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [2:7591332961194947620:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:04.802215Z node 2 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][2:7591332982669784180:2125][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [2:7591332961194947620:2107], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:04.813935Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591332961194947487:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:04.814043Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:49:04.830579Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7591332961194947620:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:04.830715Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7591332961194947620:2107], cacheItem# { Subscriber: { Subscriber: [2:7591332982669784179:2124] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:04.830763Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7591332961194947620:2107], cacheItem# { Subscriber: { Subscriber: [2:7591332982669784180:2125] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:04.830885Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7591332982669784261:2131], recipient# [2:7591332982669784173:2296], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:04.834004Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:7591332982669784173:2296], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:49:04.916642Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7591332961194947620:2107], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:04.916762Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7591332961194947620:2107], cacheItem# { Subscriber: { Subscriber: [2:7591332965489914964:2119] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:04.916835Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7591332982669784264:2132], recipient# [2:7591332982669784263:2302], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateSolomonInsideSubDomain [GOOD] Test command err: 2026-01-04T02:48:59.659515Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332959174684601:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:59.659576Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:59.900275Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.127707Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:00.127855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:00.154011Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:00.196697Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:00.207699Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: 
PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.483115Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591332959174684758:2136] Handle TEvNavigate describe path dc-1 2026-01-04T02:49:00.483159Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591332963469652524:2448] HANDLE EvNavigateScheme dc-1 2026-01-04T02:49:00.483267Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591332959174684764:2138], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:00.483350Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591332959174684960:2269][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591332959174684764:2138], cookie# 1 2026-01-04T02:49:00.484938Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332959174684969:2269][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332959174684966:2269], cookie# 1 2026-01-04T02:49:00.484965Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332959174684973:2269][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332959174684967:2269], cookie# 1 2026-01-04T02:49:00.484984Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332959174684974:2269][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332959174684968:2269], cookie# 1 2026-01-04T02:49:00.485019Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332959174684414:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332959174684969:2269], cookie# 1 2026-01-04T02:49:00.485043Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332959174684417:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332959174684973:2269], cookie# 1 2026-01-04T02:49:00.485092Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332959174684420:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332959174684974:2269], cookie# 1 2026-01-04T02:49:00.485154Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332959174684969:2269][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332959174684414:2051], cookie# 1 2026-01-04T02:49:00.485194Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332959174684973:2269][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332959174684417:2054], cookie# 1 2026-01-04T02:49:00.485212Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332959174684974:2269][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332959174684420:2057], cookie# 1 2026-01-04T02:49:00.485249Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332959174684960:2269][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332959174684966:2269], cookie# 1 
2026-01-04T02:49:00.485270Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591332959174684960:2269][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:49:00.485285Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332959174684960:2269][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332959174684967:2269], cookie# 1 2026-01-04T02:49:00.485306Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591332959174684960:2269][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:49:00.485328Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332959174684960:2269][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332959174684968:2269], cookie# 1 2026-01-04T02:49:00.485340Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591332959174684960:2269][/dc-1] Sync cookie mismatch: sender# [1:7591332959174684968:2269], cookie# 1, current cookie# 0 2026-01-04T02:49:00.485385Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591332959174684764:2138], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:49:00.491972Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591332959174684764:2138], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591332959174684960:2269] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:49:00.492132Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591332959174684764:2138], cacheItem# { Subscriber: { Subscriber: [1:7591332959174684960:2269] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:49:00.496975Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591332963469652525:2449], recipient# [1:7591332963469652524:2448], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } 
ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:49:00.497062Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591332963469652524:2448] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:49:00.532857Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591332963469652524:2448] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:49:00.536183Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591332963469652524:2448] Handle TEvDescribeSchemeResult Forward to# [1:7591332963469652523:2447] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:49:00.542171Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591332959174684758:2136] Handle TEvProposeTransaction 2026-01-04T02:49:00.542194Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591332959174684758:2136] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:49:00.542253Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591332959174684758:2136] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7591332963469652530:2453] 2026-01-04T02:49:00.653243Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:7591332963469652530:2453] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "dc-1" StoragePools { Name: "" Kind: "storage-pool-number-2" } StoragePools { Name: "" Kind: "storage-pool-number-1" } StoragePools { Name: "/dc-1:test" Kind: "test" } } } } UserToken: "" PeerName: "" 2026-01-04T02:49:00.653286Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# ... 
-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [1:7591332959174684417:2054] 2026-01-04T02:49:03.388822Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7591332976354555113:2963][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [1:7591332959174684420:2057] 2026-01-04T02:49:03.388857Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591332976354555106:2963][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [1:7591332976354555108:2963] 2026-01-04T02:49:03.388908Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591332976354555106:2963][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [1:7591332976354555109:2963] 2026-01-04T02:49:03.388933Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591332976354555106:2963][/dc-1/.metadata/workload_manager/delayed_requests] Set up state: owner# [1:7591332959174684764:2138], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:03.388953Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591332976354555106:2963][/dc-1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [1:7591332976354555110:2963] 2026-01-04T02:49:03.388973Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7591332976354555106:2963][/dc-1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [1:7591332959174684764:2138], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:03.388991Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7591332976354555117:2964][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7591332959174684414:2051] 2026-01-04T02:49:03.389008Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7591332976354555118:2964][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7591332959174684417:2054] 2026-01-04T02:49:03.389038Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7591332976354555119:2964][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7591332959174684420:2057] 2026-01-04T02:49:03.389063Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591332976354555107:2964][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7591332976354555114:2964] 2026-01-04T02:49:03.389085Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: 
[main][1:7591332976354555107:2964][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7591332976354555115:2964] 2026-01-04T02:49:03.389107Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591332976354555107:2964][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [1:7591332959174684764:2138], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:03.389152Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591332976354555107:2964][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7591332976354555116:2964] 2026-01-04T02:49:03.389173Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7591332976354555107:2964][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [1:7591332959174684764:2138], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:03.389195Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591332959174684414:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7591332976354555111:2963] 2026-01-04T02:49:03.389208Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591332959174684414:2051] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7591332976354555117:2964] 2026-01-04T02:49:03.389220Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591332959174684417:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7591332976354555112:2963] 2026-01-04T02:49:03.389242Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591332959174684417:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7591332976354555118:2964] 2026-01-04T02:49:03.389262Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591332959174684420:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7591332976354555113:2963] 2026-01-04T02:49:03.389272Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591332959174684420:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7591332976354555119:2964] 2026-01-04T02:49:03.389316Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591332959174684764:2138], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2026-01-04T02:49:03.389364Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591332959174684764:2138], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7591332976354555106:2963] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:49:03.389449Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591332959174684764:2138], cacheItem# { Subscriber: { Subscriber: [1:7591332976354555106:2963] DomainOwnerId: 
72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:03.389474Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591332959174684764:2138], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-01-04T02:49:03.389523Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591332959174684764:2138], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7591332976354555107:2964] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:49:03.389585Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591332959174684764:2138], cacheItem# { Subscriber: { Subscriber: [1:7591332976354555107:2964] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:03.389663Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591332976354555120:2965], recipient# [1:7591332976354555103:2312], result# { ErrorCount: 2 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:03.886075Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591332970421633606:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:03.886210Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591332970421633606:2109], cacheItem# { Subscriber: { 
Subscriber: [3:7591332974716601093:2218] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:03.886286Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591332979011568543:2314], recipient# [3:7591332979011568542:2307], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateTableInsideSubDomain2 [GOOD] Test command err: 2026-01-04T02:48:59.666701Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332959356906809:2265];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:59.666847Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:59.705353Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:49:00.095488Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.159014Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:00.162126Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:00.184512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:00.223650Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:00.372312Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.510410Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591332959356906819:2120] Handle TEvNavigate describe path dc-1 2026-01-04T02:49:00.510479Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591332963651874608:2443] HANDLE EvNavigateScheme dc-1 2026-01-04T02:49:00.510650Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591332959356906827:2123], 
request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:00.510732Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591332959356907121:2314][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591332959356906827:2123], cookie# 1 2026-01-04T02:49:00.512207Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332963651874457:2314][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332963651874454:2314], cookie# 1 2026-01-04T02:49:00.512246Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332963651874458:2314][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332963651874455:2314], cookie# 1 2026-01-04T02:49:00.512255Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332963651874459:2314][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332963651874456:2314], cookie# 1 2026-01-04T02:49:00.512280Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332959356906502:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332963651874457:2314], cookie# 1 2026-01-04T02:49:00.512300Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332959356906505:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332963651874458:2314], cookie# 1 2026-01-04T02:49:00.512311Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332959356906508:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332963651874459:2314], cookie# 1 2026-01-04T02:49:00.512342Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332963651874457:2314][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332959356906502:2051], cookie# 1 2026-01-04T02:49:00.512351Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332963651874458:2314][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332959356906505:2054], cookie# 1 2026-01-04T02:49:00.512370Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332963651874459:2314][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332959356906508:2057], cookie# 1 2026-01-04T02:49:00.512412Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332959356907121:2314][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332963651874454:2314], cookie# 1 2026-01-04T02:49:00.512427Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591332959356907121:2314][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:49:00.512440Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332959356907121:2314][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332963651874455:2314], cookie# 1 
2026-01-04T02:49:00.512454Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591332959356907121:2314][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:49:00.512470Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332959356907121:2314][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332963651874456:2314], cookie# 1 2026-01-04T02:49:00.512481Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591332959356907121:2314][/dc-1] Sync cookie mismatch: sender# [1:7591332963651874456:2314], cookie# 1, current cookie# 0 2026-01-04T02:49:00.512542Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591332959356906827:2123], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:49:00.519857Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591332959356906827:2123], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591332959356907121:2314] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:49:00.526073Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591332959356906827:2123], cacheItem# { Subscriber: { Subscriber: [1:7591332959356907121:2314] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:49:00.528352Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591332963651874609:2444], recipient# [1:7591332963651874608:2443], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:49:00.528417Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591332963651874608:2443] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:49:00.611268Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591332963651874608:2443] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:49:00.628886Z node 1 :TX_PROXY 
DEBUG: describe.cpp:446: Actor# [1:7591332963651874608:2443] Handle TEvDescribeSchemeResult Forward to# [1:7591332963651874607:2442] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:49:00.642624Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591332959356906819:2120] Handle TEvProposeTransaction 2026-01-04T02:49:00.642655Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591332959356906819:2120] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:49:00.642719Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591332959356906819:2120] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7591332963651874614:2448] 2026-01-04T02:49:00.662209Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:00.670377Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:759133 ... 
842][/dc-1/USER_0/SimpleTable] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 3 Partial: 0 Cluster State: { } }: sender# [1:7591332959356906505:2054], cookie# 2 2026-01-04T02:49:03.467488Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332976536777044:2842][/dc-1/USER_0/SimpleTable] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 3 Partial: 0 Cluster State: { } }: sender# [1:7591332959356906508:2057], cookie# 2 2026-01-04T02:49:03.467520Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332976536777038:2842][/dc-1/USER_0/SimpleTable] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 3 Partial: 0 Cluster State: { } }: sender# [1:7591332976536777039:2842], cookie# 2 2026-01-04T02:49:03.467538Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591332976536777038:2842][/dc-1/USER_0/SimpleTable] Sync is in progress: cookie# 2, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:49:03.467766Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332976536777038:2842][/dc-1/USER_0/SimpleTable] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 3 Partial: 0 Cluster State: { } }: sender# [1:7591332976536777040:2842], cookie# 2 2026-01-04T02:49:03.467810Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591332976536777038:2842][/dc-1/USER_0/SimpleTable] Sync is done in the ring group: cookie# 2, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:49:03.467838Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332976536777038:2842][/dc-1/USER_0/SimpleTable] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 3 Partial: 0 Cluster State: { } }: sender# [1:7591332976536777041:2842], cookie# 2 2026-01-04T02:49:03.467852Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591332976536777038:2842][/dc-1/USER_0/SimpleTable] Sync cookie mismatch: sender# [1:7591332976536777041:2842], cookie# 2, current cookie# 0 2026-01-04T02:49:03.467895Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591332959356906827:2123], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/SimpleTable PathId: Partial: 0 } 2026-01-04T02:49:03.467963Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591332959356906827:2123], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1/USER_0/SimpleTable PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591332976536777038:2842] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1767494943350 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, by pathId# nullptr 2026-01-04T02:49:03.468042Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591332959356906827:2123], cacheItem# { Subscriber: { Subscriber: [1:7591332976536777038:2842] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 2 } Filled: 1 Status: StatusSuccess Kind: 3 TableKind: 1 Created: 1 CreateStep: 1767494943350 PathId: [OwnerId: 72057594046644480, LocalPathId: 3] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 1 }, entry# { Path: dc-1/USER_0/SimpleTable TableId: 
[18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 2 IsSync: true Partial: 0 } 2026-01-04T02:49:03.468230Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591332976536777048:2846], recipient# [1:7591332976536777047:2845], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/SimpleTable TableId: [72057594046644480:3:1] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindTable DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 2] Params { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:49:03.468259Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591332976536777047:2845] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:49:03.468307Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591332976536777047:2845] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/dc-1/USER_0/SimpleTable" Options { ShowPrivateTable: true } 2026-01-04T02:49:03.470966Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591332976536777047:2845] Handle TEvDescribeSchemeResult Forward to# [1:7591332976536777046:2844] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 52 Record# Status: StatusSuccess Path: "/dc-1/USER_0/SimpleTable" PathDescription { Self { Name: "SimpleTable" PathId: 3 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715660 CreateStep: 1767494943350 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SimpleTable" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint64" TypeId: 4 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false 
BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } FollowerCount: 2 PartitioningPolicy { MinPartitionsCount: 2 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186224037888 Coordinators: 72075186224037889 TimeCastBucketsPerMediator: 2 Mediators: 72075186224037890 Mediators: 72075186224037891 } DomainKey { SchemeShard: 72057594046644480 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046644480 2026-01-04T02:49:04.211379Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591332974591131242:2109], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: 
KindUnknown DomainInfo }] } 2026-01-04T02:49:04.211508Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591332974591131242:2109], cacheItem# { Subscriber: { Subscriber: [3:7591332978886098763:2237] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:04.211625Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591332983181066263:2362], recipient# [3:7591332983181066262:2313], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::DeclareAndDefine [GOOD] Test command err: 2026-01-04T02:48:59.611091Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332962358545642:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:59.611128Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:59.899391Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.068997Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.151218Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:00.151323Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:00.182776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:00.248247Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:00.457786Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591332962358545788:2122] Handle TEvNavigate describe path dc-1 2026-01-04T02:49:00.457862Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591332966653513556:2443] HANDLE EvNavigateScheme dc-1 2026-01-04T02:49:00.457959Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591332962358545791:2124], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 
Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:00.458035Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591332962358546021:2271][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591332962358545791:2124], cookie# 1 2026-01-04T02:49:00.459812Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332962358546036:2271][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332962358546033:2271], cookie# 1 2026-01-04T02:49:00.459849Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332962358546037:2271][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332962358546034:2271], cookie# 1 2026-01-04T02:49:00.459864Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332962358546038:2271][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332962358546035:2271], cookie# 1 2026-01-04T02:49:00.459891Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332962358545473:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332962358546038:2271], cookie# 1 2026-01-04T02:49:00.459936Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332962358546038:2271][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332962358545473:2056], cookie# 1 2026-01-04T02:49:00.459972Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332962358546021:2271][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332962358546035:2271], cookie# 1 2026-01-04T02:49:00.460015Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591332962358546021:2271][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:49:00.460036Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332962358545467:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332962358546036:2271], cookie# 1 2026-01-04T02:49:00.460052Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332962358545470:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332962358546037:2271], cookie# 1 2026-01-04T02:49:00.460098Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332962358546036:2271][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332962358545467:2050], cookie# 1 2026-01-04T02:49:00.460125Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332962358546037:2271][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332962358545470:2053], cookie# 1 2026-01-04T02:49:00.460158Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332962358546021:2271][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332962358546033:2271], cookie# 1 2026-01-04T02:49:00.460186Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: 
subscriber.cpp:999: [main][1:7591332962358546021:2271][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:49:00.460213Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332962358546021:2271][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332962358546034:2271], cookie# 1 2026-01-04T02:49:00.460224Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591332962358546021:2271][/dc-1] Sync cookie mismatch: sender# [1:7591332962358546034:2271], cookie# 1, current cookie# 0 2026-01-04T02:49:00.460263Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591332962358545791:2124], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:49:00.474272Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591332962358545791:2124], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591332962358546021:2271] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:49:00.474428Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591332962358545791:2124], cacheItem# { Subscriber: { Subscriber: [1:7591332962358546021:2271] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:49:00.478921Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591332966653513557:2444], recipient# [1:7591332966653513556:2443], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:49:00.479017Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591332966653513556:2443] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:49:00.523825Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591332966653513556:2443] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:49:00.527049Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591332966653513556:2443] Handle 
TEvDescribeSchemeResult Forward to# [1:7591332966653513555:2442] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:49:00.535013Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591332962358545788:2122] Handle TEvProposeTransaction 2026-01-04T02:49:00.535043Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591332962358545788:2122] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:49:00.536979Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591332962358545788:2122] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7591332966653513563:2449] 2026-01-04T02:49:00.614038Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591332962358545791:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/script_executions TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/script_execution_leases ... 
tion: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2026-01-04T02:49:03.869597Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591332962358545467:2050] Subscribe: subscriber# [1:7591332979538416278:3037], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:49:03.869625Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591332962358545470:2053] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [1:7591332979538416279:3037] 2026-01-04T02:49:03.869632Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591332962358545470:2053] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2026-01-04T02:49:03.869650Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591332962358545470:2053] Subscribe: subscriber# [1:7591332979538416279:3037], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:49:03.869690Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591332962358545473:2056] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers DomainOwnerId: 72057594046644480 }: sender# [1:7591332979538416280:3037] 2026-01-04T02:49:03.869697Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591332962358545473:2056] Upsert description: path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers 2026-01-04T02:49:03.869716Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591332962358545473:2056] Subscribe: subscriber# [1:7591332979538416280:3037], path# /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:49:03.869745Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7591332979538416278:3037][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7591332962358545467:2050] 2026-01-04T02:49:03.869763Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7591332979538416279:3037][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7591332962358545470:2053] 2026-01-04T02:49:03.869781Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7591332979538416280:3037][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7591332962358545473:2056] 2026-01-04T02:49:03.869921Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591332962358545467:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7591332979538416278:3037] 2026-01-04T02:49:03.869967Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591332962358545470:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7591332979538416279:3037] 2026-01-04T02:49:03.869985Z 
node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591332962358545473:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7591332979538416280:3037] 2026-01-04T02:49:03.870165Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591332979538416273:3037][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7591332979538416275:3037] 2026-01-04T02:49:03.870208Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591332979538416273:3037][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7591332979538416276:3037] 2026-01-04T02:49:03.870233Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591332979538416273:3037][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Set up state: owner# [1:7591332962358545791:2124], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:03.870270Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591332979538416273:3037][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers Version: 0 }: sender# [1:7591332979538416277:3037] 2026-01-04T02:49:03.870290Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7591332979538416273:3037][/dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers] Ignore empty state: owner# [1:7591332962358545791:2124], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:03.870346Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591332962358545791:2124], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 } 2026-01-04T02:49:03.870398Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591332962358545791:2124], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7591332979538416273:3037] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:49:03.870459Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591332962358545791:2124], cacheItem# { Subscriber: { Subscriber: [1:7591332979538416273:3037] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false 
SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:03.870519Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591332979538416281:3039], recipient# [1:7591332979538416257:2328], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:04.615019Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332962358545642:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:04.615103Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:49:04.660203Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591332962358545791:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:04.660320Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591332962358545791:2124], cacheItem# { Subscriber: { Subscriber: [1:7591332966653513583:2453] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:04.660419Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591332983833383587:3045], recipient# [1:7591332983833383586:2330], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:04.888769Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591332962358545791:2124], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:04.888915Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591332962358545791:2124], cacheItem# { Subscriber: { 
Subscriber: [1:7591332979538416273:3037] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:04.889001Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591332983833383601:3046], recipient# [1:7591332983833383600:2331], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolBeforeDroppingTablet [GOOD] Test command err: 2026-01-04T02:48:59.621125Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332961145117480:2080];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:59.621177Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:59.724428Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:49:00.086248Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.150825Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:00.150980Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:00.160455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:00.216412Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:00.299360Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.464614Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591332961145117693:2119] Handle TEvNavigate describe path dc-1 2026-01-04T02:49:00.464663Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591332965440085487:2450] HANDLE EvNavigateScheme dc-1 2026-01-04T02:49:00.464817Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle 
TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591332961145117717:2133], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:00.464911Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591332961145117974:2306][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591332961145117717:2133], cookie# 1 2026-01-04T02:49:00.467018Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332965440085310:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332965440085307:2306], cookie# 1 2026-01-04T02:49:00.467065Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332965440085311:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332965440085308:2306], cookie# 1 2026-01-04T02:49:00.467079Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332965440085312:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332965440085309:2306], cookie# 1 2026-01-04T02:49:00.467113Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332961145117381:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332965440085311:2306], cookie# 1 2026-01-04T02:49:00.467157Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332961145117384:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332965440085312:2306], cookie# 1 2026-01-04T02:49:00.467221Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332965440085311:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332961145117381:2054], cookie# 1 2026-01-04T02:49:00.467253Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332965440085312:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332961145117384:2057], cookie# 1 2026-01-04T02:49:00.467305Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332961145117974:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332965440085308:2306], cookie# 1 2026-01-04T02:49:00.467331Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591332961145117974:2306][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:49:00.467349Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332961145117974:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332965440085309:2306], cookie# 1 2026-01-04T02:49:00.467370Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591332961145117974:2306][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:49:00.467449Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591332961145117717:2133], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { 
Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:49:00.467993Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332961145117378:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332965440085310:2306], cookie# 1 2026-01-04T02:49:00.468061Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332965440085310:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332961145117378:2051], cookie# 1 2026-01-04T02:49:00.468110Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332961145117974:2306][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332965440085307:2306], cookie# 1 2026-01-04T02:49:00.468125Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591332961145117974:2306][/dc-1] Sync cookie mismatch: sender# [1:7591332965440085307:2306], cookie# 1, current cookie# 0 2026-01-04T02:49:00.474885Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591332961145117717:2133], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591332961145117974:2306] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:49:00.475007Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591332961145117717:2133], cacheItem# { Subscriber: { Subscriber: [1:7591332961145117974:2306] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:49:00.485517Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591332965440085488:2451], recipient# [1:7591332965440085487:2450], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:49:00.485603Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591332965440085487:2450] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:49:00.532425Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591332965440085487:2450] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { 
ShowPrivateTable: true } 2026-01-04T02:49:00.536406Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591332965440085487:2450] Handle TEvDescribeSchemeResult Forward to# [1:7591332965440085486:2449] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:49:00.538839Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591332961145117693:2119] Handle TEvProposeTransaction 2026-01-04T02:49:00.538867Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591332961145117693:2119] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:49:00.538926Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591332961145117693:2119] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:7591332965440085494:2456] 2026-01-04T02:49:00.622970Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591332961145117717:2133], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [184 ... 
NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7591332969799757438:2246] 2026-01-04T02:49:01.545870Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591332961145117381:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7591332965504789664:2112] 2026-01-04T02:49:01.545901Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591332961145117381:2054] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7591332969799757439:2246] 2026-01-04T02:49:01.545929Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591332961145117384:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7591332965504789665:2112] 2026-01-04T02:49:01.545950Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591332961145117384:2057] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 18446744073709551615 }: sender# [3:7591332969799757440:2246] 2026-01-04T02:49:01.549614Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591332961145117717:2133], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Strong: 1 } 2026-01-04T02:49:01.555225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 4, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:49:01.555310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:49:01.555324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:49:01.555333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:1 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:49:01.555342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:49:01.555438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715660 2026-01-04T02:49:01.555466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715660 2026-01-04T02:49:01.555732Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591332961145117717:2133], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/USER_0 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7591332965440085748:2648] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 3 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1767494940855 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [1:7591332965440085748:2648] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 3 } Filled: 1 Status: StatusSuccess Kind: 8 TableKind: 0 Created: 1 CreateStep: 1767494940855 PathId: [OwnerId: 72057594046644480, LocalPathId: 2] DomainId: [OwnerId: 72057594046644480, LocalPathId: 2] IsPrivatePath: 0 
IsVirtual: 0 SchemaVersion: 0 } 2026-01-04T02:49:01.559155Z node 1 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2026-01-04T02:49:01.564282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-04T02:49:01.564659Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 5 2026-01-04T02:49:01.564979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-04T02:49:01.565168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 4 2026-01-04T02:49:01.565306Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 1, at schemeshard: 72057594046644480 2026-01-04T02:49:01.565477Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 3 2026-01-04T02:49:01.565605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-04T02:49:01.565755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 2 2026-01-04T02:49:01.565939Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-04T02:49:01.565981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2026-01-04T02:49:01.566130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046644480, LocalPathId: 2] was 1 2026-01-04T02:49:01.566270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-04T02:49:01.566292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 2], at schemeshard: 72057594046644480 2026-01-04T02:49:01.566332Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-04T02:49:01.566916Z node 1 :HIVE WARN: tx__block_storage_result.cpp:34: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037889 2026-01-04T02:49:01.567892Z node 1 :HIVE WARN: tx__block_storage_result.cpp:34: HIVE#72057594037968897 
THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037890 2026-01-04T02:49:01.573048Z node 1 :HIVE WARN: tx__block_storage_result.cpp:34: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037891 2026-01-04T02:49:01.573542Z node 1 :HIVE WARN: tx__block_storage_result.cpp:34: HIVE#72057594037968897 THive::TTxBlockStorageResult Complete status was NO_GROUP for TabletId 72075186224037888 2026-01-04T02:49:01.581141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-04T02:49:01.581173Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-04T02:49:01.581221Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-04T02:49:01.581230Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-04T02:49:01.581247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:1 2026-01-04T02:49:01.581254Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-01-04T02:49:01.581267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-04T02:49:01.581292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-04T02:49:01.581327Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-04T02:49:01.583379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 TabletID: 72075186224037888 Status: OK Info { TabletID: 72075186224037888 Channels { Channel: 0 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } Channels { Channel: 1 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } Channels { Channel: 2 ChannelType: 0 History { FromGeneration: 0 GroupID: 2181038081 } StoragePool: "name_USER_0_kind_storage-pool-number-2" } TabletType: Coordinator Version: 1 TenantIdOwner: 72057594046644480 TenantIdLocalId: 2 } 2026-01-04T02:49:01.590278Z node 1 :HIVE WARN: hive_impl.cpp:2006: HIVE#72057594037968897 Can't find the tablet from RequestHiveInfo(TabletID=72075186224037888) 2026-01-04T02:49:01.628281Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591332961145117717:2133], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:01.628418Z node 1 
:TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591332961145117717:2133], cacheItem# { Subscriber: { Subscriber: [1:7591332965440085496:2457] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:01.628525Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591332969735053370:2891], recipient# [1:7591332969735053369:2322], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::GenericCases [GOOD] Test command err: 2026-01-04T02:48:59.649491Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332960305519580:2233];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:59.649955Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:00.005956Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.200839Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:00.200950Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:00.213085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:00.324373Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:00.338155Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.525823Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591332960305519651:2119] Handle TEvNavigate describe path dc-1 2026-01-04T02:49:00.525880Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591332964600487446:2447] HANDLE EvNavigateScheme dc-1 2026-01-04T02:49:00.526414Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591332960305519657:2121], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true 
ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:00.526534Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591332960305519803:2206][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591332960305519657:2121], cookie# 1 2026-01-04T02:49:00.538040Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332960305519815:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332960305519811:2206], cookie# 1 2026-01-04T02:49:00.538132Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332960305519340:2050] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332960305519815:2206], cookie# 1 2026-01-04T02:49:00.538207Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332960305519816:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332960305519812:2206], cookie# 1 2026-01-04T02:49:00.538235Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332960305519818:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332960305519813:2206], cookie# 1 2026-01-04T02:49:00.538288Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332960305519815:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332960305519340:2050], cookie# 1 2026-01-04T02:49:00.538359Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332960305519803:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332960305519811:2206], cookie# 1 2026-01-04T02:49:00.538406Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591332960305519803:2206][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:49:00.538437Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332960305519343:2053] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332960305519816:2206], cookie# 1 2026-01-04T02:49:00.538458Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332960305519346:2056] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332960305519818:2206], cookie# 1 2026-01-04T02:49:00.538487Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332960305519816:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332960305519343:2053], cookie# 1 2026-01-04T02:49:00.538517Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332960305519818:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332960305519346:2056], cookie# 1 2026-01-04T02:49:00.538543Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332960305519803:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332960305519812:2206], cookie# 1 2026-01-04T02:49:00.540414Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591332960305519803:2206][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 
0, partial# 0 2026-01-04T02:49:00.540652Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591332960305519657:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:49:00.540950Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332960305519803:2206][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332960305519813:2206], cookie# 1 2026-01-04T02:49:00.540978Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591332960305519803:2206][/dc-1] Sync cookie mismatch: sender# [1:7591332960305519813:2206], cookie# 1, current cookie# 0 2026-01-04T02:49:00.554820Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591332960305519657:2121], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591332960305519803:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:49:00.554978Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591332960305519657:2121], cacheItem# { Subscriber: { Subscriber: [1:7591332960305519803:2206] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:49:00.557575Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591332964600487448:2449], recipient# [1:7591332964600487446:2447], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:49:00.557635Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591332964600487446:2447] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:49:00.632698Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591332964600487446:2447] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:49:00.636557Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591332964600487446:2447] Handle TEvDescribeSchemeResult Forward to# [1:7591332964600487445:2446] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 
Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/dc-1" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046644480 2026-01-04T02:49:00.641512Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:7591332960305519651:2119] Handle TEvProposeTransaction 2026-01-04T02:49:00.641553Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:7591332960305519651:2119] TxId# 281474976710657 ProcessProposeTransaction 2026-01-04T02:49:00.641617Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:7591332960305519651:2119] Cookie# 0 userReqId# "" txid# 281474976710657 SEND to# [1:7591332964600487453:2453] 2026-01-04T02:49:00.650725Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591332960305519657:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:00.650799Z nod ... 
04T02:49:04.137180Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:359: [replica][1:7591332981780357579:3134][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7591332960305519346:2056] 2026-01-04T02:49:04.137208Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591332981780357561:3134][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7591332981780357574:3134] 2026-01-04T02:49:04.137246Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591332981780357561:3134][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7591332981780357575:3134] 2026-01-04T02:49:04.137270Z node 1 :SCHEME_BOARD_SUBSCRIBER NOTICE: subscriber.cpp:849: [main][1:7591332981780357561:3134][/dc-1/.metadata/workload_manager/running_requests] Set up state: owner# [1:7591332960305519657:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:04.137291Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][1:7591332981780357561:3134][/dc-1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/.metadata/workload_manager/running_requests Version: 0 }: sender# [1:7591332981780357576:3134] 2026-01-04T02:49:04.137310Z node 1 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][1:7591332981780357561:3134][/dc-1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [1:7591332960305519657:2121], state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 1 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:04.137328Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591332960305519340:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7591332981780357571:3133] 2026-01-04T02:49:04.137342Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591332960305519340:2050] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7591332981780357577:3134] 2026-01-04T02:49:04.137358Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591332960305519343:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7591332981780357572:3133] 2026-01-04T02:49:04.137391Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591332960305519343:2053] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7591332981780357578:3134] 2026-01-04T02:49:04.137414Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591332960305519346:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7591332981780357573:3133] 2026-01-04T02:49:04.137428Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1098: [1:7591332960305519346:2056] Handle NKikimrSchemeBoard.TEvNotifyAck { Version: 0 }: sender# [1:7591332981780357579:3134] 2026-01-04T02:49:04.137457Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591332960305519657:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 } 2026-01-04T02:49:04.137506Z node 1 :TX_PROXY_SCHEME_CACHE 
DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591332960305519657:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/delayed_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7591332981780357560:3133] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:49:04.137562Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591332960305519657:2121], cacheItem# { Subscriber: { Subscriber: [1:7591332981780357560:3133] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:04.137583Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591332960305519657:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 } 2026-01-04T02:49:04.137614Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591332960305519657:2121], notify# NKikimr::TSchemeBoardEvents::TEvNotifyDelete { Path: /dc-1/.metadata/workload_manager/running_requests PathId: Strong: 1 }, by path# { Subscriber: { Subscriber: [1:7591332981780357561:3134] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 0 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:49:04.137657Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591332960305519657:2121], cacheItem# { Subscriber: { Subscriber: [1:7591332981780357561:3134] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:04.137732Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591332981780357580:3135], recipient# [1:7591332981780357558:2332], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:04.137872Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591332981780357581:3136], recipient# [1:7591332981780357554:2328], result# { ErrorCount: 2 DatabaseName: /dc-1 
DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo },{ Path: dc-1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:04.645909Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332960305519580:2233];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:04.646001Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:49:04.655566Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591332960305519657:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:04.655690Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591332960305519657:2121], cacheItem# { Subscriber: { Subscriber: [1:7591332964600487455:2454] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:04.655813Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591332981780357586:3137], recipient# [1:7591332981780357585:2333], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:05.138624Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591332960305519657:2121], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:05.138774Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591332960305519657:2121], cacheItem# { Subscriber: { Subscriber: [1:7591332981780357559:3132] DomainOwnerId: 72057594046644480 Type: 2 
SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:05.138862Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591332986075324901:3143], recipient# [1:7591332986075324900:2334], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> BasicUsage::PropagateSessionClosed [GOOD] >> BasicUsage::ReadMirrored >> TLocksTest::GoodNullLock [GOOD] >> KqpPg::DuplicatedColumns-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder+useSink >> KqpPg::DropIndex [GOOD] >> KqpPg::CreateUniqPgColumn+useSink >> TLocksTest::CK_Range_GoodLock [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodNullLock [GOOD] Test command err: 2026-01-04T02:48:24.905565Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332809055815105:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:24.905773Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:24.979538Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:25.268902Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:25.311177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:25.311276Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:25.367877Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:25.414410Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:25.419390Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332809055815061:2081] 1767494904904206 != 1767494904904209 2026-01-04T02:48:25.557900Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, 
path: dc-1/.metadata/script_executions 2026-01-04T02:48:25.644110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:25.728304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:25.838711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:25.888617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:25.936472Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:28.689287Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:48:28.697888Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:28.707274Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:28.708823Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332828683773469:2081] 1767494908469628 != 1767494908469631 2026-01-04T02:48:28.751771Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:28.751912Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:28.781849Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:28.866158Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:29.025501Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:29.032962Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:29.046335Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:29.107443Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:29.165525Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:32.535299Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332846851317675:2257];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:32.535373Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:32.542874Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:32.645273Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:32.654214Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332846851317437:2081] 1767494912472655 != 1767494912472658 2026-01-04T02:48:32.670103Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:32.670181Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:32.673221Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:32.674014Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:32.842469Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:32.916504Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:32.923759Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:32.942050Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:32.949172Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:33.042769Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:33.097201Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:36.796846Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:36.796963Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:48:36.905880Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591332863708215989:2081] 1767494916724593 != 1767494916724596 2026-01-04T02:48:36.910738Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:36.914820Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:36.914899Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:36.943895Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:37.010012Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecuti ... 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:49.850258Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:49.861987Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:49.867066Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.943215Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:50.000535Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.277351Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7591332937700567180:2172];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:54.298759Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:54.377983Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:54.488449Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:54.488554Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:54.492819Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:54.510076Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7591332937700567035:2081] 1767494934239109 != 1767494934239112 2026-01-04T02:48:54.536911Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:54.569901Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:54.851261Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:54.870249Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:54.887154Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:54.895420Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.990651Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:55.047154Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:55.309965Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:59.310111Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7591332962749754816:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:59.310165Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:59.353406Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:59.516543Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:59.518524Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7591332962749754751:2081] 1767494939264039 != 1767494939264042 2026-01-04T02:48:59.533047Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:59.533132Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:59.564604Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:59.621974Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:59.950108Z node 9 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:59.970514Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:00.047359Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:00.117268Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:00.321936Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:04.476605Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591332982145934170:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:04.476675Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:04.517947Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:04.616704Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:04.616803Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:04.625992Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7591332982145934142:2081] 1767494944475856 != 1767494944475859 2026-01-04T02:49:04.632089Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:04.648953Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:04.685894Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:04.949954Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:04.979425Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:05.067598Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:05.136606Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> KqpPg::InsertValuesFromTableWithDefaultText+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText-useSink >> KqpPg::InsertValuesFromTableWithDefault+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefault-useSink >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] |95.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout >> TLocksTest::SetBreakSetEraseBreak [GOOD] >> TFlatTest::AutoMergeBySize [GOOD] >> TFlatTest::AutoSplitMergeQueue ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_Range_GoodLock [GOOD] Test command err: 2026-01-04T02:48:27.720064Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332823244156250:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:27.720127Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:28.129851Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:28.202278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:28.202358Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:28.313123Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332823244156012:2081] 1767494907658558 != 1767494907658561 2026-01-04T02:48:28.334076Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:28.354231Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:28.378011Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:28.714246Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:28.729456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:28.738382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:28.754475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:48:28.850074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:28.985083Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:29.045209Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:31.696976Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332840134583303:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:31.697113Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:31.777885Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:31.845706Z node 2 
:IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:31.847595Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332840134583091:2081] 1767494911667677 != 1767494911667680 2026-01-04T02:48:31.880405Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:31.880525Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:31.888460Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:31.944645Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:32.169334Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:32.189422Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:32.255885Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:32.317325Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:35.794803Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332859789665762:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:35.796091Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:35.845913Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:35.918834Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:35.925951Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332859789665707:2081] 1767494915778594 != 1767494915778597 2026-01-04T02:48:35.932351Z node 3 :HIVE WARN: 
node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:35.932442Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:35.965958Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:36.029896Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:36.190829Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:36.196559Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:36.204621Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:36.213188Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:36.289067Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:36.336654Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:39.636154Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332876719410226:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:39.636225Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:39.673338Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:39.756935Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:39.765634Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:7591332876719410198:2081] 1767494919634835 != 1767494919634838 
2026-01-04T02:48:39.792346Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:39.792435Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:39.804564Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node ... 10657, at schemeshard: 72057594046644480 2026-01-04T02:48:52.459923Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:52.464611Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:52.532879Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:52.604943Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:56.772974Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7591332947331875545:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:56.773051Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:56.815477Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:56.873930Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7591332947331875517:2081] 1767494936771980 != 1767494936771983 2026-01-04T02:48:56.894713Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:56.938749Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:56.938863Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:56.967851Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:57.020738Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:57.189164Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:57.209996Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:57.275598Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:57.332947Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:01.748691Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7591332968028747001:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:01.749345Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:01.758018Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:49:01.908025Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:01.908131Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:01.908891Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:01.914076Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:01.947114Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:02.174808Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:02.287061Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:49:02.306135Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:49:02.316599Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:49:02.321908Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:02.410690Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:02.494339Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:02.751320Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:06.996321Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591332990947553924:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:06.996412Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:07.047127Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:07.153649Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7591332990947553886:2081] 1767494946989466 != 1767494946989469 2026-01-04T02:49:07.171511Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:07.177117Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:07.177218Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:07.191842Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:07.298691Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:07.564911Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:07.582382Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:49:07.601615Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:49:07.607769Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:07.706335Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:07.773931Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::RemoveStoragePoolAndCreateOneMore [GOOD] Test command err: 2026-01-04T02:48:59.617702Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332962674988172:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:59.618020Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.008319s 2026-01-04T02:48:59.623042Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:59.640562Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639238 Duration# 0.021739s 2026-01-04T02:49:00.084814Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.158803Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:00.158926Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:00.184810Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:00.294146Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:00.307265Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.591220Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591332962674988403:2133] Handle TEvNavigate describe path dc-1 2026-01-04T02:49:00.591283Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591332966969956179:2448] HANDLE EvNavigateScheme dc-1 2026-01-04T02:49:00.591400Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591332962674988412:2137], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:00.591496Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591332962674988648:2287][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591332962674988412:2137], cookie# 1 2026-01-04T02:49:00.593378Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332966969955963:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332966969955960:2287], cookie# 1 2026-01-04T02:49:00.593427Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332966969955964:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332966969955961:2287], cookie# 1 2026-01-04T02:49:00.593464Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332966969955965:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332966969955962:2287], cookie# 1 2026-01-04T02:49:00.593538Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332962674988069:2054] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332966969955964:2287], cookie# 1 2026-01-04T02:49:00.593540Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332962674988066:2051] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332966969955963:2287], cookie# 1 2026-01-04T02:49:00.593572Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332962674988072:2057] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332966969955965:2287], cookie# 1 2026-01-04T02:49:00.593622Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332966969955964:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332962674988069:2054], cookie# 1 2026-01-04T02:49:00.593642Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332966969955963:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332962674988066:2051], cookie# 1 2026-01-04T02:49:00.593658Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332966969955965:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332962674988072:2057], cookie# 1 2026-01-04T02:49:00.593756Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332962674988648:2287][/dc-1] 
Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332966969955961:2287], cookie# 1 2026-01-04T02:49:00.593790Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591332962674988648:2287][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:49:00.593831Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332962674988648:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332966969955960:2287], cookie# 1 2026-01-04T02:49:00.593905Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591332962674988648:2287][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:49:00.593974Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332962674988648:2287][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332966969955962:2287], cookie# 1 2026-01-04T02:49:00.593994Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591332962674988648:2287][/dc-1] Sync cookie mismatch: sender# [1:7591332966969955962:2287], cookie# 1, current cookie# 0 2026-01-04T02:49:00.594094Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591332962674988412:2137], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:49:00.609964Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591332962674988412:2137], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591332962674988648:2287] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:49:00.610391Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591332962674988412:2137], cacheItem# { Subscriber: { Subscriber: [1:7591332962674988648:2287] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:49:00.612487Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591332966969956180:2449], recipient# [1:7591332966969956179:2448], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 
0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:49:00.612550Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591332966969956179:2448] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:49:00.624742Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591332962674988412:2137], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:00.624830Z node 1 :TX_PROXY_SCHEME_CACHE TRACE: cache.cpp:2384: Create subscriber: self# [1:7591332962674988412:2137], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480 2026-01-04T02:49:00.625048Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:1023: [main][1:7591332966969956182:2450][/dc-1/.metadata/initialization/migrations] Handle {EvResolveReplicasList ReplicaGroups: [{Replicas: [[1:24339059:0], [1:1099535966835:0], [1:2199047594611:0]] WriteOnly: 0 State: 0}]} 2026-01-04T02:49:00.630224Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591332962674988066:2051] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7591332966969956186:2450] 2026-01-04T02:49:00.630256Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591332962674988066:2051] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-04T02:49:00.630327Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591332962674988066:2051] Subscribe: subscriber# [1:7591332966969956186:2450], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:49:00.630371Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591332962674988069:2054] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7591332966969956187:2450] 2026-01-04T02:49:00.630379Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591332962674988069:2054] Upsert description: path# /dc-1/.metadata/initialization/migrations 2026-01-04T02:49:00.630399Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:655: [1:7591332962674988069:2054] Subscribe: subscriber# [1:7591332966969956187:2450], path# /dc-1/.metadata/initialization/migrations, domainOwnerId# 72057594046644480, capabilities# AckNotifications: true 2026-01-04T02:49:00.630436Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1070: [1:7591332962674988072:2057] Handle NKikimrSchemeBoard.TEvSubscribe { Path: /dc-1/.metadata/initialization/migrations DomainOwnerId: 72057594046644480 }: sender# [1:7591332966969956188:2450] 2026-01-04T02:49:00.630446Z node 1 :SCHEME_BOARD_REPLICA INFO: replica.cpp:529: [1:7591332962674988072:2057] Upsert description: path# /dc-1/.m ... 
Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnected 2026-01-04T02:49:10.217364Z node 1 :HIVE WARN: hive_impl.cpp:977: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[3:7591332968160208479:2223], Type=268959746 2026-01-04T02:49:10.217400Z node 1 :HIVE WARN: hive_impl.cpp:977: HIVE#72057594037968897 THive::Handle::TEvUndelivered Sender=[3:7591332968160208479:2223], Type=268959746 2026-01-04T02:49:10.225946Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591332968160208632:2231], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:10.226085Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591332968160208632:2231], cacheItem# { Subscriber: { Subscriber: [3:7591332985340077980:2308] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:10.226158Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591332968160208632:2231], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:10.226221Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591332968160208632:2231], cacheItem# { Subscriber: { Subscriber: [3:7591332985340077979:2307] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:10.226282Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591333006814924678:5731], recipient# [3:7591333006814924675:4810], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 
2026-01-04T02:49:10.226351Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591333006814924677:5730], recipient# [3:7591333006814924676:4811], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:11.046827Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591332968160208632:2231], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:11.046978Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591332968160208632:2231], cacheItem# { Subscriber: { Subscriber: [3:7591332985340077980:2308] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:11.047070Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591333011109891976:5732], recipient# [3:7591333011109891975:4812], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_0 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_0/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:11.134623Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7591332989907153341:2235], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:11.134771Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7591332989907153341:2235], cacheItem# { Subscriber: { Subscriber: [2:7591332989907153391:2244] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 
IsSync: false Partial: 0 } 2026-01-04T02:49:11.134857Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7591333011381990113:2346], recipient# [2:7591333011381990112:2555], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:11.138834Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7591332989907153341:2235], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:11.138973Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7591332989907153341:2235], cacheItem# { Subscriber: { Subscriber: [2:7591332989907153391:2244] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:11.139050Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7591333011381990115:2347], recipient# [2:7591333011381990114:2556], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:11.155523Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7591332989907153341:2235], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:11.155668Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7591332989907153341:2235], cacheItem# { Subscriber: { Subscriber: [2:7591333007087022710:2304] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false 
SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:11.155774Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7591333011381990117:2348], recipient# [2:7591333011381990116:2557], result# { ErrorCount: 1 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/classifiers/resource_pool_classifiers TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:11.565706Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591332989907152696:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:11.565796Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_1/.metadata/initialization/migrations;error=timeout; |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> KqpPg::InsertFromSelect_NoReorder+useSink [GOOD] >> KqpPg::DropTablePg >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::SetBreakSetEraseBreak [GOOD] Test command err: 2026-01-04T02:48:45.261974Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332902208839825:2160];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:45.262098Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:45.296266Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:45.717545Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:45.717659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:45.750359Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:45.765932Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332902208839673:2081] 1767494925219393 != 1767494925219396 2026-01-04T02:48:45.769012Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:45.774929Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:45.968202Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:46.058926Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose 
itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:46.078359Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:46.098130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:46.191324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:46.280074Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:46.349272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:46.405561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:48.786723Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332912045014296:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:48.786809Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:48.799273Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:48.888941Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332912045014273:2081] 1767494928786208 != 1767494928786211 2026-01-04T02:48:48.899142Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:48.915224Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:48.915289Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:48.916615Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:48.929223Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 
Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:49.110698Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:49.119185Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:49.128721Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:49.133464Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.185615Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:49.239862Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:52.615323Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:52.620765Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332930721785519:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:52.621170Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:52.738266Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:52.752477Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332930721785286:2081] 1767494932575831 != 1767494932575834 2026-01-04T02:48:52.759234Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:52.764138Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:52.764216Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:52.786290Z node 3 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:52.925890Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:53.019225Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:53.028393Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:53.042332Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:53.048168Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:53.138761Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:53.233073Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:56.289646Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332948565195179:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:56.291303Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:56.315670Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptSe ... 
rd__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:56.630674Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:56.651555Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:56.732260Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:56.843805Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:00.372195Z node 5 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[5:7591332966969993912:2094];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:00.394186Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:00.394512Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:49:00.523765Z node 5 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:00.524741Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.537947Z node 5 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [5:7591332966969993829:2081] 1767494940310483 != 1767494940310486 2026-01-04T02:49:00.557426Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:00.557512Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:00.564366Z node 5 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(5, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:00.824121Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.855029Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:00.874611Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:49:00.898728Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:49:00.917563Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:01.024454Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:01.086856Z node 5 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:04.834566Z node 6 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[6:7591332981070907465:2113];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:04.834752Z node 6 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:04.863052Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:04.950607Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:04.950697Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:04.953117Z node 6 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:04.967030Z node 6 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(6, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:05.116068Z node 6 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:05.186968Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:49:05.192296Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:49:05.210421Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:05.270487Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:05.332241Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:08.954772Z node 7 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[7:7591333001205403085:2252];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:08.955069Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:09.011744Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:09.135268Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:09.157033Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:7591333001205402871:2081] 1767494948925807 != 1767494948925810 2026-01-04T02:49:09.165756Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:09.165870Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:09.193763Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:09.257083Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:09.469027Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:09.478184Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 
281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:49:09.490975Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:49:09.495957Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:09.600229Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:09.664041Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] >> Cdc::AddColumn_TopicAutoPartitioning [GOOD] >> Cdc::AddIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::PeriodicTopicStatsReload [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:49:13.831631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:49:13.831747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:49:13.831793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:49:13.831831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:49:13.831868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:49:13.831899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:49:13.831963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:49:13.832053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: 
Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:49:13.832885Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:49:13.833858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:49:13.931204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:13.931285Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:13.946594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:49:13.946783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:49:13.948297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:49:13.967674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:49:13.967969Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:49:13.968779Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:49:13.969068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:49:13.978480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:13.978712Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:49:13.981437Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:49:13.981513Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:13.981637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:49:13.981690Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:49:13.981752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:49:13.981937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:49:14.173385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:49:14.176464Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.176634Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.176738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.176814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.176932Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.177019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.177085Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.177149Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.177236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.177298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.177385Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.177453Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.177547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.177653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
2:49:15.442550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:15.442685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:15.442744Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:15.442830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:15.443003Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:15.443097Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:15.444548Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:15.444838Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:15.444923Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:15.444974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:15.445166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:15.445212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:15.445256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:15.445586Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:49:15.451045Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:49:15.451195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:49:15.454220Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435083, Sender [1:928:2859], Recipient [1:928:2859]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2026-01-04T02:49:15.454268Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2026-01-04T02:49:15.455880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:49:15.455940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:15.456353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:49:15.456413Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:49:15.456470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:49:15.456510Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:49:15.457818Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274399233, Sender [1:964:2859], Recipient [1:928:2859]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-01-04T02:49:15.457868Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5421: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2026-01-04T02:49:15.457901Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:928:2859] sender: [1:986:2058] recipient: [1:15:2062] 2026-01-04T02:49:15.509351Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:985:2904], Recipient [1:928:2859]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-04T02:49:15.509439Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:49:15.509575Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:49:15.509893Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 253us result status StatusSuccess 2026-01-04T02:49:15.510457Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 
ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:49:15.511303Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271188001, Sender [1:987:2905], Recipient [1:928:2859]: NKikimrPQ.TEvPeriodicTopicStats PathId: 38 Generation: 1 Round: 96 DataSize: 19 UsedReserveSize: 7 2026-01-04T02:49:15.511361Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5286: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2026-01-04T02:49:15.511400Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 19 UsedReserveSize 7 2026-01-04T02:49:15.511455Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__pq_stats.cpp:128: Will execute TTxStoreStats, queue# 1 2026-01-04T02:49:15.511524Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__pq_stats.cpp:141: Will delay TTxStoreTopicStats on# 0.000000s, queue# 1 2026-01-04T02:49:15.511767Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:988:2906], Recipient [1:928:2859]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-04T02:49:15.511807Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:49:15.511901Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:49:15.512153Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 252us result status StatusSuccess 2026-01-04T02:49:15.512582Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 1 
WriteSpeedInBytesPerSecond: 7 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 7 AccountSize: 17 DataSize: 17 UsedReserveSize: 7 } } PQPartitionsInside: 1 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicAccountSizeAndUsedReserveSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:49:13.832890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:49:13.832993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:49:13.833043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:49:13.833080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:49:13.833134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:49:13.833162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:49:13.833245Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:49:13.833322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, 
CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:49:13.834174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:49:13.834474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:49:13.931044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:13.931103Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:13.944895Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:49:13.945070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:49:13.948039Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:49:13.957020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:49:13.957941Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:49:13.960430Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:49:13.963927Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:49:13.971481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:13.972667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:49:13.980125Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:49:13.980205Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:13.980335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:49:13.980394Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:49:13.980527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:49:13.980761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:49:14.160864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164566Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.165027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.165087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.165141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.165237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.165354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
shard: 72057594046678944 2026-01-04T02:49:15.661052Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186233409551][Topic3] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409551 2026-01-04T02:49:15.661343Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2026-01-04T02:49:15.661924Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:49:15.664128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:49:15.664179Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:49:15.664438Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:49:15.664844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:49:15.664877Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:49:15.666265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-04T02:49:15.666354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-04T02:49:15.666534Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877763, Sender [1:1313:3162], Recipient [1:692:2679]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [1:1313:3162] ServerId: [1:1315:3164] } 2026-01-04T02:49:15.666573Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-04T02:49:15.666603Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6177: Client pipe, to tablet: 72057594037968897, from:72057594046678944 is reset TestModificationResult got TxId: 104, wait until txId: 104 TestWaitNotification wait txId: 104 2026-01-04T02:49:15.666987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 104: send EvNotifyTxCompletion 2026-01-04T02:49:15.667057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 104 2026-01-04T02:49:15.667555Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:1329:3178], Recipient [1:692:2679]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:49:15.667613Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:49:15.667649Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046678944 2026-01-04T02:49:15.667822Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [1:913:2847], 
Recipient [1:692:2679]: NKikimrScheme.TEvNotifyTxCompletion TxId: 104 2026-01-04T02:49:15.667867Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-04T02:49:15.667966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 104, at schemeshard: 72057594046678944 2026-01-04T02:49:15.668058Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:49:15.668096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:1327:3176] 2026-01-04T02:49:15.668286Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [1:1329:3178], Recipient [1:692:2679]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:49:15.668326Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:49:15.668369Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 104 2026-01-04T02:49:15.668982Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1330:3179], Recipient [1:692:2679]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-04T02:49:15.669056Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:49:15.669148Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:49:15.669347Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 183us result status StatusSuccess 2026-01-04T02:49:15.669843Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 
2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 31 UsedReserveSize: 31 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:49:15.670684Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271188001, Sender [1:1331:3180], Recipient [1:692:2679]: NKikimrPQ.TEvPeriodicTopicStats PathId: 40 Generation: 1 Round: 6 DataSize: 151 UsedReserveSize: 151 2026-01-04T02:49:15.670735Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5286: StateWork, processing event TEvPersQueue::TEvPeriodicTopicStats 2026-01-04T02:49:15.670777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 40] DataSize 151 UsedReserveSize 151 2026-01-04T02:49:15.670814Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__pq_stats.cpp:128: Will execute TTxStoreStats, queue# 1 2026-01-04T02:49:15.671196Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1332:3181], Recipient [1:692:2679]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false } 2026-01-04T02:49:15.671238Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:49:15.671363Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:49:15.673773Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 222us result status StatusSuccess 2026-01-04T02:49:15.674299Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 1 PartitionPerTablet: 1 PQTabletConfig { PartitionConfig { LifetimeSeconds: 13 WriteSpeedInBytesPerSecond: 19 } 
YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 1 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 808 AccountSize: 808 DataSize: 182 UsedReserveSize: 182 } } PQPartitionsInside: 4 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] >> TLocksTest::CK_BrokenLock [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultText-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink >> TLocksTest::Range_GoodLock1 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeReserved [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:49:13.833482Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:49:13.833611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:49:13.833662Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:49:13.833702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:49:13.833741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:49:13.833778Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:49:13.833903Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:49:13.833991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:49:13.834882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:49:13.835190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:49:13.934397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:13.934457Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:13.952300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:49:13.956519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:49:13.956713Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:49:13.961894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:49:13.962127Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:49:13.962802Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:49:13.963857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:49:13.978946Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:13.979148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:49:13.980485Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:49:13.980570Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:13.980717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:49:13.980767Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:49:13.980828Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:49:13.980968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:49:14.161187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164270Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164487Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164710Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164869Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.165095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:49:14.165176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.165262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.165337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.165398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.165473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.165546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.165651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:16.765626Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][0][StateIdle] Try persist 2026-01-04T02:49:16.806712Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:49:16.806781Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:16.806813Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:16.806845Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:16.806887Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][1][StateIdle] Try persist 2026-01-04T02:49:16.806972Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][2][StateIdle] Process user action and tx events 2026-01-04T02:49:16.806996Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:16.807017Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:16.807040Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:16.807059Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][2][StateIdle] Try persist 2026-01-04T02:49:16.807098Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:16.807124Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:16.807146Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:16.807172Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:16.807191Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][0][StateIdle] Try persist 2026-01-04T02:49:16.838323Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:49:16.838392Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:16.838498Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:16.838533Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:16.838563Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][1][StateIdle] Try persist 2026-01-04T02:49:16.838634Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][2][StateIdle] Process user action and tx events 2026-01-04T02:49:16.838670Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, 
PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:16.838691Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:16.838718Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:16.838738Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][2][StateIdle] Try persist 2026-01-04T02:49:16.838775Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:16.838797Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:16.838816Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:16.838838Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:16.838858Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][0][StateIdle] Try persist 2026-01-04T02:49:16.870329Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:159: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2026-01-04T02:49:16.870398Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:445: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2026-01-04T02:49:16.870786Z node 1 :PERSQUEUE DEBUG: partition.cpp:1025: [72075186233409546][Partition][0][StateIdle] Topic PartitionStatus PartitionSize: 16975298 UsedReserveSize: 16975298 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2026-01-04T02:49:16.870917Z node 1 :PERSQUEUE DEBUG: partition.cpp:1025: [72075186233409546][Partition][2][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2026-01-04T02:49:16.871057Z node 1 :PERSQUEUE DEBUG: partition.cpp:1025: [72075186233409546][Partition][1][StateIdle] Topic PartitionStatus PartitionSize: 0 UsedReserveSize: 0 ReserveSize: 45532800 PartitionConfig{ LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 TotalPartitions: 3 } 2026-01-04T02:49:16.871493Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:523: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 38 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 16975298 2026-01-04T02:49:16.871534Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2026-01-04T02:49:16.871727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 16975298 UsedReserveSize 16975298 2026-01-04T02:49:16.885047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-04T02:49:16.906662Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:49:16.906723Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:16.906753Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:16.906789Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:16.906819Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][1][StateIdle] Try persist 2026-01-04T02:49:16.906876Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][2][StateIdle] Process user action and tx events 2026-01-04T02:49:16.906897Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:16.906917Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:16.906937Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:16.906976Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][2][StateIdle] Try persist 2026-01-04T02:49:16.907021Z node 1 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186233409546][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:16.907038Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:16.907053Z node 1 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186233409546][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:16.907072Z node 1 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186233409546][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:16.907093Z node 1 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186233409546][Partition][0][StateIdle] Try persist 2026-01-04T02:49:16.928380Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:49:16.928603Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 267us result status StatusSuccess 2026-01-04T02:49:16.929159Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup 
CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 2678400 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_RESERVED_CAPACITY } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 136598400 AccountSize: 136598400 DataSize: 16975298 UsedReserveSize: 16975298 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::TopicPeriodicStatMeteringModeRequest [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:49:13.837993Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:49:13.838089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:49:13.838130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:49:13.838173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:49:13.838217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2026-01-04T02:49:13.838244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:49:13.838317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:49:13.838412Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:49:13.839263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:49:13.839563Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:49:13.925004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:13.925098Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:13.944527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:49:13.945068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:49:13.948303Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:49:13.974734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:49:13.975031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:49:13.975862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:49:13.976160Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:49:13.979447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:13.979644Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:49:13.980860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:49:13.980922Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:13.980974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:49:13.981021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:49:13.981077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:49:13.981297Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:49:14.178324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.181974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.182153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.182255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.182344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.182430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.182502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.182570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.182633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.182721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.182782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.182849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.182914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.183021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.183116Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... oup { Name: "Topic1" PathId: 38 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:49:15.342288Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186233409547][Topic1] pipe [1:942:2864] connected; active server actors: 1 2026-01-04T02:49:15.373969Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:49:15.374228Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 315us result status StatusSuccess 2026-01-04T02:49:15.374743Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 
72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:49:15.994821Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:159: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2026-01-04T02:49:15.996063Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:445: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 1 2026-01-04T02:49:16.000484Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:523: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 38 Generation: 2 StatsReportRound: 2 DataSize: 16975298 UsedReserveSize: 0 2026-01-04T02:49:16.000642Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2026-01-04T02:49:16.001027Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186233409547][Topic1] TEvClientConnected TabletId 72057594046678944, NodeId 1, Generation 3 2026-01-04T02:49:16.001132Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 16975298 UsedReserveSize 0 2026-01-04T02:49:16.015006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-04T02:49:16.431441Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:159: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2026-01-04T02:49:16.431533Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:445: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 2 2026-01-04T02:49:16.432558Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:523: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 38 Generation: 2 StatsReportRound: 3 DataSize: 16975298 UsedReserveSize: 0 2026-01-04T02:49:16.432621Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186233409547][Topic1] ProcessPendingStats. 
PendingUpdates size 0 2026-01-04T02:49:16.432869Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 16975298 UsedReserveSize 0 2026-01-04T02:49:16.452716Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-04T02:49:16.858707Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:159: [72075186233409547][Topic1] TPersQueueReadBalancer::HandleWakeup 2026-01-04T02:49:16.858803Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:445: [72075186233409547][Topic1] Send TEvPersQueue::TEvStatus TabletId: 72075186233409546 Cookie: 3 2026-01-04T02:49:16.859684Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:523: [72075186233409547][Topic1] Send TEvPeriodicTopicStats PathId: 38 Generation: 2 StatsReportRound: 4 DataSize: 16975298 UsedReserveSize: 0 2026-01-04T02:49:16.859757Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1843: [72075186233409547][Topic1] ProcessPendingStats. PendingUpdates size 0 2026-01-04T02:49:16.860000Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__pq_stats.cpp:100: Got periodic topic stats at partition [OwnerId: 72057594046678944, LocalPathId: 38] DataSize 16975298 UsedReserveSize 0 2026-01-04T02:49:16.873318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__pq_stats.cpp:119: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-04T02:49:16.915118Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Topic1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:49:16.915295Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Topic1" took 212us result status StatusSuccess 2026-01-04T02:49:16.915772Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Topic1" PathDescription { Self { Name: "Topic1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypePersQueueGroup CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 PQVersion: 1 } ChildrenExist: false } PersQueueGroup { Name: "Topic1" PathId: 38 TotalGroupCount: 3 PartitionPerTablet: 3 PQTabletConfig { PartitionConfig { LifetimeSeconds: 11 WriteSpeedInBytesPerSecond: 17 } YdbDatabasePath: "/MyRoot" MeteringMode: METERING_MODE_REQUEST_UNITS } Partitions { PartitionId: 0 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 1 TabletId: 72075186233409546 Status: Active } Partitions { PartitionId: 2 TabletId: 72075186233409546 Status: Active } AlterVersion: 1 BalancerTabletID: 72075186233409547 NextPartitionId: 3 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 
ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 16975298 DataSize: 16975298 UsedReserveSize: 0 } } PQPartitionsInside: 3 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:49:16.916626Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186233409547][Topic1] pipe [1:1016:2929] connected; active server actors: 1 2026-01-04T02:49:16.930895Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186233409547][Topic1] BALANCER INIT DONE for Topic1: (0, 72075186233409546) (1, 72075186233409546) (2, 72075186233409546) 2026-01-04T02:49:16.931488Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186233409547][Topic1] Discovered subdomain [OwnerId: 72057594046678944, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186233409547 2026-01-04T02:49:16.932997Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186233409547][Topic1] TEvClientConnected TabletId 72075186233409546, NodeId 1, Generation 2 2026-01-04T02:49:16.957537Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186233409547][Topic1] pipe [1:1060:2963] connected; active server actors: 1 >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] >> KqpPg::InsertValuesFromTableWithDefault-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> YdbTableSplit::RenameTablesAndSplit >> YdbTableSplit::SplitByLoadWithReads >> YdbTableSplit::MergeByNoLoadAfterSplit >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads >> YdbTableSplit::SplitByLoadWithDeletes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::CK_BrokenLock [GOOD] Test command err: 2026-01-04T02:48:34.379982Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332852521094935:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:34.380053Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:34.435847Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:34.833698Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:34.852128Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2026-01-04T02:48:34.852226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:34.852887Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:34.853935Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332852521094824:2081] 1767494914350800 != 1767494914350803 2026-01-04T02:48:34.878085Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:35.101004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:35.106837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:35.128102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:35.181590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:35.366584Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:35.423947Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:35.429935Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:37.950302Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332865824071806:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:37.952320Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:37.983214Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:38.067982Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-04T02:48:38.069983Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332865824071767:2081] 1767494917946780 != 1767494917946783 2026-01-04T02:48:38.098786Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:38.098871Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:38.113630Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:38.205576Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:38.300438Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:38.322801Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:38.387699Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:38.443773Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:41.696516Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332881856150059:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:41.696673Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:41.733964Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:41.792804Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332881856149939:2081] 1767494921692904 != 1767494921692907 2026-01-04T02:48:41.806933Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:41.825603Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2026-01-04T02:48:41.825681Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:41.828668Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:41.986856Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:41.995689Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:42.006019Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:42.061015Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:42.110695Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:45.363064Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332901952861728:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:45.363112Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:45.466120Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:45.598303Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:45.599786Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:45.599857Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:45.605325Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:45.743852Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
dc-1/.metadata/script_executions 2026-01-04T02:48:45.899256Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0 ... 196725Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:58.247829Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:02.524793Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7591332972065931906:2066];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:02.524966Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:02.645294Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:02.731798Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:02.745600Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7591332972065931880:2081] 1767494942523230 != 1767494942523233 2026-01-04T02:49:02.752454Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:02.752545Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:02.795707Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:02.837877Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:03.099249Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:03.110636Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:49:03.126173Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:49:03.131744Z node 8 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:03.220909Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:03.281123Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:03.520644Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:07.110616Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7591332997424565076:2133];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:07.110795Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:07.147158Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:07.269334Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:07.285938Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7591332997424564983:2081] 1767494947107393 != 1767494947107396 2026-01-04T02:49:07.297738Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:07.297860Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:07.315000Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:07.320916Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:07.652419Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:07.658846Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:49:07.675869Z node 9 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:07.760281Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:07.798156Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:07.824757Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:12.191922Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591333017933530964:2191];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:12.192007Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:12.211702Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:49:12.361929Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7591333017933530802:2081] 1767494952177005 != 1767494952177008 2026-01-04T02:49:12.385890Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:12.398470Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:12.400806Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:12.400898Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:12.418400Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:12.602108Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:12.750645Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 
72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:12.775471Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:49:12.786910Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:12.930761Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:13.013936Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:13.194248Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_GoodLock1 [GOOD] Test command err: 2026-01-04T02:48:33.161191Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332850598563944:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:33.167421Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:33.230252Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:33.625908Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:33.629851Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:33.629945Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:33.701616Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:33.768957Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:33.770121Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332850598563901:2081] 1767494913128829 != 1767494913128832 2026-01-04T02:48:33.828111Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:34.040942Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:34.052105Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:34.066344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:34.142815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:34.165940Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:34.325244Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:34.380891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:37.158227Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:37.158389Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332866013599925:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:37.170862Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:37.295143Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:37.297861Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332866013599755:2081] 1767494917095791 != 1767494917095794 2026-01-04T02:48:37.310141Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:37.310245Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-04T02:48:37.311536Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:37.357094Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:37.493157Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:37.558759Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:37.580999Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:37.640392Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:37.713792Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:40.747918Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332879741258042:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:40.747971Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:40.770752Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:40.857591Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:40.857671Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:40.860489Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:40.866135Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332879741258015:2081] 1767494920747111 != 1767494920747114 2026-01-04T02:48:40.873484Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, 
(0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:40.874066Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:41.091069Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:41.098368Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:41.106134Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:41.110641Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:41.151336Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:41.172798Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:41.222554Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:44.486408Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332896436424016:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:44.487356Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:44.573885Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions ... 
1474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:57.922656Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:57.932791Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:57.940097Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:58.011953Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:58.069838Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:02.600797Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7591332973640791064:2263];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:02.600904Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:02.658384Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:49:02.772184Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:02.785977Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7591332973640790831:2081] 1767494942577643 != 1767494942577646 2026-01-04T02:49:02.817046Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:02.817149Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:02.818784Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:02.822818Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:02.944549Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:03.098995Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:03.126605Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:03.213382Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:03.341287Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:07.468630Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7591332994419467806:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:07.468783Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:07.507907Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:07.597743Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:07.602071Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7591332994419467777:2081] 1767494947467776 != 1767494947467779 2026-01-04T02:49:07.610606Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:07.610714Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:07.633726Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:07.705920Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:07.930408Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:07.955216Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:08.024726Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:08.082860Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:12.668549Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591333015730338773:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:12.668624Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:12.681107Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:49:12.808893Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:12.813832Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7591333015730338744:2081] 1767494952667529 != 1767494952667532 2026-01-04T02:49:12.824529Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:12.824631Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:12.825576Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:12.828791Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:12.996443Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:13.275398Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:13.305070Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:13.391330Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:13.465084Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:13.673402Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStorageTenantTest::CreateDummyTabletsInDifferentDomains [GOOD] Test command err: 2026-01-04T02:49:00.267884Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332965897188929:2267];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:00.268127Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:00.318931Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332964147644496:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:00.318977Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:00.765904Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.861907Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:00.965735Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:00.965953Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:00.980026Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:00.980093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, 
(0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:01.005025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:01.011700Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:49:01.023603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:01.159031Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:01.195745Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:01.202631Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:01.259024Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:01.353553Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:01.562376Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [1:7591332965897188943:2145] Handle TEvNavigate describe path dc-1 2026-01-04T02:49:01.562422Z node 1 :TX_PROXY DEBUG: describe.cpp:270: Actor# [1:7591332970192156731:2468] HANDLE EvNavigateScheme dc-1 2026-01-04T02:49:01.562530Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [1:7591332965897188949:2147], request# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:01.562619Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:889: [main][1:7591332965897189178:2294][/dc-1] Handle NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest: sender# [1:7591332965897188949:2147], cookie# 1 2026-01-04T02:49:01.563857Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332965897189216:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332965897189213:2294], cookie# 1 2026-01-04T02:49:01.563880Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332965897189217:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332965897189214:2294], cookie# 1 2026-01-04T02:49:01.563891Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:375: [replica][1:7591332965897189218:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332965897189215:2294], cookie# 1 2026-01-04T02:49:01.563935Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332961602221290:2052] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332965897189216:2294], cookie# 1 2026-01-04T02:49:01.563959Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332961602221293:2055] Handle 
NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332965897189217:2294], cookie# 1 2026-01-04T02:49:01.563980Z node 1 :SCHEME_BOARD_REPLICA DEBUG: replica.cpp:1137: [1:7591332961602221296:2058] Handle NKikimrSchemeBoard.TEvSyncVersionRequest { Path: /dc-1 }: sender# [1:7591332965897189218:2294], cookie# 1 2026-01-04T02:49:01.564014Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332965897189216:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332961602221290:2052], cookie# 1 2026-01-04T02:49:01.564028Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332965897189217:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332961602221293:2055], cookie# 1 2026-01-04T02:49:01.564041Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:384: [replica][1:7591332965897189218:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332961602221296:2058], cookie# 1 2026-01-04T02:49:01.564076Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332965897189178:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332965897189213:2294], cookie# 1 2026-01-04T02:49:01.564103Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:986: [main][1:7591332965897189178:2294][/dc-1] Sync is in progress: cookie# 1, ring group# 0, size# 3, half# 1, successes# 1, failures# 0 2026-01-04T02:49:01.564122Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332965897189178:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332965897189214:2294], cookie# 1 2026-01-04T02:49:01.564153Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:999: [main][1:7591332965897189178:2294][/dc-1] Sync is done in the ring group: cookie# 1, ring group# 0, size# 3, half# 1, successes# 2, failures# 0, partial# 0 2026-01-04T02:49:01.564180Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:919: [main][1:7591332965897189178:2294][/dc-1] Handle NKikimrSchemeBoard.TEvSyncVersionResponse { Version: 2 Partial: 0 Cluster State: { } }: sender# [1:7591332965897189215:2294], cookie# 1 2026-01-04T02:49:01.564193Z node 1 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:925: [main][1:7591332965897189178:2294][/dc-1] Sync cookie mismatch: sender# [1:7591332965897189215:2294], cookie# 1, current cookie# 0 2026-01-04T02:49:01.564227Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2651: HandleNotify: self# [1:7591332965897188949:2147], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 } 2026-01-04T02:49:01.598278Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2526: ResolveCacheItem: self# [1:7591332965897188949:2147], notify# NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse { Path: /dc-1 PathId: Partial: 0 }, by path# { Subscriber: { Subscriber: [1:7591332965897189178:2294] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, by pathId# nullptr 2026-01-04T02:49:01.598514Z node 1 :TX_PROXY_SCHEME_CACHE 
DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [1:7591332965897188949:2147], cacheItem# { Subscriber: { Subscriber: [1:7591332965897189178:2294] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 1 } Filled: 1 Status: StatusSuccess Kind: 2 TableKind: 0 Created: 1 CreateStep: 0 PathId: [OwnerId: 72057594046644480, LocalPathId: 1] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1 TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 1 IsSync: true Partial: 0 } 2026-01-04T02:49:01.601582Z node 1 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [1:7591332970192156732:2469], recipient# [1:7591332970192156731:2468], result# { ErrorCount: 0 DatabaseName: DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1 TableId: [72057594046644480:1:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: true SyncVersion: true Status: Ok Kind: KindPath DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T02:49:01.601673Z node 1 :TX_PROXY DEBUG: describe.cpp:354: Actor# [1:7591332970192156731:2468] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T02:49:01.637468Z node 1 :TX_PROXY DEBUG: describe.cpp:433: Actor# [1:7591332970192156731:2468] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "dc-1" Options { ShowPrivateTable: true } 2026-01-04T02:49:01.641355Z node 1 :TX_PROXY DEBUG: describe.cpp:446: Actor# [1:7591332970192156731:2468] Handle TEvDescribeSchemeResult Forward to# [1:7591332970192156730:2467] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 2 Record# Status: StatusSuccess Path: "dc-1" PathDescription { Self { Name: "dc-1" PathId: 1 SchemeshardId: 72057594046644480 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 2 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 2 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 0 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 0 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 ... 
hot_base.cpp:71;action=cannot detect path existence;path=//dc-1/USER_1/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:49:16.927925Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [3:7591333015451506619:2206], request# { ErrorCount: 0 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:16.928119Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591333015451506619:2206], cacheItem# { Subscriber: { Subscriber: [3:7591333032631376084:2252] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:16.928208Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [3:7591333015451506619:2206], cacheItem# { Subscriber: { Subscriber: [3:7591333032631376086:2253] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: undefined Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:16.928341Z node 3 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [3:7591333032631376142:2261], recipient# [3:7591333032631376081:2557], result# { ErrorCount: 2 DatabaseName: /dc-1/USER_1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/USER_1/.metadata/workload_manager/delayed_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo },{ Path: dc-1/USER_1/.metadata/workload_manager/running_requests TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpTable RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: LookupError Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:16.928622Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:7591333032631376081:2557], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:49:16.986047Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591333032631376084:2252][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7591333032631376088:2252] 2026-01-04T02:49:16.986111Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591333032631376084:2252][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7591333015451506619:2206], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:16.986131Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591333032631376084:2252][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7591333032631376089:2252] 2026-01-04T02:49:16.986153Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591333032631376084:2252][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7591333015451506619:2206], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:16.986164Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591333032631376084:2252][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/delayed_requests Version: 0 }: sender# [3:7591333032631376090:2252] 2026-01-04T02:49:16.986178Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591333032631376084:2252][/dc-1/USER_1/.metadata/workload_manager/delayed_requests] Ignore empty state: owner# [3:7591333015451506619:2206], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:16.986878Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591333032631376086:2253][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7591333032631376094:2253] 2026-01-04T02:49:16.986921Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591333032631376086:2253][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7591333015451506619:2206], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:16.986943Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591333032631376086:2253][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7591333032631376095:2253] 2026-01-04T02:49:16.986964Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: 
subscriber.cpp:867: [main][3:7591333032631376086:2253][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7591333015451506619:2206], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:16.986981Z node 3 :SCHEME_BOARD_SUBSCRIBER DEBUG: subscriber.cpp:814: [main][3:7591333032631376086:2253][/dc-1/USER_1/.metadata/workload_manager/running_requests] Handle NKikimrSchemeBoard.TEvNotify { Path: /dc-1/USER_1/.metadata/workload_manager/running_requests Version: 0 }: sender# [3:7591333032631376096:2253] 2026-01-04T02:49:16.987000Z node 3 :SCHEME_BOARD_SUBSCRIBER INFO: subscriber.cpp:867: [main][3:7591333032631376086:2253][/dc-1/USER_1/.metadata/workload_manager/running_requests] Ignore empty state: owner# [3:7591333015451506619:2206], state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements }, other state# { Deleted: 1 Strong: 0 Version: DomainId: AbandonedSchemeShards: there are 0 elements } 2026-01-04T02:49:17.360998Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7591332964147644697:2108], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:17.361125Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7591332964147644697:2108], cacheItem# { Subscriber: { Subscriber: [2:7591332968442612047:2119] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:17.361217Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7591333037162088920:2163], recipient# [2:7591333037162088919:2336], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:17.523830Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [2:7591332964147644697:2108], request# { ErrorCount: 0 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T02:49:17.523995Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [2:7591332964147644697:2108], cacheItem# { 
Subscriber: { Subscriber: [2:7591332968442612047:2119] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T02:49:17.524089Z node 2 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [2:7591333037162088922:2164], recipient# [2:7591333037162088921:2337], result# { ErrorCount: 1 DatabaseName: /dc-1 DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: dc-1/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } >> KqpPg::TableArrayInsert+useSink [GOOD] >> KqpPg::TableArrayInsert-useSink |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_proxy/ut_storage_tenant/unittest >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] >> KqpPg::InsertFromSelect_Simple+useSink [GOOD] >> KqpPg::InsertFromSelect_Simple-useSink >> YdbTableSplit::SplitByLoadWithUpdates >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration >> KqpPg::CreateUniqPgColumn+useSink [GOOD] >> KqpPg::CreateUniqPgColumn-useSink >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration |95.1%| [TA] $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpPg::CreateTableBulkUpsertAndRead [GOOD] >> KqpPg::CopyTableSerialColumns+useSink |95.1%| [TA] {RESULT} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TStoragePoolsStatsPersistence::SameAggregatedStatsAfterRestart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:49:13.831623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:49:13.831747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:49:13.831788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:49:13.831841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:49:13.831879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:49:13.831911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:49:13.831970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:49:13.832049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:49:13.832864Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:49:13.833909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:49:13.927520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:13.927578Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:13.947278Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:49:13.947817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:49:13.948042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:49:13.971617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:49:13.971895Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:49:13.972539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 
72057594046678944 2026-01-04T02:49:13.972776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:49:13.982177Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:13.982368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:49:13.983585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:49:13.983646Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:13.983695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:49:13.983754Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:49:13.983801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:49:13.983966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:49:14.160520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.169223Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.169383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.169484Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.169591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.169660Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.169756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.173038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.173224Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.173340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.173407Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.173473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.173548Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.173630Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.173753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... thId: 37] was 0 2026-01-04T02:49:19.408632Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 2, at schemeshard: 72057594046678944 2026-01-04T02:49:19.408782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:19.408984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 1, at schemeshard: 72057594046678944 2026-01-04T02:49:19.409034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 1 2026-01-04T02:49:19.409191Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 1, at schemeshard: 72057594046678944 2026-01-04T02:49:19.410148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 1, at schemeshard: 72057594046678944 2026-01-04T02:49:19.410557Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2026-01-04T02:49:19.410900Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:19.411020Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:19.411423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:19.411510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:19.411801Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: 
TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:19.411959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:19.412053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:19.412145Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:19.412359Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:19.412452Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:19.412898Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:19.413183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:19.413270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:19.413332Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:19.413480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:19.413537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:19.413595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:19.426623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:49:19.432560Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:49:19.432652Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:19.434242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:49:19.434319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:49:19.434401Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:49:19.435440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1163:3116] sender: [1:1218:2058] recipient: [1:15:2062] 2026-01-04T02:49:19.477693Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeTable" Options { ReturnPartitioningInfo: 
false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:49:19.478035Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/SomeTable" took 375us result status StatusSuccess 2026-01-04T02:49:19.478532Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeTable" PathDescription { Self { Name: "SomeTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "SomeTable" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 Family: 1 FamilyName: "alternative" NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 4140 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { PoolsUsage { PoolKind: "pool-kind-1" DataSize: 1020 IndexSize: 0 } PoolsUsage { PoolKind: "pool-kind-2" DataSize: 3120 IndexSize: 0 } } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82624 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:49:19.480906Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false 
BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:49:19.481116Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 242us result status StatusSuccess 2026-01-04T02:49:19.481652Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "SomeTable" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 4140 DataSize: 4140 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-1" TotalSize: 1020 DataSize: 1020 IndexSize: 0 } StoragePoolsUsage { PoolKind: "pool-kind-2" TotalSize: 3120 DataSize: 3120 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_proxy/ut_storage_tenant/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration >> TNodeBrokerTest::Test999NodesSubscribers >> TNodeBrokerTest::ShiftIdRangeRemoveActive >> TNodeBrokerTest::NodesMigrationExpireRemoved >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-true >> TNodeBrokerTest::NodesMigrationReuseID >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact >> TNodeBrokerTest::Test1001NodesSubscribers >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 [GOOD] >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> BasicUsage::ReadMirrored [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldNotBatchWhenDisabled [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:49:13.831630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:49:13.831747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:49:13.831784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:49:13.831825Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:49:13.831861Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:49:13.831891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:49:13.831955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: 
BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:49:13.832024Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:49:13.832884Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:49:13.835409Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:49:13.932799Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:13.932855Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:13.948844Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:49:13.949296Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:49:13.949458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:49:13.962568Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:49:13.962799Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:49:13.963475Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:49:13.963857Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:49:13.971553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:13.972655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:49:13.980326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:49:13.980401Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:13.980458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:49:13.980507Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:49:13.980559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:49:13.980736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:49:14.170517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.171621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.171787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.171882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.171983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.172067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.172139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.172253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:49:14.172323Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.172418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.172483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.172556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.172624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.172725Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.172846Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
T02:49:22.247707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2026-01-04T02:49:22.248036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:22.248146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:22.248536Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:22.248611Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:22.248851Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:22.249012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:22.249061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:22.249187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:22.249362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:22.249468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:22.249907Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:22.250172Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:22.250243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:22.250293Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:22.250428Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:22.250480Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:22.250530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:22.250912Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:49:22.255900Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:49:22.256070Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:49:22.259898Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435083, 
Sender [1:1418:3362], Recipient [1:1418:3362]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2026-01-04T02:49:22.259952Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2026-01-04T02:49:22.261475Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:49:22.261547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:22.262074Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1418:3362], Recipient [1:1418:3362]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:49:22.262128Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:49:22.262579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:49:22.262636Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:49:22.262688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:49:22.262737Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:49:22.267579Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274399233, Sender [1:1454:3362], Recipient [1:1418:3362]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-01-04T02:49:22.267641Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5421: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2026-01-04T02:49:22.267695Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1418:3362] sender: [1:1475:2058] recipient: [1:15:2062] 2026-01-04T02:49:22.312227Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1474:3407], Recipient [1:1418:3362]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-04T02:49:22.312295Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:49:22.312411Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:49:22.312795Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 328us result status StatusSuccess 2026-01-04T02:49:22.313685Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: 
TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { 
} ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 82624 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> KqpPg::DropTablePg [GOOD] >> KqpPg::DropTablePgMultiple >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration >> TSchemeShardServerLess::StorageBillingLabels [GOOD] >> TNodeBrokerTest::NodesMigrationReuseID [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> BasicUsage::ReadMirrored [GOOD] Test command err: 2026-01-04T02:48:40.497890Z :PropagateSessionClosed INFO: Random seed for debugging is 1767494920497862 2026-01-04T02:48:40.848767Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332881427331118:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:40.848825Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:41.044890Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332884880530751:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:41.044984Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-01-04T02:48:41.049982Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:48:41.088794Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:48:41.323971Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:41.323229Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:41.384123Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:41.384235Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:41.387155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:41.387277Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:41.405427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:41.427043Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:48:41.429758Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:41.473070Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:41.521102Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:41.569391Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:41.595481Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/002da0/r3tmp/yandexrkxnVH.tmp 2026-01-04T02:48:41.595504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/002da0/r3tmp/yandexrkxnVH.tmp 2026-01-04T02:48:41.595647Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/002da0/r3tmp/yandexrkxnVH.tmp 2026-01-04T02:48:41.596209Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:41.638510Z INFO: TTestServer started on Port 13739 GrpcPort 26740 PQClient connected to localhost:26740 2026-01-04T02:48:41.922621Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:41.929576Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:42.058020Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:48:42.065628Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:44.738425Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332898607201612:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:44.738550Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:44.738880Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332898607201625:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:44.738924Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332898607201626:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:44.739033Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:44.743040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:44.816337Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332898607201629:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710661 completed, doublechecking } 2026-01-04T02:48:45.125987Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332898607201712:2964] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:48:45.172654Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591332902902169028:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:48:45.173005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:45.175869Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YmIxN2IyNGMtMTI0YjZkMDMtZmU1NmM2ZmQtMTU2ZTU5YjM=, ActorId: [1:7591332898607201610:2329], ActorState: ExecuteState, LegacyTraceId: 01ke3egecwdxp38fcsv4rqhbra, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:48:45.176832Z node 2 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [2:7591332897765432892:2307], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:48:45.178454Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T02:48:45.178335Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=2&id=YjcwMTQzNS01MDAwNWE3OC0yYTVhZDM0Ny04NzcxOGUwMQ==, ActorId: [2:7591332897765432852:2301], ActorState: ExecuteState, LegacyTraceId: 01ke3egeen2vqvfmcsjv5app3t, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:48:45.178815Z node 2 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correc ... 
822818Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:21.822833Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:49:21.822834Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:49:21.913964Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:21.914006Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:21.914020Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:21.914041Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:21.914055Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-04T02:49:21.924236Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:21.924273Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:21.924298Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:21.924320Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:21.924334Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:49:21.924391Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:21.924401Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:21.924410Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:21.924422Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:21.924432Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:49:22.014670Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:22.014700Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.014714Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:22.014735Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.014748Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-04T02:49:22.024482Z node 3 :PERSQUEUE DEBUG: 
partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:22.024501Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:22.024519Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.024525Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.024532Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:22.024538Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:22.024561Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.024570Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.024585Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:49:22.024595Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:49:22.116264Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:22.116297Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.116311Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:22.116333Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.116358Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-04T02:49:22.124901Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:22.124902Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:22.124924Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.124930Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.124937Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:22.124940Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:22.124958Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.124958Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 
2026-01-04T02:49:22.124971Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:49:22.124977Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:49:22.216479Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:22.216514Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.216528Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:22.216552Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.216567Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-04T02:49:22.225200Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:22.225240Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.225253Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:22.225282Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.225296Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:49:22.225357Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:22.225365Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.225380Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:22.225390Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.225399Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T02:49:22.316817Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:22.316852Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.316865Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:22.316888Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.316902Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-04T02:49:22.325517Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:22.325517Z node 3 :PERSQUEUE 
DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:22.325549Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.325560Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:22.325563Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.325576Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:22.325580Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.325591Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:49:22.325597Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:22.325612Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist |95.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/federated_topic/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveActive [GOOD] >> TNodeBrokerTest::ExtendLeaseRestartRace ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchSize [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:49:13.833148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:49:13.833268Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:49:13.833329Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:49:13.833378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:49:13.833421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:49:13.833481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:49:13.833556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:49:13.833648Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, 
InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:49:13.834600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:49:13.834933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:49:13.931306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:13.931370Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:13.946314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:49:13.950626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:49:13.950830Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:49:13.956495Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:49:13.957950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:49:13.960411Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:49:13.963865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:49:13.971125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:13.972655Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:49:13.980325Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:49:13.980467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:13.980642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:49:13.980696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:49:13.980759Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:49:13.980906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:49:14.161799Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:49:14.163266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.163417Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.163502Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.163604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.163691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.163777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.163881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.163997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.164526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 6, at schemeshard: 72057594046678944 2026-01-04T02:49:24.343109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:24.343243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:24.343645Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:24.343762Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:24.344090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:24.344267Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:24.344323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:24.344432Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:24.344637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:24.344783Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:24.345270Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:24.345550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:24.345650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:24.345710Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:24.345896Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:24.345957Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:24.346019Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:24.346426Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:49:24.353732Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:49:24.353911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:49:24.358018Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435083, Sender [1:1532:3464], 
Recipient [1:1532:3464]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2026-01-04T02:49:24.358085Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2026-01-04T02:49:24.359811Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:49:24.359906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:24.361127Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:1532:3464], Recipient [1:1532:3464]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:49:24.361180Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:49:24.362403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:49:24.362478Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:49:24.362561Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:49:24.362604Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:49:24.363538Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 274399233, Sender [1:1568:3464], Recipient [1:1532:3464]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-01-04T02:49:24.363589Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5421: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2026-01-04T02:49:24.363629Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:1532:3464] sender: [1:1589:2058] recipient: [1:15:2062] 2026-01-04T02:49:24.410774Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1588:3509], Recipient [1:1532:3464]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-04T02:49:24.410851Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:49:24.410972Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:49:24.411324Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 341us result status StatusSuccess 2026-01-04T02:49:24.412248Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme 
DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 1 MinPartitionsCount: 20 MaxPartitionsCount: 20 } } TableSchemaVersion: 2 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 13984 RowCount: 100 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } 
ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 21833 Memory: 141504 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 13984 DataSize: 13984 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseID [GOOD] Test command err: 2026-01-04T02:49:22.939357Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:22.939432Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-false >> RetryPolicy::TWriteSession_SwitchBackToLocalCluster [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_serverless/unittest >> TSchemeShardServerLess::StorageBillingLabels [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:48:05.479844Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:48:05.479976Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:48:05.480030Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:48:05.480081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:48:05.480120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:48:05.480159Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:48:05.480220Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:48:05.480301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:48:05.481247Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:48:05.483266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:48:05.583557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:48:05.583631Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:05.600625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:48:05.601293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:48:05.602275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:48:05.614094Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 
2026-01-04T02:48:05.615538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:48:05.618354Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:48:05.622431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:48:05.630527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:05.632036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:48:05.640777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:48:05.640880Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:48:05.640947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:48:05.641003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:48:05.641179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:48:05.642017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:48:05.818030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:05.820490Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:05.820737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:05.820854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" 
OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:05.820943Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:05.821017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:05.821090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:05.821172Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:05.821289Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:05.821429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:05.821524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:48:05.821601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:05.821722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:05.821857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:48:05.821971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
p:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-04T02:48:54.271243Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-01-04T02:48:58.329724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:48:58.329893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:129: TTxServerlessStorageBilling: too soon call, wait until current period ends, schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], now: 1970-01-01T00:02:00.000000Z, LastBillTime: 1970-01-01T00:01:00.000000Z, lastBilled: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, toBill: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, next retry at: 1970-01-01T00:03:00.000000Z 2026-01-04T02:48:58.329971Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:48:58.424329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72057594046678944 2026-01-04T02:48:58.424467Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046678944 2026-01-04T02:48:58.424550Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046678944 2026-01-04T02:48:58.570043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409546 2026-01-04T02:48:58.570171Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409546 2026-01-04T02:48:58.570252Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2026-01-04T02:48:58.624181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2026-01-04T02:48:58.624309Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2026-01-04T02:48:58.624379Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2026-01-04T02:48:58.670476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0026 2026-01-04T02:48:58.713233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-01-04T02:48:58.713418Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-01-04T02:48:58.713493Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, 
RowCount 1, DataSize 41 2026-01-04T02:48:58.713609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-04T02:48:58.724038Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-01-04T02:49:03.124550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0026 2026-01-04T02:49:03.174046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-01-04T02:49:03.174240Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-01-04T02:49:03.174315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-01-04T02:49:03.174415Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-04T02:49:03.184843Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-01-04T02:49:07.606438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0022 2026-01-04T02:49:07.666091Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-01-04T02:49:07.666300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-01-04T02:49:07.666369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-01-04T02:49:07.666472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-04T02:49:07.678082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-01-04T02:49:12.090569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0017 2026-01-04T02:49:12.138073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-01-04T02:49:12.138274Z 
node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-01-04T02:49:12.138354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-01-04T02:49:12.138471Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-04T02:49:12.150231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-01-04T02:49:16.513156Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.0017 2026-01-04T02:49:16.562057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-01-04T02:49:16.562252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-01-04T02:49:16.562319Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-01-04T02:49:16.562443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-04T02:49:16.572914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 0 2026-01-04T02:49:20.792604Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 41 rowCount 1 cpuUsage 0.002 2026-01-04T02:49:20.835609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue size# 1 2026-01-04T02:49:20.835908Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 32 shard idx 72075186233409549:4 data size 41 row count 1 2026-01-04T02:49:20.835975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409552 maps to shardIdx: 72075186233409549:4 followerId=0, pathId: [OwnerId: 72075186233409549, LocalPathId: 32], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 41 2026-01-04T02:49:20.836107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409552: SplitByLoadNotEnabledForTable 2026-01-04T02:49:20.846698Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72075186233409549, queue 
size# 0 2026-01-04T02:49:24.738948Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:49:24.739333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:202: TTxServerlessStorageBilling: make a bill, record: '{"usage":{"start":120,"quantity":59,"finish":179,"type":"delta","unit":"byte*second"},"tags":{"ydb_size":41},"id":"72057594046678944-39-120-179-41","cloud_id":"CLOUD_ID_VAL","source_wt":180,"source_id":"sless-docapi-ydb-storage","resource_id":"DATABASE_ID_VAL","schema":"ydb.serverless.v1","labels":{"Category":"Table","k":"v"},"folder_id":"FOLDER_ID_VAL","version":"1.0.0"} ', schemeshardId: 72075186233409549, domainId: [OwnerId: 72057594046678944, LocalPathId: 39], now: 1970-01-01T00:03:00.000000Z, LastBillTime: 1970-01-01T00:01:00.000000Z, lastBilled: 1970-01-01T00:01:00.000000Z--1970-01-01T00:01:59.000000Z, toBill: 1970-01-01T00:02:00.000000Z--1970-01-01T00:02:59.000000Z, next retry at: 1970-01-01T00:04:00.000000Z 2026-01-04T02:49:24.742569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete ... blocking NKikimr::NMetering::TEvMetering::TEvWriteMeteringJson from FLAT_SCHEMESHARD_ACTOR to TFakeMetering cookie 0 ... waiting for metering (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveActive [GOOD] Test command err: 2026-01-04T02:49:22.936611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:22.936686Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-04T02:49:24.025755Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1025] |95.2%| [TA] $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> RetryPolicy::TWriteSession_SeqNoShift >> TLocksTest::GoodSameShardLock [GOOD] |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_serverless/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpireRemoved [GOOD] |95.2%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/federated_topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpireRemoved [GOOD] Test command err: 2026-01-04T02:49:22.940795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:22.940865Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) >> KqpPg::CopyTableSerialColumns+useSink [GOOD] >> KqpPg::CopyTableSerialColumns-useSink |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.2%| [TA] $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... 
results_accumulator.log} >> Cdc::AddIndex [GOOD] >> Cdc::AddStream |95.2%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} |95.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_serverless/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] Test command err: 2026-01-04T02:49:22.947625Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:22.947720Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-04T02:49:23.066013Z node 1 :NODE_BROKER ERROR: node_broker.cpp:798: [Dirty] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s 2026-01-04T02:49:23.078385Z node 1 :NODE_BROKER ERROR: node_broker.cpp:798: [Committed] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s 2026-01-04T02:49:26.506450Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:26.506533Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-false >> TNodeBrokerTest::DoNotReuseDynnodeIdsBelowMinDynamicNodeId [GOOD] |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails2 [GOOD] |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> TNodeBrokerTest::ConfigPipelining |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> TNodeBrokerTest::NodesMigrationNodeName |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails [GOOD] >> TNodeBrokerTest::TestListNodesEpochDeltas |95.2%| [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckSameGenerationAndTransact [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::GoodSameShardLock [GOOD] Test command err: 2026-01-04T02:48:43.420029Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332891438074593:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:43.420236Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:43.690464Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:43.694219Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:43.694348Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:43.720748Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:43.775173Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:43.913917Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:44.022280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:44.031598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:44.042785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:44.122307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:44.324882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:44.387669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:44.426937Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:47.010154Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332910096514643:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:47.010238Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect 
path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:47.023384Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:47.090769Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:47.132774Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:47.132887Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:47.138968Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:47.296465Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:47.297237Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:47.315454Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:47.367688Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:47.412123Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:50.436486Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332921507006441:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:50.436531Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:50.453631Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:50.523286Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:50.533283Z node 3 :CONFIGS_DISPATCHER ERROR: 
configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332921507006414:2081] 1767494930435523 != 1767494930435526 2026-01-04T02:48:50.563678Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:50.563764Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:50.565277Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:50.729304Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:50.746149Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:50.767971Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:50.826612Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:50.879933Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.193785Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332941399369545:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:54.239672Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:54.258042Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:54.375368Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:54.389553Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:54.389640Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:54.389930Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification 
cookie mismatch for subscription [4:7591332941399369426:2081] 1767494934154752 != 1767494934154755 2026-01-04T02:48:54.405833Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:54.504422Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:54.644105Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:54.650233Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: ... 97 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:07.399699Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:07.428997Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:07.449843Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:07.698687Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:07.710650Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:49:07.725047Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:07.853496Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:07.913390Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:11.759637Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7591333013291191278:2263];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:11.759754Z node 8 :METADATA_PROVIDER 
ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:11.806086Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:11.925985Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7591333013291191039:2081] 1767494951739762 != 1767494951739765 2026-01-04T02:49:11.933088Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:11.939768Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:11.939871Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:11.967204Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:12.017891Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:12.218650Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:12.244794Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:12.334042Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:12.413942Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:16.507356Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7591333034637759184:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:16.507417Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:16.527574Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:16.627101Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:16.628858Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7591333034637759157:2081] 1767494956505984 != 1767494956505987 2026-01-04T02:49:16.665590Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:16.665690Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:16.685864Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:16.702713Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:16.986075Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:17.010788Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:17.080218Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:17.141017Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:21.174763Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591333053350886563:2150];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:21.175016Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:21.208784Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:21.305965Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7591333053350886435:2081] 1767494961169888 != 1767494961169891 2026-01-04T02:49:21.309044Z node 
10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:21.316946Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:21.317053Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:21.324235Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:21.387669Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:21.666074Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:21.690017Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:21.788812Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:21.854420Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration [GOOD] >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] >> TTenantPoolTests::TestSensorsConfigForStaticSlot >> TNodeBrokerTest::ShiftIdRangeRemoveExpired >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-true >> KqpPg::InsertValuesFromTableWithDefaultTextNotNull-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] >> KqpPg::DropTablePgMultiple [GOOD] >> KqpPg::DropTableIfExists >> TLocalTests::TestAlterTenant >> TNodeBrokerTest::ConfigPipelining [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultAndCast-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool+useSink >> TNodeBrokerTest::NodeNameExpiration >> TNodeBrokerTest::NodesMigrationExtendLease >> TNodeBrokerTest::UpdateNodesLogEmptyEpoch >> 
TLocalTests::TestAlterTenant [GOOD] >> TLocalTests::TestAddTenantWhileResolving ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2026-01-04T02:49:28.252928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:28.253004Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) ... blocking NKikimr::TEvTabletPipe::TEvClientConnected from TABLET_PIPE_CLIENT to NAMESERVICE cookie 0 ... unblocking NKikimr::TEvTabletPipe::TEvClientConnected from TABLET_PIPE_CLIENT to NAMESERVICE 2026-01-04T02:49:29.111216Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:29.111285Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ConfigPipelining [GOOD] Test command err: 2026-01-04T02:49:22.936801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:22.936870Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-04T02:49:28.329535Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:28.329603Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-04T02:49:28.433591Z node 9 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host1:1001: ERROR_TEMP: No free node IDs ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... waiting for commit (done) ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SyncNodes >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] >> TNodeBrokerTest::NodesMigration999Nodes >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] >> TNodeBrokerTest::NodesMigrationNodeName [GOOD] >> TNodeBrokerTest::RegistrationPipelining >> TLocalTests::TestAddTenantWhileResolving [GOOD] >> KqpPg::CreateUniqPgColumn-useSink [GOOD] >> KqpPg::CreateUniqComplexPgColumn+useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseRestartRace [GOOD] Test command err: 2026-01-04T02:49:26.371263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:26.371342Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... rebooting node broker ... OnActivateExecutor tabletId# 72057594037936129 ... captured cache request ... captured cache request ... sending extend lease request ... captured cache request ... waiting for response ... waiting for epoch update |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TLocksTest::BrokenNullLock [GOOD] >> KqpPg::TypeCoercionInsert-useSink [GOOD] >> KqpPg::V1CreateTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationNodeName [GOOD] Test command err: 2026-01-04T02:49:28.860877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:28.860948Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestSensorsConfigForStaticSlot [GOOD] Test command err: 2026-01-04T02:49:29.019286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:29.019357Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:29.422881Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenantWhileResolving [GOOD] |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration [GOOD] Test command err: 2026-01-04T02:49:20.559125Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333049305883835:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:20.559241Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:20.604294Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:49:20.871950Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:20.919077Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:20.919223Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:20.963801Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:20.968229Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333049305883795:2081] 1767494960554314 != 1767494960554317 2026-01-04T02:49:20.973301Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:21.168278Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:21.236026Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:21.246038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:49:21.570827Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2026-01-04T02:49:22.264139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:24.627605Z node 1 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [1:7591333066485754007:2599], ActorId: [1:7591333066485754008:2599], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=1&id=MTE4MDhjYTUtODVjOTIzN2ItNTc0ZjNlYmEtMTFhNjc5MWM=, TxId: 01ke3ehnc96jpnj579jresk8w2 2026-01-04T02:49:24.629822Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333066485754031:2328], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.629943Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.630209Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333066485754044:2329], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.630281Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> KqpPg::TypeCoercionInsert+useSink [GOOD] >> KqpPg::TableSelect+useSink >> TNodeBrokerTest::ShiftIdRangeRemoveExpired [GOOD] >> TNodeBrokerTest::NodesMigrationExtendLease [GOOD] >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] >> TNodeBrokerTest::TestListNodes >> TNodeBrokerTest::NodesMigrationExtendLeaseThenRemove >> TNodeBrokerTest::UpdateNodesLogEmptyEpoch [GOOD] >> TNodeBrokerTest::SyncNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveExpired [GOOD] Test command err: 2026-01-04T02:49:29.211995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:29.212074Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-04T02:49:30.911108Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1025] |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipelining [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLease [GOOD] Test command err: 2026-01-04T02:49:30.717900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:30.717992Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodesEpochDeltas [GOOD] Test command err: 2026-01-04T02:49:28.951512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:28.951583Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Expansion [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::BrokenNullLock [GOOD] Test command err: 2026-01-04T02:48:47.449634Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332911074836730:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:47.449920Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:47.488634Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:47.760883Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:47.766246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:47.766335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:47.817765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:47.886961Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:47.993285Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:48.066830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:48.138137Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:48.247253Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:48.291195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:48.463792Z node 
1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:50.994338Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332921187635999:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:50.994394Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:51.025971Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:51.106098Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:51.116069Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332921187635969:2081] 1767494930992702 != 1767494930992705 2026-01-04T02:48:51.145713Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:51.145843Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:51.152024Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:51.282883Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:51.332997Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:51.355988Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:51.424510Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:51.477364Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:54.585189Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332941384471437:2172];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:54.585519Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:54.620977Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:54.707105Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:54.707177Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:54.707748Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:54.709349Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591332941384471299:2081] 1767494934557688 != 1767494934557691 2026-01-04T02:48:54.738628Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:54.833634Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:54.933016Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:54.942144Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:54.953229Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:55.010879Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:55.100852Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:58.118411Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591332958229158595:2152];send_to=[0:7307199536658146131:7762515]; 
2026-01-04T02:48:58.118550Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:58.142597Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:58.207051Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:58.248922Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:58.249017Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:58.252286Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:58.342030Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:58.446889Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:58.454126Z node 4 :FLAT_TX_SC ... ation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:11.373998Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:49:11.385996Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:49:11.391807Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:11.468677Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:11.605077Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:15.779469Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7591333031840235514:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:15.779560Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:15.808489Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:15.920858Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:15.926197Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:7591333031840235483:2081] 1767494955778189 != 1767494955778192 2026-01-04T02:49:15.937333Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:15.937433Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:15.963291Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:16.067735Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:16.197395Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:16.230757Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:16.303561Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:16.381580Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:20.870203Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7591333050719886573:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:20.870272Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:20.895309Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:49:21.016699Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:21.016819Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:21.024454Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:21.025658Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:21.029998Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7591333050719886546:2081] 1767494960868670 != 1767494960868673 2026-01-04T02:49:21.050588Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:21.292474Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:21.379396Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:21.409496Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:49:21.466666Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:21.533248Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:21.593136Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:25.855869Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591333074378580342:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:25.856030Z node 10 
:METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:26.050571Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:26.062952Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:26.066038Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7591333074378580303:2081] 1767494965854301 != 1767494965854304 2026-01-04T02:49:26.097591Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:26.101085Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:26.139035Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:26.215568Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:26.524259Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:26.551176Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:26.633426Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:26.704417Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:26.910648Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SyncNodes [GOOD] Test command err: 2026-01-04T02:49:31.377723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:31.377849Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were 
not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigration >> TNodeBrokerTest::ShiftIdRangeRemoveReusedID ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateNodesLogEmptyEpoch [GOOD] Test command err: 2026-01-04T02:49:30.758383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:30.758461Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NoEffectBeforeCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipelining [GOOD] Test command err: 2026-01-04T02:49:31.448323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:31.448411Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Expansion [GOOD] |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNextGeneration [GOOD] Test command err: 2026-01-04T02:49:20.977514Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333049407998952:2229];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:20.977592Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:21.292496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:21.292596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:21.326033Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:21.362390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:21.381936Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333049407998738:2081] 1767494960948194 != 1767494960948197 2026-01-04T02:49:21.384409Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:21.499699Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme 
status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:21.598454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:21.979052Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2026-01-04T02:49:22.616928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:24.636257Z node 1 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [1:7591333066587868948:2598], ActorId: [1:7591333066587868949:2598], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=1&id=OWNkMmFjMjMtODQyZmRmYTEtODg1ZTI1OTItYjg3ZDJiZDA=, TxId: 01ke3ehncj4fsk4xqv9xhc9eh5 2026-01-04T02:49:24.638813Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333066587868971:2326], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.638955Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.639417Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333066587868984:2327], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.639464Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.756193Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591333069136945323:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:24.756256Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:24.793512Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:49:25.030340Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:25.030447Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:25.048396Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:25.065970Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591333069136945295:2081] 1767494964749718 != 1767494964749721 2026-01-04T02:49:25.088450Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:25.098079Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:25.241939Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:25.790370Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:25.875205Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:25.886470Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 --------------------------- INIT FINISHED --------------------------- 2026-01-04T02:49:26.900128Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> TNodeBrokerTest::ExtendLeaseSetLocationInOneRegistration >> TNodeBrokerTest::NodeNameExpiration [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldNotRegisterCheckPrevGeneration2 [GOOD] Test command err: 
2026-01-04T02:49:20.576364Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333052706227175:2166];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:20.576415Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:20.913977Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:20.915363Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:20.918098Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333052706227038:2081] 1767494960569084 != 1767494960569087 2026-01-04T02:49:20.932075Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:20.932241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:20.967730Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:21.076173Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:21.226963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:21.586002Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2026-01-04T02:49:22.256535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:24.626547Z node 1 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [1:7591333069886097247:2596], ActorId: [1:7591333069886097248:2596], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=1&id=MjQwM2YzMDMtNWNkZGVlNDEtNjBlNmE3NzgtMzFiMTliNmY=, TxId: 01ke3ehnc914p12fxcp09paqpk 2026-01-04T02:49:24.629569Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333069886097270:2327], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.629688Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.630052Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333069886097283:2328], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.630157Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.753975Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591333066675979886:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:24.754532Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:24.839947Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:24.856713Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:24.860848Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591333066675979776:2081] 1767494964736344 != 1767494964736347 2026-01-04T02:49:24.888169Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:24.888257Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:24.891421Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:25.084963Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:25.541790Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:25.550951Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:49:25.756564Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2026-01-04T02:49:26.564301Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:28.634928Z node 2 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [2:7591333083855849980:2595], ActorId: [2:7591333083855849981:2595], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=2&id=ODQ0NDJiMzQtMTg3Njc5ZmMtMmU4ODUyNjQtM2RhYzY4NzU=, TxId: 01ke3ehs9d7gxvs4b2nxh95p6g 2026-01-04T02:49:28.637199Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333083855850002:2326], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:28.637385Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:28.637732Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333083855850015:2327], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:28.637819Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRegisterCheckNewGenerationAndTransact [GOOD] Test command err: 2026-01-04T02:49:20.560289Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333049645181222:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:20.560333Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:20.885050Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:20.942603Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:20.942721Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:21.032792Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:21.054403Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:21.078444Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:21.365245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:21.371871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:49:21.546618Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2026-01-04T02:49:22.384605Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:24.751856Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591333066433203332:2082];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:24.756242Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:24.850473Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed 
to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:24.904055Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:24.904136Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:24.924249Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:24.948969Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:25.134649Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:25.760926Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:25.811877Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:25.817076Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 --------------------------- INIT FINISHED --------------------------- 2026-01-04T02:49:26.828283Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:26.981087Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> KqpPg::CopyTableSerialColumns-useSink [GOOD] >> KqpPg::CreateIndex ------- [TS] {asan, default-linux-x86_64, release} ydb/core/fq/libs/ydb/ut/unittest >> NFq::TRegisterCheckTestBase::ShouldRollbackTransactionWhenCheckFails [GOOD] Test command err: 2026-01-04T02:49:20.566696Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333050512818469:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:20.566778Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:20.859500Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 
2026-01-04T02:49:20.894310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:20.894409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:20.981228Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333050512818431:2081] 1767494960554308 != 1767494960554311 2026-01-04T02:49:20.993137Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:21.004830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:21.035131Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:21.267815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:21.601979Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; --------------------------- INIT FINISHED --------------------------- 2026-01-04T02:49:22.301159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:22.537420Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:24.829000Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591333068247485687:2154];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:24.829345Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:24.836910Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:49:24.929065Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:24.945110Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591333068247485558:2081] 1767494964808114 != 1767494964808117 2026-01-04T02:49:24.950366Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:24.950454Z node 2 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:24.973699Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:49:24.990622Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:25.155354Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:49:25.837949Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:25.883669Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:25.889990Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 --------------------------- INIT FINISHED --------------------------- 2026-01-04T02:49:26.897101Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:28.426152Z node 2 :STREAMS_STORAGE_SERVICE WARN: query_actor.cpp:376: [TQueryBase] OwnerId: [2:7591333085427355756:2594], ActorId: [2:7591333085427355757:2594], Finish with INTERNAL_ERROR, Issues: {
: Error: Rollback transaction }, SessionId: ydb://session/3?node_id=2&id=YzgyZTIzZDctOTIwYmYyOTAtNDE2NmRmNDAtODM0MmQyNmE=, TxId: 01ke3ehs33c1nenycarh8h8ekv 2026-01-04T02:49:28.428480Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333085427355779:2326], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:28.428965Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:28.429922Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333085427355792:2327], DatabaseId: /dc-1, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:28.429961Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /dc-1, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } |95.2%| [TS] {BAZEL_UPLOAD} ydb/core/fq/libs/ydb/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLeaseThenRemove [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameExpiration [GOOD] Test command err: 2026-01-04T02:49:30.543325Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:30.543397Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-04T02:49:30.910323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 |95.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration1000Nodes |95.2%| [TA] $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TA] {RESULT} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink |95.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/fq/libs/ydb/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLeaseThenRemove [GOOD] Test command err: 2026-01-04T02:49:33.482399Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:33.482478Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) >> TNodeBrokerTest::NodesMigration999Nodes [GOOD] >> TNodeBrokerTest::UpdateNodesLog >> TNodeBrokerTest::SubscribeToNodes >> TNodeBrokerTest::NodeNameReuseRestart |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigrationManyNodesInterrupted >> TNodeBrokerTest::NodesMigrationExtendLeaseThenExpire >> TNodeBrokerTest::MinDynamicNodeIdShifted >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-false >> KqpPg::DropTableIfExists [GOOD] >> KqpPg::DropTableIfExists_GenericQuery >> TSlotIndexesPoolTest::Basic [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink >> TNodeBrokerTest::NoEffectBeforeCommit [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration999Nodes [GOOD] Test command err: 2026-01-04T02:49:31.342040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:31.342112Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveNew >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-true |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Basic [GOOD] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissNoDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-true >> TNodeBrokerTest::LoadStateMoveEpoch >> TNodeBrokerTest::NodesMigrationSetLocation ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NoEffectBeforeCommit [GOOD] Test command err: 2026-01-04T02:49:34.771080Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:34.771168Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseSetLocationInOneRegistration [GOOD] >> Cdc::AddStream [GOOD] >> Cdc::DisableStream >> TDynamicNameserverTest::CacheMissPipeDisconnect-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-false >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TNodeBrokerTest::ExtendLeasePipelining >> TNodeBrokerTest::NodesV2BackMigration [GOOD] >> TNodeBrokerTest::ShiftIdRangeRemoveReusedID [GOOD] >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeaseSetLocationInOneRegistration [GOOD] Test command err: 2026-01-04T02:49:35.240279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:35.240355Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2026-01-04T02:49:36.912797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:36.912874Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) ... 
unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from to NODE_BROKER_ACTOR 2026-01-04T02:49:37.707256Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:37.707323Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigration [GOOD] Test command err: 2026-01-04T02:49:34.581258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:34.581366Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateEpochPipelining >> TNodeBrokerTest::NodesMigrationExtendLeaseThenExpire [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveReusedID [GOOD] Test command err: 2026-01-04T02:49:34.583352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:34.583430Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-04T02:49:36.789132Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1025] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestart [GOOD] Test command err: 2026-01-04T02:49:36.635769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:36.635850Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-04T02:49:36.996436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] Test command err: 2026-01-04T02:49:37.427586Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:37.427666Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::TEvTabletPipe::TEvClientConnected from TABLET_PIPE_CLIENT to NAMESERVICE cookie 0 ... unblocking NKikimr::TEvTabletPipe::TEvClientConnected from TABLET_PIPE_CLIENT to NAMESERVICE 2026-01-04T02:49:38.267323Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:38.267390Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... 
waiting for cache miss (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration1001Nodes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_stats/unittest >> TSchemeshardStatsBatchingTest::ShouldPersistByBatchTimeout [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T02:49:13.831637Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:49:13.831752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:49:13.831814Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:49:13.831879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:49:13.831937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:49:13.831994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:49:13.832075Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:49:13.832162Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:49:13.833086Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:49:13.834871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:49:13.934101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:13.934176Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:13.962443Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:49:13.966499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:49:13.966727Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:49:13.995053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:49:13.995350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:49:13.996071Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:49:13.996358Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:49:14.000125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:14.000312Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:49:14.001645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:49:14.001727Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:14.001876Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:49:14.001935Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:49:14.001991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:49:14.002133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:49:14.173422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.174344Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.174494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.174564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: 
"\n\017metadata@system\022\000" 2026-01-04T02:49:14.174644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.174720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.174776Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.174892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.174965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.175044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.175118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.175182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 
Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.175258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.175331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:49:14.175422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 9:38.104012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 3, at schemeshard: 72057594046678944 2026-01-04T02:49:38.104330Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:38.104440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:38.104809Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:38.104890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:38.105195Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:38.105365Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:38.105419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:38.105506Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:38.105685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:38.105776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 
2026-01-04T02:49:38.106308Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:38.106567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:38.106656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:38.106715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:38.106856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:38.106914Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:38.106970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-04T02:49:38.107322Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:49:38.114455Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:49:38.114636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:49:38.119254Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435083, Sender [1:2156:4080], Recipient [1:2156:4080]: NKikimr::NSchemeShard::TEvPrivate::TEvServerlessStorageBilling 2026-01-04T02:49:38.119321Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5311: StateWork, processing event TEvPrivate::TEvServerlessStorageBilling 2026-01-04T02:49:38.121104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:49:38.121192Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:49:38.122367Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [1:2156:4080], Recipient [1:2156:4080]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:49:38.122430Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:49:38.122886Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:49:38.122954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:49:38.123050Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:49:38.123100Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:49:38.125879Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: 
StateWork, received event# 274399233, Sender [1:2194:4080], Recipient [1:2156:4080]: NKikimr::TEvTxAllocatorClient::TEvAllocateResult 2026-01-04T02:49:38.125945Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5421: StateWork, processing event TEvTxAllocatorClient::TEvAllocateResult 2026-01-04T02:49:38.125983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:2156:4080] sender: [1:2215:2058] recipient: [1:15:2062] 2026-01-04T02:49:38.183374Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:2214:4127], Recipient [1:2156:4080]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-04T02:49:38.183498Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:49:38.183617Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Simple" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:49:38.183954Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Simple" took 305us result status StatusSuccess 2026-01-04T02:49:38.184910Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Simple" PathDescription { Self { Name: "Simple" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 1001 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Simple" Columns { Name: "key" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: 
"background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 MaxPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 27456 RowCount: 200 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 19532 Memory: 156864 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 27456 DataSize: 27456 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_stats/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] >> TNodeBrokerTest::NodesV2BackMigrationShiftIdRange >> TNodeBrokerTest::TestListNodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExtendLeaseThenExpire [GOOD] Test command err: 2026-01-04T02:49:36.922059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 
2026-01-04T02:49:36.922152Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveNew [GOOD] >> TNodeBrokerTest::NodesMigrationSetLocation [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::MinDynamicNodeIdShifted [GOOD] Test command err: 2026-01-04T02:49:37.011517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:37.011599Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) >> TNodeBrokerTest::SubscribeToNodes [GOOD] >> TNodeBrokerTest::NodesMigration1000Nodes [GOOD] |95.3%| [TA] $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateNodesLog [GOOD] |95.3%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} |95.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_stats/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestListNodes [GOOD] Test command err: 2026-01-04T02:49:33.476454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:33.476533Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ShiftIdRangeRemoveNew [GOOD] Test command err: 2026-01-04T02:49:37.451352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:37.451429Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-04T02:49:38.661667Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1025] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationManyNodesInterrupted >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-false >> KqpPg::CreateIndex [GOOD] >> KqpPg::CreateNotNullPgColumn ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationSetLocation [GOOD] Test command err: 2026-01-04T02:49:38.209412Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:38.209490Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] >> KqpPg::CreateUniqComplexPgColumn+useSink [GOOD] >> KqpPg::CreateUniqComplexPgColumn-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration1000Nodes [GOOD] Test command err: 2026-01-04T02:49:36.139102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:36.139186Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateNodesLog [GOOD] Test command err: 2026-01-04T02:49:36.604831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:36.604895Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR 2026-01-04T02:49:39.165530Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1026 not in range (1023, 1024] >> TNodeBrokerTest::RegistrationPipeliningNodeName |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SubscribeToNodes [GOOD] Test command err: 2026-01-04T02:49:36.614889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:36.614965Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-04T02:49:38.947205Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1085: [DB] Removing node with wrong ID 1025 not in range (1023, 1024] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> KqpPg::InsertValuesFromTableWithDefaultTextNotNullButNull-useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink >> TNodeBrokerTest::NodesAlreadyMigrated >> TDynamicNameserverTest::TestCacheUsage >> TNodeBrokerTest::NodesMigrationReuseIDThenExtendLease ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::LoadStateMoveEpoch [GOOD] Test command err: 2026-01-04T02:49:37.994828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:37.994910Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeasePipelining [GOOD] >> TEnumerationTest::TestPublish [GOOD] >> TLocalTests::TestAddTenant >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-true >> TNodeBrokerTest::SeveralNodesSubscribersPerPipe ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ExtendLeasePipelining [GOOD] Test command err: 2026-01-04T02:49:38.801456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:38.801515Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TLocalTests::TestAddTenant [GOOD] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration2000Nodes >> TNodeBrokerTest::NodeNameWithDifferentTenants >> TNodeBrokerTest::NodesMigrationReuseRemovedID >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TLocalTests::TestAddTenant [GOOD] Test command err: 2026-01-04T02:49:42.485794Z node 1 :LOCAL ERROR: local.cpp:1299: TDomainLocal(dc-1): Receive TEvDescribeSchemeResult with bad status StatusPathDoesNotExist reason is <> while resolving subdomain dc-1 2026-01-04T02:49:42.486057Z node 1 :LOCAL ERROR: local.cpp:1549: Unknown domain dc-3 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSameDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2026-01-04T02:49:41.312685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:41.312753Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) 2026-01-04T02:49:42.195770Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:42.195843Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... 
blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] >> TDynamicNameserverTest::TestCacheUsage [GOOD] >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-true >> KqpPg::DropTableIfExists_GenericQuery [GOOD] >> KqpPg::EquiJoin+useSink >> KqpPg::InsertValuesFromTableWithDefaultBool-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink >> TNodeBrokerTest::ListNodesEpochDeltasPersistance >> TSlotIndexesPoolTest::Init [GOOD] >> TNodeBrokerTest::NodesMigrationExpireActive >> TNodeBrokerTest::RegistrationPipeliningNodeName [GOOD] >> TNodeBrokerTest::TestRandomActions >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-true [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::UpdateEpochPipelining [GOOD] Test command err: 2026-01-04T02:49:39.709553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:39.709639Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-04T02:49:41.202622Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host2:1001: ERROR_TEMP: No free node IDs ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigrationShiftIdRange [GOOD] |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Init [GOOD] |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration1001Nodes [GOOD] >> TNodeBrokerTest::NodesMigrationReuseIDThenExtendLease [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::RegistrationPipeliningNodeName [GOOD] Test command err: 2026-01-04T02:49:41.942446Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:41.942520Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-04T02:49:42.085172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2026-01-04T02:49:42.132090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 ... waiting for commit ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 1 ... waiting for commit (done) ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2026-01-04T02:49:42.142052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:42.142130Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-04T02:49:43.528868Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:43.528942Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseExpiredID ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigrationShiftIdRange [GOOD] Test command err: 2026-01-04T02:49:40.305817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:40.305915Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-04T02:49:42.530767Z node 1 :NODE_BROKER ERROR: node_broker.cpp:1142: [DB] Removing node with wrong ID 1025 not in range (1023, 1024] ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... 
unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigrationManyNodesInterrupted [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseIDThenExtendLease [GOOD] Test command err: 2026-01-04T02:49:42.250939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:42.251026Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration1001Nodes [GOOD] Test command err: 2026-01-04T02:49:40.208455Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:40.208534Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) >> TNodeBrokerTest::NodesSubscriberDisconnect >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SeveralNodesSubscribersPerPipe [GOOD] >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] >> TNodeBrokerTest::NodesAlreadyMigrated [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesV2BackMigrationManyNodesInterrupted [GOOD] Test command err: 2026-01-04T02:49:36.797438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:36.797523Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for first batch is committed ... blocking NKikimr::TEvTablet::TEvCommitResult from TABLET_ACTOR to FLAT_EXECUTOR cookie 2 ... blocking NKikimr::TEvTablet::TEvCommitResult from TABLET_ACTOR to FLAT_EXECUTOR cookie 1 ... waiting for first batch is committed (done) |95.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameWithDifferentTenants [GOOD] Test command err: 2026-01-04T02:49:43.440087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:43.440160Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-04T02:49:43.538126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2026-01-04T02:49:43.564952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SeveralNodesSubscribersPerPipe [GOOD] Test command err: 2026-01-04T02:49:42.834186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:42.834265Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for updates are sent ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to TEST_ACTOR_RUNTIME cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to TEST_ACTOR_RUNTIME cookie 0 ... waiting for updates are sent (done) ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to TEST_ACTOR_RUNTIME ... unblocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to TEST_ACTOR_RUNTIME |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpireActive [GOOD] >> TNodeBrokerTest::NodesMigrationRemoveExpired >> TNodeBrokerTest::NodesMigrationManyNodesInterrupted [GOOD] >> KqpPg::CreateNotNullPgColumn [GOOD] >> KqpPg::CreateSequence |95.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesAlreadyMigrated [GOOD] Test command err: 2026-01-04T02:49:42.107726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:42.107799Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... 
blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR ... blocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR cookie 2 ... unblocking NKikimr::TEvTablet::TEvCommit from FLAT_EXECUTOR to TABLET_ACTOR |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges >> TDynamicNameserverTest::BasicFunctionality-EnableNodeBrokerDeltaProtocol-false [GOOD] >> GracefulShutdown::TTxGracefulShutdown >> TNodeBrokerTest::ListNodesEpochDeltasPersistance [GOOD] >> TNodeBrokerTest::NodesMigrationReuseRemovedID [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpireActive [GOOD] Test command err: 2026-01-04T02:49:44.111172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:44.111255Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationManyNodesInterrupted [GOOD] Test command err: 2026-01-04T02:49:41.362013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:41.362098Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for first batch is committed ... blocking NKikimr::TEvTablet::TEvCommitResult from TABLET_ACTOR to FLAT_EXECUTOR cookie 2 ... blocking NKikimr::TEvTablet::TEvCommitResult from TABLET_ACTOR to FLAT_EXECUTOR cookie 1 ... waiting for first batch is committed (done) |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/blobstorage/storagepoolmon/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestForcedSensorLabelsForStaticConfig [GOOD] Test command err: 2026-01-04T02:49:45.830351Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:49:45.830831Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/0019b9/r3tmp/tmpkJARB1/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:49:45.831436Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/0019b9/r3tmp/tmpkJARB1/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/0019b9/r3tmp/tmpkJARB1/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 13414483620382358296 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T02:49:45.836335Z node 3 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T02:49:45.836700Z node 3 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/0019b9/r3tmp/tmpkJARB1/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T02:49:45.836908Z node 3 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/0019b9/r3tmp/tmpkJARB1/pdisk_1.dat": unknown reason, errno# 0. 
Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/0019b9/r3tmp/tmpkJARB1/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 7327609997526615688 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase+useSink [GOOD] >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/blobstorage/storagepoolmon/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> Cdc::DisableStream [GOOD] >> Cdc::AwsRegion ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ListNodesEpochDeltasPersistance [GOOD] Test command err: 2026-01-04T02:49:43.957366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:43.957450Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseRemovedID [GOOD] >> TNodeBrokerTest::NodesMigrationRemoveActive Test command err: 2026-01-04T02:49:43.419541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:43.419612Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut_fat/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut_fat/unittest >> TLocalTests::TestRemoveTenantWhileResolving |95.4%| [TA] $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.4%| [TA] {RESULT} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/blobstorage/storagepoolmon/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TNodeBrokerTest::ResolveScopeIdForServerless >> TNodeBrokerTest::NodesSubscriberDisconnect [GOOD] >> TNodeBrokerTest::NodesMigrationReuseExpiredID [GOOD] |95.4%| [TA] $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} >> TNodeBrokerTest::NodesMigrationNewExpiredNode >> TNodeBrokerTest::NodesMigrationNewActiveNode |95.4%| [TA] {RESULT} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_external_blobs/unittest |95.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/ut_fat/test-results/unittest/{meta.json ... results_accumulator.log} |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_external_blobs/unittest >> KqpSnapshotIsolation::ConflictWrite-IsOlap+FillTables [GOOD] >> KqpSnapshotIsolation::TConflictReadWriteOlap >> TLocalTests::TestRemoveTenantWhileResolving [GOOD] >> TNodeBrokerTest::BasicFunctionality >> TNodeBrokerTest::NodesMigration2000Nodes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesSubscriberDisconnect [GOOD] Test command err: 2026-01-04T02:49:45.700363Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:45.700440Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) >> GracefulShutdown::TTxGracefulShutdown [GOOD] |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-false ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationReuseExpiredID [GOOD] Test command err: 2026-01-04T02:49:45.095701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:45.095799Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TA] $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} >> TNodeBrokerTest::SingleDomainModeBannedIds >> KqpPg::InsertNoTargetColumns_SerialNotNull+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] >> TNodeBrokerTest::ExtendLeaseBumpVersion >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-true |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... results_accumulator.log} |95.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_external_blobs/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TNodeBrokerTest::FixedNodeId [GOOD] >> TDynamicNameserverTest::ListNodesCacheWhenNoChanges-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-true ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> GracefulShutdown::TTxGracefulShutdown [GOOD] Test command err: 2026-01-04T02:49:43.559427Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:43.559521Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-04T02:49:43.899839Z node 1 :NODE_BROKER ERROR: node_broker.cpp:798: [Dirty] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s 2026-01-04T02:49:43.912430Z node 1 :NODE_BROKER ERROR: node_broker.cpp:798: [Committed] Configured lease duration (10.000000s) is too small. Using min. value: 300.000000s 2026-01-04T02:49:47.362798Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:47.362871Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigration2000Nodes [GOOD] Test command err: 2026-01-04T02:49:43.296535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:43.296623Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> YdbTableSplit::SplitByLoadWithReads [GOOD] >> TFlatTest::AutoSplitMergeQueue [GOOD] >> TNodeBrokerTest::NodesMigrationRemoveExpired [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::ResolveScopeIdForServerless [GOOD] Test command err: 2026-01-04T02:49:48.564780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:48.564856Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-04T02:49:48.661279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 2026-01-04T02:49:48.820711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 102:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 102 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000002 FAKE_COORDINATOR: Erasing txId 102 |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::FixedNodeId [GOOD] Test command err: 2026-01-04T02:49:47.726456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:47.726540Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) >> TDynamicNameserverTest::CacheMissDifferentDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-false |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemoveActive [GOOD] >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] >> KqpPg::EquiJoin+useSink [GOOD] >> KqpPg::EquiJoin-useSink >> KqpPg::CreateUniqComplexPgColumn-useSink [GOOD] >> KqpPg::CreateTempTable ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemoveExpired [GOOD] Test command err: 2026-01-04T02:49:46.817073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:46.817153Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-false [GOOD] >> TNodeBrokerTest::NodesMigrationNewActiveNode [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissSimpleDeadline-EnableNodeBrokerDeltaProtocol-true [GOOD] Test command err: 2026-01-04T02:49:49.364358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:49.364437Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-04T02:49:49.955477Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:49.955552Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] >> TNodeBrokerTest::NodesMigrationNewExpiredNode [GOOD] >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationRemoveActive [GOOD] Test command err: 2026-01-04T02:49:48.424335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:48.424421Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithReads [GOOD] Test command err: 2026-01-04T02:49:19.086812Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333046927582289:2264];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:19.086950Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:19.324225Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:49:19.365536Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:19.365651Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:19.395301Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:19.426330Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:19.605570Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:49:19.653780Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:49:19.653835Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:49:19.653881Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty 
maybe) 2026-01-04T02:49:19.653970Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:49:20.075127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:20.080300Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:20.090133Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:49:22.390871Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333059812485333:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.390993Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.391673Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333059812485343:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.391755Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.776738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:22.975118Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333059812485522:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.975202Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.975456Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333059812485524:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.975493Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.995784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Table has 1 shards 2026-01-04T02:49:23.170288Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333064107452923:2378], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.170468Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.171017Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333064107452934:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.171068Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333064107452935:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.171123Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333064107452937:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.171154Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333064107452936:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.171515Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.174850Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2026-01-04T02:49:23.175211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:49:23.175249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710660:1, at schemeshard: 72057594046644480 2026-01-04T02:49:23.175351Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:49:23.175377Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710660:2, at schemeshard: 72057594046644480 2026-01-04T02:49:23.175478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710660:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:49:23.175554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710660:3, path# /Root/.metadata/workload_manager/pools/default 2026-01-04T02:49:23.175940Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710660:3 1 -> 128 2026-01-04T02:49:23.176105Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333064107452979:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.176195Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333064107452981:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.176235Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.176426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333064107452994:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.176481Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333064107452997:2405], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.176488Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333064107452998:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.176545Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TP ... eshard__operation_common.cpp:177: TCreateParts opId# 281474976715693:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-04T02:49:42.975206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976715693:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-04T02:49:42.975246Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715693:0 2 -> 3 2026-01-04T02:49:42.979116Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:84: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2026-01-04T02:49:42.992948Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:7591333145711933833:11726] 2026-01-04T02:49:43.003597Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:7591333145711933827:11725] 2026-01-04T02:49:43.030896Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-04T02:49:43.030991Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-04T02:49:43.031150Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 0 2026-01-04T02:49:43.032609Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:49:43.032692Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:49:43.032842Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-04T02:49:43.038656Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715693:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715693:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715693 TabletId: 72075186224037890 2026-01-04T02:49:43.041201Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715693:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715693:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715693 TabletId: 72075186224037889 2026-01-04T02:49:43.041248Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715693:0 3 -> 131 2026-01-04T02:49:43.043017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:49:43.062766Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037890 2026-01-04T02:49:43.062776Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037889 2026-01-04T02:49:43.062937Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 
coordinators count is 1 buckets per mediator 2 2026-01-04T02:49:43.063033Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-04T02:49:43.063037Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037890 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:49:43.063069Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-04T02:49:43.063090Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037890 2026-01-04T02:49:43.063090Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2026-01-04T02:49:43.063363Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-04T02:49:43.063458Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 0 2026-01-04T02:49:43.067043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976715693:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715693 TabletId: 72075186224037888 2026-01-04T02:49:43.067281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715693:0 131 -> 132 2026-01-04T02:49:43.069037Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T02:49:43.069281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-04T02:49:43.069350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:49:43.070003Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715693 2026-01-04T02:49:43.070048Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715693 2026-01-04T02:49:43.070065Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715693, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 5 2026-01-04T02:49:43.073987Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 Initiating switch from PreOffline to Offline state 2026-01-04T02:49:43.074032Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715693:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-01-04T02:49:43.074114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-01-04T02:49:43.074154Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-01-04T02:49:43.074205Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715693:0 2026-01-04T02:49:43.075817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715693:0 2026-01-04T02:49:43.077527Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:49:43.078343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:49:43.082944Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-04T02:49:43.083522Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-04T02:49:43.083866Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976724894, task: 1, CA Id [1:7591333150006901428:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2026-01-04T02:49:43.083916Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976724895, task: 1, CA Id [1:7591333150006901429:2370]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 0 2026-01-04T02:49:43.083974Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-04T02:49:43.084089Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-04T02:49:43.119220Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976724894, task: 1, CA Id [1:7591333150006901428:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-04T02:49:43.119267Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976724895, task: 1, CA Id [1:7591333150006901429:2370]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-04T02:49:43.291014Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976724894, task: 1, CA Id [1:7591333150006901428:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-04T02:49:43.291035Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976724895, task: 1, CA Id [1:7591333150006901429:2370]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-04T02:49:43.578051Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976724894, task: 1, CA Id [1:7591333150006901428:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-04T02:49:43.578058Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976724895, task: 1, CA Id [1:7591333150006901429:2370]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-04T02:49:43.894234Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976724894, task: 1, CA Id [1:7591333150006901428:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-04T02:49:43.894233Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976724895, task: 1, CA Id [1:7591333150006901429:2370]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-04T02:49:44.416552Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976724895, task: 1, CA Id [1:7591333150006901429:2370]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-04T02:49:44.416617Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976724894, task: 1, CA Id [1:7591333150006901428:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-04T02:49:45.249507Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976724895, task: 1, CA Id [1:7591333150006901429:2370]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-04T02:49:45.483010Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976724894, task: 1, CA Id [1:7591333150006901428:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-04T02:49:45.923789Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976724895, task: 1, CA Id [1:7591333150006901429:2370]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-04T02:49:46.411760Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976724894, task: 1, CA Id [1:7591333150006901428:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-04T02:49:46.613911Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976724895, task: 1, CA Id [1:7591333150006901429:2370]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-04T02:49:47.091330Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976724894, task: 1, CA Id [1:7591333150006901428:2377]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 Table has 2 shards |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest >> KqpPg::CreateSequence [GOOD] >> KqpPg::AlterSequence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::AutoSplitMergeQueue [GOOD] Test command err: 2026-01-04T02:48:19.726071Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332788802758799:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.726362Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:20.104006Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.104106Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.158698Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.176601Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.201948Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332788802758683:2081] 1767494899709531 != 1767494899709534 2026-01-04T02:48:20.218397Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.406523Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.530048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:20.538377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:20.562220Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:20.676739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.747429Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; A-0 B-0 2026-01-04T02:48:21.218597Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.9, eph 1} end=Done, 2 blobs 1r (max 1), put Spent{time=0.028s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2026-01-04T02:48:21.230111Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2026-01-04T02:48:21.241516Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 2 blobs 1r (max 1), put Spent{time=0.016s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2026-01-04T02:48:21.243184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 6291502 rowCount 1 cpuUsage 0 2026-01-04T02:48:21.330727Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-01-04T02:48:21.333901Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046644480:1 data size 6291502 row count 1 2026-01-04T02:48:21.334010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], pathId map=T1, is column=0, is olap=0, RowCount 1, DataSize 6291502 2026-01-04T02:48:21.334137Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2026-01-04T02:48:21.334256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-01-04T02:48:21.605984Z node 1 :OPS_COMPACT INFO: 
Compact{72075186224037888.1.14, eph 2} end=Done, 2 blobs 1r (max 1), put Spent{time=0.027s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2026-01-04T02:48:21.626789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 A-1 2026-01-04T02:48:21.665047Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 3 blobs 2r (max 2), put Spent{time=0.039s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (12583126 0 0)b }, ecr=1.000 2026-01-04T02:48:21.667023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 12583004 rowCount 2 cpuUsage 0 2026-01-04T02:48:21.728774Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-01-04T02:48:21.728904Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046644480:1 data size 12583004 row count 2 2026-01-04T02:48:21.728949Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], pathId map=T1, is column=0, is olap=0, RowCount 2, DataSize 12583004 2026-01-04T02:48:21.729037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2026-01-04T02:48:21.729525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 B-1 2026-01-04T02:48:21.991190Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.19, eph 3} end=Done, 2 blobs 1r (max 1), put Spent{time=0.022s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2026-01-04T02:48:22.020872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 18874506 rowCount 3 cpuUsage 0 2026-01-04T02:48:22.120355Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 3r (max 3), put Spent{time=0.105s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (18874656 0 0)b }, ecr=1.000 2026-01-04T02:48:22.122107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-01-04T02:48:22.122248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046644480:1 data size 18874506 row count 3 2026-01-04T02:48:22.122324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], pathId map=T1, is column=0, is olap=0, RowCount 3, DataSize 18874506 2026-01-04T02:48:22.122607Z 
node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046644480, LocalPathId: 39], datashard# 72075186224037888, compactionInfo# {72057594046644480:1, SH# 2, Rows# 3, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 0.000000s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-04T02:48:22.134004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 599 seconds 2026-01-04T02:48:22.134103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2026-01-04T02:48:22.134280Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-01-04T02:48:22.172761Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046644480, LocalPathId: 39], datashard# 72075186224037888, shardIdx# 72057594046644480:1 in# 50 ms, with status# 1, next wakeup in# 599.949724s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-04T02:48:22.172880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dataSize 18874506 rowCount 3 cpuUsage 0 2026-01-04T02:48:22.276335Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 1 2026-01-04T02:48:22.276425Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046644480:1 data size 18874506 row count 3 2026-01-04T02:48:22.276463Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037888 maps to shardIdx: 72057594046644480:1 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], pathId map=T1, is column=0, is olap=0, RowCount 3, DataSize 18874506 2026-01-04T02:48:22.276545Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037888: SplitByLoadNotEnabledForTable 2026-01-04T02:48:22.276794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 A-2 2026-01-04T02:48:22.325941Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 4} end=Done, 2 blobs 1r (max 1), put Spent{time=0.031s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (6291598 0 0)b }, ecr=1.000 2026-01-04T02:48:22.358765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046644480 from shard 72075186224037888 followerId 0 pathId [OwnerId: 72057594046644480, LocalPathId: 39] state 'Ready' dat ... 
:49:31.146100Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037899 not found 2026-01-04T02:49:31.146132Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037893 not found 2026-01-04T02:49:31.217290Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037895 not found 2026-01-04T02:49:31.217337Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037902 not found WriteKVRow: 182 EraseKVRow: 152 2026-01-04T02:49:31.285013Z node 3 :OPS_COMPACT ERROR: Compact{72075186224037905.1.11, eph 355} end=Term, 0 blobs 0r (max 96), put Spent{time=0.014s,wait=0.000s,interrupts=0} 2026-01-04T02:49:31.285896Z node 3 :TABLET_EXECUTOR ERROR: Leader{72075186224037905:1:14} Compact 5 on TGenCompactionParams{1001: gen 255 epoch 0, 1 parts} step 11, product {0 parts epoch 0} thrown 2026-01-04T02:49:31.345850Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037897 not found 2026-01-04T02:49:31.345886Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037903 not found WriteKVRow: 183 2026-01-04T02:49:31.353820Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037901 not found EraseKVRow: 153 2026-01-04T02:49:31.439422Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation_split_merge.cpp:816: TSplitMerge Propose failed StatusMultipleModifications TabletId 72075186224037905 is already in process of split, tableStr: /dc-1/Dir/T1, tableId: , opId: 281474976710706:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/T1" SourceTabletId: 72075186224037905 SplitBoundary { SerializedKeyPrefix: "\001\000\004\000\000\000\233\000\000\000" } SchemeshardId: 72057594046644480 WriteKVRow: 184 2026-01-04T02:49:31.516058Z node 3 :OPS_COMPACT ERROR: Compact{72075186224037908.1.6, eph 359} end=Term, 0 blobs 0r (max 54), put Spent{time=0.028s,wait=0.000s,interrupts=0} EraseKVRow: 154 2026-01-04T02:49:31.521983Z node 3 :TABLET_EXECUTOR ERROR: Leader{72075186224037908:1:13} Compact 1 on TGenCompactionParams{1001: gen 255 epoch 0, 1 parts} step 6, product {0 parts epoch 0} thrown 2026-01-04T02:49:31.569479Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037904 not found 2026-01-04T02:49:31.571988Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037905 not found WriteKVRow: 185 EraseKVRow: 155 WriteKVRow: 186 EraseKVRow: 156 WriteKVRow: 187 EraseKVRow: 157 WriteKVRow: 188 EraseKVRow: 158 WriteKVRow: 189 EraseKVRow: 159 WriteKVRow: 190 EraseKVRow: 160 WriteKVRow: 191 EraseKVRow: 161 WriteKVRow: 192 EraseKVRow: 162 2026-01-04T02:49:32.039220Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037900 not found WriteKVRow: 193 2026-01-04T02:49:32.051572Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037906 not found EraseKVRow: 163 2026-01-04T02:49:32.095574Z node 3 
:OPS_COMPACT ERROR: Compact{72075186224037908.1.87, eph 378} end=Term, 0 blobs 0r (max 41), put Spent{time=0.018s,wait=0.000s,interrupts=0} 2026-01-04T02:49:32.095896Z node 3 :TABLET_EXECUTOR ERROR: Leader{72075186224037908:1:103} Compact 63 on TGenCompactionParams{1001: gen 1 epoch 0, 2 parts} step 87, product {0 parts epoch 0} thrown WriteKVRow: 194 EraseKVRow: 164 WriteKVRow: 195 2026-01-04T02:49:32.171160Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037908 not found EraseKVRow: 165 WriteKVRow: 196 EraseKVRow: 166 WriteKVRow: 197 EraseKVRow: 167 WriteKVRow: 198 EraseKVRow: 168 WriteKVRow: 199 EraseKVRow: 169 WriteKVRow: 200 EraseKVRow: 170 WriteKVRow: 201 EraseKVRow: 171 WriteKVRow: 202 EraseKVRow: 172 WriteKVRow: 203 EraseKVRow: 173 WriteKVRow: 204 2026-01-04T02:49:32.544595Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037911 not found 2026-01-04T02:49:32.544630Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037909 not found EraseKVRow: 174 2026-01-04T02:49:32.569775Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037910 not found 2026-01-04T02:49:32.569843Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037907 not found WriteKVRow: 205 2026-01-04T02:49:32.648678Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037915 not found 2026-01-04T02:49:32.648720Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037914 not found EraseKVRow: 175 WriteKVRow: 206 EraseKVRow: 176 WriteKVRow: 207 2026-01-04T02:49:32.720609Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037913 not found EraseKVRow: 177 2026-01-04T02:49:32.805635Z node 3 :OPS_COMPACT ERROR: Compact{72075186224037918.1.21, eph 394} end=Term, 2 blobs 21r (max 21), put Spent{time=0.016s,wait=0.000s,interrupts=1} 2026-01-04T02:49:32.805731Z node 3 :TABLET_EXECUTOR ERROR: Leader{72075186224037918:1:36} Compact 11 on TGenCompactionParams{1001: gen 1 epoch 0, 2 parts} step 21, product {0 parts epoch 0} thrown 2026-01-04T02:49:32.819867Z node 3 :OPS_COMPACT ERROR: Compact{72075186224037919.1.8, eph 394} end=Term, 0 blobs 0r (max 16), put Spent{time=0.001s,wait=0.000s,interrupts=0} 2026-01-04T02:49:32.820015Z node 3 :TABLET_EXECUTOR ERROR: Leader{72075186224037919:1:12} Compact 3 on TGenCompactionParams{1001: gen 255 epoch 0, 2 parts} step 8, product {0 parts epoch 0} thrown WriteKVRow: 208 2026-01-04T02:49:32.834052Z node 3 :OPS_COMPACT ERROR: Compact{72075186224037922.1.6, eph 394} end=Term, 0 blobs 0r (max 21), put Spent{time=0.016s,wait=0.000s,interrupts=0} 2026-01-04T02:49:32.834174Z node 3 :TABLET_EXECUTOR ERROR: Leader{72075186224037922:1:13} Compact 1 on TGenCompactionParams{1001: gen 255 epoch 0, 2 parts} step 6, product {0 parts epoch 0} thrown EraseKVRow: 178 WriteKVRow: 209 2026-01-04T02:49:32.856903Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037917 not found EraseKVRow: 179 WriteKVRow: 210 
2026-01-04T02:49:32.897097Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037918 not found EraseKVRow: 180 WriteKVRow: 211 EraseKVRow: 181 WriteKVRow: 212 EraseKVRow: 182 WriteKVRow: 213 EraseKVRow: 183 WriteKVRow: 214 EraseKVRow: 184 WriteKVRow: 215 EraseKVRow: 185 WriteKVRow: 216 EraseKVRow: 186 WriteKVRow: 217 EraseKVRow: 187 WriteKVRow: 218 EraseKVRow: 188 WriteKVRow: 219 EraseKVRow: 189 WriteKVRow: 220 EraseKVRow: 190 WriteKVRow: 221 EraseKVRow: 191 WriteKVRow: 222 EraseKVRow: 192 WriteKVRow: 223 EraseKVRow: 193 WriteKVRow: 224 EraseKVRow: 194 WriteKVRow: 225 EraseKVRow: 195 WriteKVRow: 226 EraseKVRow: 196 WriteKVRow: 227 EraseKVRow: 197 WriteKVRow: 228 EraseKVRow: 198 WriteKVRow: 229 EraseKVRow: 199 WriteKVRow: 230 EraseKVRow: 200 WriteKVRow: 231 EraseKVRow: 201 WriteKVRow: 232 EraseKVRow: 202 WriteKVRow: 233 EraseKVRow: 203 WriteKVRow: 234 EraseKVRow: 204 WriteKVRow: 235 EraseKVRow: 205 WriteKVRow: 236 EraseKVRow: 206 WriteKVRow: 237 EraseKVRow: 207 WriteKVRow: 238 EraseKVRow: 208 WriteKVRow: 239 EraseKVRow: 209 WriteKVRow: 240 EraseKVRow: 210 WriteKVRow: 241 EraseKVRow: 211 WriteKVRow: 242 EraseKVRow: 212 WriteKVRow: 243 EraseKVRow: 213 WriteKVRow: 244 EraseKVRow: 214 WriteKVRow: 245 EraseKVRow: 215 WriteKVRow: 246 EraseKVRow: 216 WriteKVRow: 247 EraseKVRow: 217 WriteKVRow: 248 EraseKVRow: 218 WriteKVRow: 249 EraseKVRow: 219 WriteKVRow: 250 EraseKVRow: 220 WriteKVRow: 251 EraseKVRow: 221 WriteKVRow: 252 EraseKVRow: 222 WriteKVRow: 253 EraseKVRow: 223 WriteKVRow: 254 EraseKVRow: 224 WriteKVRow: 255 EraseKVRow: 225 WriteKVRow: 256 EraseKVRow: 226 WriteKVRow: 257 EraseKVRow: 227 WriteKVRow: 258 EraseKVRow: 228 WriteKVRow: 259 EraseKVRow: 229 WriteKVRow: 260 EraseKVRow: 230 WriteKVRow: 261 EraseKVRow: 231 WriteKVRow: 262 EraseKVRow: 232 WriteKVRow: 263 EraseKVRow: 233 WriteKVRow: 264 EraseKVRow: 234 WriteKVRow: 265 EraseKVRow: 235 WriteKVRow: 266 EraseKVRow: 236 WriteKVRow: 267 EraseKVRow: 237 WriteKVRow: 268 EraseKVRow: 238 WriteKVRow: 269 EraseKVRow: 239 WriteKVRow: 270 EraseKVRow: 240 WriteKVRow: 271 EraseKVRow: 241 WriteKVRow: 272 EraseKVRow: 242 WriteKVRow: 273 EraseKVRow: 243 WriteKVRow: 274 EraseKVRow: 244 WriteKVRow: 275 EraseKVRow: 245 WriteKVRow: 276 EraseKVRow: 246 WriteKVRow: 277 EraseKVRow: 247 WriteKVRow: 278 EraseKVRow: 248 WriteKVRow: 279 EraseKVRow: 249 WriteKVRow: 280 EraseKVRow: 250 WriteKVRow: 281 EraseKVRow: 251 WriteKVRow: 282 EraseKVRow: 252 WriteKVRow: 283 EraseKVRow: 253 WriteKVRow: 284 EraseKVRow: 254 WriteKVRow: 285 EraseKVRow: 255 WriteKVRow: 286 EraseKVRow: 256 WriteKVRow: 287 EraseKVRow: 257 WriteKVRow: 288 EraseKVRow: 258 WriteKVRow: 289 EraseKVRow: 259 WriteKVRow: 290 EraseKVRow: 260 WriteKVRow: 291 EraseKVRow: 261 WriteKVRow: 292 EraseKVRow: 262 WriteKVRow: 293 EraseKVRow: 263 WriteKVRow: 294 EraseKVRow: 264 WriteKVRow: 295 EraseKVRow: 265 WriteKVRow: 296 EraseKVRow: 266 WriteKVRow: 297 EraseKVRow: 267 WriteKVRow: 298 EraseKVRow: 268 WriteKVRow: 299 EraseKVRow: 269 EraseKVRow: 270 EraseKVRow: 271 EraseKVRow: 272 EraseKVRow: 273 EraseKVRow: 274 EraseKVRow: 275 EraseKVRow: 276 EraseKVRow: 277 EraseKVRow: 278 EraseKVRow: 279 EraseKVRow: 280 EraseKVRow: 281 EraseKVRow: 282 EraseKVRow: 283 EraseKVRow: 284 EraseKVRow: 285 EraseKVRow: 286 EraseKVRow: 287 EraseKVRow: 288 EraseKVRow: 289 EraseKVRow: 290 EraseKVRow: 291 EraseKVRow: 292 EraseKVRow: 293 EraseKVRow: 294 EraseKVRow: 295 EraseKVRow: 296 EraseKVRow: 297 EraseKVRow: 298 EraseKVRow: 299 2026-01-04T02:49:48.263170Z node 3 
:HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037916 not found 2026-01-04T02:49:48.263213Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037912 not found 2026-01-04T02:49:48.370962Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037919 not found 2026-01-04T02:49:48.966737Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037921 not found 2026-01-04T02:49:48.966778Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037920 not found 2026-01-04T02:49:49.066911Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037923 not found 2026-01-04T02:49:49.203049Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037924 not found 2026-01-04T02:49:49.203238Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037922 not found |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationNewActiveNode [GOOD] Test command err: 2026-01-04T02:49:49.145018Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:49.145102Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TDynamicNameserverTest::CacheMissDifferentDeadlineInverseOrder-EnableNodeBrokerDeltaProtocol-false [GOOD] Test command err: 2026-01-04T02:49:49.662459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:49.662530Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvSyncNodesRequest from NAMESERVICE to NODE_BROKER_ACTOR cookie 1 ... waiting for cache miss (done) ... 
blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvUpdateNodes from NODE_BROKER_ACTOR to NAMESERVICE cookie 0 2026-01-04T02:49:50.426762Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:50.426813Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ... waiting for cache miss ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... blocking NKikimr::NNodeBroker::TEvNodeBroker::TEvResolveNode from NAMESERVICE to NODE_BROKER_ACTOR cookie 0 ... waiting for cache miss (done) |95.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::SingleDomainModeBannedIds [GOOD] Test command err: 2026-01-04T02:49:49.580711Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:49.580785Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-04T02:49:49.793500Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host3:1001: ERROR_TEMP: No free node IDs 2026-01-04T02:49:49.819674Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2026-01-04T02:49:49.832849Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node ID is banned 2026-01-04T02:49:50.625774Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2026-01-04T02:49:50.654765Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:1001: ERROR_TEMP: No free node IDs |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationNewExpiredNode [GOOD] Test command err: 2026-01-04T02:49:49.059442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:49.059519Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodeNameReuseRestartWithHostChanges [GOOD] Test command err: 2026-01-04T02:49:47.264112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:47.264198Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) 2026-01-04T02:49:47.396032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 101:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) FAKE_COORDINATOR: Add transaction: 101 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000001 FAKE_COORDINATOR: Erasing txId 101 |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] >> TestProgram::YqlKernelEndsWithScalar [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelContains >> TestProgram::YqlKernelContains [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithDeletes [GOOD] Test command err: 2026-01-04T02:49:19.070676Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333044990162086:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:19.070773Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:19.262506Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:49:19.366576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:19.366656Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:19.378020Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:19.412392Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:19.432602Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:49:19.654514Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:49:19.654544Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:49:19.654557Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2026-01-04T02:49:19.654636Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:49:19.970268Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:49:20.064436Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:20.085114Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:22.287947Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333057875065307:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.288068Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.288420Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333057875065316:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.288480Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.776708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:22.996980Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333057875065491:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.003868Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.004292Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333062170032790:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.004377Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.019421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Table has 1 shards 2026-01-04T02:49:23.228683Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333062170032894:2381], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.228765Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.228938Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333062170032902:2387], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.228968Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333062170032901:2386], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.228991Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.230956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333062170032915:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.231035Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333062170032933:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.231054Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333062170032944:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.231096Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333062170032946:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.231704Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333062170032934:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.231832Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.232023Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333062170032938:2396], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.232205Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333062170032939:2397], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.233465Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333062170032961:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.233545Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.233974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710660:0, at schemeshard: 72057594046644480 2026-01-04T02:49:23.234249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:49:23.234278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710660:1, at schemeshard: 72057594046644480 2026-01-04T02:49:23.234368Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710660:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:49:23.234396Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710660:2, at schemeshard: 72057594046644480 2026-01-04T02:49:23 ... a TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state) 2026-01-04T02:49:43.338732Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:880: SelfId: [1:7591333148069459810:2359], Table: `/Root/Foo` ([72057594046644480:38:2]), SessionActorId: [1:7591333062170032870:2359]Got OVERLOADED for table `/Root/Foo`. ShardID=72075186224037888, Sink=[1:7591333148069459810:2359]. Ignored this error.{
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } 2026-01-04T02:49:43.338787Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7591333148069459783:2359], SessionActorId: [1:7591333062170032870:2359], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 . sessionActorId=[1:7591333062170032870:2359]. 2026-01-04T02:49:43.338867Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=YzUwMTJkMzgtYWM5YjczZS1mNThmMmE3ZC1iZjM4YTY4Mw==, ActorId: [1:7591333062170032870:2359], ActorState: ExecuteState, LegacyTraceId: 01ke3ej7n2dpmzxxygtc22kpsn, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:7591333148069459785:2359] from: [1:7591333148069459783:2359] trace_id# 2026-01-04T02:49:43.338914Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7591333148069459785:2359] TxId: 281474976722295. Ctx: { TraceId: 01ke3ej7n2dpmzxxygtc22kpsn, Database: , SessionId: ydb://session/3?node_id=1&id=YzUwMTJkMzgtYWM5YjczZS1mNThmMmE3ZC1iZjM4YTY4Mw==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# OVERLOADED Issues# {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } } trace_id# 2026-01-04T02:49:43.338950Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715693 2026-01-04T02:49:43.338995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715693 2026-01-04T02:49:43.339013Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715693, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 5 2026-01-04T02:49:43.339075Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YzUwMTJkMzgtYWM5YjczZS1mNThmMmE3ZC1iZjM4YTY4Mw==, ActorId: [1:7591333062170032870:2359], ActorState: ExecuteState, LegacyTraceId: 01ke3ej7n2dpmzxxygtc22kpsn, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`." issue_code: 2006 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state)" issue_code: 2006 severity: 1 } } trace_id# 2026-01-04T02:49:43.339562Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:880: SelfId: [1:7591333148069459817:2374], Table: `/Root/Foo` ([72057594046644480:38:2]), SessionActorId: [1:7591333062170032885:2374]Got OVERLOADED for table `/Root/Foo`. ShardID=72075186224037888, Sink=[1:7591333148069459817:2374]. Ignored this error.{
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } 2026-01-04T02:49:43.339628Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7591333148069459761:2374], SessionActorId: [1:7591333062170032885:2374], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 . sessionActorId=[1:7591333062170032885:2374]. 2026-01-04T02:49:43.339749Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=NTA4NDM1ZGUtYjczODk2ZWItMjE5YjFmNzYtMjFlM2JkMDA=, ActorId: [1:7591333062170032885:2374], ActorState: ExecuteState, LegacyTraceId: 01ke3ej7mz3ckdvgf61pew5vmc, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:7591333148069459762:2374] from: [1:7591333148069459761:2374] trace_id# 2026-01-04T02:49:43.339816Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7591333148069459762:2374] TxId: 281474976722290. Ctx: { TraceId: 01ke3ej7mz3ckdvgf61pew5vmc, Database: , SessionId: ydb://session/3?node_id=1&id=NTA4NDM1ZGUtYjczODk2ZWItMjE5YjFmNzYtMjFlM2JkMDA=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# OVERLOADED Issues# {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } } trace_id# 2026-01-04T02:49:43.340036Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NTA4NDM1ZGUtYjczODk2ZWItMjE5YjFmNzYtMjFlM2JkMDA=, ActorId: [1:7591333062170032885:2374], ActorState: ExecuteState, LegacyTraceId: 01ke3ej7mz3ckdvgf61pew5vmc, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`." issue_code: 2006 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state)" issue_code: 2006 severity: 1 } } trace_id# 2026-01-04T02:49:43.340327Z node 1 :KQP_COMPUTE WARN: kqp_write_actor.cpp:880: SelfId: [1:7591333148069459804:2378], Table: `/Root/Foo` ([72057594046644480:38:2]), SessionActorId: [1:7591333062170032889:2378]Got OVERLOADED for table `/Root/Foo`. ShardID=72075186224037888, Sink=[1:7591333148069459804:2378]. Ignored this error.{
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } 2026-01-04T02:49:43.340392Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7591333148069459764:2378], SessionActorId: [1:7591333062170032889:2378], statusCode=OVERLOADED. Issue=
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 . sessionActorId=[1:7591333062170032889:2378]. 2026-01-04T02:49:43.340511Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=NTFjMWFkMWMtMzFkNGRkOGItNjc1MTIyYjAtNmNjZmRjNQ==, ActorId: [1:7591333062170032889:2378], ActorState: ExecuteState, LegacyTraceId: 01ke3ej7mz4mdwax0q0y9jnjdv, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:7591333148069459766:2378] from: [1:7591333148069459764:2378] trace_id# 2026-01-04T02:49:43.340568Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7591333148069459766:2378] TxId: 281474976722291. Ctx: { TraceId: 01ke3ej7mz4mdwax0q0y9jnjdv, Database: , SessionId: ydb://session/3?node_id=1&id=NTFjMWFkMWMtMzFkNGRkOGItNjc1MTIyYjAtNmNjZmRjNQ==, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# OVERLOADED Issues# {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } } trace_id# 2026-01-04T02:49:43.340800Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NTFjMWFkMWMtMzFkNGRkOGItNjc1MTIyYjAtNmNjZmRjNQ==, ActorId: [1:7591333062170032889:2378], ActorState: ExecuteState, LegacyTraceId: 01ke3ej7mz4mdwax0q0y9jnjdv, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`." issue_code: 2006 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976715693 state SplitSrcWaitForPartitioningChanged (wrong shard state)" issue_code: 2006 severity: 1 } } trace_id# 2026-01-04T02:49:43.347550Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 Initiating switch from PreOffline to Offline state 2026-01-04T02:49:43.347569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715693:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-01-04T02:49:43.347651Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-01-04T02:49:43.347680Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-01-04T02:49:43.347749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715693:0 2026-01-04T02:49:43.349789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715693:0 2026-01-04T02:49:43.350798Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:49:43.358644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:49:43.369215Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-04T02:49:43.369483Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-04T02:49:43.373102Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-04T02:49:43.373226Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 Table has 2 shards |95.5%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." 
} } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "amet." } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"amet.\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"7,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"7,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"amet."},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TNodeBrokerTest::ExtendLeaseBumpVersion [GOOD] >> TNodeBrokerTest::EpochCacheUpdate |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelContains [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\005@\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\034StringContains?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"7,9\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithUpdates [GOOD] Test command err: 2026-01-04T02:49:20.497282Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333053276915212:2159];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:20.498841Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:20.523347Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:49:20.756033Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:49:20.829631Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:20.829728Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:20.837303Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:20.902326Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:20.929917Z node 3 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 268639239 Duration# 0.007869s 2026-01-04T02:49:21.075255Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:49:21.169185Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use 
file: (empty maybe) 2026-01-04T02:49:21.169206Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:49:21.169214Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:49:21.170994Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:49:21.416559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:21.503215Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:23.784671Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333066161818342:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.784785Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.785256Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333066161818352:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.785315Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.068375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:24.286290Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333070456785823:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.286384Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.286681Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333070456785826:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.286719Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.309529Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Table has 1 shards 2026-01-04T02:49:24.404216Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333070456785926:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.404358Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.404826Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333070456785937:2385], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.404951Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.405011Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333070456785947:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.405061Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333070456785949:2394], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.405145Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333070456785950:2395], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.405837Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333070456785948:2393], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.405916Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333070456785942:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.407686Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333070456785989:2408], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.407687Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333070456785993:2410], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.408034Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333070456785980:2404], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.408056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333070456785983:2406], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.408118Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.408546Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333070456786008:2414], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.408668Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:24.410941Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata, operationId: 281474976715661:0, at schemeshard: 72057594046644480 2026-01-04T02:49:24.411090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715661:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:49:24.411112Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976715661:1, at schemeshard: 72057594046644480 2026-01-04T02:49:24 ... _actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=NzExMGI5ZjAtNzdlNDIwMjgtOTVmMTFjMmMtYjUxYTIxZTY=, ActorId: [1:7591333070456785912:2369], ActorState: ExecuteState, LegacyTraceId: 01ke3ej8p28q2vjvc93b4yhbsd, got TEvKqpBuffer::TEvError in ExecuteState, status: OVERLOADED send to: [1:7591333156356210012:2369] from: [1:7591333156356210011:2369] trace_id# 2026-01-04T02:49:44.422116Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7591333156356210012:2369] TxId: 281474976722060. Ctx: { TraceId: 01ke3ej8p28q2vjvc93b4yhbsd, Database: , SessionId: ydb://session/3?node_id=1&id=NzExMGI5ZjAtNzdlNDIwMjgtOTVmMTFjMmMtYjUxYTIxZTY=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# OVERLOADED Issues# {
: Error: Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`., code: 2006 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state), code: 2006 } } trace_id# 2026-01-04T02:49:44.422426Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NzExMGI5ZjAtNzdlNDIwMjgtOTVmMTFjMmMtYjUxYTIxZTY=, ActorId: [1:7591333070456785912:2369], ActorState: ExecuteState, LegacyTraceId: 01ke3ej8p28q2vjvc93b4yhbsd, Create QueryResponse for error on request, msg: status# OVERLOADED issues# { message: "Kikimr cluster or one of its subsystems is overloaded. Tablet 72075186224037888 is overloaded. Table `/Root/Foo`." issue_code: 2006 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in process of split opId 281474976710693 state SplitSrcWaitForPartitioningChanged (wrong shard state)" issue_code: 2006 severity: 1 } } trace_id# 2026-01-04T02:49:44.429245Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_WRONG_SHARD_STATE;details=Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state);tx_id=0; 2026-01-04T02:49:44.429282Z node 1 :TX_DATASHARD NOTICE: datashard.cpp:3149: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state) 2026-01-04T02:49:44.429558Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:807: SelfId: [1:7591333156356209944:2367], Table: `/Root/Foo` ([72057594046644480:38:2]), SessionActorId: [1:7591333070456785910:2367]Got WRONG SHARD STATE for table `/Root/Foo`. ShardID=72075186224037888, Sink=[1:7591333156356209944:2367].{
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } 2026-01-04T02:49:44.429662Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7591333156356209921:2367], SessionActorId: [1:7591333070456785910:2367], statusCode=UNAVAILABLE. Issue=
: Error: Wrong shard state. Table `/Root/Foo`., code: 2005
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 . sessionActorId=[1:7591333070456785910:2367]. 2026-01-04T02:49:44.430008Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=ODUyYzQ0NDktODUzOGE3NzEtMzI3MDBiMTgtMzI0MWUyZGU=, ActorId: [1:7591333070456785910:2367], ActorState: ExecuteState, LegacyTraceId: 01ke3ej8nr7w927gx0bq53f5t8, got TEvKqpBuffer::TEvError in ExecuteState, status: UNAVAILABLE send to: [1:7591333156356209922:2367] from: [1:7591333156356209921:2367] trace_id# 2026-01-04T02:49:44.430103Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7591333156356209922:2367] TxId: 281474976722050. Ctx: { TraceId: 01ke3ej8nr7w927gx0bq53f5t8, Database: , SessionId: ydb://session/3?node_id=1&id=ODUyYzQ0NDktODUzOGE3NzEtMzI3MDBiMTgtMzI0MWUyZGU=, PoolId: default, DatabaseId: /Root, IsStreamingQuery: 0}. Runtime error Status# UNAVAILABLE Issues# {
: Error: Wrong shard state. Table `/Root/Foo`., code: 2005 subissue: {
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 } } trace_id# 2026-01-04T02:49:44.430369Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ODUyYzQ0NDktODUzOGE3NzEtMzI3MDBiMTgtMzI0MWUyZGU=, ActorId: [1:7591333070456785910:2367], ActorState: ExecuteState, LegacyTraceId: 01ke3ej8nr7w927gx0bq53f5t8, Create QueryResponse for error on request, msg: status# UNAVAILABLE issues# { message: "Wrong shard state. Table `/Root/Foo`." issue_code: 2005 severity: 1 issues { message: "Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state)" issue_code: 2029 severity: 1 } } trace_id# 2026-01-04T02:49:44.432255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976710693:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-01-04T02:49:44.432346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2026-01-04T02:49:44.432383Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976710693:0 progress is 1/1 2026-01-04T02:49:44.432435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710693:0 2026-01-04T02:49:44.434768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976710693:0 UNAVAILABLE:
: Error: Wrong shard state. Table `/Root/Foo`., code: 2005
: Error: Rejecting data TxId 0 because datashard 72075186224037888: is in a pre/offline state assuming this is due to a finished split (wrong shard state), code: 2029 2026-01-04T02:49:44.509883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-04T02:49:44.510084Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, next wakeup# 14.999763s, rate# 0, in queue# 1 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-04T02:49:44.511659Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7591333053276915475:2205] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037890 2026-01-04T02:49:44.515132Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7591333053276915475:2205] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037889 2026-01-04T02:49:44.537789Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, shardIdx# 72057594046644480:3 in# 27, next wakeup# 14.972067s, rate# 0, in queue# 0 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-04T02:49:44.540981Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037890 CompletedLoansChanged 2026-01-04T02:49:44.558643Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, shardIdx# 72057594046644480:2 in# 48, next wakeup# 14.951211s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-04T02:49:44.560912Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037889 CompletedLoansChanged 2026-01-04T02:49:44.566617Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 Initiating switch from PreOffline to Offline state 2026-01-04T02:49:44.580199Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:49:44.583176Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:49:44.625988Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-04T02:49:44.626275Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-04T02:49:44.627091Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-04T02:49:44.627223Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-04T02:49:44.635764Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 
72075186224037890, compactionInfo# {72057594046644480:3, SH# 3, Rows# 2712, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 584.383538s, rate# 5.787037037e-06, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-04T02:49:44.636323Z node 1 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 2 of 72075186224037890 tableId# 38 localTid# 1001, requested from [1:7591333053276915475:2205], partsCount# 2, memtableSize# 2080, memtableWaste# 2528, memtableRows# 14 2026-01-04T02:49:44.676088Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, compactionInfo# {72057594046644480:2, SH# 3, Rows# 3673, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 584.343229s, rate# 1.157407407e-05, in queue# 2 shards, waiting after compaction# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-04T02:49:44.676164Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, shardIdx# 72057594046644480:3 in# 40 ms, with status# 0, next wakeup in# 584.343137s, rate# 1.157407407e-05, in queue# 1 shards, waiting after compaction# 1 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-04T02:49:44.676871Z node 1 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 2 of 72075186224037889 tableId# 38 localTid# 1001, requested from [1:7591333053276915475:2205], partsCount# 2, memtableSize# 6128, memtableWaste# 5648, memtableRows# 41 2026-01-04T02:49:44.728803Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, shardIdx# 72057594046644480:2 in# 52 ms, with status# 0, next wakeup in# 584.290507s, rate# 1.157407407e-05, in queue# 2 shards, waiting after compaction# 2 shards, running# 0 shards at schemeshard 72057594046644480 Table has 2 shards |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWith >> TestProgram::JsonExistsBinary >> TNodeBrokerTest::BasicFunctionality [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWith [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| 
[TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExistsBinary [GOOD] >> TestProgram::SimpleFunction [GOOD] |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::NumRowsWithNulls >> TestProgram::YqlKernelEquals >> TestProgram::NumRowsWithNulls [GOOD] |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEquals [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::BasicFunctionality [GOOD] Test command err: 2026-01-04T02:49:49.249859Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:49.249933Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-04T02:49:50.668340Z node 2 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host1:1001: WRONG_REQUEST: Another location is registered for host1:1001, expected = DC=1/M=2/R=3/U=4/, got = DC=1/M=2/R=3/U=5/ 2026-01-04T02:49:50.683015Z node 2 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:1001: ERROR_TEMP: No free node IDs 2026-01-04T02:49:50.683515Z node 2 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-04T02:49:50.684004Z node 2 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEndsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? 
\020EndsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"7,9\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; ------- [TM] {asan, 
default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertValuesFromTableWithDefaultNegativeCase-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 9548, MsgBus: 11803 2026-01-04T02:48:51.664575Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332924676312029:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:51.664642Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:51.980318Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:51.990224Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:51.990319Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:52.043389Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:52.119928Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:52.229972Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:52.320100Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:52.320118Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:52.320133Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:52.320196Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:52.680296Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:52.958141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:52.970478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:54.982885Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332937561214757:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:54.983005Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:54.983353Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332937561214767:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:54.983404Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.234773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:55.364111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332941856182159:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.364190Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.364453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332941856182161:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.364507Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.378991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:55.429135Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332941856182239:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.429209Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.429577Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332941856182244:2352], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.429629Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332941856182245:2353], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.429718Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.433426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:55.443565Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332941856182248:2354], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715660 completed, doublechecking } 2026-01-04T02:48:55.564747Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332941856182299:2644] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 24483, MsgBus: 27890 2026-01-04T02:48:56.912832Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332950085574790:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:56.913130Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:56.934425Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:57.039649Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:57.077042Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:57.077130Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:57.083681Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:57.145339Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:57.145363Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:57.145372Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:57.145454Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:57.192230Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:57.487390Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:57.493548Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:57.926768Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:00.202430Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332967265444794:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:00.202489Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332 ... ault, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:46.339576Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7591333164688189914:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:46.339765Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:46.340803Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7591333164688189929:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:46.340891Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:46.344460Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:49:46.356508Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7591333164688189928:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:49:46.431630Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:7591333164688189981:2536] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:49:46.458183Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [10:7591333164688189998:2333], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2026-01-04T02:49:46.458628Z node 10 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=10&id=ODEwYTM0ZGEtMzE0NTNiYmMtODcyMjQwNGYtMjBlNmFhMmI=, ActorId: [10:7591333164688189912:2323], ActorState: ExecuteState, LegacyTraceId: 01ke3ej71d81h7apkfmrxb4g2d, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiCreateTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: \"text\"\n" end_position { row: 1 column: 1 } severity: 1 } } } tx_id# trace_id#
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2026-01-04T02:49:46.705922Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7591333143213352572:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:46.706041Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 28657, MsgBus: 12739 2026-01-04T02:49:47.471351Z node 11 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7591333166232764464:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:47.471426Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:47.489515Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:49:47.629922Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:49:47.634575Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:47.635413Z node 11 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [11:7591333166232764437:2081] 1767494987470138 != 1767494987470141 2026-01-04T02:49:47.652526Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:47.652660Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:47.689503Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:47.768873Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:49:47.768896Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:49:47.768911Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:49:47.769012Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:49:47.893634Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:49:48.476815Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:48.501361Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:51.911521Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333183412634509:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:51.911524Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333183412634514:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:51.911642Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:51.911981Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333183412634524:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:51.912104Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:51.915320Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:49:51.927344Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7591333183412634523:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:49:52.029993Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7591333187707601872:2538] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:49:52.058372Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [11:7591333187707601882:2333], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" 2026-01-04T02:49:52.059049Z node 11 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=11&id=YjM0NWYwYzYtYzIyOWM0N2UtNjJlMTdiMzAtY2MwNmRlOTE=, ActorId: [11:7591333183412634507:2323], ActorState: ExecuteState, LegacyTraceId: 01ke3ejcqg92aczzvwwdzg01hv, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiCreateTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: \"text\"\n" end_position { row: 1 column: 1 } severity: 1 } } } tx_id# trace_id#
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiCreateTable!
:1:1: Error: Failed to parse default expr for typename int4, error reason: Error while converting text to binary: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: invalid input syntax for type integer: "text" |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExistsBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\203\021H\214\n\210\203\001H\214\002?6\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?J\203\005@\200\203\005@\202\022\000\003?d6Json2.JsonDocumentSqlExists\202\003?f\000\002\017\003?L\000\003?N\000\003?P\000\003?R\000\027?T?<\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?|\203\005@\200\203\005@\202\022\000\003?\210\"Json2.CompilePath\202\003?\212\000\002\017\003?~\000\003?\200\000\003?\202\000\003?\204\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::NumRowsWithNulls [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 10001 } Function { Id: 7 Arguments { Id: 2 } } } } Command { Filter { Predicate { Id: 10001 } } } Command { GroupBy { Aggregates { Column { Id: 10002 } Function { Id: 2 } } } } Command { Projection { Columns { Id: 10002 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N3(15):{\"i\":\"2\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N1(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N1[label="1"]; N2[shape=box, label="N2(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N4(15):{\"i\":\"10001\",\"t\":\"Filter\"}\nREMOVE:10001",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, label="N5(8):{\"a\":true,\"p\":{\"function\":{\"function\":\"NumRows\",\"need_concatenation\":true},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10002\",\"t\":\"Calculation\"}\n"]; N5[shape=box, label="N6(8):{\"i\":\"10002\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N6[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N6->N1->N2->N0->N3->N4->N5[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":6}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]},{"owner_id":5,"inputs":[{"from":4}]},{"owner_id":6,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Filter"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"6":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":6},"5":{"p":{"i":"10002","t":"Projection"},"w":8,"id":5},"4":{"p":{"a":true,"p":{"function":{"function":"NumRows","need_concatenation":true},"kernel":{"class_name":"SIMPLE"}},"o":"10002","t":"Calculation"},"w":8,"id":4},"0":{"p":{"i":"2","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"10001","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Filter; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::SimpleFunction [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Id: 8 Arguments { Id: 2 } } } } Command { Projection { Columns { Id: 15 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N3(15):{\"i\":\"2\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N1(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N4 -> N1[label="1"]; N2[shape=box, label="N2(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N4(15):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, 
label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N4->N1->N2->N0->N3[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":4}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"15","t":"Projection"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"4":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":4},"0":{"p":{"i":"2","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelEquals [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 10 } Arguments { Id: 11 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\020\203B\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\001\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\014Equals?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"10,11\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"10","p":{"address":{"name":"i16","id":10}},"o":"10","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"i16","id":10},{"name":"float","id":11}]},"o":"10,11","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"i16","id":10},{"name":"float","id":11}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"11","p":{"address":{"name":"float","id":11}},"o":"11","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"10,11","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int16TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9FloatTypeE; digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"10,11\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:10,11"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"10\",\"p\":{\"address\":{\"name\":\"i16\",\"id\":10}},\"o\":\"10\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"11\",\"p\":{\"address\":{\"name\":\"float\",\"id\":11}},\"o\":\"11\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"i16\",\"id\":10},{\"name\":\"float\",\"id\":11}]},\"o\":\"10,11\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; } FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestScript::StepMerging >> TestScript::StepMerging [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExists |95.5%| [TM] {asan, 
default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExists [GOOD] |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestScript::StepMerging [GOOD] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel >> TestProgram::JsonValueBinary >> TestProgram::YqlKernel [GOOD] |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TNodeBrokerTest::EpochCacheUpdate [GOOD] >> AnalyzeColumnshard::AnalyzeRebootSa [GOOD] >> TestProgram::YqlKernelStartsWith [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonExists [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\020JsonNode\006Arg\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\006\000\t\251\000?\032\006\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\006\000\t\251\000?\004\006\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\006\000\t\211\022?6\211\010?6\207\214\002\214\n\210\203\001H?>?6\016\000\203\004\203\005@\203\004\203\004\207\214\002\214\n\210\203\001H\214\002\207\203\014\026\000\t\211\010?H\203\005@\200\203\005@\202\022\000\003?d\036Json2.SqlExists\202\003?f\000\002\017\003?J\000\003?L\000\003?N\000\003?P\000\027?T\t\211\014?R\311\002?R\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\200\005\205\004\203\010\203\005@\032\036\003?\206\002\003?\210\000\003\001\003?\202\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\230\203\005@\200\203\005@\202\022\000\003?\244\026Json2.Parse\202\003?\246\000\002\017\003?\232\000\003?\234\000\003?\236\000\003?\240\000?<\036\t\211\014?V\211\002?V\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\276\203\005@\200\203\005@\202\022\000\003?\312\"Json2.CompilePath\202\003?\314\000\002\017\003?\300\000\003?\302\000\003?\304\000\003?\306\000?4\036\010\000?\\7?`\003?^\000\276\001\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; >> TestProgram::Like |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValueBinary [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernel [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 3 } Arguments { Id: 4 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\002\213\002?\000\001\235?\002\001\235?\004\001\002\000\t\211\002?\n\235?\000\001\002\000\t\251\000?\020\014Arg\000\000\t\211\002?\014?\020\002\000\t\211\006?\020\203\005@?\020?\020$BlockFunc\000\003?\034\006Add?\026?\026\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"sum\",\"id\":3},{\"name\":\"vat\",\"id\":4}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"3,4\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:3,4"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"3\",\"p\":{\"address\":{\"name\":\"sum\",\"id\":3}},\"o\":\"3\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"sum\",\"id\":3},{\"name\":\"vat\",\"id\":4}]},\"o\":\"3,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"3","p":{"address":{"name":"sum","id":3}},"o":"3","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"sum","id":3},{"name":"vat","id":4}]},"o":"3,4","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"sum","id":3},{"name":"vat","id":4}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"3,4","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWithScalar >> TestProgram::Like [GOOD] >> TestProgram::YqlKernelStartsWithScalar [GOOD] >> TestProgram::CountWithNulls |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] >> TestProgram::CountWithNulls [GOOD] >> Cdc::AwsRegion [GOOD] |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountUIDByVAT ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWith [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { 
Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Function { Arguments { Id: 7 } Arguments { Id: 9 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 15 } } } Kernels: "O\002\030BlockAsTuple\t\211\004\235\213\004\213\002\203\001H\213\002\203\014\001\235?\002\001\235?\006\001\002\000\t\211\002?\014\235?\000\001\002\000\t\251\000?\022\014Arg\000\000\t\211\002?\016\235?\004\001\002\000\t\211\006?\034\203\005@?\022?\022$BlockFunc\000\003? \024StartsWith?\030?\030\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(26):{\"i\":\"7,9\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"15\",\"t\":\"Calculation\"}\nREMOVE:7,9"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N2(9):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N3(9):{\"i\":\"9\",\"p\":{\"address\":{\"name\":\"substring\",\"id\":9}},\"o\":\"9\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(26):{\"i\":\"15\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N5[label="1"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7},{\"name\":\"substring\",\"id\":9}]},\"o\":\"7,9\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N2->N4->N0->N5[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"7,9","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"string","id":7},{"name":"substring","id":9}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"15","t":"Projection"},"w":26,"id":5},"4":{"p":{"i":"9","p":{"address":{"name":"substring","id":9}},"o":"9","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"i":"7,9","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"15","t":"Calculation"},"w":26,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::EpochCacheUpdate [GOOD] Test command err: 2026-01-04T02:49:49.789717Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:49.789773Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-04T02:49:53.001074Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:53.001146Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TestProgram::CountUIDByVAT [GOOD] |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValueBinary [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D 
VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\032\000\t\211\004?\020\235?\002\001\235?\004\000\032\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\032\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?nNJson2.JsonDocumentSqlValueConvertToUtf8\202\003?p\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?b?:\t\211\014?d\211\002?d\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\206\203\005@\200\203\005@\202\022\000\003?\222\"Json2.CompilePath\202\003?\224\000\002\017\003?\210\000\003?\212\000\003?\214\000\003?\216\000?2\036\010\000?j\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\264\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\" ... ?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\270\016Convert?\266\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: 
"O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 6 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\016\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\021H\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\032\000\t\211\004?\022\235?\002\001\235?\004\000\032\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\032\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\203\021H\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207?\000\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?p@Json2.JsonDocumentSqlValueNumber\202\003?r\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?d?<\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\210\203\005@\200\203\005@\202\022\000\003?\224\"Json2.CompilePath\202\003?\226\000\002\017\003?\212\000\003?\214\000\003?\216\000\003?\220\000?4\036\010\000?l\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\266\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"6,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,6"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"6\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"6\",\"p\":{\"address\":{\"name\":\"json_binary\",\"id\":6}},\"o\":\"6\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_binary\",\"id\":6}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"6,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"6","p":{"address":{"name":"json_binary","id":6}},"o":"6","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_binary","id":6}]},"o":"6","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_binary","id":6}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10BinaryTypeE; json_binary: [ 7B226B6579223A2276616C7565227D, 7B226B6579223A31307D, 
7B226B6579223A302E317D, 7B226B6579223A66616C73657D, 7B22616E6F74686572223A2276616C7565227D, 5B5D ] json_binary: [ 010200002100000014000000030300000200000000040000C00400006B65790076616C756500, 0102000021000000140000008403000001000000800300006B6579000000000000002440, 0102000021000000140000008403000001000000800300006B6579009A9999999999B93F, 0102000021000000140000000000000001000000800300006B657900, 01020000210000001400000003030000020000008004000040050000616E6F746865720076616C756500, 010100000000000000000000 ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> KqpPg::AlterSequence [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequence ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::Like [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? 
\001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "001" } } } Command { Assign { Column { Id: 16 } Constant { Bytes: "uid" } } } Command { Assign { Column { Id: 17 } Function { Id: 33 Arguments { Id: 7 } Arguments { Id: 16 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Assign { Column { Id: 18 } Function { Id: 34 Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 1 } } } Command { Assign { Column { Id: 19 } Function { Id: 18 Arguments { Id: 17 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 20 } Function { Id: 18 Arguments { Id: 18 } FunctionType: SIMPLE_ARROW } } } Command { Assign { Column { Id: 21 } Function { Id: 11 Arguments { Id: 19 } Arguments { Id: 20 } FunctionType: SIMPLE_ARROW } } } Command { Projection { Columns { Id: 21 } } } Kernels: "O\006\006Arg\022BlockFunc\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\004\203\014?\006\001\235?\004\001\235?\010\001\n\000\t\211\004?\016\235?\000\001\235?\002\000\n\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\004?\020\235?\006\001?$\n\000\t\211\006?$\203\005@?\024?\026\006\000\003?(\024StartsWith?\034? \001\t\211\006?$\203\005@?\024?\026\006\000\003?0\020EndsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N6(0):{\"p\":{\"v\":\"001\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N0(0):{\"p\":{\"v\":\"uid\"},\"o\":\"16\",\"t\":\"Const\"}\n"]; N2[shape=box, label="N4(15):{\"i\":\"7,16\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"17\",\"t\":\"Calculation\"}\nREMOVE:16"]; N1 -> N2[label="1"]; N4 -> N2[label="2"]; N3[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N10 -> N3[label="1"]; N4[shape=box, label="N3(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N3 -> N4[label="1"]; N5[shape=box, label="N7(15):{\"i\":\"7,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"18\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N5[label="1"]; N4 -> N5[label="2"]; N6[shape=box, label="N5(23):{\"i\":\"17\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"19\",\"t\":\"Calculation\"}\nREMOVE:17"]; N2 -> N6[label="1"]; N7[shape=box, label="N8(23):{\"i\":\"18\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"20\",\"t\":\"Calculation\"}\nREMOVE:18"]; N5 -> N7[label="1"]; N8[shape=box, label="N9(54):{\"i\":\"19,20\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"21\",\"t\":\"Calculation\"}\nREMOVE:19,20"]; N6 -> N8[label="1"]; N7 -> N8[label="2"]; N9[shape=box, label="N10(54):{\"i\":\"21\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N8 -> N9[label="1"]; N10[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N1->N10->N3->N4->N2->N6->N0->N5->N7->N8->N9[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[]},{"owner_id":2,"inputs":[{"from":1},{"from":4}]},{"owner_id":3,"inputs":[{"from":10}]},{"owner_id":4,"inputs":[{"from":3}]},{"owner_id":5,"inputs":[{"from":0},{"from":4}]},{"owner_id":6,"inputs":[{"from":2}]},{"owner_id":7,"inputs":[{"from":5}]},{"owner_id":8,"inputs":[{"from":6},{"from":7}]},{"owner_id":9,"inputs":[{"from":8}]},{"owner_id":10,"inputs":[]}],"nodes":{"1":{"p":{"p":{"v":"uid"},"o":"16","t":"Const"},"w":0,"id":1},"3":{"p":{"i":"0","p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":3},"8":{"p":{"i":"19,20","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"21","t":"Calculation"},"w":54,"id":8},"2":{"p":{"i":"7,16","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"17","t":"Calculation"},"w":15,"id":2},"0":{"p":{"p":{"v":"001"},"o":"15","t":"Const"},"w":0,"id":0},"5":{"p":{"i":"7,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"18","t":"Calculation"},"w":15,"id":5},"9":{"p":{"i":"21","t":"Projection"},"w":54,"id":9},"7":{"p":{"i":"18","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"20","t":"Calculation"},"w":23,"id":7},"4":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":4},"10":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"0","t":"ReserveMemory"},"w":0,"id":10},"6":{"p":{"i":"17","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"19","t":"Calculation"},"w":23,"id":6}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow11BooleanTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow11BooleanTypeE; |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> KqpPg::PgCreateTable [GOOD] >> KqpPg::PgUpdate+useSink >> TestProgram::JsonValue [GOOD] >> KqpPg::CreateTempTable [GOOD] >> KqpPg::CreateTempTableSerial ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountWithNulls [GOOD] Test command err: 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } } } Command { Projection { Columns { Id: 10001 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N3(15):{\"a\":true,\"i\":\"2\",\"p\":{\"function\":{\"function\":\"Count\",\"need_concatenation\":true},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"10001\",\"t\":\"Calculation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N1[shape=box, label="N1(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"2\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N4 -> N1[label="1"]; N2[shape=box, label="N2(7):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N1 -> N2[label="1"]; N3[shape=box, label="N4(15):{\"i\":\"10001\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N0 -> N3[label="1"]; N4[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N4->N1->N2->N0->N3[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[{"from":2}]},{"owner_id":1,"inputs":[{"from":4}]},{"owner_id":2,"inputs":[{"from":1}]},{"owner_id":3,"inputs":[{"from":0}]},{"owner_id":4,"inputs":[]}],"nodes":{"1":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2}]},"o":"2","t":"FetchOriginalData"},"w":2,"id":1},"3":{"p":{"i":"10001","t":"Projection"},"w":15,"id":3},"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":7,"id":2},"4":{"p":{"p":{"data":[{"name":"uid","id":2}]},"o":"0","t":"ReserveMemory"},"w":0,"id":4},"0":{"p":{"a":true,"i":"2","p":{"function":{"function":"Count","need_concatenation":true},"kernel":{"class_name":"SIMPLE"}},"o":"10001","t":"Calculation"},"w":15,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::YqlKernelStartsWithScalar [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 
15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Bytes: "Lorem" } } } Command { Assign { Column { Id: 16 } Function { Arguments { Id: 7 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\004\006Arg\030BlockAsTuple\t\211\004\235\213\004\213\004\203\001H\203\005@\213\002\203\014\001\235?\004\001\235?\010\001\006\000\t\211\004?\016\235?\000\001\235?\002\000\006\000\t\251\000?\024\002\000\t\251\000?\026\002\000\000\t\211\002?\020\235?\006\001\006\000\t\211\006?$\203\005@?\024?\026$BlockFunc\000\003?(\024StartsWith?\034? \001\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"Lorem\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"7,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:7,15"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"7\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"7\",\"p\":{\"address\":{\"name\":\"string\",\"id\":7}},\"o\":\"7\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"string\",\"id\":7}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"7,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"7","p":{"address":{"name":"string","id":7}},"o":"7","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"string","id":7}]},"o":"7","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"string","id":7}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"Lorem"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9UInt8TypeE; |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::CountUIDByVAT [GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { GroupBy { Aggregates { Column { Id: 10001 } Function { Id: 2 Arguments { Id: 2 } } } KeyColumns { Id: 4 } } } Command { Projection { Columns { Id: 10001 } Columns { Id: 4 } } } ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N7[shape=box, label="N0(0):{\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2},{\"name\":\"vat\",\"id\":4}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0[shape=box, label="N4(18):{\"a\":true,\"i\":\"2,4\",\"p\":{\"options\":[\"{10001(Count):[2]}\"],\"type\":\"AGGREGATION\",\"keys\":[4]},\"o\":\"10001\",\"t\":\"Aggregation\"}\nREMOVE:2"]; N2 -> N0[label="1"]; N4 -> N0[label="2"]; N2[shape=box, label="N3(9):{\"i\":\"2\",\"p\":{\"address\":{\"name\":\"uid\",\"id\":2}},\"o\":\"2\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N2[label="1"]; N4[shape=box, label="N2(9):{\"i\":\"4\",\"p\":{\"address\":{\"name\":\"vat\",\"id\":4}},\"o\":\"4\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N6 -> N4[label="1"]; N5[shape=box, label="N5(27):{\"i\":\"10001,4\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N4 -> N5[label="1"]; N0 -> N5[label="2"]; N6[shape=box, label="N1(4):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"uid\",\"id\":2},{\"name\":\"vat\",\"id\":4}]},\"o\":\"2,4\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N7 -> N6[label="1"]; N7->N6->N4->N2->N0->N5[color=red]; }; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":7,"inputs":[]},{"owner_id":0,"inputs":[{"from":2},{"from":4}]},{"owner_id":2,"inputs":[{"from":6}]},{"owner_id":4,"inputs":[{"from":6}]},{"owner_id":5,"inputs":[{"from":4},{"from":0}]},{"owner_id":6,"inputs":[{"from":7}]}],"nodes":{"2":{"p":{"i":"2","p":{"address":{"name":"uid","id":2}},"o":"2","t":"AssembleOriginalData"},"w":9,"id":2},"6":{"p":{"i":"0","p":{"data":[{"name":"uid","id":2},{"name":"vat","id":4}]},"o":"2,4","t":"FetchOriginalData"},"w":4,"id":6},"7":{"p":{"p":{"data":[{"name":"uid","id":2},{"name":"vat","id":4}]},"o":"0","t":"ReserveMemory"},"w":0,"id":7},"5":{"p":{"i":"10001,4","t":"Projection"},"w":27,"id":5},"4":{"p":{"i":"4","p":{"address":{"name":"vat","id":4}},"o":"4","t":"AssembleOriginalData"},"w":9,"id":4},"0":{"p":{"a":true,"i":"2,4","p":{"options":["{10001(Count):[2]}"],"type":"AGGREGATION","keys":[4]},"o":"10001","t":"Aggregation"},"w":18,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Aggregation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10UInt64TypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow9Int32TypeE; |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> TestProgram::JsonValue 
[GOOD] Test command err: FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207?\004\001\235?\006\001\235?\n\001\"\000\t\211\004?\020\235?\002\001\235?\004\000\"\000\t\251\000?\026\002\000\t\251\000?\030\002\000\000\t\211\002?\022\235?\010\001\"\000\t\211\n?&?\026?\030?\002?\004?\010,ScalarApply\000?\036?\"\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\010?\002?\000\207?\004?4$IfPresent\000?.\t\251\000?\000\002\000\t\211\n?4\201\213\004\213\004\203\n\203\005@\207\203\001H?@?4?D?D 
VisitAll\000\t\211\020?H\211\006?H\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?X\203\005@\200\203\005@\202\022\000\003?p6Json2.SqlValueConvertToUtf8\202\003?r\000\002\017\003?Z\000\003?\\\000\003?^\000\003?`\000\027?d\t\211\014?b\311\002?b\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\214\005\205\004\203\010\203\005@\032\036\003?\222\002\003?\224\000\003\001\003?\216\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\244\203\005@\200\203\005@\202\022\000\003?\260\026Json2.Parse\202\003?\262\000\002\017\003?\246\000\003?\250\000\003?\252\000\003?\254\000?:\036\t\211\014?f\211\002?f\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\312\203\005@\200\203\005@\202\022\000\003?\326\"Json2.CompilePath\202\003?\330\000\002\017\003?\314\000\003?\316\000\003?\320\000\003?\322\000?2\036\010\000?l\276\t\251\000?@\002\000\'?4\t\251\000?D\002\000?\370\004\'?4\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Utf8 FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203\014\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203\014?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r$Json2.SqlValueBool\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000 ... 
04\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000\t\211\004?6\203\005@?F\030Invoke\000\003?\374\016Convert?\372\001\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; 
FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Float FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:34;event=parse_program;program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? ?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:103;parse_proto_program=Command { Assign { Column { Id: 15 } Constant { Text: "$.key" } } } Command { Assign { Column { Id: 16 } Function { Id: 8 Arguments { Id: 5 } Arguments { Id: 15 } FunctionType: YQL_KERNEL KernelIdx: 0 } } } Command { Projection { Columns { Id: 16 } } } Kernels: "O\022\006Arg\020JsonNode\020JsonPath\006UDF\006Udf\014Apply2\nFlags\010Name\030BlockAsTuple\t\211\004\235\213\004\213\004\207\203\tH\203\001H\213\002\207\203@\001\235?\006\001\235?\014\001\"\000\t\211\004?\022\235?\002\001\235?\004\000\"\000\t\251\000?\030\002\000\t\251\000?\032\002\000\000\t\211\002?\024\235?\n\001\"\000\t\211\n?(?\030?\032?\002?\004?\n,ScalarApply\000? 
?$\t\251\000?\002\002\000\t\251\000?\004\002\000\t\211\010?\n?\002?\000\207?\010?6$IfPresent\000?0\t\251\000?\000\002\000\t\211\n?6\201\213\004\213\004\203\n\203\005@\207\203@?B?6?F?F VisitAll\000\t\211\020?J\211\006?J\207\214\006\214\n\210\203\001H\214\006\016\000\203\004\203\005@\203\004\203\004\207\214\006\214\n\210\203\001H\214\006\026\000\t\211\010?Z\203\005@\200\203\005@\202\022\000\003?r(Json2.SqlValueNumber\202\003?t\000\002\017\003?\\\000\003?^\000\003?`\000\003?b\000\027?f\t\211\014?d\311\002?d\203\tH\005\205\004\206\205\004\203\010\203\005@\032\036\203\005@\020Args\034Payload\006\002?\216\005\205\004\203\010\203\005@\032\036\003?\224\002\003?\226\000\003\001\003?\220\000\003\016\000\203\004\203\005@\203\004\203\004?\000\026\000\t\211\010?\246\203\005@\200\203\005@\202\022\000\003?\262\026Json2.Parse\202\003?\264\000\002\017\003?\250\000\003?\252\000\003?\254\000\003?\256\000?<\036\t\211\014?h\211\002?h\203\001H\016\000\203\004\203\005@\203\004\203\004?\004\026\000\t\211\010?\314\203\005@\200\203\005@\202\022\000\003?\330\"Json2.CompilePath\202\003?\332\000\002\017\003?\316\000\003?\320\000\003?\322\000\003?\324\000?4\036\010\000?n\276\t\251\000?B\002\000\'?6\t\251\000?F\002\000?\372\004\'?6\010\000\000\000/" ; FALLBACK_ACTOR_LOGGING;priority=TRACE;component=2101;fline=graph_execute.cpp:162;graph_constructed=digraph program {N0[shape=box, label="N0(0):{\"p\":{\"v\":\"$.key\"},\"o\":\"15\",\"t\":\"Const\"}\n"]; N1[shape=box, label="N4(15):{\"i\":\"5,15\",\"p\":{\"function\":{},\"kernel\":{\"class_name\":\"SIMPLE\"}},\"o\":\"16\",\"t\":\"Calculation\"}\nREMOVE:15,5"]; N0 -> N1[label="1"]; N3 -> N1[label="2"]; N2[shape=box, label="N2(2):{\"i\":\"0\",\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"5\",\"t\":\"FetchOriginalData\"}\n",style=filled,color="#FFFF88"]; N5 -> N2[label="1"]; N3[shape=box, label="N3(7):{\"i\":\"5\",\"p\":{\"address\":{\"name\":\"json_string\",\"id\":5}},\"o\":\"5\",\"t\":\"AssembleOriginalData\"}\n",style=filled,color="#FFFF88"]; N2 -> N3[label="1"]; N4[shape=box, label="N5(15):{\"i\":\"16\",\"t\":\"Projection\"}\n",style=filled,color="#FFAAAA"]; N1 -> N4[label="1"]; N5[shape=box, label="N1(0):{\"p\":{\"data\":[{\"name\":\"json_string\",\"id\":5}]},\"o\":\"0\",\"t\":\"ReserveMemory\"}\n"]; N0->N5->N2->N3->N1->N4[color=red]; }; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=332;fline=program.cpp:52;event=program_parsed;result={"edges":[{"owner_id":0,"inputs":[]},{"owner_id":1,"inputs":[{"from":0},{"from":3}]},{"owner_id":2,"inputs":[{"from":5}]},{"owner_id":3,"inputs":[{"from":2}]},{"owner_id":4,"inputs":[{"from":1}]},{"owner_id":5,"inputs":[]}],"nodes":{"1":{"p":{"i":"5,15","p":{"function":{},"kernel":{"class_name":"SIMPLE"}},"o":"16","t":"Calculation"},"w":15,"id":1},"3":{"p":{"i":"5","p":{"address":{"name":"json_string","id":5}},"o":"5","t":"AssembleOriginalData"},"w":7,"id":3},"2":{"p":{"i":"0","p":{"data":[{"name":"json_string","id":5}]},"o":"5","t":"FetchOriginalData"},"w":2,"id":2},"5":{"p":{"p":{"data":[{"name":"json_string","id":5}]},"o":"0","t":"ReserveMemory"},"w":0,"id":5},"4":{"p":{"i":"16","t":"Projection"},"w":15,"id":4},"0":{"p":{"p":{"v":"$.key"},"o":"15","t":"Const"},"w":0,"id":0}}}; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; 
FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10StringTypeE; json_string: [ "{"key":"value"}", "{"key":10}", "{"key":0.1}", "{"key":false}", "{"another":"value"}", "[]" ] FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Const; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=ReserveMemory; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=FetchOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=AssembleOriginalData; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Calculation; FALLBACK_ACTOR_LOGGING;priority=DEBUG;component=334;fline=abstract.cpp:30;execute=Projection; Check output for Double FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; FALLBACK_ACTOR_LOGGING;priority=NOTICE;component=332;fline=columnshard_ut_common.h:520;T=N5arrow10DoubleTypeE; |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeRebootSa [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:46:08.083200Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:08.192447Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:08.202811Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:08.203227Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:08.203380Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:08.569180Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:08.667632Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:08.667769Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:08.715185Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:08.823569Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:09.422846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:09.423654Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:09.423696Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:09.423723Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:09.424042Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:09.486862Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:09.989740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:12.434944Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:12.440866Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:12.472556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:12.472663Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 
2026-01-04T02:46:12.500825Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:12.502741Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:12.691626Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:12.691718Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:12.693162Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.693865Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.694469Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.695297Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.695509Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.695675Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.695862Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.696019Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.696177Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.711676Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:12.874593Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:12.909477Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:12.909561Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:12.968542Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:12.968771Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:12.968920Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:12.968964Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:12.969000Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:12.969038Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:12.969090Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:12.969134Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:12.969470Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: 
[72075186224037894] Subscribed for config changes 2026-01-04T02:46:12.971921Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:12.972000Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2117:2585], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:12.989196Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2158:2620] 2026-01-04T02:46:12.989660Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2158:2620], schemeshard id = 72075186224037897 2026-01-04T02:46:13.043051Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2249:2667] 2026-01-04T02:46:13.044660Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:13.053673Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2255:2671] Owner: [2:2254:2670]. Describe result: PathErrorUnknown 2026-01-04T02:46:13.053718Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2255:2671] Owner: [2:2254:2670]. Creating table 2026-01-04T02:46:13.053773Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2255:2671] Owner: [2:2254:2670]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:13.058282Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2306:2694], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:13.060892Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:13.074577Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2255:2671] Owner: [2:2254:2670]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:13.074683Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2255:2671] Owner: [2:2254:2670]. Subscribe on create table tx: 281474976720657 2026-01-04T02:46:13.083448Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2255:2671] Owner: [2:2254:2670]. Subscribe on tx: 281474976720657 registered 2026-01-04T02:46:13.251401Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:13.269566Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:13.533371Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2255:2671] Owner: [2:2254:2670]. Request: create. 
Transaction completed: 281474976720657. Doublechecking... 2026-01-04T02:46:13.632282Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2255:2671] Owner: [2:2254:2670]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:46:13.632354Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2255:2671] Owner: [2:2254:2670]. Column diff is empty, finishing 2026-01-04T02:46:14.248125Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:14.419735Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2743:3370], Data ... :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:47:50.306601Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 2 2026-01-04T02:47:50.306697Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 1 2026-01-04T02:47:50.306804Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 1 2026-01-04T02:47:50.306870Z node 2 :STATISTICS DEBUG: tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:47:50.307576Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:47:50.308607Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:4432:3888] Owner: [2:4431:3887]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:47:50.308685Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:4432:3888] Owner: [2:4431:3887]. Column diff is empty, finishing 2026-01-04T02:47:50.329773Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4439:3893] 2026-01-04T02:47:50.330079Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:4439:3893], schemeshard id = 72075186224037897 2026-01-04T02:47:50.330187Z node 2 :STATISTICS DEBUG: service_impl.cpp:1092: EvClientConnected, node id = 2, client id = [2:4398:3868], server id = [2:4440:3894], tablet id = 72075186224037894, status = OK 2026-01-04T02:47:50.330282Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4440:3894] 2026-01-04T02:47:50.330374Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:144: [72075186224037894] EvConnectNode, pipe server id = [2:4440:3894], node id = 2, have schemeshards count = 1, need schemeshards count = 0 2026-01-04T02:47:50.472905Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:4445:3897] 2026-01-04T02:47:50.474080Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3165:3436] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Types: TYPE_COUNT_MIN_SKETCH Database: "" } 2026-01-04T02:47:50.474154Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:40: [72075186224037894] TTxAnalyze::Execute. Update existing force traversal. 
OperationId operationId , ReplyToActorId [1:3165:3436] 2026-01-04T02:47:50.474241Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:111: [72075186224037894] TTxAnalyze::Complete 2026-01-04T02:47:50.474508Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-04T02:47:50.475290Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4446:3872], ActorId: [2:4448:3899], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:47:50.479177Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4446:3872], ActorId: [2:4448:3899], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-04T02:47:50.479348Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4446:3872], ActorId: [2:4448:3899], Start read next stream part 2026-01-04T02:47:50.535240Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 2 ], ReplyToActorId[ [2:4466:3916]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:47:50.535561Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 2 ] 2026-01-04T02:47:50.535642Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 2, ReplyToActorId = [2:4466:3916], StatRequests.size() = 1 2026-01-04T02:48:59.617076Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4446:3872], ActorId: [2:4448:3899], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:48:59.617360Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4446:3872], ActorId: [2:4448:3899], Start read next stream part 2026-01-04T02:48:59.618229Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3eesefeg7mhw6gpb0pnj7y", SessionId: ydb://session/3?node_id=2&id=OTQxNDA1ZjctMjMwMWE2MjYtNjc2MTA4NGYtOTNmMWRjMzM=, Slow query, duration: 69.134869s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:48:59.618846Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4446:3872], ActorId: [2:4637:4012], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:48:59.621958Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32160, txId: 18446744073709551615] shutting down 2026-01-04T02:48:59.622073Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4446:3872], ActorId: [2:4637:4012], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-04T02:48:59.622159Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4446:3872], ActorId: [2:4637:4012], Start read next stream part 2026-01-04T02:48:59.625569Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4446:3872], ActorId: [2:4448:3899], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:48:59.625654Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4446:3872], ActorId: [2:4448:3899], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MTE0MzY5ZjctZDY1Nzc3NzItZjdjOTQxZDItZjU3MzNlZGU=, TxId: 2026-01-04T02:48:59.734417Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4657:4028]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:48:59.734748Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:48:59.734798Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4657:4028], StatRequests.size() = 1 2026-01-04T02:49:53.615140Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4446:3872], ActorId: [2:4637:4012], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:49:53.615255Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4446:3872], ActorId: [2:4637:4012], Start read next stream part 2026-01-04T02:49:53.615939Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3egwz643f54hmttrv8ghd4", SessionId: ydb://session/3?node_id=2&id=Y2M2N2E0NGEtZmEzYzhmZDQtYTI4M2RmOWQtZjE0NDhkYTM=, Slow query, duration: 53.988911s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:49:53.618152Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4446:3872], ActorId: [2:4637:4012], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:49:53.618213Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4446:3872], ActorId: [2:4637:4012], Finish with SUCCESS, SessionId: 
ydb://session/3?node_id=2&id=OGRiMDhmYTUtMmIzNmYyNDEtNDk2ZWExMzctMmIzZjRkY2Y=, TxId: 2026-01-04T02:49:53.618319Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32220, txId: 18446744073709551615] shutting down 2026-01-04T02:49:53.618581Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4814:4113], ActorId: [2:4816:4115], Starting query actor #1 [2:4817:4116] 2026-01-04T02:49:53.618631Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4816:4115], ActorId: [2:4817:4116], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:49:53.621997Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4816:4115], ActorId: [2:4817:4116], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=MjgwY2E1NDctZjhhOTE1NS1iZTMzZDA0MC0yZTUyYzk=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:49:53.690946Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4832:4129]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:49:53.691320Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-04T02:49:53.691369Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:4832:4129], StatRequests.size() = 1 2026-01-04T02:49:53.864082Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4816:4115], ActorId: [2:4817:4116], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=MjgwY2E1NDctZjhhOTE1NS1iZTMzZDA0MC0yZTUyYzk=, TxId: 2026-01-04T02:49:53.864174Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4816:4115], ActorId: [2:4817:4116], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MjgwY2E1NDctZjhhOTE1NS1iZTMzZDA0MC0yZTUyYzk=, TxId: 2026-01-04T02:49:53.864546Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4814:4113], ActorId: [2:4816:4115], Got response [2:4817:4116] SUCCESS 2026-01-04T02:49:53.864875Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:49:53.890748Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:49:53.890831Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3165:3436] |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_SerialNotNull-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 16586, MsgBus: 32699 2026-01-04T02:48:51.649469Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332926121521498:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:51.651582Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:51.953913Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:51.959082Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:51.959188Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:51.971360Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:52.086077Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:52.140552Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:52.318461Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:52.318486Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:52.318504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:52.318614Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:52.651174Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:53.243115Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:53.250821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:55.081389Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332943301391463:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.081490Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332943301391455:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.081794Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.086058Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332943301391471:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.086149Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.090374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:55.105635Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332943301391469:2327], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:48:55.212064Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332943301391522:2533] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:48:55.285685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 15026, MsgBus: 2365 2026-01-04T02:48:56.602171Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332946938812227:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:56.602250Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:56.639521Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:56.728101Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:56.728177Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:56.728511Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:56.741962Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332946938812200:2081] 1767494936600931 != 1767494936600934 2026-01-04T02:48:56.751856Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:56.840799Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:56.874824Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:56.874845Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:56.874854Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:56.874932Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:57.285767Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:57.289787Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:57.612473Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:59.871441Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332959823714947:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:59.871588Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:59.871950Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332959823714983:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:59.872014Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332959823714982:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:59.872024Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:59.875978Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:59.889500Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591332959823714986:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:48:59.964835Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591332959823715037:2531] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:48:59.999125Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 17803, MsgBus: 11533 2026-01-04T02:49:01.533780Z node 3 :METADATA_PROVIDER WARN: log. ... r] ActorId: [10:7591333166777961404:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:47.792702Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:47.797288Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:49:47.808851Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7591333166777961403:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:49:47.906023Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:7591333166777961456:2537] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:49:47.944475Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:48.062248Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:48.162017Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [10:7591333171072928986:2355], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2026-01-04T02:49:48.162615Z node 10 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=10&id=OTZjMGJhMzItZDRlZDA2OGEtYzE5Mzk2ZDItODk1N2JjNWI=, ActorId: [10:7591333171072928984:2354], ActorState: ExecuteState, LegacyTraceId: 01ke3ejcaec4zyh2p4xh6yfpga, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Missing not null column in input: c. All not null columns should be initialized" end_position { row: 1 column: 1 } issue_code: 2032 severity: 1 } } } tx_id# trace_id# 2026-01-04T02:49:48.454615Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7591333149598091355:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:48.454690Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 7391, MsgBus: 30494 2026-01-04T02:49:49.455392Z node 11 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7591333174251271753:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:49.455490Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:49.474628Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:49:49.604375Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:49.604515Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:49.624982Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:49.637875Z node 11 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-04T02:49:49.637918Z node 11 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-04T02:49:49.653978Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:49:49.654125Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:49.684685Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:49:49.684717Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:49:49.684727Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:49:49.684841Z node 11 :NET_CLASSIFIER 
ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:49:50.054051Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:49:50.365063Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:50.462646Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:54.398941Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333195726109082:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:54.398965Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333195726109090:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:54.399069Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:54.399387Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333195726109097:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:54.399469Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:54.404625Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:49:54.422804Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7591333195726109096:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:49:54.457924Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7591333174251271753:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:54.458035Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:49:54.518135Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7591333195726109152:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:49:54.556442Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:54.653441Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:54.754789Z node 11 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [11:7591333195726109387:2356], status: BAD_REQUEST, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Missing not null column in input: c. All not null columns should be initialized, code: 2032 2026-01-04T02:49:54.755329Z node 11 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=11&id=NTNmYzA0MjYtMmVhYjBmOTUtYjI2NmZhMS1kNjdiYjYzZg==, ActorId: [11:7591333195726109385:2355], ActorState: ExecuteState, LegacyTraceId: 01ke3ejjrz7ycjxjxdrfh1fbym, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Missing not null column in input: c. All not null columns should be initialized" end_position { row: 1 column: 1 } issue_code: 2032 severity: 1 } } } tx_id# trace_id# |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/columnshard/engines/ut/unittest >> KqpPg::InsertFromSelect_Simple-useSink [GOOD] >> KqpPg::InsertFromSelect_NoReorder-useSink |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/columnshard/engines/ut/unittest >> KqpPg::EquiJoin-useSink [GOOD] >> KqpPg::ExplainColumnsReorder ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_exchange/unittest >> Cdc::AwsRegion [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:45:38.806198Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:45:39.222295Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:45:39.222883Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:45:39.222952Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:45:40.226516Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:45:40.226634Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:45:40.336644Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494736352362 != 1767494736352365 2026-01-04T02:45:40.376128Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:45:40.440377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:45:40.685421Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:45:41.080010Z node 1 :BS_CONTROLLER ERROR: {BSC07@impl.h:2204} ProcessControllerEvent event processing took too much time Type# 268637706 Duration# 0.115084s 2026-01-04T02:45:41.080128Z node 1 :BS_CONTROLLER ERROR: {BSC00@bsc.cpp:1009} StateWork event processing took too much time Type# 2146435078 Duration# 0.115227s 2026-01-04T02:45:41.119656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:45:41.132483Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:45:41.529503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:45:41.860015Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:45:41.860363Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:45:42.001061Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:45:42.001189Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:45:42.017906Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:45:42.018008Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: 
LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:45:42.018077Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:45:42.049289Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:45:42.049450Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:45:42.049523Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:45:42.060217Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:45:42.081165Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:45:42.134314Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:45:42.134500Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:45:42.134543Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:42.134595Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:45:42.134634Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:42.164260Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:45:42.164386Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:45:42.164504Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:42.164562Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:42.164609Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:45:42.164659Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:42.164801Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:45:42.165301Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:45:42.165827Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:45:42.165965Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:45:42.184936Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:42.195689Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 
72075186224037888 2026-01-04T02:45:42.195804Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:45:42.320249Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# [1:919:2785], sessionId# [0:0:0] 2026-01-04T02:45:42.323217Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:45:42.323293Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:42.323570Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:42.323615Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:45:42.323691Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:45:42.331803Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:45:42.331992Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:45:42.332250Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:45:42.332308Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:45:42.355449Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:45:42.417067Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:45:42.435030Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:45:42.435101Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:42.436133Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:45:42.436209Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:42.437385Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:45:42.437444Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:45:42.437550Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:45:42.437635Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 
72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:45:42.437690Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:45:42.437779Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:45:42.439316Z node 1 :CHANGE_EXCHANGE DEBUG: change_sender.cpp:153: [ChangeSender][72075186224037888:1][1:903:2775][Inactive] Handle NKikimrChangeExchange.TEvActivateSender 2026-01-04T02:45:42.443628Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:45:42.445088Z node 1 :TX_DATASHARD DEBUG: datashard ... tion][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.790335Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-04T02:49:55.829321Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:55.829415Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.829462Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:55.829507Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.829547Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-04T02:49:55.829666Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:55.829707Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.829740Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:55.829774Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.829823Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:49:55.861338Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:55.861432Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.861473Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:55.861531Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.861572Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:49:55.861670Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] 
Process user action and tx events 2026-01-04T02:49:55.861725Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.861757Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:55.861792Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.861840Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-04T02:49:55.882744Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:55.882849Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.882894Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:55.882942Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.882982Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-04T02:49:55.883077Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:55.883113Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.883148Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:55.883183Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.883216Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:49:55.904147Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:55.904238Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.904280Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:55.904324Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.904363Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-04T02:49:55.904454Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:55.904479Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.904503Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:55.904530Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.904553Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist 2026-01-04T02:49:55.925535Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037891][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:55.925626Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.925668Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037891][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:55.925710Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037891][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.925766Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037891][Partition][0][StateIdle] Try persist 2026-01-04T02:49:55.925970Z node 24 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037889][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:49:55.926007Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.926038Z node 24 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037889][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:49:55.926074Z node 24 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037889][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:49:55.926119Z node 24 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037889][Partition][0][StateIdle] Try persist >>>>> GetRecords path=/Root/Table/Stream1 partitionId=0 2026-01-04T02:49:55.948431Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-04T02:49:55.948518Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037889] got client message batch for topic 'Table/Stream1/streamImpl' partition 0 2026-01-04T02:49:55.948738Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037889][Partition][0][StateIdle] read cookie 5 Topic 'Table/Stream1/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2026-01-04T02:49:55.949364Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037889][Partition][0][StateIdle] read cookie 5 added 1 blobs, size 427 count 1 last offset 0, current partition end offset: 1 2026-01-04T02:49:55.949523Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037889][Partition][0][StateIdle] Reading cookie 5. Send blob request. 2026-01-04T02:49:55.949703Z node 24 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 427 accessed 1 times before, last time 1970-01-01T00:00:03.000000Z 2026-01-04T02:49:55.949863Z node 24 :PERSQUEUE DEBUG: read.h:126: [72075186224037889][PQCacheProxy]Reading cookie 5. All 1 blobs are from cache. 2026-01-04T02:49:55.950072Z node 24 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. 
Tablet '72075186224037889' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-04T02:49:55.950273Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-04T02:49:55.950597Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 407 from pos 0 cbcount 1 2026-01-04T02:49:55.951459Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 >>>>> GetRecords path=/Root/Table/Stream2 partitionId=0 2026-01-04T02:49:55.953800Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:147: Handle TEvRequest topic: 'streamImpl' requestId: 2026-01-04T02:49:55.953950Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:2698: [PQ: 72075186224037891] got client message batch for topic 'Table/Stream2/streamImpl' partition 0 2026-01-04T02:49:55.955019Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:891: [72075186224037891][Partition][0][StateIdle] read cookie 3 Topic 'Table/Stream2/streamImpl' partition 0 user $without_consumer offset 0 partno 0 count 10000 size 26214400 endOffset 1 max time lag 0ms effective offset 0 2026-01-04T02:49:55.955642Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1116: [72075186224037891][Partition][0][StateIdle] read cookie 3 added 1 blobs, size 426 count 1 last offset 0, current partition end offset: 1 2026-01-04T02:49:55.955775Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:1132: [72075186224037891][Partition][0][StateIdle] Reading cookie 3. Send blob request. 2026-01-04T02:49:55.955964Z node 24 :PERSQUEUE DEBUG: cache_eviction.h:487: Got data from cache. Partition 0 offset 0 partno 0 count 1 parts_count 0 source 1 size 426 accessed 0 times before, last time 1970-01-01T00:00:03.000000Z 2026-01-04T02:49:55.956170Z node 24 :PERSQUEUE DEBUG: read.h:126: [72075186224037891][PQCacheProxy]Reading cookie 3. All 1 blobs are from cache. 2026-01-04T02:49:55.956399Z node 24 :PERSQUEUE DEBUG: pq_l2_cache.cpp:201: PQ Cache (L2). Touched. Tablet '72075186224037891' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' 2026-01-04T02:49:55.956547Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:580: FormAnswer for 1 blobs 2026-01-04T02:49:55.956876Z node 24 :PERSQUEUE DEBUG: partition_read.cpp:500: FormAnswer processing batch offset 0 totakecount 1 count 1 size 406 from pos 0 cbcount 1 2026-01-04T02:49:55.957679Z node 24 :PERSQUEUE DEBUG: pq_impl.cpp:182: Answer ok topic: 'streamImpl' partition: 0 messageNo: 0 requestId: cookie: 0 |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_exchange/unittest |95.6%| [TA] $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TA] {RESULT} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/columnshard/engines/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TA] $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... results_accumulator.log} |95.6%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_exchange/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TestKinesisHttpProxy::CreateStreamInIncorrectDb >> TestYmqHttpProxy::TestCreateQueue >> TestKinesisHttpProxy::DifferentContentTypes >> TestKinesisHttpProxy::MissingAction >> TestYmqHttpProxy::TestSendMessage >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams >> TestKinesisHttpProxy::TestPing >> TestYmqHttpProxy::TestGetQueueUrl >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl >> KqpPg::TableSelect+useSink [GOOD] >> KqpPg::TableSelect-useSink |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |95.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |95.6%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyWithOmitIndexesTrueSkipsIndexes >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithMultipleIndexes |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithGlobalSyncIndex >> TSchemeShardConsistentCopyTablesTest::IncrementalBackupIndexesContinuesToWork >> TCdcStreamTests::Basic >> TCdcStreamTests::VirtualTimestamps >> KqpPg::CreateTempTableSerial [GOOD] >> KqpPg::DropSequence >> KqpPg::PgUpdate+useSink [GOOD] >> KqpPg::PgUpdate-useSink >> KqpPg::InsertFromSelect_NoReorder-useSink [GOOD] >> KqpPg::InsertFromSelect_Serial+useSink >> KqpEffects::DeleteWithIndex+UseSecondaryIndex-UseSink >> KqpEffects::RandomWithIndex+UseSecondaryIndex-UseSink >> KqpEffects::InsertRevert_Literal_Success >> KqpInplaceUpdate::SingleRowSimple-UseSink >> KqpEffects::DeletePkPrefixWithIndex >> KqpEffects::DeleteWithJoinAndIndex-UseSecondaryIndex-UseSink >> KqpEffects::InsertAbort_Literal_Success >> KqpImmediateEffects::ConflictingKeyRW1WR2 >> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink >> KqpImmediateEffects::ReplaceExistingKey >> KqpImmediateEffects::UpsertExistingKey >> KqpInplaceUpdate::SingleRowSimple+UseSink >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithGlobalSyncIndex [GOOD] >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyWithOmitIndexesTrueSkipsIndexes [GOOD] >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithMultipleIndexes [GOOD] >> TCdcStreamTests::VirtualTimestamps [GOOD] >> TCdcStreamTests::ResolvedTimestamps >> KqpPg::ExplainColumnsReorder [GOOD] >> TSchemeShardConsistentCopyTablesTest::IncrementalBackupIndexesContinuesToWork [GOOD] >> TCdcStreamTests::Basic [GOOD] >> TCdcStreamTests::DropMultipleStreams ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithGlobalSyncIndex [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:50:02.549928Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:50:02.550085Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:02.550139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:50:02.550183Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:50:02.550230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:50:02.550261Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:50:02.550338Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:02.550410Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:50:02.551424Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:50:02.556123Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:50:02.667801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:50:02.667872Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:02.681550Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:50:02.681795Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:50:02.682023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:50:02.690653Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:50:02.690892Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:50:02.691725Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:50:02.692001Z 
node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:50:02.712278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:02.712577Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:50:02.722773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:50:02.722874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:02.723018Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:50:02.723077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:50:02.723136Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:50:02.724385Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:50:02.932625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.936902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.937099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.937183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.937276Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.937353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.937455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.937520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.937603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.937682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.937787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.937891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.937956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.938067Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.938161Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... s: true ShowPrivateTable: true } 2026-01-04T02:50:04.251986Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:50:04.252139Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndexCopy/ValueIndex" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:50:04.252457Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndexCopy/ValueIndex" took 304us result status StatusSuccess 2026-01-04T02:50:04.253487Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndexCopy/ValueIndex" PathDescription { Self { Name: "ValueIndex" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 41 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 42 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 
72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ValueIndex" LocalPathId: 42 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } 
PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 Index impl table name: indexImplTable 2026-01-04T02:50:04.254168Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1069:3003], Recipient [1:128:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-04T02:50:04.254260Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:50:04.254363Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:50:04.254703Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" took 290us result status StatusSuccess 2026-01-04T02:50:04.255570Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndexCopy/ValueIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409548 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 42 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyWithOmitIndexesTrueSkipsIndexes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:50:02.551997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:50:02.552104Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:02.552148Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:50:02.552190Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:50:02.552243Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:50:02.552274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:50:02.552340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:02.552447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:50:02.553420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:50:02.553821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:50:02.652474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:50:02.652543Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:02.670729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:50:02.670968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:50:02.671225Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:50:02.686539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:50:02.686801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:50:02.689893Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:50:02.692146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:50:02.706032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:02.706266Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:50:02.722721Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:50:02.722825Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:02.722960Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:50:02.723016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:50:02.723138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:50:02.724359Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:50:02.911879Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.912944Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.913070Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.913174Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.913273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.913353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.913419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.913505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.913595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.913670Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.913777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.914112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.914182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: 
ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.914273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.914374Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... t TxId: 102 at schemeshard: 72057594046678944 2026-01-04T02:50:04.140856Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [1:880:2836] msg type: 269552132 msg: NKikimrTxDataShard.TEvSchemaChangedResult TxId: 102 at schemeshard: 72057594046678944 2026-01-04T02:50:04.141250Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:128:2153], Recipient [1:128:2153]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T02:50:04.141293Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T02:50:04.141355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:50:04.141407Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:93: TCopyTable::TWaitCopyTableBarrier operationId: 102:0ProgressState, operation type TxCopyTable 2026-01-04T02:50:04.141451Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:50:04.141511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1081: Set barrier, OperationId: 102:0, name: CopyTableBarrier, done: 0, blocked: 1, parts count: 1 2026-01-04T02:50:04.141556Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1125: All parts have reached barrier, tx: 102, done: 0, blocked: 1 2026-01-04T02:50:04.141649Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_states.h:76: TCopyTable::TWaitCopyTableBarrier operationId: 102:0 HandleReply TEvPrivate::TEvCompleteBarrier, msg: NKikimr::NSchemeShard::TEvPrivate::TEvCompleteBarrier { TxId: 102 Name: CopyTableBarrier }, at tablet# 72057594046678944 2026-01-04T02:50:04.141696Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 240 -> 240 2026-01-04T02:50:04.143424Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:50:04.143465Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 102:0 2026-01-04T02:50:04.143607Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:128:2153], Recipient [1:128:2153]: 
NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T02:50:04.143643Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T02:50:04.143700Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:50:04.143759Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:50:04.143865Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046678944 2026-01-04T02:50:04.143905Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:50:04.143941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:50:04.143980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:50:04.144030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:50:04.144071Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:50:04.144141Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:779:2746] message: TxId: 102 2026-01-04T02:50:04.144186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:50:04.144238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:50:04.144275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:50:04.144420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 3 2026-01-04T02:50:04.144457Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate source path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:50:04.146530Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:50:04.146641Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:643: Send to actor: [1:779:2746] msg type: 271124998 msg: NKikimrScheme.TEvNotifyTxCompletionResult TxId: 102 at schemeshard: 72057594046678944 2026-01-04T02:50:04.146818Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:50:04.146861Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:908:2856] 2026-01-04T02:50:04.147109Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [1:910:2858], Recipient [1:128:2153]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:50:04.147149Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-04T02:50:04.147178Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 102 2026-01-04T02:50:04.147583Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:979:2925], Recipient [1:128:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2026-01-04T02:50:04.147639Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:50:04.147765Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:50:04.148016Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy" took 203us result status StatusSuccess 2026-01-04T02:50:04.148548Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableCopy" PathDescription { Self { Name: "TableCopy" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "TableCopy" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 
MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:50:04.150077Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:980:2926], Recipient [1:128:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableCopy/ValueIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2026-01-04T02:50:04.150144Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:50:04.150263Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy/ValueIndex" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:50:04.150493Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy/ValueIndex" took 229us result status StatusPathDoesNotExist 2026-01-04T02:50:04.150675Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/TableCopy/ValueIndex\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/TableCopy\' (id: [OwnerId: 72057594046678944, LocalPathId: 41])" Path: "/MyRoot/TableCopy/ValueIndex" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot/TableCopy" LastExistedPrefixPathId: 41 LastExistedPrefixDescription { Self { Name: "TableCopy" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::ConsistentCopyTableWithMultipleIndexes [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:50:02.550435Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:50:02.550526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 
2026-01-04T02:50:02.550592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:50:02.550624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:50:02.550654Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:50:02.550686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:50:02.550732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:02.550800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:50:02.551746Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:50:02.556095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:50:02.647750Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:50:02.647819Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:02.668736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:50:02.670073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:50:02.671517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:50:02.683978Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:50:02.684396Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:50:02.689891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:50:02.692164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:50:02.705835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:02.706037Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:50:02.723796Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:50:02.723889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:02.723969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 
2026-01-04T02:50:02.724017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:50:02.724087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:50:02.724538Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:50:02.920605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.921580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.921713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.921783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.921885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.921951Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.922020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.922083Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.922179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.922263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.922367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.922444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.922524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.922603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.922675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... hildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value1" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value1" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 46 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 45 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:50:04.376315Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1223:3134], Recipient [1:128:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableCopy/ValueIndex2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-04T02:50:04.376373Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:50:04.376496Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy/ValueIndex2" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 
2026-01-04T02:50:04.376856Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy/ValueIndex2" took 309us result status StatusSuccess 2026-01-04T02:50:04.377763Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableCopy/ValueIndex2" PathDescription { Self { Name: "ValueIndex2" PathId: 46 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 43 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 TableIndexVersion: 1 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 47 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 46 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 46 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ValueIndex2" LocalPathId: 46 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value2" SchemaVersion: 1 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value2" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value2" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 
ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 46 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:50:04.378571Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1224:3135], Recipient [1:128:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableCopy/ValueIndex2/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true } 2026-01-04T02:50:04.378663Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:50:04.378824Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableCopy/ValueIndex2/indexImplTable" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:50:04.379137Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableCopy/ValueIndex2/indexImplTable" took 298us result status StatusSuccess 2026-01-04T02:50:04.379574Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableCopy/ValueIndex2/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 47 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 46 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "indexImplTable" Columns { Name: "value2" Type: "Utf8" TypeId: 4608 
Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value2" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 46 PathsLimit: 10000 ShardsInside: 6 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 47 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest >> TSchemeShardConsistentCopyTablesTest::IncrementalBackupIndexesContinuesToWork [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:50:02.549904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:50:02.550040Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:02.550116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:50:02.550174Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:50:02.550217Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 
2026-01-04T02:50:02.550256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:50:02.550379Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:02.550459Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:50:02.551450Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:50:02.556371Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:50:02.647629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:50:02.647694Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:02.668304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:50:02.668838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:50:02.671458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:50:02.686929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:50:02.687203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:50:02.689908Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:50:02.691880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:50:02.704142Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:02.705652Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:50:02.723688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:50:02.723769Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:02.723825Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:50:02.723874Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:50:02.723947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:50:02.724604Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:50:02.902789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.909383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.909638Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.909734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.909854Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.909933Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.910006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.910077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.910179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.910265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.910382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.910481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.910579Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.910665Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:02.910741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... BE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndex/ValueIndex" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:50:04.707443Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndex/ValueIndex" took 308us result status StatusSuccess 2026-01-04T02:50:04.708326Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndex/ValueIndex" PathDescription { Self { Name: "ValueIndex" PathId: 42 SchemeshardId: 72057594046678944 PathType: EPathTypeTableIndex CreateFinished: true CreateTxId: 104 CreateStep: 5000041 ParentPathId: 41 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 TableIndexVersion: 2 } ChildrenExist: true } Children { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 104 CreateStep: 5000041 ParentPathId: 42 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 53 PathsLimit: 10000 ShardsInside: 8 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } TableIndex { Name: "ValueIndex" LocalPathId: 42 Type: EIndexTypeGlobal State: EIndexStateReady KeyColumnNames: "value" SchemaVersion: 2 PathOwnerId: 72057594046678944 DataSize: 0 IndexImplTableDescriptions { Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 
CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } } } } PathId: 42 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:50:04.709009Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [1:1496:3320], Recipient [1:128:2153]: NKikimrSchemeOp.TDescribePath Path: "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true } 2026-01-04T02:50:04.709061Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme 2026-01-04T02:50:04.709159Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" Options { ReturnPartitioningInfo: true ReturnPartitionConfig: true BackupInfo: false ReturnBoundaries: true ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:50:04.709443Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" took 259us result status StatusSuccess 2026-01-04T02:50:04.710417Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/TableWithIndex/ValueIndex/indexImplTable" PathDescription { Self { Name: "indexImplTable" PathId: 43 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 104 CreateStep: 5000041 ParentPathId: 42 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 4 PathSubType: EPathSubTypeSyncIndexImplTable Version { GeneralVersion: 4 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 2 TablePartitionVersion: 1 } ChildrenExist: true } Table { Name: "indexImplTable" Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "value" KeyColumnNames: "key" KeyColumnIds: 1 KeyColumnIds: 2 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: "compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { SizeToSplit: 2147483648 MinPartitionsCount: 1 } } TableSchemaVersion: 2 IsBackup: false CdcStreams { Name: "19700101000000Z_continuousBackupImpl" Mode: ECdcStreamModeUpdate PathId { OwnerId: 72057594046678944 LocalId: 49 } State: ECdcStreamStateReady SchemaVersion: 1 Format: ECdcStreamFormatProto VirtualTimestamps: false AwsRegion: "" ResolvedTimestampsIntervalMs: 0 SchemaChanges: false } IsRestore: false } TablePartitions { EndOfRangeKeyPrefix: "" IsPoint: false IsInclusive: false DatashardId: 72075186233409546 } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 
RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 53 PathsLimit: 10000 ShardsInside: 8 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 2 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 43 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> KqpPg::AlterColumnSetDefaultFromSequence [GOOD] >> KqpPg::CreateTableIfNotExists_GenericQuery |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_consistent_copy_tables/unittest |95.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... results_accumulator.log} |95.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... results_accumulator.log} |95.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_consistent_copy_tables/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::ExplainColumnsReorder [GOOD] Test command err: Trying to start YDB, gRPC: 14909, MsgBus: 30526 2026-01-04T02:48:51.629095Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332928558896304:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:51.629179Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:51.907644Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:51.976863Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:51.976971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:52.006540Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:52.161925Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:52.187214Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:52.317432Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:52.317462Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:52.317542Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:52.317704Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:52.638289Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:52.963033Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:52.974391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:54.958812Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332941443799051:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:54.958916Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:54.962049Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332941443799060:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:54.962178Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:54.971866Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332941443799065:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:54.994595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:55.008818Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332941443799067:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:48:55.096919Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332945738766414:2538] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 64653, MsgBus: 19967 2026-01-04T02:48:56.113034Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332950098953801:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:56.113075Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:56.127633Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:56.185731Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:56.239203Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:56.239300Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:56.246202Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:56.304013Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:56.304036Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:56.304042Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:56.304135Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:56.335558Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:56.718478Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:57.119276Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:59.488623Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332962983856534:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:59.488755Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:59.489109Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332962983856546:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:59.489161Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332962983856547:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:59.489437Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:59.497891Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:59.514263Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591332962983856550:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:48:59.584683Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591332962983856601:2534] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 13778, MsgBus: 21421 2026-01-04T02:49:00.630418Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332963596683858:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:00.630839Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:00.749487Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:49:00.847060Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:00.883070Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:00.883171Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:00.911489Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, ... ctors::TEvents::TEvWakeup; 2026-01-04T02:49:55.633484Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333202279243219:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:55.633623Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:55.634079Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333202279243229:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:55.634154Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:55.652620Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7591333180804405899:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:55.652710Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:49:55.663562Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:55.712054Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:55.786431Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333202279243403:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:55.786549Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:55.794018Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333202279243408:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:55.794138Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333202279243409:2349], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:55.794375Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:55.803091Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:49:55.819732Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7591333202279243412:2350], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-04T02:49:55.896335Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7591333202279243463:2647] txid# 281474976710661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ... waiting for SysViewsRoster update finished 2026-01-04T02:50:02.938954Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:02.948697Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:50:02.955139Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [12:110:2157], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:50:02.955584Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:50:02.955933Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:50:03.337069Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:03.337324Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:03.386195Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:34:2081] 1767494998335481 != 1767494998335485 2026-01-04T02:50:03.392875Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:03.446901Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:03.535025Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:50:03.920548Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:03.935698Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:04.066941Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:864:2747], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.067104Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:875:2752], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.067975Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.069050Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:880:2757], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.069153Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.078401Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:04.202662Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:878:2755], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-04T02:50:04.230794Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:935:2793] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } PreparedQuery: "4c025024-f140e52b-dee61af0-bf23b191" QueryAst: "(\n(let $1 (PgType \'int4))\n(let $2 \'(\'(\'\"_logical_id\" \'219) \'(\'\"_id\" \'\"e27e3d93-ecf0d5c7-e796a979-f4401104\") \'(\'\"_partition_mode\" \'\"single\")))\n(let $3 (DqPhyStage \'() (lambda \'() (Iterator (AsList (AsStruct \'(\'\"x\" (PgConst \'1 $1)) \'(\'\"y\" (PgConst \'2 $1)))))) $2))\n(let $4 (DqCnResult (TDqOutput $3 \'\"0\") \'(\'\"y\" \'\"x\")))\n(return (KqpPhysicalQuery \'((KqpPhysicalTx \'($3) \'($4) \'() \'(\'(\'\"type\" \'\"generic\")))) \'((KqpTxResultBinding (ListType (StructType \'(\'\"x\" $1) \'(\'\"y\" $1))) \'\"0\" \'\"0\")) \'(\'(\'\"type\" \'\"query\"))))\n)\n" QueryPlan: "{\"Plan\":{\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{x: \\\"1\\\",y: \\\"2\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Node Type\":\"ResultSet\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}" YdbResults { columns { name: "y" type { pg_type { oid: 23 } } } columns { name: "x" type { pg_type { oid: 23 } } } } QueryDiagnostics: "" >> TCdcStreamTests::ResolvedTimestamps [GOOD] >> TCdcStreamTests::SchemaChanges |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> TCdcStreamTests::DropMultipleStreams [GOOD] >> TCdcStreamTests::Attributes >> TestKinesisHttpProxy::TestPing [GOOD] >> TestKinesisHttpProxy::CreateStreamInIncorrectDb [GOOD] >> TestYmqHttpProxy::TestSendMessageEmptyQueueUrl [GOOD] >> TestKinesisHttpProxy::MissingAction [GOOD] >> TestYmqHttpProxy::TestCreateQueue [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndSameParams [GOOD] >> TestYmqHttpProxy::TestGetQueueUrl [GOOD] >> TestKinesisHttpProxy::UnauthorizedGetShardIteratorRequest [GOOD] >> KqpEffects::InsertAbort_Select_Success >> TestYmqHttpProxy::TestSendMessage [GOOD] >> TestKinesisHttpProxy::DifferentContentTypes [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink >> TestKinesisHttpProxy::TestRequestBadJson >> KqpFail::OnPrepare >> TestKinesisHttpProxy::CreateStreamWithInvalidName >> TestYmqHttpProxy::TestSendMessageFifoQueue >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey >> TestKinesisHttpProxy::TestRequestWithWrongRegion >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams >> TestYmqHttpProxy::TestReceiveMessage >> KqpImmediateEffects::DeleteAfterUpsert >> TCdcStreamTests::SchemaChanges [GOOD] >> TCdcStreamTests::RetentionPeriod >> TestKinesisHttpProxy::GoodRequestPutRecords >> TCdcStreamTests::Attributes [GOOD] >> TCdcStreamTests::DocApi >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink >> KqpPg::DropSequence [GOOD] >> 
KqpPg::DeleteWithQueryService+useSink >> KqpSnapshotIsolation::TConflictReadWriteOlap [GOOD] >> KqpPg::InsertFromSelect_Serial+useSink [GOOD] >> KqpPg::InsertFromSelect_Serial-useSink >> TCdcStreamTests::RetentionPeriod [GOOD] >> TCdcStreamTests::TopicPartitions >> TCdcStreamTests::DocApi [GOOD] >> TCdcStreamTests::DocApiNegative >> KqpPg::PgUpdate-useSink [GOOD] >> KqpPg::JoinWithQueryService-StreamLookup >> KqpEffects::DeletePkPrefixWithIndex [GOOD] >> KqpEffects::AlterDuringUpsertTransaction+UseSink >> KqpEffects::InsertAbort_Literal_Success [GOOD] >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink >> KqpInplaceUpdate::SingleRowSimple+UseSink [GOOD] >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink >> KqpEffects::InsertRevert_Literal_Success [GOOD] >> KqpEffects::InsertRevert_Literal_Duplicates >> KqpImmediateEffects::ConflictingKeyRW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RWR2 >> KqpInplaceUpdate::Negative_SingleRowListFromRange+UseSink [GOOD] >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink >> KqpImmediateEffects::ReplaceExistingKey [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink >> KqpImmediateEffects::UpsertExistingKey [GOOD] >> KqpImmediateEffects::WriteThenReadWithCommit ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::TConflictReadWriteOlap [GOOD] Test command err: Trying to start YDB, gRPC: 8197, MsgBus: 61249 2026-01-04T02:47:12.312449Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332499893410465:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:12.314938Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:12.356571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:12.619715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:12.619884Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:12.624317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:12.645413Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:12.703288Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:12.704079Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332499893410430:2081] 1767494832310152 != 1767494832310155 2026-01-04T02:47:12.785746Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:12.785771Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:12.785779Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-04T02:47:12.785900Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:12.896470Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:47:13.161776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:13.318313Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:15.488116Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332512778313206:2323], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:15.488270Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:15.488588Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332512778313218:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:15.488690Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332512778313219:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:15.488751Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:15.493541Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:15.508460Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332512778313222:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:47:15.630098Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332512778313273:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:15.960746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:16.062463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:17.060837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:17.831023Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332499893410465:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:17.914946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:47:18.731594Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Read conflict with concurrent transaction.;tx_id=3; 2026-01-04T02:47:18.743381Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Read conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2026-01-04T02:47:18.743529Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_LOCKS_BROKEN Issues: { message: "Read conflict with concurrent transaction." issue_code: 2001 severity: 1 } 2026-01-04T02:47:18.743760Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [1:7591332525663223354:2950], Table: `/Root/Test` ([72057594046644480:42:1]), SessionActorId: [1:7591332525663223084:2950]Got LOCKS BROKEN for table `/Root/Test`. ShardID=72075186224037888, Sink=[1:7591332525663223354:2950].{
: Error: Read conflict with concurrent transaction., code: 2001 } 2026-01-04T02:47:18.744302Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:7591332525663223347:2950], SessionActorId: [1:7591332525663223084:2950], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001
: Error: Read conflict with concurrent transaction., code: 2001 . sessionActorId=[1:7591332525663223084:2950]. 2026-01-04T02:47:18.744570Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=M2NlMjFhOTgtZjFhN2UxZGUtODAzMjg0NjYtZjNlODk1MDY=, ActorId: [1:7591332525663223084:2950], ActorState: ExecuteState, LegacyTraceId: 01ke3edtb32gdqwr7nxbqzam0n, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [1:7591332525663223348:2950] from: [1:7591332525663223347:2950] trace_id# 2026-01-04T02:47:18.744670Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:7591332525663223348:2950] TxId: 281474976710666. Ctx: { TraceId: 01ke3edtb32gdqwr7nxbqzam0n, Database: /Root, SessionId: ydb://session/3?node_id=1&id=M2NlMjFhOTgtZjFhN2UxZGUtODAzMjg0NjYtZjNlODk1MDY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/Test`., code: 2001 subissue: {
: Error: Read conflict with concurrent transaction., code: 2001 } } trace_id# 2026-01-04T02:47:18.745008Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=M2NlMjFhOTgtZjFhN2UxZGUtODAzMjg0NjYtZjNlODk1MDY=, ActorId: [1:7591332525663223084:2950], ActorState: ExecuteState, LegacyTraceId: 01ke3edtb32gdqwr7nxbqzam0n, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`." issue_code: 2001 severity: 1 issues { message: "Read conflict with concurrent transaction." issue_code: 2001 severity: 1 } } trace_id# Trying to start YDB, gRPC: 2946, MsgBus: 13368 2026-01-04T02:47:20.307584Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332536062142151:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:47:20.307773Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:47:20.375336Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:20.561687Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:20.561768Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:20.565079Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:47:20.565873Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332536062142051:2081] 1767494840302424 != 1767494840302427 2026-01-04T02:47:20.567223Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:20.595904Z node 2 :HIV ... 
esolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.483381Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.486936Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.487011Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.487035Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.494804Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038080;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.494872Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038080;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.494918Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038080;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.498596Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038046;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.498672Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038046;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.498693Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038046;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.506733Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.506805Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.506826Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.509302Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.509355Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.509375Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.523024Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.523075Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.523090Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.527078Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.527155Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.527177Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038053;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.532275Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.532346Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.532368Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.537584Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.537660Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.537680Z node 17 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038075;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.542276Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038048;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.542346Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038048;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.542368Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038048;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.549364Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.549445Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.549474Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038065;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.551480Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.551535Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.551554Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.560416Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.560485Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.560505Z node 17 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:50:04.626152Z node 17 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3ejdxz01v8k41s9546rfrr", SessionId: ydb://session/3?node_id=17&id=NDQ2MzEyOWQtYWVkZGNjODMtNTUwN2IzN2YtMjM1M2M5OGI=, Slow query, duration: 10.883412s, status: 
STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> KqpInplaceUpdate::SingleRowSimple-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowStr+UseSink >> TCdcStreamTests::DocApiNegative [GOOD] >> TCdcStreamTests::Negative >> KqpPg::CreateTableIfNotExists_GenericQuery [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname >> TCdcStreamTests::TopicPartitions [GOOD] >> TCdcStreamTests::ReplicationAttribute >> KqpEffects::DeleteWithJoinAndIndex-UseSecondaryIndex-UseSink [GOOD] >> KqpEffects::DeleteWithJoinAndIndex-UseSecondaryIndex+UseSink >> TestKinesisHttpProxy::TestRequestBadJson [GOOD] >> TestKinesisHttpProxy::CreateStreamWithInvalidName [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithBadQueueName [GOOD] >> TestKinesisHttpProxy::TestRequestWithWrongRegion [GOOD] >> TCdcStreamTests::Negative [GOOD] >> TCdcStreamTests::DisableProtoSourceIdInfo >> TestYmqHttpProxy::TestCreateQueueWithSameNameAndDifferentParams [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlOfNotExistingQueue [GOOD] >> TestKinesisHttpProxy::TestConsumersEmptyNames >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions >> KqpWrite::UpsertNullKey >> TestKinesisHttpProxy::PutRecordsWithLongExplicitHashKey [GOOD] >> TestYmqHttpProxy::TestSendMessageFifoQueue [GOOD] >> TestKinesisHttpProxy::TestRequestWithIAM >> TestYmqHttpProxy::TestCreateQueueWithEmptyName >> KqpPg::TableArrayInsert-useSink [GOOD] >> KqpPg::Returning+useSink >> TestKinesisHttpProxy::GoodRequestPutRecords [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlWithIAM >> KqpEffects::RandomWithIndex+UseSecondaryIndex-UseSink [GOOD] >> KqpEffects::RandomWithIndex-UseSecondaryIndex+UseSink >> TestYmqHttpProxy::TestCreateQueueWithWrongBody >> KqpEffects::InsertAbort_Select_Success [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates+UseSink >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey >> TestYmqHttpProxy::TestSendMessageWithAttributes >> TestYmqHttpProxy::TestReceiveMessage [GOOD] >> TCdcStreamTests::ReplicationAttribute [GOOD] >> TCdcStreamTests::RebootSchemeShard >> TCdcStreamTests::DisableProtoSourceIdInfo [GOOD] >> TCdcStreamTests::CreateStream >> TestKinesisHttpProxy::DoubleCreateStream >> KqpInplaceUpdate::Negative_BatchUpdate+UseSink [GOOD] >> KqpInplaceUpdate::BigRow >> KqpEffects::DeleteWithIndex+UseSecondaryIndex-UseSink [GOOD] >> KqpEffects::DeleteWithIndex-UseSecondaryIndex+UseSink >> KqpImmediateEffects::DeleteAfterUpsert [GOOD] >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex >> TestYmqHttpProxy::TestReceiveMessageWithAttributes >> KqpEffects::InsertAbort_Literal_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict+UseSink >> KqpPg::DeleteWithQueryService+useSink [GOOD] >> 
KqpPg::DeleteWithQueryService-useSink >> KqpPg::InsertFromSelect_Serial-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink >> TCdcStreamTests::RebootSchemeShard [GOOD] >> TCdcStreamTests::StreamOnIndexTableNegative >> TCdcStreamTests::CreateStream [GOOD] >> TCdcStreamTests::AlterStream >> KqpPg::JoinWithQueryService-StreamLookup [GOOD] >> KqpPg::PgAggregate+useSink >> KqpEffects::AlterDuringUpsertTransaction+UseSink [GOOD] >> KqpEffects::AlterDuringUpsertTransaction-UseSink >> KqpEffects::InsertAbort_Literal_Duplicates-UseSink [GOOD] >> KqpEffects::InsertAbort_Params_Conflict+UseSink >> KqpEffects::InsertRevert_Literal_Duplicates [GOOD] >> KqpEffects::InsertRevert_Literal_Conflict >> KqpImmediateEffects::ConflictingKeyRW1RWR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1WRR2 >> TCdcStreamTests::StreamOnIndexTableNegative [GOOD] >> TCdcStreamTests::StreamOnIndexTable >> KqpImmediateEffects::WriteThenReadWithCommit [GOOD] >> KqpInplaceUpdate::SingleRowStr+UseSink [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink >> RetryPolicy::TWriteSession_SeqNoShift [GOOD] >> RetryPolicy::RetryWithBatching >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD] >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD] >> TCdcStreamTests::AlterStream [GOOD] >> TCdcStreamTests::DropStream >> TestKinesisHttpProxy::TestConsumersEmptyNames [GOOD] >> KqpFail::OnPrepare [GOOD] >> KqpImmediateEffects::AlreadyBrokenImmediateEffects >> TestKinesisHttpProxy::CreateStreamWithDifferentRetentions [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::WriteThenReadWithCommit [GOOD] Test command err: Trying to start YDB, gRPC: 30613, MsgBus: 14270 2026-01-04T02:50:04.504886Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333240049186942:2199];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:04.505288Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:04.734446Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:04.792312Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:04.792399Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:04.879120Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:04.886948Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:05.021862Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:05.187982Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:05.188013Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try 
to initialize from file: (empty maybe) 2026-01-04T02:50:05.188035Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:05.188129Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:05.508008Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:05.835689Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:05.844950Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:05.895828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.087456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.248240Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.316046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:07.916770Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333252934090520:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:07.916878Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:07.921955Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333252934090530:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:07.922055Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.512243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.556666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.609462Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.643264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.721655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.776901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.863920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.919072Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:09.022588Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333261524026003:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.022677Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.022838Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333261524026008:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.023120Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333261524026009:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.023150Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.026603Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:09.038219Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333261524026012:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:09.109450Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333261524026063:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:09.505973Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333240049186942:2199];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:09.506032Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:10.900329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000780s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-04T02:50:18.929550Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [2:7591333302427932273:2531] TxId: 281474976710676. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-04T02:50:18.929721Z node 2 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:92: StageInfo: StageId #[0,0], InputsCount: 1, OutputsCount: 1 2026-01-04T02:50:18.930003Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:836} ActorId: [2:7591333302427932282:2531] TxId: 281474976710677. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Executing physical tx TxType# 2 Stages# 1 trace_id# 2026-01-04T02:50:18.930214Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710677. Resolved key sets: 1 2026-01-04T02:50:18.930347Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:294: TxId: 281474976710677. Resolved key: { TableId: [OwnerId: 72057594046644480, LocalPathId: 54] Access: 1 SyncVersion: false Status: OkData Kind: KindRegularTable PartitionsCount: 3 DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } From: (Uint64 : NULL) IncFrom: 1 To: () IncTo: 0 } 2026-01-04T02:50:18.930578Z node 2 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 1 from task: 1 with index: 0 2026-01-04T02:50:18.930693Z node 2 :KQP_EXECUTER DEBUG: kqp_shards_resolver.cpp:76: [ShardsResolver] TxId: 281474976710677. 
Shard resolve complete, resolved shards: 1 2026-01-04T02:50:18.930738Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:259} ActorId: [2:7591333302427932282:2531] TxId: 281474976710677. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Shards nodes resolved SuccessNodes# 1 FailedNodes# 0 trace_id# 2026-01-04T02:50:18.930776Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:283} ActorId: [2:7591333302427932282:2531] TxId: 281474976710677. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Shards on nodes: node 2: [72075186224037927] trace_id# 2026-01-04T02:50:18.930818Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710677. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Total tasks: 1, readonly: true, 1 scan tasks on 1 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {18446744073709551615, 1767495018597} 2026-01-04T02:50:18.931056Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:872: TxId: 281474976710677. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Collect channels updates for task: 1 at actor [2:7591333302427932286:2531] 2026-01-04T02:50:18.931091Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:864: TxId: 281474976710677. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Sending channels info to compute actor: [2:7591333302427932286:2531], channels: 1 2026-01-04T02:50:18.931135Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [2:7591333302427932282:2531] TxId: 281474976710677. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 1 read_only# true datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-04T02:50:18.931164Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:7591333302427932282:2531] TxId: 281474976710677. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7591333302427932286:2531], trace_id# 2026-01-04T02:50:18.931194Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:7591333302427932282:2531] TxId: 281474976710677. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: WaitResolveState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:7591333302427932286:2531], trace_id# 2026-01-04T02:50:18.931216Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:2350} ActorId: [2:7591333302427932282:2531] TxId: 281474976710677. 
Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Immediate tx, become ExecuteState current_state# WaitResolveState immediate# true trace_id# 2026-01-04T02:50:18.931712Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:7591333302427932282:2531] TxId: 281474976710677. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7591333302427932286:2531] TaskId# 1 State# COMPUTE_STATE_EXECUTING Stats# {} trace_id# 2026-01-04T02:50:18.931751Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:884} ActorId: [2:7591333302427932282:2531] TxId: 281474976710677. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Waiting for: CA [2:7591333302427932286:2531], trace_id# 2026-01-04T02:50:18.931780Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:168} ActorId: [2:7591333302427932282:2531] TxId: 281474976710677. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. ActorState: ExecuteState, waiting for 1 compute actor(s) and 0 datashard(s): CA [2:7591333302427932286:2531], trace_id# 2026-01-04T02:50:18.933302Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:570} ActorId: [2:7591333302427932282:2531] TxId: 281474976710677. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Got execution state from compute actor ActorState# ExecuteState ComputeActor# [2:7591333302427932286:2531] TaskId# 1 State# COMPUTE_STATE_FINISHED Stats# {CpuTimeUs: 778 Tasks { TaskId: 1 CpuTimeUs: 157 FinishTimeMs: 1767495018932 OutputRows: 1 OutputBytes: 22 Tables { TablePath: "/Root/TestImmediateEffects" ReadRows: 1 ReadBytes: 22 AffectedPartitions: 1 } IngressRows: 1 ResultRows: 1 ResultBytes: 22 ComputeCpuTimeUs: 59 BuildCpuTimeUs: 98 HostName: "ghrun-46vtxmsa7i" NodeId: 2 StartTimeMs: 1767495018932 CreateTimeMs: 1767495018931 UpdateTimeMs: 1767495018932 } MaxMemoryUsage: 1048576 } trace_id# 2026-01-04T02:50:18.933407Z node 2 :KQP_EXECUTER INFO: kqp_planner.cpp:756: TxId: 281474976710677. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Compute actor has finished execution: [2:7591333302427932286:2531] 2026-01-04T02:50:18.933577Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [2:7591333302427932282:2531] TxId: 281474976710677. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Terminate execution trace_id# 2026-01-04T02:50:18.933613Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [2:7591333302427932282:2531] TxId: 281474976710677. 
Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000778s ReadRows: 1 ReadBytes: 22 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-04T02:50:18.934051Z node 2 :KQP_EXECUTER DEBUG: kqp_table_resolver.cpp:270: TxId: 281474976710678. Resolved key sets: 0 2026-01-04T02:50:18.934134Z node 2 :KQP_EXECUTER DEBUG: kqp_planner.cpp:585: TxId: 281474976710678. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Total tasks: 0, readonly: true, 0 scan tasks on 0 nodes, localComputeTasks: 0, MayRunTasksLocally 1, snapshot: {18446744073709551615, 1767495018597} 2026-01-04T02:50:18.934170Z node 2 :KQP_EXECUTER INFO: {KQPDATA@kqp_data_executer.cpp:2746} ActorId: [2:7591333302427932290:2531] TxId: 281474976710678. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Total tasks total_tasks# 0 read_only# false datashard_txs# 0 ev_write_txs# 0 topic_txs# 0 volatile# false immediate# true pending_compute_tasks# 0 use_followers# false trace_id# 2026-01-04T02:50:18.934221Z node 2 :KQP_EXECUTER DEBUG: {KQPDATA@kqp_data_executer.cpp:220} ActorId: [2:7591333302427932290:2531] TxId: 281474976710678. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Send Commit to BufferActor buffer_actor_id# [2:7591333302427932272:2531] trace_id# 2026-01-04T02:50:18.934275Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1124} ActorId: [2:7591333302427932290:2531] TxId: 281474976710678. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. Resource usage for last stat interval, rate limiter was not found Consumption# ComputeTime: 0.000000s ReadRows: 0 ReadBytes: 0 RequestUnits# 1 ForceFlag# true trace_id# 2026-01-04T02:50:18.937743Z node 2 :KQP_EXECUTER DEBUG: {KQPEX@kqp_executer_impl.h:1490} ActorId: [2:7591333302427932290:2531] TxId: 281474976710678. Ctx: { TraceId: 01ke3ekaa8e0bnpvy3g2bajz4w, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NzljZTgwMmUtNDAzZDVjMDktZTg3ZmMxNi00ZmRkZmRjZQ==, PoolId: default, IsStreamingQuery: 0}. 
Terminate execution trace_id# >> TestKinesisHttpProxy::TestRequestWithIAM [GOOD] >> TestYmqHttpProxy::TestGetQueueUrlWithIAM [GOOD] |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::Negative_BatchUpdate-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28477, MsgBus: 1917 2026-01-04T02:50:04.409513Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333240319083644:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:04.409673Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:04.450117Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:04.786439Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:04.786531Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:04.849510Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:04.855975Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:04.891781Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:04.894988Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333240319083536:2081] 1767495004404829 != 1767495004404832 2026-01-04T02:50:05.020575Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:05.186445Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:05.186466Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:05.186529Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:05.186612Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:05.417001Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:05.814934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:05.825044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown 
transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:50:05.873724Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.043450Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.243241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.326004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.003753Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333253203987304:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.003883Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.004454Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333257498954610:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.004519Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.512040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.571843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.623336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.659064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.699531Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.750050Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.828314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.900277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.996798Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333257498955485:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.996885Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.997351Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333257498955491:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.997398Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333257498955490:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.997506Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.002342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:09.014962Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333257498955494:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:09.122036Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333261793922841:3770] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:09.409974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333240319083644:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:09.410078Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence ... 7307199536658146131:7762515]; 2026-01-04T02:50:12.489979Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:12.508456Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:12.603928Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:12.604192Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:12.640527Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:12.640614Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:12.644881Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:12.786375Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:12.786400Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:12.786408Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:12.786487Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:12.864452Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:13.177431Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:13.183273Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:50:13.194548Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.254432Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.392854Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.475131Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.511130Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:16.029988Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333287386932510:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.030100Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.035914Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333291681899816:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.036029Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.088030Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.126437Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.167633Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.213495Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.246790Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.289937Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.345140Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.402021Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.527623Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333291681900692:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.527778Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.528244Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333291681900697:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.528299Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333291681900698:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.528423Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.532436Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:16.544553Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591333291681900701:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:16.632854Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591333291681900754:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:17.493914Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591333274502028778:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:17.493998Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:18.459618Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TestYmqHttpProxy::TestCreateQueueWithEmptyName [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithWrongBody [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowStr+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 25592, MsgBus: 16344 2026-01-04T02:50:04.418697Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333241288340429:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:04.418788Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:04.856688Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:04.971241Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:04.977908Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:05.014561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:05.143554Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:05.166226Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333241288340383:2081] 1767495004396375 != 1767495004396378 2026-01-04T02:50:05.186976Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:05.240137Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:05.240159Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: 
(empty maybe) 2026-01-04T02:50:05.240174Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:05.240295Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:05.475848Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:05.821421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:05.831835Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:05.900858Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.059554Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.228201Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.308992Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.102084Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333258468211440:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.102203Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.102452Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333258468211450:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.102481Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.519688Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.574190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.638747Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.672808Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.713769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.762306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.838778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.899907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:09.004039Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333262763179619:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.004129Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.004406Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333262763179624:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.004436Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333262763179625:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.004475Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.007896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:09.023941Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333262763179628:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:09.083257Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333262763179681:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:09.426689Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333241288340429:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:09.426742Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:11.078538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... 272818269273:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:12.695009Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:12.747165Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:12.816683Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:12.817735Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591333272818269234:2081] 1767495012683901 != 1767495012683904 2026-01-04T02:50:12.870526Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:12.870611Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:12.874260Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:12.948509Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:12.948530Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:12.948537Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:12.948621Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:13.019439Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:13.396632Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:13.407024Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:50:13.424113Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.500856Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.661627Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.726467Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:13.733469Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.249945Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333289998140283:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.250035Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.250423Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333289998140293:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.250461Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.324543Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.384917Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.430311Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.474263Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.510141Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.570048Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.621250Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.699103Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.802665Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333289998141164:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.802769Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.803626Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333289998141169:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.803644Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333289998141170:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.803748Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.807483Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:16.823041Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591333289998141173:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:16.928669Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591333289998141224:3763] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:17.690064Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591333272818269273:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:17.690155Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:18.580741Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TestKinesisHttpProxy::TestListStreamConsumers ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::SingleRowPgNotNull-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 11028, MsgBus: 1561 2026-01-04T02:50:04.586742Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333240263838246:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:04.586821Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:04.923773Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:04.967756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:04.967861Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:05.053585Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:05.058422Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:05.149104Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:05.187288Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:05.187310Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:05.187344Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-04T02:50:05.187406Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:05.609208Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:05.792963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:05.801965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:05.865245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.053191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.253340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.342045Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.195796Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333257443709248:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.195955Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.196414Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333257443709258:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.196496Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.532074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.576127Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.613239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.654774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.728600Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.791172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.831594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.914901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:09.014924Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333261738677432:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.015018Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.015490Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333261738677437:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.015530Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333261738677438:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.015796Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.019604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:09.031407Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333261738677441:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:09.115494Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333261738677492:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:09.587412Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333240263838246:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:09.587465Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:10.900968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 26739, MsgBus: 19511 2026-01-04T02:50:12.407433Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591333276197670834:2174];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:12.407644Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:12.452255Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:12.516985Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:12.519624Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:12.520026Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:12.552597Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:12.682689Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:12.707331Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:12.707354Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:12.707361Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:12.707433Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:13.231154Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:13.249101Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.306195Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.410529Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:13.457911Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.533147Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.181166Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333293377541721:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.181256Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.181650Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333293377541731:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.181693Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.265272Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.319587Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.398482Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.445574Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.491554Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.536666Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.611840Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.669294Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.776943Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333293377542606:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.777034Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.777303Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333293377542611:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.777336Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333293377542612:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.777491Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:16.781175Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:16.796294Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591333293377542615:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:16.888937Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591333293377542666:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:17.410128Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591333276197670834:2174];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:17.410200Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:18.763353Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TestKinesisHttpProxy::TestRequestNoAuthorization >> TestKinesisHttpProxy::CreateDeleteStream >> TCdcStreamTests::StreamOnIndexTable [GOOD] >> TCdcStreamTests::StreamOnBuildingIndexTable >> TestKinesisHttpProxy::PutRecordsWithIncorrectHashKey [GOOD] >> KqpPg::AlterColumnSetDefaultFromSequenceWithSchemaname [GOOD] >> KqpPg::CheckPgAutoParams+useSink >> TestYmqHttpProxy::TestGetQueueAttributes >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute >> TestYmqHttpProxy::TestSendMessageWithAttributes [GOOD] >> KqpEffects::DeleteWithJoinAndIndex-UseSecondaryIndex+UseSink [GOOD] >> KqpEffects::EffectWithSelect+UseSink >> KqpWrite::UpsertNullKey [GOOD] >> KqpWrite::ProjectReplace-UseSink >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates+UseSink [GOOD] >> KqpEffects::InsertAbort_Select_Duplicates-UseSink >> TestKinesisHttpProxy::ListShards >> TestKinesisHttpProxy::DoubleCreateStream [GOOD] >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] >> TestYmqHttpProxy::TestSetQueueAttributes >> TestYmqHttpProxy::TestReceiveMessageWithAttributes [GOOD] >> TCdcStreamTests::DropStream [GOOD] >> TCdcStreamTests::AlterStreamImplShouldFail >> TestKinesisHttpProxy::GoodRequestGetRecords >> KqpInplaceUpdate::BigRow [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict+UseSink [GOOD] >> KqpEffects::InsertAbort_Literal_Conflict-UseSink >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink >> KqpEffects::AlterAfterUpsertTransaction+UseSink >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId >> KqpEffects::UpdateOn_Select ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::SplitByLoadWithNonEmptyRangeReads [GOOD] Test command err: 2026-01-04T02:49:19.058234Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333047441122589:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:19.058352Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 
2026-01-04T02:49:19.276480Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:49:19.367059Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:19.367155Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:19.392537Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:19.425451Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:19.536801Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:49:19.653794Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:49:19.653848Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:49:19.653855Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:49:19.653955Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:49:20.082426Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:20.085656Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:22.394227Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333060326025825:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.394352Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.398137Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333060326025835:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.398226Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.776708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:22.978327Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333060326026011:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.978427Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.978927Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333060326026016:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.978978Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333060326026017:2348], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.979294Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.986349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:49:23.005979Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333060326026020:2349], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:49:23.141671Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333064620993396:3102] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:49:24.063990Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333047441122589:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:24.064084Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:49:34.328751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:49:34.328780Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:59.321961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976712661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Table has 1 shards 2026-01-04T02:50:02.856875Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 2.0425 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/Foo] 2026-01-04T02:50:07.859103Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 6.2407 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 1 at datashard: 72075186224037888 table: [/Root/Foo] 2026-01-04T02:50:07.965349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:591: Want to split tablet 72075186224037888 by load: split by load (rowCount: 2000, minRowCount: 0, shardSize: 153440, minShardSize: 0, shardCount: 1, expectedShardCount: 1, maxShardCount: 50, cpuUsage: 62407, cpuUsageThreshold: 50000) 2026-01-04T02:50:07.965373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:694: Requesting full tablet stats 72075186224037888 to split it 2026-01-04T02:50:07.965943Z node 1 :TX_DATASHARD NOTICE: datashard.cpp:2279: Started key access sampling at datashard: 72075186224037888 2026-01-04T02:50:07.966209Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats_histogram.cpp:302: Got partition histogram at tablet 72057594046644480 from datashard 72075186224037888 state Ready data size 153440 row count 2000 buckets 9 ready 1 2026-01-04T02:50:07.966275Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats_histogram.cpp:382: TTxPartitionHistogram Process detailed partition statistics at tablet 72057594046644480 from datashard 72075186224037888 from follower ID 0 for pathId [OwnerId: 72057594046644480, LocalPathId: 38], state Ready, data size buckets 9, key access buckets 0 2026-01-04T02:50:12.860494Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 6.2407 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/Foo] 2026-01-04T02:50:12.960773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:591: Want to split tablet 72075186224037888 by 
load: split by load (rowCount: 2000, minRowCount: 0, shardSize: 153440, minShardSize: 0, shardCount: 1, expectedShardCount: 1, maxShardCount: 50, cpuUsage: 62407, cpuUsageThreshold: 50000) 2026-01-04T02:50:12.960803Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:694: Requesting full tablet stats 72075186224037888 to split it 2026-01-04T02:50:12.960984Z node 1 :TX_DATASHARD NOTICE: datashard.cpp:2281: Extended key access sampling at datashard: 72075186224037888 2026-01-04T02:50:12.961180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats_histogram.cpp:302: Got partition histogram at tablet 72057594046644480 from datashard 72075186224037888 state Ready data size 153440 row count 2000 buckets 9 ready 1 2026-01-04T02:50:12.961221Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats_histogram.cpp:382: TTxPartitionHistogram Process detailed partition statistics at tablet 72057594046644480 from datashard 72075186224037888 from follower ID 0 for pathId [OwnerId: 72057594046644480, LocalPathId: 38], state Ready, data size buckets 9, key access buckets 0 2026-01-04T02:50:17.860660Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 6.2407 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037888 table: [/Root/Foo] 2026-01-04T02:50:17.961246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:591: Want to split tablet 72075186224037888 by load: split by load (rowCount: 2000, minRowCount: 0, shardSize: 153440, minShardSize: 0, shardCount: 1, expectedShardCount: 1, maxShardCount: 50, cpuUsage: 62407, cpuUsageThreshold: 50000) 2026-01-04T02:50:17.961274Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:694: Requesting full tablet stats 72075186224037888 to split it 2026-01-04T02:50:17.961974Z node 1 :TX_DATASHARD NOTICE: datashard.cpp:2281: Extended key access sampling at datashard: 72075186224037888 2026-01-04T02:50:17.962158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats_histogram.cpp ... 
200" KeyRangeEnd: "\002\000\004\000\000\000O\"-\214\000\000\000\200" ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\004\000\000\000O\"-\214\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037888 SplitBoundary { SerializedKeyPrefix: "\002\000\004\000\000\000O\"-\214\000\000\000\200" } SchemeshardId: 72057594046644480 2026-01-04T02:50:17.963955Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715693:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:50:17.970995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715693:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-01-04T02:50:17.982824Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976715693:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-04T02:50:17.983110Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976715693:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-04T02:50:17.983165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715693:0 2 -> 3 2026-01-04T02:50:17.990752Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:84: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976715693:0, at schemeshard: 72057594046644480 2026-01-04T02:50:17.998374Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037890 actor [1:7591333296549332686:3793] 2026-01-04T02:50:18.000288Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:7591333296549332687:3780] 2026-01-04T02:50:18.024058Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037890 2026-01-04T02:50:18.024179Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037890, state: WaitScheme 2026-01-04T02:50:18.024343Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 0 2026-01-04T02:50:18.024395Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:50:18.024444Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:50:18.024541Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-04T02:50:18.036835Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715693:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715693:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715693 TabletId: 72075186224037889 2026-01-04T02:50:18.037145Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715693:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715693:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715693 TabletId: 72075186224037890 2026-01-04T02:50:18.037169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: 
Change state for txid 281474976715693:0 3 -> 131 2026-01-04T02:50:18.042585Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:50:18.088228Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037890 2026-01-04T02:50:18.088386Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037890 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:50:18.088443Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037889 2026-01-04T02:50:18.088447Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-04T02:50:18.088484Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037890 2026-01-04T02:50:18.088546Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:50:18.088594Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-04T02:50:18.088622Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2026-01-04T02:50:18.088860Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037890 TxInFly 0 2026-01-04T02:50:18.088964Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-04T02:50:18.091719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976715693:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715693 TabletId: 72075186224037888 2026-01-04T02:50:18.092089Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715693:0 131 -> 132 2026-01-04T02:50:18.094802Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T02:50:18.095117Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-04T02:50:18.095188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976715693:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:50:18.098159Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046644480, cookie: 281474976715693 2026-01-04T02:50:18.098222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715693 2026-01-04T02:50:18.098243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715693, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 5 2026-01-04T02:50:18.106579Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, 
operationId: 281474976715693:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-01-04T02:50:18.106679Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-01-04T02:50:18.106720Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715693:0 progress is 1/1 2026-01-04T02:50:18.106769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715693:0 2026-01-04T02:50:18.108165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715693:0 2026-01-04T02:50:18.206166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-04T02:50:18.206291Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, next wakeup# 14.999831s, rate# 0, in queue# 1 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-04T02:50:18.206699Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7591333047441122962:2212] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037890 2026-01-04T02:50:18.206927Z node 1 :TX_DATASHARD INFO: datashard__compact_borrowed.cpp:22: TEvCompactBorrowed request from [1:7591333047441122962:2212] for table [OwnerId: 72057594046644480, LocalPathId: 38] at tablet 72075186224037889 2026-01-04T02:50:18.234349Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037889, shardIdx# 72057594046644480:2 in# 28, next wakeup# 14.971803s, rate# 0, in queue# 0 shards, running# 1 shards at schemeshard 72057594046644480 2026-01-04T02:50:18.236551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:152: Finished borrowed compaction for pathId# [OwnerId: 72057594046644480, LocalPathId: 38], datashard# 72075186224037890, shardIdx# 72057594046644480:3 in# 30, next wakeup# 14.969582s, rate# 0, in queue# 0 shards, running# 0 shards at schemeshard 72057594046644480 2026-01-04T02:50:18.239669Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037889 CompletedLoansChanged 2026-01-04T02:50:18.239873Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:53: 72075186224037890 CompletedLoansChanged 2026-01-04T02:50:18.252622Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037888 Initiating switch from PreOffline to Offline state 2026-01-04T02:50:18.259180Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:50:18.259244Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037888 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:50:18.262442Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, 
at schemeshard: 72057594046644480 2026-01-04T02:50:18.262670Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037888, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:50:18.269567Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037888 reason = ReasonStop 2026-01-04T02:50:18.269882Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-04T02:50:18.271024Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-04T02:50:18.271136Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-04T02:50:19.372452Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72057594046644480 2026-01-04T02:50:19.372533Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046644480 2026-01-04T02:50:19.372571Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046644480 Table has 2 shards >> TCdcStreamTests::StreamOnBuildingIndexTable [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanEnabled |95.7%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest >> KqpImmediateEffects::ConflictingKeyW1RR2 >> KqpEffects::RandomWithIndex-UseSecondaryIndex+UseSink [GOOD] >> KqpEffects::RandomWithIndex+UseSecondaryIndex+UseSink >> KqpPg::Returning+useSink [GOOD] >> KqpPg::Returning-useSink >> KqpSinkTx::TIsolationSettingTest-IsOlap+UsePragma [GOOD] >> KqpSinkTx::TIsolationSettingTest+IsOlap+UsePragma ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::MergeByNoLoadAfterSplit [GOOD] Test command err: 2026-01-04T02:49:19.056039Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333048670723525:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:19.056116Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:19.107945Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:49:19.336662Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:49:19.375148Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:19.375229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:19.386719Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:19.482334Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:19.641959Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:49:19.653637Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:49:19.653736Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:49:19.653747Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:49:19.653866Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:49:20.046063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:20.066634Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Triggering split by load 2026-01-04T02:49:22.380911Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333061555626771:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.381000Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.381300Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333061555626780:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.381351Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.776567Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:22.970687Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333061555626957:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.970807Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.971463Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333061555626960:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.971516Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.991785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Table has 1 shards 2026-01-04T02:49:23.134278Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333065850594360:2379], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.134347Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.134474Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333065850594374:2388], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.134513Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333065850594375:2389], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.134531Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333065850594376:2390], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.135005Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333065850594381:2392], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.135081Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.142001Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333065850594377:2391], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.160704Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333065850594415:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.162129Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333065850594417:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.166427Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.167101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333065850594425:2402], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.167169Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:23.168150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata, operationId: 281474976710662:0, at schemeshard: 72057594046644480 2026-01-04T02:49:23.168333Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710662:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:49:23.168393Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager, operationId: 281474976710662:1, at schemeshard: 72057594046644480 2026-01-04T02:49:23.168477Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710662:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:49:23.168494Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/.metadata/workload_manager/pools, operationId: 281474976710662:2, at schemeshard: 72057594046644480 2026-01-04T02:49:23.168558Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710662:3, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:49:23.168592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_resource_pool.cpp:148: [72057594046644480] TCreateResourcePool Propose: opId# 281474976710662:3, path# /Root/.metadata/workload_manager/pools/default 2026-01-04T02:49:23.169298Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710662:3 1 -> 128 2026-01-04T02:49:23.169595Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation ... CA Id [1:7591333151750043488:2369]. Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 2026-01-04T02:49:47.088359Z node 1 :KQP_COMPUTE WARN: kqp_read_actor.cpp:1107: TxId: 281474976724960, task: 1, CA Id [1:7591333151750043571:2371]. 
Got EvDeliveryProblem, TabletId: 72075186224037888, NotDelivered: 1 Table has 2 shards Loading new shards 2026-01-04T02:49:58.006640Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 1.115 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037890 table: [/Root/Foo] 2026-01-04T02:50:03.006928Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 1.115 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037890 table: [/Root/Foo] 2026-01-04T02:50:08.008237Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 1.115 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037890 table: [/Root/Foo] 2026-01-04T02:50:13.008272Z node 1 :TX_DATASHARD ERROR: datashard__stats.cpp:704: CPU usage 1.2419 is higher than threshold of 1 in-flight Tx: 0 immediate Tx: 0 readIterators: 0 at datashard: 72075186224037890 table: [/Root/Foo] Table has 2 shards Fast forward > 10h to trigger the merge 2026-01-04T02:50:18.109236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__table_stats.cpp:560: Propose merge request: Transaction { WorkingDir: "/Root" OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 } Internal: true FailOnExist: false } TxId: 281474976715694 TabletId: 72057594046644480, reason: shard with tabletId: 72075186224037889 merge by load (shardLoad: 0.02), shardToMergeCount: 2, totalSize: 0, sizeToMerge: 0, totalLoad: 0.04, loadThreshold: 0.07 2026-01-04T02:50:18.109448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /Root/Foo, tableId: , opId: 281474976715694:0, at schemeshard: 72057594046644480, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 2026-01-04T02:50:18.110139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /Root/Foo, tableId: , opId: 281474976715694:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000(\210\317g\000\000\000\200" TabletID: 72075186224037889 ShardIdx: 2 } SourceRanges { KeyRangeBegin: "\002\000\004\000\000\000(\210\317g\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037890 ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" ShardIdx: 4 }, request: TablePath: "/Root/Foo" SourceTabletId: 72075186224037889 SourceTabletId: 72075186224037890 SchemeshardId: 72057594046644480 2026-01-04T02:50:18.110182Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715694:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:50:18.119970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715694:0 ProgressState, operation type: TxSplitTablePartition, at tablet# 72057594046644480 2026-01-04T02:50:18.137403Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 281474976715694:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046644480 2026-01-04T02:50:18.137505Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard_impl.cpp:2700: Change state for txid 281474976715694:0 2 -> 3 2026-01-04T02:50:18.142598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:84: TSplitMerge TConfigureDestination ProgressState, operationId: 281474976715694:0, at schemeshard: 72057594046644480 2026-01-04T02:50:18.145158Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037891 actor [1:7591333302073922559:3159] 2026-01-04T02:50:18.173153Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037891 2026-01-04T02:50:18.173281Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037891, state: WaitScheme 2026-01-04T02:50:18.173482Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-01-04T02:50:18.178024Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715694:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715694:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715694 TabletId: 72075186224037891 2026-01-04T02:50:18.178076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715694:0 3 -> 131 2026-01-04T02:50:18.180031Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976715694:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:50:18.222164Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037891 2026-01-04T02:50:18.222294Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037891 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:50:18.222344Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-04T02:50:18.222374Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037891 2026-01-04T02:50:18.222627Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037891 TxInFly 0 2026-01-04T02:50:18.226456Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976715694:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715694 TabletId: 72075186224037889 2026-01-04T02:50:18.233447Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976715694:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715694 TabletId: 72075186224037890 2026-01-04T02:50:18.233771Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715694:0 131 -> 132 2026-01-04T02:50:18.236165Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T02:50:18.236436Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-04T02:50:18.236503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 
281474976715694:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:50:18.238372Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 38 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976715694 2026-01-04T02:50:18.238419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715694 2026-01-04T02:50:18.238438Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715694, pathId: [OwnerId: 72057594046644480, LocalPathId: 38], version: 6 2026-01-04T02:50:18.249063Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037889 Initiating switch from PreOffline to Offline state 2026-01-04T02:50:18.249102Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037890 Initiating switch from PreOffline to Offline state 2026-01-04T02:50:18.251644Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715694:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037889, at schemeshard: 72057594046644480 2026-01-04T02:50:18.251936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715694:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037890, at schemeshard: 72057594046644480 2026-01-04T02:50:18.251994Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715694:0 progress is 1/1 2026-01-04T02:50:18.252029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715694:0 progress is 1/1 2026-01-04T02:50:18.252063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715694:0 2026-01-04T02:50:18.255262Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037889 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:50:18.255279Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715694:0 2026-01-04T02:50:18.255436Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:50:18.255842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:50:18.256027Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:50:18.260048Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037889 reason = ReasonStop 2026-01-04T02:50:18.260093Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-04T02:50:18.261418Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-04T02:50:18.261457Z node 1 
:HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-04T02:50:18.262475Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037889 2026-01-04T02:50:18.262563Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037889 2026-01-04T02:50:18.264169Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-04T02:50:18.264243Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-04T02:50:19.438387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72057594046644480 2026-01-04T02:50:19.438493Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72057594046644480 2026-01-04T02:50:19.438554Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72057594046644480 |95.7%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest >> KqpPg::DeleteWithQueryService-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpInplaceUpdate::BigRow [GOOD] Test command err: Trying to start YDB, gRPC: 15590, MsgBus: 4647 2026-01-04T02:50:08.130792Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333257296630943:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:08.131130Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:08.164389Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:08.569104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:08.569200Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:08.609481Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:08.740296Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:08.740994Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:08.746028Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333257296630819:2081] 1767495008111813 != 1767495008111816 2026-01-04T02:50:08.918311Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:09.030516Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:09.030535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:09.030541Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:09.030608Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:09.134838Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:09.806985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:09.816055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:09.921845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:10.121752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:10.303737Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:10.375271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:12.263310Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333274476501884:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:12.263457Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:12.263902Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333274476501894:2401], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:12.263991Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:12.582355Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:12.617096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:12.655977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:12.693706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:12.730655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:12.801830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:12.850135Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:12.929705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.018698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333278771470067:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:13.018779Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:13.019513Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333278771470072:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:13.019602Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:13.019607Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333278771470073:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:13.023528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:13.036481Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333278771470076:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:13.124372Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333278771470127:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:13.130816Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333257296630943:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:13.130964Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence ... 291975246753:2242];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:16.497896Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:16.645956Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:16.653904Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:16.653987Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:16.665872Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591333291975246549:2081] 1767495016476460 != 1767495016476463 2026-01-04T02:50:16.683307Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:16.691847Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:16.846876Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:16.846899Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:16.846907Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:16.846993Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:16.859794Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:17.247659Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:17.252257Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:50:17.269699Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:17.332803Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:17.479031Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:17.489746Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:17.576525Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.086835Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333309155117598:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:20.086950Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:20.087198Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333309155117607:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:20.087259Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:20.156459Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.183878Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.213982Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.240293Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.266519Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.332778Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.371033Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.421497Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.503646Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333309155118474:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:20.503745Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:20.505544Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333309155118479:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:20.505546Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333309155118480:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:20.505605Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:20.509173Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:20.521305Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591333309155118483:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:20.605392Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591333309155118536:3762] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:21.485308Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591333291975246753:2242];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:21.485362Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:22.120215Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TCdcStreamTests::AlterStreamImplShouldFail [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail >> KqpEffects::DeleteWithIndex-UseSecondaryIndex+UseSink [GOOD] >> KqpEffects::DeleteWithIndex+UseSecondaryIndex+UseSink >> KqpPg::PgAggregate+useSink [GOOD] >> KqpPg::PgAggregate-useSink >> TCdcStreamWithInitialScanTests::InitialScanEnabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanDisabled ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::DeleteWithQueryService-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 3715, MsgBus: 7731 2026-01-04T02:48:55.570249Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332945118359625:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:55.572193Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:55.617967Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:55.879050Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:55.879157Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:55.881976Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:55.924787Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:55.968668Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:55.970347Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription 
[1:7591332945118359597:2081] 1767494935568194 != 1767494935568197 2026-01-04T02:48:56.071126Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:56.071162Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:56.071169Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:56.071253Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:56.103503Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:56.460445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:56.468609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:56.578174Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:58.647051Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332958003262376:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:58.647175Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:58.647566Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332958003262386:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:58.647614Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:58.690972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:58.852333Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332958003262514:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:58.852433Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:58.852648Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332958003262519:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:58.852694Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332958003262521:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:58.853000Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:58.856679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:58.869757Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332958003262523:2340], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:48:58.978724Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332958003262574:2619] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 1 1 1 Trying to start YDB, gRPC: 26179, MsgBus: 22808 2026-01-04T02:49:00.678486Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332966437129783:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:00.678552Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:00.720279Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:49:00.870115Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:49:00.878560Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:00.878634Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:00.893539Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332966437129746:2081] 1767494940674418 != 1767494940674421 2026-01-04T02:49:00.910673Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:00.911803Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:01.100473Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:49:01.100489Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:49:01.100499Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:49:01.100583Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:49:01.163058Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:49:01.591974Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:01.606934Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 
72057594046644480 2026-01-04T02:49:01.713946Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:04.446976Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332983616999815:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:04.447080Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:04.447785Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332983616999825:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:04.447838Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't ... 0:11.287824Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:50:15.703432Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333286229254656:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:15.703619Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:15.706109Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333286229254666:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:15.706241Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:15.747156Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:15.862290Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333286229254762:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:15.862446Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:15.863042Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333286229254768:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:15.863116Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333286229254767:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:15.865945Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:15.874506Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:15.896359Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7591333286229254771:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:50:15.995562Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7591333286229254822:2605] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 16173, MsgBus: 25672 2026-01-04T02:50:17.854457Z node 12 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7591333297675460041:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:17.854637Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:17.905402Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:18.024089Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:18.026941Z node 12 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [12:7591333297675459927:2081] 1767495017833204 != 1767495017833207 2026-01-04T02:50:18.043911Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:18.044039Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:18.074274Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:18.093075Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:18.209455Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:18.209485Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:18.209502Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:18.209621Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:18.864159Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:19.073326Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:22.853786Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7591333297675460041:2148];send_to=[0:7307199536658146131:7762515]; 
2026-01-04T02:50:22.853935Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:22.969851Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591333319150297301:2326], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:22.970005Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:22.970775Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591333319150297311:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:22.970888Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.060718Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.159831Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591333323445264705:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.160001Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.160493Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591333323445264710:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.160576Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591333323445264711:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.160764Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.166775Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:23.193992Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7591333323445264714:2343], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:50:23.264448Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7591333323445264765:2600] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> KqpEffects::AlterDuringUpsertTransaction-UseSink [GOOD] >> KqpEffects::EmptyUpdate+UseSink >> KqpEffects::InsertAbort_Params_Conflict+UseSink [GOOD] >> KqpImmediateEffects::UpdateAfterUpsert >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] >> TCdcStreamTests::DropStreamImplShouldFail [GOOD] >> TCdcStreamTests::CopyTableShouldNotCopyStream >> KqpImmediateEffects::MultiShardUpsertAfterRead >> KqpImmediateEffects::DeleteOnAfterInsertWithIndex [GOOD] >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink >> TCdcStreamWithInitialScanTests::InitialScanDisabled [GOOD] >> TCdcStreamWithInitialScanTests::InitialScanProgress >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink [GOOD] >> TestKinesisHttpProxy::TestListStreamConsumers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterDuringUpsertTransaction-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 27136, MsgBus: 22321 2026-01-04T02:50:04.411832Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333240383522437:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:04.412227Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:04.744119Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:04.783367Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:04.783465Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:04.854081Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:04.920798Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:05.030674Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:05.194308Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:05.194323Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:05.194332Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:05.194418Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad 
distributable configuration 2026-01-04T02:50:05.412192Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:05.849304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:05.856907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:50:05.908384Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.095102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.272519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.356708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.077453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333257563393323:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.077586Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.077966Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333257563393333:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.078025Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.521030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.594608Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.646801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.685089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.739521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.828820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.916733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.973153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:09.069453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333261858361505:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.069599Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.070295Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333261858361510:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.070354Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333261858361511:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.070467Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.074690Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:09.085607Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333261858361514:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:09.175469Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333261858361565:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:09.413200Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333240383522437:2194];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:09.416174Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:10.914008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... : will try to initialize from file: (empty maybe) 2026-01-04T02:50:19.553617Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:19.553715Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:19.557906Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:20.037048Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:20.041925Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:50:20.058410Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.109023Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.255803Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.336972Z node 3 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.374998Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:22.907198Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333317981060999:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:22.907288Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:22.907736Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333317981061009:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:22.907806Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:22.966602Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.008346Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.046012Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.085445Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.115788Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.160986Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.218243Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.277391Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.367334Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333322276029180:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.367423Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.367528Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333322276029185:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.367689Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333322276029187:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.367750Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.372333Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:23.384131Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333322276029189:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:23.491234Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333322276029241:3780] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:24.362263Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333305096157278:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:24.362439Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:25.424358Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:25.638904Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:50:25.695810Z node 3 :TX_DATASHARD ERROR: datashard_pipeline.cpp:1585: Shard 72075186224037927 cannot parse tx 281474976715675: Table '/Root/TestTable' scheme changed. 2026-01-04T02:50:25.696082Z node 3 :KQP_EXECUTER ERROR: {KQPDATA@kqp_data_executer.cpp:888} ActorId: [3:7591333330865964255:2522] TxId: 281474976715675. Ctx: { TraceId: 01ke3ekh0xdsm12pkna99zpq92, Database: /Root, SessionId: ydb://session/3?node_id=3&id=MjY1Y2NlZTEtNDVkNTVhMjgtNDg3MjRiMjgtNzk3OTU1ZWM=, PoolId: default, IsStreamingQuery: 0}. ERROR: [SCHEME_CHANGED] Table '/Root/TestTable' scheme changed.; trace_id# 2026-01-04T02:50:25.696439Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=MjY1Y2NlZTEtNDVkNTVhMjgtNDg3MjRiMjgtNzk3OTU1ZWM=, ActorId: [3:7591333330865964110:2522], ActorState: ExecuteState, LegacyTraceId: 01ke3ekh0xdsm12pkna99zpq92, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Table \'/Root/TestTable\' scheme changed." 
issue_code: 2028 severity: 1 }{ message: "Query invalidated on scheme/internal error during Data execution" issue_code: 2019 severity: 1 } trace_id# |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::AlreadyBrokenImmediateEffects [GOOD] >> KqpImmediateEffects::ConflictingKeyR1RWR2 >> TestKinesisHttpProxy::TestRequestNoAuthorization [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Params_Conflict+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 23802, MsgBus: 21931 2026-01-04T02:50:04.426462Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333241963016019:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:04.426554Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:04.883565Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:04.955032Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:04.962266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:04.962390Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:04.995496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:05.105896Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:05.190388Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:05.190417Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:05.190457Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:05.190526Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:05.432058Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:05.788991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:05.800610Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:05.864113Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.055396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.240358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.312219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.068500Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333259142887026:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.068628Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.069079Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333259142887036:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.069171Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.511989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.551868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.590801Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.639798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.674555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.727880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.784968Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.896912Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:09.048940Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333263437855205:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.049026Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.049283Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333263437855211:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.049283Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333263437855210:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.049322Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.052882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:09.068252Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333263437855214:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:09.132595Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333263437855265:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:09.430290Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333241963016019:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:09.430364Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 5149, MsgBus: 4422 2026-01-04T02:50:12.236861Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591333276325438391:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:12.236921Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp ... 4T02:50:20.453626Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.541051Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.720415Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.796371Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.826047Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:23.359765Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333321553028899:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.359875Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.362203Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333321553028909:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.362285Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.433551Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.466956Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.510678Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.546477Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.579999Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.620672Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.655529Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.706029Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.787146Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333321553029776:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.787225Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.787473Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333321553029781:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.787480Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333321553029782:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.787527Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.791978Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:23.803763Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333321553029785:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:23.885800Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333321553029836:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:24.812868Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333304373157958:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:24.812986Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:25.700074Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2026-01-04T02:50:25.700288Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-04T02:50:25.700437Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 3 at tablet 72075186224037888 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-04T02:50:25.700624Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [3:7591333330142964768:2531], Table: `/Root/TwoShard` ([72057594046644480:38:1]), SessionActorId: [3:7591333330142964738:2531]Got CONSTRAINT VIOLATION for table `/Root/TwoShard`. ShardID=72075186224037888, Sink=[3:7591333330142964768:2531].{
: Error: Conflict with existing key., code: 2012 } 2026-01-04T02:50:25.700716Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7591333330142964761:2531], SessionActorId: [3:7591333330142964738:2531], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[3:7591333330142964738:2531]. 2026-01-04T02:50:25.700955Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=M2ZlNDMxY2MtMTFlODhiNzAtOTc1YTQzZTQtZDkwMzdjM2M=, ActorId: [3:7591333330142964738:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3ekgw79zrf6swvd2qm58eh, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7591333330142964762:2531] from: [3:7591333330142964761:2531] trace_id# 2026-01-04T02:50:25.701055Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7591333330142964762:2531] TxId: 281474976710673. Ctx: { TraceId: 01ke3ekgw79zrf6swvd2qm58eh, Database: /Root, SessionId: ydb://session/3?node_id=3&id=M2ZlNDMxY2MtMTFlODhiNzAtOTc1YTQzZTQtZDkwMzdjM2M=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TwoShard`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-04T02:50:25.701341Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=M2ZlNDMxY2MtMTFlODhiNzAtOTc1YTQzZTQtZDkwMzdjM2M=, ActorId: [3:7591333330142964738:2531], ActorState: ExecuteState, LegacyTraceId: 01ke3ekgw79zrf6swvd2qm58eh, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TwoShard`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TestKinesisHttpProxy::CreateDeleteStream [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithWrongAttribute [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithAllAttributes [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertRevert_Literal_Conflict [GOOD] Test command err: Trying to start YDB, gRPC: 29180, MsgBus: 21870 2026-01-04T02:50:04.419392Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333238863772221:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:04.419779Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:04.488241Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:04.838250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:04.838368Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:04.845903Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:04.937156Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:04.937268Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333238863772001:2081] 1767495004404019 != 1767495004404022 2026-01-04T02:50:04.937334Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:05.162889Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:05.187251Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:05.187270Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:05.187290Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:05.187349Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-04T02:50:05.422117Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:05.796706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:05.865866Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.085089Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.273048Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.353186Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.000161Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333251748675763:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.000285Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.000993Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333251748675772:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.001040Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.511965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.545525Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.580614Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.622012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.660155Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.712053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.770685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.822937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.933422Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333256043643936:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.933509Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.933841Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333256043643941:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.933883Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.933929Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333256043643942:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.941997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:08.957996Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333256043643945:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:09.036309Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333260338611294:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:09.425902Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333238863772221:2250];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:09.425970Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 8599, MsgBus: 18793 2026-01-04T02:50:12.460419Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: flin ... accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:17.453712Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591333273384345286:2063];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:17.453800Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 23656, MsgBus: 14138 2026-01-04T02:50:19.766913Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591333302668175075:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:19.767006Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:19.778343Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:19.900987Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:19.902510Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:19.902601Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:19.930946Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:19.972136Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:20.004398Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:20.004425Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:20.004438Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 
2026-01-04T02:50:20.004528Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:20.470769Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:20.488305Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.576762Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.726139Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.774956Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:20.802062Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.398768Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333319848046077:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.398876Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.399348Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333319848046086:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.399414Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.479417Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.524551Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.561474Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.599721Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.637441Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.684024Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.726066Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.778510Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.888318Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333319848046960:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.888424Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.888902Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333319848046965:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.888964Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333319848046966:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.889001Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.893028Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:23.907139Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333319848046969:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:24.000754Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333319848047020:3774] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:24.769955Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333302668175075:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:24.770037Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; >> KqpPg::V1CreateTable [GOOD] >> KqpPg::ValuesInsert+useSink >> TestKinesisHttpProxy::TestUnauthorizedPutRecords >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 16025, MsgBus: 27671 2026-01-04T02:50:04.406128Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333240661440083:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:04.406250Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:04.947447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:04.947557Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:04.950533Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:05.013105Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:05.041682Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:05.212097Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:05.226537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:05.226568Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:05.226584Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:05.226674Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:05.440688Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:05.819861Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:05.828265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:05.868313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.087693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.260939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.349742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.156637Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333257841311110:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.156795Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.157293Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333257841311120:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.157382Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.516182Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.557846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.590791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.630679Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.671268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.717341Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.787025Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.833217Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.926212Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333257841311993:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.926306Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.926799Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333257841311998:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.926851Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333257841311999:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.926893Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.939652Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:08.954096Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333257841312002:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:09.039276Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333262136279349:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:09.406014Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333240661440083:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:09.406101Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:10.912861Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... _classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:19.966120Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:19.966129Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:19.966230Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:20.385699Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:20.400894Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.460490Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.621013Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.690217Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.839121Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:23.394479Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333319979448192:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.394581Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.398274Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333319979448202:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.398362Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.475747Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.513780Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.553344Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.587297Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.621896Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.665147Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.714787Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.794027Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.877512Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333319979449076:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.877608Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.878031Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333319979449081:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.878071Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333319979449082:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.878206Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.882715Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:23.896038Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333319979449085:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:23.992675Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333319979449136:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:24.773909Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333302799577179:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:24.773981Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:25.627678Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.320071Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [3:7591333332864351629:2533], TxId: 281474976710679, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3ekhhwcjp8fca9rt1cme6e. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=NGRlMjEwZTAtMzcyZmYyYjMtODA2MTQxLTVjOTBjOThm. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready) } } 2026-01-04T02:50:26.320141Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7591333332864351629:2533], TxId: 281474976710679, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3ekhhwcjp8fca9rt1cme6e. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=NGRlMjEwZTAtMzcyZmYyYjMtODA2MTQxLTVjOTBjOThm. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready) } }. 2026-01-04T02:50:26.320734Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=NGRlMjEwZTAtMzcyZmYyYjMtODA2MTQxLTVjOTBjOThm, ActorId: [3:7591333328569384038:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3ekhhwcjp8fca9rt1cme6e, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Read request aborted" severity: 1 issues { message: "Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready)" severity: 1 } } trace_id# |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpWrite::ProjectReplace-UseSink [GOOD] >> TestYmqHttpProxy::TestGetQueueAttributes [GOOD] >> KqpImmediateEffects::ConflictingKeyR1WR2 >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer >> TestKinesisHttpProxy::ListShards [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithTags >> TestYmqHttpProxy::BillingRecordsForJsonApi ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::TxWithReadAtTheEnd-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 28736, MsgBus: 26526 2026-01-04T02:50:04.480061Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333238182164672:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:04.480129Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:04.836345Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:04.850844Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:04.850957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:04.922838Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:04.987284Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:05.054568Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:05.186583Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:05.186618Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:05.186651Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:05.186721Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:05.501797Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:05.838438Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:05.851904Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:05.902360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.102669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.273574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.349016Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.246761Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333255362035668:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.246892Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.249982Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333255362035678:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.250095Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.610998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.647090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.682084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.722961Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.759512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.807075Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.864756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.933038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:09.078162Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333259657003854:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.078266Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.078682Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333259657003859:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.078724Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333259657003860:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.079005Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.086360Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:09.099107Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333259657003863:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:09.173375Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333259657003914:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:09.475667Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333238182164672:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:09.475751Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:10.907752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... ing to start YDB, gRPC: 3051, MsgBus: 24255 2026-01-04T02:50:20.118231Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591333310657698013:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:20.118415Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:20.133336Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:20.197846Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:20.236755Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:20.236878Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:20.250862Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:20.313451Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:20.318251Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:20.318274Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:20.318283Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:20.318371Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:20.795081Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 
281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:20.810275Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:50:20.820126Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.888367Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:21.047398Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:21.109051Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:21.124613Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:23.585219Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333323542601732:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.585315Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.585625Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333323542601742:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.585667Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:23.654517Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.694940Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.727496Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.800241Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.831881Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.872440Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.912352Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.958005Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:24.056149Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333327837569907:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:24.056247Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:24.056487Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333327837569912:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:24.056517Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333327837569913:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:24.056567Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:24.060108Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:24.072137Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333327837569916:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:24.138444Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333327837569967:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:25.121951Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333310657698013:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:25.122026Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:26.169534Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TCdcStreamWithInitialScanTests::InitialScanProgress [GOOD] >> TCdcStreamWithInitialScanTests::WithoutPqTransactions >> TestYmqHttpProxy::TestDeleteQueue >> TestKinesisHttpProxy::ListShardsEmptyFields >> TCdcStreamTests::CopyTableShouldNotCopyStream [GOOD] >> TCdcStreamTests::MoveTableShouldFail >> TestYmqHttpProxy::TestSetQueueAttributes [GOOD] >> TestMalformedRequest::CompressedGzipContentLengthHigher [GOOD] >> KqpOverload::OltpOverloaded-Distributed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpWrite::ProjectReplace-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 62998, MsgBus: 26011 2026-01-04T02:50:14.973691Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333282966232670:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:14.973774Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:15.263804Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:15.319493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:15.319654Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:15.373941Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333282966232625:2081] 1767495014957564 != 1767495014957567 2026-01-04T02:50:15.383181Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:15.385282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:15.582193Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] 
Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:15.658652Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:15.658672Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:15.658689Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:15.658772Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:15.987161Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:16.238464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:16.248863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:16.297169Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.437282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.598517Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.666952Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:18.611220Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333300146103687:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:18.611341Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:18.611667Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333300146103697:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:18.611717Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:18.945911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:18.981454Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:19.013795Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:19.057780Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:19.091476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:19.153388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:19.193468Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:19.241902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:19.330168Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333304441071872:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:19.330211Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:19.330473Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333304441071877:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:19.330549Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333304441071878:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:19.330617Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:19.334007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:19.346470Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333304441071881:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:19.438268Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333304441071932:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:19.974090Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333282966232670:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:19.974155Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 3326, MsgBus: 20509 2026-01-04T02:50:22.455212Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591333315897476748:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:22.455259Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:22.479396Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:22.593765Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:22.608445Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:22.608524Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:22.631459Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:22.641762Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:22.721728Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:22.721752Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:22.721760Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:22.721856Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:23.224200Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:23.230158Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 
2026-01-04T02:50:23.245405Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.318994Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.461577Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:23.467805Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.539616Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:25.989548Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333328782380459:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:25.989622Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:25.989916Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333328782380468:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:25.989967Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.065220Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.103031Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.136367Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.168051Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.199785Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.237681Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.275615Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.324003Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.403784Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333333077348634:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.403862Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.403924Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333333077348639:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.404157Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333333077348641:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.404201Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.406863Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:26.416488Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591333333077348642:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:26.495440Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591333333077348696:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:27.455948Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591333315897476748:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:27.456016Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Duplicates-UseSink [GOOD] >> TestYmqHttpProxy::TestReceiveMessageWithAttemptId [GOOD] >> TestYmqHttpProxy::TestTagQueue >> KqpImmediateEffects::UpsertAfterInsert >> KqpEffects::AlterAfterUpsertTransaction+UseSink [GOOD] >> KqpEffects::AlterAfterUpsertTransaction-UseSink >> KqpImmediateEffects::Replace >> KqpInplaceUpdate::SingleRowStr-UseSink >> KqpEffects::UpdateOn_Select [GOOD] >> KqpFail::Immediate >> TestKinesisHttpProxy::GoodRequestGetRecords [GOOD] >> KqpImmediateEffects::TxWithWriteAtTheEnd+UseSink [GOOD] >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WR2 >> TestYmqHttpProxy::TestListQueues >> KqpEffects::EmptyUpdate+UseSink [GOOD] >> KqpEffects::EmptyUpdate-UseSink >> KqpPg::InsertNoTargetColumns_ColumnOrder-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize+useSink >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor >> KqpEffects::InsertAbort_Params_Success >> TCdcStreamTests::MoveTableShouldFail [GOOD] >> TCdcStreamTests::CheckSchemeLimits ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Duplicates-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 10971, MsgBus: 25749 2026-01-04T02:50:07.954421Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333251478264176:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:07.954531Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:08.283462Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:08.283605Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:08.283965Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:08.311177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:08.345914Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: 
Notification cookie mismatch for subscription [1:7591333251478264151:2081] 1767495007953159 != 1767495007953162 2026-01-04T02:50:08.346575Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:08.466179Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:08.466208Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:08.466241Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:08.466360Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:08.466861Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:08.972130Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:09.050168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:09.130247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:09.304221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:09.517616Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:09.628996Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:11.471603Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333268658135214:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:11.471761Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:11.472301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333268658135224:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:11.472350Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:11.842736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:11.873661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:11.901741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:11.928434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:11.969352Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:12.031521Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:12.061122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:12.105444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:12.204741Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333272953103401:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:12.204858Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333272953103406:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:12.204913Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:12.205645Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333272953103409:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:12.205715Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:12.208734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:12.219000Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333272953103408:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:12.292152Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333272953103461:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:12.954777Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333251478264176:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:12.954866Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:13.987474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... or outdated, will use file: (empty maybe) 2026-01-04T02:50:23.173671Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:23.173679Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:23.173760Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:23.201898Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:23.706017Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:23.725727Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.794425Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.915192Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:23.969535Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:24.049272Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.406224Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333335815230474:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.406317Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.406620Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333335815230483:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.406694Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.479386Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.509649Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.537950Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.569900Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.601571Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.635040Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.666393Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.709294Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.800543Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333335815231356:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.800633Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.800714Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333335815231361:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.800859Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333335815231363:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.800920Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.804535Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:26.813727Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333335815231364:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:26.903826Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333335815231416:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:27.876894Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333318635359491:2086];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:27.876972Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:28.757528Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:29.341723Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7591333348700133730:2550], TxId: 281474976710677, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3ekm6w33qnhr76n127hyaa. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=NTIyYTc2ZjItZTNlZDNjMjEtMzA0ZWY3ZTktZDkwOTFhNGY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Duplicated keys found., code: 2012 }. 2026-01-04T02:50:29.341984Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7591333348700133732:2551], TxId: 281474976710677, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3ekm6w33qnhr76n127hyaa. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=NTIyYTc2ZjItZTNlZDNjMjEtMzA0ZWY3ZTktZDkwOTFhNGY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7591333348700133727:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-04T02:50:29.342460Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=NTIyYTc2ZjItZTNlZDNjMjEtMzA0ZWY3ZTktZDkwOTFhNGY=, ActorId: [3:7591333344405166272:2521], ActorState: ExecuteState, LegacyTraceId: 01ke3ekm6w33qnhr76n127hyaa, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Duplicated keys found." issue_code: 2012 severity: 1 } trace_id# |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TCdcStreamWithInitialScanTests::WithoutPqTransactions [GOOD] >> TCdcStreamWithInitialScanTests::WithPqTransactions ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/unittest >> TestMalformedRequest::CompressedGzipContentLengthHigher [GOOD] Test command err: 2026-01-04T02:48:17.136422Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332780238716610:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:17.136560Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:17.317833Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:17.368547Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:17.368667Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:17.402577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:17.491163Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:17.492541Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332780238716580:2081] 1767494897134699 != 1767494897134702 2026-01-04T02:48:17.539251Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:17.539274Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:17.539281Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:17.539395Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:17.573783Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:17.737238Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:17.918645Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:48:17.925109Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:48:17.985972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:18.091011Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:18.125470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:18.145276Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:18.164847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:18.197888Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:18.240187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:18.281797Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:18.316074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:18.347790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:18.377878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.147575Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332793123620119:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:20.147684Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:20.148151Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332793123620131:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:20.148188Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332793123620132:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:20.148348Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:20.152597Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:20.172628Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332793123620135:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:48:20.261348Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332793123620186:3102] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:48:20.680649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.730091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.782637Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.820161Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.862136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.905079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeO ... 
:29.447458Z node 2 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:50:29.447494Z node 2 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 4ms 2026-01-04T02:50:29.447921Z node 2 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { 
Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:50:29.447953Z node 2 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-04T02:50:29.448056Z node 2 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 5ms 2026-01-04T02:50:29.448868Z node 2 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710711 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:50:29.553419Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333349056505485:2782], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:29.553520Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7591333349056505486:2783], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:50:29.553574Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:29.554439Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333349056505489:2784], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:29.554500Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:29.558196Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591333349056505483:2781]: Pool not found 2026-01-04T02:50:29.558475Z node 2 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-04T02:50:30.169012Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333353351472806:2789], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:30.169107Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7591333353351472807:2790], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:50:30.169161Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:30.169984Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333353351472810:2791], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:30.170059Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:30.176569Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591333353351472804:2788]: Pool not found 2026-01-04T02:50:30.177054Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-04T02:50:30.180075Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333353351472830:2796], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:30.180160Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [2:7591333353351472831:2797], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:50:30.180209Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:30.180779Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333353351472834:2798], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:30.181103Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:30.185687Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [2:7591333353351472828:2795]: Pool not found 2026-01-04T02:50:30.186341Z node 2 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-04T02:50:30.319613Z node 2 :HTTP DEBUG: http_proxy_incoming.cpp:219: (#37,[::1]:46376) connection closed by inactivity timeout |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/unittest >> KqpEffects::UpdateOn_Literal ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Literal_Conflict-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 21194, MsgBus: 3735 2026-01-04T02:50:09.626378Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333261424006891:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:09.626823Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:10.033954Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:10.076055Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:10.076182Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:10.155754Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:10.159954Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:10.214642Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:10.289481Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:10.289506Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:10.289513Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:10.289617Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:10.624676Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:10.790119Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:10.882840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:11.044309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:11.199825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:11.302176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.211654Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333278603877714:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:13.211774Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:13.214051Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333278603877724:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:13.214130Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:13.580939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.617490Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.650730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.737282Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.776079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.813329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.855235Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.904731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:14.004100Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333282898845897:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:14.004189Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:14.004523Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333282898845903:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:14.004532Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333282898845902:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:14.004612Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:14.008150Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:14.018457Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333282898845906:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:14.112700Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333282898845957:3768] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:14.624888Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333261424006891:2260];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:14.624949Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:15.746338Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=3; 2026-01-04T02:50:15.757412Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 3 at tablet 72075186224037888 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-04T02:50:15.764316Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors whi ... ode_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:24.111898Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:24.232086Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:24.232114Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:24.232122Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:24.232196Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:24.245154Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:24.662717Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:24.680249Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:24.758721Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:24.921561Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:24.990429Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:25.020841Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.574931Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333340714124727:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:27.575013Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:27.575206Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333340714124736:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:27.575247Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:27.641670Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.672275Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.705556Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.737095Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.770520Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.809857Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.849415Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.902994Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.003609Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333345009092904:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.003691Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.003907Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333345009092909:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.003967Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333345009092910:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.004023Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.007426Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:28.020139Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333345009092913:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:28.096408Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333345009092964:3766] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:28.973909Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333323534253800:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:28.973970Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:30.134579Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7591333353599027870:2536], TxId: 281474976710674, task: 1. Ctx: { TraceId : 01ke3ekn27cfb3a23093hzmdwj. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=N2JlZWE1MzAtYzVkYzhhMjAtNTUwMTg1ODgtZjU3ZThkYmY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-04T02:50:30.134865Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7591333353599027872:2537], TxId: 281474976710674, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3ekn27cfb3a23093hzmdwj. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=N2JlZWE1MzAtYzVkYzhhMjAtNTUwMTg1ODgtZjU3ZThkYmY=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7591333353599027867:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-04T02:50:30.135299Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=N2JlZWE1MzAtYzVkYzhhMjAtNTUwMTg1ODgtZjU3ZThkYmY=, ActorId: [3:7591333349304060523:2521], ActorState: ExecuteState, LegacyTraceId: 01ke3ekn27cfb3a23093hzmdwj, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.7%| [TA] $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.7%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/http_proxy/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpPg::PgAggregate-useSink [GOOD] >> KqpPg::MkqlTerminate >> KqpImmediateEffects::UpdateAfterUpsert [GOOD] >> KqpImmediateEffects::UpdateAfterInsert >> KqpImmediateEffects::MultiShardUpsertAfterRead [GOOD] >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd >> KqpImmediateEffects::Delete >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink [GOOD] >> TCdcStreamWithInitialScanTests::WithPqTransactions [GOOD] >> TCdcStreamWithInitialScanTests::AlterStream >> KqpPg::TableSelect-useSink [GOOD] >> KqpPg::TableInsert+useSink >> TestKinesisHttpProxy::TestUnauthorizedPutRecords [GOOD] >> KqpEffects::RandomWithIndex+UseSecondaryIndex+UseSink [GOOD] >> TCdcStreamTests::CheckSchemeLimits [GOOD] >> TCdcStreamTests::MeteringServerless >> TestKinesisHttpProxy::TestListStreamConsumersWithMaxResults [GOOD] >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumer [GOOD] >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] >> KqpEffects::EffectWithSelect+UseSink [GOOD] >> KqpImmediateEffects::Insert >> TestKinesisHttpProxy::TestWrongStream >> KqpImmediateEffects::ConflictingKeyR1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyR1WRR2 >> TestKinesisHttpProxy::ListShardsEmptyFields [GOOD] >> TestYmqHttpProxy::TestCreateQueueWithTags [GOOD] >> KqpPg::Returning-useSink [GOOD] >> KqpPg::SelectIndex+useSink >> TestKinesisHttpProxy::TestListStreamConsumersWithToken >> KqpEffects::EmptyUpdate-UseSink [GOOD] >> KqpEffects::EffectWithSelect-UseSink >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ForceImmediateEffectsExecution+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12087, MsgBus: 19262 2026-01-04T02:50:08.709032Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333255616897577:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:08.709112Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:09.065535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:09.065600Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:09.065892Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, 
scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:09.169444Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333255616897539:2081] 1767495008706915 != 1767495008706918 2026-01-04T02:50:09.170840Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:09.184159Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:09.356910Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:09.442427Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:09.442449Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:09.442455Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:09.442563Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:09.722114Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:09.975239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:10.068475Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:10.264817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:10.433970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:10.505719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:12.444224Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333272796768600:2399], DatabaseId: /Root, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:12.444341Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:12.444981Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333272796768609:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:12.445045Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:12.817994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:12.855306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:12.928205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:12.971959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.010219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.052803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.100696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.183088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.262794Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333277091736780:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:13.262856Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:13.263088Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333277091736785:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:13.263114Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333277091736786:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:13.263199Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:13.266491Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:13.276754Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333277091736789:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:13.335064Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333277091736840:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:13.709906Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333255616897577:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:13.709982Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:14.982622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_crea ... 46131:7762515]; 2026-01-04T02:50:27.833340Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:27.839403Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:27.924095Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:27.929998Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:27.932390Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591333336841110969:2081] 1767495027825151 != 1767495027825154 2026-01-04T02:50:27.953422Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:27.953519Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:27.970240Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:28.033044Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:28.033066Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:28.033073Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:28.033145Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:28.122591Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:28.422399Z node 3 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:28.439538Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.496637Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.654875Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.724380Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.863651Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:31.407283Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333354020982033:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.407381Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.407679Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333354020982043:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.407724Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.478524Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.533290Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.572140Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.652529Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.691080Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.733771Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.795129Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.863716Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.977733Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333354020982918:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.977862Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.978320Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333354020982923:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.978368Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333354020982924:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.978632Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.983145Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:32.002097Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333354020982927:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:32.060486Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333358315950274:3777] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:32.827137Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333336841111071:2140];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:32.827215Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:33.716467Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::RandomWithIndex+UseSecondaryIndex+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 61168, MsgBus: 24011 2026-01-04T02:50:04.403013Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333241486293020:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:04.403103Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:04.738328Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:04.764871Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:04.764997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:04.766577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:04.936924Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:04.954431Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:05.186501Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:05.186526Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:05.186541Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:05.186617Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:05.411309Z node 1 
:TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:05.841877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:05.917883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.078556Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.245602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.336408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.010824Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333254371196743:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.010966Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.011342Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333258666164049:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.011384Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.512284Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.569528Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.605329Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.646068Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.686546Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.736125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.820002Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.875078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:09.001655Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333258666164920:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.001753Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.002056Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333262961132222:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.002114Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333262961132221:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.002227Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.005461Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:09.018333Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333262961132225:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:09.111919Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333262961132276:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:09.403320Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333241486293020:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:09.403381Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:11.106481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:11.180086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOp ... pp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:24.783679Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:24.785982Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591333326949603548:2081] 1767495024594718 != 1767495024594721 2026-01-04T02:50:24.801576Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:24.801665Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:24.830045Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:24.936661Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:24.947292Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:24.947319Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:24.947327Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:24.947408Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:25.491254Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T02:50:25.497846Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:25.507531Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:25.574991Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:25.692187Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:25.754064Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:25.842010Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.439453Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333344129474599:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.439535Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.439743Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333344129474608:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.439782Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.517499Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.571126Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.603651Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.633631Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.663855Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.701240Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.739518Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.791204Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.881959Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333344129475483:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.882063Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.882237Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333344129475488:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.882391Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333344129475490:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.882440Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.887431Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:28.906559Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333344129475492:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:29.008804Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333348424442839:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:29.597106Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333326949603593:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:29.597831Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:30.875072Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:30.954058Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TestYmqHttpProxy::TestDeleteMessage >> TCdcStreamWithInitialScanTests::AlterStream [GOOD] >> TCdcStreamWithInitialScanTests::DropStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 16794, MsgBus: 19525 ... waiting for SysViewsRoster update finished 2026-01-04T02:50:11.447148Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:11.580545Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:50:11.581037Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:50:11.581098Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:50:11.884941Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:11.885093Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:11.971313Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495008305500 != 1767495008305503 2026-01-04T02:50:11.988574Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:12.039071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:12.170477Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:50:12.574051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:12.574163Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:12.574237Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:12.574440Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:12.588337Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:12.969948Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:13.069664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.349548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:13.750970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:14.050464Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:14.851938Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1902:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:14.852083Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:14.853073Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1975:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:14.853210Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:14.883097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:15.116685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:15.372828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:15.690065Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:15.944402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.280320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.573121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:16.944099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:17.326961Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2787:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:17.327081Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:17.327499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2791:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:17.327588Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:17.327691Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:17.334957Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:17.525918Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2796:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:17.596486Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2856:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:20.390413Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:3181:4452] TxId: 281474976715674. Ctx: { TraceId: 01ke3ekaw153bh9cbnx3b547zt, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWFlOWNlNjctNTY5ZTBhODAtNDAyZjkyYjktMzlmMWVkZjE=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Client lost } trace_id# 2026-01-04T02:50:20.390701Z node 1 :RPC_REQUEST WARN: rpc_execute_query.cpp:487: Client lost 2026-01-04T02:50:20. ... alization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:28.323186Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:28.397349Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:28.397428Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:28.397923Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:28.407548Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591333342496543525:2081] 1767495028313024 != 1767495028313027 2026-01-04T02:50:28.408827Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:28.410969Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:28.490249Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:28.490270Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:28.490274Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:28.490326Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:28.688208Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:28.988096Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:28.998440Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:29.077715Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:29.153777Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:29.312405Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:29.318217Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:29.394219Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.702878Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333355381447275:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.703018Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.703560Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333355381447285:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.703632Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.777705Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.848404Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.886283Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.923789Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.957207Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:32.003989Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:32.040612Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:32.120054Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:32.231426Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333359676415457:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:32.231521Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:32.231821Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333359676415462:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:32.231891Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333359676415463:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:32.231930Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:32.235444Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:32.251989Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333359676415466:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:32.332664Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333359676415518:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:33.317603Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333342496543639:2144];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:33.317678Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:34.076237Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::EffectWithSelect+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 3937, MsgBus: 8121 2026-01-04T02:50:04.400541Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333242149787654:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:04.400611Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:04.767275Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:04.767403Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:04.790609Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:04.816691Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:04.846892Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:05.056108Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:05.190522Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:05.190539Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:05.190563Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:05.190641Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 
2026-01-04T02:50:05.429983Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:05.838340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:05.885230Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.053837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.228160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.316660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:07.861453Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333255034691373:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:07.861583Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:07.862006Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333255034691382:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:07.862108Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.512940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.553512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.622368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.663445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.690872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.729038Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.808382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.878401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.974115Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333259329659559:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.974228Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.974602Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333259329659564:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.974640Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333259329659565:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.974928Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.979058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:08.992785Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333259329659568:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:09.083862Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333263624626915:3769] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:09.402189Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333242149787654:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:09.402263Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:11.004943Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:11.064705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCr ... posed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:20.124909Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 31247, MsgBus: 28743 2026-01-04T02:50:22.466806Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:50:22.495592Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:22.563499Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:22.578110Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:22.578197Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:22.582568Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:22.677410Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:22.694458Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, 
will use file: (empty maybe) 2026-01-04T02:50:22.694481Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:22.694488Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:22.694587Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:23.216207Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:23.222972Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:50:23.238374Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.321399Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.446284Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:23.486334Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:23.568759Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.075071Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333336778192393:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.075164Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.075437Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333336778192403:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.075540Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.146176Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.178376Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.206667Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.264625Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.297910Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.331640Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.369355Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.417825Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.496963Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333336778193278:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.497056Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.497158Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333336778193283:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.497640Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333336778193285:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.497758Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:26.500944Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:26.512240Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333336778193286:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:26.576649Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333336778193338:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:28.271251Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterAfterUpsertTransaction-UseSink [GOOD] >> KqpEffects::DeleteWithIndex+UseSecondaryIndex+UseSink [GOOD] >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction-UseSink >> TestYmqHttpProxy::TestTagQueue [GOOD] >> TTxLocatorTest::TestWithReboot >> KqpImmediateEffects::UpsertAfterInsert [GOOD] >> KqpImmediateEffects::UpsertAfterInsertWithIndex >> KqpImmediateEffects::Replace [GOOD] >> KqpImmediateEffects::ReplaceDuplicates >> TestKinesisHttpProxy::GoodRequestGetRecordsCbor [GOOD] >> KqpInplaceUpdate::SingleRowStr-UseSink [GOOD] >> KqpOverload::OltpOverloaded+Distributed >> TestYmqHttpProxy::TestUntagQueue >> KqpImmediateEffects::TxWithWriteAtTheEnd-UseSink [GOOD] >> KqpImmediateEffects::UnobservedUncommittedChangeConflict >> KqpImmediateEffects::ConflictingKeyW1WR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RWR2 >> TTxLocatorTest::TestWithReboot [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_NotOneSize-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::DeleteWithIndex+UseSecondaryIndex+UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 61261, MsgBus: 8384 2026-01-04T02:50:04.414844Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333240305177745:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:04.418150Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:04.764756Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:04.764856Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:04.883566Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:04.889562Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:04.889909Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333240305177712:2081] 1767495004403945 != 1767495004403948 2026-01-04T02:50:04.907442Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 
2026-01-04T02:50:05.111210Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:05.188850Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:05.188863Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:05.188871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:05.188918Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:05.440871Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:05.829786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:05.836832Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:05.885505Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.057923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.253939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:06.352986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.090742Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333257485048770:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.090857Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.091282Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333257485048780:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.091335Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:08.516146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.561305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.596796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.647205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.690911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.755246Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.884160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:08.972371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:09.065836Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333261780016955:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.065918Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.066059Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333261780016960:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.066111Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333261780016961:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.066229Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:09.070792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:09.086508Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333261780016964:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:09.183330Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333261780017015:3771] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:09.414779Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333240305177745:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:09.414876Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:11.210563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... tect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:25.709218Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:25.855398Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591333329890414466:2081] 1767495025653873 != 1767495025653876 2026-01-04T02:50:25.867116Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:25.867220Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:25.868128Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:25.873391Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:25.969287Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:25.969311Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:25.969319Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:25.969404Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:25.991872Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:26.434846Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:26.454906Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.515404Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.676367Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.716601Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:26.736925Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:29.291190Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333347070285517:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:29.291291Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:29.291629Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333347070285527:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:29.291674Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:29.364041Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:29.419971Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:29.479663Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:29.519073Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:29.579312Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:29.620465Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:29.660993Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:29.716583Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:29.811434Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333347070286397:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:29.811542Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:29.811786Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333347070286402:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:29.811845Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333347070286403:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:29.811949Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:29.816081Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:29.827840Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333347070286406:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:29.889990Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333347070286457:3768] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:30.654656Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333329890414493:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:30.654736Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:31.689309Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.773126Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpPg::CheckPgAutoParams+useSink [GOOD] >> KqpPg::CheckPgAutoParams-useSink >> KqpEffects::InsertAbort_Params_Success [GOOD] >> KqpEffects::InsertAbort_Select_Conflict+UseSink >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName >> TCdcStreamWithInitialScanTests::DropStream [GOOD] >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart >> KqpEffects::UpdateOn_Literal [GOOD] >> KqpEffects::UpdateOn_Params |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/tx_allocator/ut/unittest >> TTxLocatorTest::TestWithReboot [GOOD] Test command err: 2026-01-04T02:50:38.280306Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:2150: Tablet: 72057594046447617 LockedInitializationPath Marker# TSYS32 2026-01-04T02:50:38.283929Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:979: Tablet: 72057594046447617 HandleFindLatestLogEntry, NODATA Promote Marker# TSYS19 2026-01-04T02:50:38.286537Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. 
logid# [72057594046447617:2:0:0:0:0:0] Marker# TSYS01 2026-01-04T02:50:38.299465Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.303265Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-04T02:50:38.319991Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:1:28672:35:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.320125Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.320274Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 2:0 Marker# TSYS28 2026-01-04T02:50:38.320550Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:1:8192:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.320642Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:2:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.320759Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-04T02:50:38.320893Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 2, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-04T02:50:38.324248Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:80:2115] requested range size#100000 2026-01-04T02:50:38.326616Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:82:2117] requested range size#100000 2026-01-04T02:50:38.327677Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:84:2119] requested range size#100000 2026-01-04T02:50:38.328076Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:86:2121] requested range size#100000 2026-01-04T02:50:38.328586Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:88:2123] requested range size#100000 2026-01-04T02:50:38.328983Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:90:2125] requested range size#100000 2026-01-04T02:50:38.329292Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:1:24576:70:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.329495Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:72:2107] requested range size#100000 2026-01-04T02:50:38.329666Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:3:0:0:69:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.329744Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 
0} 2026-01-04T02:50:38.330071Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:4:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.330167Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:74:2109] requested range size#100000 2026-01-04T02:50:38.330331Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.330446Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:76:2111] requested range size#100000 2026-01-04T02:50:38.330616Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:5:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.330666Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.330798Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:6:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.330882Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:60: tablet# 72057594046447617 HANDLE TEvAllocate Sender# [1:78:2113] requested range size#100000 2026-01-04T02:50:38.331163Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.331312Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 0 Reserved to# 100000 2026-01-04T02:50:38.331359Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:80:2115] TEvAllocateResult from# 0 to# 100000 2026-01-04T02:50:38.331481Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:7:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.331578Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 100000 Reserved to# 200000 2026-01-04T02:50:38.331607Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:82:2117] TEvAllocateResult from# 100000 to# 200000 2026-01-04T02:50:38.331674Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 200000 Reserved to# 300000 2026-01-04T02:50:38.331723Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:84:2119] TEvAllocateResult from# 200000 to# 300000 2026-01-04T02:50:38.331838Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.331945Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 300000 Reserved to# 400000 2026-01-04T02:50:38.331969Z node 1 :TX_ALLOCATOR DEBUG: 
txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:86:2121] TEvAllocateResult from# 300000 to# 400000 2026-01-04T02:50:38.332053Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.332161Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:1:24576:74:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.332221Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 400000 Reserved to# 500000 2026-01-04T02:50:38.332250Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:88:2123] TEvAllocateResult from# 400000 to# 500000 2026-01-04T02:50:38.332365Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.332443Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.332524Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 500000 Reserved to# 600000 2026-01-04T02:50:38.332562Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:90:2125] TEvAllocateResult from# 500000 to# 600000 2026-01-04T02:50:38.332673Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.332732Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 600000 Reserved to# 700000 2026-01-04T02:50:38.332767Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:72:2107] TEvAllocateResult from# 600000 to# 700000 2026-01-04T02:50:38.332917Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.332969Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 700000 Reserved to# 800000 2026-01-04T02:50:38.333013Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:74:2109] TEvAllocateResult from# 700000 to# 800000 2026-01-04T02:50:38.333138Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.333233Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:1:24576:76:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.333279Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 800000 Reserved to# 900000 2026-01-04T02:50:38.333304Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 
72057594046447617 Send to Sender# [1:76:2111] TEvAllocateResult from# 800000 to# 900000 2026-01-04T02:50:38.333391Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:2:12:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.333449Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 900000 Reserved to# 1000000 2026-01-04T02:50:38.333473Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:78:2113] TEvAllocateResult from# 900000 to# 1000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2026-01-04T02:50:38.342636Z node 1 :TABLET_MAIN NOTICE: tablet_sys.cpp:1925: Tablet: 7205759404 ... # 9500000 2026-01-04T02:50:38.701358Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:619:2556] TEvAllocateResult from# 9400000 to# 9500000 2026-01-04T02:50:38.701416Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:8:1:24576:75:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.701462Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9500000 Reserved to# 9600000 2026-01-04T02:50:38.701481Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:621:2558] TEvAllocateResult from# 9500000 to# 9600000 2026-01-04T02:50:38.701617Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:8:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.701789Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9600000 Reserved to# 9700000 2026-01-04T02:50:38.701838Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:623:2560] TEvAllocateResult from# 9600000 to# 9700000 2026-01-04T02:50:38.701938Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:9:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.702051Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:9:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.702108Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:10:1:24576:78:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.702196Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9700000 Reserved to# 9800000 2026-01-04T02:50:38.702232Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:625:2562] TEvAllocateResult from# 9700000 to# 9800000 2026-01-04T02:50:38.702331Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:10:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.702387Z node 1 
:TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9800000 Reserved to# 9900000 2026-01-04T02:50:38.702403Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:627:2564] TEvAllocateResult from# 9800000 to# 9900000 2026-01-04T02:50:38.702457Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:11:1:24576:72:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.702533Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:11:11:0:0:71:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.702598Z node 1 :TX_ALLOCATOR DEBUG: txallocator__reserve.cpp:56: tablet# 72057594046447617 TTxReserve Complete Successed# 1 Reserved from# 9900000 Reserved to# 10000000 2026-01-04T02:50:38.702622Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:70: tablet# 72057594046447617 Send to Sender# [1:629:2566] TEvAllocateResult from# 9900000 to# 10000000 expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS expected SUCCESS 2026-01-04T02:50:38.706458Z node 1 :TABLET_MAIN NOTICE: tablet_sys.cpp:1925: Tablet: 72057594046447617 Type: TxAllocator, EReason: ReasonPill, SuggestedGeneration: 0, KnownGeneration: 11 Marker# TSYS31 2026-01-04T02:50:38.707468Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:876: Tablet: 72057594046447617 HandleStateStorageInfoResolve, KnownGeneration: 11 Promote Marker# TSYS16 2026-01-04T02:50:38.708155Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:421: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::ProcessKeyEntry, LastBlobID: [72057594046447617:11:11:0:0:71:0] Snap: 11:1 for 72057594046447617 Marker# TRRH04 2026-01-04T02:50:38.708210Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:11:0:0:71:0], refs: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617 2026-01-04T02:50:38.708353Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:1:0:0:42:0], refs: [[72057594046447617:11:1:1:28672:1485:0],] for 72057594046447617 2026-01-04T02:50:38.708417Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:2:0:0:69:0], refs: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2026-01-04T02:50:38.708460Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:3:0:0:71:0], refs: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2026-01-04T02:50:38.708506Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:4:0:0:71:0], refs: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2026-01-04T02:50:38.708587Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:5:0:0:71:0], refs: 
[[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617 2026-01-04T02:50:38.708628Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:6:0:0:71:0], refs: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617 2026-01-04T02:50:38.708680Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:7:0:0:71:0], refs: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617 2026-01-04T02:50:38.708718Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:8:0:0:71:0], refs: [[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617 2026-01-04T02:50:38.708746Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:9:0:0:71:0], refs: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617 2026-01-04T02:50:38.708770Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:356: TTabletReqRebuildHistoryGraph::ProcessLogEntry - TabletID: 72057594046447617, id [72057594046447617:11:10:0:0:71:0], refs: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617 2026-01-04T02:50:38.708866Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:632: TabletId# 72057594046447617 TTabletReqRebuildHistoryGraph::BuildHistory - Process generation 11 from 1 with 11 steps Marker# TRRH09 2026-01-04T02:50:38.708900Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:736: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:1:1:28672:1485:0],] for 72057594046447617 2026-01-04T02:50:38.708956Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:736: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:2:1:24576:76:0],] for 72057594046447617 2026-01-04T02:50:38.709045Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:736: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:3:1:24576:78:0],] for 72057594046447617 2026-01-04T02:50:38.709081Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:736: TTabletReqRebuildHistoryGraph::BuildHistory - NOT A TAIL - References: [[72057594046447617:11:4:1:24576:75:0],] for 72057594046447617 2026-01-04T02:50:38.709112Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:5:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:5:1:24576:78:0],] 2026-01-04T02:50:38.709137Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:6:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:6:1:24576:78:0],] 2026-01-04T02:50:38.709168Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:7:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:7:1:24576:78:0],] 2026-01-04T02:50:38.709189Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: 
[[72057594046447617:11:8:1:24576:75:0],] for 72057594046447617, Gc+: [[72057594046447617:11:8:1:24576:75:0],] 2026-01-04T02:50:38.709216Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:9:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:9:1:24576:78:0],] 2026-01-04T02:50:38.709248Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:10:1:24576:78:0],] for 72057594046447617, Gc+: [[72057594046447617:11:10:1:24576:78:0],] 2026-01-04T02:50:38.709270Z node 1 :TABLET_MAIN DEBUG: tablet_req_rebuildhistory.cpp:698: TTabletReqRebuildHistoryGraph::BuildHistory - THE TAIL - References: [[72057594046447617:11:11:1:24576:72:0],] for 72057594046447617, Gc+: [[72057594046447617:11:11:1:24576:72:0],] 2026-01-04T02:50:38.709442Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:233: Tablet: 72057594046447617 TTablet::WriteZeroEntry. logid# [72057594046447617:12:0:0:0:0:0] Marker# TSYS01 2026-01-04T02:50:38.710431Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:12:0:0:0:20:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.713411Z node 1 :TX_ALLOCATOR DEBUG: txallocator_impl.cpp:17: tablet# 72057594046447617 OnActivateExecutor 2026-01-04T02:50:38.713600Z node 1 :TX_ALLOCATOR DEBUG: txallocator__scheme.cpp:22: tablet# 72057594046447617 TTxSchema Complete 2026-01-04T02:50:38.714187Z node 1 :TABLET_MAIN INFO: tablet_sys.cpp:1081: Tablet: 72057594046447617 Active! Generation: 12, Type: TxAllocator started in 0msec Marker# TSYS24 2026-01-04T02:50:38.714231Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:12:1:1:28672:1641:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.714323Z node 1 :TABLET_MAIN DEBUG: tablet_req_writelog.cpp:56: Put Result: TEvPutResult {Id# [72057594046447617:12:1:0:0:42:0] Status# OK StatusFlags# { Valid } ApproximateFreeSpaceShare# 0} 2026-01-04T02:50:38.714401Z node 1 :TABLET_MAIN DEBUG: tablet_sys.cpp:1470: Tablet: 72057594046447617 GcCollect 0 channel, tablet:gen:step => 12:0 Marker# TSYS28 |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/tx_allocator/ut/unittest >> TestYmqHttpProxy::TestListQueues [GOOD] |95.7%| [TA] $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {RESULT} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/tx_allocator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> KqpPg::MkqlTerminate [GOOD] >> KqpPg::NoSelectFullScan >> TestYmqHttpProxy::TestPurgeQueue >> KqpImmediateEffects::InteractiveTxWithWriteAtTheEnd [GOOD] >> KqpImmediateEffects::ManyFlushes >> KqpImmediateEffects::UpdateAfterInsert [GOOD] >> TestYmqHttpProxy::BillingRecordsForJsonApi [GOOD] >> KqpImmediateEffects::Delete [GOOD] >> KqpImmediateEffects::DeleteAfterInsert >> KqpOverload::OltpOverloaded-Distributed [GOOD] >> KqpReattach::ReattachDeliveryProblem >> TestYmqHttpProxy::TestChangeMessageVisibility >> TCdcStreamWithInitialScanTests::RacyAlterStreamAndRestart [GOOD] >> TCdcStreamWithInitialScanTests::MeteringServerless |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.7%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.7%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TestKinesisHttpProxy::TestWrongStream [GOOD] |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpdateAfterInsert [GOOD] Test command err: Trying to start YDB, gRPC: 15587, MsgBus: 23023 2026-01-04T02:50:27.198449Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333340929056045:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:27.201277Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:27.424890Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:27.459393Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:27.459501Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:27.497671Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:27.499210Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333340929056008:2081] 1767495027196417 != 1767495027196420 2026-01-04T02:50:27.514998Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:27.600646Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, 
path: Root/.metadata/script_executions 2026-01-04T02:50:27.623511Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:27.623538Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:27.623551Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:27.623648Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:28.074365Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:28.087914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:50:28.148922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.206638Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:28.300946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.435139Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.496042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:30.455373Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333353813959778:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:30.455516Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:30.455848Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333353813959788:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:30.455882Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:30.853474Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:30.885336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:30.913488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:30.943655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:30.973738Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.013131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.057407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.097074Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.179972Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333358108927954:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.180060Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.180271Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333358108927959:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.180296Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333358108927960:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.180364Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.184212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:31.197666Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333358108927963:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:31.293254Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333358108928016:3776] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:32.201921Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333340929056045:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:32.201995Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:32.899878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperatio ... alization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:34.408025Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:34.475056Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:34.476304Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591333367120770581:2081] 1767495034398795 != 1767495034398798 2026-01-04T02:50:34.489025Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:34.522034Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:34.522109Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:34.532346Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:34.561960Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:34.561983Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:34.561990Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:34.562070Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:34.727166Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:34.917477Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:34.922647Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:50:34.936818Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.988986Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.137471Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.199883Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.408283Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:37.629670Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333380005674336:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:37.629788Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:37.630227Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333380005674346:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:37.630287Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:37.702370Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:37.737281Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:37.775830Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:37.806673Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:37.839676Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:37.872284Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:37.905685Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:37.951543Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:38.056691Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333384300642509:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:38.056780Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:38.056867Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333384300642514:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:38.056978Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333384300642516:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:38.057030Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:38.059851Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:38.067351Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591333384300642518:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:38.143487Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591333384300642569:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:39.401905Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591333367120770611:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:39.401986Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:39.651612Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyR1WRR2 [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RR2 >> KqpImmediateEffects::Insert [GOOD] >> KqpImmediateEffects::InsertDuplicates+UseSink >> KqpPg::SelectIndex+useSink [GOOD] >> KqpPg::SelectIndex-useSink >> KqpFail::Immediate [GOOD] >> KqpFail::OnCommit >> TestKinesisHttpProxy::TestListStreamConsumersWithToken [GOOD] >> TestKinesisHttpProxy::CreateDeleteStreamWithConsumerWithFlag [GOOD] >> TestKinesisHttpProxy::TestWrongStream2 >> TestKinesisHttpProxy::ListShardsExclusiveStartShardId [GOOD] >> TestYmqHttpProxy::TestDeleteMessage [GOOD] >> TestKinesisHttpProxy::TestCounters >> TestKinesisHttpProxy::BadRequestUnknownMethod |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TestKinesisHttpProxy::ListShardsTimestamp >> TestYmqHttpProxy::TestDeleteMessageBatch |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Validation |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction-UseSink [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> KqpImmediateEffects::ReplaceDuplicates [GOOD] >> KqpImmediateEffects::MultipleEffectsWithIndex >> TCdcStreamWithInitialScanTests::MeteringServerless [GOOD] >> TCdcStreamWithInitialScanTests::MeteringDedicated >> KqpEffects::UpdateOn_Params [GOOD] >> KqpEffects::RandomWithIndex-UseSecondaryIndex-UseSink >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] >> TestYmqHttpProxy::TestUntagQueue [GOOD] >> 
AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] >> KqpEffects::InsertAbort_Select_Conflict+UseSink [GOOD] >> KqpEffects::InsertAbort_Select_Conflict-UseSink >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::AlterAfterUpsertBeforeUpsertTransaction-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 12270, MsgBus: 22313 2026-01-04T02:50:24.152070Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333327562845495:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:24.157892Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:24.465962Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:24.478792Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:24.478940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:24.550693Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:24.578028Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:24.694352Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:24.694376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:24.694383Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:24.694473Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:24.715279Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:25.127090Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:25.170813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:25.182022Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:25.255300Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB 
called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:25.459819Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:25.626921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:25.701401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.397025Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333340447749203:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:27.397133Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:27.397501Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333340447749213:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:27.397569Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:27.745790Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.777580Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.809286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.839394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.873582Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.924402Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.996371Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.044985Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.120155Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333344742717384:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.120253Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.120431Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333344742717389:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.120482Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333344742717391:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.120766Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.124739Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:28.136551Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333344742717393:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:28.216946Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333344742717446:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:29.152513Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333327562845495:2085];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:29.152591Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:29.832947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:6 ... execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:37.723877Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:37.745008Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:37.745075Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:37.755528Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:37.791093Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:37.791119Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:37.791125Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:37.791177Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:37.875766Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:38.231626Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:38.239416Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:38.288495Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:38.423955Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:38.484982Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:38.649209Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:41.088662Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333396989682403:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:41.088756Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:41.088987Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333396989682413:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:41.089059Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:41.147240Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:41.176230Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:41.205435Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:41.233721Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:41.261207Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:41.293252Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:41.325252Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:41.367602Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:41.453258Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333396989683284:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:41.453335Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333396989683289:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:41.453383Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:41.453628Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333396989683292:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:41.453706Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:41.457208Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:41.466683Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333396989683291:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:41.521572Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333396989683344:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:42.638057Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333379809811570:2235];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:42.638120Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:43.176161Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:43.457678Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:50:43.588512Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=NGEzZTM3ZTUtOWJkZjE3ZGYtMTFjMWZmMjUtZjUyOGE0NmM=, ActorId: [3:7591333405579618200:2521], ActorState: ExecuteState, LegacyTraceId: 01ke3em2g0etybbwarraj7rez1, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. 
Table: `/Root/TestTable`" issue_code: 2001 severity: 1 } trace_id# |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TestKinesisHttpProxy::GoodRequestGetRecordsLongStreamName [GOOD] |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TestYmqHttpProxy::TestDeleteQueue [GOOD] >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight >> KqpPg::InsertNoTargetColumns_NotOneSize-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Alter+useSink |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> Secret::Simple >> TestKinesisHttpProxy::ErroneousRequestGetRecords |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TestYmqHttpProxy::TestListDeadLetterSourceQueues ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1RWR2 [GOOD] Test command err: Trying to start YDB, gRPC: 8038, MsgBus: 21502 2026-01-04T02:50:24.601295Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333327068060134:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:24.601354Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:24.926870Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:24.934378Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:24.940701Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:24.940818Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:24.973901Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:25.096093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:25.096121Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:25.096130Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:25.096223Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:25.115342Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: 
Root/.metadata/script_executions 2026-01-04T02:50:25.546513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:25.617869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:25.629603Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:25.809153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:25.966578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:26.040358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.744216Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333339952963853:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:27.744335Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:27.745013Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333339952963863:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:27.745066Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.068459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.109434Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.145378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.176012Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.207802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.240854Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.272792Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.315332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.385889Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333344247932029:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.385982Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.386054Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333344247932034:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.386221Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333344247932036:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.386271Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.389794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:28.399386Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333344247932038:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:28.500154Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333344247932089:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:29.601432Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333327068060134:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:29.601484Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:30.180675Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 61243, MsgBus: 64668 2026-01-04T02:50:31.651773Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undeliver ... d: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.674074Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.808428Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.846863Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:42.106435Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333404610080958:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:42.106536Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:42.106818Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333404610080968:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:42.106880Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:42.180354Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:42.211650Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:42.243253Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:42.269588Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:42.304176Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:42.337207Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:42.372254Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:42.414408Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:42.499863Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333404610081835:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:42.499942Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:42.500151Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333404610081841:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:42.500170Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333404610081840:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:42.500195Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:42.503572Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:42.512824Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333404610081844:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:42.596338Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333404610081895:3761] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:43.822796Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333387430209941:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:43.822874Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:44.168690Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:44.700130Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because it cannot acquire locks;tx_id=6; 2026-01-04T02:50:44.700382Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 6 at tablet 72075186224037927 errors: Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2026-01-04T02:50:44.700570Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 6 at tablet 72075186224037927 Status: STATUS_LOCKS_BROKEN Issues: { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } 2026-01-04T02:50:44.700759Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [3:7591333413200017059:2533], Table: `/Root/TestImmediateEffects` ([72057594046644480:54:1]), SessionActorId: [3:7591333413200016796:2533]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037927, Sink=[3:7591333413200017059:2533].{
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } 2026-01-04T02:50:44.701390Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7591333413200017015:2533], SessionActorId: [3:7591333413200016796:2533], statusCode=ABORTED. Issue=
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
: Error: Operation is aborting because it cannot acquire locks, code: 2001 . sessionActorId=[3:7591333413200016796:2533]. 2026-01-04T02:50:44.701697Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=ZmZlYjIxNDktYWU0MDkwZGMtZTk2NGM4YjktMzQ0NWUwZDg=, ActorId: [3:7591333413200016796:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3em3fha5b5354swncnp08h, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7591333413200017053:2533] from: [3:7591333413200017015:2533] trace_id# 2026-01-04T02:50:44.701836Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7591333413200017053:2533] TxId: 281474976710677. Ctx: { TraceId: 01ke3em3fha5b5354swncnp08h, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ZmZlYjIxNDktYWU0MDkwZGMtZTk2NGM4YjktMzQ0NWUwZDg=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because it cannot acquire locks, code: 2001 } } trace_id# 2026-01-04T02:50:44.702293Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZmZlYjIxNDktYWU0MDkwZGMtZTk2NGM4YjktMzQ0NWUwZDg=, ActorId: [3:7591333413200016796:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3em3fha5b5354swncnp08h, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/TestImmediateEffects`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because it cannot acquire locks" issue_code: 2001 severity: 1 } } trace_id# |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeSameOperationId [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:46:07.540524Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:07.617422Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:07.624707Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:07.624960Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:07.625050Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:07.923817Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:08.002689Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:08.002779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:08.049145Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:08.127620Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:08.753108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:08.753517Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:08.753556Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:08.753581Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:08.753988Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:08.816537Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:09.323390Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:11.857414Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:11.865915Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:11.869765Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:11.898300Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:11.898426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:11.926592Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:11.928374Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:12.107799Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:12.107906Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:12.109270Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.109929Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.110433Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.111452Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.111830Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.111963Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.112225Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.112375Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.112508Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:12.138656Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:12.348832Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:12.374371Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:12.374463Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:12.404849Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:12.406158Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:12.406391Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:12.406457Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:12.406520Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:12.406570Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:12.406624Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:12.406672Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:12.407190Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:12.407504Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:46:12.409846Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:12.468295Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:46:12.468355Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:46:12.468449Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:12.480087Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:12.490747Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:12.495061Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:12.498197Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:12.498298Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:12.526047Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:46:12.526415Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:46:12.641573Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:12.690745Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:12.905733Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:12.907425Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:12.910139Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:12.912517Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:12.912587Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... EvStatistics::TEvSaveStatisticsQueryResponse, TBasicString> const&, NKikimr::TPathId co... 2026-01-04T02:48:33.482743Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4591:3989], ActorId: [2:4592:3990], Got response [2:4593:3991] SUCCESS 2026-01-04T02:48:33.483287Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:48:33.484324Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:24: [72075186224037894] TTxAnalyze::Execute. ReplyToActorId [1:3158:3431] , Record { OperationId: "operationId" Tables { PathId { OwnerId: 72075186224037897 LocalId: 34 } } Types: TYPE_COUNT_MIN_SKETCH Database: "" } 2026-01-04T02:48:33.484387Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:55: [72075186224037894] TTxAnalyze::Execute. Create new force traversal operation, OperationId: `operationId', DatabaseName: `', Types: 1 2026-01-04T02:48:33.484438Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:78: [72075186224037894] TTxAnalyze::Execute. Create new force traversal table, OperationId: `operationId', PathId: [OwnerId: 72075186224037897, LocalPathId: 34], ColumnTags: 2026-01-04T02:48:33.515537Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:48:33.515617Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:49: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3158:3431] 2026-01-04T02:48:33.515674Z node 2 :STATISTICS DEBUG: tx_analyze.cpp:111: [72075186224037894] TTxAnalyze::Complete 2026-01-04T02:48:33.515897Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-04T02:48:33.516633Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4628:2462], ActorId: [2:4630:4016], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:48:33.520353Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4628:2462], ActorId: [2:4630:4016], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-04T02:48:33.520501Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4628:2462], ActorId: [2:4630:4016], Start read next stream part 2026-01-04T02:48:33.575008Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4641:4026]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:48:33.575353Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-04T02:48:33.575411Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:4641:4026], StatRequests.size() = 1 2026-01-04T02:49:46.254773Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4628:2462], ActorId: [2:4630:4016], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:49:46.255033Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4628:2462], ActorId: [2:4630:4016], Start read next stream part 2026-01-04T02:49:46.255790Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3eg3fg77rd1t14yaz1vf7y", SessionId: ydb://session/3?node_id=2&id=MWVhZDU4N2MtY2QwMDg5MGUtZjY2YmVjNGQtNWRjN2Q5MDM=, Slow query, duration: 72.731473s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:49:46.256428Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4628:2462], ActorId: [2:4798:4110], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:49:46.257348Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4628:2462], ActorId: [2:4630:4016], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:49:46.257417Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4628:2462], ActorId: [2:4630:4016], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=OTI2MTVjNTgtYWE0MTYwNGMtZDhmMmY0MDUtYmYwMzMyYjA=, TxId: 2026-01-04T02:49:46.257563Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32170, txId: 18446744073709551615] shutting down 2026-01-04T02:49:46.261568Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4628:2462], ActorId: [2:4798:4110], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-04T02:49:46.261694Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4628:2462], ActorId: [2:4798:4110], Start read next stream part 2026-01-04T02:49:46.332875Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:4817:4125]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:49:46.333282Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2026-01-04T02:49:46.333335Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 5, ReplyToActorId = [2:4817:4125], StatRequests.size() = 1 2026-01-04T02:50:39.141725Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4628:2462], ActorId: [2:4798:4110], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:50:39.141915Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4628:2462], ActorId: [2:4798:4110], Start read next stream part 2026-01-04T02:50:39.142952Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3ejagnb3w05wm407fztqrc", SessionId: ydb://session/3?node_id=2&id=YjQzOTE0Mi1jN2YwNGIwYS0yZjU1N2RjLWM5NjUyYmM0, Slow query, duration: 52.877454s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:50:39.145900Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4628:2462], ActorId: [2:4798:4110], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:50:39.145969Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4628:2462], ActorId: [2:4798:4110], Finish with SUCCESS, SessionId: 
ydb://session/3?node_id=2&id=ZGEzNmU1YzYtZTM3OGM1ZWEtZGMyMjQzODAtZDFiZDc5Zjc=, TxId: 2026-01-04T02:50:39.146093Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 32170, txId: 18446744073709551615] shutting down 2026-01-04T02:50:39.146321Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:4972:4208], ActorId: [2:4974:4210], Starting query actor #1 [2:4975:4211] 2026-01-04T02:50:39.146379Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4974:4210], ActorId: [2:4975:4211], Bootstrap. Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:50:39.149781Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:4974:4210], ActorId: [2:4975:4211], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=NDZhODMyNGEtMTZhNTUwZC03NjMwNjNhMS04NjA5YzU2OQ==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:50:39.194067Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4974:4210], ActorId: [2:4975:4211], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NDZhODMyNGEtMTZhNTUwZC03NjMwNjNhMS04NjA5YzU2OQ==, TxId: 2026-01-04T02:50:39.194204Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4974:4210], ActorId: [2:4975:4211], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NDZhODMyNGEtMTZhNTUwZC03NjMwNjNhMS04NjA5YzU2OQ==, TxId: 2026-01-04T02:50:39.194678Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4972:4208], ActorId: [2:4974:4210], Got response [2:4975:4211] SUCCESS 2026-01-04T02:50:39.195130Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:50:39.222514Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:50:39.222604Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3158:3431] 2026-01-04T02:50:39.285332Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-04T02:50:39.285437Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:50:39.368047Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:4234:3783], schemeshard count = 1 2026-01-04T02:50:44.089665Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:354: [72075186224037894] PropagateStatistics(), node count = 1, schemeshard count = 1 2026-01-04T02:50:44.089963Z node 2 :STATISTICS DEBUG: service_impl.cpp:943: EvPropagateStatistics, node id: 2 cookie: 1 2026-01-04T02:50:44.090099Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:258: [72075186224037894] EvPropagateStatisticsResponse, cookie: 1 2026-01-04T02:50:44.112246Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8463: SendBaseStatsToSA(), path count: 2, paths with incomplete stats: 1, at schemeshard: 72075186224037897 2026-01-04T02:50:44.112362Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8300: Schedule next SendBaseStatsToSA in 5.000000s, at schemeshard: 72075186224037897 2026-01-04T02:50:44.112641Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:27: [72075186224037894] TTxSchemeShardStats::Execute: schemeshard id: 72075186224037897, stats byte size: 48, entries count: 2, are all stats full: 0 2026-01-04T02:50:44.126510Z node 2 :STATISTICS DEBUG: tx_schemeshard_stats.cpp:133: [72075186224037894] TTxSchemeShardStats::Complete |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeTwoColumnTables [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:46:04.506775Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:04.578048Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:04.583701Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:04.583974Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:04.584070Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:04.898796Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:04.979137Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:04.979253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:05.025550Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:05.122652Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:05.721650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:05.722151Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:05.722203Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:05.722228Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:05.722651Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:05.787776Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:06.289091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:09.044071Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:09.050186Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:09.052861Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:09.076567Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:09.076669Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:09.104076Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:09.106051Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:09.263876Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:09.263987Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:09.265589Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.266261Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.266815Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.267904Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.268161Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.268285Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.268533Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.268699Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.268830Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:09.294366Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:09.473912Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:09.500312Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:09.500430Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:09.521577Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:09.522614Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:09.522821Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:09.522877Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:09.522916Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:09.522973Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:09.523022Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:09.523068Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:09.523479Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:09.523688Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1904:2453] 2026-01-04T02:46:09.525593Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:09.532729Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Describe result: PathErrorUnknown 2026-01-04T02:46:09.532786Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Creating table 2026-01-04T02:46:09.532857Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:09.542876Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2133:2589], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:09.551086Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2116:2579] txid# 281474976720657, issues: { message: "Schemeshard not available" severity: 1 } 2026-01-04T02:46:09.554056Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 20 Issues { message: "Schemeshard not available" severity: 1 } SchemeShardStatus: 13 SchemeShardReason: "Schemeshard not available" } 2026-01-04T02:46:09.556686Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:09.556767Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2143:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:09.579081Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2216:2651] 2026-01-04T02:46:09.579374Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2216:2651], schemeshard id = 72075186224037897 2026-01-04T02:46:09.682130Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:09.814502Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:09.950737Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. 
Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:09.952783Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2542:2869], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:09.955416Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720658:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:09.957854Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720658 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:09.957923Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2102:2571] Owner: [2:2101:2570]. Subscribe on create table tx: 28 ... 32>>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:48:36.251187Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4310:2462], ActorId: [2:4633:4035], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:48:36.251246Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4310:2462], ActorId: [2:4633:4035], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Njg2ZDJmZjUtYmI1NTZmNTAtZDUzMjBlYWQtOTczOTQ2MWQ=, TxId: 2026-01-04T02:48:36.334311Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 3 ], ReplyToActorId[ [2:4833:4157]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:48:36.334661Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 3 ] 2026-01-04T02:48:36.334709Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 3, ReplyToActorId = [2:4833:4157], StatRequests.size() = 1 2026-01-04T02:48:36.520249Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:4815:4142], ActorId: [2:4816:4143], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=NTJjN2UyZmQtMzlhNWNmYjUtNTg0OTk1MS1jZWE3MWViOA==, TxId: 2026-01-04T02:48:36.520346Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4815:4142], ActorId: [2:4816:4143], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=NTJjN2UyZmQtMzlhNWNmYjUtNTg0OTk1MS1jZWE3MWViOA==, TxId: 2026-01-04T02:48:36.520806Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:4814:4141], ActorId: [2:4815:4142], Got response [2:4816:4143] SUCCESS 2026-01-04T02:48:36.521220Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:48:36.552248Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 
2026-01-04T02:48:36.552332Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:49: [72075186224037894] TTxFinishTraversal::Complete. Don't send TEvAnalyzeResponse. There are pending operations, OperationId operationId , ActorId=[1:3417:3505] 2026-01-04T02:48:36.601581Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:228: [72075186224037894] EvFastPropagateCheck 2026-01-04T02:48:36.601683Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:383: [72075186224037894] PropagateFastStatistics(), node count = 0, schemeshard count = 0 2026-01-04T02:48:36.690604Z node 2 :STATISTICS DEBUG: service_impl.cpp:1214: EvRequestTimeout, pipe client id = [2:4457:3935], schemeshard count = 1 2026-01-04T02:48:37.666698Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-04T02:48:37.673246Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4880:2462], ActorId: [2:4890:4192], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:48:37.676852Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4880:2462], ActorId: [2:4890:4192], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table2` 2026-01-04T02:48:37.676980Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4880:2462], ActorId: [2:4890:4192], Start read next stream part 2026-01-04T02:48:37.728454Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 4 ], ReplyToActorId[ [2:4907:4202]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:48:37.728743Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 4 ] 2026-01-04T02:48:37.728788Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 4, ReplyToActorId = [2:4907:4202], StatRequests.size() = 1 2026-01-04T02:49:50.580088Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4880:2462], ActorId: [2:4890:4192], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:49:50.580244Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4880:2462], ActorId: [2:4890:4192], Start read next stream part 2026-01-04T02:49:50.580881Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3eg7hc7betdvzzaks69raq", SessionId: ydb://session/3?node_id=2&id=MjhmZjAyNWItOWJiZjhiM2MtYTM4NmNkN2UtYWVlMTBhYjA=, Slow query, duration: 72.899971s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table2`", parameters: 0b 2026-01-04T02:49:50.581325Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:4880:2462], ActorId: [2:5068:4289], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:49:50.584112Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4880:2462], ActorId: [2:4890:4192], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:49:50.584158Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4880:2462], ActorId: [2:4890:4192], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=MWRkNWQ4ZDgtZWZkMGQ0NzMtNDdiYThlMmYtM2ZjNDM5NmI=, TxId: 2026-01-04T02:49:50.584238Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33000, txId: 18446744073709551615] shutting down 2026-01-04T02:49:50.584647Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:4880:2462], ActorId: [2:5068:4289], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table2` 2026-01-04T02:49:50.584741Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4880:2462], ActorId: [2:5068:4289], Start read next stream part 2026-01-04T02:49:50.644770Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 5 ], ReplyToActorId[ [2:5087:4304]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:49:50.645193Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 5 ] 2026-01-04T02:49:50.645247Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 5, ReplyToActorId = [2:5087:4304], StatRequests.size() = 1 2026-01-04T02:50:44.074757Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4880:2462], ActorId: [2:5068:4289], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:50:44.074912Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:4880:2462], ActorId: [2:5068:4289], Start read next stream part 2026-01-04T02:50:44.076039Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3ejeqrfx39mm1m7x15dxst", SessionId: ydb://session/3?node_id=2&id=MThiODU3NWEtMTNhNmQwNjItY2I5MmM5NjYtN2Y5ZTBiZDY=, Slow query, duration: 53.487697s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table2`", parameters: 0b 2026-01-04T02:50:44.078736Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5241:4387], ActorId: [2:5243:4388], Starting query actor #1 [2:5244:4389] 2026-01-04T02:50:44.078814Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5243:4388], ActorId: [2:5244:4389], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:50:44.079154Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:4880:2462], ActorId: [2:5068:4289], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:50:44.079214Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:4880:2462], ActorId: [2:5068:4289], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZTM2N2IxOWYtMWY4NGIzMzYtOTY2YWI0ZGEtNDgwOWM0NjA=, TxId: 2026-01-04T02:50:44.079325Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 33100, txId: 18446744073709551615] shutting down 2026-01-04T02:50:44.083638Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5243:4388], ActorId: [2:5244:4389], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YzNhZGI2MmItZjkxMDRhODUtYmRjZjk5YzQtNTM3NDhiYQ==, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:50:44.139555Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5243:4388], ActorId: [2:5244:4389], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YzNhZGI2MmItZjkxMDRhODUtYmRjZjk5YzQtNTM3NDhiYQ==, TxId: 2026-01-04T02:50:44.139636Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5243:4388], ActorId: [2:5244:4389], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YzNhZGI2MmItZjkxMDRhODUtYmRjZjk5YzQtNTM3NDhiYQ==, TxId: 2026-01-04T02:50:44.140036Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5241:4387], ActorId: [2:5243:4388], Got response [2:5244:4389] SUCCESS 2026-01-04T02:50:44.140400Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:50:44.179008Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 35] 2026-01-04T02:50:44.179086Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. 
Send TEvAnalyzeResponse, OperationId=operationId, ActorId=[1:3417:3505] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UnobservedUncommittedChangeConflict [GOOD] Test command err: Trying to start YDB, gRPC: 30278, MsgBus: 2590 2026-01-04T02:50:24.146739Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333324117477500:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:24.147006Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:24.469361Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:24.469488Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:24.501348Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:24.536497Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:24.536764Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:24.706556Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:24.706592Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:24.706603Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:24.706727Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:24.792388Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:25.156008Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:25.167818Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:25.187021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:50:25.248057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:25.421336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:25.596192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:25.667548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.651817Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333337002381214:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:27.651935Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:27.656015Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333337002381224:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:27.656105Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:27.988110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.028838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.067946Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.102523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.132366Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.173500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.213373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.285122Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.353707Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333341297349403:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.353775Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.353856Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333341297349408:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.354201Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333341297349410:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.354248Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.357516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:28.368067Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333341297349412:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:28.445414Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333341297349463:3777] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:29.148312Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333324117477500:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:29.199416Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:29.950523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:68 ... r_subdomain.cpp:311) 2026-01-04T02:50:39.471772Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.555481Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.705984Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.773683Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.913448Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:42.165698Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333405156948362:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:42.165784Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:42.166102Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333405156948371:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:42.166163Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:42.243491Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:42.279598Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:42.346712Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:42.377876Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:42.412485Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:42.455824Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:42.497186Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:42.546409Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:42.626095Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333405156949243:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:42.626189Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:42.626256Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333405156949248:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:42.626701Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333405156949250:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:42.626769Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:42.631143Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:42.641936Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333405156949251:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:42.738435Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333405156949303:3764] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:43.827395Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333387977077342:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:43.827456Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:44.222338Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:44.792283Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_LOCKS_BROKEN;details=Operation is aborting because locks are not valid;tx_id=281474976710677; 2026-01-04T02:50:44.807713Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:952: SelfId: [3:7591333413746884477:2559], Table: `/Root/TestImmediateEffects` ([72057594046644480:54:1]), SessionActorId: [3:7591333413746884411:2559]Got LOCKS BROKEN for table `/Root/TestImmediateEffects`. ShardID=72075186224037927, Sink=[3:7591333413746884477:2559].{
<main>: Error: Operation is aborting because locks are not valid, code: 2001 } 2026-01-04T02:50:44.808484Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7591333413746884470:2559], SessionActorId: [3:7591333413746884411:2559], statusCode=ABORTED. Issue=
<main>: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001
<main>: Error: Operation is aborting because locks are not valid, code: 2001 . sessionActorId=[3:7591333413746884411:2559]. 2026-01-04T02:50:44.808791Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=Yzc4NzhlNzktMzU3Y2MyYmQtNWIzMjY3YWQtZTRmYTEwMGY=, ActorId: [3:7591333413746884411:2559], ActorState: ExecuteState, LegacyTraceId: 01ke3em3njdtg4d91hcek3fzfw, got TEvKqpBuffer::TEvError in ExecuteState, status: ABORTED send to: [3:7591333413746884471:2559] from: [3:7591333413746884470:2559] trace_id# 2026-01-04T02:50:44.808916Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7591333413746884471:2559] TxId: 281474976710677. Ctx: { TraceId: 01ke3em3njdtg4d91hcek3fzfw, Database: /Root, SessionId: ydb://session/3?node_id=3&id=Yzc4NzhlNzktMzU3Y2MyYmQtNWIzMjY3YWQtZTRmYTEwMGY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
<main>: Error: Transaction locks invalidated. Table: `/Root/TestImmediateEffects`., code: 2001 subissue: {
: Error: Operation is aborting because locks are not valid, code: 2001 } } trace_id# 2026-01-04T02:50:44.809343Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=Yzc4NzhlNzktMzU3Y2MyYmQtNWIzMjY3YWQtZTRmYTEwMGY=, ActorId: [3:7591333413746884411:2559], ActorState: ExecuteState, LegacyTraceId: 01ke3em3njdtg4d91hcek3fzfw, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/TestImmediateEffects`." issue_code: 2001 severity: 1 issues { message: "Operation is aborting because locks are not valid" issue_code: 2001 severity: 1 } } trace_id# |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::DeleteAfterInsert [GOOD] >> KqpImmediateEffects::ConflictingKeyW1WRR2 |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpImmediateEffects::ManyFlushes [GOOD] >> TestYmqHttpProxy::TestPurgeQueue [GOOD] |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpPg::NoSelectFullScan [GOOD] >> KqpPg::LongDomainName |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> KqpImmediateEffects::InsertDuplicates+UseSink [GOOD] >> KqpImmediateEffects::InsertConflictTxAborted >> KqpEffects::EffectWithSelect-UseSink [GOOD] >> TestYmqHttpProxy::TestSendMessageBatch |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD] >> KqpOverload::OltpOverloaded+Distributed [GOOD] |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.8%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TestKinesisHttpProxy::TestWrongStream2 [GOOD] >> TestYmqHttpProxy::TestChangeMessageVisibility [GOOD] >> KqpImmediateEffects::ConflictingKeyRW1RR2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ManyFlushes [GOOD] Test command err: Trying to start YDB, gRPC: 8952, MsgBus: 8896 2026-01-04T02:50:27.705254Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333337167274885:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:27.710029Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:27.954588Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:27.954712Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:27.973947Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:28.007648Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:28.028165Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:28.031466Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333337167274841:2081] 1767495027699569 != 1767495027699572 2026-01-04T02:50:28.174033Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:28.174061Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:28.174067Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:28.174198Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:28.294845Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:28.552142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:28.625034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.716724Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:28.747842Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.895428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.976696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:30.915274Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333350052178603:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:30.915409Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:30.915976Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333350052178613:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:30.916041Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.210108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.239257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.271102Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.304069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.337617Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.374941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.418966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.466607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:31.556449Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333354347146776:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.556539Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.556581Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333354347146781:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.556775Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333354347146783:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.556812Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:31.561559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:31.574940Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333354347146785:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:31.641801Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333354347146836:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:32.702266Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333337167274885:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:32.702334Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:33.265798Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create ... 46131:7762515]; 2026-01-04T02:50:41.136177Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:41.144940Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:41.224379Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:41.225277Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591333399331098318:2081] 1767495041135221 != 1767495041135224 2026-01-04T02:50:41.263741Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:41.268083Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:41.268179Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:41.272300Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:41.312209Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:41.312234Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:41.312243Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:41.312354Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:41.491092Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:41.675274Z node 3 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:41.684332Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:41.738309Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:41.852005Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:41.910521Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:42.141475Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:44.212731Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333412216002074:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:44.212818Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:44.213089Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333412216002083:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:44.213138Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:44.300782Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:44.338391Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:44.368538Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:44.442644Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:44.474015Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:44.505882Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:44.539857Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:44.581381Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:44.657555Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333412216002955:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:44.657631Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:44.657891Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333412216002960:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:44.657989Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333412216002961:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:44.658061Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:44.661599Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:44.672925Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333412216002964:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:44.761601Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333412216003015:3765] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:46.136559Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333399331098357:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:46.136628Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:46.329038Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TestKinesisHttpProxy::BadRequestUnknownMethod [GOOD] >> KqpPg::SelectIndex-useSink [GOOD] >> KqpPg::TableDeleteAllData+useSink >> TestKinesisHttpProxy::TestCounters [GOOD] >> YdbTableSplit::RenameTablesAndSplit [GOOD] >> TestKinesisHttpProxy::TestWrongRequest |95.8%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::EffectWithSelect-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 18571, MsgBus: 13023 2026-01-04T02:50:26.854353Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333335003576844:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:26.854462Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:27.084866Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:27.093490Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:27.093604Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:27.121438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:27.184542Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:27.185710Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333335003576804:2081] 1767495026852298 != 1767495026852301 2026-01-04T02:50:27.244364Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, 
broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:27.244388Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:27.244394Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:27.244497Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:27.329302Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:27.667264Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:27.673825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:50:27.864259Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:30.082668Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333352183446874:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:30.082790Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:30.083260Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333352183446884:2325], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
<main>: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:30.083298Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:30.367403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:30.388467Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:7591333352183446922:2519], Recipient [1:7591333352183446931:2331]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:50:30.389331Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:7591333352183446922:2519], Recipient [1:7591333352183446931:2331]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:50:30.389604Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7591333352183446931:2331] 2026-01-04T02:50:30.389895Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:50:30.401104Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:7591333352183446922:2519], Recipient [1:7591333352183446931:2331]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:50:30.401451Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:50:30.401525Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:50:30.403545Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:50:30.403601Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:50:30.403633Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:50:30.404057Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:50:30.404111Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:50:30.404167Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7591333352183446946:2331] in generation 1 2026-01-04T02:50:30.405239Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:50:30.438238Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:50:30.438416Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:50:30.438510Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7591333352183446948:2332] 2026-01-04T02:50:30.438521Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:50:30.438554Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:50:30.438567Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:50:30.438756Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:7591333352183446931:2331], Recipient [1:7591333352183446931:2331]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:50:30.438801Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:50:30.438950Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:50:30.439046Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:50:30.439081Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:50:30.439103Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:50:30.439137Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:50:30.439162Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:50:30.439174Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:50:30.439183Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:50:30.439199Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:50:30.440733Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:7591333352183446933:2525], Recipient [1:7591333352183446931:2331]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:50:30.440763Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:50:30.440786Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7591333352183446929:2524], serverId# [1:7591333352183446933:2525], sessionId# [0:0:0] 2026-01-04T02:50:30.440823Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:7591333335003577153:2143], Recipient [1:7591333352183446933:2525] 2026-01-04T02:50:30.440845Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:50:30.440953Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:50:30.441281Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715658] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:50:30.441317Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:50:30.441390Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2026-01-04T02:50:30.441417Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715658] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T02:50:30.441445Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for 
[0:281474976715658] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T02:50:30.441463Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715658] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T02:50:30.441474Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715658] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T02:50:30.441883Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715658] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T02:50:30.441911Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715658] at ... ng to start YDB, gRPC: 23503, MsgBus: 12223 2026-01-04T02:50:36.350173Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591333377017845670:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:36.350239Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:36.364322Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:36.492439Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:36.492529Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:36.517094Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:36.535365Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:36.594382Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:36.594398Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:36.594403Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:36.594494Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:36.600551Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:37.091992Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:37.098538Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:50:37.114780Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation 
type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:37.189789Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:37.356538Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:37.357286Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:37.428907Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.624276Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333389902749397:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:39.624349Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:39.624772Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333389902749407:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:39.624815Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:39.722668Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.755213Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.815999Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.850817Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.887834Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.925303Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.970762Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:40.020004Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:40.091597Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333394197717572:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:40.091727Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:40.092225Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333394197717578:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:40.092273Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333394197717577:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:40.092305Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:40.095959Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:40.106633Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333394197717581:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:40.173234Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333394197717632:3772] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:41.350351Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333377017845670:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:41.350593Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:41.896043Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::UpsertAfterInsertWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 4648, MsgBus: 10723 2026-01-04T02:50:31.054253Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333357653793326:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:31.054349Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:31.096839Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:31.372657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:31.372754Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:31.374830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:31.457839Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:31.459974Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:31.467496Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333357653793273:2081] 1767495031049628 != 1767495031049631 2026-01-04T02:50:31.553344Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:31.553364Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to 
initialize from file: (empty maybe) 2026-01-04T02:50:31.553371Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:31.553496Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:31.665510Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:31.990539Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:31.998574Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:32.066459Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:32.070159Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:32.269638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:32.432269Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:32.504906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.372101Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333370538697044:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:34.372230Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:34.372543Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333370538697054:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:34.372610Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:34.740008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.769013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.802108Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.832840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.857763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.887906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.936469Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.997562Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.088095Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333374833665223:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.088152Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.088364Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333374833665228:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.088389Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333374833665229:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.088420Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.091520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:35.101095Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333374833665232:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:35.200044Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333374833665283:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:36.054870Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333357653793326:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:36.056331Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence ... _script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:38.200540Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:38.269019Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:38.269042Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:38.269049Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:38.269123Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:38.274435Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:38.274517Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:38.277101Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:38.362216Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:38.631162Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:38.646109Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:38.703454Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:38.815845Z node 2 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:38.875287Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.138087Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:41.276223Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333398227738970:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:41.276310Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:41.276571Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333398227738979:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:41.276621Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:41.346196Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:41.380541Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:41.412243Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:41.440866Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:41.471781Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:41.506678Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:41.541799Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:41.589684Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:41.656994Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333398227739850:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:41.657060Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333398227739855:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:41.657103Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:41.657274Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591333398227739858:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:41.657334Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:41.660358Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:41.670306Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591333398227739857:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:41.734557Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591333398227739910:3775] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:43.137922Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591333385342835250:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:43.140738Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:43.235230Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:43.277699Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:43.315550Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> TestKinesisHttpProxy::ListShardsTimestamp [GOOD] >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpOverload::OltpOverloaded+Distributed [GOOD] Test command err: Trying to start YDB, gRPC: 18140, MsgBus: 9358 2026-01-04T02:50:31.341073Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333357862757171:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:31.341586Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:31.586600Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:31.587243Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:31.587336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:31.624949Z node 1 
:HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:31.693430Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:31.695018Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333357862757018:2081] 1767495031315474 != 1767495031315477 2026-01-04T02:50:31.830576Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:31.830601Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:31.830608Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:31.830700Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:31.861950Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:32.341255Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:32.346941Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:32.362144Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:32.445239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:32.580894Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:32.766763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:32.839607Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.715247Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] 
[TPoolFetcherActor] ActorId: [1:7591333370747660783:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:34.715359Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:34.720046Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333370747660793:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:34.720145Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.083165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.121081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.154426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.188289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.225326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.260218Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.296174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.360055Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.426315Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333375042628960:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.426391Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.426675Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333375042628965:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.426718Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.426740Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333375042628966:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.430262Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:35.440760Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333375042628969:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:35.545182Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333375042629020:3774] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:36.338271Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333357862757171:2176];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:36.338543Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:37.366268Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... DB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 19122, MsgBus: 8395 ... waiting for SysViewsRoster update finished 2026-01-04T02:50:41.628405Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:41.635005Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:50:41.639836Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:50:41.640095Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:50:41.640239Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:50:41.917502Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:41.917640Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:41.940695Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767495038731690 != 1767495038731694 2026-01-04T02:50:41.946480Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:41.991993Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:42.105049Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:50:42.466251Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:42.466319Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:42.466376Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:42.467086Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:42.479464Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:42.812295Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:42.839042Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:43.089239Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:43.464330Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:43.745552Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:44.256394Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1902:3507], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:44.256572Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:44.257373Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1975:3526], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:44.257574Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:44.285688Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:44.514415Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:44.763515Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:45.044890Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:45.283410Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:45.540455Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:45.798560Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.097430Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.479990Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2793:4172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:46.480093Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:46.480814Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2797:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:46.480886Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:46.480945Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2799:4178], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:46.486681Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:46.641708Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2802:4181], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:46.698986Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2857:4217] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=WARN;component=0;fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=;tx_id=3; FALLBACK_ACTOR_LOGGING;priority=WARN;component=0;fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=;tx_id=281474976715675; |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> Secret::SimpleQueryService >> TestKinesisHttpProxy::TestEmptyHttpBody >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyRW1RR2 [GOOD] Test command err: Trying to start YDB, gRPC: 11004, MsgBus: 2076 2026-01-04T02:50:29.113685Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333347285149598:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:29.116615Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:29.396988Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:29.399216Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:29.418391Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:29.459825Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:29.462540Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:29.553621Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:29.553638Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:29.553643Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:29.553694Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:29.680566Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:29.965972Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:29.971654Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:30.023814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:30.126944Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:30.185051Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:30.349136Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:30.418192Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:32.226268Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333360170053310:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:32.226402Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:32.227996Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333360170053320:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:32.228055Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:32.666167Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:32.740225Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:32.815983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:32.856374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:32.911097Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:32.967378Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:33.055159Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:33.106430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:33.198489Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333364465021498:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:33.198547Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:33.198746Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333364465021503:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:33.198795Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333364465021504:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:33.198860Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:33.202197Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:33.211287Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333364465021507:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:33.290860Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333364465021558:3781] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:34.108567Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333347285149598:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:34.108628Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:34.713552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:68 ... 46131:7762515]; 2026-01-04T02:50:42.763249Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:42.771033Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:42.869888Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:42.870993Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:42.872330Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591333403696641937:2081] 1767495042760314 != 1767495042760317 2026-01-04T02:50:42.911325Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:42.911417Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:42.914418Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:42.985046Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:42.985074Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:42.985083Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:42.985174Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:43.083565Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 
2026-01-04T02:50:43.430193Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:43.445382Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:43.495171Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:43.617738Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:43.671554Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:43.779817Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:46.111912Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333420876512990:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:46.111991Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:46.112507Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333420876513000:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:46.112563Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:46.175061Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.205022Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.238038Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.268528Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.303852Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.333882Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.366278Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.407936Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.498326Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333420876513866:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:46.498400Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:46.498515Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333420876513871:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:46.498577Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333420876513873:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:46.498641Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:46.502000Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:46.511687Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333420876513875:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:46.573589Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333420876513926:3766] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:47.762571Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333403696642050:2143];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:47.762633Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:48.147906Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/services/ydb/table_split_ut/unittest >> YdbTableSplit::RenameTablesAndSplit [GOOD] Test command err: 2026-01-04T02:49:19.058535Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333046096339579:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:19.062431Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:19.309950Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:49:19.369659Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:49:19.369779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:49:19.425261Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:49:19.437762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:49:19.527366Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:49:19.653687Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:49:19.653721Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:49:19.653752Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty 
maybe) 2026-01-04T02:49:19.653852Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:49:20.062575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:20.074894Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:49:22.347278Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333058981242830:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.347423Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.347874Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333058981242840:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.347930Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.773937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /Root/Dir, operationId: 281474976710658:0, at schemeshard: 72057594046644480 2026-01-04T02:49:22.774232Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710658:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:49:22.774376Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_create_table.cpp:445: TCreateTable Propose, path: /Root/Dir/Foo, opId: 281474976710658:1, at schemeshard: 72057594046644480 2026-01-04T02:49:22.776675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710658:2, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:49:22.776719Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:49:22.779216Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710658, database: /Root, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /Root/Dir/Foo 2026-01-04T02:49:22.863186Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767494962905, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:49:22.921184Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710658:0 2026-01-04T02:49:22.921249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710658:1 2026-01-04T02:49:22.948150Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333058981243083:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.948254Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.948655Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333058981243085:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.948693Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:49:22.991456Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_table.cpp:522: TAlterTable Propose, path: /Root/Dir/Foo, pathId: , opId: 281474976710659:0, at schemeshard: 72057594046644480 2026-01-04T02:49:22.992005Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710659:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:49:22.992030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:49:22.994272Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710659, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE, path: /Root/Dir/Foo 2026-01-04T02:49:23.007009Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767494963052, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T02:49:23.014865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976710659:0 Fast forward 1m 2026-01-04T02:49:24.058791Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333046096339579:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:24.058871Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; partitions 2 Fast forward 1m partitions 2 Fast forward 1m partitions 2 Fast forward 1m 2026-01-04T02:49:32.977278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /Root/Dir/Foo, tableId: , opId: 281474976715693:0, at schemeshard: 72057594046644480, request: TablePath: "/Root/Dir/Foo" SourceTabletId: 72075186224037888 SourceTabletId: 72075186224037889 SchemeshardId: 72057594046644480 2026-01-04T02:49:32.980888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /Root/Dir/Foo, tableId: , opId: 281474976715693:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "\002\000\004\000\000\000\377\377\377\177\000\000\000\200" TabletID: 72075186224037888 ShardIdx: 1 } SourceRanges { KeyRangeBegin: "\002\000\004\000\000\000\377\377\377\177\000\000\000\200" KeyRangeEnd: "" TabletID: 72075186224037889 ShardIdx: 2 } DestinationRanges { KeyRangeBegin: "\002\000\000\000\000\200\000\000\000\200" KeyRangeEnd: "" ShardIdx: 3 }, request: TablePath: "/Root/Dir/Foo" SourceTabletId: 72075186224037888 SourceTabletId: 72075186224037889 SchemeshardId: 72057594046644480 2026-01-04T02:49:32.980952Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715693:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:49:33.086963Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts 
is done, operation id: 281474976715693:0 2026-01-04T02:49:33.106962Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-04T02:49:33.107252Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-04T02:49:34.294095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:49:34.294130Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded partitions 1 2026-01-04T02:49:35.150926Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_move_table.cpp:668: TMoveTable Propose, from: /Root/Dir/Foo, to: /Root/Dir/Bar, opId: 281474976710660:0, at schemeshard: 72057594046644480 2026-01-04T02:49:35.151891Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976710660:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:49:35.154865Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710660, database: /Root, subject: , status: StatusAccepted, operation: ALTER TABLE RENAME, dst path: /Root/Dir/Foo, dst path: /Root/Dir/Bar 2026-01-04T02:49:35.165636Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOp ... FO: datashard_loans.cpp:177: 72075186224037892 Initiating switch from PreOffline to Offline state 2026-01-04T02:50:48.883897Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 2026-01-04T02:50:48.883920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710664 2026-01-04T02:50:48.883921Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7591333428348434501:2733], serverId# [1:7591333428348434505:5011], sessionId# [0:0:0] 2026-01-04T02:50:48.884651Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037890 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:50:48.885279Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:50:48.885352Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:128: 72075186224037891 parts [ [72075186224037890:1:114:1:12288:11016:0] ] return ack processed 2026-01-04T02:50:48.885385Z node 1 :TX_DATASHARD DEBUG: datashard_loans.cpp:220: 72075186224037891 in PreOffline state HasSharedBobs: 0 SchemaOperations: [ ] OutReadSets count: 0 ChangesQueue size: 0 ChangeExchangeSplit: 1 siblings to be activated: wait to activation from: 2026-01-04T02:50:48.885442Z node 1 :TX_DATASHARD INFO: datashard_loans.cpp:177: 72075186224037891 Initiating switch from PreOffline to Offline state 2026-01-04T02:50:48.885589Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7591333428348434498:2731], serverId# [1:7591333428348434506:5012], sessionId# [0:0:0] 2026-01-04T02:50:48.885707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 
72057594046644480, message: Source { RawX1: 7591333101930916444 RawX2: 4503603922340177 } TabletId: 72075186224037890 State: 4 2026-01-04T02:50:48.885774Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:50:48.886765Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037892 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:50:48.887016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:50:48.887063Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:50:48.887078Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037890 state Offline 2026-01-04T02:50:48.887191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591333428348434095 RawX2: 4503603922340492 } TabletId: 72075186224037892 State: 4 2026-01-04T02:50:48.887260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037892, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:50:48.888582Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:50:48.888603Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:50:48.888618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:5 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:50:48.888633Z node 1 :TX_DATASHARD INFO: datashard_impl.h:3339: 72075186224037891 Reporting state Offline to schemeshard 72057594046644480 2026-01-04T02:50:48.888744Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037892 state Offline 2026-01-04T02:50:48.888928Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591333428348434100 RawX2: 4503603922340493 } TabletId: 72075186224037891 State: 4 2026-01-04T02:50:48.888974Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:50:48.889155Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591333428348434100 RawX2: 4503603922340493 } TabletId: 72075186224037891 State: 4 2026-01-04T02:50:48.889232Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:50:48.890552Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK 
Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-04T02:50:48.890827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 3 2026-01-04T02:50:48.891023Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 5, at schemeshard: 72057594046644480 2026-01-04T02:50:48.891200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 2 2026-01-04T02:50:48.891387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:50:48.891412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:50:48.891512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:50:48.891526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:50:48.892595Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037890 reason = ReasonStop 2026-01-04T02:50:48.892648Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3758: Server disconnected at leader tablet# 72075186224037890, clientId# [1:7591333114815818689:3540], serverId# [1:7591333114815818690:3541], sessionId# [0:0:0] 2026-01-04T02:50:48.892676Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037892 reason = ReasonStop 2026-01-04T02:50:48.892698Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-04T02:50:48.892751Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2974: Handle TEvStateChangedResult datashard 72075186224037891 state Offline 2026-01-04T02:50:48.893013Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-04T02:50:48.893060Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2026-01-04T02:50:48.893135Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037890 2026-01-04T02:50:48.893220Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037890 2026-01-04T02:50:48.894292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-04T02:50:48.894574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 40] was 1 2026-01-04T02:50:48.894855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 
4, at schemeshard: 72057594046644480 2026-01-04T02:50:48.894918Z node 1 :TX_DATASHARD INFO: datashard.cpp:196: OnTabletStop: 72075186224037891 reason = ReasonStop 2026-01-04T02:50:48.895077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-04T02:50:48.895117Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 40], at schemeshard: 72057594046644480 2026-01-04T02:50:48.895187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-04T02:50:48.895441Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2026-01-04T02:50:48.895512Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037892 2026-01-04T02:50:48.895578Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037892 2026-01-04T02:50:48.895901Z node 1 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037891 2026-01-04T02:50:48.895971Z node 1 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037891 2026-01-04T02:50:48.896192Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-04T02:50:48.896212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-04T02:50:48.896250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:5 2026-01-04T02:50:48.896258Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:5 tabletId 72075186224037892 2026-01-04T02:50:48.900028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-04T02:50:48.900068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-04T02:50:48.900112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-04T02:50:48.900139Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/ydb/table_split_ut/unittest >> TestKinesisHttpProxy::ListShardsToken |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.9%| [TA] $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |95.9%| [TA] {RESULT} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/ydb/table_split_ut/test-results/unittest/{meta.json ... results_accumulator.log} |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> HttpProxyInsideYdb::TestIfEnvVariableSet [GOOD] Test command err: 2026-01-04T02:50:00.809660Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333223378228261:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:00.809750Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:01.071953Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:01.080887Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:01.080962Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:01.184397Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:01.254311Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:01.258967Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333223378228220:2081] 1767495000805846 != 1767495000805849 2026-01-04T02:50:01.369976Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:01.487637Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:01.487660Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:01.487675Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:01.487755Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:01.826135Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:01.958815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:02.170120Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 
281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:50:02.183130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:50:02.202177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:50:02.264470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.416793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.460118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-04T02:50:02.465863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.509647Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-01-04T02:50:02.514840Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.551636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.592410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.633969Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.674401Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.714602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.764472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:04.233974Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333240558099059:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.233988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333240558099070:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.234098Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.234397Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333240558099074:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.234468Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.238029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:04.248242Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333240558099073:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:50:04.344410Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333240558099126:3103] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:05.046023Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.080707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.137265Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.174400Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.c ... 
a Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:50:48.935748Z node 7 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-04T02:50:48.935817Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 19ms 2026-01-04T02:50:48.936108Z node 7 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:50:48.936796Z node 7 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:50:48.936821Z node 7 :SQS DEBUG: executor.cpp:287: Request [] 
Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 19ms 2026-01-04T02:50:48.937209Z node 7 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:50:48.937242Z node 7 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-04T02:50:48.937348Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 20ms 2026-01-04T02:50:48.937669Z node 7 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } 
Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:50:49.079127Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7591333429651472492:2441]: Pool not found 2026-01-04T02:50:49.079277Z node 7 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-04T02:50:49.354028Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7591333429651472471:2437]: Pool not found 2026-01-04T02:50:49.354372Z node 7 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-04T02:50:49.396792Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [7:7591333433946439883:2457], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:50:49.396908Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7591333433946439882:2456], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:49.397019Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:49.398290Z node 7 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [7:7591333433946439886:2458], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:49.398355Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:49.705567Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [7:7591333433946439880:2455]: Pool not found 2026-01-04T02:50:49.706052Z node 7 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-04T02:50:49.908450Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:45532) incoming connection opened 2026-01-04T02:50:49.908577Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:45532) -> (POST /Root, 3 bytes) 2026-01-04T02:50:49.908736Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [985a:6008:477c:0:805a:6008:477c:0] request [UnknownMethodName] url [/Root] database [/Root] requestId: 38bbbdf2-e15b2232-d911bb10-5bf1e304 2026-01-04T02:50:49.909031Z node 7 :HTTP_PROXY INFO: http_req.cpp:1602: http request [UnknownMethodName] requestId [38bbbdf2-e15b2232-d911bb10-5bf1e304] reply with status: UNSUPPORTED message: Missing method name UnknownMethodName 2026-01-04T02:50:49.909212Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:45532) <- (400 InvalidAction, 76 bytes) 2026-01-04T02:50:49.909284Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:45532) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.UnknownMethodName X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { } 2026-01-04T02:50:49.909321Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:45532) Response: HTTP/1.1 400 InvalidAction Connection: close x-amzn-requestid: 38bbbdf2-e15b2232-d911bb10-5bf1e304 Content-Type: application/x-amz-json-1.1 Content-Length: 76 2026-01-04T02:50:49.909409Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:45532) connection closed Http output full {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} 400 {"__type":"InvalidAction","message":"Missing method name UnknownMethodName"} |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> Secret::Deactivated |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> KqpEffects::InsertAbort_Select_Conflict-UseSink [GOOD] >> KqpReattach::ReattachDeliveryProblem [GOOD] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestDeleteMessageBatch [GOOD] Test command err: 2026-01-04T02:50:00.808935Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333223741340033:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:00.809020Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:01.060896Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:01.065799Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:01.065959Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:01.174802Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:01.236630Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:01.261964Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333223741339996:2081] 1767495000805794 != 1767495000805797 2026-01-04T02:50:01.272893Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:01.490707Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:01.490736Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:01.490764Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:01.490849Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:01.823952Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:01.924299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:02.148884Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:50:02.162242Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:50:02.167073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:50:02.198126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-04T02:50:02.256872Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.414342Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.470638Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.513871Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 2026-01-04T02:50:02.518408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.547435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.580100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.618084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.656013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.690553Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.725891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:04.284672Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333240921210839:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.284694Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333240921210848:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.284828Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.285213Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333240921210854:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.285330Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.288843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:04.300472Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333240921210853:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-01-04T02:50:04.356245Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333240921210906:3105] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:05.040396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.085974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.125344Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.168251Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.c ... .cpp:147: Request [41482ca3-c6ca9931-26484cd3-83a63939] Sending reply from proxy actor: { DeleteMessageBatch { RequestId: "41482ca3-c6ca9931-26484cd3-83a63939" Entries { Id: "Id-0" } Entries { Id: "Id-1" } } RequestId: "41482ca3-c6ca9931-26484cd3-83a63939" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2026-01-04T02:50:50.823792Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:401: http request [DeleteMessageBatch] requestId [41482ca3-c6ca9931-26484cd3-83a63939] Got succesfult GRPC response. 2026-01-04T02:50:50.823920Z node 7 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DeleteMessageBatch] requestId [41482ca3-c6ca9931-26484cd3-83a63939] reply ok 2026-01-04T02:50:50.824053Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [DeleteMessageBatch] requestId [41482ca3-c6ca9931-26484cd3-83a63939] Send metering event. 
HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 716 ResponseSizeInBytes: 197 SourceAddress: b82a:b47f:677b:0:a02a:b47f:677b:0 ResourceId: 000000000000000101v0 Action: DeleteMessageBatch 2026-01-04T02:50:50.824183Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:41834) <- (200 , 44 bytes) 2026-01-04T02:50:50.824293Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:41834) connection closed Http output full {"Successful":[{"Id":"Id-0"},{"Id":"Id-1"}]} 2026-01-04T02:50:50.825116Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:46608) incoming connection opened 2026-01-04T02:50:50.825207Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:46608) -> (POST /Root, 106 bytes) 2026-01-04T02:50:50.825323Z node 7 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [1889:d87e:677b:0:89:d87e:677b:0] request [ReceiveMessage] url [/Root] database [/Root] requestId: 9d1c01bd-af19819c-6fdea0e6-16e133ba 2026-01-04T02:50:50.825602Z node 7 :HTTP_PROXY INFO: http_req.cpp:543: http request [ReceiveMessage] requestId [9d1c01bd-af19819c-6fdea0e6-16e133ba] got new request from [1889:d87e:677b:0:89:d87e:677b:0] 2026-01-04T02:50:50.825855Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:477: http request [ReceiveMessage] requestId [9d1c01bd-af19819c-6fdea0e6-16e133ba] Got cloud auth response. FolderId: folder4 CloudId: cloud4 UserSid: fake_user_sid@as 2026-01-04T02:50:50.825877Z node 7 :HTTP_PROXY INFO: http_req.cpp:301: http request [ReceiveMessage] requestId [9d1c01bd-af19819c-6fdea0e6-16e133ba] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-04T02:50:50.826001Z node 7 :SQS DEBUG: ymq_proxy.cpp:148: Got new request in YMQ proxy. FolderId: folder4, CloudId: cloud4, UserSid: fake_user_sid@as, RequestId: 9d1c01bd-af19819c-6fdea0e6-16e133ba 2026-01-04T02:50:50.826110Z node 7 :SQS DEBUG: proxy_actor.cpp:263: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] Proxy actor: used user_name='cloud4', queue_name='000000000000000101v0', folder_id='folder4' 2026-01-04T02:50:50.826122Z node 7 :SQS DEBUG: proxy_actor.cpp:78: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] Request proxy started 2026-01-04T02:50:50.826169Z node 7 :SQS DEBUG: service.cpp:761: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] Answer configuration for queue [cloud4/000000000000000101v0] without leader 2026-01-04T02:50:50.826203Z node 7 :SQS DEBUG: proxy_actor.cpp:97: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] Get configuration duration: 0ms 2026-01-04T02:50:50.826260Z node 7 :SQS DEBUG: proxy_service.cpp:246: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] Send get leader node request to sqs service for cloud4/000000000000000101v0 2026-01-04T02:50:50.826281Z node 7 :SQS DEBUG: service.cpp:581: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] Leader node for queue [cloud4/000000000000000101v0] is 7 2026-01-04T02:50:50.826327Z node 7 :SQS DEBUG: proxy_service.cpp:170: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] Got leader node for queue response. Node id: 7. 
Status: 0 2026-01-04T02:50:50.826421Z node 7 :SQS TRACE: proxy_service.cpp:303: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] Sending request from proxy to leader node 7: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "9d1c01bd-af19819c-6fdea0e6-16e133ba" 2026-01-04T02:50:50.826493Z node 7 :SQS DEBUG: proxy_service.cpp:70: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] Received Sqs Request: ReceiveMessage { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000101v0" } RequestId: "9d1c01bd-af19819c-6fdea0e6-16e133ba" 2026-01-04T02:50:50.826560Z node 7 :SQS DEBUG: action.h:133: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] Request started. Actor: [7:7591333437358295400:3932] 2026-01-04T02:50:50.826610Z node 7 :SQS TRACE: service.cpp:1472: Inc local leader ref for actor [7:7591333437358295400:3932] 2026-01-04T02:50:50.826625Z node 7 :SQS DEBUG: service.cpp:754: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] Forward configuration request to queue [cloud4/000000000000000101v0] leader 2026-01-04T02:50:50.826650Z node 7 :SQS DEBUG: action.h:627: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] Get configuration duration: 0ms 2026-01-04T02:50:50.826668Z node 7 :SQS TRACE: action.h:647: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] Got configuration. Root url: http://ghrun-46vtxmsa7i.auto.internal:8771, Shards: 4, Fail: 0 2026-01-04T02:50:50.826687Z node 7 :SQS TRACE: action.h:662: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] Got configuration. Attributes: { ContentBasedDeduplication: 0 DelaySeconds: 0.000000s FifoQueue: 0 MaximumMessageSize: 262144 MessageRetentionPeriod: 345600.000000s ReceiveMessageWaitTime: 0.000000s VisibilityTimeout: 30.000000s } 2026-01-04T02:50:50.826698Z node 7 :SQS TRACE: action.h:427: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] DoRoutine 2026-01-04T02:50:50.826707Z node 7 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] HandleResponse { Status: 48 TxId: 281474976710712 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2026-01-04T02:50:50.826724Z node 7 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Attempt 1 execution duration: 3ms 2026-01-04T02:50:50.826731Z node 7 :SQS TRACE: queue_leader.cpp:2426: Increment active message requests for [cloud4/000000000000000101v0/0]. ActiveMessageRequests: 1 2026-01-04T02:50:50.826751Z node 7 :SQS DEBUG: queue_leader.cpp:938: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] Received empty result from shard 0 infly. Infly capacity: 0. Messages count: 0 2026-01-04T02:50:50.826761Z node 7 :SQS DEBUG: queue_leader.cpp:1164: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] No known messages in this shard. 
Skip attempt to add messages to infly 2026-01-04T02:50:50.826768Z node 7 :SQS DEBUG: queue_leader.cpp:1170: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] Already tried to add messages to infly 2026-01-04T02:50:50.826797Z node 7 :SQS TRACE: queue_leader.cpp:2436: Decrement active message requests for [[cloud4/000000000000000101v0/0]. ActiveMessageRequests: 0 2026-01-04T02:50:50.826858Z node 7 :SQS TRACE: action.h:264: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] SendReplyAndDie from action actor { ReceiveMessage { RequestId: "9d1c01bd-af19819c-6fdea0e6-16e133ba" } } 2026-01-04T02:50:50.826884Z node 7 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Sending mkql execution result: { Status: 48 TxId: 281474976710712 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "messages" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Offset" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "SentTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } } } Value { Struct { Optional { } } } } } 2026-01-04T02:50:50.826912Z node 7 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Minikql data response: {"messages": []} 2026-01-04T02:50:50.826959Z node 7 :SQS TRACE: proxy_service.h:35: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] Sending sqs response: { ReceiveMessage { RequestId: "9d1c01bd-af19819c-6fdea0e6-16e133ba" } RequestId: "9d1c01bd-af19819c-6fdea0e6-16e133ba" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2026-01-04T02:50:50.826977Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] execution duration: 4ms 2026-01-04T02:50:50.827074Z node 7 :SQS DEBUG: queue_leader.cpp:384: Request ReceiveMessage working duration: 0ms 2026-01-04T02:50:50.827100Z node 7 :SQS DEBUG: queue_leader.cpp:556: Request [] Sending executed reply 2026-01-04T02:50:50.827106Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse ReceiveMessage { RequestId: "9d1c01bd-af19819c-6fdea0e6-16e133ba" } RequestId: "9d1c01bd-af19819c-6fdea0e6-16e133ba" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2026-01-04T02:50:50.827147Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7591333437358295399:2541]: ReceiveMessage { RequestId: "9d1c01bd-af19819c-6fdea0e6-16e133ba" } RequestId: "9d1c01bd-af19819c-6fdea0e6-16e133ba" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false 2026-01-04T02:50:50.827187Z node 7 :SQS DEBUG: queue_leader.cpp:1915: Handle oldest timestamp metrics for [cloud4/000000000000000101v0/3] 2026-01-04T02:50:50.827189Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7591333437358295400:3932]. 
Found: 1 2026-01-04T02:50:50.827269Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] HandleResponse: { ReceiveMessage { RequestId: "9d1c01bd-af19819c-6fdea0e6-16e133ba" } RequestId: "9d1c01bd-af19819c-6fdea0e6-16e133ba" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false }, status: OK 2026-01-04T02:50:50.827342Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [9d1c01bd-af19819c-6fdea0e6-16e133ba] Sending reply from proxy actor: { ReceiveMessage { RequestId: "9d1c01bd-af19819c-6fdea0e6-16e133ba" } RequestId: "9d1c01bd-af19819c-6fdea0e6-16e133ba" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: false } 2026-01-04T02:50:50.827467Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:401: http request [ReceiveMessage] requestId [9d1c01bd-af19819c-6fdea0e6-16e133ba] Got succesfult GRPC response. 2026-01-04T02:50:50.827514Z node 7 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ReceiveMessage] requestId [9d1c01bd-af19819c-6fdea0e6-16e133ba] reply ok 2026-01-04T02:50:50.881118Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [ReceiveMessage] requestId [9d1c01bd-af19819c-6fdea0e6-16e133ba] Send metering event. HttpStatusCode: 200 IsFifo: 0 FolderId: folder4 RequestSizeInBytes: 526 ResponseSizeInBytes: 154 SourceAddress: 1889:d87e:677b:0:89:d87e:677b:0 ResourceId: 000000000000000101v0 Action: ReceiveMessage 2026-01-04T02:50:50.881297Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:46608) <- (200 , 2 bytes) 2026-01-04T02:50:50.881421Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:46608) connection closed Http output full {} |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::ErroneousRequestGetRecords [GOOD] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> KqpSnapshotIsolation::ConflictWrite+IsOlap-FillTables [GOOD] >> KqpSnapshotIsolation::ConflictWrite+IsOlap+FillTables >> KqpPg::InsertNoTargetColumns_Alter+useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Alter-useSink |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TestYmqHttpProxy::TestListDeadLetterSourceQueues [GOOD] >> KqpEffects::RandomWithIndex-UseSecondaryIndex-UseSink [GOOD] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TestKinesisHttpProxy::GoodRequestCreateStream ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::InsertAbort_Select_Conflict-UseSink 
[GOOD] Test command err: Trying to start YDB, gRPC: 3124, MsgBus: 21839 2026-01-04T02:50:32.030195Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333359438707792:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:32.030415Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:32.055728Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:32.409073Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:32.409329Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:32.416071Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:32.478674Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:32.531273Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:32.532778Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333355143740392:2081] 1767495032010269 != 1767495032010272 2026-01-04T02:50:32.649372Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:32.666316Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:32.666352Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:32.666358Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:32.666428Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:33.034884Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:33.152046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:33.207838Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:33.319862Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:33.481731Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:33.558430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.517150Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333372323611458:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.517273Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.517668Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333372323611468:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.517737Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.888995Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.927989Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.964281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.996349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:36.031280Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:36.078004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:36.148919Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:36.204134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:36.284763Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333376618579640:2483], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:36.284835Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:36.286108Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333376618579646:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:36.286110Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333376618579645:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:36.286160Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:36.290424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:36.302314Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333376618579649:2488], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:36.400137Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333376618579700:3777] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:37.017501Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333359438707792:2141];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:37.019035Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 30115, MsgBus: 28020 2026-01-04T02:50:39.282599Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: flin ... ed, will use file: (empty maybe) 2026-01-04T02:50:45.934541Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:45.934552Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:45.934644Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:45.990695Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:46.301567Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:46.309007Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.358206Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.475338Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.522167Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-04T02:50:46.798468Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:48.849297Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333427582546454:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:48.849437Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:48.849824Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333427582546464:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:48.849889Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:48.898052Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:48.933065Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:48.957478Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:48.986011Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.015398Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.049985Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.081961Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.119268Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.188756Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333431877514628:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:49.188833Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:49.189105Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333431877514633:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:49.189506Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333431877514634:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:49.189575Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:49.194052Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:49.209394Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333431877514637:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:49.296735Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333431877514688:3762] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:50.788939Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333414697642745:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:50.789000Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:51.098643Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:51.621894Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7591333440467449692:2548], TxId: 281474976710677, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3em9z21k8h8jqe8tyrfkzv. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZDQ1NzY2OTgtNWY0ZDFjNDItNDUwZTA1NWQtNDNjNmQwNWQ=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: PRECONDITION_FAILED KIKIMR_CONSTRAINT_VIOLATION: {
: Error: Conflict with existing key., code: 2012 }. 2026-01-04T02:50:51.622170Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [3:7591333440467449694:2549], TxId: 281474976710677, task: 2. Ctx: { TraceId : 01ke3em9z21k8h8jqe8tyrfkzv. CheckpointId : . CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=3&id=ZDQ1NzY2OTgtNWY0ZDFjNDItNDUwZTA1NWQtNDNjNmQwNWQ=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [3:7591333440467449689:2521], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-04T02:50:51.622598Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZDQ1NzY2OTgtNWY0ZDFjNDItNDUwZTA1NWQtNDNjNmQwNWQ=, ActorId: [3:7591333440467449544:2521], ActorState: ExecuteState, LegacyTraceId: 01ke3em9z21k8h8jqe8tyrfkzv, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Conflict with existing key." issue_code: 2012 severity: 1 } trace_id# ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpReattach::ReattachDeliveryProblem [GOOD] Test command err: Trying to start YDB, gRPC: 15308, MsgBus: 2987 ... waiting for SysViewsRoster update finished 2026-01-04T02:50:33.254777Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:33.381734Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:50:33.382176Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:50:33.382242Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:50:33.668344Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:33.668500Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:33.739511Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495030561709 != 1767495030561712 2026-01-04T02:50:33.753295Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:33.796298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:33.925494Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:50:34.258458Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:34.258519Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:34.258573Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:34.258752Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:34.270891Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:34.574778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:34.668667Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.949196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.324057Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.605828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:36.429725Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1905:3513], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:36.430137Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:36.431252Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1978:3532], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:36.431420Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:36.472573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:36.660629Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:36.915714Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:37.206766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:37.458364Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:37.713286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:38.005442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:38.295740Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:38.638273Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2786:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:38.638404Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:38.638834Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2790:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:38.638910Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:38.638988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:38.645286Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:38.813763Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2795:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:38.874105Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2855:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } FALLBACK_ACTOR_LOGGING;priority=WARN;component=0;fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=;tx_id=3; FALLBACK_ACTOR_LOGGING;priority=WARN;component=0;fline=events.h:105;event=ev_write_error;status=STATUS_OVERLOADED;details=;tx_id=5; Trying to start YDB, gRPC: 20625, MsgBus: 8537 ... waiting for SysViewsRoster update finished 2026-01-04T02:50:44.987086Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script ex ... State: Connecting -> Connected 2026-01-04T02:50:45.437205Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:50:45.821616Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:45.821670Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:45.821707Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:45.822515Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:45.834598Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:46.171419Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:46.237141Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.466172Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.894793Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:47.194405Z node 2 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:47.745703Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1901:3507], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:47.745921Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:47.746820Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1974:3526], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:47.747040Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:47.776695Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:47.980370Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:48.228225Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:48.501548Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:48.749732Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.039174Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.333922Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.629632Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:50.023891Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2788:4167], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:50.024058Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:50.024596Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2792:4171], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:50.024697Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:50.024789Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:2795:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:50.031350Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:50.203461Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:2797:4176], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:50.249369Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:2857:4217] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:52.003024Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1288: SelfId: [2:3125:4400], Table: `/Root/TwoShard` ([72057594046644480:38:1]), SessionActorId: [2:3102:4400]TEvDeliveryProblem was received from tablet: 72075186224037888 2026-01-04T02:50:52.013671Z node 2 :KQP_COMPUTE WARN: kqp_write_actor.cpp:1288: SelfId: [2:3125:4400], Table: `/Root/TwoShard` ([72057594046644480:38:1]), SessionActorId: [2:3102:4400]TEvDeliveryProblem was received from tablet: 72075186224037888 2026-01-04T02:50:52.014459Z node 2 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [2:3118:4400], SessionActorId: [2:3102:4400], statusCode=UNDETERMINED. Issue=
: Error: State of operation is unknown. Error writing to table `/Root/TwoShard`. Transaction state unknown for tablet 72075186224037888., code: 2026 . sessionActorId=[2:3102:4400]. 2026-01-04T02:50:52.014855Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=2&id=NTI3MDUyOTQtMjE2M2M4M2ItYmI0ZjRiNmYtZTJiOGE3ZGQ=, ActorId: [2:3102:4400], ActorState: ExecuteState, LegacyTraceId: 01ke3emajy61mk2r38pcvssz6y, got TEvKqpBuffer::TEvError in ExecuteState, status: UNDETERMINED send to: [2:3119:4400] from: [2:3118:4400] trace_id# 2026-01-04T02:50:52.015051Z node 2 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [2:3119:4400] TxId: 281474976715673. Ctx: { TraceId: 01ke3emajy61mk2r38pcvssz6y, Database: /Root, SessionId: ydb://session/3?node_id=2&id=NTI3MDUyOTQtMjE2M2M4M2ItYmI0ZjRiNmYtZTJiOGE3ZGQ=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# UNDETERMINED Issues# {
: Error: State of operation is unknown. Error writing to table `/Root/TwoShard`. Transaction state unknown for tablet 72075186224037888., code: 2026 } trace_id# 2026-01-04T02:50:52.015525Z node 2 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=2&id=NTI3MDUyOTQtMjE2M2M4M2ItYmI0ZjRiNmYtZTJiOGE3ZGQ=, ActorId: [2:3102:4400], ActorState: ExecuteState, LegacyTraceId: 01ke3emajy61mk2r38pcvssz6y, Create QueryResponse for error on request, msg: status# UNDETERMINED issues# { message: "State of operation is unknown. Error writing to table `/Root/TwoShard`. Transaction state unknown for tablet 72075186224037888." issue_code: 2026 severity: 1 } trace_id# |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> RetryPolicy::RetryWithBatching [GOOD] |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> KqpFail::OnCommit [GOOD] |95.9%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> TestYmqHttpProxy::TestListQueueTags >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamWithInitialScanTests::MeteringDedicated [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:50:02.898375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:50:02.898484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:02.898544Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:50:02.898588Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:50:02.898630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:50:02.898656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:50:02.898714Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:02.898782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:50:02.899638Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:50:02.901858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:50:03.000059Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:50:03.000138Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:03.016481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:50:03.016782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:50:03.016997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:50:03.027108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:50:03.028657Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:50:03.031237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:50:03.035306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:50:03.043790Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:03.045372Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:50:03.051741Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:50:03.051842Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:03.052004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:50:03.052057Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:50:03.052255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:50:03.053144Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:50:03.254190Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.256164Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 
281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.256337Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.256479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.256557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.256636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.256704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.256777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.256890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.256976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.257087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.257167Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.257250Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.257346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.257435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
1120: NTableState::TProposedWaitParts operationId# 281474976715657:1 ProgressState at tablet: 72075186233409546 2026-01-04T02:50:47.751553Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72075186233409546 2026-01-04T02:50:47.751593Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409546] TDone opId# 281474976715657:0 ProgressState 2026-01-04T02:50:47.751659Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:0 progress is 2/3 2026-01-04T02:50:47.751682Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2026-01-04T02:50:47.751741Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:0 progress is 2/3 2026-01-04T02:50:47.751769Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 2/3 2026-01-04T02:50:47.751811Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: false 2026-01-04T02:50:47.752615Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 33 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2026-01-04T02:50:47.752767Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 33 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2026-01-04T02:50:47.752834Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2026-01-04T02:50:47.752903Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 33], version: 5 2026-01-04T02:50:47.752977Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 33] was 4 2026-01-04T02:50:47.754133Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2026-01-04T02:50:47.754212Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 5 PathOwnerId: 72075186233409546, cookie: 281474976715657 2026-01-04T02:50:47.754249Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 281474976715657 2026-01-04T02:50:47.754276Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 281474976715657, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 5 
2026-01-04T02:50:47.754303Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 6 2026-01-04T02:50:47.754390Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 2/3, is published: true 2026-01-04T02:50:47.759496Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2026-01-04T02:50:47.759634Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 281474976715657 2026-01-04T02:50:47.772286Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6725: Handle TEvProposeTransactionResult, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 400 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1040 } } CommitVersion { Step: 400 TxId: 281474976715657 } 2026-01-04T02:50:47.772363Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2026-01-04T02:50:47.772561Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 400 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1040 } } CommitVersion { Step: 400 TxId: 281474976715657 } 2026-01-04T02:50:47.772721Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72075186233409546, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 281474976715657 Step: 400 OrderId: 281474976715657 ExecLatency: 0 ProposeLatency: 5 DomainCoordinators: 72075186233409547 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1040 } } CommitVersion { Step: 400 TxId: 281474976715657 } debug: NTableState::TProposedWaitParts operationId# 281474976715657:1 2026-01-04T02:50:47.774029Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72075186233409546, at schemeshard: 72075186233409546, message: Source { RawX1: 1253 RawX2: 85899349061 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2026-01-04T02:50:47.774129Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 281474976715657, tablet: 72075186233409552, partId: 1 2026-01-04T02:50:47.774341Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546, message: Source { RawX1: 1253 RawX2: 85899349061 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2026-01-04T02:50:47.774434Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts 
operationId# 281474976715657:1 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72075186233409546 2026-01-04T02:50:47.774571Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 281474976715657:1 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72075186233409546 message: Source { RawX1: 1253 RawX2: 85899349061 } Origin: 72075186233409552 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2026-01-04T02:50:47.774686Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 281474976715657:1, shardIdx: 72075186233409546:4, shard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72075186233409546 2026-01-04T02:50:47.774746Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2026-01-04T02:50:47.774797Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 281474976715657:1, datashard: 72075186233409552, at schemeshard: 72075186233409546 2026-01-04T02:50:47.774876Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:1 129 -> 240 2026-01-04T02:50:47.776368Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2026-01-04T02:50:47.777737Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2026-01-04T02:50:47.778057Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:1, at schemeshard: 72075186233409546 2026-01-04T02:50:47.778136Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72075186233409546] TDone opId# 281474976715657:1 ProgressState 2026-01-04T02:50:47.778372Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:1 progress is 3/3 2026-01-04T02:50:47.778429Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2026-01-04T02:50:47.778495Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715657:1 progress is 3/3 2026-01-04T02:50:47.778551Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2026-01-04T02:50:47.778618Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 3/3, is published: true 2026-01-04T02:50:47.778712Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715657 ready parts: 3/3 2026-01-04T02:50:47.778789Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:0 2026-01-04T02:50:47.778840Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976715657:0 2026-01-04T02:50:47.778950Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 33] was 3 2026-01-04T02:50:47.778995Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:1 2026-01-04T02:50:47.779018Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976715657:1 2026-01-04T02:50:47.779132Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 5 2026-01-04T02:50:47.779172Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715657:2 2026-01-04T02:50:47.779194Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976715657:2 2026-01-04T02:50:47.779222Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 |95.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> KqpPg::LongDomainName [GOOD] |95.9%| [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD] >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpEffects::RandomWithIndex-UseSecondaryIndex-UseSink [GOOD] Test command err: Trying to start YDB, gRPC: 27300, MsgBus: 61665 2026-01-04T02:50:33.103800Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333363154078986:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:33.117879Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:33.161400Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:33.428113Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:33.446637Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:33.446770Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:33.472878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:33.529507Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:33.537939Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333363154078956:2081] 1767495033100518 != 1767495033100521 2026-01-04T02:50:33.606513Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable 
config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:33.606537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:33.606555Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:33.606645Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:33.718001Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:33.992662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:34.037522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.120042Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:34.162112Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.288880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.351520Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:36.228701Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333376038982726:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:36.228818Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:36.230243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333376038982736:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:36.230309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:36.597949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:36.636994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:36.665918Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:36.694796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:36.731419Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:36.768017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:36.830198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:36.871129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:36.951778Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333376038983606:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:36.951849Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:36.952299Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333376038983611:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:36.952338Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333376038983612:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:36.952378Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:36.957165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:36.971660Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333376038983615:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:37.055339Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333380333950962:3769] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:38.103899Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333363154078986:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:38.103992Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 6411, MsgBus: 21832 2026-01-04T02:50:39.570059Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: flin ... tect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:45.658501Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:45.725313Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:45.725393Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:45.726752Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:45.728696Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591333414182161726:2081] 1767495045646931 != 1767495045646934 2026-01-04T02:50:45.750303Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:45.846668Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:45.846690Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:45.846698Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:45.846778Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:45.852098Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:46.219112Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:46.227768Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first 
GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.281136Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.433672Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.494957Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.652738Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:48.851873Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333427067065483:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:48.851958Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:48.852169Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333427067065492:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:48.852202Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:48.919654Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:48.947960Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:48.985208Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.014823Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.042630Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.102636Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.131031Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.175210Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.247505Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333431362033658:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:49.247589Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:49.247628Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333431362033663:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:49.247814Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333431362033665:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:49.247866Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:49.251222Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:49.260733Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333431362033666:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:49.362146Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333431362033718:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:50.647941Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333414182161757:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:50.648026Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:51.052236Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:51.150048Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |96.0%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpFail::OnCommit [GOOD] Test command err: Trying to start YDB, gRPC: 13224, MsgBus: 13142 2026-01-04T02:50:24.188942Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333325583940620:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:24.190053Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:24.209633Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:24.447163Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:24.505460Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:24.505613Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:24.507658Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:24.549539Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:24.658286Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will 
use file: (empty maybe) 2026-01-04T02:50:24.658305Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:24.658315Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:24.658382Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:24.671771Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:25.161239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:25.174975Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:25.194008Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:25.219336Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:25.421883Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:25.604763Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:25.684403Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.460021Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333338468844309:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:27.460148Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:27.460649Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333338468844319:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:27.460726Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:27.859788Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.890811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.922248Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.957276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:27.989351Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.027444Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.067100Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.113833Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:28.190288Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333342763812492:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.190422Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.190479Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333342763812497:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.190664Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333342763812499:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.190742Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:28.194307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:28.205538Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333342763812501:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:28.286303Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333342763812554:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:29.180969Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333325583940620:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:29.181038Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 31986, MsgBus: 8692 ... waiting for SysViewsRoster update finished 2026-01-04T02:50:34.258304Z node 2 :KQP_PROX ... D_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:114:2161], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:50:45.987742Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:50:45.987975Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:50:46.252059Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:46.252180Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:46.275482Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:34:2081] 1767495042888765 != 1767495042888769 2026-01-04T02:50:46.283608Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:46.327816Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:46.469486Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:50:46.848017Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:46.848091Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:46.848132Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:46.848703Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:46.860239Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:47.252130Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:47.298249Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:47.550623Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:47.914595Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:48.180547Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:48.708366Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1903:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:48.708830Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:48.709786Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:1977:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:48.709908Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:48.741713Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:48.956402Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.188976Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.469602Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.710568Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.973222Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:50.250704Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:50.562869Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:50.931301Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2789:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:50.931446Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:50.931916Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2793:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:50.931992Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:50.932164Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:2796:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:50.938823Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:51.117047Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:2798:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:51.163520Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:2858:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:53.715332Z node 3 :RPC_REQUEST WARN: rpc_execute_query.cpp:487: Client lost 2026-01-04T02:50:53.716344Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:3183:4426] TxId: 281474976715674. Ctx: { TraceId: 01ke3embde053g3sw26ffqf47m, Database: /Root, SessionId: ydb://session/3?node_id=3&id=ZWY2MzZhZTItOGNhYTFjYTgtYTRjODNkODctNGRjODkxMGY=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# ABORTED Issues# {
: Error: Client lost } trace_id# 2026-01-04T02:50:53.716818Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=ZWY2MzZhZTItOGNhYTFjYTgtYTRjODNkODctNGRjODkxMGY=, ActorId: [3:3127:4426], ActorState: ExecuteState, LegacyTraceId: 01ke3embde053g3sw26ffqf47m, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Client lost" severity: 1 } trace_id# |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest >> RetryPolicy::RetryWithBatching [GOOD] Test command err: 2026-01-04T02:44:34.395757Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.395849Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.395878Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.000000s 2026-01-04T02:44:34.398284Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-04T02:44:34.398576Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.398615Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.402163Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006820s 2026-01-04T02:44:34.402671Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. Description: 2026-01-04T02:44:34.402711Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.402749Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.402807Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.006236s 2026-01-04T02:44:34.403204Z :ERROR: [db] [sessionid] [cluster] Got error. Status: INTERNAL_ERROR. 
Description: 2026-01-04T02:44:34.403229Z :DEBUG: [db] [sessionid] [cluster] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.403252Z :DEBUG: [db] [sessionid] [cluster] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T02:44:34.403322Z :DEBUG: [db] [sessionid] [cluster] Reconnecting session to cluster cluster in 0.007082s 2026-01-04T02:44:34.429720Z :TWriteSession_TestPolicy INFO: Random seed for debugging is 1767494674429689 2026-01-04T02:44:34.768541Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591331821537543994:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:44:34.768603Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:44:34.834690Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:44:34.838335Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591331821547659459:2151];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:44:34.838392Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:44:34.857819Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:44:35.002723Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:44:35.002302Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:44:35.171892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:44:35.171995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:44:35.177104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:44:35.177193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:44:35.238493Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:44:35.244263Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:44:35.245888Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:35.247158Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:44:35.249593Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:44:35.282592Z node 2 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:44:35.560385Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/003089/r3tmp/yandexFBmm4I.tmp 2026-01-04T02:44:35.560480Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/003089/r3tmp/yandexFBmm4I.tmp 2026-01-04T02:44:35.561714Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/003089/r3tmp/yandexFBmm4I.tmp 2026-01-04T02:44:35.561833Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:44:35.775433Z INFO: TTestServer started on Port 2203 GrpcPort 4626 2026-01-04T02:44:35.775402Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:44:35.778795Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:44:35.843529Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:4626 2026-01-04T02:44:36.005822Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:38.047296Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331838727528884:2302], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:38.047387Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:38.047645Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331838727528893:2303], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:38.047744Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:38.048985Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591331838727528898:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:38.051891Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331838717414488:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:38.051956Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331838717414494:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:38.052014Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:38.052529Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331838717414504:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:38.052591Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:38.056382Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:44:38.077231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331838717414538:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:38.077315Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:38.077494Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591331838717414540:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:38.077575Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:44:38.116199Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591331838727528901:2132] txid# 281474976720657, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: pat ... b2c9f871-a7e7794b-f9d35bf7_0] Write session: acknoledged message 9 2026-01-04T02:50:51.471716Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|952e6f41-b2c9f871-a7e7794b-f9d35bf7_0] Write session: acknoledged message 10 2026-01-04T02:50:51.471993Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|952e6f41-b2c9f871-a7e7794b-f9d35bf7_0] Write session: close. Timeout = 0 ms 2026-01-04T02:50:51.472065Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|952e6f41-b2c9f871-a7e7794b-f9d35bf7_0] Write session will now close 2026-01-04T02:50:51.472137Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|952e6f41-b2c9f871-a7e7794b-f9d35bf7_0] Write session: aborting 2026-01-04T02:50:51.473053Z :INFO: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|952e6f41-b2c9f871-a7e7794b-f9d35bf7_0] Write session: gracefully shut down, all writes complete 2026-01-04T02:50:51.473083Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|952e6f41-b2c9f871-a7e7794b-f9d35bf7_0] Write session is aborting and will not restart 2026-01-04T02:50:51.473160Z :DEBUG: [/Root] MessageGroupId [test-message-group-id] SessionId [test-message-group-id|952e6f41-b2c9f871-a7e7794b-f9d35bf7_0] Write session: destroy 2026-01-04T02:50:51.473315Z node 17 :PQ_WRITE_PROXY DEBUG: write_session_actor.cpp:254: session v1 cookie: 7 sessionId: test-message-group-id|952e6f41-b2c9f871-a7e7794b-f9d35bf7_0 grpc read done: success: 0 data: 2026-01-04T02:50:51.473349Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:256: session v1 cookie: 7 sessionId: test-message-group-id|952e6f41-b2c9f871-a7e7794b-f9d35bf7_0 grpc read failed 2026-01-04T02:50:51.473780Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:824: session v1 closed cookie: 7 sessionId: test-message-group-id|952e6f41-b2c9f871-a7e7794b-f9d35bf7_0 2026-01-04T02:50:51.473835Z node 17 :PQ_WRITE_PROXY INFO: write_session_actor.cpp:304: session v1 cookie: 7 sessionId: test-message-group-id|952e6f41-b2c9f871-a7e7794b-f9d35bf7_0 is DEAD 2026-01-04T02:50:51.474334Z node 17 :PQ_WRITE_PROXY DEBUG: writer.cpp:562: TPartitionWriter 72075186224037892 (partition=0) Received event: NActors::TEvents::TEvPoison 2026-01-04T02:50:51.474512Z node 17 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037892] server disconnected, pipe [17:7591333442887471614:2609] destroyed 2026-01-04T02:50:51.474564Z node 17 :PERSQUEUE DEBUG: partition_write.cpp:140: [72075186224037892][Partition][0][StateIdle] TPartition::DropOwner. 
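[Editor's note] For readers skimming this test block: the lines near its start ("Got error. Status: INTERNAL_ERROR. ... Reconnecting session to cluster cluster in 0.006820s", then 0.006236s, 0.007082s) show the write/read session's retry policy scheduling reconnects after short, slightly varying delays, which is the behaviour RetryPolicy::RetryWithBatching exercises. The snippet below is a minimal, self-contained C++ sketch of that general pattern (jittered, capped backoff). It is illustrative only: it does not use the actual YDB SDK types or the real retry-policy interface (neither is visible in this log), and every name in it is hypothetical.

// Illustrative sketch only -- NOT YDB SDK code. Mirrors the "retry after a
// short, growing delay" behaviour seen in the log above.
#include <algorithm>
#include <chrono>
#include <cstdio>
#include <random>
#include <thread>

enum class EStatus { Ok, InternalError };

// Stand-in for a connection attempt; in the real test this is the
// persqueue session talking to the cluster. Hypothetical helper.
static EStatus TryConnect(int attempt) {
    return attempt < 3 ? EStatus::InternalError : EStatus::Ok;
}

int main() {
    std::mt19937 rng{42};
    std::uniform_real_distribution<double> jitter(0.8, 1.2);

    std::chrono::duration<double> delay{0.0};          // first retry is immediate, as in the log
    const std::chrono::duration<double> maxDelay{5.0}; // cap on the backoff

    for (int attempt = 0; ; ++attempt) {
        if (TryConnect(attempt) == EStatus::Ok) {
            std::printf("attempt %d: connected\n", attempt);
            break;
        }
        std::printf("attempt %d: INTERNAL_ERROR, reconnecting in %.6fs\n",
                    attempt, delay.count());
        std::this_thread::sleep_for(delay);
        // Grow the delay with jitter, capped at maxDelay.
        const double next =
            (delay.count() == 0.0 ? 0.005 : delay.count() * 2.0) * jitter(rng);
        delay = std::min(std::chrono::duration<double>{next}, maxDelay);
    }
    return 0;
}

Running this prints a sequence of reconnect delays that grows roughly geometrically with a random factor, analogous to the delays recorded by the session in the log; the exact constants (initial 5 ms step, factor 2, 5 s cap) are assumptions chosen for the sketch, not values taken from the test.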
2026-01-04T02:50:51.474610Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:50:51.474633Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:51.474658Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:51.474683Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:51.474701Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:50:51.478408Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:50:51.478445Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:51.478468Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:51.478493Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:51.478511Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:50:51.578712Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:50:51.578751Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:51.578771Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:51.578794Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:51.578812Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:50:51.679118Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:50:51.679168Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:51.679192Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:51.679220Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:51.679237Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:50:51.779456Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:50:51.779501Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:51.779522Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] 
Process user action and tx pending commits 2026-01-04T02:50:51.779546Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:51.779563Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:50:51.880596Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:50:51.880658Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:51.880683Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:51.880709Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:51.880727Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:50:51.981209Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:50:51.981262Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:51.981290Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:51.981336Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:51.981358Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:50:52.081562Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:50:52.081612Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:52.081640Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:52.081673Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:52.081694Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:50:52.181889Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:50:52.181934Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:52.181960Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:52.181987Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:52.182019Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:50:52.282258Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: 
[72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:50:52.282307Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:52.282335Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:52.282363Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:52.282384Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:50:52.382666Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:50:52.382711Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:52.382734Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:52.382759Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:52.382778Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:50:52.482916Z node 17 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:50:52.482960Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:52.482989Z node 17 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:52.483016Z node 17 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:52.483036Z node 17 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |96.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::ConflictingKeyW1WRR2 [GOOD] Test command err: Trying to start YDB, gRPC: 25807, MsgBus: 5889 2026-01-04T02:50:34.804432Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333368566386588:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:34.804666Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:35.032765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:35.032911Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:35.058138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:35.093334Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:35.114536Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:35.206634Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:35.206657Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:35.206665Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:35.206754Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:35.251801Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:35.605847Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:35.660021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.807965Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.817172Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:35.980015Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:36.048146Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:37.849997Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333381451290303:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:37.850511Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:37.850856Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333381451290313:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:37.850890Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:38.168013Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:38.195876Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:38.224245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:38.250784Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:38.282982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:38.313964Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:38.370003Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:38.414148Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:38.485112Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333385746258476:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:38.485191Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:38.485195Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333385746258481:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:38.485337Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333385746258483:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:38.485387Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:38.488749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:38.498346Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333385746258484:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:38.568425Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333385746258536:3763] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:39.804782Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333368566386588:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:39.804834Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:40.082991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 22030, MsgBus: 10039 2026-01-04T02:50:41.557572Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undeliver ... LASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:48.301451Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:48.420296Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:48.650533Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:48.662282Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:48.705079Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:48.821879Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:48.910670Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.156471Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:51.249906Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333443722027049:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:51.249998Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:51.250310Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333443722027058:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:51.250382Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:51.331684Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:51.371660Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:51.405835Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:51.437381Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:51.469516Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:51.504334Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:51.538783Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:51.581408Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:51.672053Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333443722027929:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:51.672151Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:51.672256Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333443722027934:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:51.672318Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333443722027936:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:51.672357Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:51.675754Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:51.686080Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333443722027938:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:51.784674Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333443722027989:3765] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:53.151112Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333430837123351:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:53.151996Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:53.437937Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:53.946975Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2068: SelfId: [3:7591333452311963165:2533], TxId: 281474976710678, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3emch949jyaxeb9xhf317k. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=YzE3MzdlNGYtMzViMzU0MWUtNDI2MWU0NmMtMWMwZjBiZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. Source[0] fatal error: {
: Error: Read request aborted subissue: {
: Error: Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready) } } 2026-01-04T02:50:53.950463Z node 3 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [3:7591333452311963165:2533], TxId: 281474976710678, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3emch949jyaxeb9xhf317k. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=3&id=YzE3MzdlNGYtMzViMzU0MWUtNDI2MWU0NmMtMWMwZjBiZQ==. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: ABORTED DEFAULT_ERROR: {
: Error: Read request aborted subissue: {
: Error: Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready) } }. 2026-01-04T02:50:53.951443Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YzE3MzdlNGYtMzViMzU0MWUtNDI2MWU0NmMtMWMwZjBiZQ==, ActorId: [3:7591333452311962891:2533], ActorState: ExecuteState, LegacyTraceId: 01ke3emch949jyaxeb9xhf317k, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Read request aborted" severity: 1 issues { message: "Read conflict with concurrent transaction (shard# 72075186224037927 node# 3 state# Ready)" severity: 1 } } trace_id# |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] >> KqpPg::CheckPgAutoParams-useSink [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::LongDomainName [GOOD] Test command err: Trying to start YDB, gRPC: 26621, MsgBus: 32145 2026-01-04T02:48:51.646291Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332926590079069:2137];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:51.647016Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:51.969657Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:52.006427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:52.006512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:52.102929Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:52.113657Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:52.164795Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:52.320154Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:52.320176Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:52.320190Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:52.320288Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:52.681947Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:52.997741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:53.006686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:55.210959Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332943769949044:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.210988Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332943769949052:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.211042Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.211323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332943769949058:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.211355Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.215553Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:55.233285Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332943769949059:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:48:55.408483Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332943769949111:2539] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 19590, MsgBus: 1634 2026-01-04T02:48:56.304624Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591332949888465953:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:56.304673Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:56.343107Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:56.430327Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591332949888465916:2081] 1767494936303334 != 1767494936303337 2026-01-04T02:48:56.436169Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:56.449452Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:56.449535Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:56.452088Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:56.515670Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:56.515693Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:56.515700Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:56.515786Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:56.578857Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:56.896465Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:56.903335Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:48:57.321632Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:59.563091Z node 2 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332962773368688:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:59.563199Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:59.563649Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332962773368700:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:59.563690Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591332962773368701:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:59.563806Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:59.567615Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:59.581628Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591332962773368704:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:48:59.672725Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591332962773368755:2533] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 62920, MsgBus: 27561 2026-01-04T02:49:00.718403Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591332963892288826:2083];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:49:00.734345Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:49:00.755580Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:49:00.836090Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... lt, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:45.293789Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7591333417401768831:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:45.293994Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:45.297988Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7591333417401768846:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:45.298125Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:45.299121Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:45.311119Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7591333417401768845:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:50:45.410976Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:7591333417401768898:2537] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:45.419915Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7591333395926931507:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:45.420001Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:45.446413Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"0","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["aid (null, 3)","aid [7, 7]","abalance"],"E-Cost":"0","ReadRangesExpectedSize":"2"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Operators":[{"Inputs":[{"ExternalPlanNodeId":2}],"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (null, 3)","aid [7, 7]"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":2,"Plans":[{"PlanNodeId":4,"Operators":[{"E-Size":"0","Name":"TableRangeScan","E-Rows":"0","Table":"pgbench_accounts","ReadRangesKeys":["aid"],"ReadColumns":["aid (null, 3)","aid [7, 7]","abalance"],"E-Cost":"0","ReadRangesExpectedSize":"2"}],"Node Type":"TableRangeScan"}],"Operators":[{"SortBy":"input.abalance","Name":"Sort"}],"Node Type":"Sort"}],"Node Type":"ResultSet_1","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} {"Plan":{"Plans":[{"PlanNodeId":4,"Plans":[{"PlanNodeId":3,"Plans":[{"PlanNodeId":2,"Plans":[{"Tables":["pgbench_accounts"],"PlanNodeId":1,"Operators":[{"Scan":"Parallel","ReadRange":["aid (4, 3)"],"E-Size":"0","Name":"TableRangeScan","Inputs":[],"Path":"\/Root\/pgbench_accounts","E-Rows":"1","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"UnionAll","PlanNodeType":"Connection"}],"Node Type":"Collect"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","Stats":{"ResourcePoolId":"default"},"PlanNodeType":"Query"},"meta":{"version":"0.2","type":"query"},"tables":[{"name":"\/Root\/pgbench_accounts","reads":[{"columns":["abalance"],"scan_by":["aid (4, 
3)"],"type":"Scan"}]}],"SimplifiedPlan":{"PlanNodeId":0,"Plans":[{"PlanNodeId":1,"Plans":[{"PlanNodeId":4,"Operators":[{"ReadRange":["aid (4, 3)"],"E-Size":"0","Name":"TableRangeScan","E-Rows":"1","Table":"pgbench_accounts","ReadColumns":["abalance"],"E-Cost":"0"}],"Node Type":"TableRangeScan"}],"Node Type":"ResultSet","PlanNodeType":"ResultSet"}],"Node Type":"Query","OptimizerStats":{"EquiJoinsCount":0,"JoinsCount":0},"PlanNodeType":"Query"}} Trying to start YDB, gRPC: 12145, MsgBus: 20821 2026-01-04T02:50:48.682022Z node 11 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[11:7591333430402295444:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:48.682155Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:48.704801Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/script_executions 2026-01-04T02:50:48.830252Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:48.833085Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:48.833208Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:48.859678Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:48.918582Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/script_executions 2026-01-04T02:50:48.922680Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:48.922705Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:48.922718Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:48.922835Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:49.688675Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:49.689968Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:53.610945Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333451877132772:2324], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, 
Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:53.611055Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333451877132781:2327], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:53.611151Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:53.615940Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:53.626005Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7591333451877132786:2328], DatabaseId: /aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:50:53.682054Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7591333430402295444:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:53.682203Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:53.719326Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7591333451877132851:2540] txid# 281474976715659, issues: { message: "Check failed: path: \'/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:53.753609Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::InsertConflictTxAborted [GOOD] Test command err: Trying to start YDB, gRPC: 9030, MsgBus: 7363 2026-01-04T02:50:35.965468Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333373578306059:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:35.965670Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:36.208938Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:36.266952Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:36.267078Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:36.293317Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:36.297937Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333373578306025:2081] 1767495035963827 != 1767495035963830 2026-01-04T02:50:36.325274Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:36.364493Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:36.434500Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:36.434525Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:36.434535Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:36.434608Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:36.867599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:36.875452Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:50:36.919241Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:36.986773Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:37.071856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:37.225004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:37.285640Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.105899Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333390758177092:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:39.106099Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:39.106449Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333390758177102:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:39.106519Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:39.459814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.493195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.524782Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.560741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.597705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.634322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.671725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.719179Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:39.780483Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333390758177975:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:39.780559Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:39.780848Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333390758177980:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:39.780889Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333390758177981:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:39.780918Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:39.784898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:39.795693Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333390758177984:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:39.876291Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333390758178035:3771] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:40.965573Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333373578306059:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:40.965659Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:41.301826Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... hard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.561706Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.614851Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.808762Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:52.006945Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333444210614642:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:52.007064Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:52.007304Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333444210614651:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:52.007358Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:52.078878Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:52.107026Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:52.139937Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:52.171173Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:52.203467Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:52.235983Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:52.310579Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:52.353159Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:52.426763Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333444210615519:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:52.426889Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:52.427020Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333444210615524:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:52.427140Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333444210615526:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:52.427201Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:52.431293Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:52.441708Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333444210615528:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:50:52.514729Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333444210615579:3770] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:53.803375Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333427030743621:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:53.803460Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:54.013145Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:54.331124Z node 3 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=4; 2026-01-04T02:50:54.331347Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 4 at tablet 72075186224037927 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-04T02:50:54.331478Z node 3 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 4 at tablet 72075186224037927 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-04T02:50:54.331693Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [3:7591333452800550686:2530], Table: `/Root/TestImmediateEffects` ([72057594046644480:54:1]), SessionActorId: [3:7591333448505583180:2530]Got CONSTRAINT VIOLATION for table `/Root/TestImmediateEffects`. ShardID=72075186224037927, Sink=[3:7591333452800550686:2530].{
: Error: Conflict with existing key., code: 2012 } 2026-01-04T02:50:54.331795Z node 3 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [3:7591333452800550677:2530], SessionActorId: [3:7591333448505583180:2530], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[3:7591333448505583180:2530]. 2026-01-04T02:50:54.332098Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=3&id=YTRhNWRjZWMtNDA4MGU5NDItNzE5ZjRiNmEtZGYyOTFlMzA=, ActorId: [3:7591333448505583180:2530], ActorState: ExecuteState, LegacyTraceId: 01ke3emctn1b5ndeg4nd3zhtnz, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [3:7591333452800550678:2530] from: [3:7591333452800550677:2530] trace_id# 2026-01-04T02:50:54.332201Z node 3 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [3:7591333452800550678:2530] TxId: 281474976710675. Ctx: { TraceId: 01ke3emctn1b5ndeg4nd3zhtnz, Database: /Root, SessionId: ydb://session/3?node_id=3&id=YTRhNWRjZWMtNDA4MGU5NDItNzE5ZjRiNmEtZGYyOTFlMzA=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `/Root/TestImmediateEffects`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-04T02:50:54.332457Z node 3 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=3&id=YTRhNWRjZWMtNDA4MGU5NDItNzE5ZjRiNmEtZGYyOTFlMzA=, ActorId: [3:7591333448505583180:2530], ActorState: ExecuteState, LegacyTraceId: 01ke3emctn1b5ndeg4nd3zhtnz, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `/Root/TestImmediateEffects`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest |96.0%| [TA] $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TestKinesisHttpProxy::TestWrongRequest [GOOD] |96.0%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/src/client/persqueue_public/ut/with_offset_ranges_mode_ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestSendMessageBatch [GOOD] Test command err: 2026-01-04T02:50:00.808996Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333223318937248:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:00.809417Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:01.076665Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:01.076757Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:01.137985Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:01.154051Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:01.207505Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333223318937211:2081] 1767495000805918 != 1767495000805921 2026-01-04T02:50:01.229373Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:01.389916Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:01.482218Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:01.482243Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:01.482256Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to 
initialize from file: (empty maybe) 2026-01-04T02:50:01.482349Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:01.822040Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:01.944770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:01.950346Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:02.137793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:50:02.146878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:50:02.148869Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:50:02.170484Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:50:02.227196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.408191Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.456278Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-04T02:50:02.460924Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.510035Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-01-04T02:50:02.516301Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.553017Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.602500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.649982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.720928Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.760415Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.794573Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:04.053285Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333240498808051:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.053402Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.053760Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333240498808060:2369], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.053795Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.055455Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333240498808065:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.064117Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:04.080836Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333240498808067:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:50:04.176898Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333240498808118:3101] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:05.060428Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.102708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.148954Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_tabl ... 6a0b8062135ae945eca30bf" MessageId: "c62463fd-1e98eece-23e6fd85-a8226569" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "b535109c-c8439fbb-970bae46-bc7642bc" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true } 2026-01-04T02:50:55.332360Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7591333458680453283:3740]. Found: 1 2026-01-04T02:50:55.332455Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse SendMessageBatch { RequestId: "b535109c-c8439fbb-970bae46-bc7642bc" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "ca34f1e1-59283113-630558aa-1f11d764" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "c62463fd-1e98eece-23e6fd85-a8226569" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "b535109c-c8439fbb-970bae46-bc7642bc" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2026-01-04T02:50:55.332611Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7591333458680453279:2498]: SendMessageBatch { RequestId: "b535109c-c8439fbb-970bae46-bc7642bc" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "ca34f1e1-59283113-630558aa-1f11d764" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "c62463fd-1e98eece-23e6fd85-a8226569" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." 
ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "b535109c-c8439fbb-970bae46-bc7642bc" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true 2026-01-04T02:50:55.332827Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [b535109c-c8439fbb-970bae46-bc7642bc] HandleResponse: { SendMessageBatch { RequestId: "b535109c-c8439fbb-970bae46-bc7642bc" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "ca34f1e1-59283113-630558aa-1f11d764" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "c62463fd-1e98eece-23e6fd85-a8226569" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "b535109c-c8439fbb-970bae46-bc7642bc" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true }, status: OK 2026-01-04T02:50:55.333002Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [b535109c-c8439fbb-970bae46-bc7642bc] Sending reply from proxy actor: { SendMessageBatch { RequestId: "b535109c-c8439fbb-970bae46-bc7642bc" Entries { MD5OfMessageAttributes: "3d778967e1fa431d626ffb890c486385" MD5OfMessageBody: "94a29778a1f1f41bf68142847b2e6106" MessageId: "ca34f1e1-59283113-630558aa-1f11d764" SequenceNumber: 1 Id: "Id-0" } Entries { MD5OfMessageBody: "3bf7e6d806a0b8062135ae945eca30bf" MessageId: "c62463fd-1e98eece-23e6fd85-a8226569" SequenceNumber: 2 Id: "Id-1" } Entries { Error { Status: 400 Message: "No MessageGroupId parameter." ErrorCode: "MissingParameter" } Id: "Id-2" } } RequestId: "b535109c-c8439fbb-970bae46-bc7642bc" FolderId: "folder4" ResourceId: "000000000000000101v0" IsFifo: true } 2026-01-04T02:50:55.333328Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:401: http request [SendMessageBatch] requestId [b535109c-c8439fbb-970bae46-bc7642bc] Got succesfult GRPC response. 2026-01-04T02:50:55.333579Z node 7 :HTTP_PROXY INFO: http_req.cpp:1598: http request [SendMessageBatch] requestId [b535109c-c8439fbb-970bae46-bc7642bc] reply ok 2026-01-04T02:50:55.333742Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [SendMessageBatch] requestId [b535109c-c8439fbb-970bae46-bc7642bc] Send metering event. 
HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 1063 ResponseSizeInBytes: 619 SourceAddress: f861:c919:f17b:0:e061:c919:f17b:0 ResourceId: 000000000000000101v0 Action: SendMessageBatch 2026-01-04T02:50:55.333841Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:48236) <- (200 , 465 bytes) 2026-01-04T02:50:55.333946Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:48236) connection closed Http output full {"Successful":[{"SequenceNumber":"1","Id":"Id-0","MD5OfMessageBody":"94a29778a1f1f41bf68142847b2e6106","MD5OfMessageAttributes":"3d778967e1fa431d626ffb890c486385","MessageId":"ca34f1e1-59283113-630558aa-1f11d764"},{"SequenceNumber":"2","Id":"Id-1","MD5OfMessageBody":"3bf7e6d806a0b8062135ae945eca30bf","MessageId":"c62463fd-1e98eece-23e6fd85-a8226569"}],"Failed":[{"Message":"No MessageGroupId parameter.","Id":"Id-2","Code":"MissingParameter","SenderFault":true}]} 2026-01-04T02:50:55.334049Z node 7 :SQS TRACE: executor.cpp:256: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Compile program response: { Status: 48 MiniKQLCompileResults { CompiledProgram: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001>\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? \006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } } 2026-01-04T02:50:55.334089Z node 7 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] compilation duration: 2ms 2026-01-04T02:50:55.334151Z node 7 :SQS DEBUG: queue_leader.cpp:464: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) has been prepared 2026-01-04T02:50:55.334168Z node 7 :SQS DEBUG: queue_leader.cpp:514: Request [] Executing compiled query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) 2026-01-04T02:50:55.334248Z node 7 :SQS DEBUG: executor.cpp:83: Request [] Starting executor actor for query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID). 
Mode: COMPILE_AND_EXEC 2026-01-04T02:50:55.334335Z node 7 :SQS TRACE: executor.cpp:154: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 0, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 12311263855443095412, "TIME_FROM": 0} 2026-01-04T02:50:55.334641Z node 7 :SQS TRACE: executor.cpp:203: Request [] Query(idx=GET_OLDEST_MESSAGE_TIMESTAMP_METRIC_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "\037\016\nFlags\010Name\010Args\016Payload\022Parameter\014Offset\032SentTimestamp\006\002\206\202\t\211\004\202\203\005@\206\205\004\207\203\010\207\203\010\026\032$SetResult\000\003?\002\020messages\t\211\004?\016\205\004?\016\203\014\020List$Truncated\203\004\030Member\000\t\211\026?\026\203\005\004\200\205\004\203\004\203\004\026\032\213\010\203\010\203\010\203\010\203\010\213\010?$?&\203\010\203\010\203\004\203\010\203\010\203\004\206\203\014\203\014,SelectRange\000\003?\034 \000\001\205\000\000\000\000\001>\000\000\000\000\000\000\000?\014\005?\"\003?\036\010\003? \006\003\013?,\t\351\000?$\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?R\003?T(QUEUE_ID_NUMBER_HASH\003\022\000\t\351\000?&\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?h\003?j\036QUEUE_ID_NUMBER\003\022\000\t\351\000?(\005\205\004\206\205\004\203\010\203\005@\002\006\203\005@\n\016\006\000?~\003?\200\022TIME_FROM\003\022\000\003?*\000\010\013?2?`?v\003?.\177\377\377\377\377\377\377\377\377\003?0\177\377\377\377\377\377\377\377\377\014\003?4\000\003?6\002\003?8\000\003?:\000\006\010?>\003\203\014\000\003\203\014\000\003\203\014\000\003\203\014\000\017\003?@\000\377\007\003?\030\000\002\001\000/" } Params { Bin: "\037\000\005\205\n\203\010\203\010\203\010\203\004\203\010> TestYmqHttpProxy::TestTagQueueMultipleQueriesInflight [GOOD] Test command err: 2026-01-04T02:50:00.809061Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333221912742003:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:00.809124Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:01.064660Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:01.093956Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:01.094143Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:01.182024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:01.255527Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:01.268452Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:01.482233Z node 1 
:NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:01.482280Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:01.482293Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:01.482357Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:01.821968Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:01.928606Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:01.935568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:02.120827Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:50:02.133813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:50:02.227188Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.376432Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.435304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-04T02:50:02.441393Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.498880Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710665, at schemeshard: 72057594046644480 2026-01-04T02:50:02.504744Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.540681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.589937Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.625094Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.672453Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.709131Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.733857Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:04.225467Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333239092612809:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.225562Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333239092612801:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.225885Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.226536Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333239092612816:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.226649Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.230163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:04.244386Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333239092612815:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:50:04.302258Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333239092612868:3100] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:05.040395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.086865Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.133276Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.176281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.209008Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTabl ... 'cloud4', queue_name='000000000000000301v0', folder_id='folder4' 2026-01-04T02:50:55.274098Z node 7 :SQS DEBUG: proxy_actor.cpp:78: Request [2cf09b8-66fc10b6-894b9d4d-1aa0e425] Request proxy started 2026-01-04T02:50:55.274134Z node 7 :SQS DEBUG: service.cpp:761: Request [2cf09b8-66fc10b6-894b9d4d-1aa0e425] Answer configuration for queue [cloud4/000000000000000301v0] without leader 2026-01-04T02:50:55.274175Z node 7 :SQS DEBUG: proxy_actor.cpp:97: Request [2cf09b8-66fc10b6-894b9d4d-1aa0e425] Get configuration duration: 0ms 2026-01-04T02:50:55.274262Z node 7 :SQS DEBUG: proxy_service.cpp:246: Request [2cf09b8-66fc10b6-894b9d4d-1aa0e425] Send get leader node request to sqs service for cloud4/000000000000000301v0 2026-01-04T02:50:55.274304Z node 7 :SQS DEBUG: service.cpp:581: Request [2cf09b8-66fc10b6-894b9d4d-1aa0e425] Leader node for queue [cloud4/000000000000000301v0] is 7 2026-01-04T02:50:55.274339Z node 7 :SQS DEBUG: proxy_service.cpp:170: Request [2cf09b8-66fc10b6-894b9d4d-1aa0e425] Got leader node for queue response. Node id: 7. 
Status: 0 2026-01-04T02:50:55.274424Z node 7 :SQS TRACE: proxy_service.cpp:303: Request [2cf09b8-66fc10b6-894b9d4d-1aa0e425] Sending request from proxy to leader node 7: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "2cf09b8-66fc10b6-894b9d4d-1aa0e425" 2026-01-04T02:50:55.274504Z node 7 :SQS DEBUG: proxy_service.cpp:70: Request [2cf09b8-66fc10b6-894b9d4d-1aa0e425] Received Sqs Request: ListQueueTags { Auth { UserName: "cloud4" FolderId: "folder4" UserSID: "fake_user_sid@as" } QueueName: "000000000000000301v0" } RequestId: "2cf09b8-66fc10b6-894b9d4d-1aa0e425" 2026-01-04T02:50:55.274576Z node 7 :SQS DEBUG: action.h:133: Request [2cf09b8-66fc10b6-894b9d4d-1aa0e425] Request started. Actor: [7:7591333458961446666:5662] 2026-01-04T02:50:55.274621Z node 7 :SQS TRACE: service.cpp:1472: Inc local leader ref for actor [7:7591333458961446666:5662] 2026-01-04T02:50:55.274646Z node 7 :SQS DEBUG: service.cpp:754: Request [2cf09b8-66fc10b6-894b9d4d-1aa0e425] Forward configuration request to queue [cloud4/000000000000000301v0] leader 2026-01-04T02:50:55.279375Z node 7 :SQS TRACE: executor.cpp:286: Request [d6dd3da5-9437f4f3-6dc9450a-c4cf3aab] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] HandleResponse { Status: 48 TxId: 281474976710929 Step: 1767495055326 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { 
Bool: true } } Struct { Optional { Text: "{\"k4\":\"v\"}" } } } } } 2026-01-04T02:50:55.279412Z node 7 :SQS DEBUG: executor.cpp:287: Request [d6dd3da5-9437f4f3-6dc9450a-c4cf3aab] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Attempt 1 execution duration: 6ms 2026-01-04T02:50:55.279976Z node 7 :SQS TRACE: executor.cpp:325: Request [d6dd3da5-9437f4f3-6dc9450a-c4cf3aab] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Sending mkql execution result: { Status: 48 TxId: 281474976710929 Step: 1767495055326 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{\"k4\":\"v\"}" } } } } } 2026-01-04T02:50:55.280088Z node 7 :SQS TRACE: executor.cpp:327: Request [d6dd3da5-9437f4f3-6dc9450a-c4cf3aab] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Minikql data response: {"attrs": {"ContentBasedDeduplication": false, "DelaySeconds": 0, "DlqArn": "", "DlqName": "", "FifoQueue": true, "MaxReceiveCount": 0, "MaximumMessageSize": 262144, "MessageRetentionPeriod": 345600000, "ReceiveMessageWaitTime": 0, "ShowDetailedCountersDeadline": null, "VisibilityTimeout": 30000}, "queueExists": true, "tags": "{\"k4\":\"v\"}"} 2026-01-04T02:50:55.280258Z node 7 :SQS DEBUG: executor.cpp:401: Request [d6dd3da5-9437f4f3-6dc9450a-c4cf3aab] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] 
execution duration: 8ms 2026-01-04T02:50:55.280287Z node 7 :SQS DEBUG: queue_leader.cpp:556: Request [d6dd3da5-9437f4f3-6dc9450a-c4cf3aab] Sending executed reply 2026-01-04T02:50:55.280616Z node 7 :SQS DEBUG: action.h:627: Request [2cf09b8-66fc10b6-894b9d4d-1aa0e425] Get configuration duration: 6ms 2026-01-04T02:50:55.280644Z node 7 :SQS TRACE: action.h:647: Request [2cf09b8-66fc10b6-894b9d4d-1aa0e425] Got configuration. Root url: http://ghrun-46vtxmsa7i.auto.internal:8771, Shards: 1, Fail: 0 2026-01-04T02:50:55.280665Z node 7 :SQS TRACE: action.h:427: Request [2cf09b8-66fc10b6-894b9d4d-1aa0e425] DoRoutine 2026-01-04T02:50:55.280758Z node 7 :SQS TRACE: action.h:264: Request [2cf09b8-66fc10b6-894b9d4d-1aa0e425] SendReplyAndDie from action actor { ListQueueTags { RequestId: "2cf09b8-66fc10b6-894b9d4d-1aa0e425" Tags { Key: "k4" Value: "v" } } } 2026-01-04T02:50:55.280885Z node 7 :SQS TRACE: proxy_service.h:35: Request [2cf09b8-66fc10b6-894b9d4d-1aa0e425] Sending sqs response: { ListQueueTags { RequestId: "2cf09b8-66fc10b6-894b9d4d-1aa0e425" Tags { Key: "k4" Value: "v" } } RequestId: "2cf09b8-66fc10b6-894b9d4d-1aa0e425" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k4" Value: "v" } } 2026-01-04T02:50:55.280984Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7591333458961446666:5662]. Found: 1 2026-01-04T02:50:55.281028Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse ListQueueTags { RequestId: "2cf09b8-66fc10b6-894b9d4d-1aa0e425" Tags { Key: "k4" Value: "v" } } RequestId: "2cf09b8-66fc10b6-894b9d4d-1aa0e425" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k4" Value: "v" } 2026-01-04T02:50:55.281113Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7591333458961446665:2772]: ListQueueTags { RequestId: "2cf09b8-66fc10b6-894b9d4d-1aa0e425" Tags { Key: "k4" Value: "v" } } RequestId: "2cf09b8-66fc10b6-894b9d4d-1aa0e425" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k4" Value: "v" } 2026-01-04T02:50:55.281274Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [2cf09b8-66fc10b6-894b9d4d-1aa0e425] HandleResponse: { ListQueueTags { RequestId: "2cf09b8-66fc10b6-894b9d4d-1aa0e425" Tags { Key: "k4" Value: "v" } } RequestId: "2cf09b8-66fc10b6-894b9d4d-1aa0e425" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k4" Value: "v" } }, status: OK 2026-01-04T02:50:55.281380Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [2cf09b8-66fc10b6-894b9d4d-1aa0e425] Sending reply from proxy actor: { ListQueueTags { RequestId: "2cf09b8-66fc10b6-894b9d4d-1aa0e425" Tags { Key: "k4" Value: "v" } } RequestId: "2cf09b8-66fc10b6-894b9d4d-1aa0e425" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true QueueTags { Key: "k4" Value: "v" } } 2026-01-04T02:50:55.281560Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:401: http request [ListQueueTags] requestId [2cf09b8-66fc10b6-894b9d4d-1aa0e425] Got succesfult GRPC response. 2026-01-04T02:50:55.281681Z node 7 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ListQueueTags] requestId [2cf09b8-66fc10b6-894b9d4d-1aa0e425] reply ok 2026-01-04T02:50:55.281793Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [ListQueueTags] requestId [2cf09b8-66fc10b6-894b9d4d-1aa0e425] Send metering event. 
HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 530 ResponseSizeInBytes: 171 SourceAddress: 7834:31f4:9a7b:0:6034:31f4:9a7b:0 ResourceId: 000000000000000301v0 Action: ListQueueTags 2026-01-04T02:50:55.281890Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:57622) <- (200 , 19 bytes) 2026-01-04T02:50:55.281991Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:57622) connection closed Http output full {"Tags":{"k4":"v"}} >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] >> TestKinesisHttpProxy::ListShardsToken [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/effects/unittest >> KqpImmediateEffects::MultipleEffectsWithIndex [GOOD] Test command err: Trying to start YDB, gRPC: 6888, MsgBus: 3001 2026-01-04T02:50:31.269495Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333355152456260:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:31.274237Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:31.512086Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:31.517762Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:31.518016Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:31.591755Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:31.592095Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:31.595934Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333355152456141:2081] 1767495031265353 != 1767495031265356 2026-01-04T02:50:31.688719Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:31.704343Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:31.704362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:31.704371Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:31.704482Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:32.206319Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:32.215199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, 
at schemeshard: 72057594046644480 2026-01-04T02:50:32.275036Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:32.284803Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:32.459846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:32.629765Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:32.703257Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.447200Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333368037359898:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:34.447337Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:34.447718Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333368037359908:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:34.447769Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:34.758442Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.786909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.816299Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.846409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.874177Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.910006Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:34.977959Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.016435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:35.096684Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333372332328071:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.096853Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.097247Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333372332328076:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.097299Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333372332328077:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.097340Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:35.100685Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:35.109865Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333372332328080:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:35.210881Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333372332328131:3764] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:36.270337Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333355152456260:2147];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:36.271845Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:36.900843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation ... 2026-01-04T02:50:45.491727Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:45.526538Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:45.526616Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:45.528444Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:45.565467Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:45.565484Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:45.565489Z node 3 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:45.565542Z node 3 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:45.676933Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:45.928937Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:45.934756Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:50:45.943815Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:45.983977Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, 
at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.102854Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.226275Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:46.415725Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:48.863190Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333427145216693:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:48.863279Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:48.863613Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333427145216703:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:48.863691Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:48.936599Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:48.967876Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:48.998879Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.029011Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.057110Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.090780Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.125294Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.196978Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:49.273816Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333431440184872:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:49.273905Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:49.273973Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333431440184877:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:49.274025Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:7591333431440184879:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:49.274066Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:49.277346Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:49.287267Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:7591333431440184881:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:50:49.369423Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:7591333431440184932:3767] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:50.411155Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[3:7591333414260312977:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:50.411942Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:50.858226Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:50.895490Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:50.939311Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715675:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/effects/unittest >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder >> TSchemeshardCompactionQueueTest::UpdateBelowThreshold [GOOD] >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] >> DataShardReadTableSnapshots::ReadTableSnapshot >> DataShardReadTableSnapshots::ReadTableSplitBefore >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit >> TSchemeshardCompactionQueueTest::EnqueueEmptyShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShard [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueEmptyShard [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled >> TSchemeshardCompactionQueueTest::ShouldNotEnqueueSinglePartedShardWithMemData [GOOD] >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::CheckPgAutoParams-useSink 
[GOOD] Test command err: Trying to start YDB, gRPC: 6021, MsgBus: 12368 2026-01-04T02:48:53.226241Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332937044185289:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:53.226300Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:53.253455Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:53.489112Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:53.490160Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:53.490268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:53.497349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:53.578537Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:53.585906Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332937044185169:2081] 1767494933158841 != 1767494933158844 2026-01-04T02:48:53.726258Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:53.768345Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:53.768369Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:53.768376Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:53.768468Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:54.250148Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:54.258552Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:54.265101Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:56.573666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, 
first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:56.762662Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2026-01-04T02:48:56.823929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) \x62797465612030 \x62797465612030 \x62797465612031 \x62797465612031 \x62797465612032 \x62797465612032 \x62797465612033 \x62797465612033 \x62797465612034 \x62797465612034 \x62797465612035 \x62797465612035 \x62797465612036 \x62797465612036 \x62797465612037 \x62797465612037 \x62797465612038 \x62797465612038 \x62797465612039 \x62797465612039 2026-01-04T02:48:56.983844Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:57.045292Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2026-01-04T02:48:57.159317Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:57.211615Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill {"\\x6130","\\x623130"} {"\\x6130","\\x623130"} {"\\x6131","\\x623131"} {"\\x6131","\\x623131"} {"\\x6132","\\x623132"} {"\\x6132","\\x623132"} {"\\x6133","\\x623133"} {"\\x6133","\\x623133"} {"\\x6134","\\x623134"} {"\\x6134","\\x623134"} {"\\x6135","\\x623135"} {"\\x6135","\\x623135"} {"\\x6136","\\x623136"} {"\\x6136","\\x623136"} {"\\x6137","\\x623137"} {"\\x6137","\\x623137"} {"\\x6138","\\x623138"} {"\\x6138","\\x623138"} {"\\x6139","\\x623139"} {"\\x6139","\\x623139"} 2026-01-04T02:48:57.327542Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:57.367727Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill f f t t 2026-01-04T02:48:57.445128Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:57.488367Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill f f t t 2026-01-04T02:48:57.543195Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {f,f} {f,f} {t,t} {t,t} 2026-01-04T02:48:57.653236Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710684:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {f,f} {f,f} {t,t} {t,t} 2026-01-04T02:48:57.750686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:57.805623Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2026-01-04T02:48:57.859772Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710692:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:57.913873Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2026-01-04T02:48:57.964090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710696:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2026-01-04T02:48:58.062069Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710699:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:58.125091Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send 
TEvPoisonPill {0,0} {0,0} {1,1} {1,1} {2,2} {2,2} {3,3} {3,3} {4,4} {4,4} {5,5} {5,5} {6,6} {6,6} {7,7} {7,7} {8,8} {8,8} {9,9} {9,9} 2026-01-04T02:48:58.181503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710703:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:58.225412Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332937044185289:2148];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:58.225455Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 2026-01-04T02:48:58.319160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710706:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01 ... RROR: schemereq.cpp:624: Actor# [14:7591333445087374858:2543] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:52.151348Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:52.516892Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:53.185400Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:504: Get parsing result with error, self: [14:7591333449382342493:2392], owner: [14:7591333440792407457:2312], statement id: 0 2026-01-04T02:50:53.185880Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=14&id=OThkYTM4ODYtM2Y1MjMwNTctMjk1YTMyOTItMTQ0ZDllMzQ=, ActorId: [14:7591333449382342491:2391], ActorState: ExecuteState, LegacyTraceId: 01ke3embvv8aq5swj1ygeyhp8x, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Error while parsing query." severity: 1 issues { position { row: 1 column: 1 } message: "alternative is not implemented yet : 34" end_position { row: 1 column: 1 } severity: 1 } } tx_id# trace_id# 2026-01-04T02:50:53.458494Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [14:7591333449382342519:2402], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect, At tuple, At tuple, At tuple, At function: PgSetItem, At tuple
: Error: At tuple
:1:1: Error: At function: PgWhere, At lambda
:2:56: Error: At function: PgOp
:2:56: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2026-01-04T02:50:53.459104Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=14&id=YTNkZTllZjctNDIzNmRlMjgtZjk1ZjgxNmUtNzljMmZhNjc=, ActorId: [14:7591333449382342516:2400], ActorState: ExecuteState, LegacyTraceId: 01ke3emc3gdm9ax95jbvh9fc17, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: RemovePrefixMembers, At function: PgSelect, At tuple, At tuple, At tuple, At function: PgSetItem, At tuple" end_position { row: 1 column: 1 } severity: 1 issues { message: "At tuple" severity: 1 issues { position { row: 1 column: 1 } message: "At function: PgWhere, At lambda" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 2 column: 56 } message: "At function: PgOp" end_position { row: 2 column: 56 } severity: 1 issues { position { row: 2 column: 56 } message: "Unable to find an overload for operator = with given argument type(s): (text,int4)" end_position { row: 2 column: 56 } severity: 1 } } } } } } tx_id# trace_id# 2026-01-04T02:50:53.497662Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [14:7591333449382342531:2408], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: RemovePrefixMembers, At function: PgSelect, At tuple, At tuple, At tuple, At function: PgSetItem, At tuple
: Error: At tuple
:1:1: Error: At function: PgWhere, At lambda
:2:58: Error: At function: PgAnd
:2:68: Error: At function: PgOp
:2:68: Error: Unable to find an overload for operator = with given argument type(s): (text,int4) 2026-01-04T02:50:53.498317Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=14&id=NzI1NzVkNzQtYzY1YTNkYTEtNzNhNTY1NzAtZDU0YjNmYzQ=, ActorId: [14:7591333449382342528:2406], ActorState: ExecuteState, LegacyTraceId: 01ke3emc4v9ybdt1e69xv1n4c6, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: RemovePrefixMembers, At function: PgSelect, At tuple, At tuple, At tuple, At function: PgSetItem, At tuple" end_position { row: 1 column: 1 } severity: 1 issues { message: "At tuple" severity: 1 issues { position { row: 1 column: 1 } message: "At function: PgWhere, At lambda" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 2 column: 58 } message: "At function: PgAnd" end_position { row: 2 column: 58 } severity: 1 issues { position { row: 2 column: 68 } message: "At function: PgOp" end_position { row: 2 column: 68 } severity: 1 issues { position { row: 2 column: 68 } message: "Unable to find an overload for operator = with given argument type(s): (text,int4)" end_position { row: 2 column: 68 } severity: 1 } } } } } } } tx_id# trace_id# 2026-01-04T02:50:53.513850Z node 14 :KQP_EXECUTER CRIT: {KQPLIT@kqp_literal_executer.cpp:118} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01ke3emc62c2ybcm7j2qvrszmj, Database: /Root, SessionId: ydb://session/3?node_id=14&id=NTQ0MzY4MTgtYmFlZjUzMjgtYzRkZjVlODYtMTE1NmI5YTI=, PoolId: default, IsStreamingQuery: 0}. TKqpLiteralExecuter, unexpected exception caught: (NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" trace_id# 2026-01-04T02:50:53.514152Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=14&id=NTQ0MzY4MTgtYmFlZjUzMjgtYzRkZjVlODYtMTE1NmI5YTI=, ActorId: [14:7591333449382342540:2412], ActorState: ExecuteState, LegacyTraceId: 01ke3emc62c2ybcm7j2qvrszmj, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "(NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: \"a\"\n\n" issue_code: 2029 severity: 1 } trace_id# 2026-01-04T02:50:53.558970Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:53.642906Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:53.717329Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [14:7591333449382342712:2439], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: values have 3 columns, INSERT INTO expects: 2 2026-01-04T02:50:53.717847Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=14&id=ZDQzYzFjYzgtNjYxMWU4YTYtMWFkMzRjOWYtMjdlZDU2Y2I=, ActorId: [14:7591333449382342709:2437], ActorState: ExecuteState, LegacyTraceId: 01ke3emcbwbm3vqhe89rq5rz2n, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "values have 3 columns, INSERT INTO expects: 2" end_position { row: 1 column: 1 } severity: 1 } } } tx_id# trace_id# 2026-01-04T02:50:53.752354Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [14:7591333449382342724:2445], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Failed to convert type: List> to List>
:1:1: Error: Failed to convert 'id': pgunknown to Optional
:1:1: Error: Row type mismatch for table: db.[/Root/PgTable2] 2026-01-04T02:50:53.752899Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=14&id=Zjk5Yjc3OWMtOTU4ZmNkZGQtYTAwNmFmZGMtNmNhZmU3Mjk=, ActorId: [14:7591333449382342721:2443], ActorState: ExecuteState, LegacyTraceId: 01ke3emccz9vejh1e6get0wa04, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Failed to convert type: List> to List>" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Failed to convert \'id\': pgunknown to Optional" end_position { row: 1 column: 1 } severity: 1 } } issues { position { row: 1 column: 1 } message: "Row type mismatch for table: db.[/Root/PgTable2]" end_position { row: 1 column: 1 } severity: 1 } } } tx_id# trace_id# 2026-01-04T02:50:54.235300Z node 14 :KQP_EXECUTER CRIT: {KQPLIT@kqp_literal_executer.cpp:118} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: 01ke3emce11bh16wknjn7zbwb8, Database: /Root, SessionId: ydb://session/3?node_id=14&id=YmNiN2U2YWUtMjEzNjE5YzUtOTJlNDk4OTItMTAxOTA3ZjE=, PoolId: default, IsStreamingQuery: 0}. TKqpLiteralExecuter, unexpected exception caught: (NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: "a" trace_id# 2026-01-04T02:50:54.236098Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=14&id=YmNiN2U2YWUtMjEzNjE5YzUtOTJlNDk4OTItMTAxOTA3ZjE=, ActorId: [14:7591333449382342733:2449], ActorState: ExecuteState, LegacyTraceId: 01ke3emce11bh16wknjn7zbwb8, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "(NKikimr::NMiniKQL::TTerminateException) Terminate was called, reason(51): ERROR: invalid input syntax for type integer: \"a\"\n\n" issue_code: 2029 severity: 1 } trace_id# 2026-01-04T02:50:54.287101Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:54.860766Z node 14 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 14, TabletId: 72075186224037892 not found 2026-01-04T02:50:54.901173Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710681:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> TSchemeshardCompactionQueueTest::EnqueueBelowSearchHeightThreshold [GOOD] >> TSchemeshardCompactionQueueTest::EnqueueBelowRowDeletesThreshold [GOOD] >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false |96.0%| [TA] $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::EnqueueSinglePartedShardWhenEnabled [GOOD] |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::RemoveLastShardFromSubQueues [GOOD] |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::UpdateWithEmptyShard [GOOD] |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::ShouldPopWhenOnlyLastCompactionQueue [GOOD] >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardCompactionQueueTest::CheckOrderWhenAllQueues [GOOD] |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest |96.0%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest |96.0%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/effects/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestChangeMessageVisibilityBatch [GOOD] Test command err: 2026-01-04T02:50:00.808988Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333223797788567:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:00.809061Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:01.110442Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:01.110578Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:01.162551Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:01.171230Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:01.266935Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:01.273903Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333223797788530:2081] 1767495000805791 != 1767495000805794 2026-01-04T02:50:01.455877Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:01.481922Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken 
or outdated, will use file: (empty maybe) 2026-01-04T02:50:01.481951Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:01.481963Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:01.482043Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:01.825664Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:01.924999Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:01.933156Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:02.123907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:50:02.136963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:50:02.139885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:50:02.163764Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T02:50:02.249899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.406056Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.468200Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710663, at schemeshard: 72057594046644480 2026-01-04T02:50:02.474635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.531481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.561853Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.593696Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.624778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.664874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.698176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.731040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:04.065907Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333240977659369:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.066018Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.066419Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333240977659381:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.066468Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333240977659382:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.066500Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.070369Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:04.084243Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333240977659385:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:50:04.161019Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333240977659436:3102] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:05.058024Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.108648Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.177856Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.213320Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ES ... CHANGE_VISIBILITY_ID). Mode: COMPILE_AND_EXEC 2026-01-04T02:50:56.676087Z node 7 :SQS TRACE: executor.cpp:154: Request [73282e16-fdd26659-c78b2c84-b54769ae] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Serializing params: {"QUEUE_ID_NUMBER": 2, "QUEUE_ID_NUMBER_HASH": 17472595041006102391, "SHARD": 0, "QUEUE_ID_NUMBER_AND_SHARD_HASH": 12311263855443095412, "NOW": 1767495056675, "GROUPS_READ_ATTEMPT_IDS_PERIOD": 300000, "KEYS": [{"LockTimestamp": 1767495056585, "Offset": 1, "NewVisibilityDeadline": 1767495057675}, {"LockTimestamp": 1767495056613, "Offset": 2, "NewVisibilityDeadline": 1767495058675}]} 2026-01-04T02:50:56.676548Z node 7 :SQS TRACE: executor.cpp:203: Request [73282e16-fdd26659-c78b2c84-b54769ae] Query(idx=CHANGE_VISIBILITY_ID) Queue [cloud4/000000000000000101v0] Execute program: { Transaction { MiniKQLTransaction { Mode: COMPILE_AND_EXEC Program { Bin: "O\034\014Exists*NewVisibilityDeadline\014Offset\006Arg\014Member\nFlags\010Name\010Args\016Payload\022Parameter\006And\032LockTimestamp$VisibilityDeadline\014Invoke\t\211\004\206\202?\000\206\202\030Extend\000\006\002?\000\t\211\004\202\203\005@\206\205\n\203\014\207\203\010\203\014\203\010?\020(ChangeConddCurrentVisibilityDeadline\002\006\n$SetResult\000\003?\006\014result\t\211\006?\024\206\205\006?\020?\020?\020.\006\n?\032?\0220MapParameter\000\t\351\000?\034\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?&\003?(\010KEYS\003&\000\t\251\000?\032\016\000\005?\022\t\211\004?\010\207\203\014?\010 
Coalesce\000\t\211\004?<\207\203\014\207\203\014*\000\t\211\006?B\203\005@\203\010?\0146\000\003?J\026LessOrEqual\t\351\000?L\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?X\003?Z\006NOW\003&\000\t\211\004?\014\207\205\004\207\203\010?\014.2\203\004\022\000\t\211\n?n\203\005\004\200\205\004\203\004\203\004.2\213\010\203\010\203\010\203\004?\020\203\004$SelectRow\000\003?t \000\001\205\000\000\000\000\001<\000\000\000\000\000\000\000?l\005?z\003?v\020\003?x\026\003\013?\202\t\351\000?|\005\205\004\206\205\004\203\010\203\005@\026\032\203\005@\036\"\006\000?\226\003?\230> TestKinesisHttpProxy::TestWrongRequest [GOOD] Test command err: 2026-01-04T02:50:00.810640Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333221273510726:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:00.810738Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:01.068242Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:01.071344Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:01.071457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:01.108466Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:01.225989Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333221273510684:2081] 1767495000805794 != 1767495000805797 2026-01-04T02:50:01.238754Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:01.376727Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:01.484464Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:01.484516Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:01.484539Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:01.484647Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:01.825010Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:01.939126Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:02.164161Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:50:02.175510Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:50:02.187425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:50:02.273963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.424906Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.471824Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.526802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.563121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.598579Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.633837Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.669832Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.707327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.749114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:04.235481Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333238453381523:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.235485Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333238453381534:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.235601Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.235918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333238453381538:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.235978Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.239620Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:04.255607Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333238453381537:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:50:04.336445Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333238453381590:3104] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:05.058279Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.098079Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.156615Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.219537Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.273953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp: ... 
onse: {"settings": [], "truncated": false} 2026-01-04T02:50:55.657950Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 17ms 2026-01-04T02:50:55.658184Z node 8 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976710685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:50:55.659281Z node 8 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:50:55.659307Z node 8 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 16ms 2026-01-04T02:50:55.659674Z node 8 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: 
Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:50:55.659723Z node 8 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-04T02:50:55.659821Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 17ms 2026-01-04T02:50:55.660353Z node 8 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976710686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional 
{ Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:50:55.770757Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7591333457414601980:2437]: Pool not found 2026-01-04T02:50:55.770891Z node 8 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-04T02:50:56.006999Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7591333457414601986:2439]: Pool not found 2026-01-04T02:50:56.007331Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-04T02:50:56.010455Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7591333461709569396:2456], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:50:56.010496Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7591333461709569395:2455], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:56.010547Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:56.010689Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7591333461709569399:2457], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:56.010762Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:56.313935Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7591333461709569393:2454]: Pool not found 2026-01-04T02:50:56.314194Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-04T02:50:56.636464Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:51696) incoming connection opened 2026-01-04T02:50:56.636585Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:51696) -> (POST /, 87 bytes) 2026-01-04T02:50:56.636738Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [1822:5031:427c:0:22:5031:427c:0] request [CreateStream] url [/] database [] requestId: 94bd2230-a506db1-5387d67-f59893f5 2026-01-04T02:50:56.637243Z node 8 :HTTP_PROXY WARN: http_req.cpp:971: http request [CreateStream] requestId [94bd2230-a506db1-5387d67-f59893f5] got new request with incorrect json from [1822:5031:427c:0:22:5031:427c:0] database '' 2026-01-04T02:50:56.637425Z node 8 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateStream] requestId [94bd2230-a506db1-5387d67-f59893f5] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName 2026-01-04T02:50:56.637589Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:51696) <- (400 InvalidArgumentException, 135 bytes) 2026-01-04T02:50:56.637653Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:51696) Request: POST / HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked { "ShardCount":5, "StreamName":"testtopic", "WrongStreamName":"WrongStreamName" } 2026-01-04T02:50:56.637691Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:51696) Response: HTTP/1.1 400 InvalidArgumentException Connection: close x-amzn-requestid: 94bd2230-a506db1-5387d67-f59893f5 Content-Type: application/x-amz-json-1.1 Content-Length: 135 2026-01-04T02:50:56.637781Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:51696) connection closed Http output full {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} 400 {"__type":"InvalidArgumentException","message":"ydb/core/http_proxy/json_proto_conversion.h:400: Unexpected json key: WrongStreamName"} |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::TestEmptyHttpBody [GOOD] Test command err: 
2026-01-04T02:50:00.808889Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333222510311321:2065];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:00.809315Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:01.045573Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:01.125302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:01.127046Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:01.172697Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:01.218609Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:01.243231Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333222510311296:2081] 1767495000806617 != 1767495000806620 2026-01-04T02:50:01.354951Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:01.481676Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:01.481697Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:01.481786Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:01.481913Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:01.826403Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:01.923974Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:01.934547Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:50:02.135860Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:50:02.150406Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 
72057594046644480 2026-01-04T02:50:02.163093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:50:02.186294Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715660, at schemeshard: 72057594046644480 2026-01-04T02:50:02.240593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.369081Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.422800Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.466903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 2026-01-04T02:50:02.470882Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.545274Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.592595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.625621Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.653494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.681754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.713099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:04.093967Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333239690182151:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.094108Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333239690182140:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.094246Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.094851Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333239690182155:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.094946Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.098544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:04.110320Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333239690182154:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-01-04T02:50:04.180618Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333239690182207:3102] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:05.039408Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.088841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.120976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.152817Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ES ... } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:50:56.239652Z node 8 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Attempt 1 execution duration: 15ms 2026-01-04T02:50:56.240134Z node 8 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional 
{ Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:50:56.240176Z node 8 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] Minikql data response: {"queues": [], "truncated": false} 2026-01-04T02:50:56.240293Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_QUEUES_LIST_ID) Queue [] execution duration: 16ms 2026-01-04T02:50:56.240706Z node 8 :SQS TRACE: queues_list_reader.cpp:82: Handle queues list: { Status: 48 TxId: 281474976715685 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "queues" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "CreatedTimestamp" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "CustomQueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "FolderId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "MasterTabletId" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "QueueName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "QueueState" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "Shards" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "TablesFormat" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "Version" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:50:56.240736Z node 8 :SQS TRACE: executor.cpp:286: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] HandleResponse { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: 
false } } } } } 2026-01-04T02:50:56.240751Z node 8 :SQS DEBUG: executor.cpp:287: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Attempt 1 execution duration: 16ms 2026-01-04T02:50:56.240984Z node 8 :SQS TRACE: executor.cpp:325: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Sending mkql execution result: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:50:56.241022Z node 8 :SQS TRACE: executor.cpp:327: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] Minikql data response: {"settings": [], "truncated": false} 2026-01-04T02:50:56.241083Z node 8 :SQS DEBUG: executor.cpp:401: Request [] Query(idx=GET_USER_SETTINGS_ID) Queue [] execution duration: 16ms 2026-01-04T02:50:56.241244Z node 8 :SQS TRACE: user_settings_reader.cpp:89: Handle user settings: { Status: 48 TxId: 281474976715686 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "settings" Type { Kind: Optional Optional { Item { Kind: List List { Item { Kind: Struct Struct { Member { Name: "Account" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Name" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "Value" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } } } } } } Member { Name: "truncated" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } } } Value { Struct { Optional { } } Struct { Optional { Bool: false } } } } } 2026-01-04T02:50:56.410844Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7591333462297011477:2439]: Pool not found 2026-01-04T02:50:56.411017Z node 8 :SQS DEBUG: monitoring.cpp:60: [monitoring] Report deletion queue data lag: 0.000000s, count: 0 2026-01-04T02:50:56.595725Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7591333462297011474:2437]: Pool not found 2026-01-04T02:50:56.596041Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:100: [cleanup removed queues] getting queues... 2026-01-04T02:50:56.598836Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7591333462297011588:2455], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:56.598897Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:594: [WorkloadService] [TDatabaseFetcherActor] ActorId: [8:7591333462297011589:2456], Database: /Root/SQS, Failed to fetch database info, UNSUPPORTED, issues: {
: Error: Invalid database path /Root/SQS, please check the correctness of the path } 2026-01-04T02:50:56.598904Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:56.599111Z node 8 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [8:7591333462297011592:2457], DatabaseId: /Root/SQS, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:56.599161Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root/SQS, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:56.839469Z node 8 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:559: [WorkloadService] [Service] Reply cleanup error NOT_FOUND to [8:7591333462297011586:2454]: Pool not found 2026-01-04T02:50:56.839763Z node 8 :SQS DEBUG: cleanup_queue_data.cpp:138: [cleanup removed queues] there are no queues to delete 2026-01-04T02:50:57.217714Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:39350) incoming connection opened 2026-01-04T02:50:57.217828Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:39350) -> (POST /Root, 4 bytes) 2026-01-04T02:50:57.217972Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [b870:d53b:bc7b:0:a070:d53b:bc7b:0] request [CreateStream] url [/Root] database [/Root] requestId: e0a4a3fd-ea0fe387-cac44cd1-3736d725 2026-01-04T02:50:57.218505Z node 8 :HTTP_PROXY INFO: http_req.cpp:1602: http request [CreateStream] requestId [e0a4a3fd-ea0fe387-cac44cd1-3736d725] reply with status: BAD_REQUEST message: ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map 2026-01-04T02:50:57.218634Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:39350) <- (400 MissingParameter, 127 bytes) 2026-01-04T02:50:57.218693Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:313: (#37,[::1]:39350) Request: POST /Root HTTP/1.1 Host: example.amazonaws.com X-Amz-Target: kinesisApi.CreateStream X-Amz-Date: 20150830T123600Z Authorization: Content-Type: application/json Connection: Close Transfer-Encoding: chunked null 2026-01-04T02:50:57.218729Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:320: (#37,[::1]:39350) Response: HTTP/1.1 400 MissingParameter Connection: close x-amzn-requestid: e0a4a3fd-ea0fe387-cac44cd1-3736d725 Content-Type: application/x-amz-json-1.1 Content-Length: 127 2026-01-04T02:50:57.218821Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:39350) connection closed Http output full {"__type":"MissingParameter","message":"ydb/core/http_proxy/json_proto_conversion.h:395: Top level of json value is not a map"} >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas >> TSchemeShardSubDomainTest::Restart >> TNetClassifierTest::TestInitFromRemoteSource >> TNetClassifierTest::TestInitFromBadlyFormattedFile |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |96.0%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TSchemeShardSubDomainTest::SchemeQuotas |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] >> TSchemeShardSubDomainTest::CreateWithoutPlanResolution [GOOD] >> TSchemeShardSubDomainTest::CreateWithNoEqualName ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::ListShardsToken [GOOD] Test command err: 2026-01-04T02:50:00.808943Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333224378153990:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:00.809028Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:01.052168Z node 1 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:01.182409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:01.182513Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:01.187144Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:01.306746Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:01.313927Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333224378153953:2081] 1767495000805771 != 1767495000805774 2026-01-04T02:50:01.323576Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:01.483550Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:01.483569Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:01.483582Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:01.483746Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:01.850333Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:01.942933Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:01.957850Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:50:02.164157Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:50:02.172676Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:50:02.251291Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.409036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.457997Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.504947Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715665, at schemeshard: 72057594046644480 2026-01-04T02:50:02.515435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.549590Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.582053Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.619575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.654949Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.690796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.723828Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:04.191150Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333241558024794:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.191247Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333241558024805:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.191320Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.191766Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333241558024808:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.191821Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.194698Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333241558024810:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.194760Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.198028Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:04.209061Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333241558024812:2374], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715673 completed, doublechecking } 2026-01-04T02:50:04.276860Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333241558024863:3104] txid# 281474976715674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:05.041982Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.071424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.102794Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/cor ... 5: (#37,[::1]:46606) connection closed Http output full {"NextToken":"COD/qLe4MxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"COD/qLe4MxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2026-01-04T02:50:57.377578Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:46618) incoming connection opened 2026-01-04T02:50:57.377631Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:46618) -> (POST /Root, 157 bytes) 2026-01-04T02:50:57.377696Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [58dc:ee66:e87b:0:40dc:ee66:e87b:0] request [ListShards] url [/Root] database [/Root] requestId: 89ec3e73-217c484d-ef715991-1b58da55 2026-01-04T02:50:57.378004Z node 8 :HTTP_PROXY INFO: http_req.cpp:982: http request [ListShards] requestId [89ec3e73-217c484d-ef715991-1b58da55] got new request from [58dc:ee66:e87b:0:40dc:ee66:e87b:0] database '/Root' stream 'teststream' 2026-01-04T02:50:57.378250Z node 8 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [ListShards] requestId [89ec3e73-217c484d-ef715991-1b58da55] [auth] Authorized successfully 2026-01-04T02:50:57.378313Z node 8 :HTTP_PROXY INFO: http_req.cpp:701: http request [ListShards] requestId [89ec3e73-217c484d-ef715991-1b58da55] sending grpc request to '' 
database: '/Root' iam token size: 0 E0000 00:00:1767495057.378363 859753 message_lite.cc:131] Can't parse message of type "NKikimrPQ.TYdsNextToken" because it is missing required fields: CreationTimestamp, MaxResults, AlreadyRead, StreamArn 2026-01-04T02:50:57.378924Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037907] server connected, pipe [8:7591333466632276614:2488], now have 1 active actors on pipe 2026-01-04T02:50:57.378927Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037911] server connected, pipe [8:7591333466632276615:2489], now have 1 active actors on pipe 2026-01-04T02:50:57.379299Z node 8 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ListShards] requestId [89ec3e73-217c484d-ef715991-1b58da55] reply ok 2026-01-04T02:50:57.379480Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037907] server disconnected, pipe [8:7591333466632276614:2488] destroyed 2026-01-04T02:50:57.379487Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037911] server disconnected, pipe [8:7591333466632276615:2489] destroyed 2026-01-04T02:50:57.379532Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:46618) <- (200 , 449 bytes) 2026-01-04T02:50:57.379591Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:46618) connection closed Http output full {"NextToken":"COP/qLe4MxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 200 {"NextToken":"COP/qLe4MxACGAIiCnRlc3RzdHJlYW0=","Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}}]} 2026-01-04T02:50:57.451667Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037908][Partition][2][StateIdle] Process user action and tx events 2026-01-04T02:50:57.451669Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037910][Partition][4][StateIdle] Process user action and tx events 2026-01-04T02:50:57.451706Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.451714Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.451721Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037910][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:57.451725Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037908][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:57.451739Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.451739Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.451750Z node 8 
:PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037908][Partition][2][StateIdle] Try persist 2026-01-04T02:50:57.451751Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037910][Partition][4][StateIdle] Try persist 2026-01-04T02:50:57.455201Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037909][Partition][3][StateIdle] Process user action and tx events 2026-01-04T02:50:57.455201Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:50:57.455216Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.455220Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.455225Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:57.455232Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037909][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:57.455239Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.455242Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.455249Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][1][StateIdle] Try persist 2026-01-04T02:50:57.455249Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037909][Partition][3][StateIdle] Try persist 2026-01-04T02:50:57.458252Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037911][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:50:57.458269Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.458276Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037911][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:57.458286Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.458294Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037911][Partition][0][StateIdle] Try persist 2026-01-04T02:50:57.552042Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037910][Partition][4][StateIdle] Process user action and tx events 2026-01-04T02:50:57.552042Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037908][Partition][2][StateIdle] Process user action and tx events 2026-01-04T02:50:57.552066Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.552074Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.552081Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037908][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:57.552085Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037910][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:57.552098Z node 8 :PERSQUEUE DEBUG: 
partition.cpp:2388: [72075186224037908][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.552101Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037910][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.552110Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037908][Partition][2][StateIdle] Try persist 2026-01-04T02:50:57.552116Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037910][Partition][4][StateIdle] Try persist 2026-01-04T02:50:57.555570Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037909][Partition][3][StateIdle] Process user action and tx events 2026-01-04T02:50:57.555587Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.555596Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037909][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:57.555606Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037909][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.555603Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037907][Partition][1][StateIdle] Process user action and tx events 2026-01-04T02:50:57.555614Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037909][Partition][3][StateIdle] Try persist 2026-01-04T02:50:57.555619Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.555631Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037907][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:57.555646Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037907][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.555657Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037907][Partition][1][StateIdle] Try persist 2026-01-04T02:50:57.558611Z node 8 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037911][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:50:57.558626Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.558632Z node 8 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037911][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:50:57.558641Z node 8 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037911][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:50:57.558647Z node 8 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037911][Partition][0][StateIdle] Try persist |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::QuoteNonexistentPool-IsExternalSubdomain-false [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:50:58.417940Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:50:58.418017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:58.418054Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:50:58.418088Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:50:58.418121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:50:58.418153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:50:58.418197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:58.418257Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:50:58.418912Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:50:58.419122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:50:58.481323Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:50:58.481390Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:58.493877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:50:58.494069Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:50:58.494229Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:50:58.500726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:50:58.500917Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:50:58.501497Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:50:58.501718Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:50:58.503983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:58.504171Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:50:58.505138Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:50:58.505196Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:58.505311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:50:58.505348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:50:58.505423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:50:58.505563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:50:58.606197Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.606986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.607104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.607165Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.607213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.607272Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.607310Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.607369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.607423Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.607469Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.607519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.607585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.607626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: 
ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.607711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.607784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000038 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:50:59.157724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-01-04T02:50:59.157991Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 240 2026-01-04T02:50:59.158054Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-01-04T02:50:59.158231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:50:59.158315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:50:59.158368Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:50:59.160321Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:50:59.160373Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:50:59.160549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:50:59.160640Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:59.160674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-04T02:50:59.160734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 38 FAKE_COORDINATOR: Erasing txId 101 2026-01-04T02:50:59.161040Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.161106Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:50:59.161210Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:50:59.161261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:50:59.161329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:50:59.161361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:50:59.161396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-04T02:50:59.161439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:50:59.161478Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:50:59.161511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:50:59.161597Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:50:59.161656Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-04T02:50:59.161694Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-04T02:50:59.161744Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-04T02:50:59.162576Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:50:59.162695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:50:59.162737Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:50:59.162777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-04T02:50:59.162836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:50:59.163597Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:50:59.163685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:50:59.163765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:50:59.163819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:50:59.163860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:50:59.163933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:50:59.167295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:50:59.167390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestModificationResults wait txId: 102 2026-01-04T02:50:59.170919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterSubDomain SubDomain { PlanResolution: 50 Coordinators: 1 Mediators: 1 Name: "SomeDatabase" TimeCastBucketsPerMediator: 2 DatabaseQuotas { storage_quotas { unit_kind: "nonexistent_storage_kind" data_size_hard_quota: 1 } } } } TxId: 102 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:50:59.171149Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: /MyRoot/SomeDatabase, opId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.171349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 102:1, propose status:StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2, at schemeshard: 72057594046678944 2026-01-04T02:50:59.174016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 102, response: Status: StatusInvalidParameter Reason: "Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. 
Existing storage kinds are: pool-kind-1, pool-kind-2" TxId: 102 SchemeshardId: 72057594046678944 PathId: 38, at schemeshard: 72057594046678944 2026-01-04T02:50:59.174276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 102, database: /MyRoot, subject: , status: StatusInvalidParameter, reason: Malformed subdomain request: cannot set storage quotas of the following kinds: nonexistent_storage_kind, because no storage pool in the subdomain /MyRoot/SomeDatabase has the specified kinds. Existing storage kinds are: pool-kind-1, pool-kind-2, operation: ALTER DATABASE, path: /MyRoot/SomeDatabase TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2026-01-04T02:50:59.174602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:50:59.174648Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-04T02:50:59.174742Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:50:59.174765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:50:59.175243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:50:59.175397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:50:59.175476Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:719:2709] 2026-01-04T02:50:59.175635Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:50:59.175781Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:50:59.175810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:719:2709] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Restart [GOOD] >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop >> TestYmqHttpProxy::TestListQueueTags [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenDrop [GOOD] >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::Restart [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for 
TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:50:59.469906Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:50:59.470018Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:59.470063Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:50:59.470103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:50:59.470150Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:50:59.470180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:50:59.470244Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:59.470362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:50:59.471309Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:50:59.471614Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:50:59.547900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:50:59.547953Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:59.559382Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:50:59.559560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:50:59.559715Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:50:59.567512Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:50:59.567790Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:50:59.568442Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:50:59.568675Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:50:59.571119Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 
2026-01-04T02:50:59.571300Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:50:59.572220Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:50:59.572272Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:59.572393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:50:59.572427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:50:59.572525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:50:59.572691Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.578610Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:50:59.700840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:50:59.701102Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.701342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:50:59.701397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:50:59.701630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:50:59.701704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:59.704331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:50:59.704554Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:50:59.704821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.704881Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:50:59.704923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:50:59.704960Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:50:59.707257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.707314Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:50:59.707355Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:50:59.709329Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.709393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.709481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:50:59.709541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:50:59.712970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:50:59.714770Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:50:59.714954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:50:59.716095Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:50:59.716238Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:50:59.716286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply 
TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:50:59.716590Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:50:59.716652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:50:59.716835Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:50:59.716942Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:50:59.719146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:50:59.719195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... G: schemeshard_impl.cpp:442: AttachChild: child attached as only one child to the parent, parent id: [OwnerId: 72057594046678944, LocalPathId: 1], parent name: MyRoot, child name: USER_0, child id: [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:50:59.865357Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1492: TTxInit for UserAttributes, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.865440Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1518: TTxInit for UserAttributesAlterData, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.865657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 0 2026-01-04T02:50:59.865962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1820: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.866038Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:452: [RootShredManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberShredTenantsInRunning# 0 2026-01-04T02:50:59.866391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.866507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.866619Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2026-01-04T02:50:59.866667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-04T02:50:59.866699Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-04T02:50:59.866722Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-04T02:50:59.866834Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read 
records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.866935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.867200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2026-01-04T02:50:59.867539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.867657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.868130Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.868246Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.868499Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.868608Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.868677Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.868795Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.868988Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.869092Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.869275Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.869534Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.869705Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.869769Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.869930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.869985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.870032Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.873997Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:50:59.876130Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:50:59.876193Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:59.876750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:50:59.876801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:50:59.876845Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:50:59.877924Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 Leader for TabletID 72057594046678944 is [1:463:2416] sender: [1:525:2058] recipient: [1:15:2062] 2026-01-04T02:50:59.941597Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:50:59.941919Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 290us result status StatusSuccess 2026-01-04T02:50:59.942417Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:50:59.943037Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: 
TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:50:59.943198Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 167us result status StatusSuccess 2026-01-04T02:50:59.943624Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] >> TKesusTest::TestSessionDetach >> TKesusTest::TestReleaseLockFailure >> TSchemeShardSubDomainTest::DeleteAndRestart >> KqpPg::InsertNoTargetColumns_Alter-useSink [GOOD] >> KqpPg::InsertNoTargetColumns_Serial+useSink >> TSchemeShardSubDomainTest::RestartAtInFly >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq >> TSchemeShardSubDomainTest::ConcurrentCreateSubDomainAndDescribe [GOOD] >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas >> TKesusTest::TestSessionDetach [GOOD] >> TKesusTest::TestSessionDetachFutureId >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] >> TKesusTest::TestReleaseLockFailure [GOOD] >> TKesusTest::TestReleaseSemaphore >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] >> 
TSchemeShardSubDomainTest::DiskSpaceUsage-EnablePersistentPartitionStats >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] >> TConsoleConfigSubscriptionTests::TestConfigSubscriptionsCleanup [GOOD] >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] >> TKesusTest::TestReleaseSemaphore [GOOD] >> TKesusTest::TestSemaphoreData ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateWithNoEqualName [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:50:58.639674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:50:58.639782Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:58.639828Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:50:58.639930Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:50:58.639970Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:50:58.639999Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:50:58.640053Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:58.640129Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:50:58.640983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:50:58.641422Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:50:58.730163Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:50:58.730244Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:58.745754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:50:58.746022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:50:58.746227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:50:58.755818Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:50:58.756076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:50:58.756924Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:50:58.757211Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:50:58.760694Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:58.760962Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:50:58.762216Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:50:58.762286Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:58.762430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:50:58.762501Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:50:58.762617Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:50:58.762817Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:50:58.912734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.913741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.913916Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.913991Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.914059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.914125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.914189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.914265Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.914342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.914405Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.914497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: 
ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.914573Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.914657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.914753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.914838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
ecute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: "USER_3" } } TxId: 108 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:51:00.744888Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_mkdir.cpp:115: TMkDir Propose, path: /MyRoot/USER_3, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.744981Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 108:1, propose status:StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeSubDomain, state: EPathStateNoChanges), at schemeshard: 72057594046678944 2026-01-04T02:51:00.746694Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 108, response: Status: StatusAlreadyExists Reason: "Check failed: path: \'/MyRoot/USER_3\', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeSubDomain, state: EPathStateNoChanges)" TxId: 108 SchemeshardId: 72057594046678944 PathId: 41 PathCreateTxId: 106, at schemeshard: 72057594046678944 2026-01-04T02:51:00.746899Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 108, database: /MyRoot, subject: , status: StatusAlreadyExists, reason: Check failed: path: '/MyRoot/USER_3', error: path exist, request accepts it (id: [OwnerId: 72057594046678944, LocalPathId: 41], type: EPathTypeSubDomain, state: EPathStateNoChanges), operation: CREATE DIRECTORY, path: /MyRoot/USER_3 TestModificationResult got TxId: 108, wait until txId: 108 2026-01-04T02:51:00.747476Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:00.747660Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 206us result status StatusSuccess 2026-01-04T02:51:00.747979Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } 
PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:51:00.748378Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:00.748538Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 163us result status StatusSuccess 2026-01-04T02:51:00.748824Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000041 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "USER_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: 
"!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:51:00.749408Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_2" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:00.749544Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_2" took 154us result status StatusSuccess 2026-01-04T02:51:00.749840Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_2" PathDescription { Self { Name: "USER_2" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 104 CreateStep: 5000039 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 2 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 40 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:51:00.750438Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_3" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:00.750555Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_3" took 130us result status StatusSuccess 2026-01-04T02:51:00.750772Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_3" PathDescription { Self { Name: "USER_3" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 106 CreateStep: 5000040 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 
EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409549 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409550 } DomainKey { SchemeShard: 72057594046678944 PathId: 41 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 41 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TKesusTest::TestAcquireUpgrade >> TKesusTest::TestSessionDetachFutureId [GOOD] >> TKesusTest::TestSessionDestroy ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateSubDomainWithoutTabletsThenForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:50:59.952023Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:50:59.952105Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:59.952141Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:50:59.952166Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:50:59.952205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:50:59.952242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:50:59.952283Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:59.952336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: 
[RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:50:59.953134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:50:59.953341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:51:00.021418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:51:00.021463Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:00.032350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:51:00.032492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:51:00.032650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:51:00.039595Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:51:00.039862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:51:00.040607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:00.040859Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:51:00.043345Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:00.043517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:51:00.044519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:00.044568Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:00.044652Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:51:00.044692Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:51:00.044784Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:51:00.044928Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.050311Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:51:00.170767Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" 
OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:51:00.171007Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.171186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:51:00.171231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:51:00.171471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:51:00.171563Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:00.173661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:00.173871Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:51:00.174125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.174183Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:51:00.174219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:51:00.174263Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:51:00.176075Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.176141Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:51:00.176206Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:51:00.177918Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.177966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.178017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: 
NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:00.178066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:51:00.181985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:51:00.183682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:51:00.183862Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:51:00.184880Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:00.184997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:51:00.185060Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:00.185353Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:51:00.185410Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:00.185565Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:51:00.185673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:51:00.187563Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:00.187607Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
101, path id: 1 2026-01-04T02:51:00.890376Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:211:2211], at schemeshard: 72057594046678944, txId: 101, path id: 2 2026-01-04T02:51:00.890540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.890572Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:418: [72057594046678944] TDeleteParts opId# 101:0 ProgressState 2026-01-04T02:51:00.890637Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:51:00.890667Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:51:00.890702Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:51:00.890725Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:51:00.890759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-04T02:51:00.890787Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:51:00.890809Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:51:00.890832Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:51:00.890876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-04T02:51:00.890900Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 0 2026-01-04T02:51:00.890923Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-04T02:51:00.890962Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 18446744073709551615 2026-01-04T02:51:00.891400Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:51:00.891450Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:51:00.891478Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:51:00.891504Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 
2026-01-04T02:51:00.891536Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:51:00.892024Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:51:00.892077Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:51:00.892094Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:51:00.892113Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 18446744073709551615 2026-01-04T02:51:00.892131Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-04T02:51:00.892192Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 0 2026-01-04T02:51:00.892791Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:51:00.892852Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:51:00.892941Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-04T02:51:00.893164Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:51:00.893211Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:51:00.893257Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:51:00.894538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:51:00.895776Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:51:00.895837Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:51:00.895926Z node 2 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 101 2026-01-04T02:51:00.896076Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:51:00.896107Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:51:00.896341Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:51:00.896398Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:51:00.896432Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [2:348:2337] TestWaitNotification: OK eventTxId 101 2026-01-04T02:51:00.896707Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:00.896840Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 145us result status StatusPathDoesNotExist 2026-01-04T02:51:00.896950Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:51:00.897298Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:00.897448Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 138us result status StatusSuccess 2026-01-04T02:51:00.897701Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDeclareAndDefine [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:51:00.518616Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:51:00.518682Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:00.518707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:51:00.518735Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:51:00.518763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:51:00.518786Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:51:00.518820Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:00.518892Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 
604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:51:00.519484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:51:00.519685Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:51:00.586622Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:51:00.586666Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:00.597865Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:51:00.598042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:51:00.598214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:51:00.605518Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:51:00.605718Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:51:00.606415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:00.606692Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:51:00.609472Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:00.609674Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:51:00.610889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:00.610948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:00.611057Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:51:00.611103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:51:00.611212Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:51:00.611500Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.712554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.713340Z node 1 :FLAT_TX_SCHEMESHARD 
DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.713497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.713575Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.713640Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.713724Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.713792Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.713884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.713947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.713992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.714033Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.714098Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.714169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.714248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.714304Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
HEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2026-01-04T02:51:01.182181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 100:0 128 -> 240 2026-01-04T02:51:01.182234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 100:0, at tablet# 72057594046678944 2026-01-04T02:51:01.182365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:51:01.182419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 2 2026-01-04T02:51:01.182467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 38], at schemeshard: 72057594046678944 2026-01-04T02:51:01.184255Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:01.184293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:51:01.184431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 100, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:51:01.184565Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:01.184610Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 100, path id: 1 2026-01-04T02:51:01.184671Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 100, path id: 38 FAKE_COORDINATOR: Erasing txId 100 2026-01-04T02:51:01.185014Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 100:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.185056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 100:0 ProgressState 2026-01-04T02:51:01.185151Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-04T02:51:01.185189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:51:01.185250Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#100:0 progress is 1/1 2026-01-04T02:51:01.185285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:51:01.185317Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 100, ready 
parts: 1/1, is published: false 2026-01-04T02:51:01.185364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 100 ready parts: 1/1 2026-01-04T02:51:01.185415Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 100:0 2026-01-04T02:51:01.185448Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 100:0 2026-01-04T02:51:01.185551Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:51:01.185592Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 100, publications: 2, subscribers: 0 2026-01-04T02:51:01.185621Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 1], 8 2026-01-04T02:51:01.185650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 100, [OwnerId: 72057594046678944, LocalPathId: 38], 3 2026-01-04T02:51:01.186190Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:51:01.186271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:51:01.186319Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 100 2026-01-04T02:51:01.186358Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-04T02:51:01.186403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:51:01.187162Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:51:01.187233Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 3 PathOwnerId: 72057594046678944, cookie: 100 2026-01-04T02:51:01.187271Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 100 2026-01-04T02:51:01.187299Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 100, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 3 2026-01-04T02:51:01.187350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, 
LocalPathId: 38] was 2 2026-01-04T02:51:01.187416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 100, subscribers: 0 2026-01-04T02:51:01.189995Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 2026-01-04T02:51:01.190830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 100 TestModificationResult got TxId: 100, wait until txId: 100 TestModificationResults wait txId: 101 TestModificationResult got TxId: 101, wait until txId: 101 TestWaitNotification wait txId: 100 2026-01-04T02:51:01.191082Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-04T02:51:01.191124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-01-04T02:51:01.191206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:51:01.191229Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 2026-01-04T02:51:01.191611Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-04T02:51:01.191743Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:51:01.191794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-04T02:51:01.191830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:717:2707] 2026-01-04T02:51:01.191964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:51:01.191999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:717:2707] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 2026-01-04T02:51:01.192370Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:01.192552Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 191us result status StatusSuccess 2026-01-04T02:51:01.193003Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 0 TimeCastBucketsPerMediator: 0 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:51:00.462793Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:51:00.462862Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:00.462904Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:51:00.462931Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:51:00.462958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:51:00.462978Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:51:00.463016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:00.463068Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:51:00.463761Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:51:00.463975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:51:00.525296Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:51:00.525340Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:00.537474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:51:00.537646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:51:00.537877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:51:00.545645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:51:00.545883Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:51:00.546545Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:00.546756Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:51:00.549426Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:00.549617Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:51:00.550709Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:00.550765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:00.550880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:51:00.550929Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:51:00.551039Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:51:00.551218Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.557617Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:51:00.689084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 
72057594046678944 2026-01-04T02:51:00.689306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.689520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:51:00.689576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:51:00.689831Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:51:00.689915Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:00.691868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:00.692080Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:51:00.692353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.692427Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:51:00.692467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:51:00.692499Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:51:00.694093Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.694139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:51:00.694169Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:51:00.695731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.695768Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.695817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:00.695860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 
2026-01-04T02:51:00.698342Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:51:00.699756Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:51:00.699882Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:51:00.700629Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:00.700720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:51:00.700753Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:00.700964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:51:00.700998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:00.701122Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:51:00.701180Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:51:00.702822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:00.702866Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
gnore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409552 Status: COMPLETE TxId: 101 Step: 140 OrderId: 101 ExecLatency: 0 ProposeLatency: 3 DomainCoordinators: 72075186233409546 DomainCoordinators: 72075186233409547 DomainCoordinators: 72075186233409548 TxStats { PerShardStats { ShardId: 72075186233409552 CpuTimeUsec: 1070 } } CommitVersion { Step: 140 TxId: 101 } debug: NTableState::TProposedWaitParts operationId# 101:0 2026-01-04T02:51:01.005555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 620 RawX2: 4294969830 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:51:01.005593Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 101, tablet: 72075186233409552, partId: 0 2026-01-04T02:51:01.005680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 101:0, at schemeshard: 72057594046678944, message: Source { RawX1: 620 RawX2: 4294969830 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:51:01.005721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:51:01.005783Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 101:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 620 RawX2: 4294969830 } Origin: 72075186233409552 State: 2 TxId: 101 Step: 0 Generation: 2 2026-01-04T02:51:01.005858Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 101:0, shardIdx: 72057594046678944:7, shard: 72075186233409552, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:01.005896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.005965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 101:0, datashard: 72075186233409552, at schemeshard: 72057594046678944 2026-01-04T02:51:01.006001Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 129 -> 240 2026-01-04T02:51:01.008262Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:51:01.008365Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:51:01.008780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.009016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.009257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: 
TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.009288Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:51:01.009356Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:51:01.009380Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:51:01.009420Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:51:01.009442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:51:01.009465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: true 2026-01-04T02:51:01.009510Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:279:2269] message: TxId: 101 2026-01-04T02:51:01.009544Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:51:01.009571Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:51:01.009605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:51:01.009709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2026-01-04T02:51:01.010829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:51:01.010857Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:280:2270] TestWaitNotification: OK eventTxId 101 2026-01-04T02:51:01.011240Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:01.011397Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 173us result status StatusSuccess 2026-01-04T02:51:01.011871Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: 
EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:51:01.012394Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:01.012552Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 168us result status StatusSuccess 2026-01-04T02:51:01.012895Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 140 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { 
SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 10 Coordinators: 72075186233409546 Coordinators: 72075186233409547 Coordinators: 72075186233409548 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409549 Mediators: 72075186233409550 Mediators: 72075186233409551 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 7 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] >> TSchemeShardSubDomainTest::DeleteAndRestart [GOOD] >> TKesusTest::TestAcquireUpgrade [GOOD] >> TKesusTest::TestAcquireTimeout >> TSchemeShardSubDomainTest::DiskSpaceUsage >> TKesusTest::TestSessionDestroy [GOOD] >> TKesusTest::TestSessionStealing ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestKinesisHttpProxy::GoodRequestCreateStream [GOOD] Test command err: 2026-01-04T02:50:00.809428Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333221103915953:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:00.809495Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:01.057274Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:01.066161Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:01.066258Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:01.139957Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:01.327116Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:01.365491Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:01.482493Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:01.482537Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:01.482559Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:01.482657Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:01.824089Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:01.923245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:01.930533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:02.156836Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:50:02.173712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:50:02.179734Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:50:02.298665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.469901Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.527757Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.573443Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: 
(GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.610354Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.647746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.684041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.724326Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.757849Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.808297Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:04.256726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333238283786760:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.256726Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333238283786769:2371], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.256895Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.257301Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333238283786775:2373], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.257363Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.260769Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:04.274143Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333238283786774:2372], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:50:04.329078Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333238283786827:3104] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:05.050722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.100754Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.150922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.200009Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.231021Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-0 ... 
84: [PQ: 72075186224037907] server connected, pipe [8:7591333480404836181:2478], now have 1 active actors on pipe 2026-01-04T02:51:00.036740Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037911] server connected, pipe [8:7591333480404836185:2482], now have 1 active actors on pipe 2026-01-04T02:51:00.036782Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037908] server connected, pipe [8:7591333480404836182:2479], now have 1 active actors on pipe 2026-01-04T02:51:00.037582Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037908] server disconnected, pipe [8:7591333480404836182:2479] destroyed 2026-01-04T02:51:00.037600Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037909] server disconnected, pipe [8:7591333480404836183:2480] destroyed 2026-01-04T02:51:00.037611Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037911] server disconnected, pipe [8:7591333480404836185:2482] destroyed 2026-01-04T02:51:00.037621Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037907] server disconnected, pipe [8:7591333480404836181:2478] destroyed 2026-01-04T02:51:00.037631Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037910] server disconnected, pipe [8:7591333480404836184:2481] destroyed 2026-01-04T02:51:00.038617Z node 8 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DescribeStream] requestId [ac1b83ec-8fab3f21-cd30ebe1-48abab9e] reply ok 2026-01-04T02:51:00.038854Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:56854) <- (200 , 1672 bytes) 2026-01-04T02:51:00.038944Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:56854) connection closed Http output full {"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1767495060,"StorageLimitMb":0,"StreamName":"testtopic"}} 200 
{"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1767495060,"StorageLimitMb":0,"StreamName":"testtopic"}} 2026-01-04T02:51:00.040137Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:56860) incoming connection opened 2026-01-04T02:51:00.040240Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:56860) -> (POST /Root, 30 bytes) 2026-01-04T02:51:00.040375Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [b832:1f55:1d7c:0:a032:1f55:1d7c:0] request [DescribeStreamSummary] url [/Root] database [/Root] requestId: ea4b82ec-a5a809de-a2f8059e-8600815f 2026-01-04T02:51:00.040784Z node 8 :HTTP_PROXY INFO: http_req.cpp:982: http request [DescribeStreamSummary] requestId [ea4b82ec-a5a809de-a2f8059e-8600815f] got new request from [b832:1f55:1d7c:0:a032:1f55:1d7c:0] database '/Root' stream 'testtopic' 2026-01-04T02:51:00.041176Z node 8 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [DescribeStreamSummary] requestId [ea4b82ec-a5a809de-a2f8059e-8600815f] [auth] Authorized successfully 2026-01-04T02:51:00.041301Z node 8 :HTTP_PROXY INFO: http_req.cpp:701: http request [DescribeStreamSummary] requestId [ea4b82ec-a5a809de-a2f8059e-8600815f] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-04T02:51:00.042357Z node 8 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DescribeStreamSummary] requestId [ea4b82ec-a5a809de-a2f8059e-8600815f] reply ok 2026-01-04T02:51:00.042526Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:56860) <- (200 , 238 bytes) 2026-01-04T02:51:00.042615Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:56860) connection closed Http output full {"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1767495.06,"StreamName":"testtopic"}} 200 
{"StreamDescriptionSummary":{"RetentionPeriodHours":24,"OpenShardCount":5,"StreamArn":"testtopic","ConsumerCount":0,"KeyId":"","StreamStatus":"ACTIVE","EncryptionType":"NONE","StreamCreationTimestamp":1767495.06,"StreamName":"testtopic"}} 2026-01-04T02:51:00.043327Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:97: (#37,[::1]:56864) incoming connection opened 2026-01-04T02:51:00.043428Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:170: (#37,[::1]:56864) -> (POST /Root, 30 bytes) 2026-01-04T02:51:00.043560Z node 8 :HTTP_PROXY INFO: http_service.cpp:102: proxy service: incoming request from [7863:1f55:1d7c:0:6063:1f55:1d7c:0] request [DescribeStream] url [/Root] database [/Root] requestId: ba733106-5f196a3a-99d64a30-7f8f2a4b 2026-01-04T02:51:00.043912Z node 8 :HTTP_PROXY INFO: http_req.cpp:982: http request [DescribeStream] requestId [ba733106-5f196a3a-99d64a30-7f8f2a4b] got new request from [7863:1f55:1d7c:0:6063:1f55:1d7c:0] database '/Root' stream 'testtopic' 2026-01-04T02:51:00.044259Z node 8 :HTTP_PROXY DEBUG: auth_actors.cpp:130: http request [DescribeStream] requestId [ba733106-5f196a3a-99d64a30-7f8f2a4b] [auth] Authorized successfully 2026-01-04T02:51:00.044323Z node 8 :HTTP_PROXY INFO: http_req.cpp:701: http request [DescribeStream] requestId [ba733106-5f196a3a-99d64a30-7f8f2a4b] sending grpc request to '' database: '/Root' iam token size: 0 2026-01-04T02:51:00.045041Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037907] server connected, pipe [8:7591333480404836208:2490], now have 1 active actors on pipe 2026-01-04T02:51:00.045105Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037910] server connected, pipe [8:7591333480404836211:2493], now have 1 active actors on pipe 2026-01-04T02:51:00.045110Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037908] server connected, pipe [8:7591333480404836209:2491], now have 1 active actors on pipe 2026-01-04T02:51:00.045163Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037909] server connected, pipe [8:7591333480404836210:2492], now have 1 active actors on pipe 2026-01-04T02:51:00.045185Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2784: [PQ: 72075186224037911] server connected, pipe [8:7591333480404836212:2494], now have 1 active actors on pipe 2026-01-04T02:51:00.045866Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037907] server disconnected, pipe [8:7591333480404836208:2490] destroyed 2026-01-04T02:51:00.045881Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037908] server disconnected, pipe [8:7591333480404836209:2491] destroyed 2026-01-04T02:51:00.045896Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037909] server disconnected, pipe [8:7591333480404836210:2492] destroyed 2026-01-04T02:51:00.045903Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037910] server disconnected, pipe [8:7591333480404836211:2493] destroyed 2026-01-04T02:51:00.045914Z node 8 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037911] server disconnected, pipe [8:7591333480404836212:2494] destroyed 2026-01-04T02:51:00.046156Z node 8 :HTTP_PROXY INFO: http_req.cpp:1598: http request [DescribeStream] requestId [ba733106-5f196a3a-99d64a30-7f8f2a4b] reply ok 2026-01-04T02:51:00.046362Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:56864) <- (200 , 1672 bytes) 2026-01-04T02:51:00.046441Z node 8 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:56864) connection closed Http output full 
{"StreamDescription":{"RetentionPeriodHours":24,"WriteQuotaKbPerSec":1024,"StreamModeDetails":{"StreamMode":"ON_DEMAND"},"StreamArn":"testtopic","PartitioningSettings":{"MinActivePartitions":5,"AutoPartitioningSettings":{"Strategy":"AUTO_PARTITIONING_STRATEGY_DISABLED","PartitionWriteSpeed":{"StabilizationWindow":{"Nanos":0,"Seconds":300},"DownUtilizationPercent":30,"UpUtilizationPercent":90}},"MaxActivePartitions":5},"Shards":[{"ShardId":"shard-000000","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"68056473384187692692674921486353642290","StartingHashKey":"0"}},{"ShardId":"shard-000001","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"136112946768375385385349842972707284581","StartingHashKey":"68056473384187692692674921486353642291"}},{"ShardId":"shard-000002","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"204169420152563078078024764459060926872","StartingHashKey":"136112946768375385385349842972707284582"}},{"ShardId":"shard-000003","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"272225893536750770770699685945414569163","StartingHashKey":"204169420152563078078024764459060926873"}},{"ShardId":"shard-000004","SequenceNumberRange":{"StartingSequenceNumber":"0"},"HashKeyRange":{"EndingHashKey":"340282366920938463463374607431768211455","StartingHashKey":"272225893536750770770699685945414569164"}}],"KeyId":"","Owner":"Service1_id@as","StreamStatus":"ACTIVE","HasMoreShards":false,"EncryptionType":"ENCRYPTION_UNDEFINED","StreamCreationTimestamp":1767495060,"StorageLimitMb":0,"StreamName":"testtopic"}} |96.0%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:51:00.760437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:51:00.760500Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:00.760527Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:51:00.760550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:51:00.760594Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:51:00.760612Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:51:00.760650Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 
15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:00.760719Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:51:00.761305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:51:00.761496Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:51:00.821770Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:51:00.821843Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:00.832001Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:51:00.832128Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:51:00.832252Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:51:00.838369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:51:00.838529Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:51:00.839012Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:00.839179Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:51:00.841868Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:00.842058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:51:00.843285Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:00.843350Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:00.843475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:51:00.843520Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:51:00.843625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:51:00.843832Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.849272Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: 
[1:15:2062] 2026-01-04T02:51:00.950194Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:51:00.950361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.950532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:51:00.950592Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:51:00.950824Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:51:00.950908Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:00.952860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:00.953042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:51:00.953259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.953308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:51:00.953354Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:51:00.953393Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:51:00.955134Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.955194Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:51:00.955236Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:51:00.957004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.957050Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, 
operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.957104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:00.957176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:51:00.959821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:51:00.960921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:51:00.961037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:51:00.961741Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:00.961863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:51:00.961896Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:00.962103Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:51:00.962138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:00.962261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:51:00.962315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:51:00.963531Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:00.963562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 Forgetting tablet 72075186233409549 2026-01-04T02:51:01.272241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-04T02:51:01.272361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-04T02:51:01.272745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:205: TTxCleanDroppedSubDomains Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:51:01.272801Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:224: TTxCleanDroppedPaths: PersistRemoveSubDomain for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:51:01.272913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-04T02:51:01.275426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-04T02:51:01.275468Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-01-04T02:51:01.275656Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2026-01-04T02:51:01.275687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:7 tabletId 72075186233409552 2026-01-04T02:51:01.275821Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-04T02:51:01.275906Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6153: Failed to connect, to tablet: 72075186233409552, at schemeshard: 72057594046678944 2026-01-04T02:51:01.276051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:51:01.276074Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-04T02:51:01.276147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:51:01.276188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:51:01.276248Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:51:01.278769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-04T02:51:01.278810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 
2026-01-04T02:51:01.278863Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-04T02:51:01.278880Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-01-04T02:51:01.278912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:51:01.278937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-04T02:51:01.278970Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-04T02:51:01.278992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-04T02:51:01.279077Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:51:01.280408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 101 2026-01-04T02:51:01.280622Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:51:01.280663Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-04T02:51:01.280729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:51:01.280757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:51:01.281087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:51:01.281157Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:51:01.281189Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:720:2611] 2026-01-04T02:51:01.281349Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:51:01.281412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:51:01.281434Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:720:2611] TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2026-01-04T02:51:01.281785Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at 
schemeshard: 72057594046678944 2026-01-04T02:51:01.281992Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 187us result status StatusPathDoesNotExist 2026-01-04T02:51:01.282140Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:51:01.282462Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:01.282620Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 188us result status StatusPathDoesNotExist 2026-01-04T02:51:01.282735Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:51:01.283111Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:01.283280Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 204us result status StatusSuccess 2026-01-04T02:51:01.283629Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 7 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 7 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousDefineAndCreateTable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:51:00.656925Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:51:00.657006Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:00.657042Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:51:00.657071Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:51:00.657103Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:51:00.657124Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:51:00.657161Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:00.657219Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 
2026-01-04T02:51:00.657852Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:51:00.658051Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:51:00.733775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:51:00.733843Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:00.744779Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:51:00.744947Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:51:00.745146Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:51:00.751689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:51:00.751878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:51:00.752425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:00.752622Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:51:00.754787Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:00.754961Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:51:00.755862Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:00.755907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:00.755986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:51:00.756018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:51:00.756115Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:51:00.756323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.762551Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:51:00.880020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 
72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:51:00.880196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.880364Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:51:00.880398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:51:00.880565Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:51:00.880623Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:00.882353Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:00.882519Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:51:00.882693Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.882733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:51:00.882764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:51:00.882795Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:51:00.884196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.884243Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:51:00.884281Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:51:00.885703Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.885747Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.885826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:00.885874Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:51:00.888403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:51:00.889570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:51:00.889701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:51:00.890523Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:00.890612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:51:00.890648Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:00.890864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:51:00.890903Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:00.891060Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:51:00.891118Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:51:00.892519Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:00.892554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
ts { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1104 } } CommitVersion { Step: 140 TxId: 102 } 2026-01-04T02:51:01.213860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_part.cpp:108: HandleReply TEvDataShard::TEvProposeTransactionResult Ignore message: tablet# 72057594046678944, ev# TxKind: TX_KIND_SCHEME Origin: 72075186233409549 Status: COMPLETE TxId: 102 Step: 140 OrderId: 102 ExecLatency: 0 ProposeLatency: 2 DomainCoordinators: 72075186233409546 TxStats { PerShardStats { ShardId: 72075186233409549 CpuTimeUsec: 1104 } } CommitVersion { Step: 140 TxId: 102 } debug: NTableState::TProposedWaitParts operationId# 102:0 2026-01-04T02:51:01.214771Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5926: Handle TEvSchemaChanged, tabletId: 72057594046678944, at schemeshard: 72057594046678944, message: Source { RawX1: 509 RawX2: 4294969758 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:51:01.214816Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 102, tablet: 72075186233409549, partId: 0 2026-01-04T02:51:01.214929Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 102:0, at schemeshard: 72057594046678944, message: Source { RawX1: 509 RawX2: 4294969758 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:51:01.214978Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1081: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 2026-01-04T02:51:01.215043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:1085: NTableState::TProposedWaitParts operationId# 102:0 HandleReply TEvDataShard::TEvSchemaChanged at tablet: 72057594046678944 message: Source { RawX1: 509 RawX2: 4294969758 } Origin: 72075186233409549 State: 2 TxId: 102 Step: 0 Generation: 2 2026-01-04T02:51:01.215106Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:673: CollectSchemaChanged accept TEvDataShard::TEvSchemaChanged, operationId: 102:0, shardIdx: 72057594046678944:4, shard: 72075186233409549, left await: 0, txState.State: ProposedWaitParts, txState.ReadyForNotifications: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:01.215139Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.215182Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 102:0, datashard: 72075186233409549, at schemeshard: 72057594046678944 2026-01-04T02:51:01.215214Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:0 129 -> 240 2026-01-04T02:51:01.217713Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:51:01.217850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 2026-01-04T02:51:01.219026Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.219129Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: 
TTxOperationReply complete, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.219260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.219307Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:0 ProgressState 2026-01-04T02:51:01.219431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:51:01.219474Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:51:01.219522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:0 progress is 1/1 2026-01-04T02:51:01.219585Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:51:01.219664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/1, is published: true 2026-01-04T02:51:01.219752Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:314:2304] message: TxId: 102 2026-01-04T02:51:01.219800Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 1/1 2026-01-04T02:51:01.219841Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T02:51:01.219873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T02:51:01.219975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 3 2026-01-04T02:51:01.221352Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:51:01.221384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:537:2482] TestWaitNotification: OK eventTxId 102 2026-01-04T02:51:01.221761Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:01.221942Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 205us result status StatusSuccess 2026-01-04T02:51:01.222339Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 2 SecurityStateVersion: 0 } } 
Children { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 140 ParentPathId: 2 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:51:01.222825Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:01.222978Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 151us result status StatusSuccess 2026-01-04T02:51:01.223321Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 3 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 140 ParentPathId: 2 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 
WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 2 PlanResolution: 10 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } PathsInside: 1 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 3 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBadRootStatusInGetNodeRequest [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic >> TKesusTest::TestSemaphoreData [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::RestartAtInFly [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:51:01.196618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:51:01.196685Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:01.196711Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:51:01.196734Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:51:01.196763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:51:01.196784Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:51:01.196817Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:01.196882Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:51:01.197484Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:51:01.197691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:51:01.272775Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:51:01.272827Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:01.286673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:51:01.286850Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:51:01.287031Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:51:01.294541Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:51:01.294773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:51:01.295260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:01.295454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:51:01.297798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:01.297994Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:51:01.298820Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:01.298866Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:01.298956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:51:01.298986Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:51:01.299059Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:51:01.299204Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.304531Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:51:01.422375Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, 
message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:51:01.422561Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.422721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:51:01.422749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:51:01.422936Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:51:01.423001Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:01.424558Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:01.424737Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:51:01.424898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.424939Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:51:01.424969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:51:01.424997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:51:01.426435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.426479Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:51:01.426514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:51:01.427979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.428017Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.428066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:01.428107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:51:01.430741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:51:01.432218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:51:01.432394Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:51:01.433221Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:01.433314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:51:01.433347Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:01.433551Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:51:01.433599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:01.433739Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:51:01.433817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:51:01.435308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:01.435340Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
CE: schemeshard__init.cpp:1820: TTxInit for Tables, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.567575Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__root_shred_manager.cpp:452: [RootShredManager] Restore: Generation# 0, Status# 0, WakeupInterval# 604800 s, NumberShredTenantsInRunning# 0 2026-01-04T02:51:01.567800Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2093: TTxInit for Columns, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.567869Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2153: TTxInit for ColumnsAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.567947Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2211: TTxInit for Shards, read records: 3, at schemeshard: 72057594046678944 2026-01-04T02:51:01.568002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-04T02:51:01.568021Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 2 2026-01-04T02:51:01.568035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 3 2026-01-04T02:51:01.568115Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2297: TTxInit for TablePartitions, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.568181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2363: TTxInit for TableShardPartitionConfigs, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.568317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2514: TTxInit for ChannelsBinding, read records: 9, at schemeshard: 72057594046678944 2026-01-04T02:51:01.568552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2893: TTxInit for TableIndexes, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.568628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:2972: TTxInit for TableIndexKeys, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.568922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3473: TTxInit for KesusInfos, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.569076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3509: TTxInit for KesusAlters, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.569253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3754: TTxInit for TxShards, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.569325Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3899: TTxInit for ShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.569355Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3916: TTxInit for SystemShardToDelete, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.569439Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:3933: TTxInit for BackupSettings, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.569574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4094: TTxInit for ShardBackupStatus, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.569628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4110: TTxInit for CompletedBackup, read records: 
0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.569736Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4395: TTxInit for Publications, read records: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.569942Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4762: IndexBuild , records: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.570014Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4828: KMeansTreeSample records: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.570051Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4887: KMeansTreeCluster records: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.570164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:4976: SnapshotTables: snapshots: 0 tables: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.570201Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5003: SnapshotSteps: snapshots: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.570231Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:5030: LongLocks: records: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.573529Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:51:01.574864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:01.574911Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:01.575424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:51:01.575473Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:51:01.575509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:51:01.575966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 TestWaitNotification wait txId: 100 2026-01-04T02:51:01.638450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-04T02:51:01.638494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 Leader for TabletID 72057594046678944 is [1:457:2410] sender: [1:521:2058] recipient: [1:15:2062] 2026-01-04T02:51:01.639110Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-04T02:51:01.639200Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-04T02:51:01.639228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:519:2458] TestWaitNotification: OK eventTxId 100 2026-01-04T02:51:01.639563Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: 
false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:01.639714Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 157us result status StatusSuccess 2026-01-04T02:51:01.640112Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 1 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 Mediators: 72075186233409548 } DomainKey { SchemeShard: 72057594046678944 PathId: 2 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 2 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 2 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:51:01.640554Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:01.640666Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 123us result status StatusSuccess 2026-01-04T02:51:01.640963Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 
5000002 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] >> TKesusTest::TestAttachOutOfSequence |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TKesusTest::TestSessionStealing [GOOD] >> TKesusTest::TestSessionStealingAnyKey >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailesOnNotATopic [GOOD] >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/http_proxy/ut/inside_ydb_ut/unittest >> TestYmqHttpProxy::TestListQueueTags [GOOD] Test command err: 2026-01-04T02:50:00.808964Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333222949461364:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:00.809025Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:01.134662Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:01.134772Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:01.164869Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:01.191372Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:01.305423Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333222949461327:2081] 1767495000805798 != 1767495000805801 2026-01-04T02:50:01.313103Z node 1 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:01.482625Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:01.482654Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:01.482669Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:01.482764Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:01.486435Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:01.823271Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:01.923661Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:01.933312Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:50:02.150430Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterUserAttributes, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_user_attrs.cpp:72) 2026-01-04T02:50:02.163609Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:50:02.177054Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:50:02.288991Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.441920Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.485176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called 
at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.534694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.579855Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.610418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.644843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.675298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.709477Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:02.745944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:04.055310Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333240129332168:2368], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.055463Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.056035Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333240129332178:2370], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.056116Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.056320Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333240129332182:2372], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:04.064073Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710673:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:04.081639Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333240129332184:2373], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710673 completed, doublechecking } 2026-01-04T02:50:04.134092Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333240129332235:3101] txid# 281474976710674, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 54], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:05.058327Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.110076Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.145391Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710678:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.182125Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:05.214289Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESche ... 
: Optional Optional { Item { Kind: Data Data { Scheme: 2 } } } } } Member { Name: "VisibilityDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "inflyCount" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 3 } } } } } } } Member { Name: "inflyVersion" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "messageCount" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 3 } } } } } } } Member { Name: "readOffset" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } Value { Struct { Optional { Optional { Uint64: 1767495060138 } } } Struct { Optional { } } Struct { Optional { Optional { Int64: 0 } } } Struct { Optional { Uint64: 0 } } Struct { Optional { Optional { Int64: 0 } } } Struct { Optional { Optional { Uint64: 0 } } } } } } 2026-01-04T02:51:00.466994Z node 7 :SQS DEBUG: queue_leader.cpp:2148: Infly version for shard [cloud4/000000000000000101v0/0]: 0 2026-01-04T02:51:00.474410Z node 7 :SQS TRACE: executor.cpp:286: Request [aa68052d-728df9a6-7f827fe5-8b6d51b3] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] HandleResponse { Status: 48 TxId: 281474976710715 Step: 1767495060520 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { 
Optional { Bool: true } } Struct { Optional { Text: "{}" } } } } } 2026-01-04T02:51:00.474453Z node 7 :SQS DEBUG: executor.cpp:287: Request [aa68052d-728df9a6-7f827fe5-8b6d51b3] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Attempt 1 execution duration: 7ms 2026-01-04T02:51:00.474959Z node 7 :SQS TRACE: executor.cpp:325: Request [aa68052d-728df9a6-7f827fe5-8b6d51b3] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Sending mkql execution result: { Status: 48 TxId: 281474976710715 Step: 1767495060520 StatusCode: SUCCESS ExecutionEngineStatus: 1 ExecutionEngineResponseStatus: 2 ExecutionEngineEvaluatedResponse { Type { Kind: Struct Struct { Member { Name: "attrs" Type { Kind: Optional Optional { Item { Kind: Optional Optional { Item { Kind: Struct Struct { Member { Name: "ContentBasedDeduplication" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "DelaySeconds" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "DlqArn" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "DlqName" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } Member { Name: "FifoQueue" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "MaxReceiveCount" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MaximumMessageSize" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "MessageRetentionPeriod" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ReceiveMessageWaitTime" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "ShowDetailedCountersDeadline" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } Member { Name: "VisibilityTimeout" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4 } } } } } } } } } } } } Member { Name: "queueExists" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 6 } } } } } Member { Name: "tags" Type { Kind: Optional Optional { Item { Kind: Data Data { Scheme: 4608 } } } } } } } Value { Struct { Optional { Optional { Struct { Optional { Bool: false } } Struct { Optional { Uint64: 0 } } Struct { Optional { Text: "" } } Struct { Optional { Text: "" } } Struct { Optional { Bool: true } } Struct { Optional { Uint64: 0 } } Struct { Optional { Uint64: 262144 } } Struct { Optional { Uint64: 345600000 } } Struct { Optional { Uint64: 0 } } Struct { } Struct { Optional { Uint64: 30000 } } } } } Struct { Optional { Bool: true } } Struct { Optional { Text: "{}" } } } } } 2026-01-04T02:51:00.475049Z node 7 :SQS TRACE: executor.cpp:327: Request [aa68052d-728df9a6-7f827fe5-8b6d51b3] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] Minikql data response: {"attrs": {"ContentBasedDeduplication": false, "DelaySeconds": 0, "DlqArn": "", "DlqName": "", "FifoQueue": true, "MaxReceiveCount": 0, "MaximumMessageSize": 262144, "MessageRetentionPeriod": 345600000, "ReceiveMessageWaitTime": 0, "ShowDetailedCountersDeadline": null, "VisibilityTimeout": 30000}, "queueExists": true, "tags": "{}"} 2026-01-04T02:51:00.475210Z node 7 :SQS DEBUG: executor.cpp:401: Request [aa68052d-728df9a6-7f827fe5-8b6d51b3] Query(idx=INTERNAL_GET_QUEUE_ATTRIBUTES_ID) Queue [cloud4/000000000000000301v0] execution duration: 9ms 
2026-01-04T02:51:00.475287Z node 7 :SQS DEBUG: queue_leader.cpp:2038: Created new Deduplication cleanup actor for queue [cloud4/000000000000000301v0]. Actor id: [7:7591333481695157481:3904] 2026-01-04T02:51:00.475316Z node 7 :SQS DEBUG: queue_leader.cpp:2038: Created new Reads cleanup actor for queue [cloud4/000000000000000301v0]. Actor id: [7:7591333481695157482:3905] 2026-01-04T02:51:00.475347Z node 7 :SQS DEBUG: queue_leader.cpp:2050: Created new retention actor for queue [cloud4/000000000000000301v0]. Actor id: [7:7591333481695157483:3906] 2026-01-04T02:51:00.475349Z node 7 :SQS INFO: fifo_cleanup.cpp:31: Request [bca452cb-a9cedd11-631bbfcb-3861e2f] Bootstrap cleanup actor for queue [cloud4/000000000000000301v0] 2026-01-04T02:51:00.475368Z node 7 :SQS INFO: fifo_cleanup.cpp:31: Request [4d8f64a-b278ac83-4807af10-99637677] Bootstrap cleanup actor for queue [cloud4/000000000000000301v0] 2026-01-04T02:51:00.475376Z node 7 :SQS DEBUG: queue_leader.cpp:2054: Created new purge actor for queue [cloud4/000000000000000301v0]. Actor id: [7:7591333481695157484:3907] 2026-01-04T02:51:00.475386Z node 7 :SQS DEBUG: queue_leader.cpp:556: Request [aa68052d-728df9a6-7f827fe5-8b6d51b3] Sending executed reply 2026-01-04T02:51:00.475387Z node 7 :SQS INFO: retention.cpp:30: Request [80b44a50-c1ef32a7-bcd466e1-caf4b3d1] Bootstrap retention actor for queue [cloud4/000000000000000301v0] 2026-01-04T02:51:00.475407Z node 7 :SQS INFO: purge.cpp:35: Request [f4771e95-93c8e62f-9b6d96e0-c9310b8c] Create purge actor for queue /Root/SQS/cloud4/000000000000000301v0 2026-01-04T02:51:00.475692Z node 7 :SQS DEBUG: action.h:627: Request [83e28274-fd6849ba-95381adb-50faa8bf] Get configuration duration: 18ms 2026-01-04T02:51:00.475728Z node 7 :SQS TRACE: action.h:647: Request [83e28274-fd6849ba-95381adb-50faa8bf] Got configuration. Root url: http://ghrun-46vtxmsa7i.auto.internal:8771, Shards: 1, Fail: 0 2026-01-04T02:51:00.475748Z node 7 :SQS TRACE: action.h:427: Request [83e28274-fd6849ba-95381adb-50faa8bf] DoRoutine 2026-01-04T02:51:00.475822Z node 7 :SQS TRACE: action.h:264: Request [83e28274-fd6849ba-95381adb-50faa8bf] SendReplyAndDie from action actor { ListQueueTags { RequestId: "83e28274-fd6849ba-95381adb-50faa8bf" } } 2026-01-04T02:51:00.475923Z node 7 :SQS TRACE: proxy_service.h:35: Request [83e28274-fd6849ba-95381adb-50faa8bf] Sending sqs response: { ListQueueTags { RequestId: "83e28274-fd6849ba-95381adb-50faa8bf" } RequestId: "83e28274-fd6849ba-95381adb-50faa8bf" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2026-01-04T02:51:00.476006Z node 7 :SQS TRACE: service.cpp:1483: Dec local leader ref for actor [7:7591333481695157459:3885]. 
Found: 1 2026-01-04T02:51:00.476023Z node 7 :SQS TRACE: proxy_service.cpp:194: HandleSqsResponse ListQueueTags { RequestId: "83e28274-fd6849ba-95381adb-50faa8bf" } RequestId: "83e28274-fd6849ba-95381adb-50faa8bf" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2026-01-04T02:51:00.476078Z node 7 :SQS TRACE: proxy_service.cpp:208: Sending answer to proxy actor [7:7591333481695157458:2549]: ListQueueTags { RequestId: "83e28274-fd6849ba-95381adb-50faa8bf" } RequestId: "83e28274-fd6849ba-95381adb-50faa8bf" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true 2026-01-04T02:51:00.476212Z node 7 :SQS TRACE: proxy_actor.cpp:178: Request [83e28274-fd6849ba-95381adb-50faa8bf] HandleResponse: { ListQueueTags { RequestId: "83e28274-fd6849ba-95381adb-50faa8bf" } RequestId: "83e28274-fd6849ba-95381adb-50faa8bf" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true }, status: OK 2026-01-04T02:51:00.476283Z node 7 :SQS DEBUG: proxy_actor.cpp:147: Request [83e28274-fd6849ba-95381adb-50faa8bf] Sending reply from proxy actor: { ListQueueTags { RequestId: "83e28274-fd6849ba-95381adb-50faa8bf" } RequestId: "83e28274-fd6849ba-95381adb-50faa8bf" FolderId: "folder4" ResourceId: "000000000000000301v0" IsFifo: true } 2026-01-04T02:51:00.476395Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:401: http request [ListQueueTags] requestId [83e28274-fd6849ba-95381adb-50faa8bf] Got succesfult GRPC response. 2026-01-04T02:51:00.476443Z node 7 :HTTP_PROXY INFO: http_req.cpp:1598: http request [ListQueueTags] requestId [83e28274-fd6849ba-95381adb-50faa8bf] reply ok 2026-01-04T02:51:00.476519Z node 7 :HTTP_PROXY DEBUG: http_req.cpp:1658: http request [ListQueueTags] requestId [83e28274-fd6849ba-95381adb-50faa8bf] Send metering event. HttpStatusCode: 200 IsFifo: 1 FolderId: folder4 RequestSizeInBytes: 530 ResponseSizeInBytes: 154 SourceAddress: b819:132d:4b7c:0:a019:132d:4b7c:0 ResourceId: 000000000000000301v0 Action: ListQueueTags 2026-01-04T02:51:00.476611Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:304: (#37,[::1]:48394) <- (200 , 2 bytes) 2026-01-04T02:51:00.476698Z node 7 :HTTP DEBUG: http_proxy_incoming.cpp:365: (#37,[::1]:48394) connection closed Http output full {} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromBadlyFormattedFile [GOOD] Test command err: 2026-01-04T02:50:59.415007Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333477514096094:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:59.415076Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:59.670114Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:59.670266Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:59.701249Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:50:59.706955Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-04T02:50:59.738249Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:59.788675Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/002467/r3tmp/yandexd0sRcd.tmp 2026-01-04T02:50:59.788772Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/002467/r3tmp/yandexd0sRcd.tmp 2026-01-04T02:50:59.790198Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:344: invalid NetData format 2026-01-04T02:50:59.790242Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: /home/runner/.ya/build/build_root/c5zo/002467/r3tmp/yandexd0sRcd.tmp 2026-01-04T02:50:59.790317Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:59.882704Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/http_proxy/ut/inside_ydb_ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest >> TKesusTest::TestAttachOutOfSequence [GOOD] >> TKesusTest::TestAttachOutOfSequenceInTx >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] >> TKesusTest::TestSessionStealingAnyKey [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeLimitsCreatePq [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:51:01.360703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:51:01.360829Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:01.360872Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:51:01.360905Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:51:01.360943Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:51:01.360983Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:51:01.361043Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:01.361134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 
10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:51:01.361984Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:51:01.362246Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:51:01.440440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:51:01.440498Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:01.457430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:51:01.457605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:51:01.457788Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:51:01.465127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:51:01.465363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:51:01.466090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:01.466315Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:51:01.469081Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:01.469260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:51:01.470497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:01.470556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:01.470661Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:51:01.470725Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:51:01.470841Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:51:01.471069Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.477094Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:51:01.571537Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" 
StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:51:01.571755Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.571891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:51:01.571920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:51:01.572090Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:51:01.572151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:01.573689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:01.573877Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:51:01.574042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.574081Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:51:01.574110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:51:01.574133Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:51:01.575400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.575440Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:51:01.575470Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:51:01.576556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.576587Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.576625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 
72057594046678944 2026-01-04T02:51:01.576737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:51:01.579208Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:51:01.580259Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:51:01.580369Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:51:01.581137Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:01.581218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:51:01.581247Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:01.581460Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:51:01.581504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:01.581650Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:51:01.581734Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:51:01.582954Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:01.582987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
lt> execute, operationId: 104:0, at schemeshard: 72057594046678944, message: TabletId: 72075186233409551 TxId: 104 Status: OK 2026-01-04T02:51:01.998964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:654: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult triggers early, at schemeshard: 72057594046678944 message# TabletId: 72075186233409551 TxId: 104 Status: OK 2026-01-04T02:51:01.998986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:659: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionAttachResult CollectPQConfigChanged: false 2026-01-04T02:51:01.999003Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 2 2026-01-04T02:51:02.011655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2026-01-04T02:51:02.012540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:51:02.047827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409550, partId: 0 2026-01-04T02:51:02.047956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2026-01-04T02:51:02.048009Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 72075186233409550 Status: COMPLETE TxId: 104 Step: 5000003 2026-01-04T02:51:02.048046Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:5, shard: 72075186233409550, left await: 1, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:02.048088Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: false 2026-01-04T02:51:02.048130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:765: NPQState::TPropose operationId# 104:0 can't persist state: ShardsInProgress is not empty, remain: 1 FAKE_COORDINATOR: advance: minStep5000003 State->FrontStep: 5000003 2026-01-04T02:51:02.049036Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 104, tablet: 72075186233409551, partId: 0 2026-01-04T02:51:02.049109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 104:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 2026-01-04T02:51:02.049143Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_pq.cpp:635: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult triggers early, at schemeshard: 72057594046678944 message# Origin: 
72075186233409551 Status: COMPLETE TxId: 104 Step: 5000003 2026-01-04T02:51:02.049176Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:270: CollectPQConfigChanged accept TEvPersQueue::TEvProposeTransactionResult, operationId: 104:0, shardIdx: 72057594046678944:6, shard: 72075186233409551, left await: 0, txState.State: Propose, txState.ReadyForNotifications: 0, at schemeshard: 72057594046678944 2026-01-04T02:51:02.049211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_pq.cpp:640: NPQState::TPropose operationId# 104:0 HandleReply TEvProposeTransactionResult CollectPQConfigChanged: true 2026-01-04T02:51:02.049326Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 128 -> 240 2026-01-04T02:51:02.049460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:51:02.049505Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 FAKE_COORDINATOR: Erasing txId 104 2026-01-04T02:51:02.052089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:51:02.052213Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:51:02.052495Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:02.052532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:51:02.052701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 104, path id: [OwnerId: 72057594046678944, LocalPathId: 3] 2026-01-04T02:51:02.052849Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:02.052894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:340:2316], at schemeshard: 72057594046678944, txId: 104, path id: 1 2026-01-04T02:51:02.052934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:340:2316], at schemeshard: 72057594046678944, txId: 104, path id: 3 2026-01-04T02:51:02.053360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72057594046678944 2026-01-04T02:51:02.053419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 104:0 ProgressState 2026-01-04T02:51:02.053521Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:51:02.053556Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:51:02.053595Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:51:02.053624Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:51:02.053658Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: false 2026-01-04T02:51:02.053715Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:51:02.053760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-04T02:51:02.053796Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-04T02:51:02.053966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 6 2026-01-04T02:51:02.054015Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 104, publications: 2, subscribers: 0 2026-01-04T02:51:02.054047Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 1], 7 2026-01-04T02:51:02.054101Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 104, [OwnerId: 72057594046678944, LocalPathId: 3], 2 2026-01-04T02:51:02.054851Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:51:02.054925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 7 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:51:02.054971Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:51:02.055007Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 7 2026-01-04T02:51:02.055043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:51:02.055816Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:51:02.055905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 3 Version: 2 PathOwnerId: 72057594046678944, cookie: 104 2026-01-04T02:51:02.055937Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 104 2026-01-04T02:51:02.055962Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 104, pathId: [OwnerId: 72057594046678944, LocalPathId: 3], version: 2 2026-01-04T02:51:02.055997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 3] was 5 2026-01-04T02:51:02.056056Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 104, subscribers: 0 2026-01-04T02:51:02.058894Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 2026-01-04T02:51:02.059885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 104 TestModificationResult got TxId: 104, wait until txId: 104 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SchemeQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:50:59.549180Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:50:59.549277Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:59.549318Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:50:59.549356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:50:59.549416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:50:59.549458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:50:59.549537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:59.549620Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:50:59.550535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:50:59.550819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:50:59.616720Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:50:59.616775Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:59.628147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:50:59.628307Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:50:59.628454Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:50:59.634997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:50:59.635185Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:50:59.635805Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:50:59.636049Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:50:59.638641Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:59.638813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:50:59.639815Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:50:59.639860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:59.639938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:50:59.639968Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:50:59.640045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:50:59.640197Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.747215Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.748019Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.748131Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.748179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.748252Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.748324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.748367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.748420Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.748506Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.748580Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.748642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.748687Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.748764Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.748838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.748900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
athId: 38] was 11 2026-01-04T02:51:02.093643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 46] was 3 2026-01-04T02:51:02.095707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 137, response: Status: StatusAccepted TxId: 137 SchemeshardId: 72057594046678944 PathId: 46, at schemeshard: 72057594046678944 2026-01-04T02:51:02.095933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 137, database: /MyRoot/USER_0, subject: , status: StatusAccepted, operation: CREATE TABLE, path: /MyRoot/USER_0/Table11 2026-01-04T02:51:02.096198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:02.096236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:51:02.096460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 137, path id: [OwnerId: 72057594046678944, LocalPathId: 46] 2026-01-04T02:51:02.096582Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:02.096632Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:1399:3262], at schemeshard: 72057594046678944, txId: 137, path id: 38 2026-01-04T02:51:02.096678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:1399:3262], at schemeshard: 72057594046678944, txId: 137, path id: 46 2026-01-04T02:51:02.097293Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2026-01-04T02:51:02.097370Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 137:0 ProgressState, operation type: TxCreateTable, at tablet# 72057594046678944 2026-01-04T02:51:02.097596Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:359: TCreateParts opId# 137:0 CreateRequest Event to Hive: 72057594037968897 msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 46 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2026-01-04T02:51:02.098343Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 15 PathOwnerId: 72057594046678944, cookie: 137 2026-01-04T02:51:02.098429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 38 Version: 15 PathOwnerId: 72057594046678944, cookie: 137 2026-01-04T02:51:02.098469Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: 
Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2026-01-04T02:51:02.098511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 15 2026-01-04T02:51:02.098543Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 12 2026-01-04T02:51:02.099443Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 46 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2026-01-04T02:51:02.099504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 4 LocalPathId: 46 Version: 1 PathOwnerId: 72057594046678944, cookie: 137 2026-01-04T02:51:02.099526Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 137 2026-01-04T02:51:02.099547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 137, pathId: [OwnerId: 72057594046678944, LocalPathId: 46], version: 1 2026-01-04T02:51:02.099570Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 46] was 4 2026-01-04T02:51:02.099625Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 137, ready parts: 0/1, is published: true 2026-01-04T02:51:02.102196Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72057594037968897 cookie: 72057594046678944:10 msg type: 268697601 2026-01-04T02:51:02.102322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72057594037968897 2026-01-04T02:51:02.102359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1852: TOperation RegisterRelationByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2026-01-04T02:51:02.103076Z node 1 :HIVE INFO: tablet_helpers.cpp:1293: [72057594037968897] TEvCreateTablet, msg: Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 46 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } FAKEHIVE 72057594037968897 TEvCreateTablet Owner: 72057594046678944 OwnerIdx: 10 TabletType: DataShard ObjectDomain { SchemeShard: 72057594046678944 PathId: 38 } ObjectId: 46 BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } BindedChannels { StoragePoolName: "pool-1" } AllowedDomains { SchemeShard: 72057594046678944 PathId: 38 } 2026-01-04T02:51:02.103229Z node 1 :HIVE INFO: tablet_helpers.cpp:1357: [72057594037968897] TEvCreateTablet, Owner 72057594046678944, OwnerIdx 10, type DataShard, boot OK, tablet id 72075186233409555 2026-01-04T02:51:02.103314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_impl.cpp:6307: Handle TEvCreateTabletReply at schemeshard: 72057594046678944 message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2026-01-04T02:51:02.103348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1866: TOperation FindRelatedPartByShardIdx, TxId: 137, shardIdx: 72057594046678944:10, partId: 0 2026-01-04T02:51:02.103472Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 137:0, at schemeshard: 72057594046678944, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2026-01-04T02:51:02.103528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:177: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, at tabletId: 72057594046678944 2026-01-04T02:51:02.103590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:180: TCreateParts opId# 137:0 HandleReply TEvCreateTabletReply, message: Status: OK Owner: 72057594046678944 OwnerIdx: 10 TabletID: 72075186233409555 Origin: 72057594037968897 2026-01-04T02:51:02.103662Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 137:0 2 -> 3 2026-01-04T02:51:02.104830Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2026-01-04T02:51:02.105284Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 137 2026-01-04T02:51:02.107540Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 137:0, at schemeshard: 72057594046678944 2026-01-04T02:51:02.107824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 137:0, at schemeshard: 72057594046678944 2026-01-04T02:51:02.107882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_create_table.cpp:219: TCreateTable TConfigureParts operationId# 137:0 ProgressState at tabletId# 72057594046678944 2026-01-04T02:51:02.107953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:239: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 seqNo: 4:5 2026-01-04T02:51:02.108236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_create_table.cpp:255: TCreateTable TConfigureParts operationId# 137:0 ProgressState Propose modify scheme on datashard datashardId: 72075186233409555 message: TxKind: TX_KIND_SCHEME SourceDeprecated { RawX1: 1345 RawX2: 4294970515 } TxBody: "\n\236\004\n\007Table11\020.\032\r\n\003key\030\002 \001(\000@\000\032\020\n\005Value\030\200$ \002(\000@\000(\001:\262\003\022\253\003\010\200\200\200\002\020\254\002\030\364\003 \200\200\200\010(\0000\200\200\200 8\200\200\200\010@\2008H\000RX\010\000\020\000\030\010 \010(\200\200\200@0\377\377\377\377\0178\001B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen1P\nX\200\200\001`nh\000p\000Rb\010\001\020\200\200\200\024\030\005 \020(\200\200\200\200\0020\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen2P\nX\200\200\001`nh\200\200\200\004p\200\200\200\004Rc\010\002\020\200\200\200\310\001\030\005 
\020(\200\200\200\200@0\377\377\377\377\0178\000B$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionJ\017compaction_gen3P\nX\200\200\001`nh\200\200\200(p\200\200\200(X\001`\005j$\010e\020d\031\000\000\000\000\000\000\360?*\025background_compactionr\017compaction_gen0z\017compaction_gen0\202\001\004scan\210\001\200\200\200\010\220\001\364\003\230\0012\270\001\2008\300\001\006R\002\020\001J\026/MyRoot/USER_0/Table11\242\001\006\001\000\000\000\000\200\252\001\000\260\001\001\270\001\000\210\002\001\222\002\013\t\240\207\205\000\000\000\000\001\020.:\004\010\004\020\005" TxId: 137 ExecLevel: 0 Flags: 0 SchemeShardId: 72057594046678944 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } SubDomainPathId: 38 2026-01-04T02:51:02.111175Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 137:0 from tablet: 72057594046678944 to tablet: 72075186233409555 cookie: 72057594046678944:10 msg type: 269549568 2026-01-04T02:51:02.111355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 137, partId: 0, tablet: 72075186233409555 TestModificationResult got TxId: 137, wait until txId: 137 >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] >> TKesusTest::TestSemaphoreReleaseReacquire [GOOD] >> TKesusTest::TestSemaphoreSessionFailures >> DataShardReadTableSnapshots::ReadTableSplitBefore [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitFinished |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/address_classification/ut/unittest >> TNetClassifierTest::TestInitFromRemoteSource [GOOD] Test command err: 2026-01-04T02:50:59.415040Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333476104111447:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:59.415111Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:50:59.655139Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:50:59.712466Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:59.712553Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:59.735662Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:59.738830Z node 1 :HTTP ERROR: http_proxy_outgoing.cpp:124: (#26,[::1]:24460) connection closed with error: Connection refused 2026-01-04T02:50:59.740297Z node 1 :CMS_CONFIGS ERROR: net_classifier_updater.cpp:278: NetClassifierUpdater failed to get subnets: Connection refused 2026-01-04T02:50:59.754299Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 
2026-01-04T02:50:59.789833Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:59.789871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:59.789877Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:59.790003Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:59.882340Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:51:00.423908Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/mind/address_classification/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::ColumnSchemeLimitsRejects [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:51:00.501598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:51:00.501688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:00.501733Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:51:00.501776Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:51:00.501835Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:51:00.501866Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:51:00.501937Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:00.502016Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:51:00.502826Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:51:00.503080Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:51:00.589841Z node 1 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:51:00.589902Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:00.603643Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:51:00.603864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:51:00.604181Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:51:00.611979Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:51:00.612188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:51:00.612939Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:00.613187Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:51:00.616010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:00.616208Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:51:00.617249Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:00.617308Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:00.617433Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:51:00.617481Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:51:00.617606Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:51:00.617784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.624117Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:51:00.754316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:51:00.754512Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.754674Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: 
IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:51:00.754711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:51:00.754876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:51:00.754987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:00.756794Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:00.756975Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:51:00.757181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.757227Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:51:00.757256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:51:00.757295Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:51:00.758868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.758914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:51:00.758964Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:51:00.760256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.760297Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.760341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:00.760392Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:51:00.763185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 
IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:51:00.764513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:51:00.764664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:51:00.765448Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:00.765562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:51:00.765601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:00.765900Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:51:00.765950Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:00.766112Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:51:00.766191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:51:00.767819Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:00.767860Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000004, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:02.216310Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 108 Coordinator: 72057594046316545 AckTo { RawX1: 137 RawX2: 8589936750 } } Step: 5000004 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:51:02.216371Z node 2 :FLAT_TX_SCHEMESHARD INFO: alter_store.cpp:199: TAlterOlapStore TPropose operationId# 108:0 HandleReply TEvOperationPlan at tablet: 72057594046678944, stepId: 5000004 2026-01-04T02:51:02.216526Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 128 -> 129 2026-01-04T02:51:02.216707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:51:02.216770Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 FAKE_COORDINATOR: advance: minStep5000004 State->FrontStep: 5000004 2026-01-04T02:51:02.219060Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:02.219116Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:51:02.219288Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 108, path id: [OwnerId: 72057594046678944, LocalPathId: 5] 2026-01-04T02:51:02.219434Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:02.219478Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:339:2314], at schemeshard: 72057594046678944, txId: 108, path id: 1 2026-01-04T02:51:02.219540Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:339:2314], at schemeshard: 72057594046678944, txId: 108, path id: 5 2026-01-04T02:51:02.219830Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-04T02:51:02.219887Z node 2 :FLAT_TX_SCHEMESHARD INFO: alter_store.cpp:305: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState at tablet: 72057594046678944 2026-01-04T02:51:02.219951Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: alter_store.cpp:332: TAlterOlapStore TProposedWaitParts operationId# 108:0 ProgressState wait for NotifyTxCompletionResult tabletId: 72075186233409549 2026-01-04T02:51:02.220994Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2026-01-04T02:51:02.221094Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 
72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 1 Version: 8 PathOwnerId: 72057594046678944, cookie: 108 2026-01-04T02:51:02.221142Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2026-01-04T02:51:02.221183Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 8 2026-01-04T02:51:02.221223Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 3 2026-01-04T02:51:02.222558Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2026-01-04T02:51:02.222652Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 3 LocalPathId: 5 Version: 5 PathOwnerId: 72057594046678944, cookie: 108 2026-01-04T02:51:02.222685Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 108 2026-01-04T02:51:02.222716Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 108, pathId: [OwnerId: 72057594046678944, LocalPathId: 5], version: 5 2026-01-04T02:51:02.222748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 4 2026-01-04T02:51:02.222816Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 0/1, is published: true 2026-01-04T02:51:02.224640Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 108:0 from tablet: 72057594046678944 to tablet: 72075186233409549 cookie: 72057594046678944:4 msg type: 275382275 2026-01-04T02:51:02.225490Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 2026-01-04T02:51:02.226510Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 108 FAKE_COORDINATOR: Erasing txId 108 2026-01-04T02:51:02.238819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6586: Handle TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2026-01-04T02:51:02.238876Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1837: TOperation FindRelatedPartByTabletId, TxId: 108, tablet: 72075186233409549, partId: 0 2026-01-04T02:51:02.239027Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 108:0, at schemeshard: 72057594046678944, message: Origin: 72075186233409549 TxId: 108 2026-01-04T02:51:02.239084Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 108:0 129 -> 240 2026-01-04T02:51:02.241057Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-04T02:51:02.241214Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 108:0, at schemeshard: 72057594046678944 2026-01-04T02:51:02.241263Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 108:0 ProgressState 2026-01-04T02:51:02.241382Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-04T02:51:02.241438Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-04T02:51:02.241480Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#108:0 progress is 1/1 2026-01-04T02:51:02.241516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-04T02:51:02.241555Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 108, ready parts: 1/1, is published: true 2026-01-04T02:51:02.241614Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:500:2449] message: TxId: 108 2026-01-04T02:51:02.241660Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 108 ready parts: 1/1 2026-01-04T02:51:02.241697Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 108:0 2026-01-04T02:51:02.241730Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 108:0 2026-01-04T02:51:02.241860Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 5] was 3 2026-01-04T02:51:02.243487Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 108: got EvNotifyTxCompletionResult 2026-01-04T02:51:02.243553Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 108: satisfy waiter [2:951:2861] TestWaitNotification: OK eventTxId 108 TestModificationResults wait txId: 109 2026-01-04T02:51:02.246895Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpAlterColumnStore AlterColumnStore { Name: "OlapStore1" AlterSchemaPresets { Name: "default" AlterSchema { AddColumns { Name: "comment2" Type: "Utf8" } } } } } TxId: 109 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:51:02.247075Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: alter_store.cpp:465: TAlterOlapStore Propose, path: /MyRoot/OlapStore1, opId: 109:0, at schemeshard: 72057594046678944 2026-01-04T02:51:02.247356Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 109:1, propose status:StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, at schemeshard: 72057594046678944 2026-01-04T02:51:02.249108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 109, response: Status: StatusSchemeError Reason: "Too many columns. new: 4. 
Limit: 3" TxId: 109 SchemeshardId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:51:02.249305Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 109, database: /MyRoot, subject: , status: StatusSchemeError, reason: Too many columns. new: 4. Limit: 3, operation: ALTER COLUMN STORE, path: /MyRoot/OlapStore1 TestModificationResult got TxId: 109, wait until txId: 109 TestWaitNotification wait txId: 109 2026-01-04T02:51:02.249735Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 109: send EvNotifyTxCompletion 2026-01-04T02:51:02.249819Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 109 2026-01-04T02:51:02.250418Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 109, at schemeshard: 72057594046678944 2026-01-04T02:51:02.250531Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 109: got EvNotifyTxCompletionResult 2026-01-04T02:51:02.250572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 109: satisfy waiter [2:986:2896] TestWaitNotification: OK eventTxId 109 >> TKesusTest::TestAttachOutOfSequenceInTx [GOOD] >> TKesusTest::TestAttachThenReRegister >> TKesusTest::TestQuoterAccountResourcesBurst >> DataShardReadTableSnapshots::ReadTableSnapshot [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitAfter |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSessionStealingAnyKey [GOOD] Test command err: 2026-01-04T02:51:01.096124Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:01.096238Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:01.115136Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:01.115259Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:01.140206Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:01.140742Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:134:2159], cookie=10558294059947502373, session=0, seqNo=0) 2026-01-04T02:51:01.140918Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:51:01.163223Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:134:2159], cookie=10558294059947502373, session=1) 2026-01-04T02:51:01.164863Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[1:134:2159], cookie=12429584421731029507, session=2) 2026-01-04T02:51:01.164948Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[1:134:2159], cookie=12429584421731029507) 2026-01-04T02:51:01.165522Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[1:134:2159], cookie=4752069156122318350 2026-01-04T02:51:01.166341Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute 
(sender=[1:134:2159], cookie=14959248580344293611, session=1, seqNo=0) 2026-01-04T02:51:01.178315Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:134:2159], cookie=14959248580344293611, session=1) 2026-01-04T02:51:01.178742Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:134:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-04T02:51:01.178905Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-04T02:51:01.178991Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-04T02:51:01.179178Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[1:134:2159], cookie=14056443612059214017, session=1) 2026-01-04T02:51:01.189584Z node 1 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-01-04T02:51:01.189676Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-04T02:51:01.189732Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Lock1" owner link 2026-01-04T02:51:01.202022Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:134:2159], cookie=111) 2026-01-04T02:51:01.202121Z node 1 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[1:134:2159], cookie=14056443612059214017) 2026-01-04T02:51:01.202165Z node 1 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-01-04T02:51:01.509294Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:01.509405Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:01.528844Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:01.528980Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:01.563527Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:01.563920Z node 2 :KESUS_TABLET DEBUG: tx_config_set.cpp:28: [72057594037927937] TTxConfigSet::Execute (sender=[2:136:2161], cookie=16301380402610668490, path="") 2026-01-04T02:51:01.576382Z node 2 :KESUS_TABLET DEBUG: tx_config_set.cpp:94: [72057594037927937] TTxConfigSet::Complete (sender=[2:136:2161], cookie=16301380402610668490, status=SUCCESS) 2026-01-04T02:51:01.577092Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:146:2168], cookie=111, session=0, seqNo=0) 2026-01-04T02:51:01.577232Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:51:01.577415Z node 2 :KESUS_TABLET DEBUG: tx_session_detach.cpp:28: [72057594037927937] TTxSessionDetach::Execute (sender=[2:146:2168], cookie=2657742253755769827, session=1) 2026-01-04T02:51:01.587781Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:27: [72057594037927937] TTxSessionTimeout::Execute (session=1) 2026-01-04T02:51:01.587845Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] 
Deleting session 1 2026-01-04T02:51:01.599983Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:146:2168], cookie=111, session=1) 2026-01-04T02:51:01.600069Z node 2 :KESUS_TABLET DEBUG: tx_session_detach.cpp:59: [72057594037927937] TTxSessionDetach::Complete (sender=[2:146:2168], cookie=2657742253755769827) 2026-01-04T02:51:01.600123Z node 2 :KESUS_TABLET DEBUG: tx_session_timeout.cpp:56: [72057594037927937] TTxSessionTimeout::Complete (session=1) 2026-01-04T02:51:01.871079Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:01.871167Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:01.889605Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:01.890080Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:01.924818Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:01.925362Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=11485762950688690238, session=0, seqNo=0) 2026-01-04T02:51:01.925526Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:51:01.937053Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=11485762950688690238, session=1) 2026-01-04T02:51:01.937568Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:136:2161], cookie=3228151463785446794, session=1) 2026-01-04T02:51:01.937653Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-04T02:51:01.949557Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:136:2161], cookie=3228151463785446794) 2026-01-04T02:51:01.950451Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:154:2176], cookie=5527277272520294048) 2026-01-04T02:51:01.950527Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:154:2176], cookie=5527277272520294048) 2026-01-04T02:51:01.951079Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:157:2179], cookie=375587605606886182, session=0, seqNo=0) 2026-01-04T02:51:01.951212Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-04T02:51:01.962950Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:157:2179], cookie=375587605606886182, session=2) 2026-01-04T02:51:01.963930Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[3:136:2161], cookie=799299099965137170, session=2) 2026-01-04T02:51:01.964014Z node 3 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 2 2026-01-04T02:51:01.975794Z node 3 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[3:136:2161], cookie=799299099965137170) 2026-01-04T02:51:02.296245Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 
72057594037927937 2026-01-04T02:51:02.296356Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:02.314789Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:02.315279Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:02.338962Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:02.339888Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:137:2161], cookie=12345, session=0, seqNo=0) 2026-01-04T02:51:02.340056Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:51:02.351926Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:137:2161], cookie=12345, session=1) 2026-01-04T02:51:02.352516Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[4:144:2166], cookie=23456, session=1, seqNo=0) 2026-01-04T02:51:02.364530Z node 4 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[4:144:2166], cookie=23456, session=1) 2026-01-04T02:51:02.685868Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:02.685965Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:02.698990Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:02.699121Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:02.713044Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:02.713718Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:135:2159], cookie=12345, session=0, seqNo=0) 2026-01-04T02:51:02.713867Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:51:02.735962Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:135:2159], cookie=12345, session=1) 2026-01-04T02:51:02.736541Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:141:2164], cookie=23456, session=1, seqNo=0) 2026-01-04T02:51:02.748130Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:141:2164], cookie=23456, session=1) >> DataShardReadTableSnapshots::ReadTableDropColumnLatePropose [GOOD] >> DataShardReadTableSnapshots::ReadTableMaxRows >> DataShardReadTableSnapshots::ReadTableSplitNewTxIdResolveResultReorder [GOOD] >> DataShardReadTableSnapshots::ReadTableUUID |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/server/ut/unittest >> TMessageBusServerPersQueueGetReadSessionsInfoMetaRequestTest::FailsOnBalancerDescribeResultFailureWhenTopicsAreGivenExplicitly [GOOD] Test command err: Assert failed: Check response: { Status: 128 ErrorReason: "path \'Root/PQ\' has unknown/invalid root prefix \'Root\', Marker# PQ14" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "the following topics are not 
created: rt3.dc1--topic2, Marker# PQ95" ErrorCode: UNKNOWN_TOPIC } Assert failed: Check response: { Status: 128 ErrorReason: "topic \'Root/PQ\' describe error, Status# LookupError, Marker# PQ1" ErrorCode: ERROR } |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/server/ut/unittest >> TKesusTest::TestAttachThenReRegister [GOOD] >> TKesusTest::TestAttachTimeoutTooBig |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TKesusTest::TestSemaphoreSessionFailures [GOOD] |96.1%| [TA] $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TA] $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/console/ut_console/unittest >> FeatureFlagsConfiguratorTest::TestFeatureFlagsUpdates [GOOD] Test command err: 2026-01-04T02:44:06.029502Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:06.029579Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:06.243178Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:07.170190Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:07.170262Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:07.239061Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:10.712365Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:10.712420Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:10.811818Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:11.771996Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:11.772046Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:11.862628Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:12.848750Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:12.848816Z node 10 :IMPORT WARN: 
schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:12.941990Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:13.967746Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:13.967857Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:14.066594Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:15.273603Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:15.273693Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:15.368253Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:16.556975Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:16.557051Z node 13 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:16.653316Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:17.701691Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:17.701776Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:17.786390Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:19.016230Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:19.016313Z node 15 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:19.119471Z node 15 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:20.893631Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console 
configs 2026-01-04T02:44:20.893689Z node 16 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:21.001282Z node 16 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:22.866926Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:22.867009Z node 17 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:22.969000Z node 17 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:24.749577Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:24.749655Z node 18 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:24.847150Z node 18 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:26.717846Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:26.717926Z node 19 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:26.835678Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:28.927426Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:28.927532Z node 20 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:29.033795Z node 20 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:31.064840Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:31.064923Z node 21 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:31.167203Z node 21 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:33.230674Z node 22 :FLAT_TX_SCHEMESHARD WARN: 
schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:33.230760Z node 22 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:33.326312Z node 22 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:35.232171Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:35.232269Z node 23 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:35.342274Z node 23 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:37.740086Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:37.740181Z node 24 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:37.845943Z node 24 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:38.367823Z node 24 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1240: Unexpected config sender died for subscription id=1 2026-01-04T02:44:39.108780Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:39.108864Z node 25 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:39.207329Z node 25 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:40.499937Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:40.500027Z node 26 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:40.595184Z node 26 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:41.875954Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:44:41.876045Z node 27 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:44:41.978329Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 
72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:44:48.494123Z node 27 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:44:48.494245Z node 27 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:53.002230Z node 27 :CMS_CONFIGS ERROR: console_configs_provider.cpp:1206: Couldn't deliver config notification for subscription id=1 tabletid=0 serviceid=[100:28538277257700723:0] nodeid=100 host=host100 tenant=tenant-100 nodetype=type100 kinds=2 lastprovidedconfig= 2026-01-04T02:48:53.887161Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:48:53.887287Z node 28 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:54.002013Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:49:00.982658Z node 28 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:49:00.982757Z node 28 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:01.906922Z node 29 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:51:01.907035Z node 29 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:01.997870Z node 29 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) >> KqpPg::ValuesInsert+useSink [GOOD] >> KqpPg::ValuesInsert-useSink |96.1%| [TA] {RESULT} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/address_classification/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQCachingProxyTest::TestPublishAndForget >> TKesusTest::TestAttachTimeoutTooBig [GOOD] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/cms/console/ut_console/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop >> TKesusTest::TestCreateSemaphore |96.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TA] $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestSemaphoreSessionFailures [GOOD] Test command err: 2026-01-04T02:51:01.064820Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:01.064902Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:01.076788Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:01.076876Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:01.100850Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:01.101245Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:134:2159], cookie=15095374867967111847, session=0, seqNo=0) 2026-01-04T02:51:01.101379Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:51:01.123941Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:134:2159], cookie=15095374867967111847, session=1) 2026-01-04T02:51:01.124356Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:134:2159], cookie=13509339634359621774, session=0, seqNo=0) 2026-01-04T02:51:01.124471Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-04T02:51:01.136250Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:134:2159], cookie=13509339634359621774, session=2) 2026-01-04T02:51:01.136515Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:134:2159], cookie=111, name="Lock1") 2026-01-04T02:51:01.148544Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:134:2159], cookie=111) 2026-01-04T02:51:01.148844Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:134:2159], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-04T02:51:01.148991Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-04T02:51:01.149094Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-04T02:51:01.160718Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:134:2159], cookie=222) 2026-01-04T02:51:01.160934Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[1:134:2159], cookie=333, name="Lock1") 2026-01-04T02:51:01.172981Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[1:134:2159], cookie=333) 2026-01-04T02:51:01.448626Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:01.448738Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:01.468589Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 
2026-01-04T02:51:01.468731Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:01.503172Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:01.503607Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:136:2161], cookie=1222006253601848898, session=0, seqNo=0) 2026-01-04T02:51:01.503768Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:51:01.515484Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:136:2161], cookie=1222006253601848898, session=1) 2026-01-04T02:51:01.515753Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:136:2161], cookie=11447874735540753685, session=0, seqNo=0) 2026-01-04T02:51:01.515855Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-04T02:51:01.527461Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:136:2161], cookie=11447874735540753685, session=2) 2026-01-04T02:51:01.527872Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[2:148:2170], cookie=1352769558128359452, name="Sem1", limit=1) 2026-01-04T02:51:01.527987Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-04T02:51:01.540075Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[2:148:2170], cookie=1352769558128359452) 2026-01-04T02:51:01.540359Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=111, session=1, semaphore="Sem1" count=1) 2026-01-04T02:51:01.540501Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-01-04T02:51:01.540650Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=222, session=2, semaphore="Sem1" count=1) 2026-01-04T02:51:01.552388Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=111) 2026-01-04T02:51:01.552487Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=222) 2026-01-04T02:51:01.552951Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:156:2178], cookie=14190041802595689029, name="Sem1") 2026-01-04T02:51:01.553044Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:156:2178], cookie=14190041802595689029) 2026-01-04T02:51:01.553398Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:159:2181], cookie=9721749104619189734, name="Sem1") 2026-01-04T02:51:01.553455Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:159:2181], cookie=9721749104619189734) 2026-01-04T02:51:01.553672Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: 
[72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:136:2161], cookie=333, name="Sem1") 2026-01-04T02:51:01.553761Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Sem1" waiter link 2026-01-04T02:51:01.565384Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:136:2161], cookie=333) 2026-01-04T02:51:01.565888Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:164:2186], cookie=9369740838346357681, name="Sem1") 2026-01-04T02:51:01.565963Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:164:2186], cookie=9369740838346357681) 2026-01-04T02:51:01.566316Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:167:2189], cookie=3899251960126033590, name="Sem1") 2026-01-04T02:51:01.566362Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:167:2189], cookie=3899251960126033590) 2026-01-04T02:51:01.566595Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[2:136:2161], cookie=444, name="Sem1") 2026-01-04T02:51:01.566691Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2026-01-04T02:51:01.578299Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[2:136:2161], cookie=444) 2026-01-04T02:51:01.578811Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:172:2194], cookie=1256713832362524788, name="Sem1") 2026-01-04T02:51:01.578869Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:172:2194], cookie=1256713832362524788) 2026-01-04T02:51:01.579213Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[2:175:2197], cookie=1629208020941048243, name="Sem1") 2026-01-04T02:51:01.579261Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[2:175:2197], cookie=1629208020941048243) 2026-01-04T02:51:01.873588Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:01.873672Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:01.885394Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:01.885861Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:01.920155Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:01.920575Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:136:2161], cookie=8351463418338515633, name="Sem1", limit=1) 2026-01-04T02:51:01.920744Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-04T02:51:01.932654Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:136:2161], 
cookie=8351463418338515633) 2026-01-04T02:51:01.933175Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[3:146:2168], cookie=6905550670370763480, name="Sem2", limit=1) 2026-01-04T02:51:01.933328Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem2" 2026-01-04T02:51:01.945526Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[3:146:2168], cookie=6905550670370763480) 2026-01-04T02:51:01.946144Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:151:2173], cookie=8862428113596382143, name="Sem1") 2026-01-04T02:51:01.946238Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:151:2173], cookie=8862428113596382143) 2026-01-04T02:51:01.946713Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[3:154:2176], cookie=11537853474251066973, name="Sem2") 2026-01-04T02:51:01.946788Z node 3 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[3:154:2176], cookie=11537853474251066973) 2026-01-04T02:51:01.960321Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:01.960439Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execu ... TTxSemaphoreCreate::Complete (sender=[4:249:2270], cookie=7935068284892972943) 2026-01-04T02:51:02.784829Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=111, session=1, semaphore="Sem1" count=1) 2026-01-04T02:51:02.784996Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #1 session 1 2026-01-04T02:51:02.797303Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=111) 2026-01-04T02:51:02.797998Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=222, session=2, semaphore="Sem1" count=1) 2026-01-04T02:51:02.810594Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=222) 2026-01-04T02:51:02.811198Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[4:137:2161], cookie=333, name="Sem1") 2026-01-04T02:51:02.811339Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 11 "Sem1" waiter link 2026-01-04T02:51:02.834424Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:137:2161], cookie=333) 2026-01-04T02:51:02.835122Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[4:137:2161], cookie=444, session=2, semaphore="Sem1" count=1) 2026-01-04T02:51:02.847652Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[4:137:2161], cookie=444) 2026-01-04T02:51:02.848266Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] 
TTxSemaphoreRelease::Execute (sender=[4:137:2161], cookie=555, name="Sem1") 2026-01-04T02:51:02.848391Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 11 "Sem1" owner link 2026-01-04T02:51:02.848464Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 11 "Sem1" queue: next order #3 session 2 2026-01-04T02:51:02.860880Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[4:137:2161], cookie=555) 2026-01-04T02:51:03.251649Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:03.251802Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:03.269712Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:03.269873Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:03.283794Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:03.284176Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:135:2159], cookie=18173392017038551972, session=0, seqNo=0) 2026-01-04T02:51:03.284284Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:51:03.306448Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:135:2159], cookie=18173392017038551972, session=1) 2026-01-04T02:51:03.306721Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:135:2159], cookie=112, name="Sem1", limit=5) 2026-01-04T02:51:03.306834Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-04T02:51:03.318630Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:135:2159], cookie=112) 2026-01-04T02:51:03.318893Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:135:2159], cookie=113, name="Sem1") 2026-01-04T02:51:03.330789Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:135:2159], cookie=113) 2026-01-04T02:51:03.331075Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:135:2159], cookie=114, name="Sem1", force=0) 2026-01-04T02:51:03.331168Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:58: [72057594037927937] Deleting semaphore 1 "Sem1" 2026-01-04T02:51:03.343059Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:135:2159], cookie=114) 2026-01-04T02:51:03.343299Z node 5 :KESUS_TABLET DEBUG: tx_session_detach.cpp:100: [72057594037927937] Fast-path detach session=1 from sender=[5:135:2159], cookie=3679671567399401824 2026-01-04T02:51:03.343514Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:135:2159], cookie=115, name="Sem1", limit=5) 2026-01-04T02:51:03.355319Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:135:2159], cookie=115) 2026-01-04T02:51:03.355661Z node 5 :KESUS_TABLET DEBUG: 
tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:135:2159], cookie=116, name="Sem1") 2026-01-04T02:51:03.367430Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:135:2159], cookie=116) 2026-01-04T02:51:03.367744Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:135:2159], cookie=117, name="Sem1", force=0) 2026-01-04T02:51:03.379839Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:135:2159], cookie=117) 2026-01-04T02:51:03.380137Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:135:2159], cookie=118, session=1, semaphore="Sem1" count=1) 2026-01-04T02:51:03.391719Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:135:2159], cookie=118) 2026-01-04T02:51:03.391961Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:135:2159], cookie=119, name="Sem1") 2026-01-04T02:51:03.403831Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:135:2159], cookie=119) 2026-01-04T02:51:03.404160Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:135:2159], cookie=120, name="Sem1") 2026-01-04T02:51:03.404241Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:135:2159], cookie=120) 2026-01-04T02:51:03.404476Z node 5 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:37: [72057594037927937] TTxSessionDestroy::Execute (sender=[5:135:2159], cookie=5914011140698644844, session=1) 2026-01-04T02:51:03.404582Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:32: [72057594037927937] Deleting session 1 2026-01-04T02:51:03.416492Z node 5 :KESUS_TABLET DEBUG: tx_session_destroy.cpp:75: [72057594037927937] TTxSessionDestroy::Complete (sender=[5:135:2159], cookie=5914011140698644844) 2026-01-04T02:51:03.416783Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:135:2159], cookie=121, name="Sem1", limit=5) 2026-01-04T02:51:03.428702Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:135:2159], cookie=121) 2026-01-04T02:51:03.429014Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:135:2159], cookie=122, name="Sem1") 2026-01-04T02:51:03.441055Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:135:2159], cookie=122) 2026-01-04T02:51:03.441365Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:135:2159], cookie=123, name="Sem1", force=0) 2026-01-04T02:51:03.453507Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:135:2159], cookie=123) 2026-01-04T02:51:03.453874Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:135:2159], cookie=124, session=1, semaphore="Sem1" count=1) 2026-01-04T02:51:03.465830Z node 5 :KESUS_TABLET DEBUG: 
tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:135:2159], cookie=124) 2026-01-04T02:51:03.466099Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:135:2159], cookie=125, name="Sem1") 2026-01-04T02:51:03.477665Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:135:2159], cookie=125) 2026-01-04T02:51:03.477938Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:135:2159], cookie=126, name="Sem1") 2026-01-04T02:51:03.478015Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:135:2159], cookie=126) 2026-01-04T02:51:03.478443Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:135:2159], cookie=127, name="Sem1", limit=5) 2026-01-04T02:51:03.478494Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:135:2159], cookie=127) 2026-01-04T02:51:03.478666Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:28: [72057594037927937] TTxSemaphoreUpdate::Execute (sender=[5:135:2159], cookie=128, name="Sem1") 2026-01-04T02:51:03.478709Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_update.cpp:84: [72057594037927937] TTxSemaphoreUpdate::Complete (sender=[5:135:2159], cookie=128) 2026-01-04T02:51:03.478843Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:28: [72057594037927937] TTxSemaphoreDelete::Execute (sender=[5:135:2159], cookie=129, name="Sem1", force=0) 2026-01-04T02:51:03.478888Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_delete.cpp:95: [72057594037927937] TTxSemaphoreDelete::Complete (sender=[5:135:2159], cookie=129) 2026-01-04T02:51:03.479037Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:135:2159], cookie=130, session=1, semaphore="Sem1" count=1) 2026-01-04T02:51:03.479079Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:135:2159], cookie=130) 2026-01-04T02:51:03.479243Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:135:2159], cookie=131, name="Sem1") 2026-01-04T02:51:03.479294Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:135:2159], cookie=131) 2026-01-04T02:51:03.479426Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:135:2159], cookie=132, name="Sem1") 2026-01-04T02:51:03.479460Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:135:2159], cookie=132) >> TPQCachingProxyTest::TestPublishAndForget [GOOD] |96.1%| [TA] {RESULT} $(B)/ydb/core/http_proxy/ut/inside_ydb_ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest |96.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest |96.1%| [TA] {RESULT} $(B)/ydb/core/client/server/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TPQCachingProxyTest::TestDeregister >> TKesusTest::TestCreateSemaphore [GOOD] >> Secret::DeactivatedQueryService [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAllocateResource [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAllocationGranularity [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestAmountIsLessThanEpsilon [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveSessionDisconnectsAndThenConnectsAgain [GOOD] >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestPublishAndForget [GOOD] Test command err: 2026-01-04T02:51:04.066945Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:51:04.120939Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:51:04.121010Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:51:04.121118Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:51:04.121162Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:51:04.133936Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:51:04.134047Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2026-01-04T02:51:04.134139Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2026-01-04T02:51:04.134168Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2026-01-04T02:51:04.134243Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:218: Direct read cache: forget read: 1 for session session1 |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] >> TPQCachingProxyTest::TestDeregister [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestCreateSemaphore [GOOD] Test command err: 2026-01-04T02:51:02.613717Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:02.613831Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:02.625488Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:02.625596Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:02.650029Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:02.650647Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute 
(sender=[1:134:2159], cookie=11015129029036140096, session=0, seqNo=222) 2026-01-04T02:51:02.650764Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:51:02.672832Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:134:2159], cookie=11015129029036140096, session=1) 2026-01-04T02:51:02.673055Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:135:2160], cookie=2375345721552449387, session=1, seqNo=111) 2026-01-04T02:51:02.684470Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:135:2160], cookie=2375345721552449387, session=1) 2026-01-04T02:51:02.969607Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:02.969719Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:02.982133Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:02.982237Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:03.016514Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:03.017002Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:136:2161], cookie=111, session=0, seqNo=42) 2026-01-04T02:51:03.017122Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:51:03.017249Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:136:2161], cookie=222, session=1, seqNo=41) 2026-01-04T02:51:03.029009Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:136:2161], cookie=111, session=1) 2026-01-04T02:51:03.029091Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:136:2161], cookie=222, session=1) 2026-01-04T02:51:03.364251Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:03.364351Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:03.381467Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:03.381935Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:03.416069Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:03.416581Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[3:136:2161], cookie=6731454490818504454, session=0, seqNo=0) 2026-01-04T02:51:03.416720Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:51:03.428633Z node 3 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[3:136:2161], cookie=6731454490818504454, session=1) 2026-01-04T02:51:03.430171Z node 3 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[3:154:2176], cookie=17293306990039706476) 2026-01-04T02:51:03.430245Z node 3 :KESUS_TABLET DEBUG: 
tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[3:154:2176], cookie=17293306990039706476) 2026-01-04T02:51:03.746636Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:03.746737Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:03.761386Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:03.761847Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:03.785119Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:04.105707Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:04.105794Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:04.118091Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:04.118184Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:04.132051Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:04.132491Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:135:2159], cookie=2788003178037896738, session=0, seqNo=0) 2026-01-04T02:51:04.132618Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:51:04.155479Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:135:2159], cookie=2788003178037896738, session=1) 2026-01-04T02:51:04.155881Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:135:2159], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-04T02:51:04.156059Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-04T02:51:04.156172Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-04T02:51:04.168199Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:135:2159], cookie=111) 2026-01-04T02:51:04.168901Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:149:2171], cookie=1350181866799101466, name="Sem1", limit=42) 2026-01-04T02:51:04.169061Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 2 "Sem1" 2026-01-04T02:51:04.181454Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:149:2171], cookie=1350181866799101466) 2026-01-04T02:51:04.182068Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:154:2176], cookie=16492321132858370915, name="Sem1", limit=42) 2026-01-04T02:51:04.194468Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:154:2176], cookie=16492321132858370915) 2026-01-04T02:51:04.195090Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute 
(sender=[5:159:2181], cookie=17951105791033269564, name="Sem1", limit=51) 2026-01-04T02:51:04.207122Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:159:2181], cookie=17951105791033269564) 2026-01-04T02:51:04.207637Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:164:2186], cookie=6427798584974418151, name="Lock1", limit=42) 2026-01-04T02:51:04.219834Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:164:2186], cookie=6427798584974418151) 2026-01-04T02:51:04.220343Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:169:2191], cookie=16882011887878796543, name="Lock1", limit=18446744073709551615) 2026-01-04T02:51:04.232869Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:169:2191], cookie=16882011887878796543) 2026-01-04T02:51:04.233542Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:174:2196], cookie=15341650825422549139, name="Sem1") 2026-01-04T02:51:04.233667Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:174:2196], cookie=15341650825422549139) 2026-01-04T02:51:04.234244Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:177:2199], cookie=10616238675438438822, name="Sem2") 2026-01-04T02:51:04.234320Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:177:2199], cookie=10616238675438438822) 2026-01-04T02:51:04.249152Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:04.249276Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:04.249836Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:04.250530Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:04.308569Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:04.308691Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-04T02:51:04.308979Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:216:2229], cookie=1768806265693239022, name="Sem1") 2026-01-04T02:51:04.309045Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:216:2229], cookie=1768806265693239022) 2026-01-04T02:51:04.309559Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:223:2235], cookie=6999983442976612369, name="Sem2") 2026-01-04T02:51:04.309621Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:223:2235], cookie=6999983442976612369) |96.1%| [TA] $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> THDRRQuoterResourceTreeRuntimeTest::TestActiveMultiresourceSessionDisconnectsAndThenConnectsAgain [GOOD] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |96.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateTenantTableForceDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:51:04.158214Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:51:04.158306Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:04.158352Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:51:04.158392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:51:04.158431Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:51:04.158458Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:51:04.158537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:04.158618Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:51:04.159387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:51:04.159645Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:51:04.230086Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:51:04.230134Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:04.241786Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:51:04.242010Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:51:04.242192Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:51:04.250519Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:51:04.250693Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:51:04.251125Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:04.251287Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:51:04.253378Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:04.253517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:51:04.254324Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:04.254363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:04.254435Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:51:04.254463Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:51:04.254549Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:51:04.254677Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:51:04.259183Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:51:04.369237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:51:04.369474Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:04.369676Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:51:04.369726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:51:04.369950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at 
schemeshard: 72057594046678944 2026-01-04T02:51:04.370029Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:04.372339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:04.372550Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:51:04.372788Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:04.372864Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:51:04.372905Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:51:04.372938Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:51:04.374926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:04.375006Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:51:04.375051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:51:04.376847Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:04.376902Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:04.376957Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:04.377004Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:51:04.379442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:51:04.380769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:51:04.380891Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 
72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:51:04.381609Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:04.381695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:51:04.381723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:04.381961Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:51:04.381999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:04.382124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:51:04.382181Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:51:04.383536Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:04.383569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
2026-01-04T02:51:04.520735Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 1 2026-01-04T02:51:04.521714Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:5 2026-01-04T02:51:04.521773Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:5 tabletId 72075186233409550 2026-01-04T02:51:04.521932Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6153: Failed to connect, to tablet: 72075186233409550, at schemeshard: 72057594046678944 2026-01-04T02:51:04.522034Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:7 2026-01-04T02:51:04.522082Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 1 candidates, at schemeshard: 72057594046678944 2026-01-04T02:51:04.522253Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:51:04.522281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 2026-01-04T02:51:04.522322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:51:04.525311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:1 2026-01-04T02:51:04.525349Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:1 tabletId 72075186233409546 2026-01-04T02:51:04.525647Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-04T02:51:04.525668Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:3 tabletId 72075186233409548 2026-01-04T02:51:04.525695Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:6 2026-01-04T02:51:04.525707Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:6 tabletId 72075186233409551 2026-01-04T02:51:04.526178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:2 2026-01-04T02:51:04.526211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:2 tabletId 72075186233409547 2026-01-04T02:51:04.526355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-04T02:51:04.526398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046678944:4 tabletId 72075186233409549 2026-01-04T02:51:04.526501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:252: TTxCleanDroppedSubDomains Complete, done PersistRemoveSubDomain for 1 subdomains, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 
2026-01-04T02:51:04.526801Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6153: Failed to connect, to tablet: 72075186233409551, at schemeshard: 72057594046678944 2026-01-04T02:51:04.526935Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6153: Failed to connect, to tablet: 72075186233409549, at schemeshard: 72057594046678944 2026-01-04T02:51:04.526986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 100 2026-01-04T02:51:04.527273Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 100: send EvNotifyTxCompletion 2026-01-04T02:51:04.527316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 100 TestWaitNotification wait txId: 101 2026-01-04T02:51:04.527383Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 101: send EvNotifyTxCompletion 2026-01-04T02:51:04.527403Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 101 TestWaitNotification wait txId: 102 2026-01-04T02:51:04.527494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T02:51:04.527515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T02:51:04.527944Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 100, at schemeshard: 72057594046678944 2026-01-04T02:51:04.528064Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 101, at schemeshard: 72057594046678944 2026-01-04T02:51:04.528114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 100: got EvNotifyTxCompletionResult 2026-01-04T02:51:04.528138Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 100: satisfy waiter [1:608:2523] 2026-01-04T02:51:04.528269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:51:04.528311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:608:2523] 2026-01-04T02:51:04.528421Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T02:51:04.528504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T02:51:04.528532Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:608:2523] TestWaitNotification: OK eventTxId 100 TestWaitNotification: OK eventTxId 101 TestWaitNotification: OK eventTxId 102 2026-01-04T02:51:04.528958Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at 
schemeshard: 72057594046678944 2026-01-04T02:51:04.529145Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 212us result status StatusPathDoesNotExist 2026-01-04T02:51:04.529320Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:51:04.529828Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:04.529991Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 189us result status StatusPathDoesNotExist 2026-01-04T02:51:04.530155Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/USER_0/table_0\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot\' (id: [OwnerId: 72057594046678944, LocalPathId: 1])" Path: "/MyRoot/USER_0/table_0" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: false } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:51:04.530662Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:04.530874Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 170us result status StatusSuccess 2026-01-04T02:51:04.531216Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 6 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 6 ACLVersion: 0 EffectiveACLVersion: 0 
UserAttrsVersion: 1 ChildrenVersion: 4 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithTable-DisableStatsBatching [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007D8B76FABD00 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:51:00.483106Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:51:00.483203Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:00.483256Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:51:00.483299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:51:00.483340Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:51:00.483371Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:51:00.483447Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:00.483537Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2026-01-04T02:51:00.484535Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:51:00.484806Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:51:00.580153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:51:00.580231Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:00.596218Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:51:00.596437Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:51:00.596623Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:51:00.605701Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:51:00.605970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:51:00.606749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:00.607017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:51:00.611059Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:00.611285Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:51:00.612663Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:00.612736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:00.612913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:51:00.612970Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:51:00.613125Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:51:00.613374Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:51:00.778287Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.779202Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# 
[1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.779346Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.779429Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.779498Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.779569Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.779657Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.779737Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.779833Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.779909Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.780002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.780077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.780140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.780237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:00.780333Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_r ... 
aitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST 
waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 Leader for TabletID 72057594046678944 is [1:896:2845] sender: [1:969:2058] recipient: [1:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 2026-01-04T02:51:04.542428Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 0 2026-01-04T02:51:04.542535Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-01-04T02:51:04.542574Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-04T02:51:04.542672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 1 2026-01-04T02:51:04.552999Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 
0x00007D8B7705C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 1 2026-01-04T02:51:04.563717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 1 2026-01-04T02:51:04.563808Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 584 row count 1 2026-01-04T02:51:04.563840Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-04T02:51:04.563902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007D8B7705C100, stats written 2 2026-01-04T02:51:04.564238Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:04.564416Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table" took 192us result status StatusSuccess 2026-01-04T02:51:04.564736Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Table" PathDescription { Self { Name: "Table" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "Table" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 1168 RowCount: 2 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 2 RangeReadRows: 0 
StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 165248 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1168 DataSize: 1168 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::SimultaneousCreateDelete [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:51:04.184126Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:51:04.184212Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:04.184241Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:51:04.184269Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:51:04.184298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:51:04.184321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:51:04.184362Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:04.184425Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 
604800.000000s, IsManualStartup# false 2026-01-04T02:51:04.185076Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:51:04.185308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:51:04.248511Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:51:04.248562Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:04.258965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:51:04.259109Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:51:04.259238Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:51:04.265728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:51:04.265914Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:51:04.266476Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:04.266670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:51:04.268773Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:04.268913Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:51:04.269688Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:04.269735Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:04.269852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:51:04.269898Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:51:04.269986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:51:04.270123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:51:04.274989Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:51:04.380056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: 
"pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:51:04.380253Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:04.380439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:51:04.380479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:51:04.380665Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:51:04.380723Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:04.382475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:04.382653Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:51:04.382868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:04.382927Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:51:04.382968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:51:04.382993Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:51:04.384509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:04.384557Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:51:04.384602Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:51:04.385827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:04.385876Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:04.385953Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:04.385994Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:51:04.388768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:51:04.389984Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:51:04.390114Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:51:04.390874Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:04.390972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:51:04.391010Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:04.391222Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:51:04.391275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:04.391432Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:51:04.391503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:51:04.392963Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:04.392997Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
4 to tablet: 72057594046316545 cookie: 0:101 msg type: 269090816 2026-01-04T02:51:04.535242Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 101, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 101 at step: 5000002 FAKE_COORDINATOR: advance: minStep5000002 State->FrontStep: 5000001 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 101 at step: 5000002 2026-01-04T02:51:04.535563Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000002, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:04.535666Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 101 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000002 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:51:04.535719Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-01-04T02:51:04.536091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 101:0 128 -> 240 2026-01-04T02:51:04.536147Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 101:0, at tablet# 72057594046678944 2026-01-04T02:51:04.536321Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:51:04.536384Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2026-01-04T02:51:04.536430Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 2], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 101 2026-01-04T02:51:04.538123Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:04.538169Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:51:04.538318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 101, path id: [OwnerId: 72057594046678944, LocalPathId: 2] 2026-01-04T02:51:04.538435Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:04.538481Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 1 2026-01-04T02:51:04.538534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 101, path id: 2 2026-01-04T02:51:04.538591Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 101:0, at schemeshard: 72057594046678944 2026-01-04T02:51:04.538658Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 101:0 ProgressState 2026-01-04T02:51:04.538765Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:51:04.538804Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:51:04.538861Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#101:0 progress is 1/1 2026-01-04T02:51:04.538900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:51:04.538937Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 101, ready parts: 1/1, is published: false 2026-01-04T02:51:04.538976Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 101 ready parts: 1/1 2026-01-04T02:51:04.539017Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 101:0 2026-01-04T02:51:04.539065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 101:0 2026-01-04T02:51:04.539308Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 9 2026-01-04T02:51:04.539353Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 101, publications: 2, subscribers: 1 2026-01-04T02:51:04.539397Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 1], 5 2026-01-04T02:51:04.539438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 101, [OwnerId: 72057594046678944, LocalPathId: 2], 3 2026-01-04T02:51:04.540363Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:51:04.540489Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 1 Version: 5 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:51:04.540537Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:51:04.540594Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], version: 5 2026-01-04T02:51:04.540636Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 2 2026-01-04T02:51:04.541431Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at 
schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:51:04.541523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 2 Version: 3 PathOwnerId: 72057594046678944, cookie: 101 2026-01-04T02:51:04.541556Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 101 2026-01-04T02:51:04.541584Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 101, pathId: [OwnerId: 72057594046678944, LocalPathId: 2], version: 3 2026-01-04T02:51:04.541626Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 2] was 8 2026-01-04T02:51:04.541691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 101, subscribers: 1 2026-01-04T02:51:04.541736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046678944, to actorId: [1:561:2478] 2026-01-04T02:51:04.544187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:51:04.544920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 101 2026-01-04T02:51:04.544974Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 101: got EvNotifyTxCompletionResult 2026-01-04T02:51:04.545002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 101: satisfy waiter [1:562:2479] TestWaitNotification: OK eventTxId 101 2026-01-04T02:51:04.545333Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:04.545479Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 161us result status StatusSuccess 2026-01-04T02:51:04.545880Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: "USER_0" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 
CreateStep: 5000002 ParentPathId: 1 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 1 PathsLimit: 10000 ShardsInside: 0 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 >> TFlatTest::SplitEmptyToMany [GOOD] >> TFlatTest::SplitEmptyTwice |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |96.1%| [TA] {RESULT} $(B)/ydb/core/cms/console/ut_console/test-results/unittest/{meta.json ... results_accumulator.log} >> TKesusTest::TestQuoterAccountResourcesBurst [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestDeregister [GOOD] Test command err: 2026-01-04T02:51:04.736485Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:51:04.789961Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:51:04.790038Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:51:04.790162Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:51:04.790230Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:51:04.806574Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:51:04.806706Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 1 2026-01-04T02:51:04.806764Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session2:1 with generation 1 2026-01-04T02:51:04.806886Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: session1 ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::DeactivatedQueryService [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:50:50.935814Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:51.033902Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:50:51.040929Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:298:2342], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:50:51.293387Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:51.293502Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:51.359954Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495048469261 != 1767495048469264 2026-01-04T02:50:51.373743Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:51.419117Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:51.513884Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:50:52.023005Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:52.023633Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:52.023714Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:52.023766Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:52.024258Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:52.039263Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-04T02:51:03.739666Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:882:2761], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:03.739788Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:893:2766], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:03.739855Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:03.740675Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:898:2771], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:03.740864Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:03.744221Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:03.759296Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:896:2769], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710657 completed, doublechecking } 2026-01-04T02:51:03.808626Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:949:2803] txid# 281474976710658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:03.969311Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:958:2811], status: GENERIC_ERROR, issues:
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled 2026-01-04T02:51:03.971101Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NWNjYzlkNmYtZmQxNjU3MmMtZGNlYWU3MWQtZjIzZmRjMjc=, ActorId: [1:880:2759], ActorState: ExecuteState, LegacyTraceId: 01ke3emp5m77qdx3fa57cmfsyv, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 50 } message: "Executing CREATE OBJECT SECRET" end_position { row: 1 column: 50 } severity: 1 issues { message: "metadata provider service is disabled" severity: 1 } } } tx_id# trace_id# REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> KqpPg::TableInsert+useSink [GOOD] >> KqpPg::TableInsert-useSink >> IncrementalBackup::BackupRestore >> IncrementalBackup::E2EBackupCollection >> IncrementalBackup::MultiBackup >> TPQCachingProxyTest::TestWrongSessionOrGeneration >> IncrementalBackup::SimpleBackup >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] >> DataShardReadTableSnapshots::ReadTableUUID [GOOD] >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateClients [GOOD] >> TKesusTest::TestQuoterAccountResourcesAggregateResources >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] >> AsyncIndexChangeCollector::InsertSingleRow >> AsyncIndexChangeCollector::UpsertSingleRow >> CdcStreamChangeCollector::InsertSingleRow ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/dread_cache_service/ut/unittest >> TPQCachingProxyTest::TestWrongSessionOrGeneration [GOOD] Test command err: 2026-01-04T02:51:07.219149Z node 1 :TABLET_RESOLVER INFO: tablet_resolver.cpp:696: No candidates for SelectForward, node 1 selfDC leaderDC 1:2:0 local 0 localDc 0 other 0 disallowed 0 2026-01-04T02:51:07.272588Z node 1 :PQ_TX INFO: pq_impl.cpp:564: [PQ: 72057594037927937] doesn't have tx info 2026-01-04T02:51:07.272643Z node 1 :PQ_TX INFO: pq_impl.cpp:576: [PQ: 72057594037927937] PlanStep 0, PlanTxId 0, ExecStep 0, ExecTxId 0 2026-01-04T02:51:07.272726Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72057594037927937] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T02:51:07.272770Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72057594037927937] doesn't have tx writes info 2026-01-04T02:51:07.283961Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T02:51:07.284051Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: session1:1 with generation 2 2026-01-04T02:51:07.284105Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:171: Direct read cache: staged direct read id 1 for session: session1 2026-01-04T02:51:07.284132Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 2 2026-01-04T02:51:07.284178Z node 1 :PQ_READ_PROXY INFO: caching_service.cpp:297: Direct read cache: attempted to register server session: session1:1 with stale generation 1, ignored 2026-01-04T02:51:07.284216Z node 1 :PQ_READ_PROXY ALERT: caching_service.cpp:159: Direct read cache: tried to stage direct read for session session1 with generation 1, previously had this session with generation 2. 
Data ignored 2026-01-04T02:51:07.284253Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:179: Direct read cache: publish read: 1 for session session1, Generation: 1 2026-01-04T02:51:07.284302Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:218: Direct read cache: forget read: 1 for session session1 |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/dread_cache_service/ut/unittest >> AsyncIndexChangeCollector::DeleteNothing >> CdcStreamChangeCollector::UpsertIntoTwoStreams >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn |96.1%| [TA] $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TA] {RESULT} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/persqueue/dread_cache_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> Secret::Deactivated [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::InsertNoTargetColumns_Serial+useSink [GOOD] Test command err: Trying to start YDB, gRPC: 12659, MsgBus: 26536 2026-01-04T02:48:53.266076Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332935234793400:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:53.266156Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:53.605535Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:53.630427Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:53.630521Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:53.656080Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:53.786304Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:53.833908Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:53.933330Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:53.933353Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:53.933368Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:53.933627Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:54.280054Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:54.457986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:54.470749Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 16 2026-01-04T02:48:56.606760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:56.789042Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:48:56.804027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:56.855475Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:48:56.901533Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332948119696339:2345], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:56.901605Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332948119696328:2342], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:56.901978Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:56.902734Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332948119696343:2347], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:56.902806Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:56.905368Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710663:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:56.919438Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332948119696342:2346], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710663 completed, doublechecking } 2026-01-04T02:48:57.023391Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332948119696395:2657] txid# 281474976710664, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } f f t t 18 2026-01-04T02:48:57.442934Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:57.497124Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:48:57.505970Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 21 2026-01-04T02:48:57.933649Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:57.998152Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:48:58.003118Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:58.267152Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332935234793400:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:58.267209Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 23 2026-01-04T02:48:58.470593Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:58.548063Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:48:58.553592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710679:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:58.610701Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 20 2026-01-04T02:48:59.006534Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:59.113927Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:48:59.122063Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710685:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:59.221165Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0 0 1 1 2 2 3 3 4 4 5 5 6 6 7 7 8 8 9 9 700 2026-01-04T02:48:59.659793Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710689:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:59.728994Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710690:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:59.829202Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 0.5 0.5 1.5 1.5 2.5 2.5 3.5 3.5 4.5 4.5 5.5 5.5 6.5 6.5 7.5 7.5 8.5 8.5 9.5 9.5 701 2026-01-04T02:49:00.292599Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at sch ... 
de 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:50:54.172750Z node 11 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(11, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:54.184144Z node 11 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-04T02:50:54.184181Z node 11 :GRPC_SERVER WARN: grpc_request_proxy.cpp:575: SchemeBoardDelete /Root Strong=0 2026-01-04T02:50:54.222902Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:54.222937Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:54.222949Z node 11 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:54.223068Z node 11 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:54.240805Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:54.521660Z node 11 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:50:55.001946Z node 11 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:50:55.092496Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:58.583745Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333473481615354:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:58.583746Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333473481615362:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:58.583882Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:58.584235Z node 11 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [11:7591333473481615369:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:58.584342Z node 11 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:50:58.588401Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:50:58.601384Z node 11 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [11:7591333473481615368:2328], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:50:58.693403Z node 11 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [11:7591333473481615421:2537] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:50:58.727125Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:50:58.994970Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[11:7591333452006778023:2071];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:50:58.995057Z node 11 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:50:59.367813Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) Trying to start YDB, gRPC: 6843, MsgBus: 19179 2026-01-04T02:51:01.113015Z node 12 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[12:7591333486518834398:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:51:01.113134Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:51:01.132521Z node 12 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:51:01.235391Z node 12 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:01.252368Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:01.252479Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:01.272178Z node 12 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(12, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:01.354504Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:51:01.354530Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:51:01.354540Z node 12 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:51:01.354635Z node 12 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:51:01.384495Z node 12 :KQP_PROXY WARN: 
kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:51:02.009617Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:02.121824Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:05.768699Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591333503698704439:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:05.768739Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591333503698704445:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:05.768785Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:05.769014Z node 12 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [12:7591333503698704453:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:05.769062Z node 12 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:05.772720Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:05.785628Z node 12 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [12:7591333503698704454:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:51:05.871183Z node 12 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [12:7591333503698704506:2536] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:05.953983Z node 12 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:06.113354Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[12:7591333486518834398:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:51:06.113451Z node 12 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableUUID [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:51:00.019797Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:00.163304Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:00.163742Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:00.163788Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:00.545655Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:00.545794Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:00.622186Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495057822703 != 1767495057822706 2026-01-04T02:51:00.635972Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:00.679535Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:00.787766Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:01.083039Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:01.096476Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:01.196814Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-04T02:51:01.196876Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:51:01.196970Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-04T02:51:01.304371Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T02:51:01.304463Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:51:01.305056Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T02:51:01.305147Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:51:01.305465Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:51:01.305591Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:51:01.305717Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:51:01.306030Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:51:01.309508Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:01.311365Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:51:01.311447Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:51:01.357329Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:51:01.358171Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:51:01.358359Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:51:01.358534Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:51:01.408189Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:51:01.408971Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:51:01.409071Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:51:01.410693Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:51:01.410789Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:51:01.410848Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:51:01.411219Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:51:01.411338Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:51:01.411427Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:51:01.422207Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:51:01.468914Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:51:01.469089Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:51:01.469187Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:51:01.469221Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:01.469246Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:51:01.469271Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:01.469395Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:51:01.469440Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:51:01.469736Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:51:01.469823Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:51:01.469885Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:01.470046Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:01.470085Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:51:01.470112Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:51:01.470136Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:51:01.470158Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:51:01.470186Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:01.470275Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:01.470303Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:01.470353Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:51:01.470647Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-04T02:51:01.470674Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:51:01.470744Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:51:01.470934Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:51:01.470974Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:51:01.471043Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:51:01.471082Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status ... on unit ReadTableScan 2026-01-04T02:51:07.270565Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2026-01-04T02:51:07.270704Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is Continue 2026-01-04T02:51:07.270730Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:51:07.270754Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-01-04T02:51:07.270785Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:51:07.270810Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:51:07.270857Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:07.271119Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1062:2879] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-01-04T02:51:07.271192Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:1074:2890], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-01-04T02:51:07.271218Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2026-01-04T02:51:07.271428Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1062:2879] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-04T02:51:07.271472Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1062:2879] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2026-01-04T02:51:07.271517Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2026-01-04T02:51:07.271659Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-04T02:51:07.271741Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1062:2879] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2026-01-04T02:51:07.271806Z node 2 
:TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2026-01-04T02:51:07.271854Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1062:2879] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-01-04T02:51:07.272126Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1062:2879] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-04T02:51:07.272146Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1062:2879] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2026-01-04T02:51:07.272175Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2026-01-04T02:51:07.272219Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-04T02:51:07.272251Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1062:2879] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2026-01-04T02:51:07.272280Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2026-01-04T02:51:07.272301Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1062:2879] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-01-04T02:51:07.272428Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1062:2879] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-04T02:51:07.272444Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1062:2879] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2026-01-04T02:51:07.272467Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2026-01-04T02:51:07.272497Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037888, TxId: 281474976715662, Size: 38, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-04T02:51:07.272522Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1062:2879] TxId# 281474976715661] Received stream data from ShardId# 72075186224037888 2026-01-04T02:51:07.272546Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037888, TxId: 281474976715662, PendingAcks: 0 2026-01-04T02:51:07.272571Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1062:2879] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037888 2026-01-04T02:51:07.272746Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1062:2879] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-04T02:51:07.272763Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1062:2879] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037888 2026-01-04T02:51:07.272786Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2026-01-04T02:51:07.272826Z node 2 :TX_DATASHARD 
DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037888, TxId: 281474976715662, MessageQuota: 1 2026-01-04T02:51:07.272924Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:1062:2879] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037888 2026-01-04T02:51:07.272952Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:1062:2879] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037888 2026-01-04T02:51:07.273022Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037888 2026-01-04T02:51:07.273053Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715662, at: 72075186224037888 2026-01-04T02:51:07.273148Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:886:2765], Recipient [2:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:51:07.273178Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:51:07.273238Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:07.273265Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:51:07.273295Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715662] at 72075186224037888 for ReadTableScan 2026-01-04T02:51:07.273320Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit ReadTableScan 2026-01-04T02:51:07.273349Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715662] at 72075186224037888 error: , IsFatalError: 0 2026-01-04T02:51:07.273384Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2026-01-04T02:51:07.273425Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit ReadTableScan 2026-01-04T02:51:07.273459Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037888 to execution unit FinishPropose 2026-01-04T02:51:07.273499Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit FinishPropose 2026-01-04T02:51:07.273534Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is DelayComplete 2026-01-04T02:51:07.273565Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit FinishPropose 2026-01-04T02:51:07.273598Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T02:51:07.273630Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037888 on unit CompletedOperations 2026-01-04T02:51:07.273676Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037888 is Executed 2026-01-04T02:51:07.273699Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: 
Advance execution plan for [0:281474976715662] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T02:51:07.273724Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715662] at 72075186224037888 has finished 2026-01-04T02:51:07.273755Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:07.273787Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037888 2026-01-04T02:51:07.273837Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:51:07.273869Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:51:07.273919Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:07.273945Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715662] at 72075186224037888 on unit FinishPropose 2026-01-04T02:51:07.273975Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715662 at tablet 72075186224037888 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-04T02:51:07.274029Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:07.274150Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1851: [ReadTable [2:1062:2879] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037888 2026-01-04T02:51:07.274221Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1062:2879] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.012483s execute time: 0.091568s total time: 0.104051s 2026-01-04T02:51:07.274516Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1062:2879], Recipient [2:886:2765]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitAfter [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:51:00.177678Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:00.282398Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:00.282721Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:00.282775Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:00.545718Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:00.545874Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:00.614386Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495057822682 != 1767495057822685 2026-01-04T02:51:00.629188Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:00.673043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:00.775159Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:01.077581Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:01.091504Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:01.194322Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-04T02:51:01.194402Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:51:01.195402Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-04T02:51:01.330582Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T02:51:01.330664Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:51:01.331180Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T02:51:01.331268Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:51:01.331475Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:51:01.331598Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:51:01.331726Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:51:01.333473Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:01.333893Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:51:01.334534Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:51:01.334594Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:51:01.366018Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:51:01.367015Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:51:01.367238Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-04T02:51:01.367439Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:51:01.399178Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:51:01.399805Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:51:01.399893Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:51:01.401246Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:51:01.401313Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:51:01.401359Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:51:01.408103Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:51:01.408278Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:51:01.408376Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:51:01.419201Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:51:01.457107Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:51:01.458381Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:51:01.458601Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:51:01.458658Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:01.458693Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:51:01.458742Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:01.458898Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:51:01.458946Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:51:01.459986Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:51:01.460069Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:51:01.460136Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:01.460166Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:01.460280Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:51:01.460324Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:51:01.460351Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:51:01.460393Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:51:01.460455Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:01.460582Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:01.460612Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:01.460663Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:51:01.461022Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-04T02:51:01.461062Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:51:01.461195Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:51:01.461483Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:51:01.461537Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:51:01.462276Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:51:01.462346Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status ... sactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\003\000\000\000b\005\035!\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\003\000\000\000" 2026-01-04T02:51:07.085707Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1062:2879] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2026-01-04T02:51:07.085738Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1062:2879] TxId# 281474976715661] Sending TEvStreamDataAck to [2:1197:2985] ShardId# 72075186224037890 2026-01-04T02:51:07.085838Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1197:2985], Recipient [2:1062:2879]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2026-01-04T02:51:07.085866Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1062:2879] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2026-01-04T02:51:07.085922Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2026-01-04T02:51:07.086208Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1061:2879], Recipient [2:1062:2879]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2026-01-04T02:51:07.086244Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1062:2879] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-04T02:51:07.086335Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1062:2879] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2026-01-04T02:51:07.086381Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2026-01-04T02:51:07.086456Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715662, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-04T02:51:07.086575Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1197:2985], Recipient [2:1062:2879]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN 
Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715662 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2026-01-04T02:51:07.086602Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1062:2879] TxId# 281474976715661] Received stream data from ShardId# 72075186224037890 2026-01-04T02:51:07.086628Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1062:2879] TxId# 281474976715661] Sending TEvStreamDataAck to [2:1197:2985] ShardId# 72075186224037890 2026-01-04T02:51:07.086676Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715662, PendingAcks: 0 2026-01-04T02:51:07.086735Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1197:2985], Recipient [2:1062:2879]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715662 ShardId: 72075186224037890 2026-01-04T02:51:07.086760Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1062:2879] TxId# 281474976715661] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2026-01-04T02:51:07.087032Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1061:2879], Recipient [2:1062:2879]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715661 MessageSizeLimit: 1 ReservedMessages: 1 2026-01-04T02:51:07.087065Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1062:2879] TxId# 281474976715661] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-04T02:51:07.087090Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1062:2879] TxId# 281474976715661] Reserving quota 1 messages for ShardId# 72075186224037890 2026-01-04T02:51:07.087124Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2026-01-04T02:51:07.087173Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715662, MessageQuota: 1 2026-01-04T02:51:07.087308Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287429, Sender [2:1197:2985], Recipient [2:1062:2879]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715662 ShardId: 72075186224037890 2026-01-04T02:51:07.087336Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:1062:2879] TxId# 281474976715661] Received TEvStreamQuotaRelease from ShardId# 72075186224037890 2026-01-04T02:51:07.087360Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:1062:2879] TxId# 281474976715661] Released quota 1 reserved messages from ShardId# 72075186224037890 2026-01-04T02:51:07.087414Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2026-01-04T02:51:07.087442Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715662, at: 72075186224037890 2026-01-04T02:51:07.087542Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1101:2910], Recipient [2:1101:2910]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:51:07.087571Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event 
TEvPrivate::TEvProgressTransaction 2026-01-04T02:51:07.087609Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-04T02:51:07.087636Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:51:07.087662Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715662] at 72075186224037890 for ReadTableScan 2026-01-04T02:51:07.087686Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037890 on unit ReadTableScan 2026-01-04T02:51:07.087727Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715662] at 72075186224037890 error: , IsFatalError: 0 2026-01-04T02:51:07.087760Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2026-01-04T02:51:07.087787Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit ReadTableScan 2026-01-04T02:51:07.087811Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037890 to execution unit FinishPropose 2026-01-04T02:51:07.087858Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037890 on unit FinishPropose 2026-01-04T02:51:07.087916Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037890 is DelayComplete 2026-01-04T02:51:07.087971Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit FinishPropose 2026-01-04T02:51:07.087996Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715662] at 72075186224037890 to execution unit CompletedOperations 2026-01-04T02:51:07.088019Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715662] at 72075186224037890 on unit CompletedOperations 2026-01-04T02:51:07.088055Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715662] at 72075186224037890 is Executed 2026-01-04T02:51:07.088092Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715662] at 72075186224037890 executing on unit CompletedOperations 2026-01-04T02:51:07.088115Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715662] at 72075186224037890 has finished 2026-01-04T02:51:07.088142Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:07.088165Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2026-01-04T02:51:07.088188Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2026-01-04T02:51:07.088211Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2026-01-04T02:51:07.088251Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-04T02:51:07.088278Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [0:281474976715662] at 72075186224037890 on unit FinishPropose 2026-01-04T02:51:07.088310Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715662 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-04T02:51:07.088370Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-04T02:51:07.088582Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1101:2910], Recipient [2:1062:2879]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: COMPLETE TxId: 281474976715662 Step: 0 OrderId: 281474976715662 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037890 CpuTimeUsec: 297 } } CommitVersion { Step: 0 TxId: 281474976715662 } 2026-01-04T02:51:07.088631Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1851: [ReadTable [2:1062:2879] TxId# 281474976715661] Received stream complete from ShardId# 72075186224037890 2026-01-04T02:51:07.088707Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1062:2879] TxId# 281474976715661] RESPONSE Status# ExecComplete prepare time: 0.013531s execute time: 0.246506s total time: 0.260037s 2026-01-04T02:51:07.089035Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1062:2879], Recipient [2:886:2765]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2026-01-04T02:51:07.089284Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1062:2879], Recipient [2:1098:2908]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 2026-01-04T02:51:07.089534Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1062:2879], Recipient [2:1101:2910]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715661 |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableSplitFinished [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:51:00.115405Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:00.214000Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:00.214374Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:00.214435Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:00.545639Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:00.545789Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:00.632048Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495057822662 != 1767495057822665 2026-01-04T02:51:00.641713Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:00.687058Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:00.787562Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:01.077527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:01.090585Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:01.194290Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-04T02:51:01.194354Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:51:01.195343Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-04T02:51:01.301731Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T02:51:01.301870Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:51:01.303740Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T02:51:01.303920Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:51:01.304365Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:51:01.304508Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:51:01.304685Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:51:01.305081Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:51:01.309538Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:01.311365Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:51:01.311452Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:51:01.354553Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:51:01.355620Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:51:01.357002Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:51:01.357256Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:51:01.396847Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:51:01.397736Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:51:01.397872Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:51:01.400586Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:51:01.400711Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:51:01.400776Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:51:01.408111Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:51:01.408309Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:51:01.408416Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:51:01.419253Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:51:01.476006Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:51:01.476215Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:51:01.476332Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:51:01.476382Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:01.476417Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:51:01.476453Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:01.476620Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:51:01.476676Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:51:01.477087Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:51:01.477202Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:51:01.477293Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:01.477330Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:01.477381Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:51:01.477426Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:51:01.477480Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:51:01.477514Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:51:01.477561Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:01.477708Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:01.477743Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:01.477827Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:51:01.478242Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-04T02:51:01.478286Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:51:01.478396Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:51:01.478656Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:51:01.478709Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:51:01.478798Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:51:01.478854Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status ... ead_table_scan.cpp:662: Send response data ShardId: 72075186224037896, TxId: 281474976715664, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-04T02:51:07.307475Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1546:3255], Recipient [2:1278:3043]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: RESPONSE_DATA TxId: 281474976715664 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\006\000\000\000b\005\035B\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\006\000\000\000" 2026-01-04T02:51:07.307502Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1278:3043] TxId# 281474976715663] Received stream data from ShardId# 72075186224037896 2026-01-04T02:51:07.307522Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1278:3043] TxId# 281474976715663] Sending TEvStreamDataAck to [2:1546:3255] ShardId# 72075186224037896 2026-01-04T02:51:07.307580Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037896, TxId: 281474976715664, PendingAcks: 0 2026-01-04T02:51:07.307621Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1546:3255], Recipient [2:1278:3043]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715664 ShardId: 72075186224037896 2026-01-04T02:51:07.307637Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1278:3043] TxId# 281474976715663] Received TEvStreamQuotaRequest from ShardId# 72075186224037896 2026-01-04T02:51:07.307925Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1277:3043], Recipient [2:1278:3043]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715663 MessageSizeLimit: 1 ReservedMessages: 1 2026-01-04T02:51:07.307946Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1278:3043] TxId# 281474976715663] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-04T02:51:07.307963Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1278:3043] TxId# 281474976715663] Reserving quota 1 messages for ShardId# 72075186224037896 2026-01-04T02:51:07.307994Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2026-01-04T02:51:07.308048Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan 
ShardId: 72075186224037896, TxId: 281474976715664, MessageQuota: 1 2026-01-04T02:51:07.308168Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287429, Sender [2:1546:3255], Recipient [2:1278:3043]: NKikimrTx.TEvStreamQuotaRelease TxId: 281474976715664 ShardId: 72075186224037896 2026-01-04T02:51:07.308188Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2121: [ReadTable [2:1278:3043] TxId# 281474976715663] Received TEvStreamQuotaRelease from ShardId# 72075186224037896 2026-01-04T02:51:07.308207Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2189: [ReadTable [2:1278:3043] TxId# 281474976715663] Released quota 1 reserved messages from ShardId# 72075186224037896 2026-01-04T02:51:07.308239Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037896 2026-01-04T02:51:07.308259Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715664, at: 72075186224037896 2026-01-04T02:51:07.308332Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1446:3176], Recipient [2:1446:3176]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:51:07.308382Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:51:07.308418Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037896 2026-01-04T02:51:07.308436Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037896 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:51:07.308455Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715664] at 72075186224037896 for ReadTableScan 2026-01-04T02:51:07.308471Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037896 on unit ReadTableScan 2026-01-04T02:51:07.308503Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715664] at 72075186224037896 error: , IsFatalError: 0 2026-01-04T02:51:07.308532Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037896 is Executed 2026-01-04T02:51:07.308559Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit ReadTableScan 2026-01-04T02:51:07.308581Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037896 to execution unit FinishPropose 2026-01-04T02:51:07.308599Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037896 on unit FinishPropose 2026-01-04T02:51:07.308622Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037896 is DelayComplete 2026-01-04T02:51:07.308639Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit FinishPropose 2026-01-04T02:51:07.308657Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715664] at 72075186224037896 to execution unit CompletedOperations 2026-01-04T02:51:07.308676Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715664] at 72075186224037896 on unit CompletedOperations 2026-01-04T02:51:07.308703Z node 2 
:TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715664] at 72075186224037896 is Executed 2026-01-04T02:51:07.308718Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715664] at 72075186224037896 executing on unit CompletedOperations 2026-01-04T02:51:07.308732Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715664] at 72075186224037896 has finished 2026-01-04T02:51:07.308751Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037896 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:07.308782Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037896 2026-01-04T02:51:07.308801Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037896 has no attached operations 2026-01-04T02:51:07.308817Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037896 2026-01-04T02:51:07.308848Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037896 2026-01-04T02:51:07.308868Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474976715664] at 72075186224037896 on unit FinishPropose 2026-01-04T02:51:07.308894Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715664 at tablet 72075186224037896 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-04T02:51:07.308938Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037896 2026-01-04T02:51:07.309092Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1446:3176], Recipient [2:1278:3043]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037896 Status: COMPLETE TxId: 281474976715664 Step: 0 OrderId: 281474976715664 ExecLatency: 0 ProposeLatency: 0 DomainCoordinators: 72057594046316545 TxStats { PerShardStats { ShardId: 72075186224037896 CpuTimeUsec: 217 } } CommitVersion { Step: 0 TxId: 281474976715664 } 2026-01-04T02:51:07.309115Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1851: [ReadTable [2:1278:3043] TxId# 281474976715663] Received stream complete from ShardId# 72075186224037896 2026-01-04T02:51:07.309156Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1278:3043] TxId# 281474976715663] RESPONSE Status# ExecComplete prepare time: 0.017128s execute time: 0.477927s total time: 0.495055s 2026-01-04T02:51:07.309555Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1278:3043], Recipient [2:1083:2895]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-01-04T02:51:07.309861Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1278:3043], Recipient [2:1192:2977]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-01-04T02:51:07.310095Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1278:3043], Recipient [2:1194:2979]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 
3000 TxId: 281474976715663 2026-01-04T02:51:07.310385Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1278:3043], Recipient [2:1444:3174]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-01-04T02:51:07.310589Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1278:3043], Recipient [2:1446:3176]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-01-04T02:51:07.310738Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1549:3258], Recipient [2:1335:3092]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:07.310760Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:07.310798Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037893, clientId# [2:1547:3256], serverId# [2:1549:3258], sessionId# [0:0:0] 2026-01-04T02:51:07.310846Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [2:1550:3259], Recipient [2:1339:3094]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:07.310863Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:07.310882Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037894, clientId# [2:1548:3257], serverId# [2:1550:3259], sessionId# [0:0:0] 2026-01-04T02:51:07.310979Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1278:3043], Recipient [2:1335:3092]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 2026-01-04T02:51:07.311072Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1278:3043], Recipient [2:1339:3094]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715663 |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.1%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest |96.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_read_table/unittest >> DataShardReadTableSnapshots::ReadTableMaxRows [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:51:00.159727Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:00.253030Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:00.253401Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:00.253439Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:00.545713Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:00.545855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:00.621538Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495057928967 != 1767495057928970 2026-01-04T02:51:00.634245Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:00.681111Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:00.786341Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:01.077561Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:01.090953Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:01.194320Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-04T02:51:01.194412Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T02:51:01.195398Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-04T02:51:01.322896Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T02:51:01.323019Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T02:51:01.323724Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T02:51:01.323830Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-04T02:51:01.324204Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T02:51:01.324380Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T02:51:01.324551Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T02:51:01.324899Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T02:51:01.326699Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:01.327805Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T02:51:01.327897Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T02:51:01.354991Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T02:51:01.355824Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T02:51:01.356979Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:51:01.357217Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:51:01.405887Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T02:51:01.406564Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:51:01.406649Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:51:01.408054Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:51:01.408146Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:51:01.408197Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:51:01.408485Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:51:01.408597Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:51:01.408674Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:51:01.419411Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:51:01.458023Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:51:01.458405Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:51:01.458549Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:51:01.458603Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:01.458644Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:51:01.458693Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:01.458862Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:51:01.458931Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T02:51:01.460076Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:51:01.460175Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:51:01.460269Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:01.460312Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:01.460369Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T02:51:01.460424Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T02:51:01.460460Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T02:51:01.460498Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:51:01.460541Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:01.460680Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:01.460725Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:01.460799Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:51:01.461211Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-04T02:51:01.461264Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T02:51:01.461361Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:51:01.461579Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T02:51:01.461639Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:51:01.462272Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:51:01.462408Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status ... node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-04T02:51:07.092793Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:1196:2985], Recipient [2:1087:2897]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-01-04T02:51:07.092833Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2026-01-04T02:51:07.092910Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1196:2985], Recipient [2:1170:2961]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2026-01-04T02:51:07.092940Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1170:2961] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2026-01-04T02:51:07.092973Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1170:2961] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... 
observed row limit of 2 rows at [2:1196:2985] 2026-01-04T02:51:07.093092Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2026-01-04T02:51:07.093379Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-04T02:51:07.093465Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1196:2985], Recipient [2:1170:2961]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\004\000\000\000b\005\035,\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 1 DataLastKey: "\001\000\004\000\000\000\004\000\000\000" 2026-01-04T02:51:07.093486Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1170:2961] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2026-01-04T02:51:07.093504Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1170:2961] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1196:2985] ShardId# 72075186224037890 2026-01-04T02:51:07.093535Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2026-01-04T02:51:07.093576Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287428, Sender [2:1196:2985], Recipient [2:1170:2961]: NKikimrTx.TEvStreamQuotaRequest TxId: 281474976715663 ShardId: 72075186224037890 2026-01-04T02:51:07.093594Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2070: [ReadTable [2:1170:2961] TxId# 281474976715662] Received TEvStreamQuotaRequest from ShardId# 72075186224037890 2026-01-04T02:51:07.093764Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269287941, Sender [2:1169:2961], Recipient [2:1170:2961]: NKikimrTx.TEvStreamQuotaResponse TxId: 281474976715662 MessageSizeLimit: 1 ReservedMessages: 1 2026-01-04T02:51:07.093785Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2098: [ReadTable [2:1170:2961] TxId# 281474976715662] Updated quotas, allocated = 1, message size = 1, message rows = 0, available = 1 2026-01-04T02:51:07.093819Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:2161: [ReadTable [2:1170:2961] TxId# 281474976715662] Reserving quota 1 messages for ShardId# 72075186224037890 ... 
observed row limit of 1 rows at [2:1196:2985] 2026-01-04T02:51:07.093860Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 1 2026-01-04T02:51:07.093899Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:662: Send response data ShardId: 72075186224037890, TxId: 281474976715663, Size: 36, Rows: 0, PendingAcks: 1, MessageQuota: 0 2026-01-04T02:51:07.093984Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1233: StateReadTable, received event# 269550080, Sender [2:1196:2985], Recipient [2:1170:2961]: NKikimrTxDataShard.TEvProposeTransactionResult TxKind: TX_KIND_SCAN Origin: 72075186224037890 Status: RESPONSE_DATA TxId: 281474976715663 TxResult: "\n\016\n\003key\022\007\252\006\004\n\002\010\002\n\020\n\005value\022\007\252\006\004\n\002\010\002\030\001\022\016b\005\035\005\000\000\000b\005\0357\000\000\000" RowOffsets: 36 ApiVersion: 1 DataSeqNo: 2 DataLastKey: "\001\000\004\000\000\000\005\000\000\000" 2026-01-04T02:51:07.094048Z node 2 :TX_PROXY DEBUG: read_table_impl.cpp:1701: [ReadTable [2:1170:2961] TxId# 281474976715662] Received stream data from ShardId# 72075186224037890 2026-01-04T02:51:07.094081Z node 2 :TX_PROXY TRACE: read_table_impl.cpp:1712: [ReadTable [2:1170:2961] TxId# 281474976715662] Sending TEvStreamDataAck to [2:1196:2985] ShardId# 72075186224037890 2026-01-04T02:51:07.094165Z node 2 :TX_PROXY INFO: read_table_impl.cpp:2934: [ReadTable [2:1170:2961] TxId# 281474976715662] RESPONSE Status# ExecComplete prepare time: 0.014134s execute time: 0.143591s total time: 0.157725s 2026-01-04T02:51:07.094335Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:483: Got stream data ack ShardId: 72075186224037890, TxId: 281474976715663, PendingAcks: 0 2026-01-04T02:51:07.094380Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715663, MessageQuota: 0 2026-01-04T02:51:07.094750Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1170:2961], Recipient [2:1083:2895]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715662 2026-01-04T02:51:07.094931Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2026-01-04T02:51:07.094963Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715663, at: 72075186224037890 2026-01-04T02:51:07.095142Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549569, Sender [2:1170:2961], Recipient [2:1087:2897]: NKikimrTxDataShard.TEvCancelTransactionProposal TxId: 281474976715663 2026-01-04T02:51:07.095179Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3195: StateWork, processing event TEvDataShard::TEvCancelTransactionProposal 2026-01-04T02:51:07.095237Z node 2 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:73: Got TEvDataShard::TEvCancelTransactionProposal 72075186224037890 txId 281474976715663 2026-01-04T02:51:07.095297Z node 2 :TX_DATASHARD DEBUG: datashard__cancel_tx_proposal.cpp:44: Start TTxCancelTransactionProposal at tablet 72075186224037890 txId 281474976715663 2026-01-04T02:51:07.095399Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:1087:2897], Recipient [2:1087:2897]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T02:51:07.095430Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event 
TEvPrivate::TEvProgressTransaction 2026-01-04T02:51:07.095468Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-04T02:51:07.095500Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 0 immediate 1 planned 0 2026-01-04T02:51:07.095532Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [0:281474976715663] at 72075186224037890 for ReadTableScan 2026-01-04T02:51:07.095560Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715663] at 72075186224037890 on unit ReadTableScan 2026-01-04T02:51:07.095592Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [0:281474976715663] at 72075186224037890 error: , IsFatalError: 0 2026-01-04T02:51:07.095629Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2026-01-04T02:51:07.095659Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit ReadTableScan 2026-01-04T02:51:07.095687Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715663] at 72075186224037890 to execution unit FinishPropose 2026-01-04T02:51:07.095728Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715663] at 72075186224037890 on unit FinishPropose 2026-01-04T02:51:07.095762Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715663] at 72075186224037890 is DelayComplete 2026-01-04T02:51:07.095781Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit FinishPropose 2026-01-04T02:51:07.095801Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715663] at 72075186224037890 to execution unit CompletedOperations 2026-01-04T02:51:07.095820Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715663] at 72075186224037890 on unit CompletedOperations 2026-01-04T02:51:07.095847Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715663] at 72075186224037890 is Executed 2026-01-04T02:51:07.095863Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715663] at 72075186224037890 executing on unit CompletedOperations 2026-01-04T02:51:07.095879Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:281474976715663] at 72075186224037890 has finished 2026-01-04T02:51:07.095899Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:07.095918Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2026-01-04T02:51:07.095940Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2026-01-04T02:51:07.095968Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2026-01-04T02:51:07.096012Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-04T02:51:07.096042Z node 2 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1954: Complete execution for [0:281474976715663] at 72075186224037890 on unit FinishPropose 2026-01-04T02:51:07.096076Z node 2 :TX_DATASHARD TRACE: finish_propose_unit.cpp:167: Propose transaction complete txid 281474976715663 at tablet 72075186224037890 send to client, exec latency: 0 ms, propose latency: 0 ms, status: COMPLETE 2026-01-04T02:51:07.096139Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-04T02:51:07.096367Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287431, Sender [2:1170:2961], Recipient [2:1087:2897]: NKikimrTx.TEvInterruptTransaction TxId: 281474976715663 2026-01-04T02:51:07.096409Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3176: StateWork, processing event TEvTxProcessing::TEvInterruptTransaction 2026-01-04T02:51:07.096497Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553190, Sender [2:1170:2961], Recipient [2:1087:2897]: NKikimrTxDataShard.TEvDiscardVolatileSnapshotRequest OwnerId: 72057594046644480 PathId: 38 Step: 3000 TxId: 281474976715662 |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_read_table/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TSchemeShardSubDomainTest::DiskSpaceUsage-EnablePersistentPartitionStats [GOOD] >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Deactivated [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:50:54.783139Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:54.891693Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:50:54.899805Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:298:2342], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:50:55.176090Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:55.176220Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:55.229452Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495052405595 != 1767495052405598 2026-01-04T02:50:55.239310Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:55.282024Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:55.372151Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:50:55.803766Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:55.804273Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:55.804306Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:55.804330Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:55.804655Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:55.818420Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-04T02:51:07.720584Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:884:2762], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:07.720785Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:07.721234Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:894:2765], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:07.721311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: metadata provider service is disabled ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TA] $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} |96.2%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} |96.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_read_table/test-results/unittest/{meta.json ... results_accumulator.log} >> AsyncIndexChangeCollector::UpsertToSameKey >> TKesusTest::TestQuoterAccountResourcesAggregateResources [GOOD] >> TKesusTest::TestQuoterAccountLabels >> TFlatTest::SplitEmptyTwice [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsage [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:51:01.239263Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:51:01.239347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:01.239388Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:51:01.239421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:51:01.239457Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:51:01.239501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:51:01.239574Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:01.239659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:51:01.240510Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:51:01.240784Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:51:01.334158Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:51:01.334225Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:01.349119Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:51:01.349297Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:51:01.349507Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:51:01.357667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:51:01.357934Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:51:01.358657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:01.358933Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:51:01.362109Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:01.362321Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:51:01.363492Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:01.363552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:01.363736Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:51:01.363785Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:51:01.363890Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:51:01.364104Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.370517Z node 1 :HIVE INFO: tablet_helpers.cpp:1241: [72057594037968897] started, primary subdomain 0:0 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:243:2058] recipient: [1:15:2062] 2026-01-04T02:51:01.509322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "MyRoot" StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } } } TxId: 1 TabletId: 72057594046678944 , at schemeshard: 72057594046678944 2026-01-04T02:51:01.509597Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //MyRoot, opId: 
1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.509819Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 0 2026-01-04T02:51:01.509872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046678944, LocalPathId: 1] source path: 2026-01-04T02:51:01.510117Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046678944 2026-01-04T02:51:01.510194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046678944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:01.512341Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046678944 PathId: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:01.512549Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //MyRoot 2026-01-04T02:51:01.512782Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.512845Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046678944 2026-01-04T02:51:01.512881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:51:01.512915Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:51:01.514741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.514808Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:51:01.514882Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:51:01.516753Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.516804Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.516856Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:01.516912Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:51:01.520793Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 
message:Transaction { AffectedSet { TabletId: 72057594046678944 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:51:01.522560Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:51:01.522762Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 FAKE_COORDINATOR: Add transaction: 1 at step: 5000001 FAKE_COORDINATOR: advance: minStep5000001 State->FrontStep: 0 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 1 at step: 5000001 2026-01-04T02:51:01.523876Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000001, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:01.524017Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 1 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000001 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:51:01.524062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:01.524348Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 128 -> 240 2026-01-04T02:51:01.524400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 1:0, at tablet# 72057594046678944 2026-01-04T02:51:01.524581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 1] was 1 2026-01-04T02:51:01.524723Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 1 2026-01-04T02:51:01.526731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:01.526780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard Desc ... 
TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 Leader for TabletID 72057594046678944 is [2:1102:3022] sender: [2:1177:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 2026-01-04T02:51:09.038732Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0.0967 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 2026-01-04T02:51:09.070266Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 0 2026-01-04T02:51:09.070455Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 584 row count 1 2026-01-04T02:51:09.070516Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Table1, is column=0, is olap=0, 
RowCount 1, DataSize 584 2026-01-04T02:51:09.070625Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409546: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 2026-01-04T02:51:09.081109Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 2026-01-04T02:51:09.102572Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 2026-01-04T02:51:09.102707Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409548 followerId 0 pathId 
[OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 584 rowCount 1 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 1 2026-01-04T02:51:09.102806Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 584 row count 1 2026-01-04T02:51:09.102864Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-04T02:51:09.102956Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409547: SplitByLoadNotEnabledForTable 2026-01-04T02:51:09.102994Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:3 data size 584 row count 1 2026-01-04T02:51:09.103019Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409548 maps to shardIdx: 72057594046678944:3 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=Table2, is column=0, is olap=0, RowCount 1, DataSize 584 2026-01-04T02:51:09.103074Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186233409548: SplitByLoadNotEnabledForTable TEST waitForFullStatsUpdate, schemeshard 0x00007DF8110BD900, stats written 3 2026-01-04T02:51:09.103586Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:09.103780Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot" took 242us result status StatusSuccess 2026-01-04T02:51:09.104277Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot" PathDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 10 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 10 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 1 SecurityStateVersion: 0 } ChildrenExist: true } Children { Name: ".sys" PathId: 2 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976710657 CreateStep: 5000001 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } Children { Name: "Table1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } Children { Name: "Table2" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 1 
PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 38 PathsLimit: 10000 ShardsInside: 3 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1752 DataSize: 1752 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { Audience: "/MyRoot" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::TopicDiskSpaceQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:51:01.514158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:51:01.514278Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:01.514323Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:51:01.514361Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:51:01.514404Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:51:01.514437Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:51:01.514490Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:01.514567Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:51:01.515439Z node 1 :FLAT_TX_SCHEMESHARD 
NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:51:01.515758Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:51:01.607142Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:51:01.607204Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:01.622235Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:51:01.622504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:51:01.622691Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:51:01.631096Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:51:01.631335Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:51:01.632154Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:01.632416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:51:01.635486Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:01.635688Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:51:01.636853Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:01.636920Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:01.637030Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:51:01.637077Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:51:01.637195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:51:01.637409Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.793538Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.794590Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { 
WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.794750Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.794826Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.794899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.794972Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.795042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.795108Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.795206Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.795294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.795363Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.795455Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.795529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.795642Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.795755Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 
46678944, LocalPathId: 39] 2026-01-04T02:51:08.583733Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:08.583772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-04T02:51:08.583851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 38 2026-01-04T02:51:08.583881Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 103, path id: 39 2026-01-04T02:51:08.583992Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:51:08.584035Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 103:0 ProgressState 2026-01-04T02:51:08.584146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:51:08.584188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:51:08.584234Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 1/1 2026-01-04T02:51:08.584268Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:51:08.584311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 1/1, is published: false 2026-01-04T02:51:08.584356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 1/1 2026-01-04T02:51:08.584416Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:51:08.584467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:51:08.584613Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:51:08.584668Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:1004: Publication still in progress, tx: 103, publications: 2, subscribers: 0 2026-01-04T02:51:08.584711Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 38], 9 2026-01-04T02:51:08.584746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:1011: Publication details: tx: 103, [OwnerId: 72057594046678944, LocalPathId: 39], 18446744073709551615 2026-01-04T02:51:08.586013Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 3 TxId_Deprecated: 0 2026-01-04T02:51:08.586135Z node 1 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:51:08.586237Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 39 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:51:08.586278Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 2, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:51:08.586323Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], version: 18446744073709551615 2026-01-04T02:51:08.586367Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 1 2026-01-04T02:51:08.586870Z node 1 :HIVE INFO: tablet_helpers.cpp:1510: [72057594037968897] TEvDeleteTablet, msg: ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 FAKEHIVE 72057594037968897 TEvDeleteTablet ShardOwnerId: 72057594046678944 ShardLocalIdx: 4 TxId_Deprecated: 0 2026-01-04T02:51:08.587002Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 3, at schemeshard: 72057594046678944 2026-01-04T02:51:08.587386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:51:08.587439Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 39], at schemeshard: 72057594046678944 2026-01-04T02:51:08.587509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 5 2026-01-04T02:51:08.588473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046678944 ShardLocalIdx: 4, at schemeshard: 72057594046678944 2026-01-04T02:51:08.588997Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:51:08.589081Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 9 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:51:08.589113Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:51:08.589146Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 
38], version: 9 2026-01-04T02:51:08.589178Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:51:08.589251Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046678944, txId: 103, subscribers: 0 2026-01-04T02:51:08.591678Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:51:08.593768Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:3 2026-01-04T02:51:08.593890Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 2026-01-04T02:51:08.593964Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046678944:4 2026-01-04T02:51:08.594029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 TestModificationResult got TxId: 103, wait until txId: 103 TestWaitNotification wait txId: 103 2026-01-04T02:51:08.594466Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 103: send EvNotifyTxCompletion 2026-01-04T02:51:08.594513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 103 2026-01-04T02:51:08.594976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 103, at schemeshard: 72057594046678944 2026-01-04T02:51:08.595065Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:51:08.595105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [1:1157:3060] TestWaitNotification: OK eventTxId 103 2026-01-04T02:51:08.980512Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:08.980724Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_1" took 253us result status StatusSuccess 2026-01-04T02:51:08.981041Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_1" PathDescription { Self { Name: "USER_1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 9 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 9 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 5 SubDomainVersion: 1 SubDomainStateVersion: 2 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 
ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 2 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 1 } SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> CdcStreamChangeCollector::UpsertManyRows ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TStoragePoolsQuotasTest::DifferentQuotasInteraction-IsExternalSubdomain-EnableSeparateQuotas [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:50:59.470074Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:50:59.470153Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue 
configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:59.470193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.000000s, StatsMaxBatchSize# 0, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:50:59.470230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:50:59.470271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:50:59.470299Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:50:59.470356Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:59.470444Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:50:59.471317Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:50:59.471635Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:50:59.563885Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:50:59.563954Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:59.578900Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:50:59.579144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:50:59.579394Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:50:59.588219Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:50:59.588451Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:50:59.589235Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:50:59.589504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:50:59.592600Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:59.592791Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:50:59.593988Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:50:59.594066Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:59.594195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:50:59.594245Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:50:59.594355Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:50:59.594559Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:50:59.747599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.748692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.748829Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.748902Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.748968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.749032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.749099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.749207Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.749285Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.749356Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.749419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.749494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.749601Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.749691Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:59.749791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByRea ... 1-04T02:51:10.770622Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-01-04T02:51:10.770681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-04T02:51:10.770873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 104, path id: [OwnerId: 72075186233409546, LocalPathId: 32] 2026-01-04T02:51:10.771056Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-04T02:51:10.771105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:854:2808], at schemeshard: 72075186233409546, txId: 104, path id: 1 2026-01-04T02:51:10.771152Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:854:2808], at schemeshard: 72075186233409546, txId: 104, path id: 32 2026-01-04T02:51:10.771608Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-04T02:51:10.771673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:1120: NTableState::TProposedWaitParts operationId# 104:0 ProgressState at tablet: 72075186233409546 2026-01-04T02:51:10.771772Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:710: all shard schema changes has been received, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-04T02:51:10.771817Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:722: send schema changes ack message, operation: 104:0, datashard: 72075186233409549, at schemeshard: 72075186233409546 2026-01-04T02:51:10.771859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 104:0 129 -> 240 2026-01-04T02:51:10.772983Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72075186233409546, cookie: 104 2026-01-04T02:51:10.773104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 14 PathOwnerId: 72075186233409546, cookie: 104 2026-01-04T02:51:10.773150Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 
72075186233409546, txId: 104 2026-01-04T02:51:10.773198Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 1], version: 14 2026-01-04T02:51:10.773241Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 1] was 6 2026-01-04T02:51:10.774389Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-01-04T02:51:10.774473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 32 Version: 18446744073709551615 PathOwnerId: 72075186233409546, cookie: 104 2026-01-04T02:51:10.774503Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72075186233409546, txId: 104 2026-01-04T02:51:10.774534Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72075186233409546, txId: 104, pathId: [OwnerId: 72075186233409546, LocalPathId: 32], version: 18446744073709551615 2026-01-04T02:51:10.774562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 4 2026-01-04T02:51:10.774633Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 0/1, is published: true 2026-01-04T02:51:10.777100Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 104:0, at schemeshard: 72075186233409546 2026-01-04T02:51:10.777160Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 104:0 ProgressState, at schemeshard: 72075186233409546 2026-01-04T02:51:10.777470Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 3 2026-01-04T02:51:10.777641Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:51:10.777680Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:51:10.777723Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#104:0 progress is 1/1 2026-01-04T02:51:10.777754Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 2026-01-04T02:51:10.777791Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 104, ready parts: 1/1, is published: true 2026-01-04T02:51:10.777871Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [1:999:2940] message: TxId: 104 2026-01-04T02:51:10.777920Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 104 ready parts: 1/1 
2026-01-04T02:51:10.777958Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 104:0 2026-01-04T02:51:10.777993Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 104:0 2026-01-04T02:51:10.778090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72075186233409546, LocalPathId: 32] was 2 2026-01-04T02:51:10.778598Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72075186233409546 2026-01-04T02:51:10.778637Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72075186233409546, txId: 0, path id: [OwnerId: 72075186233409546, LocalPathId: 1] 2026-01-04T02:51:10.779275Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-01-04T02:51:10.780301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186233409546, cookie: 104 2026-01-04T02:51:10.781547Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72075186233409546 2026-01-04T02:51:10.781599Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:854:2808], at schemeshard: 72075186233409546, txId: 0, path id: 1 2026-01-04T02:51:10.781987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 104: got EvNotifyTxCompletionResult 2026-01-04T02:51:10.782032Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 104: satisfy waiter [1:2033:3943] 2026-01-04T02:51:10.782502Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72075186233409546, msg: Owner: 72075186233409546 Generation: 2 LocalPathId: 1 Version: 15 PathOwnerId: 72075186233409546, cookie: 0 TestWaitNotification: OK eventTxId 104 2026-01-04T02:51:10.785953Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/SomeDatabase" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72075186233409546 2026-01-04T02:51:10.786156Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72075186233409546 describe path "/MyRoot/SomeDatabase" took 226us result status StatusSuccess 2026-01-04T02:51:10.786649Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/SomeDatabase" PathDescription { Self { Name: "MyRoot/SomeDatabase" PathId: 1 SchemeshardId: 72075186233409546 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 1 CreateStep: 0 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 15 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 15 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 8 SubDomainVersion: 2 SubDomainStateVersion: 4 SecurityStateVersion: 0 } } Children { Name: 
".sys" PathId: 2 SchemeshardId: 72075186233409546 PathType: EPathTypeDir CreateFinished: true CreateTxId: 281474976720657 CreateStep: 250 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "metadata@system" ACL: "" ChildrenExist: true } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 38 ProcessingParams { Version: 2 PlanResolution: 50 Coordinators: 72075186233409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409548 SchemeShard: 72075186233409546 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "fast" Kind: "fast_kind" } StoragePools { Name: "large" Kind: "large_kind" } PathsInside: 30 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } StoragePoolsUsage { PoolKind: "fast_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } StoragePoolsUsage { PoolKind: "large_kind" TotalSize: 0 DataSize: 0 IndexSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 DatabaseQuotas { data_size_hard_quota: 2800 data_size_soft_quota: 2200 storage_quotas { unit_kind: "fast_kind" data_size_hard_quota: 600 data_size_soft_quota: 500 } storage_quotas { unit_kind: "large_kind" data_size_hard_quota: 2200 data_size_soft_quota: 1700 } } SecurityState { Audience: "/MyRoot/SomeDatabase" } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 1 PathOwnerId: 72075186233409546, at schemeshard: 72075186233409546 |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore [GOOD] |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TKesusTest::TestQuoterAccountLabels [GOOD] >> TKesusTest::TestPassesUpdatedPropsToSession |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::DiskSpaceUsageWithColumnTableInStore [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] TEST create schemeshard, 0x00007D9F8EEAE500 Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:51:01.606301Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:51:01.606387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: 
BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:01.606423Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:51:01.606455Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:51:01.606509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:51:01.606539Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:51:01.606606Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:51:01.606686Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:51:01.607481Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:51:01.607765Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:51:01.668533Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:51:01.668577Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:01.682926Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:51:01.683121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:51:01.683417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:51:01.691478Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:51:01.691711Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:51:01.692342Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:51:01.692602Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:51:01.695760Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:01.695959Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:51:01.697126Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:51:01.697188Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:51:01.697320Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:51:01.697387Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:51:01.697511Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:51:01.697721Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:51:01.851295Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.851965Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.852053Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.852107Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.852151Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.852204Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.852244Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.852288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.852348Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.852388Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.852424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.852473Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.852513Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.852577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: 
"/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:51:01.852631Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by ... ten 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats 
written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 Leader for TabletID 72057594046678944 is [2:1018:2977] sender: [2:1077:2058] recipient: [2:15:2062] TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 2026-01-04T02:51:11.533984Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 1263320 rowCount 100000 cpuUsage 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, 
stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 2026-01-04T02:51:11.575947Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 0 2026-01-04T02:51:11.576174Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 1263320 row count 100000 2026-01-04T02:51:11.576239Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Store, is column=0, is olap=1, RowCount 100000, DataSize 1263320 2026-01-04T02:51:11.576329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:451: OLAP store contains 1 tables. 2026-01-04T02:51:11.576386Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:473: Aggregated stats for pathId 38: RowCount 100000, DataSize 1263320 TEST waitForFullStatsUpdate, schemeshard 0x00007D9F8F049900, stats written 1 2026-01-04T02:51:11.576920Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Store" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:51:11.577143Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Store" took 271us result status StatusSuccess 2026-01-04T02:51:11.577789Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/Store" PathDescription { Self { Name: "Store" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnStore CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 3 ColumnStoreVersion: 1 } ChildrenExist: true } Children { Name: "ColumnTable" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeColumnTable CreateFinished: true CreateTxId: 102 CreateStep: 5000039 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: false } TableStats { DataSize: 1263320 RowCount: 100000 IndexSize: 0 LastAccessTime: 165 LastUpdateTime: 165 ImmediateTxCompleted: 0 PlannedTxCompleted: 3 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 
RowUpdates: 100000 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 } DomainKey { SchemeShard: 72057594046678944 PathId: 1 } PathsInside: 38 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 1263320 DataSize: 1263320 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } ColumnStoreDescription { Name: "Store" ColumnShardCount: 1 ColumnShards: 72075186233409546 SchemaPresets { Id: 1 Name: "default" Schema { Columns { Id: 1 Name: "timestamp" Type: "Timestamp" TypeId: 50 NotNull: true StorageId: "" DefaultValue { } } Columns { Id: 2 Name: "data" Type: "Utf8" TypeId: 4608 NotNull: false StorageId: "" DefaultValue { } } KeyColumnNames: "timestamp" NextColumnId: 3 Version: 1 Options { SchemeNeedActualization: false } NextColumnFamilyId: 1 } } NextSchemaPresetId: 2 NextTtlSettingsPresetId: 1 } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest >> AsyncIndexChangeCollector::UpsertSingleRow [GOOD] >> AsyncIndexChangeCollector::UpsertManyRows >> AsyncIndexChangeCollector::InsertSingleRow [GOOD] >> AsyncIndexChangeCollector::InsertManyRows >> CdcStreamChangeCollector::InsertSingleRow [GOOD] >> CdcStreamChangeCollector::InsertSingleUuidRow >> AsyncIndexChangeCollector::DeleteNothing [GOOD] >> AsyncIndexChangeCollector::DeleteSingleRow >> AsyncIndexChangeCollector::CoveredIndexUpdateCoveredColumn [GOOD] >> AsyncIndexChangeCollector::CoveredIndexUpsert >> CdcStreamChangeCollector::UpsertIntoTwoStreams [GOOD] >> CdcStreamChangeCollector::PageFaults |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestPassesUpdatedPropsToSession [GOOD] Test command err: 2026-01-04T02:51:03.461939Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:03.462052Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 
2026-01-04T02:51:03.474329Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:03.474439Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:03.498820Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:03.506394Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:134:2159], cookie=8712058194359861115, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-01-04T02:51:03.506713Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-04T02:51:03.528716Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:134:2159], cookie=8712058194359861115) 2026-01-04T02:51:03.529168Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[1:145:2167], cookie=11604846569043950874, path="/Root/Res", config={ }) 2026-01-04T02:51:03.529344Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-01-04T02:51:03.540866Z node 1 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[1:145:2167], cookie=11604846569043950874) 2026-01-04T02:51:03.542182Z node 1 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [1:150:2172]. Cookie: 6552147462485084302. Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-04T02:51:03.542236Z node 1 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[1:150:2172], cookie=6552147462485084302) 2026-01-04T02:51:03.542612Z node 1 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [1:150:2172]. Cookie: 3853720812689887931. 
Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 28000 } } 2026-01-04T02:51:03.542646Z node 1 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[1:150:2172], cookie=3853720812689887931) 2026-01-04T02:51:05.628141Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:05.628245Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:05.646462Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:05.646607Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:05.681100Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:05.681564Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[2:136:2161], cookie=14342482749174347372, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-01-04T02:51:05.681880Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-04T02:51:05.693852Z node 2 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[2:136:2161], cookie=14342482749174347372) 2026-01-04T02:51:05.694597Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:147:2169]. Cookie: 9715664016676170150. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-04T02:51:05.694654Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:147:2169], cookie=9715664016676170150) 2026-01-04T02:51:05.695160Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [2:147:2169]. Cookie: 1086032622095541341. Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedUnitsPerSecond: 100 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { Enabled: true BillingPeriodSec: 2 } OnDemand { Enabled: true BillingPeriodSec: 2 } Overshoot { Enabled: true BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-04T02:51:05.695208Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[2:147:2169], cookie=1086032622095541341) 2026-01-04T02:51:05.695689Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:147:2169]. Cookie: 8087405184329864243. 
Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2026-01-04T02:51:05.695750Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:147:2169], cookie=8087405184329864243) 2026-01-04T02:51:05.696148Z node 2 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [2:147:2169]. Cookie: 11204639755836101477. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 1017500 } } 2026-01-04T02:51:05.696195Z node 2 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[2:147:2169], cookie=11204639755836101477) 2026-01-04T02:51:07.841335Z node 3 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:07.841446Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:07.859686Z node 3 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:07.860136Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:07.894154Z node 3 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:07.894492Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:136:2161], cookie=14205871325749885883, path="/Root", config={ MaxUnitsPerSecond: 300 PrefetchCoefficient: 1 }) 2026-01-04T02:51:07.894716Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-04T02:51:07.906238Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:136:2161], cookie=14205871325749885883) 2026-01-04T02:51:07.906617Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:147:2169], cookie=9045003389835907570, path="/Root/Res1", config={ }) 2026-01-04T02:51:07.906773Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res1" 2026-01-04T02:51:07.918301Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:147:2169], cookie=9045003389835907570) 2026-01-04T02:51:07.918698Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[3:152:2174], cookie=11157697761540756048, path="/Root/Res2", config={ }) 2026-01-04T02:51:07.918853Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 3 "Root/Res2" 2026-01-04T02:51:07.930762Z node 3 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[3:152:2174], cookie=11157697761540756048) 2026-01-04T02:51:07.931300Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:157:2179]. Cookie: 305317573810175872. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res1" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-04T02:51:07.931338Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:157:2179], cookie=305317573810175872) 2026-01-04T02:51:07.931802Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [3:157:2179]. Cookie: 8765698407507828264. Data: { Results { ResourceId: 3 Error { Status: SUCCESS } EffectiveProps { ResourceId: 3 ResourcePath: "Root/Res2" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 300 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 1 } AccountingConfig { ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 1 OvershootCoefficient: 1 Provisioned { BillingPeriodSec: 2 } OnDemand { BillingPeriodSec: 2 } Overshoot { BillingPeriodSec: 2 } } } } ProtocolVersion: 1 } 2026-01-04T02:51:07.931833Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[3:157:2179], cookie=8765698407507828264) 2026-01-04T02:51:07.932177Z node 3 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [3:157:2179]. Cookie: 12002883858542840672. Data: { ResourcesInfo { ResourceId: 2 AcceptedUs: 1020500 } ResourcesInfo { ResourceId: 3 AcceptedUs: 1020500 } } 2026-01-04T02:51:07.932215Z node 3 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[3:157:2179], cookie=12002883858542840672) 2026-01-04T02:51:10.042534Z node 4 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:10.042650Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:10.063000Z node 4 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:10.063418Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:10.087967Z node 4 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:10.088490Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[4:137:2161], cookie=15997469534215754468, path="/Root", config={ MaxUnitsPerSecond: 100 PrefetchCoefficient: 300 }) 2026-01-04T02:51:10.088858Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-04T02:51:10.101106Z node 4 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[4:137:2161], cookie=15997469534215754468) 2026-01-04T02:51:10.102322Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [4:147:2169]. Cookie: 10122393854548745878. 
Data: { Results { ResourceId: 1 Error { Status: SUCCESS } EffectiveProps { ResourceId: 1 ResourcePath: "Root" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 PrefetchCoefficient: 300 } AccountingConfig { Enabled: true ReportPeriodMs: 1000 AccountPeriodMs: 1000 CollectPeriodSec: 2 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { Enabled: true BillingPeriodSec: 2 Labels { key: "k1" value: "v1" } Labels { key: "k2" value: "v2" } } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-04T02:51:10.102404Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[4:147:2169], cookie=10122393854548745878) 2026-01-04T02:51:10.102956Z node 4 :KESUS_TABLET TRACE: quoter_runtime.cpp:224: [72057594037927937] Send TEvAccountResourcesAck to [4:147:2169]. Cookie: 4290385953932359297. Data: { ResourcesInfo { ResourceId: 1 AcceptedUs: 27500 } } 2026-01-04T02:51:10.103011Z node 4 :KESUS_TABLET DEBUG: quoter_runtime.cpp:229: [72057594037927937] Account quoter resources (sender=[4:147:2169], cookie=4290385953932359297) 2026-01-04T02:51:12.243784Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:12.243899Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:12.262909Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:12.263049Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:12.277499Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:12.278014Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:135:2159], cookie=4797929236931501145, path="/Root", config={ MaxUnitsPerSecond: 100 }) 2026-01-04T02:51:12.278241Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 1 "Root" 2026-01-04T02:51:12.301069Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:135:2159], cookie=4797929236931501145) 2026-01-04T02:51:12.301757Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:36: [72057594037927937] TTxQuoterResourceAdd::Execute (sender=[5:144:2166], cookie=8423851676226563251, path="/Root/Res", config={ }) 2026-01-04T02:51:12.302044Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:76: [72057594037927937] Created new quoter resource 2 "Root/Res" 2026-01-04T02:51:12.314174Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_add.cpp:85: [72057594037927937] TTxQuoterResourceAdd::Complete (sender=[5:144:2166], cookie=8423851676226563251) 2026-01-04T02:51:12.314821Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:145: [72057594037927937] Send TEvSubscribeOnResourcesResult to [5:149:2171]. Cookie: 17181777384119055927. 
Data: { Results { ResourceId: 2 Error { Status: SUCCESS } EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } } ProtocolVersion: 1 } 2026-01-04T02:51:12.314861Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:150: [72057594037927937] Subscribe on quoter resources (sender=[5:149:2171], cookie=17181777384119055927) 2026-01-04T02:51:12.315239Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:34: [72057594037927937] TTxQuoterResourceUpdate::Execute (sender=[5:153:2175], cookie=5186360060132088930, id=0, path="/Root", config={ MaxUnitsPerSecond: 150 }) 2026-01-04T02:51:12.315371Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:61: [72057594037927937] Updated quoter resource 1 "Root" 2026-01-04T02:51:12.315512Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72057594037927937] Send TEvResourcesAllocated to [5:149:2171]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 2 EffectiveProps { ResourceId: 2 ResourcePath: "Root/Res" HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 150 MaxBurstSizeCoefficient: 1 Weight: 1 } AccountingConfig { ReportPeriodMs: 5000 AccountPeriodMs: 1000 CollectPeriodSec: 30 ProvisionedCoefficient: 60 OvershootCoefficient: 1.1 Provisioned { BillingPeriodSec: 60 } OnDemand { BillingPeriodSec: 60 } Overshoot { BillingPeriodSec: 60 } } } StateNotification { Status: SUCCESS } } } 2026-01-04T02:51:12.328040Z node 5 :KESUS_TABLET DEBUG: tx_quoter_resource_update.cpp:75: [72057594037927937] TTxQuoterResourceUpdate::Complete (sender=[5:153:2175], cookie=5186360060132088930) 2026-01-04T02:51:12.328668Z node 5 :KESUS_TABLET TRACE: quoter_runtime.cpp:193: [72057594037927937] Send TEvUpdateConsumptionStateAck to [5:149:2171]. Cookie: 507775774528421723. 
Data: { } 2026-01-04T02:51:12.328730Z node 5 :KESUS_TABLET DEBUG: quoter_runtime.cpp:198: [72057594037927937] Update quoter resources consumption state (sender=[5:149:2171], cookie=507775774528421723) |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsSchemeshardRestart [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless >> AsyncIndexChangeCollector::UpsertToSameKey [GOOD] >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> IncrementalBackup::SimpleBackup [GOOD] >> IncrementalBackup::SimpleRestore |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> KqpService::CloseSessionsWithLoad |96.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitEmptyTwice [GOOD] Test command err: 2026-01-04T02:48:19.715342Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332789406501812:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:19.718252Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:20.069598Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.107871Z node 1 :HIVE WARN: node_info.cpp:25: 
HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:20.107997Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:20.167565Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:20.180074Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:20.331146Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:48:20.476527Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:20.486409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:20.496874Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:48:20.610404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:20.724121Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:20.968016Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.015s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-04T02:48:20.975447Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1155 521 2626)b }, ecr=1.000 2026-01-04T02:48:21.031630Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.004s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1621 647 6413)b }, ecr=1.000 2026-01-04T02:48:21.045328Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.003s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2390 1432 5183)b }, ecr=1.000 2026-01-04T02:48:23.506527Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.85, eph 1} end=Done, 2 blobs 200r (max 200), put Spent{time=0.028s,wait=0.004s,interrupts=1} Part{ 1 pk, lobs 0 +0, (58053 0 0)b }, ecr=1.000 2026-01-04T02:48:23.624606Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.112, eph 1} end=Done, 2 blobs 756r (max 756), put Spent{time=0.018s,wait=0.004s,interrupts=1} Part{ 1 pk, lobs 0 +0, (49618 0 0)b }, ecr=1.000 2026-01-04T02:48:23.750220Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.597, eph 1} end=Done, 2 blobs 3r (max 3), put 
Spent{time=0.016s,wait=0.005s,interrupts=1} Part{ 1 pk, lobs 0 +0, (188 0 0)b }, ecr=1.000 2026-01-04T02:48:23.937415Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.179, eph 2} end=Done, 2 blobs 455r (max 456), put Spent{time=0.051s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (131688 0 0)b }, ecr=1.000 2026-01-04T02:48:24.026487Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.194, eph 1} end=Done, 2 blobs 2r (max 2), put Spent{time=0.068s,wait=0.067s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2026-01-04T02:48:24.068507Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.195, eph 1} end=Done, 2 blobs 2r (max 2), put Spent{time=0.109s,wait=0.094s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2026-01-04T02:48:24.081117Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.196, eph 1} end=Done, 2 blobs 503r (max 503), put Spent{time=0.119s,wait=0.074s,interrupts=1} Part{ 1 pk, lobs 0 +0, (32092 0 0)b }, ecr=1.000 2026-01-04T02:48:24.117596Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.197, eph 1} end=Done, 2 blobs 1506r (max 1506), put Spent{time=0.144s,wait=0.065s,interrupts=1} Part{ 1 pk, lobs 0 +0, (103478 0 0)b }, ecr=1.000 2026-01-04T02:48:24.288485Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.198, eph 2} end=Done, 2 blobs 1509r (max 1512), put Spent{time=0.268s,wait=0.011s,interrupts=1} Part{ 1 pk, lobs 0 +0, (98941 0 0)b }, ecr=1.000 2026-01-04T02:48:24.290324Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.731, eph 1} end=Done, 2 blobs 10001r (max 10001), put Spent{time=0.283s,wait=0.004s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2026-01-04T02:48:24.382071Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.269, eph 3} end=Done, 2 blobs 726r (max 727), put Spent{time=0.049s,wait=0.017s,interrupts=1} Part{ 1 pk, lobs 0 +0, (210005 0 0)b }, ecr=1.000 2026-01-04T02:48:24.414606Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.278, eph 3} end=Done, 2 blobs 2271r (max 2274), put Spent{time=0.048s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (148849 0 0)b }, ecr=1.000 2026-01-04T02:48:24.479932Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1105, eph 2} end=Done, 2 blobs 3r (max 5), put Spent{time=0.006s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (188 0 0)b }, ecr=1.000 2026-01-04T02:48:24.705193Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.351, eph 2} end=Done, 2 blobs 2r (max 3), put Spent{time=0.126s,wait=0.106s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2026-01-04T02:48:24.706387Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.352, eph 2} end=Done, 2 blobs 2r (max 3), put Spent{time=0.127s,wait=0.088s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2026-01-04T02:48:24.722742Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.343, eph 4} end=Done, 2 blobs 989r (max 990), put Spent{time=0.165s,wait=0.066s,interrupts=1} Part{ 1 pk, lobs 0 +0, (285981 0 0)b }, ecr=1.000 2026-01-04T02:48:24.748935Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.350, eph 2} end=Done, 2 blobs 3051r (max 3051), put Spent{time=0.171s,wait=0.060s,interrupts=1} Part{ 1 pk, lobs 0 +0, (209483 0 0)b }, ecr=1.000 2026-01-04T02:48:24.750317Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.353, eph 2} end=Done, 2 blobs 1018r (max 1018), put Spent{time=0.172s,wait=0.045s,interrupts=1} Part{ 1 pk, lobs 0 +0, (64787 0 0)b }, ecr=1.000 2026-01-04T02:48:24.829114Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332789406501812:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:24.831035Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.354, eph 4} end=Done, 2 blobs 3054r (max 3057), put Spent{time=0.252s,wait=0.108s,interrupts=1} Part{ 1 pk, lobs 0 +0, (200176 0 0)b }, ecr=1.000 2026-01-04T02:48:24.842637Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:48:24.903631Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1254, eph 2} end=Done, 2 blobs 10001r (max 10518), put Spent{time=0.258s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2026-01-04T02:48:24.961781Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.431, eph 5} end=Done, 2 blobs 1244r (max 1245), put Spent{time=0.045s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (359616 0 0)b }, ecr=1.000 2026-01-04T02:48:24.982273Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.436, eph 5} end=Done, 2 blobs 3807r (max 3810), put Spent{time=0.052s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (249445 0 0)b }, ecr=1.000 2026-01-04T02:48:24.998127Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1611, eph 3} end=Done, 2 blobs 3r (max 5), put Spent{time=0.009s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (188 0 0)b }, ecr=1.000 2026-01-04T02:48:25.055684Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.500, eph 3} end=Done, 2 blobs 2r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.000 2026-01-04T02:48:25.109476Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.501, eph 3} end=Done, 2 blobs 1528r (max 1528), put Spent{time=0.055s,wait=0.037s,interrupts=1} Part{ 1 pk, lobs 0 +0, (97117 0 0)b }, ecr=1.000 2026-01-04T02:48:25.118962Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.502, eph 6} end=Done, 2 blobs 4584r (max 4587), put Spent{time=0.064s,wait=0.020s,interrupts=1} Part{ 1 pk, lobs 0 +0, (300382 0 0)b }, ecr=1.000 2026-01-04T02:48:25.120174Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.499, eph 3} end=Done, 2 blobs 2r (max 3), put Spent{time=0.064s,wait=0.021s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2026-01-04T02:48:25.121387Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.492, eph 6} end=Done, 2 blobs 1495r (max 1496), put Spent{time=0.083s,wait=0.005s,interrupts=1} Part{ 1 pk, lobs 0 +0, (432148 0 0)b }, ecr=1.000 2026-01-04T02:48:25.157679Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.498, eph 3} end=Done, 2 blobs 4581r (max 4581), put Spent{time=0.103s,wait=0.005s,interrupts=1} Part{ 1 pk, lobs 0 +0, (314405 0 0)b }, ecr=1.000 2026-01-04T02:48:25.199880Z node 1 :OPS_COMPACT INFO: Compact{72057594046644480.2.1770, eph 3} end=Done, 2 blobs 10001r (max 10511), put Spent{time=0.120s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (553660 0 0)b }, ecr=1.000 2026-01-04T02:48:25.240392Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.560, eph 7} end=Done, 2 blobs 1747r (max 1748), put Spent{time=0.044s,wait=0.004s,interrupts=1} Part{ 1 pk, lobs 0 +0, (504922 0 0)b }, ecr=1.000 2026-01-04T02:48:25.258346Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.576, eph 7} end=Done, 2 blobs 5352r (max 5355), put Spent{time=0.048s,wait=0.003s,interrupts=1} Part{ 1 pk, lobs 0 +0, (350680 0 0)b }, ecr=1.000 2026-01-04T02:48:25.271857Z node 
1 :OPS_COMPACT INFO: Compact{72057594046644480.2.2117, eph 4} end=Done, 2 blobs 3r (max 5), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (188 0 0)b }, ecr=1.000 2026-01-04T02:48:25.348717Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.640, eph 4} end=Done, 2 blobs 2r (max 3), put Spent{time=0.015s,wait=0.001s,interrupts=1} Part{ 1 pk, lobs 0 +0, (252 0 0)b }, ecr=1.000 2026-01-04T02:48:25.349911Z node 1 :OPS_COMPACT INFO: Compact{72057594037968897.2.641, eph 4} end=Done, 2 blobs 2r (max 3), put Spent{time=0.016s,wait=0.002s,interrupts=1} Part{ 1 pk, lobs 0 +0, (181 0 0)b }, ecr=1.00 ... WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037888 not found 2026-01-04T02:51:06.717354Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591333505803601722 RawX2: 4503608217307454 } TabletId: 72075186224037895 State: 4 2026-01-04T02:51:06.717427Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037895, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:51:06.717679Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591333505803601716 RawX2: 4503608217307453 } TabletId: 72075186224037894 State: 4 2026-01-04T02:51:06.717711Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037894, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:51:06.717851Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591333505803601715 RawX2: 4503608217307452 } TabletId: 72075186224037893 State: 4 2026-01-04T02:51:06.717901Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037893, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:51:06.718021Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591333501508633939 RawX2: 4503608217307375 } TabletId: 72075186224037889 State: 4 2026-01-04T02:51:06.718047Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037889, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:51:06.718195Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591333505803601539 RawX2: 4503608217307434 } TabletId: 72075186224037890 State: 4 2026-01-04T02:51:06.718226Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037890, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:51:06.718335Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5963: Handle TEvStateChanged, at schemeshard: 72057594046644480, message: Source { RawX1: 7591333505803601544 RawX2: 4503608217307435 } 
TabletId: 72075186224037891 State: 4 2026-01-04T02:51:06.718394Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__state_changed_reply.cpp:78: TTxShardStateChanged DoExecute, datashard informs about state changing, datashardId: 72075186224037891, state: Offline, at schemeshard: 72057594046644480 2026-01-04T02:51:06.718557Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:51:06.718600Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:8 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:51:06.718687Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:51:06.718710Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:7 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:51:06.718772Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:51:06.718783Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:6 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:51:06.718842Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:51:06.718857Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:2 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:51:06.718910Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:51:06.718925Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:3 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:51:06.719003Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:20: SendDeleteRequests, shardsToDelete 1, to hive 72057594037968897, at schemeshard 72057594046644480 2026-01-04T02:51:06.719022Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_shard_deleter.cpp:47: Free shard 72057594046644480:4 hive 72057594037968897 at ss 72057594046644480 2026-01-04T02:51:06.720227Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 8, at schemeshard: 72057594046644480 2026-01-04T02:51:06.720509Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2026-01-04T02:51:06.720748Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 7, at schemeshard: 72057594046644480 2026-01-04T02:51:06.720916Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-01-04T02:51:06.721182Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK 
Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 6, at schemeshard: 72057594046644480 2026-01-04T02:51:06.721353Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-04T02:51:06.721538Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 2, at schemeshard: 72057594046644480 2026-01-04T02:51:06.721670Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-04T02:51:06.721759Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 3, at schemeshard: 72057594046644480 2026-01-04T02:51:06.721907Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 2 2026-01-04T02:51:06.722535Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037895 not found 2026-01-04T02:51:06.723834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:8 2026-01-04T02:51:06.723854Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:8 tabletId 72075186224037895 2026-01-04T02:51:06.723881Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:7 2026-01-04T02:51:06.723886Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:7 tabletId 72075186224037894 2026-01-04T02:51:06.724096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:6 2026-01-04T02:51:06.724108Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:6 tabletId 72075186224037893 2026-01-04T02:51:06.724123Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:2 2026-01-04T02:51:06.724129Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:2 tabletId 72075186224037889 2026-01-04T02:51:06.724140Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:3 2026-01-04T02:51:06.724146Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:3 tabletId 72075186224037890 2026-01-04T02:51:06.778837Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037894 not found 2026-01-04T02:51:06.779150Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037893 not found 2026-01-04T02:51:06.779184Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037889 not found 
2026-01-04T02:51:06.779200Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037890 not found 2026-01-04T02:51:06.782113Z node 2 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 2, TabletId: 72075186224037891 not found 2026-01-04T02:51:06.802008Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:6378: Free tablet reply, message: Status: OK Origin: 72057594037968897 TxId_Deprecated: 0 ShardOwnerId: 72057594046644480 ShardLocalIdx: 4, at schemeshard: 72057594046644480 2026-01-04T02:51:06.802409Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason shard deleted for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 1 2026-01-04T02:51:06.805666Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-04T02:51:06.805728Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 39], at schemeshard: 72057594046644480 2026-01-04T02:51:06.805906Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 38] was 1 2026-01-04T02:51:06.806279Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 72057594046644480:4 2026-01-04T02:51:06.806338Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:4 tabletId 72075186224037891 2026-01-04T02:51:06.806819Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1_UNIQUE_SYNC-pk_types29-all_types29-index29-Uint64-UNIQUE-SYNC] >> IncrementalBackup::BackupRestore [GOOD] >> IncrementalBackup::ComplexBackupBackupCollection >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> CdcStreamChangeCollector::UpsertManyRows [GOOD] >> CdcStreamChangeCollector::UpsertToSameKey >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> IncrementalBackup::E2EBackupCollection [GOOD] >> IncrementalBackup::ForgedMultiShardIncrementalRestore+WithIncremental |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {asan, default-linux-x86_64, 
release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> AsyncIndexChangeCollector::InsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> AsyncIndexChangeCollector::CoveredIndexUpsert [GOOD] >> AsyncIndexChangeCollector::AllColumnsInPk |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> IncrementalBackup::MultiBackup [GOOD] >> IncrementalBackup::MultiShardIncrementalRestore >> CdcStreamChangeCollector::InsertSingleUuidRow [GOOD] >> CdcStreamChangeCollector::IndexAndStreamUpsert >> AsyncIndexChangeCollector::DeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow >> AsyncIndexChangeCollector::UpsertManyRows [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> AsyncIndexChangeCollector::UpsertWithoutIndexedValue [GOOD] >> CdcStreamChangeCollector::DeleteNothing |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> KqpQueryService::CloseSessionsWithLoad |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] 
{BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> IncrementalBackup::SimpleRestore [GOOD] >> IncrementalBackup::SimpleBackupRestoreWithIndex |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBackups [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed >> CdcStreamChangeCollector::UpsertToSameKey [GOOD] >> CdcStreamChangeCollector::UpsertToSameKeyWithImages |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> AsyncIndexChangeCollector::AllColumnsInPk [GOOD] >> AsyncIndexChangeCollector::CoverIndexedColumn >> AsyncIndexChangeCollector::MultiIndexedTableInsertSingleRow [GOOD] >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow >> AsyncIndexChangeCollector::IndexedPrimaryKeyDeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn >> CdcStreamChangeCollector::IndexAndStreamUpsert [GOOD] >> CdcStreamChangeCollector::NewImage >> AsyncIndexChangeCollector::MultiIndexedTableUpdateOneIndexedColumn [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow >> CdcStreamChangeCollector::DeleteNothing [GOOD] >> CdcStreamChangeCollector::DeleteSingleRow >> CdcStreamChangeCollector::PageFaults [GOOD] >> CdcStreamChangeCollector::OldImage |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> KqpKnn::FloatVectorKnnPushdown |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> KqpKnn::Uint8VectorKnnPushdown >> IncrementalBackup::ComplexBackupBackupCollection [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> CdcStreamChangeCollector::UpsertToSameKeyWithImages [GOOD] >> CdcStreamChangeCollector::UpsertModifyDelete |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.3%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> TKesusTest::TestAcquireTimeout [GOOD] >> TKesusTest::TestAcquireSharedBlocked >> TKesusTest::TestAcquireSharedBlocked [GOOD] >> TKesusTest::TestAcquireTimeoutAfterReboot >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] >> IncrementalBackup::ForgedMultiShardIncrementalRestore+WithIncremental [GOOD] >> IncrementalBackup::ForgedMultiShardIncrementalRestore-WithIncremental >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::CoverIndexedColumn [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:51:10.059710Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:10.185105Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:10.185474Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:10.185512Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:10.471582Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:10.471723Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:10.545986Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495068086759 != 1767495068086762 2026-01-04T02:51:10.558487Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:10.604311Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:10.702422Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:10.984426Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:10.997913Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:11.101514Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:11.144185Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:896:2773] 2026-01-04T02:51:11.144459Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:51:11.193732Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:899:2775] 2026-01-04T02:51:11.194011Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:51:11.202807Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:51:11.202959Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:51:11.204605Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:51:11.204687Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:51:11.204737Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:51:11.205062Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:51:11.205277Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:51:11.205369Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2773] in generation 1 2026-01-04T02:51:11.205769Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:51:11.205870Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:51:11.207099Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-04T02:51:11.207168Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-04T02:51:11.207463Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-04T02:51:11.207749Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:51:11.207839Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:51:11.207895Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:929:2775] in generation 1 2026-01-04T02:51:11.218635Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:51:11.256013Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:51:11.256218Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:51:11.256338Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:932:2794] 2026-01-04T02:51:11.256386Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:11.256431Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:51:11.256466Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:11.256725Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:51:11.256769Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:51:11.256826Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:51:11.256889Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:933:2795] 2026-01-04T02:51:11.256917Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:51:11.256938Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:51:11.256961Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037889 2026-01-04T02:51:11.257447Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:51:11.257551Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:51:11.257673Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:11.257742Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:11.257793Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:51:11.257859Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:11.257908Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-04T02:51:11.257970Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-04T02:51:11.258044Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:891:2769], serverId# [1:902:2776], sessionId# [0:0:0] 2026-01-04T02:51:11.258104Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:51:11.258145Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:11.258175Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-04T02:51:11.258205Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:51:11.258360Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:51:11.258612Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:51:11.258709Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:51:11.259166Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:892:2770], serverId# [1:903:2777], sessionId# [0:0:0] 2026-01-04T02:51:11.259641Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:51:11.259826Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-01-04T02:51:11.259884Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-01-04T02:51:11.261701Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:11.261791Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:51:11.272555Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:51:11.272669Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:51:11.273241Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-04T02:51:11.273327Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-01-04T02:51:11.390591Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:950:2806], serverId# [1:953:2809], sessionId# [0:0:0] 2026- ... from 72075186224037889 at tablet 72075186224037889 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:51:24.403209Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:51:24.403256Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:51:24.403869Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:51:24.403908Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:24.404583Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:51:24.404623Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:24.405204Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:24.405234Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:24.405262Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:51:24.405308Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:51:24.405335Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:51:24.405371Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:24.405430Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037890 time 0 2026-01-04T02:51:24.405447Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-04T02:51:24.406088Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037890 step# 2000} 2026-01-04T02:51:24.406127Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037890 2026-01-04T02:51:24.407202Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:24.407282Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:51:24.407341Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-04T02:51:24.407853Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-04T02:51:24.407888Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-04T02:51:24.407921Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037890 2026-01-04T02:51:24.407966Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:51:24.408002Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:51:24.408096Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-04T02:51:24.412691Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:51:24.412963Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2026-01-04T02:51:24.413034Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-04T02:51:24.413544Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:51:24.414059Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:51:24.414465Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:51:24.414516Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:51:24.415516Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2026-01-04T02:51:24.415565Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2026-01-04T02:51:24.425508Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1045:2883], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:24.425608Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1056:2888], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:24.425687Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:24.426698Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1061:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:24.426798Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:24.431289Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:24.437762Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:24.437880Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:51:24.437916Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-04T02:51:24.552518Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:24.552647Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:51:24.552723Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-04T02:51:24.555934Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1059:2891], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:51:24.589738Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1119:2932] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:24.650209Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1224:2975], serverId# [4:1225:2976], sessionId# [0:0:0] 2026-01-04T02:51:24.650688Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037889 2026-01-04T02:51:24.650963Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1767495084650875 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 38b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-04T02:51:24.651179Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1767495084650875 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-04T02:51:24.651324Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037889, row count=1 2026-01-04T02:51:24.662379Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 38 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-04T02:51:24.662472Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:51:24.667021Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1231:2981], serverId# [4:1232:2982], sessionId# [0:0:0] 2026-01-04T02:51:24.672401Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1233:2983], serverId# [4:1234:2984], sessionId# [0:0:0] |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::NewImage [GOOD] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> 
KqpKnn::Int8VectorKnnPushdown |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::BitVectorKnnPushdown |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::ImplicitlyUpdateCoveredColumn [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:51:10.191410Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:10.297585Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:10.297896Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:10.297942Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:10.557025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:10.557142Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:10.624846Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495068058146 != 1767495068058149 2026-01-04T02:51:10.637633Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:10.682175Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:10.775784Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:11.031669Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:11.044465Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:11.148285Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:11.192239Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:896:2773] 2026-01-04T02:51:11.192515Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:51:11.227286Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:899:2775] 2026-01-04T02:51:11.227489Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:51:11.234150Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:51:11.234281Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:51:11.235610Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:51:11.235664Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:51:11.235715Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:51:11.236000Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:51:11.236151Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:51:11.236223Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2773] in generation 1 2026-01-04T02:51:11.236515Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:51:11.236577Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:51:11.237486Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-04T02:51:11.237548Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-04T02:51:11.237582Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-04T02:51:11.237755Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:51:11.237826Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:51:11.237860Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:929:2775] in generation 1 2026-01-04T02:51:11.248526Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:51:11.272336Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:51:11.272527Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:51:11.272654Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:932:2794] 2026-01-04T02:51:11.272688Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:11.272717Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:51:11.272749Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:11.272983Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:51:11.273014Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:51:11.273054Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:51:11.273086Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:933:2795] 2026-01-04T02:51:11.273117Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:51:11.273141Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:51:11.273163Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037889 2026-01-04T02:51:11.273469Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:51:11.273537Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:51:11.273644Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:11.273681Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:11.273724Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:51:11.273761Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:11.273816Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-04T02:51:11.273936Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-04T02:51:11.274012Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:891:2769], serverId# [1:902:2776], sessionId# [0:0:0] 2026-01-04T02:51:11.274055Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:51:11.274087Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:11.274111Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-04T02:51:11.274133Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:51:11.274290Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:51:11.274502Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:51:11.274572Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:51:11.274906Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:892:2770], serverId# [1:903:2777], sessionId# [0:0:0] 2026-01-04T02:51:11.275266Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:51:11.275378Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-01-04T02:51:11.275414Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-01-04T02:51:11.276695Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:11.276794Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:51:11.287462Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:51:11.287577Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:51:11.288087Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-04T02:51:11.288134Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-01-04T02:51:11.405212Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:950:2806], serverId# [1:953:2809], sessionId# [0:0:0] 2026- ... : [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:51:24.963191Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:24.963225Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 40] schema version# 1 2026-01-04T02:51:24.963455Z node 4 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:51:24.963835Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:24.965171Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 2000} 2026-01-04T02:51:24.965248Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:51:24.968485Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:51:24.968547Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:24.968640Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:51:24.968690Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:51:24.968730Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2026-01-04T02:51:24.968789Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:51:24.968838Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:51:24.968914Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:51:24.969555Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:51:24.969594Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit 
heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:24.970779Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:24.970817Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:24.970856Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:51:24.970899Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:51:24.970935Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:51:24.970987Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:24.972256Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:24.972381Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:51:24.975207Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2026-01-04T02:51:24.975280Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-04T02:51:24.976008Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:51:24.976328Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:51:24.976462Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:51:24.976503Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:51:24.986479Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:998:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:24.986619Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1008:2851], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:24.986710Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:24.987686Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1014:2856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:24.987886Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:24.992288Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:24.998879Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:24.999000Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:51:25.115876Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:25.116013Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:51:25.119252Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1012:2854], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:51:25.154006Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1071:2894] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:25.237001Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1140:2925], serverId# [4:1141:2926], sessionId# [0:0:0] 2026-01-04T02:51:25.237476Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037889 2026-01-04T02:51:25.237790Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1767495085237684 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-04T02:51:25.238018Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037889, row count=1 2026-01-04T02:51:25.249054Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-04T02:51:25.249138Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:51:25.319521Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037889 2026-01-04T02:51:25.319867Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1767495085319757 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-04T02:51:25.320041Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 3 Group: 1767495085319757 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 42b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-04T02:51:25.320129Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037889, row count=1 2026-01-04T02:51:25.331079Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 42 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-04T02:51:25.331162Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:51:25.335879Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: 
Server connected at leader tablet# 72075186224037889, clientId# [4:1181:2957], serverId# [4:1182:2958], sessionId# [0:0:0] 2026-01-04T02:51:25.341969Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1183:2959], serverId# [4:1184:2960], sessionId# [0:0:0] |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::IndexedPrimaryKeyInsertSingleRow [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:51:10.293107Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:10.373675Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:10.373989Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:10.374027Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:10.581900Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:10.581976Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:10.652771Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495068019399 != 1767495068019402 2026-01-04T02:51:10.665853Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:10.712919Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:10.799793Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:11.092522Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:11.106028Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:11.207559Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:11.253030Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:896:2773] 2026-01-04T02:51:11.253222Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:51:11.287210Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:899:2775] 2026-01-04T02:51:11.287440Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:51:11.293622Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:51:11.293718Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:51:11.294932Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:51:11.294994Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:51:11.295035Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:51:11.295288Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:51:11.295497Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:51:11.295559Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2773] in generation 1 2026-01-04T02:51:11.295857Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:51:11.295905Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:51:11.296803Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-04T02:51:11.296844Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-04T02:51:11.296871Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-04T02:51:11.297098Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:51:11.297184Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:51:11.297246Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:929:2775] in generation 1 2026-01-04T02:51:11.307870Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:51:11.332904Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:51:11.333072Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:51:11.333163Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:932:2794] 2026-01-04T02:51:11.333195Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:11.333222Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:51:11.333246Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:11.333482Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:51:11.333509Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:51:11.333547Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:51:11.333582Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:933:2795] 2026-01-04T02:51:11.333598Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:51:11.333620Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:51:11.333638Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037889 2026-01-04T02:51:11.333947Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:51:11.334021Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:51:11.334110Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:11.334144Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:11.334179Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:51:11.334202Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:11.334228Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-04T02:51:11.334268Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-04T02:51:11.334310Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:891:2769], serverId# [1:902:2776], sessionId# [0:0:0] 2026-01-04T02:51:11.334344Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:51:11.334367Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:11.334419Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-04T02:51:11.334443Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:51:11.334541Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:51:11.334769Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:51:11.334862Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:51:11.335216Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:892:2770], serverId# [1:903:2777], sessionId# [0:0:0] 2026-01-04T02:51:11.335554Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:51:11.335679Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-01-04T02:51:11.335736Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-01-04T02:51:11.336922Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:11.336976Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:51:11.347641Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:51:11.347753Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:51:11.348206Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-04T02:51:11.348250Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-01-04T02:51:11.463894Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:950:2806], serverId# [1:953:2809], sessionId# [0:0:0] 2026- ... ransaction::Execute at 72075186224037889 2026-01-04T02:51:25.414741Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037889 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:51:25.415134Z node 4 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037889 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:51:25.415504Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:25.416680Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:25.416716Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:51:25.416745Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:51:25.416942Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:51:25.417042Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:51:25.417129Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037889 time 0 2026-01-04T02:51:25.417167Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:51:25.417421Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:25.417469Z node 4 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 40] schema version# 1 2026-01-04T02:51:25.417871Z node 4 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:51:25.418174Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:25.419108Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037889 step# 
2000} 2026-01-04T02:51:25.419181Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:51:25.422202Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:51:25.422270Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:25.422369Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:51:25.422403Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:51:25.422438Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2026-01-04T02:51:25.422492Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037889 at tablet 72075186224037889 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:51:25.422556Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:51:25.422629Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:51:25.423162Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:51:25.423198Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:25.424300Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:25.424333Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:25.424364Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:51:25.424403Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:51:25.424437Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:51:25.424477Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:25.425558Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:25.425651Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:51:25.428040Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2026-01-04T02:51:25.428122Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-04T02:51:25.428787Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got 
TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:51:25.429038Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:51:25.429153Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:51:25.429201Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:51:25.438071Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:998:2846], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:25.438169Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1008:2851], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:25.438261Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:25.439142Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1014:2856], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:25.439308Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:25.442610Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:25.448104Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:25.448193Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:51:25.564887Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:25.565027Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:51:25.567519Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1012:2854], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:51:25.601753Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1071:2894] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:25.671161Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1140:2925], serverId# [4:1141:2926], sessionId# [0:0:0] 2026-01-04T02:51:25.671535Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037889 2026-01-04T02:51:25.671775Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1767495085671677 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-04T02:51:25.671906Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037889, row count=1 2026-01-04T02:51:25.683075Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-04T02:51:25.683171Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:51:25.688305Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1147:2931], serverId# [4:1148:2932], sessionId# [0:0:0] 2026-01-04T02:51:25.694585Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1149:2933], serverId# [4:1150:2934], sessionId# [0:0:0] |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest >> IncrementalBackup::MultiShardIncrementalRestore [GOOD] >> IncrementalBackup::ResetOperationIncrementalBackup >> CdcStreamChangeCollector::OldImage [GOOD] >> CdcStreamChangeCollector::SchemaChanges ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::DeleteSingleRow [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:51:11.754529Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:11.838597Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:11.839001Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:11.839050Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:12.036345Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:12.036438Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:12.086946Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495069780866 != 1767495069780869 2026-01-04T02:51:12.095855Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:12.138043Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:12.232569Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:12.513435Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:12.526413Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:12.629099Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:12.662150Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:896:2773] 2026-01-04T02:51:12.662391Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:51:12.704545Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:899:2775] 2026-01-04T02:51:12.704772Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:51:12.713880Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:51:12.714102Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:51:12.715808Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:51:12.715916Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:51:12.715972Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:51:12.716347Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:51:12.716581Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:51:12.716665Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2773] in generation 1 2026-01-04T02:51:12.717103Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:51:12.717197Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:51:12.718543Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-04T02:51:12.718608Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-04T02:51:12.718650Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-04T02:51:12.718921Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:51:12.719009Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:51:12.719062Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:929:2775] in generation 1 2026-01-04T02:51:12.729871Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:51:12.755467Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:51:12.755715Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:51:12.755839Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:932:2794] 2026-01-04T02:51:12.755881Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:12.755943Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:51:12.755982Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:12.756215Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:51:12.756243Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:51:12.756286Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:51:12.756344Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:933:2795] 2026-01-04T02:51:12.756366Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:51:12.756397Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:51:12.756441Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037889 2026-01-04T02:51:12.756747Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:51:12.756809Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:51:12.756914Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:12.756952Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:12.757001Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:51:12.757047Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:12.757087Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-04T02:51:12.757200Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-04T02:51:12.757278Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:891:2769], serverId# [1:902:2776], sessionId# [0:0:0] 2026-01-04T02:51:12.757320Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:51:12.757372Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:12.757410Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-04T02:51:12.757442Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:51:12.757599Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:51:12.757850Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:51:12.757928Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:51:12.758256Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:892:2770], serverId# [1:903:2777], sessionId# [0:0:0] 2026-01-04T02:51:12.758574Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:51:12.758691Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-01-04T02:51:12.758732Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-01-04T02:51:12.760331Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:12.760435Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:51:12.771249Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:51:12.771377Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:51:12.772086Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-04T02:51:12.772146Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-01-04T02:51:12.889059Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:950:2806], serverId# [1:953:2809], sessionId# [0:0:0] 2026- ... :98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:51:25.815433Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:25.816848Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:25.816893Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:25.816943Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:51:25.817001Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:51:25.817049Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:51:25.817125Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:25.817739Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:25.819436Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:51:25.820063Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:51:25.820134Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:51:25.830574Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:25.831130Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:51:25.831264Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2026-01-04T02:51:25.831312Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 
expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2026-01-04T02:51:25.831343Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2026-01-04T02:51:25.845359Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:51:25.990392Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 2500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 2500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:51:25.990463Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:25.990754Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:25.990816Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:51:25.990865Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2500:281474976715658] in PlanQueue unit at 72075186224037888 2026-01-04T02:51:25.991067Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2500:281474976715658 keys extracted: 0 2026-01-04T02:51:25.991205Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:51:25.991502Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:25.992371Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:26.035790Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2026-01-04T02:51:26.035919Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:26.035963Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:26.036009Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:26.036085Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:51:26.036140Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2026-01-04T02:51:26.036232Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:26.038181Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2026-01-04T02:51:26.038268Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:51:26.045902Z node 4 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1064:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:26.046009Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1075:2898], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:26.046097Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:26.046941Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1079:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:26.047125Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:26.051666Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:26.058426Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:26.213998Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:26.217097Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1078:2901], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:51:26.241344Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1136:2940] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:26.306996Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1167:2957], serverId# [4:1168:2958], sessionId# [0:0:0] 2026-01-04T02:51:26.307410Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2026-01-04T02:51:26.307708Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1767495086307605 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-04T02:51:26.307928Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=1 2026-01-04T02:51:26.318823Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-04T02:51:26.318892Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:26.374372Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2026-01-04T02:51:26.374590Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1767495086374521 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-04T02:51:26.374715Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=1 2026-01-04T02:51:26.385646Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-04T02:51:26.385700Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:26.387033Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1195:2976], serverId# [4:1196:2977], sessionId# [0:0:0] 2026-01-04T02:51:26.391098Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1197:2978], serverId# [4:1198:2979], sessionId# [0:0:0] |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest ------- [TM] {asan, default-linux-x86_64, release} 
ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::NewImage [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:51:10.194381Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:10.289322Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:10.289644Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:10.289684Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:10.513971Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:10.514070Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:10.573019Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495068035596 != 1767495068035599 2026-01-04T02:51:10.584572Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:10.628261Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:10.710575Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:10.994733Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:11.008435Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:11.112917Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:11.149965Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-04T02:51:11.150247Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:51:11.194533Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:51:11.194638Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:51:11.197268Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:51:11.197373Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:51:11.197464Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:51:11.199427Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:51:11.199577Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:51:11.199679Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:51:11.210344Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:51:11.237480Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:51:11.237659Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:51:11.237755Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:51:11.237788Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:11.237844Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:51:11.237886Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:11.238245Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:51:11.238318Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:51:11.238378Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:11.238435Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:11.238482Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:51:11.238518Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:11.238636Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:51:11.239121Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:51:11.239350Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:51:11.239422Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:51:11.240772Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:11.251387Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:51:11.251491Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:51:11.368050Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-04T02:51:11.376476Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:51:11.376559Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:11.376955Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:11.376996Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:51:11.377063Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:51:11.377300Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:51:11.377438Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:51:11.377605Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:11.377717Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:51:11.380193Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:51:11.381276Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:11.384243Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:51:11.384284Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:11.385023Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:51:11.385079Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:11.386173Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:11.386205Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:11.386249Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:51:11.386305Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:51:11.386353Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:51:11.386414Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:11.389566Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:11.390944Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:51:11.390999Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:51:11.391615Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:51:11.406709Z node 1 :TX_DATASHARD DEBU ... :98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:51:25.527809Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:25.529460Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:25.529510Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:25.529565Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:51:25.529635Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:51:25.529690Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:51:25.529821Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:25.530574Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:25.532613Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:51:25.533329Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:51:25.533416Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:51:25.545523Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:25.546255Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:51:25.546417Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2026-01-04T02:51:25.546469Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 expected 
version# 2 at tablet# 72075186224037888 txId# 281474976715658 2026-01-04T02:51:25.546508Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2026-01-04T02:51:25.561135Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:51:25.726959Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 2500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 2500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:51:25.727049Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:25.727382Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:25.727442Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:51:25.727503Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2500:281474976715658] in PlanQueue unit at 72075186224037888 2026-01-04T02:51:25.727712Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2500:281474976715658 keys extracted: 0 2026-01-04T02:51:25.727849Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:51:25.728165Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:25.729119Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:25.774732Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2026-01-04T02:51:25.774857Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:25.774898Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:25.774952Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:25.775058Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:51:25.775126Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2026-01-04T02:51:25.775235Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:25.777379Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2026-01-04T02:51:25.777461Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:51:25.786118Z node 4 :KQP_WORKLOAD_SERVICE WARN: 
scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1064:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:25.786221Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1075:2898], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:25.786299Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:25.787200Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1079:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:25.787382Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:25.792287Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:25.799829Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:25.956934Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:25.960121Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1078:2901], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:51:25.986144Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1136:2940] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:26.061602Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1167:2957], serverId# [4:1168:2958], sessionId# [0:0:0] 2026-01-04T02:51:26.062035Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2026-01-04T02:51:26.062330Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1767495086062238 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 40b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-04T02:51:26.062535Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=1 2026-01-04T02:51:26.073554Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 40 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-04T02:51:26.073649Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:26.167813Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2026-01-04T02:51:26.168149Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1767495086168028 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 18b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-04T02:51:26.168318Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=1 2026-01-04T02:51:26.179300Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 18 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-04T02:51:26.179389Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:26.181556Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1195:2976], serverId# [4:1196:2977], sessionId# [0:0:0] 2026-01-04T02:51:26.188068Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1197:2978], serverId# [4:1198:2979], sessionId# [0:0:0] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldRequestCompactionsConfigRequest [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable |96.4%| [TM] {BAZEL_UPLOAD} 
ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> AsyncIndexChangeCollector::MultiIndexedTableReplaceSingleRow [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:51:10.059702Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:10.202707Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:10.203145Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:10.203213Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:10.471485Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:10.471569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:10.525644Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495068019402 != 1767495068019405 2026-01-04T02:51:10.537187Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:10.580824Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:10.686171Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:10.965199Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:10.977874Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:11.089272Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:11.139299Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:896:2773] 2026-01-04T02:51:11.139540Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:51:11.192651Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037889 actor [1:899:2775] 2026-01-04T02:51:11.192897Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:51:11.201621Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:51:11.201763Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:51:11.203391Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:51:11.203479Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:51:11.203529Z node 1 
:TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:51:11.203876Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:51:11.204067Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:51:11.204137Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2773] in generation 1 2026-01-04T02:51:11.204491Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:51:11.204572Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:51:11.205759Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037889 2026-01-04T02:51:11.205829Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037889 2026-01-04T02:51:11.205879Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037889 2026-01-04T02:51:11.206112Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:51:11.206189Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:51:11.206233Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037889 persisting started state actor id [1:929:2775] in generation 1 2026-01-04T02:51:11.216854Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:51:11.233944Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:51:11.235344Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:51:11.235430Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:932:2794] 2026-01-04T02:51:11.235466Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:11.235505Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:51:11.235532Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:11.236545Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:51:11.236575Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037889 2026-01-04T02:51:11.236612Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037889 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:51:11.236646Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037889, actorId: [1:933:2795] 2026-01-04T02:51:11.236661Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T02:51:11.236682Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037889, state: WaitScheme 2026-01-04T02:51:11.236707Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 
72075186224037889 2026-01-04T02:51:11.236982Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:51:11.237055Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:51:11.237153Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:11.237224Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:11.237257Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:51:11.237290Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:11.237319Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037889 2026-01-04T02:51:11.237412Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037889 2026-01-04T02:51:11.237460Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:891:2769], serverId# [1:902:2776], sessionId# [0:0:0] 2026-01-04T02:51:11.237492Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T02:51:11.237536Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:11.237566Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-04T02:51:11.237594Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T02:51:11.237723Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:51:11.238027Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:51:11.238723Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:51:11.239132Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:892:2770], serverId# [1:903:2777], sessionId# [0:0:0] 2026-01-04T02:51:11.239476Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037889 2026-01-04T02:51:11.239577Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037889 txId 281474976715657 ssId 72057594046644480 seqNo 2:2 2026-01-04T02:51:11.239608Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037889 2026-01-04T02:51:11.240819Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:11.240885Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 
72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:51:11.251550Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:51:11.251656Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:51:11.253021Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037889 2026-01-04T02:51:11.253076Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037889 not sending time cast registration request in state WaitScheme 2026-01-04T02:51:11.369318Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [1:950:2806], serverId# [1:953:2809], sessionId# [0:0:0] 2026- ... , outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:25.985554Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:51:25.985602Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-04T02:51:25.986146Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-04T02:51:25.986181Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037890 2026-01-04T02:51:25.986214Z node 4 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037890 2026-01-04T02:51:25.986255Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037890 at tablet 72075186224037890 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:51:25.986284Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037890 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:51:25.986335Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-04T02:51:25.990409Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:51:25.990646Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037889 state Ready 2026-01-04T02:51:25.990707Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-04T02:51:25.991239Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:51:25.991613Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037890 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:51:25.991973Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:51:25.992022Z node 4 :TX_DATASHARD DEBUG: 
datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:51:25.992990Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037890 state Ready 2026-01-04T02:51:25.993045Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037890 Got TEvSchemaChangedResult from SS at 72075186224037890 2026-01-04T02:51:26.002075Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1045:2883], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:26.002170Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1056:2888], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:26.002237Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:26.003069Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1061:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:26.003129Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:26.007043Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:26.013257Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:26.013359Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:51:26.013398Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-04T02:51:26.128513Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:26.128605Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T02:51:26.128651Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037890 2026-01-04T02:51:26.131099Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1059:2891], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:51:26.164803Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1119:2932] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 46], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:26.228040Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1224:2975], serverId# [4:1225:2976], sessionId# [0:0:0] 2026-01-04T02:51:26.228393Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037889 2026-01-04T02:51:26.228674Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1767495086228580 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-04T02:51:26.228810Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1767495086228580 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-04T02:51:26.228881Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037889, row count=1 2026-01-04T02:51:26.239866Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-04T02:51:26.239970Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:51:26.296006Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037889 2026-01-04T02:51:26.296268Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 3 Group: 1767495086296181 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-04T02:51:26.296465Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 4 Group: 1767495086296181 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: AsyncIndex Source: Unspecified Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-04T02:51:26.296571Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 5 Group: 1767495086296181 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified 
Body: 28b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-04T02:51:26.296622Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 6 Group: 1767495086296181 Step: 2500 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] Kind: AsyncIndex Source: Unspecified Body: 24b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037889 2026-01-04T02:51:26.296675Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037889, row count=1 2026-01-04T02:51:26.307465Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037889, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 4 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 5 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 28 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 }, { Order: 6 PathId: [OwnerId: 72057594046644480, LocalPathId: 41] BodySize: 24 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 1 } 2026-01-04T02:51:26.307519Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T02:51:26.310893Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1273:3015], serverId# [4:1274:3016], sessionId# [0:0:0] 2026-01-04T02:51:26.316765Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [4:1275:3017], serverId# [4:1276:3018], sessionId# [0:0:0] |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOk [GOOD] >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi >> TSchemeshardBackgroundCompactionTest::ShouldCompactServerless [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::VectorSearchKnnPushdown-Nullable |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> IncrementalBackup::SimpleBackupRestoreWithIndex [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental |96.4%| [TM] 
{BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.4%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::VectorSearchKnnPushdown+Nullable >> TNodeBrokerTest::TestRandomActions [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::TestRandomActions [GOOD] Test command err: 2026-01-04T02:49:44.347878Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:44.347952Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) 2026-01-04T02:49:44.701377Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-04T02:49:44.871750Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-04T02:49:44.877101Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-04T02:49:45.850871Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-04T02:49:45.864432Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-04T02:49:45.894295Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-04T02:49:46.381381Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-04T02:49:46.404868Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-04T02:49:46.405309Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-04T02:49:46.406640Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-04T02:49:46.407062Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-04T02:49:46.794564Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-04T02:49:46.795085Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-04T02:49:46.839894Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 
2026-01-04T02:49:46.840229Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2026-01-04T02:49:46.840765Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2026-01-04T02:49:46.916287Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-04T02:49:46.980903Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-04T02:49:46.981388Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-04T02:49:46.981744Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-04T02:49:47.608658Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-04T02:49:47.609143Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-04T02:49:47.623545Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-04T02:49:47.624898Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-04T02:49:47.682668Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-04T02:49:48.287276Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-04T02:49:48.303280Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-04T02:49:48.345556Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-04T02:49:48.392811Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-04T02:49:48.900363Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-04T02:49:48.916668Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-04T02:49:48.951257Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-04T02:49:49.391331Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-04T02:49:49.484334Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2026-01-04T02:49:49.485040Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host7:6: ERROR_TEMP: No free node IDs 2026-01-04T02:49:49.957381Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-04T02:49:49.973071Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 
2026-01-04T02:49:50.384629Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-04T02:49:50.398842Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-04T02:49:50.399823Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-04T02:49:50.400645Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-04T02:49:50.452784Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2026-01-04T02:49:50.489988Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2026-01-04T02:49:50.496503Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host14:13: ERROR_TEMP: No free node IDs 2026-01-04T02:49:50.510920Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host11:10: ERROR_TEMP: No free node IDs 2026-01-04T02:49:50.511456Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-04T02:49:50.560349Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-04T02:49:50.562387Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-04T02:49:50.617552Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2026-01-04T02:49:51.229646Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-04T02:49:51.286586Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1033: WRONG_REQUEST: Unknown node 2026-01-04T02:49:51.301471Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host13:12: ERROR_TEMP: No free node IDs 2026-01-04T02:49:51.389224Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2026-01-04T02:49:51.424087Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2026-01-04T02:49:51.425692Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2026-01-04T02:49:51.440005Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #0: WRONG_REQUEST: Unknown node 2026-01-04T02:49:51.441173Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2026-01-04T02:49:51.441977Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host13:12: ERROR_TEMP: No free node IDs 2026-01-04T02:49:51.443451Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2026-01-04T02:49:52.160642Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host6:5: ERROR_TEMP: No free node IDs 2026-01-04T02:49:52.162447Z node 1 :NODE_BROKER ERROR: 
node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-04T02:49:52.566901Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2026-01-04T02:49:52.567474Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2026-01-04T02:49:52.567994Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2026-01-04T02:49:52.964515Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-04T02:49:52.980337Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-04T02:49:52.980917Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-04T02:49:52.997272Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-04T02:49:53.525272Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-04T02:49:53.525923Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-04T02:49:53.980363Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-04T02:49:53.994528Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-04T02:49:54.023932Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-04T02:49:54.026260Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-04T02:49:54.451777Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-04T02:49:54.889458Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-04T02:49:54.895957Z node 1 :NODE_BROKER ERROR: node_b ... 
lease for node #1025: WRONG_REQUEST: Node has expired 2026-01-04T02:51:23.034149Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host1:0: ERROR_TEMP: No free node IDs 2026-01-04T02:51:23.542336Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1025: WRONG_REQUEST: Unknown node 2026-01-04T02:51:23.563033Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-04T02:51:23.565509Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-04T02:51:23.964653Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-04T02:51:23.969204Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-04T02:51:23.973444Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-04T02:51:23.992599Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Unknown node 2026-01-04T02:51:24.030589Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-04T02:51:24.033118Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-04T02:51:24.035432Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1028: WRONG_REQUEST: Unknown node 2026-01-04T02:51:24.061037Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-04T02:51:24.065569Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-04T02:51:24.070119Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1030: WRONG_REQUEST: Unknown node 2026-01-04T02:51:24.093488Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2026-01-04T02:51:24.095872Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2026-01-04T02:51:24.114838Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-04T02:51:24.203557Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-04T02:51:24.283372Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host11:10: ERROR_TEMP: No free node IDs 2026-01-04T02:51:24.286407Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-04T02:51:24.306373Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-04T02:51:24.308768Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-04T02:51:24.313179Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node 
#1026: WRONG_REQUEST: Node has expired 2026-01-04T02:51:24.315443Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-04T02:51:24.317478Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host15:14: ERROR_TEMP: No free node IDs 2026-01-04T02:51:24.321795Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-04T02:51:24.866109Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-04T02:51:24.884162Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Unknown node 2026-01-04T02:51:24.993469Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2026-01-04T02:51:25.089083Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1032: WRONG_REQUEST: Unknown node 2026-01-04T02:51:25.779775Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-04T02:51:25.838680Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-04T02:51:25.840284Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-04T02:51:25.841659Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2026-01-04T02:51:25.844485Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host3:2: ERROR_TEMP: No free node IDs 2026-01-04T02:51:25.847823Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2026-01-04T02:51:25.852130Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2026-01-04T02:51:25.917343Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2026-01-04T02:51:25.922040Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-04T02:51:25.925408Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-04T02:51:25.926950Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-04T02:51:25.928287Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2026-01-04T02:51:25.929517Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2026-01-04T02:51:25.931021Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host9:8: ERROR_TEMP: No free node IDs 2026-01-04T02:51:25.936870Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-04T02:51:26.017251Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host14:13: ERROR_TEMP: No free 
node IDs 2026-01-04T02:51:26.088702Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2026-01-04T02:51:26.093066Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host2:1: ERROR_TEMP: No free node IDs 2026-01-04T02:51:26.167179Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-04T02:51:26.168835Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host8:7: ERROR_TEMP: No free node IDs 2026-01-04T02:51:26.170578Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-04T02:51:26.173377Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host14:13: ERROR_TEMP: No free node IDs 2026-01-04T02:51:26.256322Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-04T02:51:26.258057Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2026-01-04T02:51:26.262595Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2026-01-04T02:51:26.264194Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1024: WRONG_REQUEST: Node has expired 2026-01-04T02:51:26.266851Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host4:3: ERROR_TEMP: No free node IDs 2026-01-04T02:51:27.418752Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-04T02:51:27.423782Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-04T02:51:27.425934Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1027: WRONG_REQUEST: Unknown node 2026-01-04T02:51:27.630067Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-04T02:51:27.637685Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-04T02:51:27.639862Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host7:6: ERROR_TEMP: No free node IDs 2026-01-04T02:51:27.643682Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-04T02:51:27.644942Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host5:4: ERROR_TEMP: No free node IDs 2026-01-04T02:51:27.647092Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-04T02:51:27.649049Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host14:13: ERROR_TEMP: No free node IDs 2026-01-04T02:51:27.650793Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-04T02:51:27.652099Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host7:6: ERROR_TEMP: No free node IDs 2026-01-04T02:51:27.653442Z 
node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-04T02:51:27.733248Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host5:4: ERROR_TEMP: No free node IDs 2026-01-04T02:51:27.802425Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1026: WRONG_REQUEST: Node has expired 2026-01-04T02:51:27.807889Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host5:4: ERROR_TEMP: No free node IDs 2026-01-04T02:51:27.809897Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host10:9: ERROR_TEMP: No free node IDs 2026-01-04T02:51:27.811606Z node 1 :NODE_BROKER ERROR: node_broker__register_node.cpp:40: Cannot register node host7:6: ERROR_TEMP: No free node IDs 2026-01-04T02:51:28.533017Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1029: WRONG_REQUEST: Unknown node 2026-01-04T02:51:28.571850Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node 2026-01-04T02:51:28.574389Z node 1 :NODE_BROKER ERROR: node_broker__extend_lease.cpp:31: Cannot extend lease for node #1031: WRONG_REQUEST: Unknown node |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> TNodeBrokerTest::Test999NodesSubscribers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::UpsertModifyDelete [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:51:13.587426Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:13.676854Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:13.677173Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:13.677217Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:13.894452Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:13.894576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:13.964258Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495071605063 != 1767495071605066 2026-01-04T02:51:13.980434Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:14.027984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:14.106971Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:14.390004Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:14.403487Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:14.507104Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:14.539146Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-04T02:51:14.539383Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:51:14.573592Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:51:14.573712Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:51:14.575371Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:51:14.575448Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:51:14.575499Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:51:14.575843Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:51:14.575955Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:51:14.576087Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:51:14.586738Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:51:14.612009Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:51:14.612311Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:51:14.612441Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:51:14.612474Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:14.612518Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:51:14.612553Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:14.612995Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:51:14.613066Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:51:14.613128Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:14.613171Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:14.613227Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:51:14.613267Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:14.613362Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:51:14.613773Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:51:14.614104Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:51:14.614203Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:51:14.615682Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:14.626448Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:51:14.626558Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:51:14.743936Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-04T02:51:14.748973Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:51:14.749065Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:14.749499Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:14.749587Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:51:14.749659Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:51:14.749994Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:51:14.750167Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:51:14.750418Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:14.750500Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:51:14.752808Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:51:14.753298Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:14.755995Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:51:14.756051Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:14.756966Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:51:14.757037Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:14.758489Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:14.758540Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:14.758599Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:51:14.758674Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:51:14.758731Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:51:14.758815Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:14.763794Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:14.765566Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:51:14.765649Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:51:14.766546Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:51:14.779090Z node 1 :TX_DATASHARD DEBU ... Heartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:28.670701Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:28.672914Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:51:28.673704Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:51:28.673785Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:51:28.686182Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:28.686818Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:51:28.686968Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715658 ssId 72057594046644480 seqNo 2:2 2026-01-04T02:51:28.687022Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 2 current version# 1 expected version# 2 at tablet# 72075186224037888 txId# 281474976715658 2026-01-04T02:51:28.687061Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715658 at tablet 72075186224037888 2026-01-04T02:51:28.701661Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:51:28.854153Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715658 at step 2500 at tablet 72075186224037888 { Transactions { TxId: 281474976715658 AckTo { RawX1: 0 RawX2: 0 } } Step: 2500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:51:28.854229Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:28.854456Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:28.854499Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 
2026-01-04T02:51:28.854541Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2500:281474976715658] in PlanQueue unit at 72075186224037888 2026-01-04T02:51:28.854709Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2500:281474976715658 keys extracted: 0 2026-01-04T02:51:28.854812Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:51:28.855085Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:28.855894Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:28.898569Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2026-01-04T02:51:28.898689Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:28.898725Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:28.898766Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:28.898835Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:51:28.898894Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2026-01-04T02:51:28.898982Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:28.900926Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2026-01-04T02:51:28.901026Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:51:28.909244Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1064:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:28.909333Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1075:2898], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:28.909407Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:28.910281Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1079:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:28.910442Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:28.914126Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:28.920095Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:29.075672Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:29.078632Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1078:2901], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:51:29.103716Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1136:2940] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:29.162985Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1167:2957], serverId# [4:1168:2958], sessionId# [0:0:0] 2026-01-04T02:51:29.163348Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2026-01-04T02:51:29.163581Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1767495089163493 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-04T02:51:29.163744Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=1 2026-01-04T02:51:29.174800Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-04T02:51:29.174880Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:29.232474Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037888 2026-01-04T02:51:29.232794Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 1767495089232694 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 50b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-04T02:51:29.232947Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037888, row count=1 2026-01-04T02:51:29.244113Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 50 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-04T02:51:29.244199Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:29.349371Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:5] at 72075186224037888 2026-01-04T02:51:29.349712Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 3 Group: 1767495089349598 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 34b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-04T02:51:29.349896Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: 
Executed write operation for [0:5] at 72075186224037888, row count=1 2026-01-04T02:51:29.361123Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 3 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 34 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-04T02:51:29.361217Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:29.363481Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1214:2987], serverId# [4:1215:2988], sessionId# [0:0:0] 2026-01-04T02:51:29.369110Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1216:2989], serverId# [4:1217:2990], sessionId# [0:0:0] |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] [GOOD] >> KqpService::CloseSessionsWithLoad [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::Test999NodesSubscribers [GOOD] Test command err: 2026-01-04T02:49:22.933168Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:22.934256Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> IncrementalBackup::ResetOperationIncrementalBackup [GOOD] >> IncrementalBackup::ReplaceIntoIncrementalBackup >> TNodeBrokerTest::Test1001NodesSubscribers [GOOD] >> test_select.py::TestSelect::test_select[table_ttl_Date-pk_types18-all_types18-index18-Date--] >> test_select.py::TestSelect::test_select[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> CdcStreamChangeCollector::SchemaChanges [GOOD] >> test_select.py::TestPgSelect::test_select[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] >> test_select.py::TestSelect::test_select[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::Test1001NodesSubscribers [GOOD] Test command err: 2026-01-04T02:49:23.371205Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:49:23.371283Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... 
waiting for nameservers are connected (done) |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> IncrementalBackup::ForgedMultiShardIncrementalRestore-WithIncremental [GOOD] >> IncrementalBackup::E2EMultipleBackupRestoreCycles ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> KqpService::CloseSessionsWithLoad [GOOD] Test command err: Trying to start YDB, gRPC: 24428, MsgBus: 12327 2026-01-04T02:51:15.262876Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333543672728969:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:51:15.263179Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:51:15.559725Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:15.559809Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:15.577635Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:51:15.591558Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:15.609598Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:15.800628Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:51:15.835207Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:51:15.835326Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:51:15.835335Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:51:15.835422Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:51:16.270314Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:16.358595Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:16.424070Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:16.561636Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo 
unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:16.701121Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:16.761396Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:17.908819Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333552262665398:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:17.908957Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:17.909225Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333552262665407:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:17.909293Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:18.522337Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:18.553966Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:18.580602Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:18.605953Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:18.634660Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:18.664983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:18.695516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:18.739671Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:18.827813Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333556557633574:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:18.827909Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:18.827970Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333556557633579:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:18.828073Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333556557633581:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:18.828123Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:18.835438Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:18.844797Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333556557633583:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:51:18.929521Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333556557633636:3773] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:20.262859Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591333543672728969:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:51:20.262958Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:51:20.718207Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=MzI4YTAxODItZGZiNGFhZTYtYjc4YjEyYTktYTc5NjI0Njc=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# MzI4YTAxODItZGZiNGFhZTYtYjc4YjEyYTktYTc5NjI0Njc= temp_dir_name# 1fadf1fe-4c18-1652-7e0f-66a84afc0c5e trace_id# 2026-01-04T02:51:20.718319Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=MzI4YTAxODItZGZiNGFhZTYtYjc4YjEyYTktYTc5NjI0Njc=, ActorId: ... nection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":23}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":20,\"Plans\":[{\"PlanNodeId\":19,\"Plans\":[{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":17,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":16,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":16}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":18}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":15,\"Plans\":[{\"PlanNodeId\":14,\"Plans\":[{\"PlanNodeId\":13,\"Plans\":[{\"PlanNodeId\":12,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":11,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":11}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":13}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node 
Type\":\"ResultSet_1_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":10,\"Plans\":[{\"PlanNodeId\":9,\"Plans\":[{\"PlanNodeId\":8,\"Plans\":[{\"PlanNodeId\":7,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":6,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":6}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":8}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":4,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Sequential\",\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":1}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"UnionAll\",\"PlanNodeType\":\"Connection\"}],\"Operators\":[{\"Inputs\":[{\"ExternalPlanNodeId\":3}],\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/EightShard\",\"reads\":[{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"limit\":\"1001\",\"type\":\"Scan\"}],\"writes\":[{\"columns\":[\"Key\",\"Text\"],\"type\":\"MultiUpsert\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Name\":\"Upsert\",\"Table\":\"EightShard\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Upsert\"},{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":6,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node 
Type\":\"ResultSet_1_0\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":7,\"Plans\":[{\"PlanNodeId\":8,\"Plans\":[{\"PlanNodeId\":10,\"Plans\":[{\"PlanNodeId\":11,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":12,\"Plans\":[{\"PlanNodeId\":13,\"Plans\":[{\"PlanNodeId\":15,\"Plans\":[{\"PlanNodeId\":16,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":17,\"Plans\":[{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":20,\"Plans\":[{\"PlanNodeId\":21,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":22,\"Plans\":[{\"PlanNodeId\":23,\"Plans\":[{\"PlanNodeId\":25,\"Plans\":[{\"PlanNodeId\":26,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":27,\"Plans\":[{\"PlanNodeId\":28,\"Plans\":[{\"PlanNodeId\":30,\"Plans\":[{\"PlanNodeId\":31,\"Operators\":[{\"E-Size\":\"0\",\"ReadLimit\":\"1001\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Operators\":[{\"Name\":\"Limit\",\"Limit\":\"1001\"}],\"Node Type\":\"Limit\"}],\"Node Type\":\"ResultSet_1_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"4b4f94f2-c71ae5cd-d9ea9c61-ccf25e53","version":"1.0"} 2026-01-04T02:51:24.504393Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7591333573737503457:2758], duration: 1.965202s 2026-01-04T02:51:24.504429Z node 1 
:KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7591333573737503457:2758], owner: [1:7591333552262665367:2384], status: SUCCESS, issues: , uid: 4b4f94f2-c71ae5cd-d9ea9c61-ccf25e53 2026-01-04T02:51:24.504556Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7591333565147568534:2533], status: SUCCESS, compileActor: [1:7591333573737503457:2758] 2026-01-04T02:51:24.504632Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:901: Send response, sender: [1:7591333565147568534:2533], queryUid: 4b4f94f2-c71ae5cd-d9ea9c61-ccf25e53, status:SUCCESS still compiling... 0 still active sessions ... 0 received non-success status for session 16 received non-success status for session 7 received non-success status for session 14 received non-success status for session 0 received non-success status for session 1 received non-success status for session 3 received non-success status for session 10 received non-success status for session 11 received non-success status for session 12 received non-success status for session 30 received non-success status for session 15 received non-success status for session 34 received non-success status for session 17 received non-success status for session 18 received non-success status for session 19 received non-success status for session 4 received non-success status for session 5 received non-success status for session 6 received non-success status for session 8 received non-success status for session 26 received non-success status for session 27 received non-success status for session 28 received non-success status for session 46 received non-success status for session 31 received non-success status for session 32 received non-success status for session 33 received non-success status for session 37 received non-success status for session 35 received non-success status for session 20 received non-success status for session 21 received non-success status for session 22 received non-success status for session 23 received non-success status for session 24 received non-success status for session 25 received non-success status for session 43 received non-success status for session 44 received non-success status for session 13 received non-success status for session 47 received non-success status for session 48 received non-success status for session 49 received non-success status for session 36 received non-success status for session 39 received non-success status for session 40 received non-success status for session 41 received non-success status for session 29 received non-success status for session 2 received non-success status for session 38 received non-success status for session 45 received non-success status for session 42 received non-success status for session 9 2026-01-04T02:51:30.510544Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:51:30.510592Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> test_select.py::TestPgSelect::test_as_table >> test_select.py::TestSelect::test_select[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] >> test_select.py::TestPgSelect::test_select[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] |96.5%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/datashard/select/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_change_collector/unittest >> CdcStreamChangeCollector::SchemaChanges [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:51:10.359676Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:10.470231Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:10.470671Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:10.470727Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:10.702910Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:10.703001Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:10.749200Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495068040570 != 1767495068040573 2026-01-04T02:51:10.757364Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:10.802379Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:10.890761Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:11.151308Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:11.163958Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:11.264692Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:11.291473Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T02:51:11.291651Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T02:51:11.337319Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T02:51:11.337402Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T02:51:11.338585Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T02:51:11.338650Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T02:51:11.338691Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T02:51:11.338932Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T02:51:11.339024Z node 1 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T02:51:11.339091Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T02:51:11.349719Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T02:51:11.377400Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T02:51:11.377572Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T02:51:11.377664Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T02:51:11.377696Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:11.377728Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T02:51:11.377761Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:11.378213Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T02:51:11.378296Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T02:51:11.378381Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:11.378440Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:11.378488Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T02:51:11.378530Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:11.378640Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T02:51:11.379029Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:51:11.379254Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T02:51:11.379333Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T02:51:11.380994Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:11.391557Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:51:11.391653Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T02:51:11.507196Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:917:2783], serverId# 
[1:919:2785], sessionId# [0:0:0] 2026-01-04T02:51:11.510317Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715657 at step 2000 at tablet 72075186224037888 { Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 2000 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:51:11.510376Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:11.510635Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:11.510668Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:51:11.510712Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T02:51:11.510952Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T02:51:11.511081Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:51:11.511243Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:11.511298Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T02:51:11.512627Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T02:51:11.512931Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:11.514610Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T02:51:11.514664Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:11.515241Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T02:51:11.515295Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:11.516305Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:11.516336Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T02:51:11.516370Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T02:51:11.516440Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:51:11.516489Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T02:51:11.516613Z node 1 :TX_DATASHARD DEBUG: 
cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:11.519628Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:11.520742Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T02:51:11.520787Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:51:11.521435Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T02:51:11.529120Z node 1 :TX_DATASHARD DEBU ... 38], version# 2, step# 2500, txId# 281474976715658, at tablet# 72075186224037888 2026-01-04T02:51:31.828577Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:31.876501Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2500} 2026-01-04T02:51:31.876651Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:31.876707Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:31.876764Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:31.876863Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2500 : 281474976715658] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:51:31.876938Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715658 state Ready TxInFly 0 2026-01-04T02:51:31.877061Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:31.879706Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715658 datashard 72075186224037888 state Ready 2026-01-04T02:51:31.879828Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:51:31.889909Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1064:2893], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:31.890026Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1075:2898], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:31.890141Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:31.891156Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:1079:2902], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:31.891411Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:31.896879Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:31.905652Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:32.066443Z node 4 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T02:51:32.070145Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:1078:2901], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715659 completed, doublechecking } 2026-01-04T02:51:32.096789Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1136:2940] txid# 281474976715660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 44], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:32.236264Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1167:2957], serverId# [4:1168:2958], sessionId# [0:0:0] 2026-01-04T02:51:32.236792Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:3] at 72075186224037888 2026-01-04T02:51:32.237036Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 1 Group: 1767495092236920 Step: 3000 TxId: 18446744073709551615 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcDataChange Source: Unspecified Body: 32b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-04T02:51:32.237251Z node 4 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:3] at 72075186224037888, row count=1 2026-01-04T02:51:32.248530Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 1 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 32 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 2 } 2026-01-04T02:51:32.248647Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:32.306108Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_table.cpp:187) 2026-01-04T02:51:32.309462Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T02:51:32.309764Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715662 ssId 72057594046644480 seqNo 2:3 2026-01-04T02:51:32.309865Z node 4 :TX_DATASHARD INFO: check_scheme_tx_unit.cpp:235: Check scheme tx, proposed scheme version# 3 current version# 2 expected version# 3 at tablet# 72075186224037888 txId# 281474976715662 2026-01-04T02:51:32.309928Z node 4 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715662 at tablet 72075186224037888 2026-01-04T02:51:32.321108Z node 4 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T02:51:32.428461Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976715662 at step 3500 at tablet 72075186224037888 { Transactions { TxId: 281474976715662 AckTo { RawX1: 0 RawX2: 0 } } Step: 3500 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T02:51:32.428534Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:32.428855Z node 4 :TX_DATASHARD DEBUG: 
datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:32.428905Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T02:51:32.428966Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [3500:281474976715662] in PlanQueue unit at 72075186224037888 2026-01-04T02:51:32.429291Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 3500:281474976715662 keys extracted: 0 2026-01-04T02:51:32.429427Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T02:51:32.429577Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T02:51:32.429664Z node 4 :TX_DATASHARD INFO: alter_table_unit.cpp:145: Trying to ALTER TABLE at 72075186224037888 version 3 2026-01-04T02:51:32.430595Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1849: Add schema snapshot: pathId# [OwnerId: 72057594046644480, LocalPathId: 38], version# 3, step# 3500, txId# 281474976715662, at tablet# 72075186224037888 2026-01-04T02:51:32.430770Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:883: PersistChangeRecord: record: { Order: 2 Group: 0 Step: 3500 TxId: 281474976715662 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] Kind: CdcSchemaChange Source: Unspecified Body: 0b TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 3 LockId: 0 LockOffset: 0 }, at tablet: 72075186224037888 2026-01-04T02:51:32.431252Z node 4 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:51:32.433318Z node 4 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 3500} 2026-01-04T02:51:32.433417Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:32.435238Z node 4 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T02:51:32.435325Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 3 } 2026-01-04T02:51:32.435418Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3500 : 281474976715662] from 72075186224037888 at tablet 72075186224037888 send result to client [4:399:2398], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T02:51:32.435501Z node 4 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2026-01-04T02:51:32.435622Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:1179: EnqueueChangeRecords: at tablet: 72075186224037888, records: { Order: 2 PathId: [OwnerId: 72057594046644480, LocalPathId: 39] BodySize: 0 TableId: [OwnerId: 72057594046644480, LocalPathId: 38] SchemaVersion: 3 } 2026-01-04T02:51:32.435676Z node 4 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T02:51:32.438921Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 
281474976715662 datashard 72075186224037888 state Ready 2026-01-04T02:51:32.439012Z node 4 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T02:51:32.445744Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1210:2995], serverId# [4:1211:2996], sessionId# [0:0:0] 2026-01-04T02:51:32.462306Z node 4 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [4:1213:2998], serverId# [4:1214:2999], sessionId# [0:0:0] |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_change_collector/unittest |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |96.5%| [TA] $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_change_collector/test-results/unittest/{meta.json ... results_accumulator.log} >> test_select.py::TestSelect::test_select[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] >> IncrementalBackup::SimpleBackupBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> IncrementalBackup::ComplexRestoreBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental >> KqpKnn::Uint8VectorKnnPushdown [GOOD] >> KqpKnn::FloatVectorKnnPushdown [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] >> KqpPg::TableDeleteAllData+useSink [GOOD] >> KqpPg::TableDeleteAllData-useSink >> IncrementalBackup::ReplaceIntoIncrementalBackup [GOOD] >> IncrementalBackup::ResetVsUpsertColumnStateSerialization ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::Uint8VectorKnnPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 20457, MsgBus: 32235 ... waiting for SysViewsRoster update finished 2026-01-04T02:51:25.757790Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:25.891767Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:25.892266Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:25.892326Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:26.214937Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:26.215046Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:26.292711Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495083509730 != 1767495083509733 2026-01-04T02:51:26.316208Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:26.365640Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:26.507547Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:27.113402Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:51:27.113490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:51:27.113548Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:51:27.113764Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:51:27.126172Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:27.438404Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:27.534397Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:27.859613Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:28.223418Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:28.471250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:29.303237Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1902:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:29.303354Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:29.304015Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1975:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:29.304086Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:29.399899Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:29.609662Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:29.850482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:30.122813Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:30.357206Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:30.600889Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:30.856176Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:31.137212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:31.483502Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2787:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:31.483649Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:31.484241Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2791:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:31.484339Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:31.484438Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2794:4176], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:31.493395Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:31.656273Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2796:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:51:31.716262Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2856:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:33.258707Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::FloatVectorKnnPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 10242, MsgBus: 9483 ... waiting for SysViewsRoster update finished 2026-01-04T02:51:25.994121Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:26.111586Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:26.112107Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:26.112179Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:26.402996Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:26.403108Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:26.457836Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495083509754 != 1767495083509757 2026-01-04T02:51:26.470357Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:26.513359Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:26.637776Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:27.112366Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:51:27.112468Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:51:27.112504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:51:27.112694Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:51:27.124489Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:27.427503Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:27.534389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:27.826090Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:28.158034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:28.419130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:29.304105Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1902:3509], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:29.304309Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:29.305828Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1975:3528], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:29.305953Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:29.399921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:29.609555Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:29.841258Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:30.112504Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:30.355481Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:30.629040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:30.886570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:31.158694Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:31.488474Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2787:4169], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:31.488567Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:31.488844Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2791:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:31.488913Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:31.488983Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:31.493362Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:31.654608Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2796:4178], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:51:31.745448Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2856:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:33.288181Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> TCdcStreamTests::MeteringServerless [GOOD] >> TCdcStreamTests::MeteringDedicated |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> test_select.py::TestPgSelect::test_select[table_all_types-pk_types3-all_types3-index3---] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] [GOOD] >> KqpKnn::Int8VectorKnnPushdown [GOOD] >> KqpKnn::BitVectorKnnPushdown [GOOD] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_as_table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::Int8VectorKnnPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 4847, MsgBus: 7794 ... waiting for SysViewsRoster update finished 2026-01-04T02:51:29.196182Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:29.277278Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:29.277638Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:29.277682Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:29.525320Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:29.525407Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:29.574810Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495086973374 != 1767495086973377 2026-01-04T02:51:29.583329Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:29.624436Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:29.739491Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:30.059362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:51:30.059470Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:51:30.059523Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:51:30.059825Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:51:30.072526Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:30.352304Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:30.409243Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:30.621307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:30.940478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:31.206773Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:31.930724Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1903:3508], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:31.930843Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:31.931621Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1976:3527], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:31.931721Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:31.956263Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:32.150776Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:32.393971Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:32.618448Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:32.901664Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:33.157851Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:33.422298Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:33.709941Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:34.067570Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2788:4168], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:34.067711Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:34.068163Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2792:4172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:34.068234Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:34.068317Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2795:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:34.074358Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:34.228901Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2797:4177], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:51:34.291716Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2857:4218] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:35.784322Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::BitVectorKnnPushdown [GOOD] Test command err: Trying to start YDB, gRPC: 4970, MsgBus: 64923 ... waiting for SysViewsRoster update finished 2026-01-04T02:51:29.285198Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:29.372707Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:29.373052Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:29.373105Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:29.582590Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:29.582722Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:29.649187Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495087026244 != 1767495087026247 2026-01-04T02:51:29.664827Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:29.710476Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:29.824179Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:30.142710Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:51:30.142802Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:51:30.142837Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:51:30.143057Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:51:30.155335Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:30.384976Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:30.457052Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:30.685778Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:31.046295Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:31.328708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:32.087048Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1901:3508], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:32.087233Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:32.088322Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1974:3527], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:32.088560Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:32.121187Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:32.316931Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:32.547796Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:32.770650Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:33.056175Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:33.297862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:33.586228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:33.861939Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:34.195764Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2785:4168], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:34.195867Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:34.196133Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2790:4173], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:34.196285Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2791:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:34.196430Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:34.200557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:34.348554Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2794:4177], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:51:34.413324Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2854:4218] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:36.026816Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> test_select.py::TestSelect::test_select[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] >> IncrementalBackup::SimpleBackupBackupCollection-WithIncremental [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] [GOOD] |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_select.py::TestSelect::test_as_table >> test_select.py::TestPgSelect::test_select[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] >> KqpQueryService::CloseSessionsWithLoad [GOOD] >> test_select.py::TestSelect::test_select[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/close_with_load/unittest >> KqpQueryService::CloseSessionsWithLoad [GOOD] Test command err: Trying to start YDB, gRPC: 12343, MsgBus: 21188 2026-01-04T02:51:18.976786Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591333558152128660:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:51:18.976857Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:51:19.148372Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:51:19.153349Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:19.153446Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:19.170447Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:19.209508Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:19.210890Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591333558152128632:2081] 1767495078975261 != 1767495078975264 
2026-01-04T02:51:19.285882Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:51:19.285902Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:51:19.285907Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:51:19.286008Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:51:19.383131Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:51:19.604708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:19.664630Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:19.764141Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:19.903846Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:19.955710Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:20.055939Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:21.437170Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333571037032400:2398], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:21.437243Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:21.437494Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333571037032410:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:21.437555Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:21.707224Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:21.732007Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:21.753977Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:21.775519Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:21.796914Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:21.823463Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:21.848247Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:21.883252Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:21.947121Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333571037033276:2481], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:21.947181Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:21.947231Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333571037033281:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:21.947267Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591333571037033283:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:21.947311Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:21.949863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:21.958105Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591333571037033285:2486], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T02:51:22.049358Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591333575332000632:3767] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:23.276598Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1&id=ZTNlN2Y4MzAtZjkyNmM3NTAtYWUwMTljNzUtODJlNjM1Yzk=, ActorId: [0:0:0], ActorState: unknown state, Create session actor ydb_session_id# ZTNlN2Y4MzAtZjkyNmM3NTAtYWUwMTljNzUtODJlNjM1Yzk= temp_dir_name# 7798ff86-4156-1522-e511-649e6d7b8ca2 trace_id# 2026-01-04T02:51:23.276738Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:234} SessionId: ydb://session/3?node_id=1&id=ZTNlN2Y4MzAtZjkyNmM3NTAtYWUwMTljNzUtODJlNjM1Yzk=, ActorId: [1:7591333579626968231:2529], ActorState: unknown state, Session actor bootstrapped trace_id# 2026-01-04T02:51:23.292807Z node 1 :KQP_SESSION DEBUG: {KQPSA@kqp_session_actor.cpp:229} SessionId: ydb://session/3?node_id=1& ... )))))))))) )))) ) trace_id# 2026-01-04T02:51:39.367478Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 1 from task: 1 with index: 0 2026-01-04T02:51:39.367495Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 2 from task: 2 with index: 0 2026-01-04T02:51:39.367506Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 3 from task: 3 with index: 0 2026-01-04T02:51:39.367516Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 4 from task: 4 with index: 0 2026-01-04T02:51:39.367527Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 5 from task: 5 with index: 0 2026-01-04T02:51:39.367536Z node 1 :KQP_EXECUTER DEBUG: kqp_tasks_graph.cpp:340: Create result channelId: 6 from task: 6 with index: 0 2026-01-04T02:51:39.373631Z node 1 :KQP_EXECUTER DEBUG: {KQPLIT@kqp_literal_executer.cpp:293} ActorId: [0:0:0] TxId: 0. Ctx: { TraceId: , Database: , SessionId: , PoolId: , IsStreamingQuery: 0}. 
Execution is complete results_size# 6 trace_id# 2026-01-04T02:51:39.397612Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:425: [[1:7591333635461544373:3039]]: Built the replay message {"guc_settings":{"session_settings":{"ydb_user":"","ydb_database":"Root"},"settings":{"ydb_user":"","ydb_database":"Root"},"rollback_settings":{}},"query_database":"/Root","query_parameter_types":{},"table_metadata":"[\"CAESAmRiGhAvUm9vdC9FaWdodFNoYXJkIgAqDAiAgpSEgICAgAEQJzABOAFKMwoERGF0YRADGgVJbnQzMiABMABCAEgAUgBYAGIWCP///////////wEQ////////////AUozCgNLZXkQARoGVWludDY0IAQwAEIASABSAFgAYhYI////////////ARD///////////8BSjUKBFRleHQQAhoGU3RyaW5nIIEgMABCAEgAUgBYAGIWCP///////////wEQ////////////AVIDS2V5aABwAHgA\"]","table_meta_serialization_type":1,"created_at":"1767495099","query_text":"\\n SELECT Key, Text, Data FROM `/Root/EightShard` WHERE Key=-1264944544 + 0;\\n SELECT Key, Data, Text FROM `/Root/EightShard` WHERE Key=-1264944544 + 1;\\n SELECT Text, Key, Data FROM `/Root/EightShard` WHERE Key=-1264944544 + 2;\\n SELECT Text, Data, Key FROM `/Root/EightShard` WHERE Key=-1264944544 + 3;\\n SELECT Data, Key, Text FROM `/Root/EightShard` WHERE Key=-1264944544 + 4;\\n SELECT Data, Text, Key FROM `/Root/EightShard` WHERE Key=-1264944544 + 5;\\n\\n UPSERT INTO `/Root/EightShard` (Key, Text) VALUES\\n (1284010360ul, \\\"New\\\");\\n ","query_type":"QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY","query_syntax":"1","query_cluster":"db","query_plan":"{\"Plan\":{\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":21,\"Plans\":[{\"PlanNodeId\":20,\"Operators\":[{\"Inputs\":[],\"Iterator\":\"[{Key: 1284010360,Text: \\\"New\\\"}]\",\"Name\":\"Iterator\"}],\"Node Type\":\"ConstantExpr\"}],\"Operators\":[{\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"Name\":\"Upsert\",\"Table\":\"EightShard\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Sink\"},{\"PlanNodeId\":18,\"Plans\":[{\"PlanNodeId\":17,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":16,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_0\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":15,\"Plans\":[{\"PlanNodeId\":14,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":13,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":12,\"Plans\":[{\"PlanNodeId\":11,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":10,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node 
Type\":\"ResultSet_1_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":9,\"Plans\":[{\"PlanNodeId\":8,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":7,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":6,\"Plans\":[{\"PlanNodeId\":5,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":4,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":3,\"Plans\":[{\"PlanNodeId\":2,\"Plans\":[{\"Tables\":[\"EightShard\"],\"PlanNodeId\":1,\"Operators\":[{\"Scan\":\"Parallel\",\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"Inputs\":[],\"Path\":\"\\/Root\\/EightShard\",\"ReadRangesPointPrefixLen\":\"1\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"Collect\"}],\"Node Type\":\"ResultSet_1_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"Stats\":{\"ResourcePoolId\":\"default\"},\"PlanNodeType\":\"Query\"},\"meta\":{\"version\":\"0.2\",\"type\":\"query\"},\"tables\":[{\"name\":\"\\/Root\\/EightShard\",\"reads\":[{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"},{\"columns\":[\"Data\",\"Key\",\"Text\"],\"type\":\"Scan\"}],\"writes\":[{\"columns\":[\"Key\",\"Text\"],\"type\":\"MultiUpsert\"}]}],\"SimplifiedPlan\":{\"PlanNodeId\":0,\"Plans\":[{\"PlanNodeId\":1,\"Operators\":[{\"Name\":\"Upsert\",\"Table\":\"EightShard\",\"SinkType\":\"KqpTableSink\"}],\"Node Type\":\"Upsert\"},{\"PlanNodeId\":2,\"Plans\":[{\"PlanNodeId\":4,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_0\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":5,\"Plans\":[{\"PlanNodeId\":7,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node 
Type\":\"ResultSet_1_1\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":8,\"Plans\":[{\"PlanNodeId\":10,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_2\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":11,\"Plans\":[{\"PlanNodeId\":13,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_3\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":14,\"Plans\":[{\"PlanNodeId\":16,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_4\",\"PlanNodeType\":\"ResultSet\"},{\"PlanNodeId\":17,\"Plans\":[{\"PlanNodeId\":19,\"Operators\":[{\"E-Size\":\"0\",\"Name\":\"TableRangeScan\",\"E-Rows\":\"0\",\"Table\":\"EightShard\",\"ReadRangesKeys\":[\"Key\"],\"ReadColumns\":[\"Data\",\"Key\",\"Text\"],\"E-Cost\":\"0\",\"ReadRangesExpectedSize\":\"1\"}],\"Node Type\":\"TableRangeScan\"}],\"Node Type\":\"ResultSet_1_5\",\"PlanNodeType\":\"ResultSet\"}],\"Node Type\":\"Query\",\"OptimizerStats\":{\"EquiJoinsCount\":0,\"JoinsCount\":0},\"PlanNodeType\":\"Query\"}}","query_id":"ba51e38d-8a9de473-a5b51042-67f061b1","version":"1.0"} 2026-01-04T02:51:39.400004Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:616: Compilation successful, self: [1:7591333635461544373:3039], duration: 2.686367s 2026-01-04T02:51:39.400052Z node 1 :KQP_COMPILE_ACTOR DEBUG: kqp_compile_actor.cpp:437: Send response, self: [1:7591333635461544373:3039], owner: [1:7591333571037032365:2384], status: SUCCESS, issues: , uid: ba51e38d-8a9de473-a5b51042-67f061b1 2026-01-04T02:51:39.400194Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:628: Received response, sender: [1:7591333579626968371:2668], status: SUCCESS, compileActor: [1:7591333635461544373:3039] 2026-01-04T02:51:39.400402Z node 1 :KQP_COMPILE_SERVICE DEBUG: kqp_compile_service.cpp:719: Insert query into compile cache, queryId: {Cluster: db, Database: /Root, DatabaseId: /Root, UserSid: , Text: \n SELECT Key, Text, Data FROM `/Root/EightShard` WHERE Key=-1264944544 + 0;\n SELECT Key, Data, Text FROM `/Root/EightShard` WHERE Key=-1264944544 + 1;\n SELECT Text, Key, Data FROM `/Root/EightShard` WHERE Key=-1264944544 + 2;\n SELECT Text, Data, Key FROM `/Root/EightShard` WHERE Key=-1264944544 + 3;\n SELECT Data, Key, Text FROM `/Root/EightShard` WHERE Key=-1264944544 + 4;\n SELECT Data, Text, Key FROM `/Root/EightShard` WHERE Key=-1264944544 + 5;\n\n UPSERT INTO `/Root/EightShard` (Key, Text) VALUES\n (1284010360ul, \"New\");\n , Settings: {DocumentApiRestricted: 1, IsInternalCall: 0, QueryType: QUERY_TYPE_SQL_GENERIC_CONCURRENT_QUERY, RuntimeParameterSizeLimit: 9999RuntimeParameterSizeLimitSatisfied: 1}, QueryParameterTypes: , GUCSettings: { "guc_settings": { "session_settings": { "ydb_user":"", "ydb_database":"Root" }, "settings": { "ydb_user":"", "ydb_database":"Root" }, "rollback_settings": { } } }} 2026-01-04T02:51:39.400646Z node 1 :KQP_COMPILE_SERVICE DEBUG: 
kqp_compile_service.cpp:901: Send response, sender: [1:7591333579626968371:2668], queryUid: ba51e38d-8a9de473-a5b51042-67f061b1, status:SUCCESS still compiling... 0 still active sessions ... 0 |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/close_with_load/unittest >> IncrementalBackup::ResetVsUpsertColumnStateSerialization [GOOD] >> IncrementalBackup::QueryIncrementalBackupImplTableAfterRestore >> test_select.py::TestSelect::test_select[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] |96.5%| [TA] $(B)/ydb/core/kqp/ut/close_with_load/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/close_with_load/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/close_with_load/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpKnn::VectorSearchKnnPushdown-Nullable [GOOD] >> IncrementalBackup::ComplexRestoreBackupCollection-WithIncremental [GOOD] >> IncrementalBackup::DropBackupCollectionSqlNonExistent >> test_select.py::TestSelect::test_select[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |96.5%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |96.5%| [TS] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_partition_stats/unittest |96.5%| [TS] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_partition_stats/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::VectorSearchKnnPushdown-Nullable [GOOD] Test command err: Trying to start YDB, gRPC: 29638, MsgBus: 9393 ... waiting for SysViewsRoster update finished 2026-01-04T02:51:31.022129Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:31.127451Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:31.127788Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:31.127830Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:31.365888Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:31.365995Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:31.418011Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495088736141 != 1767495088736144 2026-01-04T02:51:31.427364Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:31.468003Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:31.592162Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:31.914159Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:51:31.914222Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:51:31.914255Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:51:31.914478Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:51:31.926629Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:32.145998Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:32.195506Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:32.442032Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:32.749172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:33.018482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:33.804480Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1903:3508], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:33.804619Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:33.805745Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1976:3527], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:33.805889Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:33.838383Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:34.037042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:34.275171Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:34.504174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:34.795277Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:35.060814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:35.366963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:35.686712Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:36.053868Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2788:4168], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:36.053984Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:36.054346Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2792:4172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:36.054403Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:36.054464Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2795:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:36.059290Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:36.204573Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2797:4177], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:51:36.271649Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2857:4218] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:37.950283Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037921 2026-01-04T02:51:37.950473Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037911 2026-01-04T02:51:37.950772Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:2482:3935], Recipient [1:2366:3845]: {TEvReadSet step# 8001 txid# 281474976715672 TabletSource# 72075186224037917 TabletDest# 7207518622403792 ... r.cpp:2421: 72075186224037927 read iterator# {[1:3814:4925], 0} finished in read 2026-01-04T02:51:44.579613Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:18] at 72075186224037927 is Executed 2026-01-04T02:51:44.579632Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:18] at 72075186224037927 executing on unit ExecuteRead 2026-01-04T02:51:44.579650Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:18] at 72075186224037927 to execution unit CompletedOperations 2026-01-04T02:51:44.579669Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:18] at 72075186224037927 on unit CompletedOperations 2026-01-04T02:51:44.579716Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:18] at 72075186224037927 is Executed 2026-01-04T02:51:44.579733Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:18] at 72075186224037927 executing on unit CompletedOperations 2026-01-04T02:51:44.579771Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:18] at 72075186224037927 has finished 2026-01-04T02:51:44.579792Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037927 2026-01-04T02:51:44.579853Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037927 2026-01-04T02:51:44.579908Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037928, FollowerId 0 2026-01-04T02:51:44.579951Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit CheckRead 2026-01-04T02:51:44.580004Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-04T02:51:44.580021Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit CheckRead 2026-01-04T02:51:44.580036Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit BuildAndWaitDependencies 2026-01-04T02:51:44.580053Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit BuildAndWaitDependencies 2026-01-04T02:51:44.580080Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:19] at 72075186224037928 
2026-01-04T02:51:44.580108Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-04T02:51:44.580130Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit BuildAndWaitDependencies 2026-01-04T02:51:44.580165Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit ExecuteRead 2026-01-04T02:51:44.580187Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit ExecuteRead 2026-01-04T02:51:44.580299Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037928 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } } 2026-01-04T02:51:44.580397Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037928 promoting UnprotectedReadEdge to v9000/18446744073709551615 2026-01-04T02:51:44.580422Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037928 Complete read# {[1:3814:4925], 1} after executionsCount# 1 2026-01-04T02:51:44.580445Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037928 read iterator# {[1:3814:4925], 1} sends rowCount# 0, bytes# 0, quota rows left# 32767, quota bytes left# 5242880, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:51:44.580519Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037928 read iterator# {[1:3814:4925], 1} finished in read 2026-01-04T02:51:44.580554Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-04T02:51:44.580573Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit ExecuteRead 2026-01-04T02:51:44.580598Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit CompletedOperations 2026-01-04T02:51:44.580624Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit CompletedOperations 2026-01-04T02:51:44.580661Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-04T02:51:44.580684Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit CompletedOperations 2026-01-04T02:51:44.580706Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:19] at 72075186224037928 has finished 2026-01-04T02:51:44.580736Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037928 2026-01-04T02:51:44.580784Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037928 2026-01-04T02:51:44.580915Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:3814:4925], Recipient [1:3192:4480]: NKikimrTxDataShard.TEvRead 
ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } RangesSize: 1 2026-01-04T02:51:44.581031Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037929, FollowerId 0 2026-01-04T02:51:44.581060Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit CheckRead 2026-01-04T02:51:44.581101Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-04T02:51:44.581119Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit CheckRead 2026-01-04T02:51:44.581134Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit BuildAndWaitDependencies 2026-01-04T02:51:44.581153Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit BuildAndWaitDependencies 2026-01-04T02:51:44.581186Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:19] at 72075186224037929 2026-01-04T02:51:44.581211Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-04T02:51:44.581227Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit BuildAndWaitDependencies 2026-01-04T02:51:44.581242Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit ExecuteRead 2026-01-04T02:51:44.581258Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit ExecuteRead 2026-01-04T02:51:44.581331Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037929 Execute read# 1, request: { ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } } 2026-01-04T02:51:44.581410Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037929 promoting UnprotectedReadEdge to v9000/18446744073709551615 2026-01-04T02:51:44.581445Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037929 Complete read# {[1:3814:4925], 2} after executionsCount# 1 2026-01-04T02:51:44.581472Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037929 read iterator# {[1:3814:4925], 2} sends rowCount# 0, bytes# 0, quota rows left# 32767, quota bytes left# 5242880, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:51:44.581542Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037929 read iterator# {[1:3814:4925], 2} finished in read 2026-01-04T02:51:44.581573Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 
2026-01-04T02:51:44.581590Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit ExecuteRead 2026-01-04T02:51:44.581615Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit CompletedOperations 2026-01-04T02:51:44.581638Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit CompletedOperations 2026-01-04T02:51:44.581672Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-04T02:51:44.581695Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit CompletedOperations 2026-01-04T02:51:44.581739Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:19] at 72075186224037929 has finished 2026-01-04T02:51:44.581757Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037929 2026-01-04T02:51:44.581819Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037929 2026-01-04T02:51:44.607432Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3814:4925], Recipient [1:3186:4475]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:51:44.607501Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037927 ReadCancel: { ReadId: 0 } 2026-01-04T02:51:44.607555Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3814:4925], Recipient [1:3190:4478]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-04T02:51:44.607583Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037928 ReadCancel: { ReadId: 1 } 2026-01-04T02:51:44.608040Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3814:4925], Recipient [1:3192:4480]: NKikimrTxDataShard.TEvReadCancel ReadId: 2 2026-01-04T02:51:44.608071Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037929 ReadCancel: { ReadId: 2 } |96.5%| [TA] $(B)/ydb/core/tx/schemeshard/ut_partition_stats/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/test-results/unittest/{meta.json ... results_accumulator.log} |96.5%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest |96.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_partition_stats/test-results/unittest/{meta.json ... results_accumulator.log} >> KqpKnn::VectorSearchKnnPushdown+Nullable [GOOD] >> test_select.py::TestSelect::test_as_table [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/knn/unittest >> KqpKnn::VectorSearchKnnPushdown+Nullable [GOOD] Test command err: Trying to start YDB, gRPC: 32634, MsgBus: 2347 ... waiting for SysViewsRoster update finished 2026-01-04T02:51:31.793726Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:31.922882Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:31.923324Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:31.923377Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:32.209907Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:32.210025Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:32.278959Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495089798413 != 1767495089798416 2026-01-04T02:51:32.296742Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:32.341870Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:32.460669Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:32.800860Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:51:32.800948Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:51:32.800997Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:51:32.801237Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:51:32.813583Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:51:33.083594Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:33.149726Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:33.383377Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:33.721440Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:33.992945Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:34.691711Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1900:3508], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:34.691838Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:34.692426Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1973:3527], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:34.692503Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:34.717512Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:34.917960Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:35.180564Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:35.416456Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:35.733388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:36.004986Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:36.327923Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:36.634783Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:37.033331Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2784:4168], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:37.033448Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:37.033885Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2788:4172], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:37.033984Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:37.034078Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2791:4175], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:37.039578Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:37.184131Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2793:4177], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T02:51:37.250199Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2853:4218] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:38.860133Z node 1 :TX_DATASHARD DEBUG: datashard__readset.cpp:91: TTxReadSet::Complete at 72075186224037900 2026-01-04T02:51:38.860367Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287938, Sender [1:1393:3130], Recipient [1:2474:3931]: {TEvReadSet step# 8001 txid# 281474976715672 TabletSource# 72075186224037920 TabletDest# 72075186224037900 SetTabletConsumer# 72075186224037900 Flags# 0 Seqno# 11} 2026-01-04T02:51:38.860422Z node 1 :TX_DATASHARD TRACE: datashard_ ... Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:18] at 72075186224037927 on unit CompletedOperations 2026-01-04T02:51:46.015924Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:18] at 72075186224037927 is Executed 2026-01-04T02:51:46.015948Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:18] at 72075186224037927 executing on unit CompletedOperations 2026-01-04T02:51:46.015974Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:18] at 72075186224037927 has finished 2026-01-04T02:51:46.016003Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037927 2026-01-04T02:51:46.016091Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037927 2026-01-04T02:51:46.016293Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:3809:4925], Recipient [1:3186:4478]: NKikimrTxDataShard.TEvRead ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } RangesSize: 1 2026-01-04T02:51:46.016508Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037928, FollowerId 0 2026-01-04T02:51:46.016560Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit CheckRead 2026-01-04T02:51:46.016627Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-04T02:51:46.016654Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit CheckRead 2026-01-04T02:51:46.016680Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit BuildAndWaitDependencies 2026-01-04T02:51:46.016705Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit BuildAndWaitDependencies 2026-01-04T02:51:46.016744Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:19] at 72075186224037928 
2026-01-04T02:51:46.016775Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-04T02:51:46.016802Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit BuildAndWaitDependencies 2026-01-04T02:51:46.016825Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit ExecuteRead 2026-01-04T02:51:46.016854Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit ExecuteRead 2026-01-04T02:51:46.016971Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037928 Execute read# 1, request: { ReadId: 1 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } } 2026-01-04T02:51:46.017125Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037928 promoting UnprotectedReadEdge to v9000/18446744073709551615 2026-01-04T02:51:46.017165Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037928 Complete read# {[1:3809:4925], 1} after executionsCount# 1 2026-01-04T02:51:46.017202Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037928 read iterator# {[1:3809:4925], 1} sends rowCount# 0, bytes# 0, quota rows left# 32767, quota bytes left# 5242880, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:51:46.017319Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037928 read iterator# {[1:3809:4925], 1} finished in read 2026-01-04T02:51:46.017378Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-04T02:51:46.017408Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit ExecuteRead 2026-01-04T02:51:46.017434Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037928 to execution unit CompletedOperations 2026-01-04T02:51:46.017462Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037928 on unit CompletedOperations 2026-01-04T02:51:46.017501Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037928 is Executed 2026-01-04T02:51:46.017526Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037928 executing on unit CompletedOperations 2026-01-04T02:51:46.017552Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:19] at 72075186224037928 has finished 2026-01-04T02:51:46.017580Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037928 2026-01-04T02:51:46.017650Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037928 2026-01-04T02:51:46.017841Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [1:3809:4925], Recipient [1:3190:4481]: NKikimrTxDataShard.TEvRead 
ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } RangesSize: 1 2026-01-04T02:51:46.017978Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037929, FollowerId 0 2026-01-04T02:51:46.018027Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit CheckRead 2026-01-04T02:51:46.018088Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-04T02:51:46.018115Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit CheckRead 2026-01-04T02:51:46.018145Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit BuildAndWaitDependencies 2026-01-04T02:51:46.018169Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit BuildAndWaitDependencies 2026-01-04T02:51:46.018204Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:19] at 72075186224037929 2026-01-04T02:51:46.018234Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-04T02:51:46.018260Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit BuildAndWaitDependencies 2026-01-04T02:51:46.018283Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit ExecuteRead 2026-01-04T02:51:46.018309Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit ExecuteRead 2026-01-04T02:51:46.018410Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037929 Execute read# 1, request: { ReadId: 2 TableId { OwnerId: 72057594046644480 TableId: 54 SchemaVersion: 1 } Columns: 2 Columns: 1 Snapshot { Step: 9000 TxId: 18446744073709551615 } ResultFormat: FORMAT_CELLVEC MaxRows: 32767 MaxBytes: 5242880 Reverse: false VectorTopK { Column: 0 Settings { metric: DISTANCE_COSINE vector_type: VECTOR_TYPE_FLOAT vector_dimension: 0 } TargetVector: "gq\002" Limit: 3 } } 2026-01-04T02:51:46.018536Z node 1 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037929 promoting UnprotectedReadEdge to v9000/18446744073709551615 2026-01-04T02:51:46.018571Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037929 Complete read# {[1:3809:4925], 2} after executionsCount# 1 2026-01-04T02:51:46.018607Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037929 read iterator# {[1:3809:4925], 2} sends rowCount# 0, bytes# 0, quota rows left# 32767, quota bytes left# 5242880, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T02:51:46.018713Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037929 read iterator# {[1:3809:4925], 2} finished in read 2026-01-04T02:51:46.018765Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 
2026-01-04T02:51:46.018793Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit ExecuteRead 2026-01-04T02:51:46.018819Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:19] at 72075186224037929 to execution unit CompletedOperations 2026-01-04T02:51:46.018845Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:19] at 72075186224037929 on unit CompletedOperations 2026-01-04T02:51:46.018880Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:19] at 72075186224037929 is Executed 2026-01-04T02:51:46.018906Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:19] at 72075186224037929 executing on unit CompletedOperations 2026-01-04T02:51:46.018929Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:19] at 72075186224037929 has finished 2026-01-04T02:51:46.018956Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037929 2026-01-04T02:51:46.019016Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037929 2026-01-04T02:51:46.019676Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3809:4925], Recipient [1:3182:4475]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T02:51:46.019748Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037927 ReadCancel: { ReadId: 0 } 2026-01-04T02:51:46.020591Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3809:4925], Recipient [1:3186:4478]: NKikimrTxDataShard.TEvReadCancel ReadId: 1 2026-01-04T02:51:46.020642Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037928 ReadCancel: { ReadId: 1 } 2026-01-04T02:51:46.020783Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [1:3809:4925], Recipient [1:3190:4481]: NKikimrTxDataShard.TEvReadCancel ReadId: 2 2026-01-04T02:51:46.020821Z node 1 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037929 ReadCancel: { ReadId: 2 } |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/knn/unittest >> test_select.py::TestSelect::test_select[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] |96.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |96.6%| [TA] $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |96.6%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/knn/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TKeyValueTest::TestInlineWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi >> TKesusTest::TestAcquireTimeoutAfterReboot [GOOD] >> TKesusTest::TestAcquireSemaphoreViaRelease >> IncrementalBackup::DropBackupCollectionSqlNonExistent [GOOD] >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore+WithIncremental >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] >> test_select.py::TestSelect::test_select[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] >> IncrementalBackup::SimpleRestoreBackupCollection+WithIncremental [GOOD] >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kesus/tablet/ut/unittest >> TKesusTest::TestAcquireSemaphoreViaRelease [GOOD] Test command err: 2026-01-04T02:51:01.871257Z node 1 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:01.871339Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:01.883256Z node 1 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:01.883380Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:01.907877Z node 1 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:01.908328Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[1:134:2159], cookie=12223341445682313052, session=0, seqNo=0) 2026-01-04T02:51:01.908465Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:51:01.930529Z node 1 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[1:134:2159], cookie=12223341445682313052, session=1) 2026-01-04T02:51:01.931069Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:134:2159], cookie=111, session=1, semaphore="Lock1" count=1) 2026-01-04T02:51:01.931189Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-04T02:51:01.931265Z node 1 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-04T02:51:01.942944Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:134:2159], cookie=111) 2026-01-04T02:51:01.943215Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[1:134:2159], cookie=222, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-04T02:51:01.954828Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[1:134:2159], cookie=222) 2026-01-04T02:51:01.955236Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[1:151:2173], cookie=11244822269734363357, name="Lock1") 2026-01-04T02:51:01.955318Z node 1 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[1:151:2173], cookie=11244822269734363357) 2026-01-04T02:51:02.216717Z node 2 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 
72057594037927937 2026-01-04T02:51:02.216791Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:02.229386Z node 2 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:02.229514Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:02.263125Z node 2 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:02.263948Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:136:2161], cookie=2605111920902893160, session=0, seqNo=0) 2026-01-04T02:51:02.264105Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:51:02.276160Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:136:2161], cookie=2605111920902893160, session=1) 2026-01-04T02:51:02.276492Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[2:137:2162], cookie=6581208333428110177, session=0, seqNo=0) 2026-01-04T02:51:02.276619Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 2 2026-01-04T02:51:02.288127Z node 2 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[2:137:2162], cookie=6581208333428110177, session=2) 2026-01-04T02:51:02.288894Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=111, session=1, semaphore="Lock1" count=18446744073709551615) 2026-01-04T02:51:02.289003Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 1 "Lock1" 2026-01-04T02:51:02.289062Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Lock1" queue: next order #1 session 1 2026-01-04T02:51:02.300565Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=111) 2026-01-04T02:51:02.300824Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:136:2161], cookie=112, session=1, semaphore="Lock2" count=1) 2026-01-04T02:51:02.300923Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:126: [72057594037927937] Created new ephemeral semaphore 2 "Lock2" 2026-01-04T02:51:02.300978Z node 2 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 2 "Lock2" queue: next order #2 session 1 2026-01-04T02:51:02.312565Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:136:2161], cookie=112) 2026-01-04T02:51:02.312819Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=222, session=2, semaphore="Lock1" count=1) 2026-01-04T02:51:02.312956Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=223, session=2, semaphore="Lock2" count=18446744073709551615) 2026-01-04T02:51:02.324498Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=222) 2026-01-04T02:51:02.324557Z node 2 :KESUS_TABLET DEBUG: 
tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=223) 2026-01-04T02:51:02.324796Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=333, session=2, semaphore="Lock1" count=1) 2026-01-04T02:51:02.325030Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[2:137:2162], cookie=334, session=2, semaphore="Lock2" count=18446744073709551615) 2026-01-04T02:51:02.336740Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=333) 2026-01-04T02:51:02.336793Z node 2 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[2:137:2162], cookie=334) 2026-01-04T02:51:02.759008Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:02.770892Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:03.130334Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:03.142396Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:03.491014Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:03.502637Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:03.851965Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:03.864042Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:04.233542Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:04.245043Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:04.584022Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:04.596175Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:04.936079Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:04.947925Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:05.297790Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:05.309825Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:05.670627Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:05.682539Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:06.064715Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:06.076723Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:06.437532Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:06.449125Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 
2026-01-04T02:51:06.808718Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:06.820431Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:07.180071Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:07.191759Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:07.561623Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:07.573866Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:07.975067Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:07.986635Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:08.346104Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:08.358137Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:08.718877Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:08.731004Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:09.091478Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:09.103898Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:09.464072Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:09.476280Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:09.868749Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:09.880771Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:10.240769Z node 2 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [720575940379 ... 
6-01-04T02:51:43.102767Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:43.503651Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:43.518434Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:43.894219Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:43.907454Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:44.282701Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:44.295311Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:44.680830Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:44.695642Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:45.061981Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:45.082850Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:45.503919Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:45.520516Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:45.898310Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:45.914639Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:46.269843Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:46.286693Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:46.639186Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:46.654614Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:47.008473Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:47.022171Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:47.398020Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:47.410471Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:47.769406Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:47.782552Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:48.139141Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:48.152008Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:48.500619Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:48.514871Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:48.868698Z node 4 :KESUS_TABLET DEBUG: 
tx_self_check.cpp:22: [72057594037927937] TTxSelfCheck::Execute 2026-01-04T02:51:48.881098Z node 4 :KESUS_TABLET DEBUG: tx_self_check.cpp:31: [72057594037927937] TTxSelfCheck::Complete 2026-01-04T02:51:49.339015Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_timeout.cpp:30: [72057594037927937] TTxSemaphoreTimeout::Execute (session=2, semaphore=1) 2026-01-04T02:51:49.339109Z node 4 :KESUS_TABLET DEBUG: tablet_db.cpp:124: [72057594037927937] Deleting session 2 / semaphore 1 "Lock1" waiter link 2026-01-04T02:51:49.351200Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_timeout.cpp:71: [72057594037927937] TTxSemaphoreTimeout::Complete (session=2, semaphore=1) 2026-01-04T02:51:49.372325Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:598:2535], cookie=824532767699436989) 2026-01-04T02:51:49.372419Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:598:2535], cookie=824532767699436989) 2026-01-04T02:51:49.372803Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:23: [72057594037927937] TTxSessionsDescribe::Execute (sender=[4:601:2538], cookie=11545153092088774461) 2026-01-04T02:51:49.372860Z node 4 :KESUS_TABLET DEBUG: tx_sessions_describe.cpp:48: [72057594037927937] TTxSessionsDescribe::Complete (sender=[4:601:2538], cookie=11545153092088774461) 2026-01-04T02:51:49.373286Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:604:2541], cookie=12972933001196849017, name="Lock1") 2026-01-04T02:51:49.373349Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:604:2541], cookie=12972933001196849017) 2026-01-04T02:51:49.373695Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[4:607:2544], cookie=55466761097663698, name="Lock1") 2026-01-04T02:51:49.373747Z node 4 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[4:607:2544], cookie=55466761097663698) 2026-01-04T02:51:49.718543Z node 5 :KESUS_TABLET INFO: tablet_impl.cpp:71: OnActivateExecutor: 72057594037927937 2026-01-04T02:51:49.718657Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:14: [72057594037927937] TTxInitSchema::Execute 2026-01-04T02:51:49.731548Z node 5 :KESUS_TABLET DEBUG: tx_init_schema.cpp:21: [72057594037927937] TTxInitSchema::Complete 2026-01-04T02:51:49.731673Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:30: [72057594037927937] TTxInit::Execute 2026-01-04T02:51:49.745792Z node 5 :KESUS_TABLET DEBUG: tx_init.cpp:242: [72057594037927937] TTxInit::Complete 2026-01-04T02:51:49.746283Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:135:2159], cookie=4498471323322365076, session=0, seqNo=0) 2026-01-04T02:51:49.746418Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 1 2026-01-04T02:51:49.768730Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:135:2159], cookie=4498471323322365076, session=1) 2026-01-04T02:51:49.768976Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:135:2159], cookie=4662555731643383693, session=0, seqNo=0) 2026-01-04T02:51:49.769073Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: 
[72057594037927937] Created new session 2 2026-01-04T02:51:49.781037Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:135:2159], cookie=4662555731643383693, session=2) 2026-01-04T02:51:49.781319Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:35: [72057594037927937] TTxSessionAttach::Execute (sender=[5:135:2159], cookie=3963636798377469849, session=0, seqNo=0) 2026-01-04T02:51:49.781437Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:118: [72057594037927937] Created new session 3 2026-01-04T02:51:49.793531Z node 5 :KESUS_TABLET DEBUG: tx_session_attach.cpp:140: [72057594037927937] TTxSessionAttach::Complete (sender=[5:135:2159], cookie=3963636798377469849, session=3) 2026-01-04T02:51:49.794007Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:32: [72057594037927937] TTxSemaphoreCreate::Execute (sender=[5:148:2170], cookie=13026400625442454819, name="Sem1", limit=3) 2026-01-04T02:51:49.794124Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:104: [72057594037927937] Created new semaphore 1 "Sem1" 2026-01-04T02:51:49.806329Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_create.cpp:112: [72057594037927937] TTxSemaphoreCreate::Complete (sender=[5:148:2170], cookie=13026400625442454819) 2026-01-04T02:51:49.806646Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:135:2159], cookie=111, session=1, semaphore="Sem1" count=2) 2026-01-04T02:51:49.806823Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #1 session 1 2026-01-04T02:51:49.807039Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:135:2159], cookie=222, session=2, semaphore="Sem1" count=2) 2026-01-04T02:51:49.807209Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:48: [72057594037927937] TTxSemaphoreAcquire::Execute (sender=[5:135:2159], cookie=333, session=3, semaphore="Sem1" count=1) 2026-01-04T02:51:49.819292Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:135:2159], cookie=111) 2026-01-04T02:51:49.819379Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:135:2159], cookie=222) 2026-01-04T02:51:49.819409Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_acquire.cpp:263: [72057594037927937] TTxSemaphoreAcquire::Complete (sender=[5:135:2159], cookie=333) 2026-01-04T02:51:49.819966Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:157:2179], cookie=11926067863799738189, name="Sem1") 2026-01-04T02:51:49.820058Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:157:2179], cookie=11926067863799738189) 2026-01-04T02:51:49.820477Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:160:2182], cookie=16554183484778814242, name="Sem1") 2026-01-04T02:51:49.820553Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:160:2182], cookie=16554183484778814242) 2026-01-04T02:51:49.820788Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:37: [72057594037927937] TTxSemaphoreRelease::Execute (sender=[5:135:2159], cookie=444, name="Sem1") 2026-01-04T02:51:49.820885Z node 5 
:KESUS_TABLET DEBUG: tablet_db.cpp:98: [72057594037927937] Deleting session 1 / semaphore 1 "Sem1" owner link 2026-01-04T02:51:49.820944Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #2 session 2 2026-01-04T02:51:49.820996Z node 5 :KESUS_TABLET DEBUG: tablet_db.cpp:152: [72057594037927937] Processing semaphore 1 "Sem1" queue: next order #3 session 3 2026-01-04T02:51:49.833227Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_release.cpp:93: [72057594037927937] TTxSemaphoreRelease::Complete (sender=[5:135:2159], cookie=444) 2026-01-04T02:51:49.833925Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:165:2187], cookie=8417262038980368626, name="Sem1") 2026-01-04T02:51:49.834029Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:165:2187], cookie=8417262038980368626) 2026-01-04T02:51:49.834475Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:29: [72057594037927937] TTxSemaphoreDescribe::Execute (sender=[5:168:2190], cookie=16136619607641889334, name="Sem1") 2026-01-04T02:51:49.834549Z node 5 :KESUS_TABLET DEBUG: tx_semaphore_describe.cpp:134: [72057594037927937] TTxSemaphoreDescribe::Complete (sender=[5:168:2190], cookie=16136619607641889334) |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/kesus/tablet/ut/unittest >> IncrementalBackup::E2EMultipleBackupRestoreCycles [GOOD] >> IncrementalBackup::DropBackupCollectionSqlPathResolution >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> test_select.py::TestSelect::test_select[table_all_types-pk_types12-all_types12-index12---] |96.6%| [TA] $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {RESULT} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kesus/tablet/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactBorrowedAfterSplitMergeWhenDisabled [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_as_table [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> TKeyValueTest::TestInlineWriteReadWithRestartsThenResponseOkNewApi [GOOD] >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> IncrementalBackup::QueryIncrementalBackupImplTableAfterRestore [GOOD] >> IncrementalBackup::OmitIndexesIncrementalBackup >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] >> test_select.py::TestSelect::test_select[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] >> IncrementalBackup::DropBackupCollectionSqlPathResolution [GOOD] >> IncrementalBackup::DropBackupCollectionSqlWithDatabaseLikeNames >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_disk.py::TestSafeDiskBreak::test_erase_method >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> IncrementalBackup::SimpleRestoreBackupCollection-WithIncremental [GOOD] >> IncrementalBackup::ShopDemoIncrementalBackupScenario |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> KqpPg::ValuesInsert-useSink [GOOD] >> PgCatalog::PgType |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore+WithIncremental [GOOD] >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore-WithIncremental >> IncrementalBackup::DropBackupCollectionSqlWithDatabaseLikeNames [GOOD] >> IncrementalBackup::IncrementalBackupNonExistentTable >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleDataShardReboot [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] >> TKeyValueTest::TestInlineWriteReadWithRestartsWithNotCorrectUTF8NewApi [GOOD] >> TKeyValueTest::TestLargeWriteAndDelete |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_as_table [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> IncrementalBackup::OmitIndexesIncrementalBackup [GOOD] >> IncrementalBackup::MultipleIndexesIncrementalRestore |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> PgCatalog::PgType [GOOD] >> PgCatalog::InformationSchema >> IncrementalBackup::IncrementalBackupNonExistentTable [GOOD] >> IncrementalBackup::IncrementalBackupWithIndexes >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotCompactBorrowed [GOOD] >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts >> KqpPg::TableInsert-useSink [GOOD] >> KqpPg::TempTablesSessionsIsolation >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> IncrementalBackup::BackupMetadataDirectoriesSkippedDuringRestore-WithIncremental [GOOD] >> IncrementalBackup::BasicIndexIncrementalRestore >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> PgCatalog::InformationSchema [GOOD] >> PgCatalog::CheckSetConfig >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestInlineEmptyWriteReadDeleteWithRestartsThenResponseOkNewApi [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for 
TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! 
new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! 
new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:85:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:88:2057] recipient: [8:87:2117] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:90:2057] recipient: [8:87:2117] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:89:2118] Leader for TabletID 72057594037927937 is [8:89:2118] sender: [8:205:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:85:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:88:2057] recipient: [9:87:2117] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:90:2057] recipient: [9:87:2117] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! new actor is[9:89:2118] Leader for TabletID 72057594037927937 is [9:89:2118] sender: [9:205:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:86:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:89:2057] recipient: [10:88:2117] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:91:2057] recipient: [10:88:2117] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! 
new actor is[10:90:2118] Leader for TabletID 72057594037927937 is [10:90:2118] sender: [10:206:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:89:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:92:2057] recipient: [11:91:2120] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:94:2057] recipient: [11:91:2120] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:93:2121] Leader for TabletID 72057594037927937 is [11:93:2121] sender: [11:209:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvRequest ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:89:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:92:2057] recipient: [12:91:2120] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:94:2057] recipient: [12:91:2120] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! new actor is[12:93:2121] Leader for TabletID 72057594037927937 is [12:93:2121] sender: [12:209:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] send ... r refreshed! new actor is[31:83:2113] Leader for TabletID 72057594037927937 is [31:83:2113] sender: [31:199:2057] recipient: [31:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [32:56:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:59:2057] recipient: [32:53:2097] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:76:2057] recipient: [32:14:2061] !Reboot 72057594037927937 (actor [32:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:82:2057] recipient: [32:39:2086] Leader for TabletID 72057594037927937 is [32:58:2099] sender: [32:85:2057] recipient: [32:84:2115] Leader for TabletID 72057594037927937 is [32:86:2116] sender: [32:87:2057] recipient: [32:84:2115] !Reboot 72057594037927937 (actor [32:58:2099]) rebooted! 
!Reboot 72057594037927937 (actor [32:58:2099]) tablet resolver refreshed! new actor is[32:86:2116] Leader for TabletID 72057594037927937 is [32:86:2116] sender: [32:202:2057] recipient: [32:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [33:56:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:59:2057] recipient: [33:53:2097] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:76:2057] recipient: [33:14:2061] !Reboot 72057594037927937 (actor [33:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:82:2057] recipient: [33:39:2086] Leader for TabletID 72057594037927937 is [33:58:2099] sender: [33:85:2057] recipient: [33:84:2115] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:87:2057] recipient: [33:84:2115] !Reboot 72057594037927937 (actor [33:58:2099]) rebooted! !Reboot 72057594037927937 (actor [33:58:2099]) tablet resolver refreshed! new actor is[33:86:2116] Leader for TabletID 72057594037927937 is [33:86:2116] sender: [33:202:2057] recipient: [33:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [34:56:2057] recipient: [34:54:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:59:2057] recipient: [34:54:2097] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:76:2057] recipient: [34:14:2061] !Reboot 72057594037927937 (actor [34:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:83:2057] recipient: [34:39:2086] Leader for TabletID 72057594037927937 is [34:58:2099] sender: [34:86:2057] recipient: [34:85:2115] Leader for TabletID 72057594037927937 is [34:87:2116] sender: [34:88:2057] recipient: [34:85:2115] !Reboot 72057594037927937 (actor [34:58:2099]) rebooted! !Reboot 72057594037927937 (actor [34:58:2099]) tablet resolver refreshed! new actor is[34:87:2116] Leader for TabletID 72057594037927937 is [34:87:2116] sender: [34:105:2057] recipient: [34:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [35:56:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:59:2057] recipient: [35:53:2097] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:76:2057] recipient: [35:14:2061] !Reboot 72057594037927937 (actor [35:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:85:2057] recipient: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:88:2057] recipient: [35:87:2117] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:90:2057] recipient: [35:87:2117] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! 
new actor is[35:89:2118] Leader for TabletID 72057594037927937 is [35:89:2118] sender: [35:205:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvReadRange ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:85:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:88:2057] recipient: [36:87:2117] Leader for TabletID 72057594037927937 is [36:89:2118] sender: [36:90:2057] recipient: [36:87:2117] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:89:2118] Leader for TabletID 72057594037927937 is [36:89:2118] sender: [36:205:2057] recipient: [36:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] !Reboot 72057594037927937 (actor [37:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:86:2057] recipient: [37:39:2086] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:89:2057] recipient: [37:88:2117] Leader for TabletID 72057594037927937 is [37:90:2118] sender: [37:91:2057] recipient: [37:88:2117] !Reboot 72057594037927937 (actor [37:58:2099]) rebooted! !Reboot 72057594037927937 (actor [37:58:2099]) tablet resolver refreshed! new actor is[37:90:2118] Leader for TabletID 72057594037927937 is [37:90:2118] sender: [37:108:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] !Reboot 72057594037927937 (actor [38:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:88:2057] recipient: [38:39:2086] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:91:2057] recipient: [38:90:2119] Leader for TabletID 72057594037927937 is [38:92:2120] sender: [38:93:2057] recipient: [38:90:2119] !Reboot 72057594037927937 (actor [38:58:2099]) rebooted! !Reboot 72057594037927937 (actor [38:58:2099]) tablet resolver refreshed! 
new actor is[38:92:2120] Leader for TabletID 72057594037927937 is [38:92:2120] sender: [38:208:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:88:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:91:2057] recipient: [39:90:2119] Leader for TabletID 72057594037927937 is [39:92:2120] sender: [39:93:2057] recipient: [39:90:2119] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! new actor is[39:92:2120] Leader for TabletID 72057594037927937 is [39:92:2120] sender: [39:208:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:89:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:92:2057] recipient: [40:91:2119] Leader for TabletID 72057594037927937 is [40:93:2120] sender: [40:94:2057] recipient: [40:91:2119] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! new actor is[40:93:2120] Leader for TabletID 72057594037927937 is [40:93:2120] sender: [40:209:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:92:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:95:2057] recipient: [41:94:2122] Leader for TabletID 72057594037927937 is [41:96:2123] sender: [41:97:2057] recipient: [41:94:2122] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! 
new actor is[41:96:2123] Leader for TabletID 72057594037927937 is [41:96:2123] sender: [41:212:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:52:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:52:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvKeyValue::TEvRead ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:92:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:95:2057] recipient: [42:94:2122] Leader for TabletID 72057594037927937 is [42:96:2123] sender: [42:97:2057] recipient: [42:94:2122] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! new actor is[42:96:2123] Leader for TabletID 72057594037927937 is [42:96:2123] sender: [42:212:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest >> KqpPg::TempTablesSessionsIsolation [GOOD] >> KqpPg::TempTablesDrop >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldNotRequestCompactionsAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:50:58.156089Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:50:58.156193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:58.156260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:50:58.156310Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing 
config: using default configuration 2026-01-04T02:50:58.156373Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:50:58.156408Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:50:58.156468Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:58.156572Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:50:58.157513Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:50:58.158168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:50:58.243973Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:50:58.244023Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:58.254751Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:50:58.255179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:50:58.255328Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:50:58.261396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:50:58.261605Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:50:58.262197Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:50:58.262467Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:50:58.266832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:58.268078Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:50:58.275734Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:50:58.275797Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:58.275899Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:50:58.275946Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:50:58.276096Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:50:58.276259Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:50:58.453360Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.454386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.454499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.454576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.454651Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.454720Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.454836Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.454938Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.455020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.455105Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.455183Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.455269Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.455361Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.455458Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.455555Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send 
TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... :403: Unit PlanQueue has no ready operations at 72075186233409546 2026-01-04T02:52:16.902626Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:739:2722]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-04T02:52:16.902760Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 38 2026-01-04T02:52:16.903092Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [3:739:2722], Recipient [3:127:2152]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 38 Generation: 2 Round: 10 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 29 Memory: 124368 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 139 TableOwnerId: 72057594046678944 FollowerId: 0 2026-01-04T02:52:16.903130Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-04T02:52:16.903170Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0029 2026-01-04T02:52:16.903252Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 29 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-04T02:52:16.903301Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-04T02:52:16.914180Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:743:2725]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2026-01-04T02:52:16.914266Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-01-04T02:52:16.914362Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409547 
outdated step 5000038 last cleanup 0 2026-01-04T02:52:16.914465Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409547 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:52:16.914502Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409547 2026-01-04T02:52:16.914541Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409547 has no attached operations 2026-01-04T02:52:16.914572Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409547 2026-01-04T02:52:16.914738Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:743:2725]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-04T02:52:16.914863Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 38 2026-01-04T02:52:16.915185Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [3:743:2725], Recipient [3:127:2152]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 38 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 27 Memory: 119488 } ShardState: 2 UserTablePartOwners: 72075186233409547 NodeId: 3 StartTime: 139 TableOwnerId: 72057594046678944 FollowerId: 0 2026-01-04T02:52:16.915259Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-04T02:52:16.915323Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0027 2026-01-04T02:52:16.915431Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-04T02:52:16.957413Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-04T02:52:16.957505Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-04T02:52:16.957557Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 2 2026-01-04T02:52:16.957676Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 2 2026-01-04T02:52:16.957729Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2026-01-04T02:52:16.957901Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 13940 row count 100 2026-01-04T02:52:16.957976Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2026-01-04T02:52:16.958010Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2026-01-04T02:52:16.958135Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:219: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:29.000000Z at schemeshard 72057594046678944 2026-01-04T02:52:16.958232Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409546 by load, its table already has 2 out of 2 partitions 2026-01-04T02:52:16.958294Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:2 data size 0 row count 0 2026-01-04T02:52:16.958347Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 0, DataSize 0 2026-01-04T02:52:16.958378Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409547, followerId 0 2026-01-04T02:52:16.958425Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:219: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:2 with partCount# 0, rowCount# 0, searchHeight# 0, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046678944 2026-01-04T02:52:16.958470Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409547 by load, its table already has 2 out of 2 partitions 2026-01-04T02:52:16.958547Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:52:16.969970Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-04T02:52:16.970067Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-04T02:52:16.970121Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 
2026-01-04T02:52:16.995982Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:1746:3666], Recipient [3:739:2722]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:52:16.996076Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:52:16.996140Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409546, clientId# [3:1745:3665], serverId# [3:1746:3666], sessionId# [0:0:0] 2026-01-04T02:52:16.996435Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553213, Sender [3:1744:3664], Recipient [3:739:2722]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 38 } 2026-01-04T02:52:16.999629Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:1749:3669], Recipient [3:743:2725]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:52:16.999738Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:52:16.999791Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409547, clientId# [3:1748:3668], serverId# [3:1749:3669], sessionId# [0:0:0] 2026-01-04T02:52:16.999992Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553213, Sender [3:1747:3667], Recipient [3:743:2725]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72057594046678944 LocalId: 38 } |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax-`.metadata/script_executions`] [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldNotCompactAfterDrop [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:50:58.156134Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:50:58.156233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:58.156276Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:50:58.156313Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:50:58.156349Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: 
OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:50:58.156375Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:50:58.156421Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:58.156517Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:50:58.157336Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:50:58.158168Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:50:58.243870Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:50:58.243928Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:58.257450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:50:58.258020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:50:58.258193Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:50:58.265503Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:50:58.265731Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:50:58.266405Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:50:58.266667Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:50:58.269387Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:58.269530Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:50:58.275831Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:50:58.275908Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:58.275985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:50:58.276034Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:50:58.276144Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 
2026-01-04T02:50:58.276346Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:50:58.423028Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.424884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.425020Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.425095Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.425158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.425234Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.425300Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.425413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 
72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.425485Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.425546Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.425616Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.425672Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.425761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.425872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.425945Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: 
true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... Reads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 5759 Memory: 124368 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 138 TableOwnerId: 72057594046678944 FollowerId: 0 2026-01-04T02:52:17.667463Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-04T02:52:17.667532Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.5759 2026-01-04T02:52:17.667669Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-04T02:52:17.667747Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-04T02:52:17.701026Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-04T02:52:17.701116Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-04T02:52:17.701180Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2026-01-04T02:52:17.701291Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 1 2026-01-04T02:52:17.701354Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2026-01-04T02:52:17.701558Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 13940 row count 100 2026-01-04T02:52:17.701648Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2026-01-04T02:52:17.701696Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2026-01-04T02:52:17.701889Z node 3 :FLAT_TX_SCHEMESHARD 
INFO: schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 1, Rows# 100, Deletes# 0, Compaction# 1970-01-01T00:00:00.000000Z}, next wakeup in# 0.000000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2026-01-04T02:52:17.702091Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 30 seconds 2026-01-04T02:52:17.702149Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:227: [BackgroundCompaction] [Update] Enqueued shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:00:00.000000Z at schemeshard 72057594046678944 2026-01-04T02:52:17.702236Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409546 by load, its table already has 1 out of 1 partitions 2026-01-04T02:52:17.702357Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:52:17.702784Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [3:127:2152], Recipient [3:729:2715]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 38 } CompactSinglePartedShards: true 2026-01-04T02:52:17.702985Z node 3 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 1 of 72075186233409546 tableId# 38 localTid# 1001, requested from [3:127:2152], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2026-01-04T02:52:17.704613Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-04T02:52:17.704673Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 0, front# 1 2026-01-04T02:52:17.714593Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:719:2707], Recipient [3:729:2715]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-04T02:52:17.722632Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 1, ts 1970-01-01T00:00:30.250000Z 2026-01-04T02:52:17.722742Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 1, front# 1 2026-01-04T02:52:17.722786Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:260: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:127:2152]pathId# [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:52:17.723149Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553211, Sender [3:729:2715], Recipient [3:127:2152]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 38 } Status: OK 2026-01-04T02:52:17.723189Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5324: StateWork, processing event TEvDataShard::TEvCompactTableResult 2026-01-04T02:52:17.723321Z node 3 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__background_compaction.cpp:31: [BackgroundCompaction] [Start] Compacting for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, compactionInfo# {72057594046678944:1, SH# 0, Rows# 0, Deletes# 0, Compaction# 1970-01-01T00:00:30.000000Z}, next wakeup in# 29.996000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2026-01-04T02:52:17.723420Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 29.996000s, rate# 1, in queue# 0 shards, waiting after compaction# 0 shards, running# 1 shards at schemeshard 72057594046678944 2026-01-04T02:52:17.723952Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [3:127:2152], Recipient [3:729:2715]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046678944 LocalId: 38 } CompactSinglePartedShards: true 2026-01-04T02:52:17.724058Z node 3 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 2 of 72075186233409546 tableId# 38 localTid# 1001, requested from [3:127:2152], partsCount# 1, memtableSize# 0, memtableWaste# 0, memtableRows# 0 2026-01-04T02:52:17.725544Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 1, ts 1970-01-01T00:00:30.250000Z 2026-01-04T02:52:17.725585Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 1, front# 2 2026-01-04T02:52:17.729468Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:719:2707], Recipient [3:729:2715]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-04T02:52:17.730072Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:189: Updated last full compaction of tablet# 72075186233409546, tableId# 38, last full compaction# 1970-01-01T00:00:30.250000Z 2026-01-04T02:52:17.733836Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:719:2707], Recipient [3:729:2715]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-04T02:52:17.736193Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186233409546, table# 1001, finished edge# 2, ts 1970-01-01T00:00:30.254000Z 2026-01-04T02:52:17.736278Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001, finished edge# 2, front# 2 2026-01-04T02:52:17.736324Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:260: ReplyCompactionWaiters of tablet# 72075186233409546, table# 1001 sending TEvCompactTableResult to# [3:127:2152]pathId# [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T02:52:17.736989Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553211, Sender [3:729:2715], Recipient [3:127:2152]: NKikimrTxDataShard.TEvCompactTableResult TabletId: 72075186233409546 PathId { OwnerId: 72057594046678944 LocalId: 38 } Status: OK 2026-01-04T02:52:17.737040Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5324: StateWork, processing event TEvDataShard::TEvCompactTableResult 2026-01-04T02:52:17.737141Z node 3 :FLAT_TX_SCHEMESHARD 
DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 0 seconds 2026-01-04T02:52:17.737212Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_compaction.cpp:112: [BackgroundCompaction] [Finished] Compaction completed for pathId# [OwnerId: 72057594046678944, LocalPathId: 38], datashard# 72075186233409546, shardIdx# 72057594046678944:1 in# 4 ms, with status# 0, next wakeup in# 0.992000s, rate# 1, in queue# 1 shards, waiting after compaction# 0 shards, running# 0 shards at schemeshard 72057594046678944 2026-01-04T02:52:17.739436Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [3:719:2707], Recipient [3:729:2715]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-04T02:52:17.750393Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-04T02:52:17.750487Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-04T02:52:17.750550Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-04T02:52:17.782897Z node 3 :TX_DATASHARD DEBUG: datashard__compaction.cpp:189: Updated last full compaction of tablet# 72075186233409546, tableId# 38, last full compaction# 1970-01-01T00:00:30.254000Z |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedBeforeSplit [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] >> IncrementalBackup::IncrementalBackupWithIndexes [GOOD] >> IncrementalBackup::IncrementalBackupWithCoveringIndex >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] >> IncrementalBackup::ShopDemoIncrementalBackupScenario [GOOD] >> IncrementalBackup::VerifyIncrementalBackupTableAttributes ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::ShouldNotCompactServerlessAfterDisable [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:50:58.156072Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:50:58.156143Z node 
1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:58.156173Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:50:58.156200Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:50:58.156228Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:50:58.156249Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:50:58.156304Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:58.156366Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:50:58.157010Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:50:58.158153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:50:58.250501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:50:58.250557Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:58.264089Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:50:58.264539Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:50:58.264726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:50:58.272692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:50:58.272945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:50:58.273566Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:50:58.273856Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:50:58.276564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:58.276732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:50:58.278091Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:50:58.278149Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 
72057594046678944 2026-01-04T02:50:58.278199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:50:58.278239Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:50:58.278330Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:50:58.278453Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:50:58.450261Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.451231Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.451357Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.451427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.451497Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.451581Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.451656Z node 1 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.451763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.451855Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.451923Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.451988Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.452048Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.452124Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.452225Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: 
Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.452299Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... onditionalErase DoExecute: at schemeshard: 72075186233409546 2026-01-04T02:52:22.940829Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409546 2026-01-04T02:52:22.940978Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435098, Sender [0:0:0], Recipient [3:772:2750]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2026-01-04T02:52:22.941014Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5456: StateWork, processing event TEvPrivate::TEvSendBaseStatsToSA 2026-01-04T02:52:23.018191Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:1270:3155]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2026-01-04T02:52:23.018283Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-01-04T02:52:23.018391Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409552 outdated step 300 last cleanup 0 2026-01-04T02:52:23.018475Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409552 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:52:23.018527Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409552 2026-01-04T02:52:23.018569Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409552 has no attached operations 2026-01-04T02:52:23.018603Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409552 2026-01-04T02:52:23.018751Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:1270:3155]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-04T02:52:23.018859Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409552, FollowerId 0, tableId 32 2026-01-04T02:52:23.019203Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [3:1270:3155], Recipient [3:1609:3466]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409552 TableLocalId: 32 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 
LocksBroken: 0 } TabletMetrics { CPU: 32 Memory: 119488 } ShardState: 2 UserTablePartOwners: 72075186233409552 NodeId: 3 StartTime: 219 TableOwnerId: 72075186233409549 FollowerId: 0 2026-01-04T02:52:23.019258Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-04T02:52:23.019329Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0032 2026-01-04T02:52:23.019470Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409552 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-04T02:52:23.019519Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-04T02:52:23.030698Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435073, Sender [0:0:0], Recipient [3:1275:3158]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2026-01-04T02:52:23.030790Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-01-04T02:52:23.030888Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409553 outdated step 300 last cleanup 0 2026-01-04T02:52:23.030960Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409553 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:52:23.031000Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409553 2026-01-04T02:52:23.031036Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409553 has no attached operations 2026-01-04T02:52:23.031069Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409553 2026-01-04T02:52:23.031228Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:1275:3158]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-04T02:52:23.031354Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409553, FollowerId 0, tableId 32 2026-01-04T02:52:23.031754Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [3:1275:3158], Recipient [3:1609:3466]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409553 TableLocalId: 32 Generation: 2 Round: 10 TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 
RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 26 Memory: 119488 } ShardState: 2 UserTablePartOwners: 72075186233409553 NodeId: 3 StartTime: 219 TableOwnerId: 72075186233409549 FollowerId: 0 2026-01-04T02:52:23.031804Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-04T02:52:23.031855Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] state 'Ready' dataSize 0 rowCount 0 cpuUsage 0.0026 2026-01-04T02:52:23.031998Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72075186233409549 from shard 72075186233409553 followerId 0 pathId [OwnerId: 72075186233409549, LocalPathId: 32] raw table stats: DataSize: 0 RowCount: 0 IndexSize: 0 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 0 RangeReadRows: 0 SearchHeight: 0 LastFullCompactionTs: 0 HasLoanedParts: false ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-04T02:52:23.054710Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:1609:3466]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:52:23.054781Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:52:23.055447Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:1609:3466], Recipient [3:1609:3466]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:52:23.055493Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:52:23.066245Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435076, Sender [0:0:0], Recipient [3:1609:3466]: NKikimr::NSchemeShard::TEvPrivate::TEvRunConditionalErase 2026-01-04T02:52:23.066341Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5308: StateWork, processing event TEvPrivate::TEvRunConditionalErase 2026-01-04T02:52:23.066464Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7072: Handle: TEvRunConditionalErase, at schemeshard: 72075186233409549 2026-01-04T02:52:23.066579Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:58: TTxRunConditionalErase DoExecute: at schemeshard: 72075186233409549 2026-01-04T02:52:23.066655Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__conditional_erase.cpp:197: TTxRunConditionalErase DoComplete: at schemeshard: 72075186233409549 2026-01-04T02:52:23.066796Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435098, Sender [0:0:0], Recipient [3:1609:3466]: NKikimr::NSchemeShard::TEvPrivate::TEvSendBaseStatsToSA 2026-01-04T02:52:23.066832Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5456: StateWork, 
processing event TEvPrivate::TEvSendBaseStatsToSA 2026-01-04T02:52:23.067146Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269746180, Sender [3:2821:4638], Recipient [3:1609:3466]: NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult 2026-01-04T02:52:23.067188Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5455: StateWork, processing event TEvTxProxySchemeCache::TEvNavigateKeySetResult 2026-01-04T02:52:23.093404Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:2824:4641], Recipient [3:1270:3155]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:52:23.093510Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:52:23.093599Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409552, clientId# [3:2823:4640], serverId# [3:2824:4641], sessionId# [0:0:0] 2026-01-04T02:52:23.093888Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553213, Sender [3:2822:4639], Recipient [3:1270:3155]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 32 } 2026-01-04T02:52:23.094647Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [3:2827:4644], Recipient [3:1275:3158]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:52:23.094687Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:52:23.094732Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186233409553, clientId# [3:2826:4643], serverId# [3:2827:4644], sessionId# [0:0:0] 2026-01-04T02:52:23.094865Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553213, Sender [3:2825:4642], Recipient [3:1275:3158]: NKikimrTxDataShard.TEvGetCompactTableStats PathId { OwnerId: 72075186233409549 LocalId: 32 } |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> KqpPg::TempTablesDrop [GOOD] >> KqpPg::TempTablesWithCache >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] >> IncrementalBackup::MultipleIndexesIncrementalRestore [GOOD] >> 
IncrementalBackup::MultipleIncrementalBackupsWithIndexes >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] >> IncrementalBackup::BasicIndexIncrementalRestore [GOOD] >> IncrementalBackup::CdcVersionSync |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] >> KqpPg::TempTablesWithCache [GOOD] >> KqpPg::TableDeleteWhere+useSink >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/keyvalue/ut/unittest >> TKeyValueTest::TestLargeWriteAndDelete [GOOD] Test command err: Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [1:56:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:59:2057] recipient: [1:52:2097] Leader for TabletID 72057594037927937 is [1:58:2099] sender: [1:76:2057] recipient: [1:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [2:56:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:59:2057] recipient: [2:53:2097] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:76:2057] recipient: [2:14:2061] !Reboot 72057594037927937 (actor [2:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:78:2057] recipient: [2:39:2086] Leader for TabletID 72057594037927937 is [2:58:2099] sender: [2:81:2057] recipient: [2:80:2112] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:83:2057] recipient: [2:80:2112] !Reboot 72057594037927937 (actor [2:58:2099]) rebooted! !Reboot 72057594037927937 (actor [2:58:2099]) tablet resolver refreshed! new actor is[2:82:2113] Leader for TabletID 72057594037927937 is [2:82:2113] sender: [2:198:2057] recipient: [2:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [3:56:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:59:2057] recipient: [3:53:2097] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:76:2057] recipient: [3:14:2061] !Reboot 72057594037927937 (actor [3:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! 
Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:78:2057] recipient: [3:39:2086] Leader for TabletID 72057594037927937 is [3:58:2099] sender: [3:81:2057] recipient: [3:80:2112] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:83:2057] recipient: [3:80:2112] !Reboot 72057594037927937 (actor [3:58:2099]) rebooted! !Reboot 72057594037927937 (actor [3:58:2099]) tablet resolver refreshed! new actor is[3:82:2113] Leader for TabletID 72057594037927937 is [3:82:2113] sender: [3:198:2057] recipient: [3:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [4:56:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:59:2057] recipient: [4:53:2097] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:76:2057] recipient: [4:14:2061] !Reboot 72057594037927937 (actor [4:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:79:2057] recipient: [4:39:2086] Leader for TabletID 72057594037927937 is [4:58:2099] sender: [4:82:2057] recipient: [4:81:2112] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:84:2057] recipient: [4:81:2112] !Reboot 72057594037927937 (actor [4:58:2099]) rebooted! !Reboot 72057594037927937 (actor [4:58:2099]) tablet resolver refreshed! new actor is[4:83:2113] Leader for TabletID 72057594037927937 is [4:83:2113] sender: [4:199:2057] recipient: [4:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [5:56:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:59:2057] recipient: [5:53:2097] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:76:2057] recipient: [5:14:2061] !Reboot 72057594037927937 (actor [5:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:82:2057] recipient: [5:39:2086] Leader for TabletID 72057594037927937 is [5:58:2099] sender: [5:85:2057] recipient: [5:84:2115] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:87:2057] recipient: [5:84:2115] !Reboot 72057594037927937 (actor [5:58:2099]) rebooted! !Reboot 72057594037927937 (actor [5:58:2099]) tablet resolver refreshed! new actor is[5:86:2116] Leader for TabletID 72057594037927937 is [5:86:2116] sender: [5:202:2057] recipient: [5:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [6:56:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:59:2057] recipient: [6:52:2097] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:76:2057] recipient: [6:14:2061] !Reboot 72057594037927937 (actor [6:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:82:2057] recipient: [6:39:2086] Leader for TabletID 72057594037927937 is [6:58:2099] sender: [6:85:2057] recipient: [6:84:2115] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:87:2057] recipient: [6:84:2115] !Reboot 72057594037927937 (actor [6:58:2099]) rebooted! !Reboot 72057594037927937 (actor [6:58:2099]) tablet resolver refreshed! 
new actor is[6:86:2116] Leader for TabletID 72057594037927937 is [6:86:2116] sender: [6:202:2057] recipient: [6:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [7:56:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:59:2057] recipient: [7:53:2097] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:76:2057] recipient: [7:14:2061] !Reboot 72057594037927937 (actor [7:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:83:2057] recipient: [7:39:2086] Leader for TabletID 72057594037927937 is [7:58:2099] sender: [7:86:2057] recipient: [7:85:2115] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:88:2057] recipient: [7:85:2115] !Reboot 72057594037927937 (actor [7:58:2099]) rebooted! !Reboot 72057594037927937 (actor [7:58:2099]) tablet resolver refreshed! new actor is[7:87:2116] Leader for TabletID 72057594037927937 is [7:87:2116] sender: [7:203:2057] recipient: [7:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [8:56:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:59:2057] recipient: [8:53:2097] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:76:2057] recipient: [8:14:2061] !Reboot 72057594037927937 (actor [8:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:86:2057] recipient: [8:39:2086] Leader for TabletID 72057594037927937 is [8:58:2099] sender: [8:89:2057] recipient: [8:88:2118] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:91:2057] recipient: [8:88:2118] !Reboot 72057594037927937 (actor [8:58:2099]) rebooted! !Reboot 72057594037927937 (actor [8:58:2099]) tablet resolver refreshed! new actor is[8:90:2119] Leader for TabletID 72057594037927937 is [8:90:2119] sender: [8:206:2057] recipient: [8:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [9:56:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:59:2057] recipient: [9:53:2097] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:76:2057] recipient: [9:14:2061] !Reboot 72057594037927937 (actor [9:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:86:2057] recipient: [9:39:2086] Leader for TabletID 72057594037927937 is [9:58:2099] sender: [9:89:2057] recipient: [9:88:2118] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:91:2057] recipient: [9:88:2118] !Reboot 72057594037927937 (actor [9:58:2099]) rebooted! !Reboot 72057594037927937 (actor [9:58:2099]) tablet resolver refreshed! 
new actor is[9:90:2119] Leader for TabletID 72057594037927937 is [9:90:2119] sender: [9:206:2057] recipient: [9:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [10:56:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:59:2057] recipient: [10:54:2097] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:76:2057] recipient: [10:14:2061] !Reboot 72057594037927937 (actor [10:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:87:2057] recipient: [10:39:2086] Leader for TabletID 72057594037927937 is [10:58:2099] sender: [10:90:2057] recipient: [10:89:2118] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:92:2057] recipient: [10:89:2118] !Reboot 72057594037927937 (actor [10:58:2099]) rebooted! !Reboot 72057594037927937 (actor [10:58:2099]) tablet resolver refreshed! new actor is[10:91:2119] Leader for TabletID 72057594037927937 is [10:91:2119] sender: [10:207:2057] recipient: [10:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [11:56:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:59:2057] recipient: [11:53:2097] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:76:2057] recipient: [11:14:2061] !Reboot 72057594037927937 (actor [11:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:90:2057] recipient: [11:39:2086] Leader for TabletID 72057594037927937 is [11:58:2099] sender: [11:93:2057] recipient: [11:92:2121] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:95:2057] recipient: [11:92:2121] !Reboot 72057594037927937 (actor [11:58:2099]) rebooted! !Reboot 72057594037927937 (actor [11:58:2099]) tablet resolver refreshed! new actor is[11:94:2122] Leader for TabletID 72057594037927937 is [11:94:2122] sender: [11:210:2057] recipient: [11:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [12:56:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:59:2057] recipient: [12:52:2097] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:76:2057] recipient: [12:14:2061] !Reboot 72057594037927937 (actor [12:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:90:2057] recipient: [12:39:2086] Leader for TabletID 72057594037927937 is [12:58:2099] sender: [12:93:2057] recipient: [12:92:2121] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:95:2057] recipient: [12:92:2121] !Reboot 72057594037927937 (actor [12:58:2099]) rebooted! !Reboot 72057594037927937 (actor [12:58:2099]) tablet resolver refreshed! 
new actor is[12:94:2122] Leader for TabletID 72057594037927937 is [12:94:2122] sender: [12:210:2057] recipient: [12:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [13:56:2057] recipient: [13:52:2097] Leader for TabletID 72057594037927937 is [13:58:2099] sender: [13:59:2057] recipient: [13:52:2097] Leader for Tab ... t: [35:39:2086] Leader for TabletID 72057594037927937 is [35:58:2099] sender: [35:85:2057] recipient: [35:84:2115] Leader for TabletID 72057594037927937 is [35:86:2116] sender: [35:87:2057] recipient: [35:84:2115] !Reboot 72057594037927937 (actor [35:58:2099]) rebooted! !Reboot 72057594037927937 (actor [35:58:2099]) tablet resolver refreshed! new actor is[35:86:2116] Leader for TabletID 72057594037927937 is [35:86:2116] sender: [35:202:2057] recipient: [35:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [36:56:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:59:2057] recipient: [36:53:2097] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:76:2057] recipient: [36:14:2061] !Reboot 72057594037927937 (actor [36:58:2099]) on event NKikimr::TEvKeyValue::TEvNotify ! Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:83:2057] recipient: [36:39:2086] Leader for TabletID 72057594037927937 is [36:58:2099] sender: [36:86:2057] recipient: [36:85:2115] Leader for TabletID 72057594037927937 is [36:87:2116] sender: [36:88:2057] recipient: [36:85:2115] !Reboot 72057594037927937 (actor [36:58:2099]) rebooted! !Reboot 72057594037927937 (actor [36:58:2099]) tablet resolver refreshed! new actor is[36:87:2116] Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [37:56:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:59:2057] recipient: [37:53:2097] Leader for TabletID 72057594037927937 is [37:58:2099] sender: [37:76:2057] recipient: [37:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [38:56:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:59:2057] recipient: [38:53:2097] Leader for TabletID 72057594037927937 is [38:58:2099] sender: [38:76:2057] recipient: [38:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [39:56:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:59:2057] recipient: [39:54:2097] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:76:2057] recipient: [39:14:2061] !Reboot 72057594037927937 (actor [39:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:78:2057] recipient: [39:39:2086] Leader for TabletID 72057594037927937 is [39:58:2099] sender: [39:81:2057] recipient: [39:80:2112] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:83:2057] recipient: [39:80:2112] !Reboot 72057594037927937 (actor [39:58:2099]) rebooted! !Reboot 72057594037927937 (actor [39:58:2099]) tablet resolver refreshed! 
new actor is[39:82:2113] Leader for TabletID 72057594037927937 is [39:82:2113] sender: [39:198:2057] recipient: [39:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [40:56:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:59:2057] recipient: [40:53:2097] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:76:2057] recipient: [40:14:2061] !Reboot 72057594037927937 (actor [40:58:2099]) on event NKikimr::TEvKeyValue::TEvAcquireLock ! Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:78:2057] recipient: [40:39:2086] Leader for TabletID 72057594037927937 is [40:58:2099] sender: [40:81:2057] recipient: [40:80:2112] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:83:2057] recipient: [40:80:2112] !Reboot 72057594037927937 (actor [40:58:2099]) rebooted! !Reboot 72057594037927937 (actor [40:58:2099]) tablet resolver refreshed! new actor is[40:82:2113] Leader for TabletID 72057594037927937 is [40:82:2113] sender: [40:198:2057] recipient: [40:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [41:56:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:59:2057] recipient: [41:53:2097] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:76:2057] recipient: [41:14:2061] !Reboot 72057594037927937 (actor [41:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:79:2057] recipient: [41:39:2086] Leader for TabletID 72057594037927937 is [41:58:2099] sender: [41:82:2057] recipient: [41:81:2112] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:84:2057] recipient: [41:81:2112] !Reboot 72057594037927937 (actor [41:58:2099]) rebooted! !Reboot 72057594037927937 (actor [41:58:2099]) tablet resolver refreshed! new actor is[41:83:2113] Leader for TabletID 72057594037927937 is [41:83:2113] sender: [41:199:2057] recipient: [41:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:52:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [42:56:2057] recipient: [42:52:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:59:2057] recipient: [42:52:2097] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:76:2057] recipient: [42:14:2061] !Reboot 72057594037927937 (actor [42:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:82:2057] recipient: [42:39:2086] Leader for TabletID 72057594037927937 is [42:58:2099] sender: [42:85:2057] recipient: [42:84:2115] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:87:2057] recipient: [42:84:2115] !Reboot 72057594037927937 (actor [42:58:2099]) rebooted! !Reboot 72057594037927937 (actor [42:58:2099]) tablet resolver refreshed! 
new actor is[42:86:2116] Leader for TabletID 72057594037927937 is [42:86:2116] sender: [42:202:2057] recipient: [42:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [43:56:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:59:2057] recipient: [43:53:2097] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:76:2057] recipient: [43:14:2061] !Reboot 72057594037927937 (actor [43:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:82:2057] recipient: [43:39:2086] Leader for TabletID 72057594037927937 is [43:58:2099] sender: [43:85:2057] recipient: [43:84:2115] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:87:2057] recipient: [43:84:2115] !Reboot 72057594037927937 (actor [43:58:2099]) rebooted! !Reboot 72057594037927937 (actor [43:58:2099]) tablet resolver refreshed! new actor is[43:86:2116] Leader for TabletID 72057594037927937 is [43:86:2116] sender: [43:202:2057] recipient: [43:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [44:56:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:59:2057] recipient: [44:53:2097] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:76:2057] recipient: [44:14:2061] !Reboot 72057594037927937 (actor [44:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:83:2057] recipient: [44:39:2086] Leader for TabletID 72057594037927937 is [44:58:2099] sender: [44:86:2057] recipient: [44:85:2115] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:88:2057] recipient: [44:85:2115] !Reboot 72057594037927937 (actor [44:58:2099]) rebooted! !Reboot 72057594037927937 (actor [44:58:2099]) tablet resolver refreshed! new actor is[44:87:2116] Leader for TabletID 72057594037927937 is [44:87:2116] sender: [44:203:2057] recipient: [44:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [45:56:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:59:2057] recipient: [45:53:2097] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:76:2057] recipient: [45:14:2061] !Reboot 72057594037927937 (actor [45:58:2099]) on event NKikimr::TEvTabletPipe::TEvServerConnected ! Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:86:2057] recipient: [45:39:2086] Leader for TabletID 72057594037927937 is [45:58:2099] sender: [45:89:2057] recipient: [45:88:2118] Leader for TabletID 72057594037927937 is [45:90:2119] sender: [45:91:2057] recipient: [45:88:2118] !Reboot 72057594037927937 (actor [45:58:2099]) rebooted! !Reboot 72057594037927937 (actor [45:58:2099]) tablet resolver refreshed! 
new actor is[45:90:2119] Leader for TabletID 72057594037927937 is [45:90:2119] sender: [45:206:2057] recipient: [45:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [46:56:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:59:2057] recipient: [46:53:2097] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:76:2057] recipient: [46:14:2061] !Reboot 72057594037927937 (actor [46:58:2099]) on event NKikimr::TEvKeyValue::TEvExecuteTransaction ! Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:86:2057] recipient: [46:39:2086] Leader for TabletID 72057594037927937 is [46:58:2099] sender: [46:89:2057] recipient: [46:88:2118] Leader for TabletID 72057594037927937 is [46:90:2119] sender: [46:91:2057] recipient: [46:88:2118] !Reboot 72057594037927937 (actor [46:58:2099]) rebooted! !Reboot 72057594037927937 (actor [46:58:2099]) tablet resolver refreshed! new actor is[46:90:2119] Leader for TabletID 72057594037927937 is [46:90:2119] sender: [46:206:2057] recipient: [46:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [47:56:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:59:2057] recipient: [47:53:2097] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:76:2057] recipient: [47:14:2061] !Reboot 72057594037927937 (actor [47:58:2099]) on event NKikimr::TEvKeyValue::TEvIntermediate ! Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:87:2057] recipient: [47:39:2086] Leader for TabletID 72057594037927937 is [47:58:2099] sender: [47:90:2057] recipient: [47:89:2118] Leader for TabletID 72057594037927937 is [47:91:2119] sender: [47:92:2057] recipient: [47:89:2118] !Reboot 72057594037927937 (actor [47:58:2099]) rebooted! !Reboot 72057594037927937 (actor [47:58:2099]) tablet resolver refreshed! new actor is[47:91:2119] Leader for TabletID 72057594037927937 is [47:91:2119] sender: [47:207:2057] recipient: [47:14:2061] Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:53:2097] IGNORE Leader for TabletID 72057594037927937 is [0:0:0] sender: [48:56:2057] recipient: [48:53:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:59:2057] recipient: [48:53:2097] Leader for TabletID 72057594037927937 is [48:58:2099] sender: [48:76:2057] recipient: [48:14:2061] |96.6%| [TM] {BAZEL_UPLOAD} ydb/core/keyvalue/ut/unittest |96.6%| [TA] $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {RESULT} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/keyvalue/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_deduplication_id[tables_format_v1] [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> IncrementalBackup::VerifyIncrementalBackupTableAttributes [GOOD] >> IncrementalBackup::ResetVsUpsertMissingColumnsTest |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> IncrementalBackup::IncrementalBackupWithCoveringIndex [GOOD] >> IncrementalBackup::IncrementalBackupMultipleIndexes >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_disk.py::TestSafeDiskBreak::test_erase_method [GOOD] |96.6%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> KqpPg::TableDeleteAllData-useSink [GOOD] >> KqpPg::PgUpdateCompoundKey+useSink >> TCdcStreamTests::MeteringDedicated [GOOD] >> TCdcStreamTests::ChangeOwner |96.6%| [TM] {asan, default-linux-x86_64, release} ydb/services/ext_index/ut/unittest |96.6%| [TM] {BAZEL_UPLOAD} ydb/services/ext_index/ut/unittest >> TCdcStreamTests::ChangeOwner [GOOD] >> TCdcStreamTests::DropIndexWithStream >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] |96.6%| [TA] $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {RESULT} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/ext_index/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] >> IncrementalBackup::CdcVersionSync [GOOD] >> IncrementalBackup::ComplexBackupSequenceWithDataVerification >> IncrementalBackup::ResetVsUpsertMissingColumnsTest [GOOD] >> TCdcStreamTests::DropIndexWithStream [GOOD] >> TCdcStreamTests::DropTableWithIndexWithStream >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ResetVsUpsertMissingColumnsTest [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:51:09.545296Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:09.656928Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:09.657431Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:09.657499Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:09.879165Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:09.879302Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:09.938349Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495067284998 != 1767495067285001 2026-01-04T02:51:09.951954Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:09.992310Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:10.072343Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:10.365391Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:831:2726], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:10.365443Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:10.365475Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-04T02:51:10.365571Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [1:829:2724], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2026-01-04T02:51:10.365598Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-04T02:51:10.466789Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-01-04T02:51:10.467025Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.467211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-04T02:51:10.467256Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-04T02:51:10.467440Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:51:10.467518Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:10.467614Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T02:51:10.468320Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-04T02:51:10.468504Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:51:10.468560Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T02:51:10.468596Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-04T02:51:10.468804Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.468838Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.468907Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.468956Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-04T02:51:10.468987Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:51:10.469016Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:51:10.469122Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T02:51:10.469530Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T02:51:10.469575Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-04T02:51:10.469680Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.469708Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.469763Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.469818Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:51:10.469859Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:51:10.469923Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T02:51:10.470257Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T02:51:10.470288Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-04T02:51:10.470373Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.470397Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.470438Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.470483Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.470526Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-01-04T02:51:10.470551Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T02:51:10.470587Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:51:10.474029Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:51:10.474448Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T02:51:10.474494Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:51:10.474612Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:51:10.474830Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2026-01-04T02:51:10.475185Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:837:2732], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:10.475243Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:10.475287Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-04T02:51:10.475407Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [1:829:2724], Recipient [1:399:2398]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2026-01-04T02:51:10.475451Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-04T02:51:10.475520Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2026-01-04T02:51:10.475554Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/ ... er action and tx pending commits 2026-01-04T02:52:37.632689Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:52:37.632719Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-04T02:52:37.655309Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:52:37.655380Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:52:37.655415Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:52:37.655450Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:52:37.655481Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-04T02:52:37.689981Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:52:37.690046Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:52:37.690081Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:52:37.690116Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:52:37.690148Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-04T02:52:37.712569Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:52:37.712637Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:52:37.712681Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:52:37.712717Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:52:37.712761Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: 
[72075186224037893][Partition][0][StateIdle] Try persist 2026-01-04T02:52:37.733459Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:52:37.733545Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:52:37.733580Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:52:37.733617Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:52:37.733649Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-04T02:52:37.754965Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [9:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:52:37.755042Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:52:37.755143Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [9:400:2399], Recipient [9:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:52:37.755177Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:52:37.790248Z node 9 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037893][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T02:52:37.790406Z node 9 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037893][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:52:37.790450Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:52:37.790482Z node 9 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037893][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:52:37.790516Z node 9 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037893][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:52:37.790546Z node 9 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037893][Partition][0][StateIdle] Try persist 2026-01-04T02:52:37.811365Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [9:1777:3368], Recipient [9:400:2399]: NKikimrSchemeOp.TDescribePath Path: "/Root/.backups/collections/TestCollection/19700101000008Z_incremental/Table" Options { ShowPrivateTable: true } 2026-01-04T02:52:37.811468Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme Found incremental backup table at: /Root/.backups/collections/TestCollection/19700101000008Z_incremental/Table 2026-01-04T02:52:37.813797Z node 9 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122945, Sender [9:1779:3370], Recipient [9:400:2399]: NKikimrSchemeOp.TDescribePath Path: "/Root/.backups/collections/TestCollection/19700101000008Z_incremental/Table" Options { ShowPrivateTable: true } 2026-01-04T02:52:37.814529Z node 9 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5260: StateWork, processing event TEvSchemeShard::TEvDescribeScheme Found attribute: __incremental_backup = {} ... waiting for SysViewsRoster update finished 2026-01-04T02:52:44.287174Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:52:44.298179Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:52:44.301217Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:299:2343], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:52:44.301487Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:52:44.301602Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:52:44.688841Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:52:44.689053Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:52:44.738035Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:34:2081] 1767495158846467 != 1767495158846470 2026-01-04T02:52:44.743822Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:52:44.798476Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:52:44.876572Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:52:45.273073Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:52:45.288735Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:52:45.407174Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:52:45.737415Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:951:2809], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:52:45.737601Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:962:2814], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:52:45.737734Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:52:45.738905Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:965:2817], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:52:45.739002Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:52:45.746672Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:52:45.895743Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:966:2818], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:52:45.936971Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:1023:2856] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_backup/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBackgroundCompactionTest::SchemeshardShouldHandleCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:50:58.156073Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:50:58.156164Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:58.156196Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:50:58.156227Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:50:58.156260Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:50:58.156286Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:50:58.156335Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:58.156403Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, 
IsManualStartup# false 2026-01-04T02:50:58.157311Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:50:58.158187Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:50:58.254293Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:50:58.254345Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:58.267913Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:50:58.268389Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:50:58.268587Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:50:58.275868Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:50:58.276074Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:50:58.276798Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:50:58.277061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:50:58.280036Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:58.280215Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:50:58.281424Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:50:58.281514Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:58.281602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:50:58.281673Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:50:58.281824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:50:58.282029Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:50:58.463191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.464249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at 
schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.464390Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.464486Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.464567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.464655Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.464728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.464814Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.464911Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" 
Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.464982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.465049Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.465120Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.465211Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.465336Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.465412Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
cipient [3:729:2715]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2026-01-04T02:52:47.702262Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-01-04T02:52:47.702359Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186233409546 outdated step 5000038 last cleanup 0 2026-01-04T02:52:47.702435Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186233409546 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:52:47.702469Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186233409546 2026-01-04T02:52:47.702502Z node 3 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186233409546 has no attached operations 2026-01-04T02:52:47.702538Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186233409546 2026-01-04T02:52:47.702693Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:729:2715]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-04T02:52:47.702964Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409546, FollowerId 0, tableId 38 2026-01-04T02:52:47.704584Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [3:729:2715], Recipient [3:127:2152]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409546 TableLocalId: 38 Generation: 2 Round: 7 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 225 Memory: 124368 Storage: 14156 GroupWriteThroughput { GroupID: 0 Channel: 0 Throughput: 261 } GroupWriteThroughput { GroupID: 0 Channel: 1 Throughput: 444 } GroupWriteIops { GroupID: 0 Channel: 0 Iops: 1 } } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 138 TableOwnerId: 72057594046678944 FollowerId 2026-01-04T02:52:47.704639Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-04T02:52:47.704691Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0225 2026-01-04T02:52:47.704812Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 1 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 
RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 80 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-04T02:52:47.704854Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-04T02:52:47.705957Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435080, Sender [3:1481:3423], Recipient [3:729:2715]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvBuildTableStatsResult 2026-01-04T02:52:47.748616Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-04T02:52:47.748707Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-04T02:52:47.748740Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2026-01-04T02:52:47.748817Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 1 2026-01-04T02:52:47.748866Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2026-01-04T02:52:47.748978Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 13940 row count 100 2026-01-04T02:52:47.749058Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2026-01-04T02:52:47.749092Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2026-01-04T02:52:47.749175Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__background_compaction.cpp:219: [BackgroundCompaction] [Update] Updated shard# 72057594046678944:1 with partCount# 1, rowCount# 100, searchHeight# 1, lastFullCompaction# 1970-01-01T00:01:20.000000Z at schemeshard 72057594046678944 2026-01-04T02:52:47.749246Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409546 by load, its table already has 1 out of 1 partitions 2026-01-04T02:52:47.749331Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:52:47.760285Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-04T02:52:47.760391Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-04T02:52:47.760438Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-04T02:52:48.028643Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:127:2152]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:52:48.028765Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:52:48.028919Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:127:2152], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:52:48.028959Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:52:48.398525Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:52:48.398600Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:52:48.398683Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:127:2152], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:52:48.398725Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:52:48.691272Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:52:48.691346Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:52:48.691430Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:127:2152], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:52:48.691461Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:52:49.066578Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:52:49.066653Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:52:49.066733Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:127:2152], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:52:49.066766Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:52:49.438343Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:52:49.438426Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:52:49.438511Z node 3 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:127:2152], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:52:49.438544Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:52:49.508839Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:729:2715]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-04T02:52:49.845069Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:52:49.845143Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:52:49.845220Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:127:2152], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:52:49.845251Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest >> PgCatalog::CheckSetConfig [GOOD] >> PgCatalog::PgDatabase+useSink >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] >> KqpPg::PgUpdateCompoundKey+useSink [GOOD] >> KqpPg::PgUpdateCompoundKey-useSink ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> TCdcStreamTests::DropTableWithIndexWithStream [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:50:02.900298Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:50:02.900417Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:02.900471Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 
0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:50:02.900522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:50:02.900564Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:50:02.900598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:50:02.900657Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:02.900759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:50:02.901701Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:50:02.902056Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:50:02.992895Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:50:02.992985Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:03.008906Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:50:03.009465Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:50:03.011935Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:50:03.028072Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:50:03.028659Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:50:03.031202Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:50:03.035509Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:50:03.045526Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:03.045749Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:50:03.051889Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:50:03.051980Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:03.052042Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:50:03.052102Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: 
unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:50:03.052236Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:50:03.053147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:50:03.254184Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.256037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.256201Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.256303Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.256386Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.256514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.256598Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: 
"auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.256675Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.256769Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.256842Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.256915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.257005Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.257110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:03.257199Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 
2026-01-04T02:50:03.257288Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 1714: TOperation IsReadyToDone TxId: 103 ready parts: 4/5 2026-01-04T02:52:51.084834Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:2 progress is 4/5 2026-01-04T02:52:51.084920Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 4/5 2026-01-04T02:52:51.084995Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: false 2026-01-04T02:52:51.085422Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:52:51.085487Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 40 Version: 18446744073709551615 PathOwnerId: 72057594046678944, cookie: 103 2026-01-04T02:52:51.085526Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 103 2026-01-04T02:52:51.085556Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046678944, txId: 103, pathId: [OwnerId: 72057594046678944, LocalPathId: 40], version: 18446744073709551615 2026-01-04T02:52:51.085587Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 4 2026-01-04T02:52:51.085671Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 4/5, is published: true 2026-01-04T02:52:51.086814Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 103:0, at schemeshard: 72057594046678944 2026-01-04T02:52:51.086864Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_drop_table.cpp:415: TDropTable TProposedDeletePart operationId: 103:0 ProgressState, at schemeshard: 72057594046678944 2026-01-04T02:52:51.087128Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove table for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 4 2026-01-04T02:52:51.087226Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 5/5 2026-01-04T02:52:51.087252Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2026-01-04T02:52:51.087290Z node 20 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#103:0 progress is 5/5 2026-01-04T02:52:51.087313Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2026-01-04T02:52:51.087343Z 
node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 103, ready parts: 5/5, is published: true 2026-01-04T02:52:51.087436Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [20:782:2749] message: TxId: 103 2026-01-04T02:52:51.087545Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 103 ready parts: 5/5 2026-01-04T02:52:51.087627Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:0 2026-01-04T02:52:51.087706Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:0 2026-01-04T02:52:51.087832Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 3 2026-01-04T02:52:51.087891Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:1 2026-01-04T02:52:51.087911Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:1 2026-01-04T02:52:51.087936Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 39] was 2 2026-01-04T02:52:51.087954Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:2 2026-01-04T02:52:51.087971Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:2 2026-01-04T02:52:51.088001Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 3 2026-01-04T02:52:51.088032Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:3 2026-01-04T02:52:51.088058Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:3 2026-01-04T02:52:51.088087Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 41] was 2 2026-01-04T02:52:51.088109Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 103:4 2026-01-04T02:52:51.088127Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 103:4 2026-01-04T02:52:51.088170Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 42] was 1 2026-01-04T02:52:51.088920Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046678944 2026-01-04T02:52:51.088992Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 42], at schemeshard: 72057594046678944 2026-01-04T02:52:51.089094Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, 
LocalPathId: 41] was 1 2026-01-04T02:52:51.089171Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046678944, LocalPathId: 41], at schemeshard: 72057594046678944 2026-01-04T02:52:51.089206Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046678944, LocalPathId: 40] was 2 2026-01-04T02:52:51.091765Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:52:51.091835Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:52:51.091870Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:52:51.091903Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:52:51.091942Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:52:51.095010Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 103 2026-01-04T02:52:51.095229Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: tests -- TTxNotificationSubscriber for txId 103: got EvNotifyTxCompletionResult 2026-01-04T02:52:51.095295Z node 20 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 103: satisfy waiter [20:1125:3027] 2026-01-04T02:52:51.095628Z node 20 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 2 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046678944 TestWaitNotification: OK eventTxId 103 2026-01-04T02:52:51.096255Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:52:51.096600Z node 20 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream" took 394us result status StatusPathDoesNotExist 2026-01-04T02:52:51.096822Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/Table/Index/indexImplTable/Stream" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: 
EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 2026-01-04T02:52:51.097797Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: true }, at schemeshard: 72057594046678944 2026-01-04T02:52:51.098174Z node 20 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" took 391us result status StatusPathDoesNotExist 2026-01-04T02:52:51.098396Z node 20 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusPathDoesNotExist Reason: "Check failed: path: \'/MyRoot/Table/Index/indexImplTable/Stream/streamImpl\', error: path hasn\'t been resolved, nearest resolved path: \'/MyRoot/Table/Index/indexImplTable\' (id: [OwnerId: 72057594046678944, LocalPathId: 40])" Path: "/MyRoot/Table/Index/indexImplTable/Stream/streamImpl" PathId: 18446744073709551615 LastExistedPrefixPath: "/MyRoot" LastExistedPrefixPathId: 1 LastExistedPrefixDescription { Self { Name: "MyRoot" PathId: 1 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 1 CreateStep: 5000037 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathSubType: EPathSubTypeEmpty ChildrenExist: true } } PathOwnerId: 18446744073709551615, at schemeshard: 72057594046678944 |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_cdc_stream/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] |96.7%| [TA] $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] |96.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... results_accumulator.log} |96.7%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_cdc_stream/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_q_twice[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v0] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] >> IncrementalBackup::MultipleIncrementalBackupsWithIndexes [GOOD] >> IncrementalBackup::MultipleTablesWithIndexesIncrementalRestore >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1_UNIQUE_SYNC-pk_types29-all_types29-index29-Uint64-UNIQUE-SYNC] [GOOD] >> PgCatalog::PgDatabase+useSink [GOOD] >> PgCatalog::PgDatabase-useSink >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test >> test_tablet.py::TestMassiveKills::test_tablets_are_ok_after_many_kills [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] >> KqpPg::PgUpdateCompoundKey-useSink [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] >> IncrementalBackup::IncrementalBackupMultipleIndexes [GOOD] >> 
IncrementalBackup::IndexDataVerificationIncrementalRestore >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::PgUpdateCompoundKey-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 14066, MsgBus: 1532 2026-01-04T02:48:51.739541Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332925098193735:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:51.739663Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:51.829865Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:51.998377Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:51.998478Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:52.027193Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:52.111274Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:52.126415Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332925098193692:2081] 1767494931737617 != 1767494931737620 2026-01-04T02:48:52.143597Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:52.317624Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:52.317645Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:52.317660Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:52.317755Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:52.757030Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:52.954628Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:52.978497Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 16 2026-01-04T02:48:55.137705Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 
281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:55.265141Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '0'::int2, ARRAY ['false'::bool, 'false'::bool] ); 2026-01-04T02:48:55.292952Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332942278063875:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.293095Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.293280Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332942278063887:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.293328Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332942278063888:2338], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.293424Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.296756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715661:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:55.306948Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332942278063891:2339], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715661 completed, doublechecking } 2026-01-04T02:48:55.433727Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332942278063942:2607] txid# 281474976715662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } --!syntax_pg INSERT INTO Pg1000_b (key, value) VALUES ( '1'::int2, ARRAY ['true'::bool, 'true'::bool] ); 18 2026-01-04T02:48:55.955916Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::"char", '0'::"char"] ); --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::"char", '1'::"char"] ); --!syntax_pg INSERT INTO Pg1002_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::"char", '2'::"char"] ); 21 2026-01-04T02:48:56.504987Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715671:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:56.555251Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int2, '0'::int2] ); --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int2, '1'::int2] ); 2026-01-04T02:48:56.740573Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332925098193735:2075];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:56.740644Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; --!syntax_pg INSERT INTO Pg1005_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int2, '2'::int2] ); 23 2026-01-04T02:48:57.040301Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:57.097967Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int4, '0'::int4] ); --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int4, '1'::int4] ); --!syntax_pg INSERT INTO Pg1007_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int4, '2'::int4] ); 20 2026-01-04T02:48:57.562802Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first 
called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:57.612831Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '0'::int2, ARRAY ['0'::int8, '0'::int8] ); --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '1'::int2, ARRAY ['1'::int8, '1'::int8] ); --!syntax_pg INSERT INTO Pg1016_b (key, value) VALUES ( '2'::int2, ARRAY ['2'::int8, '2'::int8] ); 700 2026-01-04T02:48:58.102786Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715689:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:58.163418Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '0'::int2, ARRAY ['0.5'::float4, '0.5'::float4] ); --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '1'::int2, ARRAY ['1.5'::float4, '1.5'::float4] ); --!syntax_pg INSERT INTO Pg1021_b (key, value) VALUES ( '2'::int2, ARRAY ['2.5'::float4, '2.5'::float4] ); 701 2026-01-04T02:48:58.661736Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715695:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:58.721674Z node 1 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill --!syntax_pg INSERT INTO Pg1022_b (key, value) VALUES ( '0'::int2, ARRAY ['0.5'::float8, '0.5'::float8] ); --!syntax_pg IN ... node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:52:49.984518Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [9:7591333950427320539:2344], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:52:50.084689Z node 9 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [9:7591333954722287886:2604] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:52:50.378345Z node 9 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [9:7591333954722287930:2356], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Cannot update primary key column: key1
:1:1: Error: Cannot update primary key column: key2 2026-01-04T02:52:50.378960Z node 9 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=9&id=N2U3OTQ4MmEtOWIzMTc5NjUtNmI2M2IzY2QtZGI4N2NjYmI=, ActorId: [9:7591333954722287923:2352], ActorState: ExecuteState, LegacyTraceId: 01ke3eqy92854kh80x74gn4jvv, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot update primary key column: key1" end_position { row: 1 column: 1 } severity: 1 } issues { position { row: 1 column: 1 } message: "Cannot update primary key column: key2" end_position { row: 1 column: 1 } severity: 1 } } } tx_id# trace_id# 2026-01-04T02:52:50.399201Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 9506, MsgBus: 7190 2026-01-04T02:52:52.978478Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591333963168963387:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:52:52.978945Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:52:53.029905Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:52:53.167166Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:52:53.169984Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7591333963168963261:2081] 1767495172939820 != 1767495172939823 2026-01-04T02:52:53.284690Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:52:53.284816Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:52:53.285940Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:52:53.289832Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:52:53.383714Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:52:53.383739Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:52:53.383749Z node 10 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:52:53.383863Z node 10 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable 
configuration 2026-01-04T02:52:53.976481Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:52:54.292673Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:52:54.303245Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:52:57.972279Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[10:7591333963168963387:2152];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:52:57.972399Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:52:59.715653Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7591333993233735231:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:52:59.715808Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:52:59.716407Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7591333993233735241:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:52:59.716477Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:52:59.789188Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:52:59.873464Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7591333993233735337:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:52:59.873626Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:52:59.874103Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7591333993233735342:2343], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:52:59.874176Z node 10 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [10:7591333993233735343:2344], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:52:59.874325Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:52:59.881894Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:52:59.900496Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [10:7591333993233735346:2345], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:52:59.996168Z node 10 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [10:7591333993233735399:2604] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:53:00.603439Z node 10 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [10:7591333997528702762:2366], status: GENERIC_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiWriteTable!
:1:1: Error: Cannot update primary key column: key1
:1:1: Error: Cannot update primary key column: key2 2026-01-04T02:53:00.604557Z node 10 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=10&id=OGM3Y2RmNDgtZjYzMjMxMTItZmZlOTRkZi03YjE3NDRhZQ==, ActorId: [10:7591333997528702755:2362], ActorState: ExecuteState, LegacyTraceId: 01ke3er88j5m56021ayp4xmm0d, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiWriteTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot update primary key column: key1" end_position { row: 1 column: 1 } severity: 1 } issues { position { row: 1 column: 1 } message: "Cannot update primary key column: key2" end_position { row: 1 column: 1 } severity: 1 } } } tx_id# trace_id# 2026-01-04T02:53:00.616975Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> test_select.py::TestPgSelect::test_select[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_partial_delete_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] >> PgCatalog::PgDatabase-useSink [GOOD] >> PgCatalog::PgRoles >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] >> Secret::Simple [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Simple [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T02:50:49.260847Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:49.386525Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:50:49.387001Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:50:49.387076Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:50:49.665740Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:49.665914Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:49.726542Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495046784509 != 1767495046784512 2026-01-04T02:50:49.740643Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:49.783940Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:49.869221Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:50:50.343610Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:50.343663Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:50.343736Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:50.346424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:50.346893Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:50.361163Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2026-01-04T02:51:02.465531Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1018:2872], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:02.465791Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:02.467506Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1045:2879], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:02.467599Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:02.477114Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:02.699193Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1137:2952], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:02.699333Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:02.699869Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1141:2956], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:02.699955Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:02.700032Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1144:2959], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:02.707523Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:02.841961Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1146:2961], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:51:03.424373Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1240:3026] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:04.080814Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:51:04.403862Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:04.971843Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:05.576151Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:51:05.973686Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:51:07.199962Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2026-01-04T02:51:07.704313Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 2026-01-04T02:51:10.687096Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:51:10.687156Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT 
OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-01-04T02:52:08.848367Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715736:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:52:09.946877Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715743:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:52:11.934091Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715754:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:52:12.524487Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715757:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 
snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 E0104 02:53:08.047353703 858259 backup_poller.cc:113] run_poller: UNKNOWN:Timer list shutdown {created_time:"2026-01-04T02:53:08.047081005+00:00"} |96.7%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_multi_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] >> Secret::SimpleQueryService [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1_UNIQUE_SYNC-pk_types29-all_types29-index29-Uint64-UNIQUE-SYNC] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::SimpleQueryService [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:50:53.531006Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:53.622304Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:50:53.622601Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:50:53.622651Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:50:53.873467Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:53.873571Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:53.934696Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495050944156 != 1767495050944159 2026-01-04T02:50:53.950323Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:53.994495Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:54.080188Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:50:54.507082Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:54.507130Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:54.507164Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:54.509044Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:54.509541Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:54.522735Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: Initialization finished REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 2026-01-04T02:51:06.219679Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1011:2866], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:06.219843Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1022:2871], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:06.219939Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:06.220975Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1027:2876], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:06.221051Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:06.225909Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:06.246503Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1025:2874], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-04T02:51:06.293333Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1078:2908] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:06.493488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:07.145907Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:51:07.432568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:08.113356Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:08.761041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:51:09.138305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:51:10.132190Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) snapshot->GetSecrets().size() incorrect: SECRETS:ACCESS: 2026-01-04T02:51:10.540478Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH 
value = `100`;RESULT=;EXPECTATION=1 2026-01-04T02:51:13.120095Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:51:13.120154Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:100;ACCESS: REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:100;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1:100;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=UPSERT OBJECT secret1_1 (TYPE SECRET) WITH value = `200`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-01-04T02:52:11.343330Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715736:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:52:12.736374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715743:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:52:14.820381Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715754:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:52:15.415722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715757:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect (zero expects): SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS:root@builtin:secret1:test@test1; FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE 
SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 snapshot->GetAccess().size() incorrect: SECRETS:root@builtin:secret1:abcde;root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=;EXPECTATION=1 snapshot->GetSecrets().size() incorrect: SECRETS:root@builtin:secret1_1:200;ACCESS: FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=1;WAITING=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT * FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |96.7%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] >> PgCatalog::PgRoles [GOOD] >> PgCatalog::PgTables |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v0] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] >> test_select.py::TestPgSelect::test_select[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] >> KqpPg::TableDeleteWhere+useSink [GOOD] >> KqpPg::TableDeleteWhere-useSink >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] >> test_select.py::TestPgSelect::test_select[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] >> IncrementalBackup::MultipleTablesWithIndexesIncrementalRestore [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] >> KqpSinkTx::TIsolationSettingTest+IsOlap+UsePragma [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::MultipleTablesWithIndexesIncrementalRestore [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:51:09.306945Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:09.447492Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:09.448107Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:09.448170Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:09.761695Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:09.761878Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:09.846468Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495067038791 != 1767495067038794 2026-01-04T02:51:09.863541Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:09.910676Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:10.000141Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:10.283789Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:831:2726], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:10.283868Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:10.283905Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-04T02:51:10.284025Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [1:829:2724], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2026-01-04T02:51:10.284063Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-04T02:51:10.388719Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-01-04T02:51:10.388965Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.389193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-04T02:51:10.389239Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-04T02:51:10.389459Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:51:10.389565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:10.389672Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T02:51:10.390456Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-04T02:51:10.390624Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:51:10.390675Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T02:51:10.390713Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-04T02:51:10.390921Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.390966Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.391044Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.391100Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-04T02:51:10.391145Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:51:10.391181Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:51:10.391290Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T02:51:10.391756Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T02:51:10.391811Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-04T02:51:10.391934Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.391967Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.392016Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.392057Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:51:10.392099Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:51:10.392176Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T02:51:10.392532Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T02:51:10.392562Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-04T02:51:10.392643Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.392680Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.392728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.392773Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.392813Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-01-04T02:51:10.392846Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T02:51:10.392884Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:51:10.396925Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:51:10.397341Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T02:51:10.397413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:51:10.397525Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:51:10.397763Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2026-01-04T02:51:10.398178Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:837:2732], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:10.398227Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:10.398265Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-04T02:51:10.398386Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [1:829:2724], Recipient [1:399:2398]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2026-01-04T02:51:10.398418Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-04T02:51:10.398499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2026-01-04T02:51:10.398542Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/ ... ck for operation: 281474976715672 2026-01-04T02:53:15.106899Z node 10 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:135: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 281474976715672 2026-01-04T02:53:15.212927Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:15.213015Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:15.213128Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:400:2399], Recipient [10:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:15.213166Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:15.394713Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:15.394805Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:15.394922Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:400:2399], Recipient [10:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:15.394962Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:15.503209Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037904, for tableId 49, but no stats yet 2026-01-04T02:53:15.503479Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037909, for tableId 46, but no stats yet 2026-01-04T02:53:15.503643Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037910, for tableId 43, but no stats yet 2026-01-04T02:53:15.503815Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037911, for tableId 40, but no stats yet 2026-01-04T02:53:15.587270Z node 10 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:15.587358Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:15.587491Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:400:2399], Recipient [10:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:15.587528Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:15.670687Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037916, for tableId 54, but no stats yet 2026-01-04T02:53:15.670945Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037917, for tableId 51, but no stats yet 2026-01-04T02:53:15.671140Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037918, for tableId 53, but no stats yet 2026-01-04T02:53:15.671318Z node 10 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037919, for tableId 56, but no stats yet 2026-01-04T02:53:15.696298Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435093, Sender [0:0:0], Recipient [10:400:2399]: NKikimr::NSchemeShard::TEvPrivate::TEvConsoleConfigsTimeout 2026-01-04T02:53:15.696374Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:53:15.696453Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:53:15.804894Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:15.804975Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:15.805079Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:400:2399], Recipient [10:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:15.805123Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:15.828811Z node 10 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037888 state 5 2026-01-04T02:53:15.829031Z node 10 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037889 state 5 2026-01-04T02:53:15.922812Z node 10 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037890 state 5 2026-01-04T02:53:15.923002Z node 10 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037891 state 5 2026-01-04T02:53:16.039289Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:16.039373Z node 10 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:16.039470Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:400:2399], Recipient [10:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:16.039523Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:16.220870Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:16.220968Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:16.221090Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:400:2399], Recipient [10:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:16.221124Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:16.246411Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:53:16.257307Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037897 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:53:16.257481Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037903 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:53:16.257571Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037902 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:53:16.417065Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:16.417136Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:16.417230Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:400:2399], Recipient [10:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:16.417281Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:16.532414Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037904 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:53:16.543527Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037909 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:53:16.543738Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037910 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:53:16.543834Z node 10 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 
72075186224037911 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:53:16.618745Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:16.618808Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:16.618872Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:400:2399], Recipient [10:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:16.618892Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:16.789712Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [10:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:16.789793Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:16.789928Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [10:400:2399], Recipient [10:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:16.789964Z node 10 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime { items { uint32_value: 1 } items { uint32_value: 100 } }, { items { uint32_value: 2 } items { uint32_value: 200 } }, { items { uint32_value: 3 } items { uint32_value: 300 } } { items { uint32_value: 1 } items { uint32_value: 1000 } }, { items { uint32_value: 2 } items { uint32_value: 2000 } }, { items { uint32_value: 3 } items { uint32_value: 3000 } } { items { uint32_value: 3 } } { items { uint32_value: 3 } } { items { uint64_value: 3 } } { items { uint64_value: 3 } } { items { uint32_value: 300 } items { uint32_value: 3 } } { items { uint32_value: 3000 } items { uint32_value: 3 } } |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_backup/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> IncrementalBackup::IndexDataVerificationIncrementalRestore [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgint4-pk_types4-all_types4-index4-pgint4--] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> 
test_select.py::TestPgSelect::test_select[table_ttl_pgtimestamp-pk_types7-all_types7-index7-pgtimestamp--] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSinkTx::TIsolationSettingTest+IsOlap+UsePragma [GOOD] Test command err: Trying to start YDB, gRPC: 11278, MsgBus: 25629 ... waiting for SysViewsRoster update finished 2026-01-04T02:47:12.789256Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:12.893643Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:47:12.894152Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:12.894215Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:47:13.189228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:13.189351Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:13.261555Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494829716385 != 1767494829716388 2026-01-04T02:47:13.278434Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:13.324750Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:13.459365Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:47:13.894316Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:13.894379Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:13.894434Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:13.894647Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:13.910360Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:14.377568Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:14.426394Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:917:2782], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.426547Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:928:2787], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.426640Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.427697Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:933:2792], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.429121Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.434273Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:14.511841Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:931:2790], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:14.608538Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:988:2828] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:14.972134Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateColumnTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/olap/operations/create_table.cpp:814) 2026-01-04T02:47:15.153992Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1113:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:90;event=normalization_start;last_saved_id=0; 2026-01-04T02:47:15.154278Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1113:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Granules; 2026-01-04T02:47:15.154562Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1113:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=Chunks; 2026-01-04T02:47:15.154692Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1113:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=TablesCleaner; 2026-01-04T02:47:15.154824Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1113:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanGranuleId; 2026-01-04T02:47:15.154945Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1113:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=GCCountersNormalizer; 2026-01-04T02:47:15.155052Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1113:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=SyncPortionFromChunks; 2026-01-04T02:47:15.155169Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1113:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV1Chunks_V2; 2026-01-04T02:47:15.155279Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1113:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV2Chunks; 2026-01-04T02:47:15.155407Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1113:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CleanDeprecatedSnapshot; 2026-01-04T02:47:15.155523Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224037888;self_id=[1:1113:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreV0ChunksMeta; 2026-01-04T02:47:15.155626Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1113:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=CopyBlobIdsToV2; 2026-01-04T02:47:15.155756Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;self_id=[1:1113:2913];tablet_id=72075186224037888;process=TTxInitSchema::Execute;fline=abstract.cpp:11;event=normalizer_register;description=CLASS_NAME=RestoreAppearanceSnapshot; 2026-01-04T02:47:15.195602Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Granules;id=1; 2026-01-04T02:47:15.195699Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=Chunks;id=Chunks; 2026-01-04T02:47:15.195870Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=Chunks;id=2; 2026-01-04T02:47:15.195924Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=TablesCleaner;id=TablesCleaner; 2026-01-04T02:47:15.196133Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=TablesCleaner;id=4; 2026-01-04T02:47:15.196194Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=CleanGranuleId;id=CleanGranuleId; 2026-01-04T02:47:15.196306Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=CleanGranuleId;id=6; 2026-01-04T02:47:15.196357Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=GCCountersNormalizer;id=GCCountersNormalizer; 2026-01-04T02:47:15.196405Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:62;event=normalizer_finished;description=CLASS_NAME=GCCountersNormalizer;id=9; 2026-01-04T02:47:15.196441Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224037888;process=TTxUpdateSchema::Execute;fline=abstract.cpp:42;event=normalizer_switched;description=CLASS_NAME=SyncPortionFromChunks;id=SyncPortionFromChunks; 2026-01-04T02:47:15.196484Z node 1 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=7207518622403788 ... 
s_local=44;result=not_found; 2026-01-04T02:53:18.364895Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.365003Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.365049Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038089;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.376561Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.376662Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.376708Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038084;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.387556Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.387656Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.387722Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038091;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.398587Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.398665Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.398696Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038086;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.410158Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.410262Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.410311Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038088;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.422265Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.422367Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.422411Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038093;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.434140Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.434258Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.434308Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038090;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.447219Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.447324Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.447371Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038092;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.463129Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.463241Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.463287Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038095;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.487829Z node 14 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.487936Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.487982Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.512662Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.512774Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.512824Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038097;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.533614Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.533720Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.533765Z node 14 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038096;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976715662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:53:18.675378Z node 14 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3eqz69bzs7w7ggxaf714g9", SessionId: ydb://session/3?node_id=14&id=ZGI4Zjk2OTItYWRkYTE1NWMtZDg4NjM0MjEtNDM3NzNhOTM=, Slow query, duration: 27.396128s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-04T02:53:18.839644Z node 14 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [14:8741:8110], status: GENERIC_ERROR, issues:
: Error: Pre type annotation, code: 1020
:2:50: Error: Bad "DefaultTxMode" setting for "$all" cluster: (yexception) Unknown DefaultTxMode, available: [SerializableRW, SnapshotRW, SnapshotRO, StaleRO] 2026-01-04T02:53:18.844371Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=14&id=OGI4ZWNjYmYtYzU2ODVjOWYtN2Y3NDFmY2MtZWM0OGEyZTc=, ActorId: [14:8661:8030], ActorState: ExecuteState, LegacyTraceId: 01ke3ert20cm27y0sp07pavcfn, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { message: "Pre type annotation" issue_code: 1020 severity: 1 issues { position { row: 2 column: 50 } message: "Bad \"DefaultTxMode\" setting for \"$all\" cluster: (yexception) Unknown DefaultTxMode, available: [SerializableRW, SnapshotRW, SnapshotRO, StaleRO]" end_position { row: 2 column: 50 } severity: 1 } } tx_id# trace_id# |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::IndexDataVerificationIncrementalRestore [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:51:09.074188Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:09.243722Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:09.244337Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:09.244396Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:09.621455Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:09.621596Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:09.701381Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495067038790 != 1767495067038793 2026-01-04T02:51:09.715083Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:09.763636Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:09.874117Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:10.169798Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:831:2726], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:10.169901Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:10.169932Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-04T02:51:10.170033Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [1:829:2724], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2026-01-04T02:51:10.170086Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-04T02:51:10.279559Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-01-04T02:51:10.279770Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.279956Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-04T02:51:10.280015Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-04T02:51:10.280780Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:51:10.280875Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:10.280963Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T02:51:10.281515Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-04T02:51:10.281633Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:51:10.281683Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T02:51:10.281720Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-04T02:51:10.281907Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.281941Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.281998Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.282051Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-04T02:51:10.282086Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:51:10.282114Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:51:10.282191Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T02:51:10.282518Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T02:51:10.282558Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-04T02:51:10.282635Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.282655Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.282688Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.282721Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:51:10.282749Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:51:10.282802Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T02:51:10.283040Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T02:51:10.283060Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-04T02:51:10.283134Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.283151Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.283185Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.283219Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.283251Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-01-04T02:51:10.283275Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T02:51:10.283301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:51:10.285849Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:51:10.286194Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T02:51:10.286228Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:51:10.286324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:51:10.286481Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2026-01-04T02:51:10.286746Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:837:2732], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:10.286793Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:10.286824Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-04T02:51:10.286909Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [1:829:2724], Recipient [1:399:2398]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2026-01-04T02:51:10.286931Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-04T02:51:10.286982Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2026-01-04T02:51:10.287012Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/ ... 0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:19.899006Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:19.899099Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:19.899131Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:19.920320Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435096, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressIncrementalRestore 2026-01-04T02:53:19.920404Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5416: StateWork, processing event TEvPrivate::TEvProgressIncrementalRestore 2026-01-04T02:53:19.920460Z node 11 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:444: [IncrementalRestore] Handle(TEvProgressIncrementalRestore) operationId: 281474976710670 tablet: 72057594046644480 2026-01-04T02:53:19.920537Z node 11 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:41: [IncrementalRestore] TTxProgressIncrementalRestore::Execute operationId: 281474976710670 tablet: 72057594046644480 2026-01-04T02:53:19.920581Z node 11 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:55: [IncrementalRestore] Incremental restore already in state 3, skipping progress check for operation: 281474976710670 2026-01-04T02:53:19.920647Z node 11 :FLAT_TX_SCHEMESHARD INFO: schemeshard_incremental_restore_scan.cpp:135: [IncrementalRestore] TTxProgressIncrementalRestore::Complete operationId: 281474976710670 2026-01-04T02:53:20.030836Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037894, for tableId 18, but no stats yet 2026-01-04T02:53:20.031283Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037893, for tableId 18, but no stats yet 2026-01-04T02:53:20.031492Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037895, for tableId 20, but no stats yet 2026-01-04T02:53:20.121840Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: 
NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:20.121941Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:20.122088Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:20.122141Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:20.269289Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:20.269371Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:20.269533Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:20.269570Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:20.301140Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037900, for tableId 28, but no stats yet 2026-01-04T02:53:20.301391Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037901, for tableId 31, but no stats yet 2026-01-04T02:53:20.470712Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:20.470813Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:20.470923Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:20.470966Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:20.483034Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037904, for tableId 35, but no stats yet 2026-01-04T02:53:20.483341Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037905, for tableId 33, but no stats yet 2026-01-04T02:53:20.483573Z node 11 :TX_DATASHARD DEBUG: datashard_impl.h:3374: SendPeriodicTableStats at datashard 72075186224037906, for tableId 33, but no stats yet 2026-01-04T02:53:20.571083Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435093, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvPrivate::TEvConsoleConfigsTimeout 2026-01-04T02:53:20.571163Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:53:20.571235Z node 11 :IMPORT WARN: schemeshard_import.cpp:440: 
Table profiles were not loaded 2026-01-04T02:53:20.720903Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:20.721001Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:20.721104Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:20.721148Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:20.745264Z node 11 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037888 state 5 2026-01-04T02:53:20.745553Z node 11 :TX_DATASHARD INFO: datashard__cleanup_tx.cpp:17: Cleanup tx at non-ready tablet 72075186224037889 state 5 2026-01-04T02:53:20.910464Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:20.910563Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:20.910672Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:20.910722Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:21.032726Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037894 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:53:21.032937Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037893 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:53:21.033037Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037895 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:53:21.112655Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:21.112756Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:21.112861Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:21.112914Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:21.254336Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 
2026-01-04T02:53:21.254441Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:21.254557Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:21.254598Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:21.321995Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037900 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:53:21.322188Z node 11 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037901 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T02:53:21.486166Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:21.486246Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:53:21.486335Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [11:400:2399], Recipient [11:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:53:21.486386Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime { items { uint32_value: 2 } items { text_value: "Bob" } items { uint32_value: 31 } }, { items { uint32_value: 12 } items { text_value: "David" } items { uint32_value: 41 } }, { items { uint32_value: 13 } items { text_value: "Frank" } items { uint32_value: 45 } } { items { uint32_value: 28 } items { uint32_value: 3 } }, { items { uint32_value: 31 } items { uint32_value: 2 } }, { items { uint32_value: 41 } items { uint32_value: 12 } }, { items { uint32_value: 45 } items { uint32_value: 13 } } { items { uint64_value: 4 } } |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_backup/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> PgCatalog::PgTables [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_partial_delete_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> PgCatalog::PgTables [GOOD] Test command err: Trying to start YDB, gRPC: 23479, MsgBus: 11501 2026-01-04T02:48:51.644196Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332924632172830:2138];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:51.644385Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:51.684990Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:48:51.969366Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:51.973094Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:51.973197Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:48:52.057541Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:52.118419Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332924632172727:2081] 1767494931637770 != 1767494931637773 2026-01-04T02:48:52.126427Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:52.148648Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:52.326362Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:52.326383Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:52.326401Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:52.326490Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:52.665426Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:53.021993Z node 1 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:53.041903Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 1042 2026-01-04T02:48:55.099174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Typemod mismatch, got type pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce_pgbpchar_17472595041006102391_17823623939509273229 (key, value) VALUES ( '0'::int2, 'abcd'::bpchar ) 2026-01-04T02:48:55.277918Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332941812042894:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.277937Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332941812042905:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.278021Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.281968Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591332941812042909:2339], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.282079Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:48:55.283306Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710659:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:48:55.296672Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591332941812042908:2338], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710659 completed, doublechecking } 2026-01-04T02:48:55.423521Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591332941812042961:2593] txid# 281474976710660, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 42], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:48:55.868493Z node 1 :TX_DATASHARD CRIT: execute_kqp_data_tx_unit.cpp:484: Exception while executing KQP transaction [0:281474976710663] at 72075186224037888: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2026-01-04T02:48:55.869828Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710663 at tablet 72075186224037888 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2026-01-04T02:48:55.872482Z node 1 :KQP_EXECUTER ERROR: {KQPDATA@kqp_data_executer.cpp:888} ActorId: [1:7591332941812043014:2332] TxId: 281474976710663. Ctx: { TraceId: 01ke3egrqb0xyb1g61nhmfaq08, Database: /Root, SessionId: ydb://session/3?node_id=1&id=OWEyOGQ3ODQtNzkyOGE3ODMtODAzM2Y1MDYtZGY5ZTJiZmQ=, PoolId: default, IsStreamingQuery: 0}. EXEC_ERROR: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ; trace_id# 2026-01-04T02:48:55.881945Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=OWEyOGQ3ODQtNzkyOGE3ODMtODAzM2Y1MDYtZGY5ZTJiZmQ=, ActorId: [1:7591332941812042891:2332], ActorState: ExecuteState, LegacyTraceId: 01ke3egrqb0xyb1g61nhmfaq08, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Error executing transaction (ExecError): Execution failed" severity: 1 issues { message: "[UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2)\n" severity: 1 } } trace_id#
: Error: Error executing transaction (ExecError): Execution failed
: Error: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2026-01-04T02:48:55.914335Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Typemod mismatch, got type _pgbpchar for column value, type mod , but expected 2 --!syntax_pg INSERT INTO Coerce__pgbpchar_17472595041006102391_5352544928909966465 (key, value) VALUES ( '0'::int2, '{abcd,abcd}'::_bpchar ) 2026-01-04T02:48:56.241583Z node 1 :TX_DATASHARD CRIT: execute_kqp_data_tx_unit.cpp:484: Exception while executing KQP transaction [0:281474976710668] at 72075186224037889: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2026-01-04T02:48:56.243317Z node 1 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976710668 at tablet 72075186224037889 status: EXEC_ERROR errors: UNKNOWN (Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ) | 2026-01-04T02:48:56.243551Z node 1 :KQP_EXECUTER ERROR: {KQPDATA@kqp_data_executer.cpp:888} ActorId: [1:7591332946107010443:2367] TxId: 281474976710668. Ctx: { TraceId: 01ke3egsdzaf7dc2ppw1sz6n0b, Database: /Root, SessionId: ydb://session/3?node_id=1&id=M2Q5Y2MyNTctZWJmOTdlOGQtNzY5YzA2NWQtMjQzZTlmNGY=, PoolId: default, IsStreamingQuery: 0}. EXEC_ERROR: [UNKNOWN] Tx was terminated: ydb/core/tx/datashard/datashard_kqp_upsert_rows.cpp:87: Apply(): requirement !error failed. Incorrect value: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) ; trace_id# 2026-01-04T02:48:56.243909Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=M2Q5Y2MyNTctZWJmOTdlOGQtNzY5YzA2NWQtMjQzZTlmNGY=, ActorId: [1:7591332941812043104:2367], ActorState: ExecuteState, LegacyTraceId: 01ke3egsdzaf7dc2ppw1sz6n0b, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Error executing transaction (ExecError): Execution failed" severity: 1 issues { message: "[UNKNOWN] Tx was terminated: ydb/core/tx/datashard/d ... :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[13:7591334025382102782:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:53:12.370113Z node 13 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:53:14.142441Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7591334055446874676:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:53:14.142441Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7591334055446874687:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:53:14.142588Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:53:14.142931Z node 13 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [13:7591334055446874699:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:53:14.143033Z node 13 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:53:14.147564Z node 13 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:53:14.160155Z node 13 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [13:7591334055446874698:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:53:14.232876Z node 13 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [13:7591334055446874751:2544] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Trying to start YDB, gRPC: 6699, MsgBus: 24202 2026-01-04T02:53:15.659093Z node 14 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[14:7591334060333167296:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:53:15.659194Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:53:15.684455Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:53:15.797633Z node 14 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:53:15.802079Z node 14 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [14:7591334060333167256:2081] 1767495195656314 != 1767495195656317 2026-01-04T02:53:15.827041Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:53:15.827190Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:53:15.853406Z node 14 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(14, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:53:15.936273Z node 14 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:53:15.958606Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:53:15.958643Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:53:15.958658Z node 14 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:53:15.958806Z node 14 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:53:16.674016Z node 14 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:53:16.715723Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:53:20.657746Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[14:7591334060333167296:2072];send_to=[0:7307199536658146131:7762515]; 
2026-01-04T02:53:20.657871Z node 14 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:53:21.757770Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7591334086102971940:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:53:21.757910Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7591334086102971929:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:53:21.758163Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:53:21.758958Z node 14 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [14:7591334086102971944:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:53:21.759051Z node 14 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:53:21.763566Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:53:21.779076Z node 14 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [14:7591334086102971943:2331], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710658 completed, doublechecking } 2026-01-04T02:53:21.881799Z node 14 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [14:7591334086102971996:2542] txid# 281474976710659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:53:22.058506Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:53:22.118250Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:53:27.141025Z node 14 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 14, TabletId: 72075186224037888 not found 2026-01-04T02:53:27.188304Z node 14 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:53:27.609838Z node 14 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [14:7591334111872776307:2439], TxId: 281474976710671, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3es2b24rh5y8yyns633tbd. CustomerSuppliedId : . RunScriptActorId : [0:0:0]. PoolId : default. SessionId : ydb://session/3?node_id=14&id=YzhjZDgzYTQtNTcwMjVkZTQtM2Y0Y2I2ZGYtZDBiYTdjMDA=. CurrentExecutionId : . DatabaseId : /Root. Database : /Root. }. InternalError: PRECONDITION_FAILED DEFAULT_ERROR: {
: Error: Terminate was called, reason(57): ERROR: invalid input syntax for type boolean: "pg_proc" }. 2026-01-04T02:53:27.611418Z node 14 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:1264: SelfId: [14:7591334111872776308:2440], TxId: 281474976710671, task: 2. Ctx: { CheckpointId : . TraceId : 01ke3es2b24rh5y8yyns633tbd. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=14&id=YzhjZDgzYTQtNTcwMjVkZTQtM2Y0Y2I2ZGYtZDBiYTdjMDA=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. Handle abort execution event from: [14:7591334111872776304:2436], status: PRECONDITION_FAILED, reason: {
: Error: Terminate execution } 2026-01-04T02:53:27.612559Z node 14 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=14&id=YzhjZDgzYTQtNTcwMjVkZTQtM2Y0Y2I2ZGYtZDBiYTdjMDA=, ActorId: [14:7591334111872776298:2436], ActorState: ExecuteState, LegacyTraceId: 01ke3es2b24rh5y8yyns633tbd, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Terminate was called, reason(57): ERROR: invalid input syntax for type boolean: \"pg_proc\"\n\n" severity: 1 } trace_id# |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v0-std] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_receive_attempt_id[tables_format_v1] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldCompactBorrowedAfterSplitMerge [GOOD] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v0-std] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/cost/unittest |96.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/cost/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] |96.7%| [TA] $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |96.7%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} |96.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/cost/test-results/unittest/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-fifo] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_message_batch[tables_format_v1-std] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] >> Secret::Validation 
[GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::Validation [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:50:47.286949Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:47.442860Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:50:47.443524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:50:47.443599Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:50:47.804429Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:47.804563Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:47.893384Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495044651460 != 1767495044651463 2026-01-04T02:50:47.910217Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:47.959364Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:48.056521Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:50:48.785210Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:48.785260Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:48.785351Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:48.788259Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:48.788760Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:48.803865Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-04T02:51:00.791926Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:953:2819], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:00.792084Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:00.792389Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:963:2822], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:00.792458Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2026-01-04T02:51:11.105559Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:984:2839], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:11.105784Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:11.106629Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:988:2841], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:11.107033Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:11.110841Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:11.262180Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1096:2918], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:11.262278Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:11.262594Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1101:2923], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:11.262758Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1102:2924], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:11.262950Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:11.266165Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:11.385508Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:1105:2927], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:51:11.669135Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1199:2992] txid# 281474976715661, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:12.138825Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:51:12.466859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:12.985666Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715670:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:13.709722Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:51:14.105407Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:51:15.332898Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:15.629424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-01-04T02:51:16.073604Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:51:16.073662Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: Execution, code: 1060
:1:48: Error: Executing ALTER OBJECT SECRET
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-01-04T02:51:51.684087Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715713:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:52.566821Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715718:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:54.459106Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715727:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:51:54.897307Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715730:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: Execution, code: 1060
:1:42: Error: Executing CREATE OBJECT SECRET_ACCESS
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-01-04T02:52:20.786082Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=16; 2026-01-04T02:52:20.786405Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 16 at tablet 72075186224037892 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-04T02:52:20.786621Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 16 at tablet 72075186224037892 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-04T02:52:20.787094Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:3841:4879], Table: `//Root/.metadata/secrets/access` ([72057594046644480:49:1]), SessionActorId: [1:3746:4879]Got CONSTRAINT VIOLATION for table `//Root/.metadata/secrets/access`. ShardID=72075186224037892, Sink=[1:3841:4879].{
: Error: Conflict with existing key., code: 2012 } 2026-01-04T02:52:20.787641Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:3834:4879], SessionActorId: [1:3746:4879], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:3746:4879]. 2026-01-04T02:52:20.793889Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=YTc3MjZmN2YtODlkZmMzNDYtZmM4MDJmYWMtYWMwMGQ0NzE=, ActorId: [1:3746:4879], ActorState: ExecuteState, LegacyTraceId: 01ke3eq190cka5w2kf8azx2e3g, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:3835:4879] from: [1:3834:4879] trace_id# 2026-01-04T02:52:20.794368Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:3835:4879] TxId: 281474976715757. Ctx: { TraceId: 01ke3eq190cka5w2kf8azx2e3g, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YTc3MjZmN2YtODlkZmMzNDYtZmM4MDJmYWMtYWMwMGQ0NzE=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-04T02:52:20.795159Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YTc3MjZmN2YtODlkZmMzNDYtZmM4MDJmYWMtYWMwMGQ0NzE=, ActorId: [1:3746:4879], ActorState: ExecuteState, LegacyTraceId: 01ke3eq190cka5w2kf8azx2e3g, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# 2026-01-04T02:52:20.813190Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01ke3eq116axbwk2bd2xgwb6ch" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=YTc3MjZmN2YtODlkZmMzNDYtZmM4MDJmYWMtYWMwMGQ0NzE=" tx_control { tx_id: "01ke3eq116axbwk2bd2xgwb6ch" } query { yql_text: "--!syntax_v1\nDECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: Execution, code: 1060
:1:29: Error: Executing DROP OBJECT SECRET
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2026-01-04T02:52:45.453038Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4352:5326], for# root@builtin, access# DescribeSchema 2026-01-04T02:52:45.453171Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4352:5326], for# root@builtin, access# DescribeSchema 2026-01-04T02:52:45.455998Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:4346:5320], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:52:45.459300Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=MzI3NzkxNC04MzI0YmUwLTYwNTNjMDc2LThiZTcwYjIz, ActorId: [1:4334:5309], ActorState: ExecuteState, LegacyTraceId: 01ke3eqsf8e3sw2pzcv5bgxdkd, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/secrets/values]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-04T02:52:57.280122Z node 1 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket **** (51449FAE): Could not find correct token validator REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing CREATE OBJECT SECRET
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Execution, code: 1060
:1:50: Error: Executing UPSERT OBJECT SECRET
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |96.7%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_one_message[tables_format_v1-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_change_visibility] [GOOD] >> Secret::ValidationQueryService [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_fifo_read_delete_single_message [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/services/metadata/secret/ut/unittest >> Secret::ValidationQueryService [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:50:53.994705Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:50:54.113557Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:50:54.114031Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:50:54.114098Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:50:54.414873Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:50:54.415012Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:50:54.491027Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495051656056 != 1767495051656059 2026-01-04T02:50:54.510766Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:54.557268Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:50:54.656523Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:50:55.085327Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:50:55.085375Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:50:55.085399Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:50:55.087535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:50:55.088042Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:50:55.102063Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; Initialization finished REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-04T02:51:06.786226Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:951:2818], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:06.786315Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:961:2823], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:06.786396Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:06.787365Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:967:2828], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:06.787590Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:51:06.790704Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715657:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:51:06.806050Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:965:2826], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715657 completed, doublechecking } 2026-01-04T02:51:06.847157Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1018:2860] txid# 281474976715658, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:51:06.893491Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:1027:2868], status: GENERIC_ERROR, issues:
:1:20: Error: mismatched input '-' expecting '(' 2026-01-04T02:51:06.895333Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=M2JhYjQ4MTMtY2ViN2MzZjgtZTBjNWM2NTYtMzY4ZDNhNDA=, ActorId: [1:949:2816], ActorState: ExecuteState, LegacyTraceId: 01ke3ems4wd0gc72g4qhvzd5nv, ReplyQueryCompileError, remove tx status# GENERIC_ERROR issues# { position { row: 1 column: 20 } message: "mismatched input \'-\' expecting \'(\'" end_position { row: 1 column: 20 } severity: 1 } tx_id# trace_id# REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;RESULT=
:1:20: Error: mismatched input '-' expecting '(' ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret-1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 2026-01-04T02:51:17.253030Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:18.026488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:51:18.379983Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:19.068212Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:19.928215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:51:20.323409Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715676:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:51:20.817078Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpInitiateBuildIndexImplTable, opId: 281474976710758:2, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:21.741034Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpFinalizeBuildIndexMainTable, opId: 281474976710759:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_finalize_build_index.cpp:383) 2026-01-04T02:51:22.340489Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T02:51:22.340551Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:23.427028Z node 1 :KQP_SESSION WARN: 
{KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZGM4ZjM4Yy0xZjFjMGQ1Ni0xOTg4MjUzZi0xOTFkZjBlNA==, ActorId: [1:1045:2878], ActorState: ExecuteState, LegacyTraceId: 01ke3en35vd50p4bchvm75sj4a, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "preparation problem: secret secret1 not found for alter" severity: 1 } } trace_id# REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=
: Error: Executing operation with object "SECRET"
: Error: preparation problem: secret secret1 not found for alter ;EXPECTATION=0 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=ALTER OBJECT secret1 (TYPE SECRET) SET value = `abcde`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-01-04T02:51:58.098394Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715712:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:51:59.197742Z node 1 :FLAT_TX_SCHEMESHARD WARN: schem ... t called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) 2026-01-04T02:52:01.508324Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpModifyACL, opId: 281474976715731:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_modify_acl.cpp:101) REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 2026-01-04T02:52:14.384996Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NTQzNGQ5YjYtMzZiMGRjYTctN2I4NWZiYzQtNDlkN2EyMTc=, ActorId: [1:3390:4624], ActorState: ExecuteState, LegacyTraceId: 01ke3eptf88hrcbjfw5jken33c, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET_ACCESS\"" severity: 1 issues { message: "preparation problem: used in access secret secret2 not found" severity: 1 } } trace_id# REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);RESULT=
: Error: Executing operation with object "SECRET_ACCESS"
: Error: preparation problem: used in access secret secret2 not found ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT `secret2:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 2026-01-04T02:52:26.979156Z node 1 :GLOBAL WARN: log.cpp:841: fline=events.h:105;event=ev_write_error;status=STATUS_CONSTRAINT_VIOLATION;details=Conflict with existing key.;tx_id=15; 2026-01-04T02:52:26.979391Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:226: Prepare transaction failed. txid 15 at tablet 72075186224037892 errors: Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-04T02:52:26.979575Z node 1 :TX_DATASHARD ERROR: finish_propose_write_unit.cpp:168: Errors while proposing transaction txid 15 at tablet 72075186224037892 Status: STATUS_CONSTRAINT_VIOLATION Issues: { message: "Conflict with existing key." issue_code: 2012 severity: 1 } 2026-01-04T02:52:26.979898Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:970: SelfId: [1:3774:4835], Table: `//Root/.metadata/secrets/access` ([72057594046644480:49:1]), SessionActorId: [1:3679:4835]Got CONSTRAINT VIOLATION for table `//Root/.metadata/secrets/access`. ShardID=72075186224037892, Sink=[1:3774:4835].{
: Error: Conflict with existing key., code: 2012 } 2026-01-04T02:52:26.980407Z node 1 :KQP_COMPUTE ERROR: kqp_write_actor.cpp:4656: SelfId: [1:3767:4835], SessionActorId: [1:3679:4835], statusCode=PRECONDITION_FAILED. Issue=
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012
: Error: Conflict with existing key., code: 2012 . sessionActorId=[1:3679:4835]. 2026-01-04T02:52:26.980825Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2328} SessionId: ydb://session/3?node_id=1&id=ZWE2ZjRiN2YtODk2N2NmMTUtODY1YzNkNTgtZjU4MDBjM2E=, ActorId: [1:3679:4835], ActorState: ExecuteState, LegacyTraceId: 01ke3eq7bycbgwggdcsh8kdpnx, got TEvKqpBuffer::TEvError in ExecuteState, status: PRECONDITION_FAILED send to: [1:3768:4835] from: [1:3767:4835] trace_id# 2026-01-04T02:52:26.981012Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:3768:4835] TxId: 281474976715757. Ctx: { TraceId: 01ke3eq7bycbgwggdcsh8kdpnx, Database: /Root, SessionId: ydb://session/3?node_id=1&id=ZWE2ZjRiN2YtODk2N2NmMTUtODY1YzNkNTgtZjU4MDBjM2E=, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# PRECONDITION_FAILED Issues# {
: Error: Constraint violated. Table: `//Root/.metadata/secrets/access`., code: 2012 subissue: {
: Error: Conflict with existing key., code: 2012 } } trace_id# 2026-01-04T02:52:26.981459Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZWE2ZjRiN2YtODk2N2NmMTUtODY1YzNkNTgtZjU4MDBjM2E=, ActorId: [1:3679:4835], ActorState: ExecuteState, LegacyTraceId: 01ke3eq7bycbgwggdcsh8kdpnx, Create QueryResponse for error on request, msg: status# PRECONDITION_FAILED issues# { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } trace_id# 2026-01-04T02:52:26.988695Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=request_actor.h:64;event=unexpected reply;error_message=operation { ready: true status: PRECONDITION_FAILED issues { message: "Constraint violated. Table: `//Root/.metadata/secrets/access`." issue_code: 2012 severity: 1 issues { message: "Conflict with existing key." issue_code: 2012 severity: 1 } } result { [type.googleapis.com/Ydb.Table.ExecuteQueryResult] { tx_meta { id: "01ke3eq72n523vhfwaz1cmz7er" } } } } ;request=session_id: "ydb://session/3?node_id=1&id=ZWE2ZjRiN2YtODk2N2NmMTUtODY1YzNkNTgtZjU4MDBjM2E=" tx_control { tx_id: "01ke3eq72n523vhfwaz1cmz7er" } query { yql_text: "--!syntax_v1\nDECLARE $objects AS List>;\nINSERT INTO `//Root/.metadata/secrets/access`\nSELECT ownerUserId,secretId,accessSID FROM AS_TABLE($objects)\n" } parameters { key: "$objects" value { type { list_type { item { struct_type { members { name: "ownerUserId" type { type_id: UTF8 } } members { name: "secretId" type { type_id: UTF8 } } members { name: "accessSID" type { type_id: UTF8 } } } } } } value { items { items { text_value: "root@builtin" } items { text_value: "secret1" } items { text_value: "test@test1" } } } } } ; REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT IF NOT EXISTS `secret1:test@test1` (TYPE SECRET_ACCESS);EXPECTATION=1;WAITING=1 REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2026-01-04T02:52:39.373341Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=ZjcxZWQ3YzAtOTIxMTYwMTgtZjI3YzMyLWFlNDI4ZGJk, ActorId: [1:3966:5042], ActorState: ExecuteState, LegacyTraceId: 01ke3eqjz7b3sp9y3ynrdyzd5d, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "preparation problem: secret secret1 using in access for test@test1" severity: 1 } } trace_id# REQUEST=DROP OBJECT `secret1` (TYPE SECRET);RESULT=
: Error: Executing operation with object "SECRET"
: Error: preparation problem: secret secret1 using in access for test@test1 ;EXPECTATION=0 FINISHED_REQUEST=DROP OBJECT `secret1` (TYPE SECRET);EXPECTATION=0;WAITING=1 2026-01-04T02:52:51.014288Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4275:5273], for# root@builtin, access# DescribeSchema 2026-01-04T02:52:51.014408Z node 1 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:324: Access denied: self# [1:4275:5273], for# root@builtin, access# DescribeSchema 2026-01-04T02:52:51.016448Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:4272:5270], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T02:52:51.019270Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NWRhZjc2MGEtZDkyOWJiMWItZGRjZmUyNTMtNTI0ZGM1Mg==, ActorId: [1:4265:5264], ActorState: ExecuteState, LegacyTraceId: 01ke3eqyxm04xqkdcsda2hfap3, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 1 column: 1 } message: "At function: KiReadTable!" end_position { row: 1 column: 1 } severity: 1 issues { position { row: 1 column: 1 } message: "Cannot find table \'db.[/Root/.metadata/secrets/values]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 1 column: 1 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;RESULT=
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/Root/.metadata/secrets/values]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 ;EXPECTATION=0 REQUEST=SELECT * FROM `/Root/.metadata/secrets/values`;EXPECTATION=0 REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-04T02:53:02.783731Z node 1 :TICKET_PARSER ERROR: ticket_parser_impl.h:1034: Ticket **** (51449FAE): Could not find correct token validator 2026-01-04T02:53:03.642360Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NTQ0YTZlOTQtZjg5NjgwYzQtYTNjZTViYzctNjUxNTI1ZTM=, ActorId: [1:4520:5453], ActorState: ExecuteState, LegacyTraceId: 01ke3eradyaftc6x2c7wezsqpn, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "cannot CREATE objects: Secret already exists: secret1" severity: 1 } } trace_id# REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Executing operation with object "SECRET"
: Error: cannot CREATE objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=CREATE OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 2026-01-04T02:53:16.277600Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=NGQzNTI4ODUtZWE1NGE4OGUtODAyM2YzOTQtYWU3YzQ1ZDM=, ActorId: [1:4902:5736], ActorState: ExecuteState, LegacyTraceId: 01ke3erpqg4xvx3cc5hq5t5yxe, Create QueryResponse for error on request, msg: status# GENERIC_ERROR issues# { message: "Executing operation with object \"SECRET\"" severity: 1 issues { message: "cannot UPSERT objects: Secret already exists: secret1" severity: 1 } } trace_id# REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;RESULT=
: Error: Executing operation with object "SECRET"
: Error: cannot UPSERT objects: Secret already exists: secret1 ;EXPECTATION=0 FINISHED_REQUEST=UPSERT OBJECT secret1 (TYPE SECRET) WITH value = `100`;EXPECTATION=0;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;RESULT=;EXPECTATION=1 FINISHED_REQUEST=CREATE OBJECT secret2 (TYPE SECRET) WITH value = `100`;EXPECTATION=1;WAITING=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;RESULT=;EXPECTATION=1 REQUEST=SELECT COUNT(*) FROM `/Root/.metadata/initialization/migrations`;EXPECTATION=1 |96.7%| [TM] {BAZEL_UPLOAD} ydb/services/metadata/secret/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] |96.7%| [TA] $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.7%| [TA] {RESULT} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/services/metadata/secret/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attribute_value[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_change_disables_receive_attempt_id[tables_format_v1-with_delete_message] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_zero_visibility_timeout_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_only_single_read_infly_from_fifo [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_works[tables_format_v1-std] [GOOD] |96.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] >> IncrementalBackup::ComplexBackupSequenceWithDataVerification [GOOD] >> IncrementalBackup::ComplexBackupSequenceWithIntermediateVerification >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_group_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_attribute_name[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v0-std] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_validates_receive_attempt_id[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_works[tables_format_v1] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_change_visibility] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] >> KqpPg::TableDeleteWhere-useSink [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] |96.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/pg/unittest >> KqpPg::TableDeleteWhere-useSink [GOOD] Test command err: Trying to start YDB, gRPC: 16459, MsgBus: 21418 2026-01-04T02:48:51.629583Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591332927566501213:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:51.629655Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:48:51.917941Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T02:48:51.956493Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:48:51.958843Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: 
Disconnected -> Connecting 2026-01-04T02:48:52.016406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:48:52.024904Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591332927566501179:2081] 1767494931625260 != 1767494931625263 2026-01-04T02:48:52.033952Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:48:52.116986Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T02:48:52.318490Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:48:52.318508Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:48:52.318527Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:48:52.318597Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:48:52.642006Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:48:53.080557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:48:53.091043Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:48:55.020196Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgbpchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2026-01-04T02:48:55.294410Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgbpchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgbpchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character(2) 2026-01-04T02:48:55.395129Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2026-01-04T02:48:55.520174Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {abcd,abcd} 2026-01-04T02:48:55.663768Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2026-01-04T02:48:55.793058Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {"abcd ","abcd "} 2026-01-04T02:48:55.985227Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710672:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgvarchar_17472595041006102391_17823623939509273229' Unable to coerce value for pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2) 2026-01-04T02:48:56.067548Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710673:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgvarchar_17472595041006102391_5352544928909966465' Unable to coerce value for _pgvarchar: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: value too long for type character varying(2) 2026-01-04T02:48:56.147237Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710674:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2026-01-04T02:48:56.267219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710677:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {abcd,abcd} 2026-01-04T02:48:56.371774Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710680:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) abcd 2026-01-04T02:48:56.469557Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710683:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {abcd,abcd} 2026-01-04T02:48:56.579472Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710686:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:48:56.629868Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591332927566501213:2072];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:48:56.629930Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout;
: Error: Bulk upsert to table '/Root/Coerce_pgbit_17472595041006102391_5866627432374416336' Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2) 2026-01-04T02:48:56.666791Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710687:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce__pgbit_17472595041006102391_11087201080355820517' Unable to coerce value for _pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(2) 2026-01-04T02:48:56.738540Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710688:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 1111 2026-01-04T02:48:56.855228Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710691:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) {1111,1111} 2026-01-04T02:48:56.967929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710694:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689)
: Error: Bulk upsert to table '/Root/Coerce_pgbit_10103374131519304989_5866627432374416336' Unable to coerce value for pgbit: Error while coercing value, reason: yql/essentials/minikql/mkql_terminator.cpp:47: ERROR: bit string length 4 does not match type bit(6) 2026-01-04T02:48:57.039805Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboper ... 06656Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715827:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:53:58.146314Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:53:58.181662Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715829:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 650 2026-01-04T02:53:58.271405Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:53:58.312046Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715831:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:53:58.404619Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:53:58.439881Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715833:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 829 2026-01-04T02:53:58.574901Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715834:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:53:58.713229Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715835:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:53:58.810968Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 774 2026-01-04T02:53:58.858979Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715837:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
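The Coerce_* bulk-upsert failures above come from PostgreSQL-style length-checked column types (character(n), character varying(n), bit(n)) rejecting oversized values during coercion. A minimal sketch of the same behaviour in plain PostgreSQL-compatible SQL; the table t and its columns are hypothetical and not part of the test suite, only the error texts mirror the ones in the log:
-- hypothetical table; character(2) and bit(2) mirror the coerced column types seen above
CREATE TABLE t (k int PRIMARY KEY, c character(2), b bit(2));
INSERT INTO t VALUES (1, 'ab',   B'01');   -- fits both length-checked types
INSERT INTO t VALUES (2, 'abcd', B'01');   -- ERROR: value too long for type character(2)
INSERT INTO t VALUES (3, 'ab',   B'0101'); -- ERROR: bit string length 4 does not match type bit(2)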
2026-01-04T02:53:58.952786Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:53:59.000996Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715839:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:53:59.111724Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2950 2026-01-04T02:53:59.148955Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715841:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:53:59.243934Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:53:59.293520Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715843:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 114 2026-01-04T02:53:59.499891Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:53:59.531504Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715845:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:53:59.623829Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:53:59.658978Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715847:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 3802 2026-01-04T02:53:59.776423Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:53:59.814433Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715849:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:53:59.913615Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:53:59.951440Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715851:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 4072 2026-01-04T02:54:00.051324Z node 11 
:READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:54:00.053263Z node 11 :TX_DATASHARD ERROR: finish_propose_unit.cpp:245: Prepare transaction failed. txid 281474976715853 at tablet 72075186224037959 errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976715853] at 72075186224037959 while waiting for scan finish) | 2026-01-04T02:54:00.055440Z node 11 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976715853 at tablet 72075186224037959 status: ERROR errors: WRONG_SHARD_STATE (Interrupted operation [0:281474976715853] at 72075186224037959 while waiting for scan finish) | 2026-01-04T02:54:00.083805Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715854:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:54:00.219575Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715855:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:54:00.329726Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 142 2026-01-04T02:54:00.369449Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715857:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:54:00.451887Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:54:00.497305Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715859:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:54:00.603717Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 3615 2026-01-04T02:54:00.644870Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715861:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:54:00.738946Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:54:00.781134Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715863:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 3614 2026-01-04T02:54:00.885848Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed 
ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715864:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:54:00.982364Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:54:01.011500Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715866:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 22 2026-01-04T02:54:01.138897Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715867:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:54:01.207908Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill 2026-01-04T02:54:01.234490Z node 11 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715869:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:54:01.315923Z node 11 :READ_TABLE_API WARN: rpc_read_table.cpp:116: ForgetAction occurred, send TEvPoisonPill |96.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/pg/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v0-std] [GOOD] |96.8%| [TA] $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] |96.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/pg/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_empty_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_compaction/unittest >> TSchemeshardBorrowedCompactionTest::SchemeshardShouldHandleBorrowCompactionTimeouts [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:50:58.156060Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:50:58.156140Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:58.156189Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:50:58.156230Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:50:58.156271Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:50:58.156297Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:50:58.156334Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:50:58.156392Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:50:58.156985Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:50:58.158195Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:50:58.229302Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:50:58.229377Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:50:58.241103Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:50:58.241527Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:50:58.242553Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as 
Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:50:58.252281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:50:58.253836Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:50:58.256661Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:50:58.260598Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:50:58.267732Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:58.268120Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:50:58.275855Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:50:58.275966Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:50:58.276043Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:50:58.276097Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:50:58.276210Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:50:58.276412Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:50:58.450460Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.451378Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.451534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.451609Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.451692Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.451772Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.451851Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.451938Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.452027Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.452087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.452179Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true 
FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.452255Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.452334Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.452421Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:50:58.452507Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 
pleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 137 Memory: 124368 Storage: 14156 } ShardState: 2 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 138 TableOwnerId: 72057594046678944 FollowerId: 0 2026-01-04T02:54:05.322164Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-04T02:54:05.322217Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0137 2026-01-04T02:54:05.322345Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409546 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 38] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: true Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-04T02:54:05.322406Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-04T02:54:05.364029Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-04T02:54:05.364123Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-04T02:54:05.364160Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2026-01-04T02:54:05.364268Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 1 2026-01-04T02:54:05.364307Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2026-01-04T02:54:05.364443Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 38 shard idx 72057594046678944:1 data size 13940 row count 100 2026-01-04T02:54:05.364533Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409546 maps to shardIdx: 72057594046678944:1 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], pathId map=Simple, is column=0, is olap=0, RowCount 100, DataSize 13940 2026-01-04T02:54:05.364571Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409546, followerId 0 2026-01-04T02:54:05.364692Z node 3 
:FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:587: Do not want to split tablet 72075186233409546 by load, its table already has 1 out of 1 partitions 2026-01-04T02:54:05.364782Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 2026-01-04T02:54:05.375318Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-04T02:54:05.375404Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-04T02:54:05.375440Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-04T02:54:05.406691Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [3:1134:3099]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-04T02:54:05.407022Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186233409547, FollowerId 0, tableId 39 2026-01-04T02:54:05.407511Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269553162, Sender [3:1134:3099], Recipient [3:127:2152]: NKikimrTxDataShard.TEvPeriodicTableStats DatashardId: 72075186233409547 TableLocalId: 39 Generation: 2 Round: 6 TableStats { DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 38 Memory: 124368 } ShardState: 2 UserTablePartOwners: 72075186233409547 UserTablePartOwners: 72075186233409546 NodeId: 3 StartTime: 310 TableOwnerId: 72057594046678944 FollowerId: 0 2026-01-04T02:54:05.407576Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5285: StateWork, processing event TEvDataShard::TEvPeriodicTableStats 2026-01-04T02:54:05.407628Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:740: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] state 'Ready' dataSize 13940 rowCount 100 cpuUsage 0.0038 2026-01-04T02:54:05.407782Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:747: Got periodic table stats at tablet 72057594046678944 from shard 72075186233409547 followerId 0 pathId [OwnerId: 72057594046678944, LocalPathId: 39] raw table stats: DataSize: 13940 RowCount: 100 IndexSize: 102 InMemSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 2 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 SearchHeight: 1 LastFullCompactionTs: 0 HasLoanedParts: false Channels { Channel: 1 DataSize: 13940 IndexSize: 102 } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 2026-01-04T02:54:05.407841Z node 3 
:FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.100000s, queue# 1 2026-01-04T02:54:05.452667Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:92: Operation queue wakeup 2026-01-04T02:54:05.452764Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:65: Borrowed compaction timeout for pathId# [OwnerId: 72057594046678944, LocalPathId: 39], datashard# 72075186233409547, next wakeup# 0.000000s, in queue# 0 shards, running# 0 shards at schemeshard 72057594046678944 2026-01-04T02:54:05.452810Z node 3 :FLAT_TX_SCHEMESHARD INFO: schemeshard__borrowed_compaction.cpp:28: RunBorrowedCompaction for pathId# [OwnerId: 72057594046678944, LocalPathId: 39], datashard# 72075186233409547, next wakeup# 0.000000s, rate# 0, in queue# 1 shards, running# 0 shards at schemeshard 72057594046678944 2026-01-04T02:54:05.452882Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: operation_queue_timer.h:84: Operation queue set wakeup after delta# 3 seconds 2026-01-04T02:54:05.452915Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__borrowed_compaction.cpp:100: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2026-01-04T02:54:05.453101Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-04T02:54:05.453151Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-04T02:54:05.453182Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 1 2026-01-04T02:54:05.453267Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 1 2026-01-04T02:54:05.453298Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 1 2026-01-04T02:54:05.453397Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 39 shard idx 72057594046678944:2 data size 13940 row count 100 2026-01-04T02:54:05.453463Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186233409547 maps to shardIdx: 72057594046678944:2 followerId=0, pathId: [OwnerId: 72057594046678944, LocalPathId: 39], pathId map=CopyTable, is column=0, is olap=0, RowCount 100, DataSize 13940, with borrowed parts 2026-01-04T02:54:05.453498Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186233409547, followerId 0 2026-01-04T02:54:05.453605Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:582: Want to split tablet 72075186233409547 by size: split by size (shardCount: 1, maxShardCount: 2, shardSize: 13940, maxShardSize: 1) 2026-01-04T02:54:05.453638Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__table_stats.cpp:669: Postpone split tablet 72075186233409547 because it has borrow parts, enqueue compact them first 2026-01-04T02:54:05.453676Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__borrowed_compaction.cpp:100: Borrowed compaction enqueued shard# 72057594046678944:2 at schemeshard 72057594046678944 2026-01-04T02:54:05.453756Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046678944 
2026-01-04T02:54:05.464350Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-04T02:54:05.464453Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-04T02:54:05.464506Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046678944, queue size# 0 2026-01-04T02:54:05.674491Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:54:05.674568Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T02:54:05.674675Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:127:2152], Recipient [3:127:2152]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T02:54:05.674731Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime |96.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_compaction/unittest >> test_select.py::TestSelect::test_select[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] |96.8%| [TA] $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] |96.8%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_compaction/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_select.py::TestSelect::test_select[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> KqpSnapshotIsolation::ConflictWrite+IsOlap+FillTables [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_for_deleted_message[tables_format_v1-fifo] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_validates_message_attributes[tables_format_v1] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] >> test_select.py::TestSelect::test_select[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_attributes[tables_format_v1] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/tx/unittest >> KqpSnapshotIsolation::ConflictWrite+IsOlap+FillTables [GOOD] Test command err: Trying to start YDB, gRPC: 3538, MsgBus: 4864 ... waiting for SysViewsRoster update finished 2026-01-04T02:47:12.596066Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:12.728844Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:47:12.729425Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:12.729484Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:47:13.021177Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:13.021335Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:13.097423Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767494829776600 != 1767494829776603 2026-01-04T02:47:13.113648Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:13.159642Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:13.280995Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:47:13.690753Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:13.690820Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:13.690871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:13.691093Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:13.704037Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:14.165698Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:14.284080Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:917:2782], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.284244Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:928:2787], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.284348Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.285595Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:933:2792], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.285703Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:14.290900Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T02:47:14.381774Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:931:2790], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T02:47:14.465960Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:988:2828] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 41], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T02:47:14.767500Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:15.097219Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:47:17.669730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) Trying to start YDB, gRPC: 3800, MsgBus: 13184 ... waiting for SysViewsRoster update finished 2026-01-04T02:47:25.433354Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:47:25.441763Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:47:25.446512Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:47:25.446914Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:47:25.447106Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:47:25.713416Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:47:25.713558Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:47:25.749560Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767494842045991 != 1767494842045995 2026-01-04T02:47:25.755641Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:47:25.806098Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:47:25.908715Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:47:26.269412Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:47:26.269456Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:47:26.269486Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:47:26.270105Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:47:26.282344Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:47:26.694758Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:47:26.752312Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:915:2780], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:26.752418Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:26.752504Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:925:2785], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:26.753287Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:929:2788], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:26.753460Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T02:47:26.758970Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 2814 ... 03.297594Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038076;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.311182Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.311266Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.311291Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038071;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.311823Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.311881Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.311905Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038041;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.324238Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.324297Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.324317Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038085;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.325167Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038037;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.325226Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038037;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.325248Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: 
tablet_id=72075186224038037;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.337833Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.337950Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.337980Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038047;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.338207Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.338274Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.338298Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038094;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.352386Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.352475Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.352499Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.352524Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038079;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.352571Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.352597Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038087;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.366453Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.366542Z node 20 
:TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.366593Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038051;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.367603Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.367695Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.367719Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038077;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.379951Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.380067Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.380094Z node 20 :TX_COLUMNSHARD WARN: log.cpp:841: tablet_id=72075186224038045;tx_state=TTxProgressTx::Execute;tx_current=0;tx_id=281474976710662;fline=tables_manager.cpp:57;method=resolve_internal_path_id;ss_local=44;result=not_found; 2026-01-04T02:54:03.476468Z node 20 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3eshzd7yy8w605zqw7d8jy", SessionId: ydb://session/3?node_id=20&id=NzIyOWFkZGUtYjUzMjU0OC1mYzk5MDJjNi1hYTk5Yzk3OA==, Slow query, duration: 14.361117s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "\n CREATE TABLE `/Root/Test` (\n Group Uint32 not null,\n Name String not null,\n Amount Uint64,\n Comment String,\n PRIMARY KEY (Group, Name)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 10\n );\n\n CREATE TABLE `/Root/KV` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n\n CREATE TABLE `/Root/KV2` (\n Key Uint32 not null,\n Value String,\n PRIMARY KEY (Key)\n ) WITH (\n STORE = COLUMN,\n AUTO_PARTITIONING_BY_SIZE = DISABLED,\n AUTO_PARTITIONING_BY_LOAD = DISABLED,\n AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 100,\n UNIFORM_PARTITIONS = 100\n );\n ", parameters: 0b 2026-01-04T02:54:05.464306Z node 20 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:2088: SelfId: [20:7591334274771189332:4164], TxId: 281474976710668, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3et7h39bnxq160pc5r4ezt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=20&id=YWZmYmUxY2YtZThjZWZjMjUtMzZlOTUxMGUtMWFhOTdjZDQ=. CurrentExecutionId : . 
Database : /Root. DatabaseId : /Root. }. Sink[0] fatal error: {
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 } 2026-01-04T02:54:05.464433Z node 20 :KQP_COMPUTE ERROR: dq_compute_actor_impl.h:734: SelfId: [20:7591334274771189332:4164], TxId: 281474976710668, task: 1. Ctx: { CheckpointId : . TraceId : 01ke3et7h39bnxq160pc5r4ezt. RunScriptActorId : [0:0:0]. CustomerSuppliedId : . PoolId : default. SessionId : ydb://session/3?node_id=20&id=YWZmYmUxY2YtZThjZWZjMjUtMzZlOTUxMGUtMWFhOTdjZDQ=. CurrentExecutionId : . Database : /Root. DatabaseId : /Root. }. InternalError: ABORTED KIKIMR_LOCKS_INVALIDATED: {
: Error: Transaction locks invalidated. Table: `/Root/Test`, code: 2001 }. 2026-01-04T02:54:05.465111Z node 20 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=20&id=YWZmYmUxY2YtZThjZWZjMjUtMzZlOTUxMGUtMWFhOTdjZDQ=, ActorId: [20:7591334266181254089:3653], ActorState: ExecuteState, LegacyTraceId: 01ke3et7h39bnxq160pc5r4ezt, Create QueryResponse for error on request, msg: status# ABORTED issues# { message: "Transaction locks invalidated. Table: `/Root/Test`" issue_code: 2001 severity: 1 } trace_id# |96.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/tx/unittest >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_select.py::TestSelect::test_select[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_queue_attributes[tables_format_v0] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_empty_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] |96.8%| [TA] $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v0] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delay_one_message[tables_format_v1-std] [GOOD] >> test_select.py::TestPgSelect::test_select[table_all_types-pk_types3-all_types3-index3---] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |96.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TA] {RESULT} $(B)/ydb/core/kqp/ut/tx/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_validates_message_body[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/compile_service/ut/unittest |96.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/compile_service/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] |96.8%| [TA] $(B)/ydb/core/kqp/compile_service/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |96.8%| [TA] {RESULT} $(B)/ydb/core/kqp/compile_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/kqp/compile_service/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_wrong_delete_fails[tables_format_v1] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_receive_with_very_big_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] >> test_select.py::TestSelect::test_select[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_DyNumber-pk_types13-all_types13-index13-DyNumber--] [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_and_read_message[tables_format_v0-std] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |96.8%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-fifo] [GOOD] >> test_select.py::TestSelect::test_select[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] >> test_select.py::TestPgSelect::test_select[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_visibility_change_disables_receive_attempt_id[tables_format_v0-with_delete_message] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] >> test_select.py::TestPgSelect::test_select[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_for_deleted_message[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-fifo] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] >> test_select.py::TestSelect::test_select[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] >> TInterconnectTest::TestNotifyUndelivered |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] >> TInterconnectTest::TestNotifyUndelivered [GOOD] >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_all_types-pk_types3-all_types3-index3---] [GOOD] |96.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> TInterconnectTest::TestNotifyUndeliveredOnMissedActor [GOOD] >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes >> TInterconnectTest::TestPreSerializedBlobEventUpToMebibytes [GOOD] >> TInterconnectTest::TestPingPongThroughSubChannel >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_to_nonexistent_queue[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] |96.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/core/actorlib_impl/ut/unittest >> TInterconnectTest::TestPingPongThroughSubChannel [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/core/actorlib_impl/ut/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_create_queue_by_nonexistent_user_fails[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] >> 
test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] |96.9%| [TA] $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.9%| [TA] {RESULT} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} |96.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/actorlib_impl/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] >> IncrementalBackup::ComplexBackupSequenceWithIntermediateVerification [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_send_message_batch[tables_format_v0-std] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_incremental_backup/unittest >> IncrementalBackup::ComplexBackupSequenceWithIntermediateVerification [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:51:09.847643Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:51:09.972314Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:51:09.972773Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:51:09.972832Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:51:10.259426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:51:10.259564Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:51:10.333277Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495067038845 != 1767495067038848 2026-01-04T02:51:10.348816Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:51:10.395472Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:51:10.491625Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:51:10.774144Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:831:2726], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:10.774229Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:10.774267Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-04T02:51:10.774398Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [1:829:2724], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 1 TabletId# 72057594046644480} 2026-01-04T02:51:10.774445Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-04T02:51:10.867576Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 1 TabletId: 72057594046644480 , at schemeshard: 72057594046644480 2026-01-04T02:51:10.867747Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.867885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-04T02:51:10.867919Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 1:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-04T02:51:10.868059Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 1:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:51:10.868111Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:51:10.868174Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T02:51:10.868567Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 1, response: Status: StatusAccepted TxId: 1 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-04T02:51:10.868670Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 1, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T02:51:10.868721Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T02:51:10.868756Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-04T02:51:10.868885Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.868910Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.868954Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.868989Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 1:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-04T02:51:10.869011Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 1:0 ProgressState no shards to create, do next state 2026-01-04T02:51:10.869043Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 2 -> 3 2026-01-04T02:51:10.869107Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T02:51:10.869358Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T02:51:10.869398Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-04T02:51:10.869464Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.869484Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.869514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.869545Z node 1 
:FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 1:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:51:10.869569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 1:0 3 -> 128 2026-01-04T02:51:10.869620Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T02:51:10.869864Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T02:51:10.869886Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 1:0 2026-01-04T02:51:10.869938Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.869954Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T02:51:10.869985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.870018Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 1:0, at schemeshard: 72057594046644480 2026-01-04T02:51:10.870045Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 1:0, at tablet# 72057594046644480 2026-01-04T02:51:10.870069Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T02:51:10.870099Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 1 ready parts: 1/1 2026-01-04T02:51:10.872387Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 1 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T02:51:10.872681Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T02:51:10.872726Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 1:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:1 msg type: 269090816 2026-01-04T02:51:10.872822Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 1, partId: 4294967295, tablet: 72057594046316545 2026-01-04T02:51:10.872972Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 1 2026-01-04T02:51:10.873196Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:837:2732], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:10.873227Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T02:51:10.873251Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-04T02:51:10.873325Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124996, Sender [1:829:2724], Recipient [1:399:2398]: NKikimrScheme.TEvNotifyTxCompletion TxId: 1 2026-01-04T02:51:10.873343Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5261: StateWork, processing event TEvSchemeShard::TEvNotifyTxCompletion 2026-01-04T02:51:10.873400Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 1, at schemeshard: 72057594046644480 2026-01-04T02:51:10.873427Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 1, ready parts: 0/ ... 337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:54:34.516050Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.516086Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-04T02:54:34.528089Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [11:408:2406]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-04T02:54:34.528192Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-04T02:54:34.528231Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 2 2026-01-04T02:54:34.528318Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:778: Will execute TTxStoreStats, queue# 2 2026-01-04T02:54:34.528359Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:787: Will delay TTxStoreTableStats on# 0.000000s, queue# 2 2026-01-04T02:54:34.528495Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 60 shard idx 72057594046644480:11 data size 271 row count 13 2026-01-04T02:54:34.528572Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037898 maps to shardIdx: 72057594046644480:11 followerId=0, pathId: [OwnerId: 72057594046644480, LocalPathId: 60], pathId map=SequenceTable, is column=0, is olap=0, RowCount 13, DataSize 271, with borrowed parts 2026-01-04T02:54:34.528613Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037898, followerId 0 2026-01-04T02:54:34.528728Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037898: SplitByLoadNotEnabledForTable 2026-01-04T02:54:34.528791Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:312: PersistSingleStats for pathId 68 shard idx 72057594046644480:17 data size 368 row count 16 2026-01-04T02:54:34.528838Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:333: TTxStoreTableStats.PersistSingleStats: main stats from datashardId(TabletID)=72075186224037906 maps to shardIdx: 72057594046644480:17 followerId=0, pathId: [OwnerId: 72057594046644480, 
LocalPathId: 68], pathId map=SequenceTable, is column=0, is olap=0, RowCount 16, DataSize 368, with borrowed parts 2026-01-04T02:54:34.528868Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__table_stats.cpp:37: BuildStatsForCollector: datashardId 72075186224037906, followerId 0 2026-01-04T02:54:34.528923Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:595: Do not want to split tablet 72075186224037906: SplitByLoadNotEnabledForTable 2026-01-04T02:54:34.528996Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T02:54:34.542163Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435092, Sender [0:0:0], Recipient [11:408:2406]: NKikimr::NSchemeShard::TEvPrivate::TEvPersistTableStats 2026-01-04T02:54:34.542264Z node 11 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5446: StateWork, processing event TEvPrivate::TEvPersistTableStats 2026-01-04T02:54:34.542308Z node 11 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__table_stats.cpp:767: Started TEvPersistStats at tablet 72057594046644480, queue size# 0 2026-01-04T02:54:34.552873Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:54:34.552964Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.553007Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:54:34.553063Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.553111Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037901][Partition][0][StateIdle] Try persist 2026-01-04T02:54:34.577894Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:54:34.577973Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.578009Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:54:34.578045Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.578076Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:54:34.591304Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:54:34.591381Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.591419Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:54:34.591458Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.591490Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-04T02:54:34.602290Z node 11 :PERSQUEUE DEBUG: 
partition.cpp:2329: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:54:34.609784Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.609872Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:54:34.609909Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.609940Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037901][Partition][0][StateIdle] Try persist 2026-01-04T02:54:34.634475Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:54:34.634551Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.634587Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:54:34.634625Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.634659Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:54:34.646226Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:54:34.646303Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.646338Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:54:34.646375Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.646406Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-04T02:54:34.658220Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:54:34.658294Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.658324Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:54:34.658358Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.658390Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037901][Partition][0][StateIdle] Try persist 2026-01-04T02:54:34.681711Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:54:34.681786Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.681844Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 
2026-01-04T02:54:34.681881Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.681911Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T02:54:34.695218Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:54:34.695293Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.695324Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:54:34.695361Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.695392Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-04T02:54:34.706314Z node 11 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037901][Partition][0][StateIdle] Process user action and tx events 2026-01-04T02:54:34.706392Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.706429Z node 11 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037901][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T02:54:34.706465Z node 11 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037901][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T02:54:34.706495Z node 11 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037901][Partition][0][StateIdle] Try persist 2026-01-04T02:54:35.129618Z node 11 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037907, clientId# [11:3279:4466], serverId# [11:3280:4467], sessionId# [0:0:0] { items { uint32_value: 6 } items { uint32_value: 600 } }, { items { uint32_value: 7 } items { uint32_value: 70 } }, { items { uint32_value: 8 } items { uint32_value: 80 } }, { items { uint32_value: 9 } items { uint32_value: 90 } } |96.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_incremental_backup/unittest >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |96.9%| [TA] $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] |96.9%| [TA] {RESULT} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} |96.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/datashard/ut_incremental_backup/test-results/unittest/{meta.json ... results_accumulator.log} >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_message[tables_format_v0-fifo] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-fifo] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_to_zero_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestYandexAttributesPrefix::test_allows_yandex_message_attribute_prefix[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_attributes_table[tables_format_v1-fifo] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_queue_attributes[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_expires_on_wait_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_message_visibility_with_very_big_timeout[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_send_and_read_message[tables_format_v0-std] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test >> 
test_restarts.py::test_basic >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_visibility_timeout_works[tables_format_v0] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_get_queue_attributes_only_runtime_attributes[tables_format_v1-std] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryServiceStreamCall [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:248: Test is failing right now |96.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v1-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncTableClient [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-by_deduplication_id] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_visibility_timeout_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> YdbSdkSessions::CloseSessionAfterDriverDtorWithoutSessionPool [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/sqs/messaging/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_read_dont_stall[tables_format_v0] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_path_with_long_name_failed [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_set_very_big_visibility_timeout[tables_format_v0] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_can_read_from_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v0-content_based] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_queue_attributes_batch[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_and_read_to_different_groups[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] >> test_select.py::TestSelect::test_select[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] >> test_select.py::TestSelect::test_select[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadSessionPoolLimitSyncQueryClient [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-by_deduplication_id] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_delete_path_with_folder_then_get_error_response [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range >> 
test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_remove_directory_that_does_not_exist_failure [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_after_create_table_it_is_success [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_does_not_change_visibility_not_in_flight[tables_format_v1-std] [GOOD] |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] |96.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |96.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_delete_message_works[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::CloseSessionWithSessionPoolExplicit [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_fifo_read_delete_single_message [GOOD] >> 
test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_and_drop_table_many_times_in_range [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] >> test_select.py::TestSelect::test_select[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_read_dont_stall[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_when_copy_table_partition_config [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-fifo] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_visibility_timeout_works[tables_format_v0] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_directory_from_leaf_success [GOOD] >> 
test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_delay_message_batch[tables_format_v1-std] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v0-long_polling-std] [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] >> test_select.py::TestPgSelect::test_select[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_deduplication[tables_format_v0-content_based] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_compaction_policy_options [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> TLocksFatTest::PointSetNotBreak >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-fifo] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_when_create_path_second_time_then_it_is_ok [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_primary_key_and_other_scheme_then_ok [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] >> TFlatTest::SelectRangeNullArgs3 >> TFlatTest::SplitEmptyAndWrite >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_read_dont_stall[tables_format_v0] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> YdbSdkSessions::TestMultipleSessions >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [SKIPPED] >> 
YdbSdkSessions::TestActiveSessionCountAfterBadSession >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient >> YdbSdkSessions::TestMultipleSessions [GOOD] >> YdbSdkSessions::TestActiveSessionCountAfterTransportError >> TObjectStorageListingTest::CornerCases >> TLocksTest::Range_BrokenLock2 >> YdbSdkSessions::TestActiveSessionCountAfterBadSession [GOOD] >> YdbSdkSessions::SessionsServerLimitWithSessionPool [SKIPPED] >> YdbSdkSessions::TestSessionPool >> TFlatTest::SelectRangeNullArgs3 [GOOD] >> TFlatTest::SelectRangeNullArgs4 >> YdbSdkSessions::MultiThreadSync |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_deduplication[tables_format_v1-content_based] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> TFlatTest::SplitEmptyAndWrite [GOOD] >> TFlatTest::SplitBoundaryRead |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> YdbSdkSessions::MultiThreadSync [GOOD] >> YdbSdkSessions::SessionsServerLimit [SKIPPED] >> YdbSdkSessions::TestSessionPool [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_table_and_path_with_name_clash_unsuccessful [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly >> TLocksFatTest::PointSetNotBreak [GOOD] >> TLocksFatTest::PointSetRemove >> TObjectStorageListingTest::CornerCases [GOOD] >> TObjectStorageListingTest::Decimal ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSdkFreeSessionAfterBadSessionQueryService [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:200: Test is failing right now |97.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::SessionsServerLimitWithSessionPool [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:588: Enable after accepting a pull request with merging configs |97.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] >> TFlatTest::SelectRangeNullArgs4 [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsTableClient [GOOD] >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> 
test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_decreasing_number_of_generations_it_is_raise_error [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_same_scheme_then_ok [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> TFlatTest::SplitBoundaryRead [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::SessionsServerLimit [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:548: Enable after accepting a pull request with merging configs |97.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_and_remove_column_many_times_success [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestSessionPool [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_add_column_after_table_creation_with_data_and_success [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SelectRangeNullArgs4 [GOOD] Test command err: 2026-01-04T02:55:26.059855Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591334623362282232:2087];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:55:26.060203Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:55:26.326137Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:55:26.408062Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:55:26.408145Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:55:26.410842Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:55:26.466003Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591334623362282169:2081] 1767495326048390 != 1767495326048393 2026-01-04T02:55:26.473986Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:55:26.529140Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:55:26.669046Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:55:26.676311Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:55:26.686565Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715658, at schemeshard: 72057594046644480 2026-01-04T02:55:26.777984Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:29.440901Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591334635265409094:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:55:29.442070Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:55:29.454240Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:55:29.545636Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:55:29.547232Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591334635265409067:2081] 1767495329439377 != 1767495329439380 2026-01-04T02:55:29.547930Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:55:29.547995Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:55:29.579980Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:55:29.587913Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:55:29.781868Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:55:29.814641Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:55:29.821130Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:55:29.837482Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::SplitBoundaryRead [GOOD] Test command err: 2026-01-04T02:55:26.549454Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591334624014993972:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:55:26.549561Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:55:26.839508Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:55:26.866880Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:55:26.866984Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:55:26.929230Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:55:26.930472Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591334624014993942:2081] 1767495326545339 != 1767495326545342 2026-01-04T02:55:26.945317Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:55:27.091214Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:55:27.157194Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:55:27.166445Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:55:27.241753Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:27.415188Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-04T02:55:27.425066Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-04T02:55:27.451524Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-04T02:55:27.458101Z node 1 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-04T02:55:27.559870Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:55:27.559898Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.22, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-04T02:55:27.560240Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.23, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-04T02:55:27.560516Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.24, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-04T02:55:27.560724Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-04T02:55:27.562164Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 3} end=Done, 4 blobs 2r (max 2), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (1907 1533 0)b }, ecr=1.000 2026-01-04T02:55:27.566297Z node 1 :OPS_COMPACT INFO: Compact{72075186224037888.1.32, eph 3} end=Done, 4 blobs 8r (max 8), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3250 2180 6413)b }, ecr=1.000 2026-01-04T02:55:27.681621Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037888 not found 2026-01-04T02:55:27.681796Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037890 not found 2026-01-04T02:55:27.686857Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037892 not found 2026-01-04T02:55:27.686907Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037889 not found 2026-01-04T02:55:27.687802Z node 1 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 1, TabletId: 72075186224037891 not found 2026-01-04T02:55:30.133039Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: 
fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591334639804050082:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:55:30.133093Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:55:30.149347Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:55:30.211825Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:55:30.247770Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:55:30.247848Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:55:30.251620Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:55:30.429439Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:55:30.430114Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:55:30.447576Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:30.531243Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1265 647 2154)b }, ecr=1.000 2026-01-04T02:55:30.536857Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.11, eph 1} end=Done, 4 blobs 3r (max 3), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 2 +0, (1139 521 2626)b }, ecr=1.000 2026-01-04T02:55:30.561091Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (1573 647 6413)b }, ecr=1.000 2026-01-04T02:55:30.565155Z node 2 :OPS_COMPACT INFO: Compact{72075186224037889.1.16, eph 2} end=Done, 4 blobs 6r (max 6), put Spent{time=0.001s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 4 +0, (2326 1432 5183)b }, ecr=1.000 2026-01-04T02:55:30.587003Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.21, eph 3} end=Done, 4 blobs 9r (max 9), put Spent{time=0.002s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 5 +0, (3307 2180 6413)b }, ecr=1.000 2026-01-04T02:55:30.701929Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { OperationType: ESchemeOpSplitMergeTablePartitions SplitMergeTablePartitions { TablePath: 
"/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 11111 } } } } } } TxId: 281474976715678 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-04T02:55:30.702166Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:804: TSplitMerge Propose, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976715678:0, at schemeshard: 72057594046644480, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 11111 } } } } 2026-01-04T02:55:30.702370Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 3 2026-01-04T02:55:30.702397Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason new shard created for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 4 2026-01-04T02:55:30.702592Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 5 2026-01-04T02:55:30.702620Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976715678:0 type: TxSplitTablePartition target path: [OwnerId: 72057594046644480, LocalPathId: 39] source path: 2026-01-04T02:55:30.702823Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_split_merge.cpp:1083: TSplitMerge Propose accepted, tableStr: /dc-1/Dir/TableOld, tableId: , opId: 281474976715678:0, at schemeshard: 72057594046644480, op: SourceRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" TabletID: 72075186224037888 ShardIdx: 1 } DestinationRanges { KeyRangeBegin: "\001\000\000\000\000\200" KeyRangeEnd: "\001\000\004\000\000\000g+\000\000" ShardIdx: 3 } DestinationRanges { KeyRangeBegin: "\001\000\004\000\000\000g+\000\000" KeyRangeEnd: "\001\000\004\000\000\000\377\377\377\177" ShardIdx: 4 }, request: TablePath: "/dc-1/Dir/TableOld" SourceTabletId: 72075186224037888 SplitBoundary { KeyPrefix { Tuple { Optional { Uint32: 11111 } } } } 2026-01-04T02:55:30.702855Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715678:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T02:55:30.703940Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715678, response: S ... 
msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037890 cookie: 72057594046644480:3 msg type: 269553152 2026-01-04T02:55:30.709965Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037891 cookie: 72057594046644480:4 msg type: 269553152 2026-01-04T02:55:30.710065Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037890 2026-01-04T02:55:30.710081Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037891 2026-01-04T02:55:30.730364Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037890 2026-01-04T02:55:30.730442Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715678 TabletId: 72075186224037890 2026-01-04T02:55:30.730869Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2026-01-04T02:55:30.732802Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037891 2026-01-04T02:55:30.732847Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:38: TSplitMerge TConfigureDestination operationId# 281474976715678:0 HandleReply TEvInitSplitMergeDestinationAck, operationId: 281474976715678:0, at schemeshard: 72057594046644480 message# OperationCookie: 281474976715678 TabletId: 72075186224037891 2026-01-04T02:55:30.732875Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715678:0 3 -> 131 2026-01-04T02:55:30.733228Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2026-01-04T02:55:30.733333Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2026-01-04T02:55:30.733363Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:334: TSplitMerge TTransferData operationId# 281474976715678:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:55:30.733385Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:353: TSplitMerge TTransferData operationId# 281474976715678:0 Starting split on src datashard 72075186224037888 splitOpId# 281474976715678:0 at tablet 72057594046644480 2026-01-04T02:55:30.733625Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553154 2026-01-04T02:55:30.733702Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715678, partId: 0, tablet: 72075186224037888 2026-01-04T02:55:30.736108Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.25, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-04T02:55:30.736109Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.26, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-04T02:55:30.736406Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.27, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-04T02:55:30.736412Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.29, eph -9223372036854775808} end=Done, 0 blobs 0r (max 0), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-04T02:55:30.736624Z node 2 :OPS_COMPACT INFO: Compact{72075186224037888.1.28, eph 1} end=Done, 0 blobs 0r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=0} 2026-01-04T02:55:30.743159Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2026-01-04T02:55:30.743222Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:207: TSplitMerge TTransferData operationId# 281474976715678:0 HandleReply TEvSplitAck, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2026-01-04T02:55:30.743522Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715678:0 131 -> 132 2026-01-04T02:55:30.743686Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2026-01-04T02:55:30.744053Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2026-01-04T02:55:30.744214Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T02:55:30.744230Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976715678, path id: [OwnerId: 72057594046644480, LocalPathId: 39] 2026-01-04T02:55:30.744437Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046644480 2026-01-04T02:55:30.744453Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [2:7591334639804050543:2240], at schemeshard: 72057594046644480, txId: 281474976715678, path id: 39 2026-01-04T02:55:30.744484Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2026-01-04T02:55:30.744504Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:437: TSplitMerge TNotifySrc, operationId: 281474976715678:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T02:55:30.744526Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_split_merge.cpp:468: Notify src datashard 72075186224037888 on partitioning changed splitOp# 281474976715678 at tablet 
72057594046644480 2026-01-04T02:55:30.746907Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715678 2026-01-04T02:55:30.746984Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 39 Version: 4 PathOwnerId: 72057594046644480, cookie: 281474976715678 2026-01-04T02:55:30.746998Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046644480, txId: 281474976715678 2026-01-04T02:55:30.747017Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976715678, pathId: [OwnerId: 72057594046644480, LocalPathId: 39], version: 4 2026-01-04T02:55:30.747034Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 7 2026-01-04T02:55:30.747096Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 0/1, is published: true 2026-01-04T02:55:30.747229Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715678:0 from tablet: 72057594046644480 to tablet: 72075186224037888 cookie: 72057594046644480:1 msg type: 269553158 2026-01-04T02:55:30.747452Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976715678 2026-01-04T02:55:30.749127Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:628: TTxOperationReply execute, operationId: 281474976715678:0, at schemeshard: 72057594046644480, message: OperationCookie: 281474976715678 TabletId: 72075186224037888 2026-01-04T02:55:30.749161Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_split_merge.cpp:392: TSplitMerge TNotifySrc, operationId: 281474976715678:0 HandleReply TEvSplitPartitioningChangedAck, from datashard: 72075186224037888, at schemeshard: 72057594046644480 2026-01-04T02:55:30.749213Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715678:0 progress is 1/1 2026-01-04T02:55:30.749224Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2026-01-04T02:55:30.749244Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#281474976715678:0 progress is 1/1 2026-01-04T02:55:30.749252Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2026-01-04T02:55:30.749265Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715678, ready parts: 1/1, is published: true 2026-01-04T02:55:30.749303Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1702: TOperation DoNotify send TEvNotifyTxCompletionResult to actorId: [2:7591334639804051169:2347] message: TxId: 281474976715678 2026-01-04T02:55:30.749329Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 281474976715678 ready parts: 1/1 2026-01-04T02:55:30.749347Z node 2 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 281474976715678:0 2026-01-04T02:55:30.749355Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 281474976715678:0 2026-01-04T02:55:30.749491Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046644480, LocalPathId: 39] was 6 2026-01-04T02:55:30.749834Z node 2 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:656: TTxOperationReply complete, operationId: 281474976715678:0, at schemeshard: 72057594046644480 2026-01-04T02:55:30.749847Z node 2 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:270: Unable to activate 281474976715678:0 |97.0%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest >> TObjectStorageListingTest::Decimal [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_crutch_groups_selection_algorithm_selects_second_group_batch[tables_format_v0] [GOOD] |97.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_only_single_read_infly_from_fifo [GOOD] |97.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::TestActiveSessionCountAfterTransportError [GOOD] >> test_select.py::TestSelect::test_select[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::MultiThreadMultipleRequestsOnSharedSessionsQueryClient [SKIPPED] Test command err: ydb/public/sdk/cpp/tests/integration/sessions/main.cpp:539: Enable after interactive tx support ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TObjectStorageListingTest::Decimal [GOOD] Test command err: 2026-01-04T02:55:28.343766Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591334630668021357:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:55:28.343963Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:55:28.383591Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got 
undelivered to scheme cache: ActorUnknown 2026-01-04T02:55:28.624028Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:55:28.624136Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:55:28.629580Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:55:28.697918Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:55:28.698271Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:55:28.698619Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591334630668021330:2081] 1767495328341780 != 1767495328341783 2026-01-04T02:55:28.754862Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:55:28.754901Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:55:28.754910Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:55:28.754999Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:55:28.897887Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:55:28.912713Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:55:28.987902Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:29.351167Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:55:31.729542Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591334644432786163:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:55:31.729611Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:55:31.745790Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:55:31.842366Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table 
profiles were not loaded 2026-01-04T02:55:31.856803Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:55:31.857029Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:55:31.861062Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:55:31.920680Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:55:31.920697Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:55:31.920703Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:55:31.920763Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:55:32.006946Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:55:32.110792Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:55:32.116483Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:55:32.134788Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |97.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_change_visibility_batch_works[tables_format_v0-std] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_ydb_create_and_remove_directory_success [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] >> YdbSdkSessions::CloseSessionWithSessionPoolExplicitDriverStopOnly [GOOD] >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> 
test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_single_key_column_failure [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> TLocksFatTest::PointSetRemove [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> TFlatTest::LargeDatashardReplyDistributed ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksFatTest::PointSetRemove [GOOD] Test command err: 2026-01-04T02:55:23.836762Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591334608213035799:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:55:23.837078Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:55:23.881507Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:55:24.153370Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:55:24.154484Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:55:24.154579Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:55:24.165298Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:55:24.245055Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591334608213035772:2081] 1767495323834391 != 1767495323834394 2026-01-04T02:55:24.251433Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:55:24.446996Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:55:24.468536Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, 
suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:55:24.475513Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:55:24.534752Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:24.700911Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:24.768896Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:24.859277Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:55:28.837108Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591334608213035799:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:55:28.837199Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:55:31.194455Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591334643377230023:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:55:31.194508Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:55:31.220436Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:55:31.291142Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:55:31.293984Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591334643377229995:2081] 1767495331193582 != 1767495331193585 2026-01-04T02:55:31.327422Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:55:31.327497Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:55:31.331517Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) 
VolatileState: Connecting -> Connected 2026-01-04T02:55:31.414795Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:55:31.499935Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:55:31.529691Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:31.582233Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:31.636534Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:32.217561Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:55:34.886696Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591334656340352278:2068];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:55:34.887220Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:55:34.899133Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:55:34.960947Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:55:34.970105Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591334656340352250:2081] 1767495334885582 != 1767495334885585 2026-01-04T02:55:35.051861Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:55:35.051965Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:55:35.054304Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:55:35.081339Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] 
[TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:55:35.207897Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:55:35.228079Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:35.287577Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:35.390124Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:35.895222Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; |97.1%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_can_change_partition_config_options [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_copy_ops.py::TestSchemeShardCopyOps::test_given_table_when_create_copy_of_it_then_ok [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_write_read_delete_many_groups[tables_format_v1] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_create_many_directories_success [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] >> 
test_select.py::TestSelect::test_select[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> TLocksTest::Range_BrokenLock2 [GOOD] >> TLocksTest::Range_BrokenLock3 |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_1_UNIQUE_SYNC-pk_types3-all_types3-index3--UNIQUE-SYNC] [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_polling.py::TestSqsPolling::test_receive_message_with_polling[tables_format_v1-long_polling-std] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> YdbSdkSessionsPool::WaitQueue/0 >> YdbSdkSessionsPool::WaitQueue/1 >> YdbSdkSessionsPool::StressTestSync/1 >> YdbSdkSessionsPool1Session::CustomPlan/0 >> YdbSdkSessionsPool1Session::RunSmallPlan/0 >> YdbSdkSessionsPool1Session::RunSmallPlan/0 [GOOD] >> YdbSdkSessionsPool::StressTestSync/0 |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> YdbSdkSessionsPool::WaitQueue/0 [GOOD] >> YdbSdkSessionsPool::WaitQueue/1 [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> YdbSdkSessionsPool1Session::FailTest/0 >> YdbSdkSessionsPool::StressTestAsync/0 >> YdbSdkSessionsPool1Session::FailTest/0 [GOOD] >> TFlatTest::LargeDatashardReplyDistributed [GOOD] >> 
TFlatTest::LargeDatashardReplyRW |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-fifo] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_delete_table_that_doesnt_exist_failure [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_select.py::TestSelect::test_select[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_by_not_single_key_column_failure [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::RunSmallPlan/0 [GOOD] |97.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_scheme_shard_operations.py::TestSchemeShardSimpleOps::test_given_table_when_drop_table_and_create_with_other_keys_then_ok [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::WaitQueue/0 [GOOD] |97.1%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::WaitQueue/1 [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |97.2%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::FailTest/0 [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] >> 
test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithPath::test_receive_attempt_reloads_same_messages[tables_format_v0-standard_mode] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessions::CloseSessionWithSessionPoolFromDtors [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions/gtest >> YdbSdkSessionsPool::StressTestSync/1 [FAIL] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_shard/py3test >> test_alter_ops.py::TestSchemeShardAlterTest::test_alter_table_cant_add_existing_column [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_shard/py3test >> YdbSdkSessionsPool1Session::CustomPlan/0 [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TA] $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} |97.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions/test-results/gtest/{meta.json ... results_accumulator.log} |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> YdbSdkSessionsPool::StressTestSync/0 [FAIL] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_select.py::TestSelect::test_select[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] |97.2%| [TA] $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] |97.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... 
results_accumulator.log} |97.2%| [TA] {RESULT} $(B)/ydb/tests/functional/scheme_shard/test-results/py3test/{meta.json ... results_accumulator.log} |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_1__ASYNC-pk_types10-all_types10-index10---ASYNC] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> TFlatTest::LargeDatashardReplyRW [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::CustomPlan/0 [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestSync/1 [FAIL] Test command err: ydb/public/sdk/cpp/tests/integration/sessions_pool/main.cpp:269: Expected equality of these values: Client->GetCurrentPoolSize() Which is: 0 activeSessionsLimit Which is: 10 |97.2%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |97.2%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] >> test_select.py::TestSelect::test_select[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TFlatTest::LargeDatashardReplyRW [GOOD] Test command err: 2026-01-04T02:55:40.689895Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591334684256759953:2228];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:55:40.690096Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:55:40.717956Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script 
execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:55:41.049765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:55:41.049977Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:55:41.052222Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:55:41.125130Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:55:41.125733Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591334684256759751:2081] 1767495340673414 != 1767495340673417 2026-01-04T02:55:41.153823Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:55:41.329405Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:55:41.342673Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T02:55:41.355198Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T02:55:41.376240Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:55:41.420809Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:41.690591Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:55:45.686193Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591334684256759953:2228];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:55:45.686321Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:55:52.195359Z node 1 :MINIKQL_ENGINE ERROR: datashard__engine_host.cpp:518: Shard %72075186224037889, txid %281474976711360, engine error: Error executing transaction (read-only: 1): Datashard 72075186224037889: reply size limit exceeded. 
(61442990 > 50331648) 2026-01-04T02:55:52.212741Z node 1 :TX_DATASHARD ERROR: execute_data_tx_unit.cpp:273: Datashard execution error for [1767495351685:281474976711360] at 72075186224037889: Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) 2026-01-04T02:55:52.218701Z node 1 :TX_PROXY ERROR: datareq.cpp:2286: Actor# [1:7591334731501406700:6139] txid# 281474976711360 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# RESULT_UNAVAILABLE shard id 72075186224037889 marker# P12 2026-01-04T02:55:52.218790Z node 1 :TX_PROXY ERROR: datareq.cpp:883: Actor# [1:7591334731501406700:6139] txid# 281474976711360 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037889: reply size limit exceeded. (61442990 > 50331648) proxy error code: ExecResultUnavailable 2026-01-04T02:55:52.910816Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591334734624017420:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:55:52.910993Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:55:52.925263Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:55:53.021644Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:55:53.021723Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:55:53.025993Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:55:53.029906Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591334734624017304:2081] 1767495352904558 != 1767495352904561 2026-01-04T02:55:53.057360Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:55:53.219923Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:55:53.240898Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:55:53.262496Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:53.914414Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:55:57.910498Z node 2 :METADATA_PROVIDER ERROR: 
log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591334734624017420:2146];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:55:57.910555Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T02:56:03.555126Z node 2 :MINIKQL_ENGINE ERROR: datashard__engine_host.cpp:518: Shard %72075186224037888, txid %281474976716361, engine error: Error executing transaction (read-only: 0): Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2026-01-04T02:56:03.573107Z node 2 :TX_DATASHARD ERROR: execute_data_tx_unit.cpp:273: Datashard execution error for [0:281474976716361] at 72075186224037888: Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) 2026-01-04T02:56:03.575954Z node 2 :TX_DATASHARD ERROR: finish_propose_unit.cpp:174: Errors while proposing transaction txid 281474976716361 at tablet 72075186224037888 status: RESULT_UNAVAILABLE errors: REPLY_SIZE_EXCEEDED (Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648)) | 2026-01-04T02:56:03.576503Z node 2 :TX_PROXY ERROR: datareq.cpp:883: Actor# [2:7591334777573696911:6114] txid# 281474976716361 RESPONSE Status# ExecResultUnavailable marker# P13c DataShardErrors: [REPLY_SIZE_EXCEEDED] Datashard 72075186224037888: reply size limit exceeded. (71580986 > 50331648) proxy error code: ExecResultUnavailable |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestSync/0 [FAIL] Test command err: ydb/public/sdk/cpp/tests/integration/sessions_pool/main.cpp:269: Expected equality of these values: Client->GetCurrentPoolSize() Which is: 0 activeSessionsLimit Which is: 1 |97.2%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest ------- [TM] {asan, default-linux-x86_64, release} ydb/core/statistics/aggregator/ut/unittest >> AnalyzeColumnshard::AnalyzeMultiOperationId 2026-01-04 02:56:01,851 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-04 02:56:02,022 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 782279 58.6M 58.0M 32.6M test_tool run_ut @/home/runner/.ya/build/build_root/c5zo/001ffd/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.ar 782664 1.7G 1.7G 1.2G └─ ydb-core-statistics-aggregator-ut --trace-path-append /home/runner/.ya/build/build_root/c5zo/001ffd/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_s Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T02:46:05.852378Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:05.936496Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:05.944994Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:449:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T02:46:05.945394Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T02:46:05.945565Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T02:46:06.338777Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:06.429066Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:06.429167Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:06.475512Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:06.554484Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T02:46:07.177758Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:46:07.178384Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T02:46:07.178443Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T02:46:07.178480Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T02:46:07.179040Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T02:46:07.243094Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T02:46:07.756416Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateExtSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_extsubdomain.cpp:194) 2026-01-04T02:46:10.320895Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T02:46:10.329552Z node 2 :STATISTICS INFO: service_impl.cpp:234: Subscribed for config changes on node 2 2026-01-04T02:46:10.333089Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T02:46:10.361229Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T02:46:10.361327Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:10.387944Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T02:46:10.389486Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:10.547267Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:46:10.547366Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:46:10.548817Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:10.549429Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:10.550126Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:10.550797Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:10.551067Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:10.551161Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:10.551367Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:10.551502Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:10.551664Z node 2 :HIVE WARN: tx__create_tablet.cpp:348: HIVE#72075186224037888 THive::TTxCreateTablet::Execute CreateTablet Postponed 2026-01-04T02:46:10.577188Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72075186224037888 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:46:10.780866Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:46:10.821284Z node 2 :STATISTICS INFO: aggregator_impl.cpp:43: [72075186224037894] OnActivateExecutor 2026-01-04T02:46:10.821374Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:13: [72075186224037894] TTxInitSchema::Execute 2026-01-04T02:46:10.908620Z node 2 :STATISTICS DEBUG: tx_init_schema.cpp:34: [72075186224037894] TTxInitSchema::Complete 2026-01-04T02:46:10.909012Z node 2 :STATISTICS DEBUG: tx_init.cpp:18: [72075186224037894] TTxInit::Execute 2026-01-04T02:46:10.909233Z node 2 :STATISTICS DEBUG: tx_init.cpp:124: [72075186224037894] Loaded BaseStatistics: schemeshard count# 0 2026-01-04T02:46:10.909315Z node 2 :STATISTICS DEBUG: tx_init.cpp:149: [72075186224037894] Loaded ColumnStatistics: column count# 0 2026-01-04T02:46:10.909379Z node 2 :STATISTICS DEBUG: tx_init.cpp:188: [72075186224037894] Loaded ScheduleTraversals: table count# 0 2026-01-04T02:46:10.909431Z node 2 :STATISTICS DEBUG: tx_init.cpp:224: [72075186224037894] Loaded ForceTraversalOperations: table count# 0 2026-01-04T02:46:10.909486Z node 2 :STATISTICS DEBUG: tx_init.cpp:273: [72075186224037894] Loaded ForceTraversalTables: table count# 0 2026-01-04T02:46:10.909540Z node 2 :STATISTICS DEBUG: 
tx_init.cpp:280: [72075186224037894] TTxInit::Complete 2026-01-04T02:46:10.910091Z node 2 :STATISTICS INFO: aggregator_impl.cpp:69: [72075186224037894] Subscribed for config changes 2026-01-04T02:46:10.911800Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:1905:2454] 2026-01-04T02:46:10.916659Z node 2 :STATISTICS DEBUG: tx_configure.cpp:21: [72075186224037894] TTxConfigure::Execute: database# /Root/Database 2026-01-04T02:46:10.922448Z node 2 :STATISTICS DEBUG: table_creator.cpp:191: Table _statistics updater. SelfId: [2:2120:2586] Owner: [2:2119:2585]. Describe result: PathErrorUnknown 2026-01-04T02:46:10.922504Z node 2 :STATISTICS NOTICE: table_creator.cpp:211: Table _statistics updater. SelfId: [2:2120:2586] Owner: [2:2119:2585]. Creating table 2026-01-04T02:46:10.922589Z node 2 :STATISTICS DEBUG: table_creator.cpp:112: Table _statistics updater. SelfId: [2:2120:2586] Owner: [2:2119:2585]. Created ESchemeOpCreateTable transaction for path: /Root/Database/.metadata/_statistics 2026-01-04T02:46:10.926543Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8331: ResolveSA(), StatisticsAggregatorId=72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:10.926666Z node 2 :STATISTICS DEBUG: schemeshard_impl.cpp:8361: ConnectToSA(), pipe client id: [2:2145:2598], at schemeshard: 72075186224037897, StatisticsAggregatorId: 72075186224037894, at schemeshard: 72075186224037897 2026-01-04T02:46:10.956505Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:97: [72075186224037894] EvServerConnected, pipe server id = [2:2218:2651] 2026-01-04T02:46:10.956648Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:224: [72075186224037894] EvConnectSchemeShard, pipe server id = [2:2218:2651], schemeshard id = 72075186224037897 2026-01-04T02:46:10.967401Z node 2 :TX_PROXY_SCHEME_CACHE WARN: cache.cpp:307: Path does not belong to the specified domain: self# [2:2233:2658], domain# [OwnerId: 72057594046644480, LocalPathId: 38], path's domain# [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T02:46:10.997147Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976720657:1, at schemeshard: 72075186224037897, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:46:11.009785Z node 2 :STATISTICS DEBUG: table_creator.cpp:229: Table _statistics updater. SelfId: [2:2120:2586] Owner: [2:2119:2585]. TEvProposeTransactionStatus: { Status: 53 TxId: 281474976720657 SchemeShardStatus: 1 SchemeShardTabletId: 72075186224037897 PathId: 33 } 2026-01-04T02:46:11.009913Z node 2 :STATISTICS DEBUG: table_creator.cpp:304: Table _statistics updater. SelfId: [2:2120:2586] Owner: [2:2119:2585]. Subscribe on create table tx: 281474976720657 2026-01-04T02:46:11.016178Z node 2 :STATISTICS DEBUG: table_creator.cpp:329: Table _statistics updater. SelfId: [2:2120:2586] Owner: [2:2119:2585]. 
Subscribe on tx: 281474976720657 registered 2026-01-04T02:46:11.189689Z node 2 :STATISTICS DEBUG: tx_configure.cpp:36: [72075186224037894] TTxConfigure::Complete 2026-01-04T02:46:11.230290Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/Database/.metadata/script_executions 2026-01-04T02:46:11.533024Z node 2 :STATISTICS DEBUG: table_creator.cpp:334: Table _statistics updater. SelfId: [2:2120:2586] Owner: [2:2119:2585]. Request: create. Transaction completed: 281474976720657. Doublechecking... 2026-01-04T02:46:11.620914Z node 2 :STATISTICS DEBUG: table_creator.cpp:221: Table _statistics updater. SelfId: [2:2120:2586] Owner: [2:2119:2585]. Table already exists, number of columns: 5, has SecurityObject: true 2026-01-04T02:46:11.621035Z node 2 :STATISTICS DEBUG: table_creator.cpp:441: Table _statistics updater. SelfId: [2:2120:2586] Owner: [2:2119:2585]. Column diff is empty, finishing 2026-01-04T02:46:12.200579Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... OwnerId: [2:5397:4452], ActorId: [2:5399:4453], Got response [2:5400:4454] SUCCESS 2026-01-04T02:52:51.540477Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:52:51.572640Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:52:51.572748Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId2, ActorId=[1:3147:3428] 2026-01-04T02:52:52.501156Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-04T02:52:52.502763Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5472:2463], ActorId: [2:5474:4496], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:52:52.506596Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:5472:2463], ActorId: [2:5474:4496], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-04T02:52:52.506753Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5472:2463], ActorId: [2:5474:4496], Start read next stream part 2026-01-04T02:52:52.567814Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 8 ], ReplyToActorId[ [2:5485:4506]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:52:52.568198Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 8 ] 2026-01-04T02:52:52.568256Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 8, ReplyToActorId = [2:5485:4506], StatRequests.size() = 1 2026-01-04T02:54:11.855440Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5472:2463], ActorId: [2:5474:4496], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:54:11.855664Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5472:2463], ActorId: [2:5474:4496], Start read next stream part 2026-01-04T02:54:11.856602Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5472:2463], ActorId: [2:5642:4590], Bootstrap. Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:54:11.859507Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3er0ct7cksk4rxg80jhzth", SessionId: ydb://session/3?node_id=2&id=Y2JkM2M0NzQtOTRmYjExZGYtMjU0N2E4ZjItNDNiYWNiZTQ=, Slow query, duration: 79.348487s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:54:11.860674Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:5472:2463], ActorId: [2:5642:4590], RunStreamQuery with text: $f0 = ($p0,$p1) -> { return AggregationFactory( "UDAF", ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) }, ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) }, StatisticsInternal::CMSMerge, StatisticsInternal::CMSFinalize, StatisticsInternal::CMSSerialize, StatisticsInternal::CMSDeserialize, ) }; SELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table` 2026-01-04T02:54:11.860807Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5472:2463], ActorId: [2:5642:4590], Start read next stream part 2026-01-04T02:54:11.861255Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5472:2463], ActorId: [2:5474:4496], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:54:11.861316Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5472:2463], ActorId: [2:5474:4496], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=Njk2YTJkOTMtYTk5OWFmOC0zZDAwODNjZC1lY2VjNzIzZQ==, TxId: 2026-01-04T02:54:11.861430Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 35000, txId: 18446744073709551615] shutting down 2026-01-04T02:54:11.933550Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 9 ], ReplyToActorId[ [2:5661:4605]], StatType[ 0 ], 
StatRequestsCount[ 1 ] 2026-01-04T02:54:11.933891Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 9 ] 2026-01-04T02:54:11.933953Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 9, ReplyToActorId = [2:5661:4605], StatRequests.size() = 1 2026-01-04T02:55:11.559961Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5472:2463], ActorId: [2:5642:4590], StreamQueryResultPart #1 finished SUCCESS, Issues: 2026-01-04T02:55:11.560084Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5472:2463], ActorId: [2:5642:4590], Start read next stream part 2026-01-04T02:55:11.561101Z node 2 :KQP_SLOW_LOG WARN: kqp_worker_common.cpp:132: TraceId: "01ke3etdwm07bazc6bedd4q9t0", SessionId: ydb://session/3?node_id=2&id=ZDg3NjQwNDQtY2I1NjJmM2ItZmY3NmRhNDUtYTQ3NWFmOQ==, Slow query, duration: 59.695303s, status: STATUS_CODE_UNSPECIFIED, user: UNAUTHENTICATED, results: 0b, text: "$f0 = ($p0,$p1) -> { return AggregationFactory(\n \"UDAF\",\n ($item,$parent) -> { return Udf(StatisticsInternal::CMSCreate, $parent as Depends)($item,$p0,$p1) },\n ($state,$item,$parent) -> { return Udf(StatisticsInternal::CMSAddValue, $parent as Depends)($state, $item) },\n StatisticsInternal::CMSMerge,\n StatisticsInternal::CMSFinalize,\n StatisticsInternal::CMSSerialize,\n StatisticsInternal::CMSDeserialize,\n)\n};\nSELECT AGGREGATE_BY(Value,$f0(4096,8)) FROM `/Root/Database/Table`", parameters: 0b 2026-01-04T02:55:11.561535Z node 2 :STATISTICS DEBUG: query_actor.h:292: [TQueryRetryActor] OwnerId: [2:5815:4688], ActorId: [2:5817:4689], Starting query actor #1 [2:5818:4690] 2026-01-04T02:55:11.561584Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5817:4689], ActorId: [2:5818:4690], Bootstrap. 
Database: /Root/Database, IsSystemUser: 1, run create session 2026-01-04T02:55:11.565981Z node 2 :STATISTICS DEBUG: query_actor.cpp:303: [TQueryBase] OwnerId: [2:5472:2463], ActorId: [2:5642:4590], StreamQueryResultPart #2 finished SUCCESS, Issues: 2026-01-04T02:55:11.566057Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5472:2463], ActorId: [2:5642:4590], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=ZjYwZTJkYWQtYWIxOGI0OTAtN2FhMzI2MGEtZThjOGJlMTE=, TxId: 2026-01-04T02:55:11.566189Z node 2 :KQP_RESOURCE_MANAGER WARN: kqp_snapshot_manager.cpp:239: KqpSnapshotManager: discarding snapshot; our snapshot: [step: 35100, txId: 18446744073709551615] shutting down 2026-01-04T02:55:11.567343Z node 2 :STATISTICS DEBUG: query_actor.cpp:201: [TQueryBase] OwnerId: [2:5817:4689], ActorId: [2:5818:4690], RunDataQuery with SessionId: ydb://session/3?node_id=2&id=YjllMTk4OTctNGY2Yjc1YzktZGFmNTdkMzAtY2JiZmQ5ZTU=, TxId: , text: DECLARE $owner_id AS Uint64; DECLARE $local_path_id AS Uint64; DECLARE $stat_types AS List; DECLARE $column_tags AS List>; DECLARE $data AS List; $to_struct = ($t) -> { RETURN <| owner_id:$owner_id, local_path_id:$local_path_id, stat_type:$t.0, column_tag:$t.1, data:$t.2, |>; }; UPSERT INTO `.metadata/_statistics` (owner_id, local_path_id, stat_type, column_tag, data) SELECT owner_id, local_path_id, stat_type, column_tag, data FROM AS_TABLE(ListMap(ListZip($stat_types, $column_tags, $data), $to_struct)); 2026-01-04T02:55:11.614048Z node 2 :STATISTICS DEBUG: query_actor.cpp:244: [TQueryBase] OwnerId: [2:5817:4689], ActorId: [2:5818:4690], DataQuery #1 finished SUCCESS, Issues: , SessionId: ydb://session/3?node_id=2&id=YjllMTk4OTctNGY2Yjc1YzktZGFmNTdkMzAtY2JiZmQ5ZTU=, TxId: 2026-01-04T02:55:11.614124Z node 2 :STATISTICS DEBUG: query_actor.cpp:371: [TQueryBase] OwnerId: [2:5817:4689], ActorId: [2:5818:4690], Finish with SUCCESS, SessionId: ydb://session/3?node_id=2&id=YjllMTk4OTctNGY2Yjc1YzktZGFmNTdkMzAtY2JiZmQ5ZTU=, TxId: 2026-01-04T02:55:11.614562Z node 2 :STATISTICS DEBUG: query_actor.h:311: [TQueryRetryActor] OwnerId: [2:5815:4688], ActorId: [2:5817:4689], Got response [2:5818:4690] SUCCESS 2026-01-04T02:55:11.615366Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:28: [72075186224037894] TTxFinishTraversal::Execute 2026-01-04T02:55:11.683487Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:38: [72075186224037894] TTxFinishTraversal::Complete force traversal for path [OwnerId: 72075186224037897, LocalPathId: 34] 2026-01-04T02:55:11.683565Z node 2 :STATISTICS DEBUG: tx_finish_trasersal.cpp:52: [72075186224037894] TTxFinishTraversal::Complete. Send TEvAnalyzeResponse, OperationId=operationId3, ActorId=[1:3147:3428] 2026-01-04T02:55:12.602926Z node 2 :STATISTICS DEBUG: aggregator_impl.cpp:646: [72075186224037894] ScheduleNextAnalyze 2026-01-04T02:55:12.604984Z node 2 :STATISTICS DEBUG: query_actor.cpp:135: [TQueryBase] OwnerId: [2:5893:2463], ActorId: [2:5895:4737], Bootstrap. 
Database: /Root/Database, IsSystemUser: 0, run create session 2026-01-04T02:55:12.615576Z node 2 :STATISTICS DEBUG: query_actor.cpp:273: [TQueryBase] OwnerId: [2:5893:2463], ActorId: [2:5895:4737], RunStreamQuery with text: SELECT count(*),HLL(Key),min(Key),max(Key),HLL(Value) FROM `/Root/Database/Table` 2026-01-04T02:55:12.615755Z node 2 :STATISTICS DEBUG: query_actor.cpp:292: [TQueryBase] OwnerId: [2:5893:2463], ActorId: [2:5895:4737], Start read next stream part 2026-01-04T02:55:12.673036Z node 2 :STATISTICS DEBUG: service_impl.cpp:773: [TStatService::TEvGetStatistics] RequestId[ 10 ], ReplyToActorId[ [2:5906:4747]], StatType[ 0 ], StatRequestsCount[ 1 ] 2026-01-04T02:55:12.673580Z node 2 :STATISTICS DEBUG: service_impl.cpp:790: [TStatService::TEvNavigateKeySetResult] RequestId[ 10 ] 2026-01-04T02:55:12.673635Z node 2 :STATISTICS DEBUG: service_impl.cpp:1285: ReplySuccess(), request id = 10, ReplyToActorId = [2:5906:4747], StatRequests.size() = 1 Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: 600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/c5zo/001ffd/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("600 second(s) wait timeout has expired: Command '['/home/runner/.ya/tools/v4/10669990147/test_tool', 'run_ut', '@/home/runner/.ya/build/build_root/c5zo/001ffd/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/chunk4/testing_out_stuff/test_tool.args']' stopped by 600 seconds timeout",), {}) |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/statistics/aggregator/ut/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |97.2%| [TA] $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> TSlotIndexesPoolTest::Ranges [GOOD] >> TNodeBrokerTest::NodesMigrationExpiredChanged >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] |97.2%| [TA] {RESULT} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/statistics/aggregator/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> TNodeBrokerTest::Test1000NodesSubscribers >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TSlotIndexesPoolTest::Ranges [GOOD] |97.2%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_creates_quoter [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_select.py::TestSelect::test_select[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] |97.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> TTenantPoolTests::TestStateStatic >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] >> TTenantPoolTests::TestStateStatic [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TTenantPoolTests::TestStateStatic [GOOD] >> TLocksTest::Range_BrokenLock3 [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v1-std] [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TNodeBrokerTest::NodesMigrationExpiredChanged [GOOD] >> YdbSdkSessionsPool1Session::GetSession/0 |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> YdbSdkSessionsPool1Session::GetSession/0 [GOOD] >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::NodesMigrationExpiredChanged [GOOD] Test command err: 2026-01-04T02:56:07.918681Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:56:07.918764Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/client/ut/unittest >> TLocksTest::Range_BrokenLock3 [GOOD] Test command err: 2026-01-04T02:55:28.459550Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591334632486622126:2129];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:55:28.460462Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:55:28.691946Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:55:28.730278Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:55:28.730409Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:55:28.752830Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:55:28.822093Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:55:28.893676Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:55:28.974815Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:55:29.041389Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:29.157373Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:29.209299Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:31.755159Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591334646724994195:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:55:31.755222Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:55:31.781863Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:55:31.880657Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:55:31.882890Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591334646724994166:2081] 1767495331754135 != 1767495331754138 2026-01-04T02:55:31.898615Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:55:31.898722Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:55:31.901072Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:55:31.971774Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:55:32.097455Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:55:32.122155Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:32.180781Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:32.227475Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 
2026-01-04T02:55:35.262111Z node 3 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[3:7591334662488989381:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:55:35.262234Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:55:35.275116Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:55:35.361759Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:7591334662488989354:2081] 1767495335261309 != 1767495335261312 2026-01-04T02:55:35.365701Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:55:35.381906Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:55:35.381986Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:55:35.384135Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:55:35.497992Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:55:35.533931Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:55:35.547778Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:35.598955Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:35.640890Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:38.691000Z node 4 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[4:7591334673409947099:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:55:38.691068Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: 
fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:55:38.725025Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:55:38.801907Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:55:38.839857Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:55:38.839938Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:55:38.849353Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:55:39.024848Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:55:39.059280Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:55:39.065420Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:55:39.077742Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:39.141598Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemesha ... 
2057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:55:51.605532Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:55:51.634897Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:55:51.658329Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:55:51.883275Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:55:51.894609Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:55:51.909367Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:51.984351Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:52.040133Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:55.907347Z node 8 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[8:7591334748197332205:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:55:55.907432Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:55:55.965971Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:55:56.050220Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:55:56.050335Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:55:56.063611Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:55:56.084170Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: 
Connecting -> Connected 2026-01-04T02:55:56.204471Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:55:56.361284Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:55:56.383068Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:56.465166Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:55:56.520976Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:56:00.538438Z node 9 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[9:7591334768528765882:2067];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:56:00.538513Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:56:00.568769Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:56:00.682101Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:56:00.693353Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:7591334768528765856:2081] 1767495360537959 != 1767495360537962 2026-01-04T02:56:00.712174Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:56:00.712293Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:56:00.726955Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:56:00.747897Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:56:01.020502Z node 9 :FLAT_TX_SCHEMESHARD 
WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:56:01.029261Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T02:56:01.047302Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:56:01.180110Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:56:01.258963Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:56:05.372079Z node 10 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[10:7591334789332896459:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T02:56:05.372179Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T02:56:05.396024Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: dc-1/.metadata/script_executions 2026-01-04T02:56:05.487036Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:56:05.489118Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:7591334789332896414:2081] 1767495365366739 != 1767495365366742 2026-01-04T02:56:05.505028Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T02:56:05.505143Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T02:56:05.527645Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T02:56:05.566061Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: dc-1/.metadata/script_executions 2026-01-04T02:56:05.765903Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: 
ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T02:56:05.793304Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:56:05.860642Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T02:56:05.920907Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) |97.3%| [TM] {BAZEL_UPLOAD} ydb/core/client/ut/unittest |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] |97.3%| [TA] $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... results_accumulator.log} >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int16-pk_types2-all_types2-index2] |97.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... results_accumulator.log} |97.3%| [TA] {RESULT} $(B)/ydb/core/client/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint64-pk_types4-all_types4-index4] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint16-pk_types6-all_types6-index6] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool1Session::GetSession/0 [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-fifo] [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int64-pk_types0-all_types0-index0] >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types1-all_types1-index1] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v0] [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |97.3%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_not_create_kesus [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_send_message_rate[tables_format_v1] [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_other_requests_rate[tables_format_v0] [GOOD] |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v0-std] [GOOD] |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> 
test_select.py::TestSelect::test_select[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-fifo] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-fifo] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-std] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-fifo] [GOOD] >> test_select.py::TestSelect::test_select[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v1] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_canonical_records.py::test_dstool_add_group_http |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-fifo] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_not_throttling_with_custom_queue_name[std-tables_format_v1] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithKesus::test_properly_creates_and_deletes_queue[tables_format_v1-std] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_fifo_groups_with_dlq_in_cloud[tables_format_v1] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_sending_duplicates [GOOD] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_create_queue_rate[tables_format_v0] [GOOD] |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/with_quotas/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_execute_minikql >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-fifo] [GOOD] |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/with_quotas/py3test >> test_quoting.py::TestSqsQuotingWithLocalRateLimiter::test_does_actions_with_queue[tables_format_v0-std] [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/sqs/with_quotas/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TA] $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |97.5%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} |97.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/with_quotas/test-results/py3test/{meta.json ... results_accumulator.log} >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-fifo] [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] 
{asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_ymq_expiring_counters [GOOD] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] 
{BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomain [GOOD] >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_message_counters_in_cloud[tables_format_v0-std] [GOOD] |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/schemeshard/ut_subdomain/unittest >> TSchemeShardSubDomainTest::CreateItemsInsideSubdomainWithStoragePools [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T02:56:39.527552Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, 
MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T02:56:39.527702Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:56:39.527759Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T02:56:39.527832Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T02:56:39.527879Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T02:56:39.527911Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T02:56:39.527974Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T02:56:39.528061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T02:56:39.528968Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T02:56:39.529283Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T02:56:39.630516Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:56:39.630591Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T02:56:39.646872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T02:56:39.647064Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T02:56:39.647224Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T02:56:39.654969Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T02:56:39.655147Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T02:56:39.655703Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T02:56:39.655938Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T02:56:39.658322Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:56:39.658483Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T02:56:39.659419Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T02:56:39.659476Z node 1 :FLAT_TX_SCHEMESHARD INFO: 
schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T02:56:39.659577Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T02:56:39.659618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T02:56:39.659729Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T02:56:39.659907Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T02:56:39.804479Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:56:39.805398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:56:39.805508Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:56:39.805564Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:56:39.805605Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:56:39.805646Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: 
"metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:56:39.805686Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:56:39.805745Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:56:39.805827Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:56:39.805896Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:56:39.805968Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:56:39.806022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:56:39.806062Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:56:39.806153Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: 
SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T02:56:39.806247Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:56:42.411333Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0" took 233us result status StatusSuccess 2026-01-04T02:56:42.411880Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0" PathDescription { Self { Name: "USER_0" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeSubDomain CreateFinished: true CreateTxId: 100 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 8 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 8 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 6 SubDomainVersion: 1 SecurityStateVersion: 0 } } Children { Name: "dir_0" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" ChildrenExist: true } Children { Name: "table_0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 250 ParentPathId: 38 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "name_USER_0_kind_hdd-1" Kind: "hdd-1" } StoragePools { Name: "name_USER_0_kind_hdd-2" Kind: "hdd-2" } StoragePools { Name: "pool-1" Kind: "pool-kind-1" } StoragePools { Name: "pool-2" Kind: "pool-kind-2" } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 
MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:56:42.412429Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/table_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:56:42.412643Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/table_0" took 238us result status StatusSuccess 2026-01-04T02:56:42.413072Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/table_0" PathDescription { Self { Name: "table_0" PathId: 39 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 101 CreateStep: 250 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_0" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 39 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:56:42.413573Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: 
schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:56:42.413721Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0" took 175us result status StatusSuccess 2026-01-04T02:56:42.414101Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0" PathDescription { Self { Name: "dir_0" PathId: 40 SchemeshardId: 72057594046678944 PathType: EPathTypeDir CreateFinished: true CreateTxId: 102 CreateStep: 200 ParentPathId: 38 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 4 } ChildrenExist: true } Children { Name: "table_1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 250 ParentPathId: 40 PathState: EPathStateCreate Owner: "root@builtin" ACL: "" ChildrenExist: false } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 40 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T02:56:42.414622Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/USER_0/dir_0/table_1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T02:56:42.414814Z node 2 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/USER_0/dir_0/table_1" took 195us result status StatusSuccess 2026-01-04T02:56:42.415198Z node 2 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/USER_0/dir_0/table_1" PathDescription { Self { Name: "table_1" PathId: 41 SchemeshardId: 72057594046678944 PathType: EPathTypeTable CreateFinished: true CreateTxId: 103 CreateStep: 250 ParentPathId: 40 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { 
GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table_1" Columns { Name: "RowId" Type: "Uint64" TypeId: 4 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "Value" Type: "Utf8" TypeId: 4608 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "RowId" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 50 Coordinators: 72075186233409546 TimeCastBucketsPerMediator: 2 Mediators: 72075186233409547 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } PathsInside: 3 PathsLimit: 10000 ShardsInside: 4 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 41 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/core/tx/schemeshard/ut_subdomain/unittest |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types0-all_types0-index0] >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-std] [GOOD] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TA] $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> test_restarts.py::test_basic [FAIL] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Utf8-pk_types9-all_types9-index9] |97.6%| [TA] {RESULT} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} |97.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tx/schemeshard/ut_subdomain/test-results/unittest/{meta.json ... results_accumulator.log} |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_all_types-pk_types3-all_types3-index3---] |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_cloud_queues_with_iam_token[tables_format_v1-std] [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-fifo] [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] |97.7%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_purge_queue_counters [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__SYNC-pk_types9-all_types9-index9-Timestamp--SYNC] |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> 
test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__SYNC-pk_types15-all_types15-index15-DyNumber--SYNC] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint8-pk_types7-all_types7-index7] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types18-all_types18-index18-Date--] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_list_clouds [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonYandexWithTenant::test_private_create_queue[tables_format_v0-std] [GOOD] |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> YdbSdkSessionsPool::StressTestAsync/0 [GOOD] >> YdbSdkSessionsPool::StressTestAsync/1 |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] >> test_auditlog.py::test_single_dml_query_logged[upsert] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_select.py::TestPgSelect::test_select[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_select.py::TestSelect::test_select[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_create_and_remove_tenant |97.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test 
|97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_kill_tablet_using_developer_ui |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_common.py::TestCommonSqsYandexCloudMode::test_private_create_queue[tables_format_v1-std] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/statistics/py3test >> test_restarts.py::test_basic [FAIL] |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/statistics/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_audit.py::TestCloudEvents::test_create_update_delete_one_queue[tables_format_v0-std] [GOOD] Test command err: run test with cloud_id=CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b folder_id=folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b iam_token=usr_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b cloud_account=acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b 2026-01-04T02:56:12.378892Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b]","tx_id":"281474976720685","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-04T02:56:12.485486Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p]","tx_id":"281474976720691","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-04T02:56:12.516376Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p/v2]","tx_id":"281474976720692","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} ======================================== 2026-01-04T02:56:12.566397Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p/v2/State]","tx_id":"281474976720699","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:12.566627Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p/v2/SentTimestampIdx]","tx_id":"281474976720700","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:12.566812Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p/v2/Messages]","tx_id":"281474976720698","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:12.567003Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p/v2/Reads]","tx_id":"281474976720697","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:12.567176Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p/v2/Attributes]","tx_id":"281474976720693","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:12.567901Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p/v2/Data]","tx_id":"281474976720695","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:12.571814Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p/v2/Groups]","tx_id":"281474976720696","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:12.572770Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p/v2/Deduplication]","tx_id":"281474976720694","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:12.900055Z: {"request_id":"243d7ec5-8255621d-4a9dc6d5-7db826c1","cloud_id":"CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b","subject":"fake_user_sid@as","operation":"create_queue","component":"ymq","folder_id":"folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b"} ======================================== 2026-01-04T02:56:13.422098Z: {"request_id":"243d7ec5-8255621d-4a9dc6d5-7db826c1","permission":"ymq.queues.create","id":"1249320005110929093$CreateMessageQueue$2026-01-04T02:56:13.421916Z","idempotency_id":"1249320005110929093$CreateMessageQueue$2026-01-04T02:56:12.436000Z","cloud_id":"CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-04T02:56:12.436000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_ed729efa-e918-11f0-831b-d00dcd536b8b.fifo","resource_id":"0000000000000001054p","labels":"{}","operation":"CreateMessageQueue","folder_id":"folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b","component":"ymq"} ======================================== 2026-01-04T02:56:15.565205Z: {"request_id":"a65c2052-458aef36-1f874b10-c5914317","permission":"ymq.queues.setAttributes","id":"8454295949579167593$UpdateMessageQueue$2026-01-04T02:56:15.564974Z","idempotency_id":"8454295949579167593$UpdateMessageQueue$2026-01-04T02:56:13.983000Z","cloud_id":"CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-04T02:56:13.983000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_ed729efa-e918-11f0-831b-d00dcd536b8b.fifo","resource_id":"0000000000000001054p","labels":"{\"tag_key_1\":\"tag_value_1\",\"tag_key_2\":\"tag_value_2\"}","operation":"UpdateMessageQueue","folder_id":"folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b","component":"ymq"} ======================================== 2026-01-04T02:56:15.565890Z: 
{"request_id":"3dfda11e-fe48721d-829262f1-cd731969","permission":"ymq.queues.setAttributes","id":"5035484629206420088$UpdateMessageQueue$2026-01-04T02:56:15.565048Z","idempotency_id":"5035484629206420088$UpdateMessageQueue$2026-01-04T02:56:15.042000Z","cloud_id":"CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-04T02:56:15.042000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_ed729efa-e918-11f0-831b-d00dcd536b8b.fifo","resource_id":"0000000000000001054p","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b","component":"ymq"} ======================================== 2026-01-04T02:56:16.165166Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p/v2/SentTimestampIdx]","tx_id":"281474976720714","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:16.226775Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p/v2/State]","tx_id":"281474976720715","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:16.288464Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p/v2/Reads]","tx_id":"281474976720716","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:16.335460Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p/v2/Messages]","tx_id":"281474976720717","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:16.397754Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p/v2/Groups]","tx_id":"281474976720718","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:16.472897Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p/v2/Deduplication]","tx_id":"281474976720719","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:16.529263Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p/v2/Data]","tx_id":"281474976720720","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:16.623633Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p/v2/Attributes]","tx_id":"281474976720721","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:16.670288Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p/v2]","tx_id":"281474976720722","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-04T02:56:16.692570Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_ed8e1b04-e918-11f0-bdfa-d00dcd536b8b/0000000000000001054p]","tx_id":"281474976720725","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} =================================== ... 918-11f0-9246-d00dcd536b8b","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-04T02:56:31.688000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_f735a567-e918-11f0-91a3-d00dcd536b8b","resource_id":"000000000000000307vg","labels":"{}","operation":"UpdateMessageQueue","folder_id":"folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b","component":"ymq"} ======================================== 2026-01-04T02:56:32.792238Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/3/SentTimestampIdx]","tx_id":"281474976720787","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:32.834244Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/3/Infly]","tx_id":"281474976720788","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:32.885648Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/3/Messages]","tx_id":"281474976720789","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:32.943835Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/3/MessageData]","tx_id":"281474976720790","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.009292Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/2/SentTimestampIdx]","tx_id":"281474976720791","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.064522Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/2/Infly]","tx_id":"281474976720792","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.105302Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/2/Messages]","tx_id":"281474976720793","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.175163Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/2/MessageData]","tx_id":"281474976720794","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.241296Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/1/SentTimestampIdx]","tx_id":"281474976720795","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.294932Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/1/Infly]","tx_id":"281474976720796","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.337165Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/1/Messages]","tx_id":"281474976720797","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.396231Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/1/MessageData]","tx_id":"281474976720798","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.472672Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/0/SentTimestampIdx]","tx_id":"281474976720799","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.521480Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/0/Infly]","tx_id":"281474976720800","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.576290Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/0/Messages]","tx_id":"281474976720801","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.622439Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/0/MessageData]","tx_id":"281474976720802","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.711841Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/State]","tx_id":"281474976720803","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.741537Z: {"request_id":"c648633a-debc183f-6fbee359-bd4b1fe3","permission":"ymq.queues.delete","id":"15868795690733906148$DeleteMessageQueue$2026-01-04T02:56:33.741385Z","idempotency_id":"15868795690733906148$DeleteMessageQueue$2026-01-04T02:56:32.736000Z","cloud_id":"CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b","masked_token":"*** hidden ***","auth_type":"{none}","remote_address":"::1","created_at":"2026-01-04T02:56:32.736000Z","status":"SUCCESS","subject":"fake_user_sid@as","queue":"Q_create_update_delete_one_queue_f735a567-e918-11f0-91a3-d00dcd536b8b","resource_id":"000000000000000307vg","labels":"{}","operation":"DeleteMessageQueue","folder_id":"folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b","component":"ymq"} ======================================== 2026-01-04T02:56:33.798552Z: 
{"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/Attributes]","tx_id":"281474976720804","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP TABLE","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.855716Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/3]","tx_id":"281474976720805","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.883655Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/2]","tx_id":"281474976720808","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.908099Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/1]","tx_id":"281474976720810","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.926300Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4/0]","tx_id":"281474976720811","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.955951Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg/v4]","tx_id":"281474976720812","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.977325Z: {"paths":"[/Root/TenantSQS/CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b/000000000000000307vg]","tx_id":"281474976720813","database":"/Root/TenantSQS","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DIRECTORY","component":"schemeshard"} ======================================== 2026-01-04T02:56:33.996503Z: {"request_id":"c648633a-debc183f-6fbee359-bd4b1fe3","cloud_id":"CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b","status":"SUCCESS","account":"CLOUD_FOR_folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b","subject":"fake_user_sid@as","queue":"000000000000000307vg","resource_id":"000000000000000307vg","operation":"delete_queue","component":"ymq","folder_id":"folder_acc_f741c525-e918-11f0-9246-d00dcd536b8b"} ======================================== ======================================== |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |97.8%| [TA] $(B)/ydb/tests/functional/statistics/test-results/py3test/{meta.json ... results_accumulator.log} |97.8%| [TA] {RESULT} $(B)/ydb/tests/functional/statistics/test-results/py3test/{meta.json ... results_accumulator.log} |97.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/statistics/test-results/py3test/{meta.json ... results_accumulator.log} |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/002385/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk2/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_bad_dynconfig/audit_log.qysrg39k.txt 2026-01-04T02:56:52.837336Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"{none}","remote_address":"127.0.0.1","status":"ERROR","subject":"{none}","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> 
test_select.py::TestPgSelect::test_select[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] [GOOD] |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/001fdc/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk5/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_good_dynconfig/audit_log.p1bix4h9.txt 2026-01-04T02:56:57.167323Z: {"sanitized_token":"othe****ltin (27F910A9)","subject":"other-user@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] >> test_canonical_records.py::test_dml_through_http >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/nemesis/ut/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/nemesis/ut/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TA] $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TA] {RESULT} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} |97.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/tools/nemesis/ut/test-results/py3test/{meta.json ... results_accumulator.log} >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] >> test_auditlog.py::test_dml_begin_commit_logged ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_other-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/002126/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk4/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_other-_bad_dynconfig/audit_log.5sx99ukl.txt 2026-01-04T02:57:06.429549Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"othe****ltin (27F910A9)","remote_address":"127.0.0.1","status":"ERROR","subject":"other-user@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_to_zero_works[tables_format_v1-std] [GOOD] |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint64-pk_types4-all_types4-index4] [GOOD] >> test_auditlog.py::test_create_and_remove_tenant [GOOD] >> test_auditlog.py::test_dynconfig [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int16-pk_types2-all_types2-index2] [GOOD] >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint16-pk_types6-all_types6-index6] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithPath::test_micro_batch_read[tables_format_v1] [GOOD] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int64-pk_types0-all_types0-index0] [GOOD] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types1-all_types1-index1] [GOOD] |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |97.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[upsert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/0022da/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk22/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.upsert/audit_log.6fu__tx5.txt 2026-01-04T02:57:10.930154Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:57:10.930093Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2026-01-04T02:57:10.839953Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_dlq_mechanics_in_cloud[tables_format_v1-tables_format_v0-std] [GOOD] |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_deduplicates_receipt_handle[tables_format_v1-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_invalid_queue_url[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types1-all_types1-index1] [GOOD] |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[insert] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/0022a5/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk18/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.insert/audit_log.27cdnl0u.txt 2026-01-04T02:57:20.495214Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:57:20.495169Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2026-01-04T02:57:20.421568Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint64-pk_types4-all_types4-index4] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_create_and_remove_tenant [GOOD] Test command err: 
AAA /home/runner/.ya/build/build_root/c5zo/0022c1/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk10/testing_out_stuff/test_auditlog.py.test_create_and_remove_tenant/audit_log.wndzafcc.txt 2026-01-04T02:57:07.870251Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"BEGIN INIT DATABASE CONFIG","remote_address":"::1","database":"/Root/users/database"} 2026-01-04T02:57:07.878672Z: {"paths":"[/Root/users/database]","tx_id":"281474976710660","database":"/Root","sanitized_token":"{none}","remote_address":"::1","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"CREATE DATABASE","component":"schemeshard"} 2026-01-04T02:57:07.914822Z: {"paths":"[/Root/users/database]","tx_id":"281474976710661","database":"/Root","sanitized_token":"{none}","remote_address":"::1","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"ALTER DATABASE","component":"schemeshard"} 2026-01-04T02:57:12.450897Z: {"paths":"[/Root/users/database/.sys]","tx_id":"281474976725657","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE DIRECTORY","component":"schemeshard"} 2026-01-04T02:57:12.453316Z: {"paths":"[/Root/users/database/.sys/auth_owners]","tx_id":"281474976725658","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.453683Z: {"paths":"[/Root/users/database/.sys/query_sessions]","tx_id":"281474976725659","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.453917Z: {"paths":"[/Root/users/database/.sys/tables]","tx_id":"281474976725660","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.454117Z: {"paths":"[/Root/users/database/.sys/pg_class]","tx_id":"281474976725661","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.454286Z: {"paths":"[/Root/users/database/.sys/partition_stats]","tx_id":"281474976725662","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.454493Z: {"paths":"[/Root/users/database/.sys/query_metrics_one_minute]","tx_id":"281474976725663","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.454766Z: 
{"paths":"[/Root/users/database/.sys/top_queries_by_read_bytes_one_hour]","tx_id":"281474976725664","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.454938Z: {"paths":"[/Root/users/database/.sys/top_queries_by_duration_one_hour]","tx_id":"281474976725665","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.457358Z: {"paths":"[/Root/users/database/.sys/compile_cache_queries]","tx_id":"281474976725666","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.457726Z: {"paths":"[/Root/users/database/.sys/top_queries_by_request_units_one_minute]","tx_id":"281474976725667","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.457998Z: {"paths":"[/Root/users/database/.sys/top_queries_by_read_bytes_one_minute]","tx_id":"281474976725668","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.458179Z: {"paths":"[/Root/users/database/.sys/resource_pool_classifiers]","tx_id":"281474976725669","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.458357Z: {"paths":"[/Root/users/database/.sys/nodes]","tx_id":"281474976725670","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.458596Z: {"paths":"[/Root/users/database/.sys/top_queries_by_cpu_time_one_hour]","tx_id":"281474976725671","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.458777Z: {"paths":"[/Root/users/database/.sys/top_partitions_by_tli_one_minute]","tx_id":"281474976725672","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.459753Z: {"paths":"[/Root/users/database/.sys/top_queries_by_cpu_time_one_minute]","tx_id":"281474976725673","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.460040Z: 
{"paths":"[/Root/users/database/.sys/top_partitions_by_tli_one_hour]","tx_id":"281474976725674","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.460258Z: {"paths":"[/Root/users/database/.sys/top_queries_by_request_units_one_hour]","tx_id":"281474976725675","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.460463Z: {"paths":"[/Root/users/database/.sys/top_partitions_one_hour]","tx_id":"281474976725676","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.460708Z: {"paths":"[/Root/users/database/.sys/pg_tables]","tx_id":"281474976725677","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.460921Z: {"paths":"[/Root/users/database/.sys/top_queries_by_duration_one_minute]","tx_id":"281474976725678","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.461134Z: {"paths":"[/Root/users/database/.sys/auth_group_members]","tx_id":"281474976725679","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.461364Z: {"paths":"[/Root/users/database/.sys/auth_users]","tx_id":"281474976725680","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.461564Z: {"paths":"[/Root/users/database/.sys/resource_pools]","tx_id":"281474976725681","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.461697Z: {"paths":"[/Root/users/database/.sys/streaming_queries]","tx_id":"281474976725682","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.461896Z: {"paths":"[/Root/users/database/.sys/auth_permissions]","tx_id":"281474976725683","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.462122Z: 
{"paths":"[/Root/users/database/.sys/top_partitions_one_minute]","tx_id":"281474976725684","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.462351Z: {"paths":"[/Root/users/database/.sys/auth_effective_permissions]","tx_id":"281474976725685","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.463178Z: {"paths":"[/Root/users/database/.sys/auth_groups]","tx_id":"281474976725686","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE SYSTEM VIEW","component":"schemeshard"} 2026-01-04T02:57:12.467931Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"END INIT DATABASE CONFIG","remote_address":"::1","database":"/Root/users/database"} 2026-01-04T02:57:13.823412Z: {"paths":"[.metadata/workload_manager/pools/default]","tx_id":"281474976720657","new_owner":"metadata@system","acl_add":"[+(SR|DS):all-users@well-known, +(SR|DS):root@builtin]","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAccepted","operation":"CREATE RESOURCE POOL","component":"schemeshard"} 2026-01-04T02:57:13.959373Z: {"reason":"Check failed: path: '/Root/users/database/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72075186224037897, LocalPathId: 35], type: EPathTypeResourcePool, state: EPathStateNoChanges)","paths":"[default]","tx_id":"281474976720658","new_owner":"metadata@system","acl_add":"[+(SR|DS):all-users@well-known, +(SR|DS):root@builtin]","database":"/Root/users/database","sanitized_token":"{none}","remote_address":"{none}","status":"SUCCESS","subject":"metadata@system","detailed_status":"StatusAlreadyExists","operation":"CREATE RESOURCE POOL","component":"schemeshard"} 2026-01-04T02:57:16.318922Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"BEGIN REMOVE DATABASE","remote_address":"::1","database":"/Root/users/database"} 2026-01-04T02:57:16.330062Z: {"paths":"[/Root/users/database]","tx_id":"281474976710662","database":"/Root","sanitized_token":"{none}","remote_address":"::1","status":"SUCCESS","subject":"{none}","detailed_status":"StatusAccepted","operation":"DROP DATABASE","component":"schemeshard"} 2026-01-04T02:57:16.353799Z: {"sanitized_token":"{none}","subject":"{none}","status":"SUCCESS","component":"console","operation":"END REMOVE DATABASE","remote_address":"::1","database":"/Root/users/database"} |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int16-pk_types2-all_types2-index2] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |98.0%| [TM] 
{BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint16-pk_types6-all_types6-index6] [GOOD] |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_execute_minikql [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_list_queues_of_nonexistent_user[tables_format_v1] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v0] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_queue_counters.py::TestYmqQueueCounters::test_counters_when_reading_from_empty_queue [GOOD] |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_sid_is_unexpected [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/0022a2/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk14/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_sid_is_unexpected/audit_log.e1ri9zqb.txt 2026-01-04T02:57:22.932750Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:57:22.932708Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 
101)","start_time":"2026-01-04T02:57:22.863964Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-04T02:57:23.241674Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:57:23.241639Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2026-01-04T02:57:23.044527Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-04T02:57:23.470648Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:57:23.470617Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2026-01-04T02:57:23.354026Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-04T02:57:23.721933Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:57:23.721896Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2026-01-04T02:57:23.580226Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-04T02:57:23.927475Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:57:23.927442Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2026-01-04T02:57:23.830589Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-04T02:57:24.118067Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:57:24.118027Z","sanitized_token":"othe****ltin (27F910A9)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2026-01-04T02:57:24.035466Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"other-user@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dynconfig [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/002290/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk16/testing_out_stuff/test_auditlog.py.test_dynconfig/audit_log.d0d0nlt2.txt 2026-01-04T02:57:24.046901Z: {"sanitized_token":"**** 
(B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dstool_add_group_http [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int64-pk_types0-all_types0-index0] [GOOD] |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v0-std] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_all_types-pk_types3-all_types3-index3---] [GOOD] |98.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_table |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_select.py::TestSelect::test_select[table_all_types-pk_types12-all_types12-index12---] [GOOD] |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-after_crutch_batch] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_topic |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] 
{BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_receive_attempt_reloads_same_messages[tables_format_v1-standard_mode] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[DROP TABLE {}-`.metadata/script_executions`] [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-fifo] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_execute_minikql [GOOD] |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_all_types-pk_types3-all_types3-index3---] [GOOD] |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_begin_commit_logged [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/002257/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk11/testing_out_stuff/test_auditlog.py.test_dml_begin_commit_logged/audit_log.pqhf0qhh.txt 2026-01-04T02:57:35.493377Z: {"tx_id":"01ke3f0mr5bqcb68ek1fj2qdrg","database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:57:35.493302Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2026-01-04T02:57:35.492645Z","grpc_method":"Ydb.Table.V1.TableService/BeginTransaction","subject":"root@builtin","detailed_status":"SUCCESS","operation":"BeginTransactionRequest","component":"grpc-proxy"} 2026-01-04T02:57:35.656279Z: {"tx_id":"01ke3f0mr5bqcb68ek1fj2qdrg","database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:57:35.656236Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","commit_tx":"0","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 
1","start_time":"2026-01-04T02:57:35.499616Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-04T02:57:35.670299Z: {"tx_id":"01ke3f0mr5bqcb68ek1fj2qdrg","database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:57:35.670252Z","sanitized_token":"**** (B6C6F477)","remote_address":"127.0.0.1","status":"SUCCESS","start_time":"2026-01-04T02:57:35.662618Z","grpc_method":"Ydb.Table.V1.TableService/CommitTransaction","subject":"root@builtin","detailed_status":"SUCCESS","operation":"CommitTransactionRequest","component":"grpc-proxy"} |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types0-all_types0-index0] [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_select.py::TestPgSelect::test_select[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] |98.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_all_types-pk_types12-all_types12-index12---] [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dstool_add_group_http [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Utf8-pk_types9-all_types9-index9] [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast, DROP COLUMN stats-`.metadata/script_executions`] [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__SYNC-pk_types21-all_types21-index21-Uint32--SYNC] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int8-pk_types3-all_types3-index3] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_String-pk_types8-all_types8-index8] |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_with_auth_root-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/00223d/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk7/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_with_auth_root-_good_dynconfig/audit_log.0cm1to5b.txt 2026-01-04T02:57:40.325182Z: {"sanitized_token":"**** (B6C6F477)","subject":"root@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] |98.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint32-pk_types5-all_types5-index5] |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] 
[GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int32-pk_types1-all_types1-index1] >> test_canonical_records.py::test_dml >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint8-pk_types7-all_types7-index7] [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_send_and_read_multiple_messages[tables_format_v1] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0_UNIQUE_SYNC-pk_types5-all_types5-index5-pgint8-UNIQUE-SYNC] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_uniform_partitiona[table_ttl_Date-pk_types0-all_types0-index0] [GOOD] |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__SYNC-pk_types27-all_types27-index27-Uint64--SYNC] |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithTenant::test_multi_read_dont_stall[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v0] [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestPgSelect::test_select[table_index_0__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_delete_message_batch_works[tables_format_v1] [GOOD] |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/datashard/partitioning/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Utf8-pk_types9-all_types9-index9] [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] >> test_canonical_records.py::test_kill_tablet_using_developer_ui [GOOD] >> test_fifo_messaging.py::TestSqsFifoMicroBatchesWithTenant::test_micro_batch_read[tables_format_v1] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_bad_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/002208/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk0/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_bad_dynconfig/audit_log.ql45g2n_.txt 2026-01-04T02:57:52.138086Z: {"reason":"ydb/library/fyamlcpp/fyamlcpp.cpp:1068: \n6:12 plain scalar cannot start with '%'","sanitized_token":"**** (C877DF61)","remote_address":"127.0.0.1","status":"ERROR","subject":"__bad__@builtin","operation":"REPLACE DYNCONFIG","new_config":"\n---\n123metadata:\n kind: MainConfig\n cluster: \"\"\n version: %s\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","component":"console"} |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__SYNC-pk_types18-all_types18-index18-Uint32--SYNC] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] >> test_select.py::TestSelect::test_select[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, 
release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> overlapping_portions.py::TestOverlappingPortions::test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint8-pk_types4-all_types4-index4-pgint8--] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[select] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/0021eb/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk20/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.select/audit_log.7fijbsl2.txt 2026-01-04T02:57:55.769567Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:57:55.769529Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2026-01-04T02:57:55.656633Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types1-all_types1-index1---ASYNC] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_1__ASYNC-pk_types34-all_types34-index34-Date--ASYNC] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__ASYNC-pk_types1-all_types1-index1-pgint4--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0_UNIQUE_SYNC-pk_types2-all_types2-index2-Datetime-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__ASYNC-pk_types25-all_types25-index25-Uint64--ASYNC] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test 
|98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint8-pk_types7-all_types7-index7] [GOOD] |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TNodeBrokerTest::Test1000NodesSubscribers [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[update] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/0021cd/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk21/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.update/audit_log.gccw4qj2.txt 2026-01-04T02:57:58.217214Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:57:58.217171Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2026-01-04T02:57:58.059855Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/mind/ut/unittest >> TNodeBrokerTest::Test1000NodesSubscribers [GOOD] Test command err: 2026-01-04T02:56:07.954598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T02:56:07.954704Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for nameservers are connected ... waiting for nameservers are connected (done) |98.4%| [TM] {BAZEL_UPLOAD} ydb/core/mind/ut/unittest |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TA] $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.4%| [TA] {RESULT} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |98.4%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/mind/ut/test-results/unittest/{meta.json ... results_accumulator.log} |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_kill_tablet_using_developer_ui [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_bad_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/0021b5/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk1/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_bad_auth-_good_dynconfig/audit_log.rgeymrqg.txt 2026-01-04T02:58:00.932418Z: {"sanitized_token":"**** (C877DF61)","subject":"__bad__@builtin","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_change_visibility_works[tables_format_v1-std] [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dml_through_http [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs0] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/0021a9/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk8/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs0/audit_log.y2973hp8.txt 2026-01-04T02:58:02.775519Z: {"commit_tx":"1","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","cloud_id":"cloud-id-A","begin_tx":"1","resource_id":"database-id-C","end_time":"2026-01-04T02:58:02.775457Z","tx_id":"{none}","query_text":"update 
`/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2026-01-04T02:58:02.622649Z","database":"/Root/test_auditlog.py","subject":"root@builtin","status":"SUCCESS","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","component":"grpc-proxy","sanitized_token":"**** (B6C6F477)","detailed_status":"SUCCESS","remote_address":"127.0.0.1"} |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_fifo_messaging.py::TestSqsFifoMessagingWithTenant::test_validates_deduplication_id[tables_format_v0] [GOOD] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_database |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Datetime-pk_types16-all_types16-index16-Datetime--] [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_single_dml_query_logged[replace] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/002172/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk19/testing_out_stuff/test_auditlog.py.test_single_dml_query_logged.replace/audit_log.b657owrs.txt 2026-01-04T02:58:09.888556Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:58:09.888511Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 3)","start_time":"2026-01-04T02:58:09.814229Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__SYNC-pk_types15-all_types15-index15-DyNumber--SYNC] [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} 
ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dml_through_http [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__SYNC-pk_types9-all_types9-index9-Timestamp--SYNC] [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Date-pk_types18-all_types18-index18-Date--] [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v0] [GOOD] >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] >> overlapping_portions.py::TestOverlappingPortions::test [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_select.py::TestSelect::test_select[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/oom/py3test >> overlapping_portions.py::TestOverlappingPortions::test [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/oom/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__SYNC-pk_types0-all_types0-index0-Datetime--SYNC] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TA] $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} |98.5%| [TA] {RESULT} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Date_0__SYNC-pk_types30-all_types30-index30-Date--SYNC] |98.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/olap/oom/test-results/py3test/{meta.json ... results_accumulator.log} >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0_UNIQUE_SYNC-pk_types20-all_types20-index20-Uint32-UNIQUE-SYNC] >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_all_types-pk_types2-all_types2-index2---] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgtimestamp-pk_types6-all_types6-index6-pgtimestamp--] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint8-pk_types4-all_types4-index4-pgint8--] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1_UNIQUE_SYNC-pk_types11-all_types11-index11-Timestamp-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__ASYNC-pk_types7-all_types7-index7-Timestamp--ASYNC] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0_UNIQUE_SYNC-pk_types8-all_types8-index8-Timestamp-UNIQUE-SYNC] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__SYNC-pk_types6-all_types6-index6-Timestamp--SYNC] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types0-all_types0-index0---SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__ASYNC-pk_types28-all_types28-index28-Uint64--ASYNC] >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__SYNC-pk_types9-all_types9-index9-Timestamp--SYNC] [GOOD] |98.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__SYNC-pk_types15-all_types15-index15-DyNumber--SYNC] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> 
test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_dml_requests_logged_when_unauthorized [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/0020ba/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk15/testing_out_stuff/test_auditlog.py.test_dml_requests_logged_when_unauthorized/audit_log.aq9w97oo.txt 2026-01-04T02:58:38.644610Z: {"database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:58:38.644570Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"insert into `/Root/test_auditlog.py/test-table` (id, value) values (100, 100), (101, 101)","start_time":"2026-01-04T02:58:38.628506Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-04T02:58:38.779171Z: {"database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:58:38.779138Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"delete from `/Root/test_auditlog.py/test-table` where id = 100 or id = 101","start_time":"2026-01-04T02:58:38.755581Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-04T02:58:38.904247Z: {"database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:58:38.904218Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"select id from `/Root/test_auditlog.py/test-table`","start_time":"2026-01-04T02:58:38.888515Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-04T02:58:39.029891Z: {"database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:58:39.029852Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2026-01-04T02:58:39.013663Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-04T02:58:39.156362Z: {"database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:58:39.156333Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"replace into `/Root/test_auditlog.py/test-table` (id, value) values (2, 3), (3, 
3)","start_time":"2026-01-04T02:58:39.139986Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} 2026-01-04T02:58:39.283418Z: {"database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:58:39.283384Z","sanitized_token":"**** (C877DF61)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"ERROR","query_text":"upsert into `/Root/test_auditlog.py/test-table` (id, value) values (4, 4), (5, 5)","start_time":"2026-01-04T02:58:39.265962Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"__bad__@builtin","detailed_status":"SCHEME_ERROR","operation":"ExecuteDataQueryRequest","component":"grpc-proxy"} |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/select/py3test >> test_select.py::TestSelect::test_select[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/select/py3test |98.6%| [TA] $(B)/ydb/tests/datashard/select/test-results/py3test/{meta.json ... results_accumulator.log} |98.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/select/test-results/py3test/{meta.json ... results_accumulator.log} |98.6%| [TA] {RESULT} $(B)/ydb/tests/datashard/select/test-results/py3test/{meta.json ... results_accumulator.log} ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_broken_dynconfig[_client_session_pool_no_auth-_good_dynconfig] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/002093/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk3/testing_out_stuff/test_auditlog.py.test_broken_dynconfig._client_session_pool_no_auth-_good_dynconfig/audit_log.i_5mqeno.txt 2026-01-04T02:58:41.814144Z: {"sanitized_token":"{none}","subject":"{none}","new_config":"\n---\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\nconfig:\n yaml_config_enabled: true\nallowed_labels:\n node_id:\n type: string\n host:\n type: string\n tenant:\n type: string\nselector_config: []\n ","status":"SUCCESS","component":"console","operation":"REPLACE DYNCONFIG","remote_address":"127.0.0.1"} |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_topic [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_auditlog.py::test_cloud_ids_are_logged[attrs1] [GOOD] Test command err: AAA /home/runner/.ya/build/build_root/c5zo/002096/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_auditlog/chunk9/testing_out_stuff/test_auditlog.py.test_cloud_ids_are_logged.attrs1/audit_log.nowi4mad.txt 2026-01-04T02:58:43.013902Z: {"tx_id":"{none}","database":"/Root/test_auditlog.py","end_time":"2026-01-04T02:58:43.013862Z","sanitized_token":"**** (B6C6F477)","begin_tx":"1","remote_address":"127.0.0.1","commit_tx":"1","status":"SUCCESS","query_text":"update `/Root/test_auditlog.py/test-table` set value = 0 where id = 1","start_time":"2026-01-04T02:58:42.814004Z","grpc_method":"Ydb.Table.V1.TableService/ExecuteDataQuery","subject":"root@builtin","detailed_status":"SUCCESS","operation":"ExecuteDataQueryRequest","folder_id":"folder-id-B","component":"grpc-proxy"} |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> 
test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1_UNIQUE_SYNC-pk_types23-all_types23-index23-Uint32-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0_UNIQUE_SYNC-pk_types26-all_types26-index26-Uint64-UNIQUE-SYNC] >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__SYNC-pk_types6-all_types6-index6-pgdate--SYNC] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types1-all_types1-index1---ASYNC] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_String-pk_types8-all_types8-index8] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int8-pk_types3-all_types3-index3] [GOOD] >> test_canonical_records.py::test_dml [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/messaging/py3test >> test_generic_messaging.py::TestSqsGenericMessagingWithPath::test_can_read_new_written_data_on_visibility_timeout[tables_format_v0] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/messaging/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_topic [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint32-pk_types5-all_types5-index5] [GOOD] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgtimestamp-pk_types6-all_types6-index6-pgtimestamp--] [GOOD] >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int32-pk_types1-all_types1-index1] [GOOD] |98.6%| [TA] $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__ASYNC-pk_types4-all_types4-index4-Datetime--ASYNC] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] |98.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... results_accumulator.log} |98.6%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/messaging/test-results/py3test/{meta.json ... 
results_accumulator.log} |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_String-pk_types8-all_types8-index8] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int8-pk_types3-all_types3-index3] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Uint32-pk_types5-all_types5-index5] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0_UNIQUE_SYNC-pk_types5-all_types5-index5-pgint8-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__ASYNC-pk_types4-all_types4-index4-pgint8--ASYNC] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_dml [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__SYNC-pk_types3-all_types3-index3-pgint8--SYNC] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint8-pk_types4-all_types4-index4-pgint8--] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/partitioning/py3test >> test_partitioning.py::TestPartitionong::test_partition_at_keys[table_Int32-pk_types1-all_types1-index1] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/partitioning/py3test |98.6%| [TA] $(B)/ydb/tests/datashard/partitioning/test-results/py3test/{meta.json ... results_accumulator.log} |98.6%| [TA] {RESULT} $(B)/ydb/tests/datashard/partitioning/test-results/py3test/{meta.json ... results_accumulator.log} >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgdate-pk_types5-all_types5-index5-pgdate--] |98.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/partitioning/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__ASYNC-pk_types1-all_types1-index1-pgint4--ASYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__SYNC-pk_types0-all_types0-index0-pgint4--SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__ASYNC-pk_types22-all_types22-index22-Uint32--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__SYNC-pk_types12-all_types12-index12-DyNumber--SYNC] >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__SYNC-pk_types21-all_types21-index21-Uint32--SYNC] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] >> YdbSdkSessionsPool::StressTestAsync/1 [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__SYNC-pk_types27-all_types27-index27-Uint64--SYNC] [GOOD] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_all_types-pk_types2-all_types2-index2---] [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest >> YdbSdkSessionsPool::StressTestAsync/1 [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/sessions_pool/gtest |98.6%| [TA] $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... results_accumulator.log} |98.6%| [TA] {RESULT} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... results_accumulator.log} |98.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/{meta.json ... 
results_accumulator.log} >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__ASYNC-pk_types25-all_types25-index25-Uint64--ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_1__ASYNC-pk_types34-all_types34-index34-Date--ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_1__SYNC-pk_types33-all_types33-index33-Date--SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__SYNC-pk_types18-all_types18-index18-Uint32--SYNC] [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types1-all_types1-index1---ASYNC] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint4-pk_types3-all_types3-index3-pgint4--] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0_UNIQUE_SYNC-pk_types2-all_types2-index2-Datetime-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__ASYNC-pk_types1-all_types1-index1-Datetime--ASYNC] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__SYNC-pk_types21-all_types21-index21-Uint32--SYNC] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test >> test_update_script_tables.py::TestUpdateScriptTablesYdb::test_recreate_tables[ALTER TABLE {} DROP COLUMN syntax, DROP COLUMN ast-`.metadata/script_executions`] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Timestamp-pk_types17-all_types17-index17-Timestamp--] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__SYNC-pk_types27-all_types27-index27-Uint64--SYNC] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgtimestamp-pk_types6-all_types6-index6-pgtimestamp--] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__ASYNC-pk_types16-all_types16-index16-DyNumber--ASYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__SYNC-pk_types24-all_types24-index24-Uint64--SYNC] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgdate-pk_types5-all_types5-index5-pgdate--] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_Date_0_UNIQUE_SYNC-pk_types32-all_types32-index32-Date-UNIQUE-SYNC] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__ASYNC-pk_types25-all_types25-index25-Uint64--ASYNC] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> 
test_s3.py::TestYdbS3TTL::test_s3[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0__SYNC-pk_types18-all_types18-index18-Uint32--SYNC] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__ASYNC-pk_types10-all_types10-index10-Timestamp--ASYNC] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__ASYNC-pk_types7-all_types7-index7-Timestamp--ASYNC] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__ASYNC-pk_types28-all_types28-index28-Uint64--ASYNC] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_0_UNIQUE_SYNC-pk_types20-all_types20-index20-Uint32-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__SYNC-pk_types0-all_types0-index0-Datetime--SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint32-pk_types9-all_types9-index9-Uint32--] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_all_types-pk_types2-all_types2-index2---] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_0__SYNC-pk_types30-all_types30-index30-Date--SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC] >> test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0__SYNC-pk_types6-all_types6-index6-pgdate--SYNC] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0_UNIQUE_SYNC-pk_types2-all_types2-index2-pgint4-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1_UNIQUE_SYNC-pk_types11-all_types11-index11-Timestamp-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__SYNC-pk_types6-all_types6-index6-Timestamp--SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0_UNIQUE_SYNC-pk_types8-all_types8-index8-Timestamp-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0_UNIQUE_SYNC-pk_types14-all_types14-index14-DyNumber-UNIQUE-SYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint4-pk_types3-all_types3-index3-pgint4--] [GOOD] >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__ASYNC-pk_types4-all_types4-index4-pgint8--ASYNC] [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> 
test_ttl.py::TestTTL::test_ttl[table_Uint32_0_UNIQUE_SYNC-pk_types20-all_types20-index20-Uint32-UNIQUE-SYNC] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__ASYNC-pk_types7-all_types7-index7-Timestamp--ASYNC] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2_UNIQUE_SYNC-pk_types2-all_types2-index2--UNIQUE-SYNC] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0__SYNC-pk_types6-all_types6-index6-Timestamp--SYNC] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/script_execution/py3test |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/script_execution/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__SYNC-pk_types0-all_types0-index0-pgint4--SYNC] [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__ASYNC-pk_types4-all_types4-index4-pgint8--ASYNC] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] |98.6%| [TA] $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |98.6%| [TA] {RESULT} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... results_accumulator.log} |98.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/script_execution/test-results/py3test/{meta.json ... 
results_accumulator.log} |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_1__ASYNC-pk_types28-all_types28-index28-Uint64--ASYNC] [GOOD] |98.6%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_0_UNIQUE_SYNC-pk_types8-all_types8-index8-Timestamp-UNIQUE-SYNC] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1_UNIQUE_SYNC-pk_types17-all_types17-index17-DyNumber-UNIQUE-SYNC] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1_UNIQUE_SYNC-pk_types11-all_types11-index11-Timestamp-UNIQUE-SYNC] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0_UNIQUE_SYNC-pk_types26-all_types26-index26-Uint64-UNIQUE-SYNC] [GOOD] |98.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0__SYNC-pk_types0-all_types0-index0-pgint4--SYNC] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint8_0__SYNC-pk_types3-all_types3-index3-pgint8--SYNC] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0_UNIQUE_SYNC-pk_types11-all_types11-index11-pgtimestamp-UNIQUE-SYNC] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1_UNIQUE_SYNC-pk_types23-all_types23-index23-Uint32-UNIQUE-SYNC] [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgdate-pk_types5-all_types5-index5-pgdate--] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_canonical_records.py::test_create_drop_and_alter_table [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__SYNC-pk_types9-all_types9-index9---SYNC] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Uint64-pk_types10-all_types10-index10-Uint64--] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__ASYNC-pk_types4-all_types4-index4-Datetime--ASYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__SYNC-pk_types3-all_types3-index3-Datetime--SYNC] >> ConfigGRPCService::ReplaceConfig >> TImportantConsumerOffsetTrackerTest::EmptyConsumersList [GOOD] >> TImportantConsumerOffsetTrackerTest::Unbound [GOOD] >> TImportantConsumerOffsetTrackerTest::Expired [GOOD] >> 
TImportantConsumerOffsetTrackerTest::UnboundAndExpired [GOOD] >> TImportantConsumerOffsetTrackerTest::ReadUnboundAndExpired [GOOD] >> TImportantConsumerOffsetTrackerTest::ReadUnboundAndWaiting300 [GOOD] >> TImportantConsumerOffsetTrackerTest::ReadUnboundAndWaiting400 [GOOD] >> TImportantConsumerOffsetTrackerTest::ReadUnboundAndWaiting500 [GOOD] >> TImportantConsumerOffsetTrackerTest::Waiting400And500 [GOOD] >> TImportantConsumerOffsetTrackerTest::Waiting400AndRead500 [GOOD] >> TImportantConsumerOffsetTrackerTest::ExactMatchSingleConsumerMaxRetention [GOOD] >> TImportantConsumerOffsetTrackerTest::ExactMatchSingleConsumerFiniteRetention [GOOD] >> TImportantConsumerOffsetTrackerTest::ExactMatchMultipleConsumers [GOOD] >> TImportantConsumerOffsetTrackerTest::MultipleExactMatches [GOOD] >> TImportantConsumerOffsetTrackerTest::UnboundKeepNextBlob [GOOD] >> TDeduplicatorTest::AddMessage [GOOD] >> TDeduplicatorTest::AddTwoMessages [GOOD] >> TDeduplicatorTest::AddDeduplicatedMessages [GOOD] >> TDeduplicatorTest::AddTwoMessages_DifferentTime_OneBucket [GOOD] >> TDeduplicatorTest::AddManyMessages_SameTime_DifferentBucket [GOOD] >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/pqtablet/partition/ut/gtest >> TDeduplicatorTest::AddManyMessages_SameTime_DifferentBucket [GOOD] Test command err: Append WAL: e0000000001|0000000000000001 Append WAL: e0000000001|0000000000000001 Append WAL: e0000000001|0000000000000001 Append WAL: e0000000001|0000000000000001 Append WAL: e0000000001|0000000000000001 Append WAL: e0000000001|0000000000000001 Append WAL: e0000000001|0000000000000002 |98.7%| [TS] {RESULT} ydb/core/persqueue/pqtablet/partition/ut/gtest |98.7%| [TS] {BAZEL_UPLOAD} ydb/core/persqueue/pqtablet/partition/ut/gtest >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] |98.7%| [TS] {asan, default-linux-x86_64, release} ydb/tests/library/ut/py3test >> kikimr_config.py::test_kikimr_config_generator_generic_connector_config [GOOD] |98.7%| [TS] {RESULT} ydb/tests/library/ut/py3test |98.7%| [TS] {BAZEL_UPLOAD} ydb/tests/library/ut/py3test |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Date-pk_types13-all_types13-index13-Date--] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] >> HttpRouter::Basic [GOOD] |98.7%| [TS] {asan, default-linux-x86_64, release} ydb/core/public_http/ut/unittest >> HttpRouter::Basic [GOOD] |98.7%| [TS] {RESULT} ydb/core/public_http/ut/unittest |98.7%| [TS] {BAZEL_UPLOAD} ydb/core/public_http/ut/unittest |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0_UNIQUE_SYNC-pk_types26-all_types26-index26-Uint64-UNIQUE-SYNC] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> ConfigGRPCService::ReplaceConfig [GOOD] >> ConfigGRPCService::ReplaceConfigWithInvalidHostConfig >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__ASYNC-pk_types1-all_types1-index1-Datetime--ASYNC] [GOOD] >> 
test.py::TestViewer::test_whoami_root >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_bs_controller] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] >> ConfigGRPCService::ReplaceConfigWithInvalidHostConfig [GOOD] >> ConfigGRPCService::FetchConfig |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1_UNIQUE_SYNC-pk_types23-all_types23-index23-Uint32-UNIQUE-SYNC] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> Coordinator::ReadStepSubscribe >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_datashard] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_hive] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_schemeshard] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[flat_tx_coordinator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_allocator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__ASYNC-pk_types22-all_types22-index22-Uint32--ASYNC] [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[keyvalueflat] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[tx_mediator] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] >> ConfigGRPCService::FetchConfig [GOOD] >> ConfigGRPCService::CheckV1IsBlocked >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_0__ASYNC-pk_types1-all_types1-index1-Datetime--ASYNC] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[persqueue] [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] >> Login::TokenIsNotExpired >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types2-all_types2-index2---ASYNC] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> Login::TokenIsNotExpired [GOOD] >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Datetime-pk_types11-all_types11-index11-Datetime--] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> KqpExecuter::TestSuddenAbortAfterReady >> ConfigGRPCService::CheckV1IsBlocked [GOOD] >> CoordinatorTests::Route >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] >> CoordinatorTests::Route [GOOD] >> CoordinatorTests::RouteTwoTopicWichSameName [GOOD] >> 
CoordinatorTests::WaitNodesConnected |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_ttl_pgint4-pk_types3-all_types3-index3-pgint4--] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/services/config/ut/unittest >> ConfigGRPCService::CheckV1IsBlocked [GOOD] Test command err: 2026-01-04T03:00:51.825251Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591336019709383128:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:00:51.825734Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T03:00:51.829978Z node 2 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.005011s 2026-01-04T03:00:52.100430Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T03:00:52.346250Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:00:52.346406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:00:52.367696Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:00:52.399985Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:00:52.420508Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T03:00:52.472085Z node 1 :GRPC_SERVER NOTICE: grpc_request_proxy.cpp:372: Grpc request proxy started, nodeid# 1, serve as static node 2026-01-04T03:00:52.472242Z node 1 :BS_CONTROLLER DEBUG: {BSCTXUDM01@disk_metrics.cpp:68} Updating disk status Record# {VDisksMetrics { VDiskId { GroupID: 2181038080 GroupGeneration: 1 Ring: 0 Domain: 0 VDisk: 0 } VSlotId { NodeId: 1 PDiskId: 1 VSlotId: 1000 } State: Initial Replicated: false DiskSpace: Green } } 2026-01-04T03:00:52.473391Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:603: Subscribe to /Root 2026-01-04T03:00:52.474121Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE DIRECTORY, path: /Root/.sys 2026-01-04T03:00:52.475467Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:175: SysViewsRosterUpdate# [1:7591336024004351066:2399] at schemeshard: 72057594046644480 Handle TEvModifySchemeTransactionResult, mkdir '/Root/.sys', status: StatusAccepted 2026-01-04T03:00:52.475483Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:158: SysViewsRosterUpdate# [1:7591336024004351066:2399] at schemeshard: 72057594046644480 Send TEvNotifyTxCompletion, txId 281474976715657 2026-01-04T03:00:52.476829Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:425: Got proxy service configuration 
2026-01-04T03:00:52.477298Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:408: Subscribed for config changes 2026-01-04T03:00:52.477333Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:416: Updated app config 2026-01-04T03:00:52.477417Z node 1 :GRPC_SERVER NOTICE: grpc_request_proxy.cpp:372: Grpc request proxy started, nodeid# 1, serve as static node 2026-01-04T03:00:52.477589Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:603: Subscribe to /Root 2026-01-04T03:00:52.477838Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:408: Subscribed for config changes 2026-01-04T03:00:52.477847Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:416: Updated app config 2026-01-04T03:00:52.479078Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715658, response: Status: StatusAccepted TxId: 281474976715658 SchemeshardId: 72057594046644480 PathId: 3, at schemeshard: 72057594046644480 2026-01-04T03:00:52.479305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715658, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE SYSTEM VIEW, path: /Root/.sys/auth_permissions 2026-01-04T03:00:52.479332Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:280: Delay activating, operation part: 281474976715658:0, there is await operations num 1 2026-01-04T03:00:52.479492Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715659, response: Status: StatusAccepted TxId: 281474976715659 SchemeshardId: 72057594046644480 PathId: 4, at schemeshard: 72057594046644480 2026-01-04T03:00:52.479621Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715659, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE SYSTEM VIEW, path: /Root/.sys/auth_effective_permissions 2026-01-04T03:00:52.479655Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:280: Delay activating, operation part: 281474976715659:0, there is await operations num 1 2026-01-04T03:00:52.479721Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715660, response: Status: StatusAccepted TxId: 281474976715660 SchemeshardId: 72057594046644480 PathId: 5, at schemeshard: 72057594046644480 2026-01-04T03:00:52.479848Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715660, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE SYSTEM VIEW, path: /Root/.sys/auth_users 2026-01-04T03:00:52.479860Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:280: Delay activating, operation part: 281474976715660:0, there is await operations num 1 2026-01-04T03:00:52.479952Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715661, response: Status: StatusAccepted TxId: 281474976715661 SchemeshardId: 72057594046644480 PathId: 6, at schemeshard: 72057594046644480 2026-01-04T03:00:52.480067Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715661, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE SYSTEM VIEW, path: /Root/.sys/pg_tables 2026-01-04T03:00:52.480080Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:280: Delay activating, operation part: 
281474976715661:0, there is await operations num 1 2026-01-04T03:00:52.480130Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715662, response: Status: StatusAccepted TxId: 281474976715662 SchemeshardId: 72057594046644480 PathId: 7, at schemeshard: 72057594046644480 2026-01-04T03:00:52.480242Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715662, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE SYSTEM VIEW, path: /Root/.sys/ds_pdisks 2026-01-04T03:00:52.480249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:175: SysViewsRosterUpdate# [1:7591336024004351066:2399] at schemeshard: 72057594046644480 Handle TEvModifySchemeTransactionResult, create sys view '/Root/.sys/auth_permissions', status: StatusAccepted 2026-01-04T03:00:52.480254Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:280: Delay activating, operation part: 281474976715662:0, there is await operations num 1 2026-01-04T03:00:52.480264Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:158: SysViewsRosterUpdate# [1:7591336024004351066:2399] at schemeshard: 72057594046644480 Send TEvNotifyTxCompletion, txId 281474976715658 2026-01-04T03:00:52.480301Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:175: SysViewsRosterUpdate# [1:7591336024004351066:2399] at schemeshard: 72057594046644480 Handle TEvModifySchemeTransactionResult, create sys view '/Root/.sys/auth_effective_permissions', status: StatusAccepted 2026-01-04T03:00:52.480312Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:158: SysViewsRosterUpdate# [1:7591336024004351066:2399] at schemeshard: 72057594046644480 Send TEvNotifyTxCompletion, txId 281474976715659 2026-01-04T03:00:52.480315Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715663, response: Status: StatusAccepted TxId: 281474976715663 SchemeshardId: 72057594046644480 PathId: 8, at schemeshard: 72057594046644480 2026-01-04T03:00:52.480331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:175: SysViewsRosterUpdate# [1:7591336024004351066:2399] at schemeshard: 72057594046644480 Handle TEvModifySchemeTransactionResult, create sys view '/Root/.sys/auth_users', status: StatusAccepted 2026-01-04T03:00:52.480339Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:158: SysViewsRosterUpdate# [1:7591336024004351066:2399] at schemeshard: 72057594046644480 Send TEvNotifyTxCompletion, txId 281474976715660 2026-01-04T03:00:52.480362Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:175: SysViewsRosterUpdate# [1:7591336024004351066:2399] at schemeshard: 72057594046644480 Handle TEvModifySchemeTransactionResult, create sys view '/Root/.sys/pg_tables', status: StatusAccepted 2026-01-04T03:00:52.480372Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:158: SysViewsRosterUpdate# [1:7591336024004351066:2399] at schemeshard: 72057594046644480 Send TEvNotifyTxCompletion, txId 281474976715661 2026-01-04T03:00:52.480404Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:175: SysViewsRosterUpdate# [1:7591336024004351066:2399] at schemeshard: 72057594046644480 Handle TEvModifySchemeTransactionResult, create sys view '/Root/.sys/ds_pdisks', status: StatusAccepted 2026-01-04T03:00:52.480413Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:158: SysViewsRosterUpdate# [1:7591336024004351066:2399] at schemeshard: 72057594046644480 Send TEvNotifyTxCompletion, txId 281474976715662 2026-01-04T03:00:52.480464Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715663, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE SYSTEM VIEW, path: /Root/.sys/auth_group_members 2026-01-04T03:00:52.480477Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:280: Delay activating, operation part: 281474976715663:0, there is await operations num 1 2026-01-04T03:00:52.480524Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715664, response: Status: StatusAccepted TxId: 281474976715664 SchemeshardId: 72057594046644480 PathId: 9, at schemeshard: 72057594046644480 2026-01-04T03:00:52.480547Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:175: SysViewsRosterUpdate# [1:7591336024004351066:2399] at schemeshard: 72057594046644480 Handle TEvModifySchemeTransactionResult, create sys view '/Root/.sys/auth_group_members', status: StatusAccepted 2026-01-04T03:00:52.480557Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:158: SysViewsRosterUpdate# [1:7591336024004351066:2399] at schemeshard: 72057594046644480 Send TEvNotifyTxCompletion, txId 281474976715663 2026-01-04T03:00:52.480630Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715664, database: /Root, subject: metadata@system, status: StatusAccepted, operation: CREATE SYSTEM VIEW, path: /Root/.sys/top_partitions_one_minute 2026-01-04T03:00:52.480705Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:280: Delay activating, operation part: 281474976715664:0, there is await operati ... 
Handle TEvUpdateAck, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710657 2026-01-04T03:01:06.007923Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-04T03:01:06.007934Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046644480, msg: Owner: 72057594046644480 Generation: 2 LocalPathId: 1 Version: 6 PathOwnerId: 72057594046644480, cookie: 281474976710657 2026-01-04T03:01:06.007948Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:147: Publication in-flight, count: 1, at schemeshard: 72057594046644480, txId: 281474976710657 2026-01-04T03:01:06.007954Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-04T03:01:06.007967Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 72057594046644480, txId: 281474976710657, pathId: [OwnerId: 72057594046644480, LocalPathId: 1], version: 6 2026-01-04T03:01:06.007983Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-04T03:01:06.007994Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-04T03:01:06.008047Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__publish_to_scheme_board.cpp:155: Publication complete, notify & remove, at schemeshard: 72057594046644480, txId: 281474976710657, subscribers: 1 2026-01-04T03:01:06.008077Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:212: TTxAckPublishToSchemeBoard Notify send TEvNotifyTxCompletionResult, at schemeshard: 72057594046644480, to actorId: [1:7591336077928363221:2305] 2026-01-04T03:01:06.009148Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046644480, cookie: 281474976710657 2026-01-04T03:01:06.046211Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# ReplaceConfigRequest, traceId# 01ke3f72bw5sqy54h092bpw5qv, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:58380, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-04T03:01:06.074215Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { GetInterfaceVersion { } } } 2026-01-04T03:01:06.074684Z node 1 :BS_CONTROLLER DEBUG: {BSC24@console_interaction.cpp:224} Console replace config request Request# {ClusterYaml: "\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 0\n\nconfig:\n host_configs:\n - host_config_id: 1\n drive:\n - path: SectorMap:1:64\n type: SSD\n - path: SectorMap:2:64\n type: SSD\n - host_config_id: 2\n drive:\n - path: SectorMap:3:64\n type: SSD\n hosts:\n - host: ::1\n port: 12001\n host_config_id: 2\n feature_flags:\n switch_to_config_v2: true\n" DedicatedConfigMode: false AllowUnknownFields: false BypassMetadataChecks: false PeerName: "ipv6:[::1]:58380" UserToken: "\n\014root@builtin\022\030\022\026\n\024all-users@well-known\032\014root@builtin\"\007Builtin*\017**** (B6C6F477)0\000" } 2026-01-04T03:01:06.076115Z node 1 :BS_CONTROLLER DEBUG: {BSC36@console_interaction.cpp:407} Sending 
TEvControllerValidateConfigRequest to console ConsolePipe# [1:7591336077928362708:2464] 2026-01-04T03:01:06.086770Z node 1 :BS_CONTROLLER DEBUG: {BSC27@console_interaction.cpp:478} Console validate config response Response# {Status: ConfigIsValid YAML: "\nmetadata:\n kind: MainConfig\n cluster: \"\"\n version: 1\n\nconfig:\n host_configs:\n - host_config_id: 1\n drive:\n - path: SectorMap:1:64\n type: SSD\n - path: SectorMap:2:64\n type: SSD\n - host_config_id: 2\n drive:\n - path: SectorMap:3:64\n type: SSD\n hosts:\n - host: ::1\n port: 12001\n host_config_id: 2\n feature_flags:\n switch_to_config_v2: true\n" } 2026-01-04T03:01:06.093046Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:1:64" Type: SSD } Drive { Path: "SectorMap:2:64" Type: SSD } ItemConfigGeneration: 1 } } Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "SectorMap:3:64" Type: SSD } } } Command { DefineBox { BoxId: 999 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } ItemConfigGeneration: 1 } } Rollback: true StorageConfig { type_url: "type.googleapis.com/NKikimrBlobStorage.TStorageConfig" value: "\032\234\001\nL\n\030\010\001\020\001\032\016SectorMap:1:64(\0010\001\022\030\n\n\010\000\020\001\030\000 \000(\000\022\010\010\001\020\001\030\000 \001\030\000\032\024\010\000\020\001\030\000\"\014\n\n\n\010\010\001\020\001\030\000 \001 \001R*\010\001\032\022\n\016SectorMap:1:64\020\001\032\022\n\016SectorMap:2:64\020\001R\026\010\002\032\022\n\016SectorMap:3:64\020\001Z\010\010\001\"\004\020\002\030\001*,\n\003::1\020\341]\030\001\" R\007default\362\001\020generated-rack-1\240\2111\001J\010\010\001\022\004\010\001\030\001R\010\010\001\022\004\010\001\030\001Z\010\010\001\022\004\010\001\030\001" } } 2026-01-04T03:01:06.093336Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# SectorMap:1:64 2026-01-04T03:01:06.093357Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# SectorMap:2:64 2026-01-04T03:01:06.093371Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1 2026-01-04T03:01:06.093441Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# SectorMap:3:64 2026-01-04T03:01:06.093459Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1001 2026-01-04T03:01:06.093467Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1 2026-01-04T03:01:06.093475Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1000 2026-01-04T03:01:06.098137Z node 1 :BS_CONTROLLER DEBUG: {BSC14@bsc.cpp:624} ApplyStorageConfig Request# {Request { Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:1:64" Type: SSD } Drive { Path: "SectorMap:2:64" Type: SSD } ItemConfigGeneration: 1 } } Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "SectorMap:3:64" Type: SSD } } } Command { DefineBox { BoxId: 999 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } ItemConfigGeneration: 1 } } StorageConfig { type_url: "type.googleapis.com/NKikimrBlobStorage.TStorageConfig" value: "\032\234\001\nL\n\030\010\001\020\001\032\016SectorMap:1:64(\0010\001\022\030\n\n\010\000\020\001\030\000 \000(\000\022\010\010\001\020\001\030\000 
\001\030\000\032\024\010\000\020\001\030\000\"\014\n\n\n\010\010\001\020\001\030\000 \001 \001R*\010\001\032\022\n\016SectorMap:1:64\020\001\032\022\n\016SectorMap:2:64\020\001R\026\010\002\032\022\n\016SectorMap:3:64\020\001Z\010\010\001\"\004\020\002\030\001*,\n\003::1\020\341]\030\001\" R\007default\362\001\020generated-rack-1\240\2111\001J\010\010\001\022\004\010\001\030\001R\010\010\001\022\004\010\001\030\001Z\010\010\001\022\004\010\001\030\001" } } } 2026-01-04T03:01:06.100022Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { DefineHostConfig { HostConfigId: 1 Drive { Path: "SectorMap:1:64" Type: SSD } Drive { Path: "SectorMap:2:64" Type: SSD } ItemConfigGeneration: 1 } } Command { DefineHostConfig { HostConfigId: 2 Drive { Path: "SectorMap:3:64" Type: SSD } } } Command { DefineBox { BoxId: 999 Host { Key { Fqdn: "::1" IcPort: 12001 } HostConfigId: 2 } ItemConfigGeneration: 1 } } StorageConfig { type_url: "type.googleapis.com/NKikimrBlobStorage.TStorageConfig" value: "\032\234\001\nL\n\030\010\001\020\001\032\016SectorMap:1:64(\0010\001\022\030\n\n\010\000\020\001\030\000 \000(\000\022\010\010\001\020\001\030\000 \001\030\000\032\024\010\000\020\001\030\000\"\014\n\n\n\010\010\001\020\001\030\000 \001 \001R*\010\001\032\022\n\016SectorMap:1:64\020\001\032\022\n\016SectorMap:2:64\020\001R\026\010\002\032\022\n\016SectorMap:3:64\020\001Z\010\010\001\"\004\020\002\030\001*,\n\003::1\020\341]\030\001\" R\007default\362\001\020generated-rack-1\240\2111\001J\010\010\001\022\004\010\001\030\001R\010\010\001\022\004\010\001\030\001Z\010\010\001\022\004\010\001\030\001" } } 2026-01-04T03:01:06.100394Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1000 Path# SectorMap:1:64 2026-01-04T03:01:06.100423Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1001 Path# SectorMap:2:64 2026-01-04T03:01:06.100440Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1 2026-01-04T03:01:06.100519Z node 1 :BS_CONTROLLER NOTICE: {BSCFP02@config_fit_pdisks.cpp:356} Create new pdisk PDiskId# 1:1002 Path# SectorMap:3:64 2026-01-04T03:01:06.100530Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1001 2026-01-04T03:01:06.100538Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1 2026-01-04T03:01:06.100547Z node 1 :BS_CONTROLLER NOTICE: {BSCFP03@config_fit_pdisks.cpp:363} PDisk to remove: PDiskId# 1:1000 2026-01-04T03:01:06.104444Z node 1 :BS_CONTROLLER ERROR: {BSC15@bsc.cpp:687} TEvControllerConfigResponse Response# {Status { Success: true } Status { Success: true } Status { Success: true } ErrorDescription: "failed to remove PDisk# 1:1 as it has active VSlots" ConfigTxSeqNo: 4 } 2026-01-04T03:01:06.105613Z node 1 :BS_CONTROLLER DEBUG: {BSC20@console_interaction.cpp:199} Console commit config response Response# {Status: Committed } response: ready: true status: SUCCESS result { [type.googleapis.com/Ydb.Config.ReplaceConfigResult] { } } 2026-01-04T03:01:06.118026Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:8101: Got new config: FeatureFlags { SwitchToConfigV2: true } QueryServiceConfig { AvailableExternalDataSources: "ObjectStorage" } 2026-01-04T03:01:06.118060Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:06.120233Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: 
schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# ObjectStorage 2026-01-04T03:01:06.120291Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T03:01:06.128904Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# FetchConfigRequest, traceId# 01ke3f72e0797h2qag7yc9vre9, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:58380, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-04T03:01:06.129561Z node 1 :BS_CONTROLLER DEBUG: {BSCTXCC01@config_cmd.cpp:433} Execute TEvControllerConfigRequest Request# {Command { GetInterfaceVersion { } } } 2026-01-04T03:01:06.134775Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# ReplaceConfigRequest, traceId# 01ke3f72em2deptfes1xf8f0df, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:58380, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-04T03:01:06.227908Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions |98.7%| [TM] {RESULT} ydb/services/config/ut/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/services/config/ut/unittest >> CoordinatorTests::WaitNodesConnected [GOOD] >> CoordinatorTests::ProcessMappingWithNodeIds [GOOD] >> CoordinatorTests::RebalanceAfterNewNodeConnected >> CoordinatorTests::RebalanceAfterNewNodeConnected [GOOD] >> CoordinatorTests::RebalanceAfterNodeDisconnected >> CoordinatorTests::RebalanceAfterNodeDisconnected [GOOD] >> LeaderElectionTests::Test1 |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_table [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> LeaderElectionTests::Test1 [GOOD] >> LeaderElectionTests::TestLocalMode [GOOD] >> TopicSessionTests::TwoSessionsWithoutOffsets >> test_canonical_requests.py::Test::test_essential |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/auth/gtest >> Login::TokenIsNotExpired [GOOD] |98.7%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/auth/gtest |98.7%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/auth/gtest >> test_http_api.py::TestHttpApi::test_simple_analytics_query |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgdate-pk_types6-all_types6-index6-pgdate--] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint32_1__ASYNC-pk_types22-all_types22-index22-Uint32--ASYNC] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__SYNC-pk_types12-all_types12-index12-DyNumber--SYNC] [GOOD] >> test_ctas.py::TestYtCtas::test_simple_ctast |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/scheme_tests/py3test >> tablet_scheme_tests.py::TestTabletSchemes::test_tablet_schemes[kesus] [GOOD] |98.7%| [TM] {RESULT} ydb/tests/functional/scheme_tests/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/scheme_tests/py3test >> 
TSequence::CreateTableWithDefaultFromSequence >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] >> Coordinator::ReadStepSubscribe [GOOD] >> Coordinator::LastStepSubscribe |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_2__SYNC-pk_types7-all_types7-index7---SYNC] [GOOD] >> SequenceProxy::Basics |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> DataShardSnapshotIsolation::ReadWriteNoLocksNoConflict >> test_example.py::TestExample::test_example >> SequenceProxy::Basics [GOOD] >> SequenceProxy::DropRecreate >> KqpExecuter::TestSuddenAbortAfterReady [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/executer_actor/ut/unittest >> KqpExecuter::TestSuddenAbortAfterReady [GOOD] Test command err: Trying to start YDB, gRPC: 22471, MsgBus: 6101 ... waiting for SysViewsRoster update finished 2026-01-04T03:01:12.792947Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:01:12.977295Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:01:12.978079Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:01:12.978148Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:01:13.400419Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:13.400556Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:13.490702Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495669880558 != 1767495669880561 2026-01-04T03:01:13.509130Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:13.561796Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:13.701923Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:01:14.577192Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T03:01:14.577318Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T03:01:14.577353Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T03:01:14.577569Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T03:01:14.590108Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:01:14.972370Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:01:15.053570Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:15.362309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:15.762375Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: 
Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:16.067245Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:17.610814Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1907:3513], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:17.611516Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:17.612629Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:1980:3532], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:17.612712Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:17.731153Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:17.912845Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:18.179751Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:18.499701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:18.833256Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:19.146488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:19.448494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:19.777036Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:20.288241Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2793:4174], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:20.288355Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:20.288779Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2797:4178], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:20.288852Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:20.288928Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:2800:4181], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:20.319249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T03:01:20.505169Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:2802:4183], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715670 completed, doublechecking } 2026-01-04T03:01:20.571479Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:2857:4219] txid# 281474976715671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } Got NKikimr::NGRpcService::TGrpcRequestCall> [1:829:2724] [1:8320808721877066593:7169396] Got NKikimr::NGRpcService::TEvGrpcMon::TEvReportPeer [0:6011864930757669447:2188150] [1:8320808721877066593:7169396] Got NActors::IEventHandle [1:3155:4455] [1:829:2724] Got NKikimr::NKqp::NPrivateEvents::TEvQueryRequest [1:8678280833929343339:121] [1:3155:4455] Got NKikimr::NKqp::NPrivateEvents::TEvQueryRequest [1:3154:4454] [1:66:2113] Got NKikimr::NKqp::NWorkload::TEvSubscribeOnPoolChanges [1:7742373267896299883:25708] [1:66:2113] Got NKikimr::NKqp::NScheduler::TEvAddDatabase [1:7235142148544295275:29292] [1:66:2113] Got NActors::IEventHandle [1:3157:4457] [1:300:2344] Got NKikimr::NKqp::NWorkload::TEvPlaceRequestIntoPool [1:7742373267896299883:25708] [1:3154:4454] Got NKikimr::NKqp::NScheduler::TEvAddPool [1:7235142148544295275:29292] [1:66:2113] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySet [0:7307199536658146131:7762515] [1:3157:4457] Got NActors::IEventHandle [1:3158:4458] [1:300:2344] Got NActors::IEventHandle [1:3159:4459] [1:304:2348] Got NActors::IEventHandle [1:3160:4460] [1:3158:4458] Got NKikimr::NKqp::NScheduler::TEvAddPool [1:7235142148544295275:29292] [1:66:2113] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySet [0:7307199536658146131:7762515] [1:3160:4460] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult [1:3157:4457] [1:3159:4459] Got NActors::IEventHandle [1:3161:4461] [1:304:2348] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvFetchPoolResponse [1:300:2344] [1:3157:4457] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult [1:3160:4460] [1:3161:4461] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvUpdatePoolSubscription [1:2865:4226] [1:8320808721877066593:7169396] Got NKikimr::NKqp::NWorkload::TEvUpdatePoolInfo [1:66:2113] [1:2865:4226] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvFetchPoolResponse [1:3158:4458] [1:3160:4460] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvResolvePoolResponse [1:7742373267896299883:25708] [1:3158:4458] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvResolvePoolResponse [1:2865:4226] [1:3158:4458] Got NKikimr::NKqp::NWorkload::TEvContinueRequest [1:3154:4454] [1:2865:4226] Got NKikimr::NKqp::NPrivateEvents::TEvCompileRequest [1:8101253777303040363:6646889] [1:3154:4454] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvPlaceRequestIntoPoolResponse [1:7742373267896299883:25708] [1:2865:4226] Got NActors::IEventHandle [1:3162:4462] [1:296:2340] Got NActors::IEventHandle [1:3164:4464] [1:3162:4462] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySet [0:7307199536658146131:7762515] [1:3164:4464] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest [1:2886:4241] [1:304:2348] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncRequest [1:2886:4241] [1:304:2348] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2887:4241] [1:2886:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2888:4241] [1:2886:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2889:4241] [1:2886:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest 
[1:2890:4241] [1:2887:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2892:4241] [1:2888:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:24339059:0] [1:2890:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:1099535966835:0] [1:2892:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2894:4241] [1:2889:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2890:4241] [1:2:2049] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2199047594611:0] [1:2894:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2892:4241] [1:5:2052] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2887:4241] [1:2890:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2894:4241] [1:8:2055] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2888:4241] [1:2892:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2886:4241] [1:2887:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2889:4241] [1:2894:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2886:4241] [1:2888:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse [1:304:2348] [1:2886:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2886:4241] [1:2889:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2887:4241] [1:2886:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2888:4241] [1:2886:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2889:4241] [1:2886:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2890:4241] [1:2887:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:24339059:0] [1:2890:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2892:4241] [1:2888:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:1099535966835:0] [1:2892:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2894:4241] [1:2889:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2890:4241] [1:2:2049] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionRequest [1:2199047594611:0] [1:2894:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2892:4241] [1:5:2052] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2887:4241] [1:2890:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2894:4241] [1:8:2055] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2888:4241] [1:2892:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2886:4241] [1:2887:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2889:4241] [1:2894:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2886:4241] [1:2888:4241] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncResponse [1:304:2348] [1:2886:4241] Got NActors::IEventHandle [1:3165:4465] [1:304:2348] Got NKikimr::NSchemeBoard::NInternalEvents::TEvSyncVersionResponse [1:2886:4241] [1:2889:4241] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult [1:3164:4464] [1:3165:4465] Got NActors::IEventHandle [1:3166:4466] [1:3164:4464] Got NKikimr::NStat::TEvStatistics::TEvGetStatistics [1:8534995652929746003:6644585] [1:3166:4466] Got 
NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySet [0:7307199536658146131:7762515] [1:77:2124] Got NActors::IEventHandle [1:3167:4467] [1:304:2348] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult [1:77:2124] [1:3167:4467] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySet [0:7307199536658146131:7762515] [1:77:2124] Got NActors::IEventHandle [1:3168:4468] [1:304:2348] Got NKikimr::TEvTxProxySchemeCache::TEvNavigateKeySetResult [1:77:2124] [1:3168:4468] Got NKikimr::NStat::TEvStatistics::TEvGetStatisticsResult [1:3166:4466] [1:77:2124] Got NKikimr::NKqp::TEvKqp::TEvContinueProcess [1:3162:4462] [1:8320808721877066593:7169396] Got NKikimr::NKqp::TEvKqp::TEvContinueProcess [1:3162:4462] [1:8320808721877066593:7169396] Got NKikimr::NKqp::TEvKqp::TEvContinueProcess [1:3162:4462] [1:8320808721877066593:7169396] Got NKikimr::NKqp::NPrivateEvents::TEvCompileResponse [1:296:2340] [1:3162:4462] Got NKikimr::NKqp::NPrivateEvents::TEvCompileResponse [1:3154:4454] [1:296:2340] Got NKikimr::TEvTxUserProxy::TEvProposeKqpTransaction [0:6014971197384587348:7762533] [1:3154:4454] Got NKikimr::NKqp::TEvKqpExecuter::TEvTxRequest [1:3169:4454] [1:68:2115] Got NKikimr::NKqp::NScheduler::TEvAddDatabase [1:7235142148544295275:29292] [1:3169:4454] Got NKikimr::NKqp::NScheduler::TEvAddPool [1:7235142148544295275:29292] [1:3169:4454] Got NKikimr::NKqp::NScheduler::TEvAddQuery [1:7235142148544295275:29292] [1:3169:4454] 2026-01-04T03:01:24.215376Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:3169:4454] TxId: 281474976715673. Ctx: { TraceId: 01ke3f7k9se4zpjpmjew77je5d, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWE3MGQ4OTItODA0Njg3YjgtNjc2MzI4ZC1lMjQ5N2ZmYg==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# STATUS_CODE_UNSPECIFIED Issues# trace_id# Got NKikimr::NKqp::TEvKqpExecuter::TEvTxResponse [1:3154:4454] [1:3169:4454] 2026-01-04T03:01:24.215816Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:3171} SessionId: ydb://session/3?node_id=1&id=YWE3MGQ4OTItODA0Njg3YjgtNjc2MzI4ZC1lMjQ5N2ZmYg==, ActorId: [1:3154:4454], ActorState: ExecuteState, LegacyTraceId: 01ke3f7k9se4zpjpmjew77je5d, Create QueryResponse for error on request, msg: status# STATUS_CODE_UNSPECIFIED issues# trace_id# Got NKikimr::NKqp::NWorkload::TEvCleanupRequest [1:2865:4226] [1:3154:4454] Got NKikimr::NKqp::NWorkload::TEvPrivate::TEvFinishRequestInPool [1:7742373267896299883:25708] [1:2865:4226] Got NKikimr::NKqp::NScheduler::TEvRemoveQuery [1:7235142148544295275:29292] [1:3169:4454] Got NKikimr::TEvPipeCache::TEvUnlink [0:7521962744731429200:16741] [1:3169:4454] Got NKikimr::NSysView::TEvSysView::TEvCollectQueryStats [1:6014387330472966483:2188150] [1:3154:4454] Got NKikimr::TEvTxUserProxy::TEvProposeKqpTransaction [0:6014971197384587348:7762533] [1:3154:4454] Got NActors::TEvents::TEvPoison [1:3169:4454] [1:3169:4454] Got NKikimr::NKqp::NScheduler::TEvQueryResponse [1:3169:4454] [1:301:2345] Got NKikimr::NKqp::NWorkload::TEvCleanupResponse [1:3154:4454] [1:2865:4226] Got NKikimr::NKqp::TEvKqpExecuter::TEvTxRequest [1:3170:4454] [1:68:2115] Got NKikimr::NKqp::NScheduler::TEvAddDatabase [1:7235142148544295275:29292] [1:3170:4454] Got NKikimr::NKqp::NScheduler::TEvAddPool [1:7235142148544295275:29292] [1:3170:4454] Got NKikimr::NKqp::NScheduler::TEvAddQuery [1:7235142148544295275:29292] [1:3170:4454] 2026-01-04T03:01:24.216747Z node 1 :KQP_EXECUTER ERROR: {KQPEX@kqp_executer_impl.h:1269} ActorId: [1:3170:4454] TxId: 281474976715674. 
Ctx: { TraceId: 01ke3f7k9se4zpjpmjew77je5d, Database: /Root, SessionId: ydb://session/3?node_id=1&id=YWE3MGQ4OTItODA0Njg3YjgtNjc2MzI4ZC1lMjQ5N2ZmYg==, PoolId: default, IsStreamingQuery: 0}. Runtime error Status# STATUS_CODE_UNSPECIFIED Issues# trace_id# Got NKikimr::NKqp::NScheduler::TEvRemoveQuery [1:7235142148544295275:29292] [1:3170:4454] Got NKikimr::TEvPipeCache::TEvUnlink [0:7521962744731429200:16741] [1:3170:4454] Got NKikimr::NKqp::TEvKqpExecuter::TEvTxResponse [1:3154:4454] [1:3170:4454] 2026-01-04T03:01:24.216965Z node 1 :KQP_SESSION ERROR: {KQPSA@kqp_session_actor.cpp:3081} SessionId: ydb://session/3?node_id=1&id=YWE3MGQ4OTItODA0Njg3YjgtNjc2MzI4ZC1lMjQ5N2ZmYg==, ActorId: [1:3154:4454], ActorState: CleanupState, LegacyTraceId: 01ke3f7k9se4zpjpmjew77je5d, Failed to cleanup issues# trace_id# Got NKikimr::NKqp::NPrivateEvents::TEvQueryResponse [1:66:2113] [1:3154:4454] Got NActors::TEvents::TEvPoison [1:3170:4454] [1:3170:4454] Got NKikimr::NKqp::NPrivateEvents::TEvQueryResponse [1:3155:4455] [1:66:2113] Got NActors::TEvents::TEvPoison [1:3156:4456] [1:66:2113] Got NKikimr::NKqp::NScheduler::TEvQueryResponse [1:3170:4454] [1:301:2345] |98.7%| [TM] {RESULT} ydb/core/kqp/executer_actor/ut/unittest |98.7%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/executer_actor/ut/unittest >> TSequence::CreateTableWithDefaultFromSequence [GOOD] >> TSequence::SequencesIndex >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] >> SequenceProxy::DropRecreate [GOOD] >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column [GOOD] >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0_UNIQUE_SYNC-pk_types2-all_types2-index2-pgint4-UNIQUE-SYNC] [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_ttl_pgint8-pk_types5-all_types5-index5-pgint8--] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> TopicSessionTests::TwoSessionsWithoutOffsets [GOOD] >> TopicSessionTests::TwoSessionWithoutPredicate |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_DyNumber-pk_types8-all_types8-index8-DyNumber--] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test ------- [TS] {asan, default-linux-x86_64, release} ydb/core/tx/sequenceproxy/ut/unittest >> SequenceProxy::DropRecreate [GOOD] Test command err: 2026-01-04T03:01:21.937995Z node 1 :BS_NODE WARN: {NWDC01@distconf.cpp:423} StateFunc too long Type# 131077 Duration# 0.007956s 2026-01-04T03:01:22.330701Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T03:01:22.330765Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:22.780575Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:01:23.774207Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 
281474976715657:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp:555) 2026-01-04T03:01:24.146731Z node 2 :BS_PDISK WARN: {BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T03:01:24.147301Z node 2 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001d75/r3tmp/tmpad5pgn/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T03:01:24.147930Z node 2 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001d75/r3tmp/tmpad5pgn/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001d75/r3tmp/tmpad5pgn/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6742643082311557711 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T03:01:25.260120Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T03:01:25.260218Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:25.335309Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:01:25.967857Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715657:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp:555) 2026-01-04T03:01:26.205785Z node 4 :BS_PDISK WARN: 
{BPD01@blobstorage_pdisk_blockdevice_async.cpp:924} Warning# got EIoResult::FileOpenError from IoContext->Setup PDiskId# 1000 2026-01-04T03:01:26.206248Z node 4 :BS_PDISK CRIT: {BPD39@blobstorage_pdisk_impl.cpp:2980} BlockDevice initialization error Details# Can't open file "/home/runner/.ya/build/build_root/c5zo/001d75/r3tmp/tmpyxg7wq/pdisk_1.dat": unknown reason, errno# 0. PDiskId# 1000 2026-01-04T03:01:26.206484Z node 4 :BS_PDISK CRIT: {BPD01@blobstorage_pdisk_actor.cpp:267} PDiskId# 1000 bootstrapped to the StateError, reason# Can't open file "/home/runner/.ya/build/build_root/c5zo/001d75/r3tmp/tmpyxg7wq/pdisk_1.dat": unknown reason, errno# 0. Can not be initialized Config: {TPDiskConfig Path# "/home/runner/.ya/build/build_root/c5zo/001d75/r3tmp/tmpyxg7wq/pdisk_1.dat" ExpectedPath# "" ExpectedSerial# "" PDiskGuid# 6439574010135789989 PDiskId# 1000 PDiskCategory# {Type# DEVICE_TYPE_ROT Kind# 0} MetadataOnly# 0 StartOwnerRound# 2 SectorMap# true EnableSectorEncryption # 0 ChunkSize# 134217728 SectorSize# 4096 StatisticsUpdateIntervalMs# 1000 SchedulerCfg# {TPDiskSchedulerConfig BytesSchedulerWeight# 1 LogWeight# 1 FreshWeight# 2 CompWeight# 7 SyncLogWeight# 15 HugeWeight# 2 FastReadWeight# 1 OtherReadWeight# 1 LoadWeight# 2 LowReadWeight# 1 MaxChunkReadsPerCycle# 16 MaxChunkReadsDurationPerCycleMs# 0.25 MaxChunkWritesPerCycle# 8 MaxChunkWritesDurationPerCycleMs# 1} MinLogChunksTotal# 4 MaxLogChunksPerOwnerMultiplier# 5 MaxLogChunksPerOwnerDivisor# 4 SortFreeChunksPerItems# 100 GetDriveDataSwitch# DoNotTouch WriteCacheSwitch# DoNotTouch DriveModelSeekTimeNs# 8000000 DriveModelSpeedBps# 127000000 DriveModelSpeedBpsMin# 135000000 DriveModelSpeedBpsMax# 200000000 DriveModelBulkWrieBlockSize# 2097152 DriveModelTrimSpeedBps# 0 ReorderingMs# 50 DeviceInFlight# 4 CostLimitNs# 50000000 BufferPoolBufferSizeBytes# 524288 BufferPoolBufferCount# 256 MaxQueuedCompletionActions# 128 ExpectedSlotCount# 0 SlotSizeInUnits# 0 ReserveLogChunksMultiplier# 56 InsaneLogChunksMultiplier# 40 RedLogChunksMultiplier# 30 OrangeLogChunksMultiplier# 20 WarningLogChunksMultiplier# 4 YellowLogChunksMultiplier# 4 MaxMetadataMegabytes# 32 SpaceColorBorder# GREEN CompletionThreadsCount# 1 UseNoopScheduler# false PlainDataChunks# 0 SeparateHugePriorities# 0} PDiskId# 1000 2026-01-04T03:01:26.461784Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSequence, opId: 281474976715658:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_sequence.cpp:353) 2026-01-04T03:01:26.749682Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSequence, opId: 281474976715659:0, at schemeshard: 72057594046578944, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_sequence.cpp:555) |98.7%| [TS] {RESULT} ydb/core/tx/sequenceproxy/ut/unittest |98.7%| [TS] {BAZEL_UPLOAD} ydb/core/tx/sequenceproxy/ut/unittest >> DataShardStats::OneChannelStatsCorrect |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0__SYNC-pk_types12-all_types12-index12-DyNumber--SYNC] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> DataShardSnapshotIsolation::ReadWriteNoLocksNoConflict [GOOD] >> 
DataShardSnapshotIsolation::ReadWriteConflictOnUncommittedWrite >> test_commit.py::TestCommit::test_commit >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] >> TMLPChangerTests::TopicNotExists >> test_http_api.py::TestHttpApi::test_simple_analytics_query [GOOD] >> test_http_api.py::TestHttpApi::test_empty_query [GOOD] >> test_http_api.py::TestHttpApi::test_warning >> test_commit.py::TestCommit::test_commit [GOOD] >> test_timeout.py::TestTimeout::test_timeout >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] >> test_example.py::TestExample::test_example [GOOD] >> test_http_api.py::TestHttpApi::test_warning [GOOD] >> test_http_api.py::TestHttpApi::test_get_unknown_query [GOOD] >> test_http_api.py::TestHttpApi::test_unauthenticated [GOOD] >> test_http_api.py::TestHttpApi::test_create_idempotency >> Coordinator::LastStepSubscribe [GOOD] >> Coordinator::RestoreDomainConfiguration >> TSequence::SequencesIndex [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceFromSelect >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types4-all_types4-index4--UNIQUE-SYNC] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> TopicSessionTests::TwoSessionWithoutPredicate [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_ttl_Timestamp-pk_types12-all_types12-index12-Timestamp--] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> GraphShard::NormalizeAndDownsample1 [GOOD] >> GraphShard::NormalizeAndDownsample2 [GOOD] >> GraphShard::NormalizeAndDownsample3 [GOOD] >> GraphShard::NormalizeAndDownsample4 [GOOD] >> GraphShard::NormalizeAndDownsample5 [GOOD] >> GraphShard::NormalizeAndDownsample6 [GOOD] >> GraphShard::CheckHistogramToPercentileConversions [GOOD] >> GraphShard::CreateGraphShard >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default [GOOD] >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default_not_null >> DataShardSnapshotIsolation::ReadWriteConflictOnUncommittedWrite [GOOD] >> DataShardSnapshotIsolation::ReadWriteConflictOnCommitWithEffects >> test_ctas.py::TestYtCtas::test_simple_ctast [GOOD] >> test_yt_reading.py::TestYtReading::test_partitioned_reading >> test_timeout.py::TestTimeout::test_timeout [GOOD] >> KeyValueGRPCService::TestSimpleExecuteTransactionV1 >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__ASYNC-pk_types16-all_types16-index16-DyNumber--ASYNC] [GOOD] >> DataShardBackgroundCompaction::ShouldCompact >> test_http_api.py::TestHttpApi::test_create_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_stop_idempotency >> GraphShard::CreateGraphShard [GOOD] |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/cloud/py3test >> test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_retryable_iam_error[tables_format_v1] [GOOD] |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/cloud/py3test >> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable ------- [TS] {asan, default-linux-x86_64, release} ydb/core/graph/shard/ut/unittest >> GraphShard::CreateGraphShard 
[GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 72057594046678944 is [1:128:2153] sender: [1:130:2058] recipient: [1:112:2143] 2026-01-04T03:01:35.594522Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T03:01:35.594659Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T03:01:35.594726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T03:01:35.594763Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T03:01:35.594813Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T03:01:35.594858Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T03:01:35.594967Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T03:01:35.595058Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T03:01:35.596041Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T03:01:35.605589Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T03:01:35.712744Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T03:01:35.712831Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:35.732673Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T03:01:35.733424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T03:01:35.736213Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T03:01:35.748475Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T03:01:35.748884Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T03:01:35.753237Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T03:01:35.756419Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, 
LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T03:01:35.770391Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T03:01:35.771917Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T03:01:35.784497Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T03:01:35.784601Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T03:01:35.784717Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T03:01:35.784784Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T03:01:35.784915Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T03:01:35.786240Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T03:01:35.966331Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:01:35.968594Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:01:35.968761Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:01:35.968859Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:01:35.968921Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView 
Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:01:35.968973Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:01:35.969022Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:01:35.969084Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:01:35.969198Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:01:35.969260Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:01:35.969298Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:01:35.969376Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:01:35.969452Z node 
1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:01:35.969530Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:01:35.969619Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:214:2215] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 2935Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409546, cookie: 281474976720657 2026-01-04T03:01:37.023068Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409546, cookie: 281474976720657 2026-01-04T03:01:37.023158Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 102:4294967295 from tablet: 72057594046678944 to tablet: 72057594046316545 cookie: 0:102 msg type: 269090816 2026-01-04T03:01:37.023291Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 102, partId: 4294967295, tablet: 72057594046316545 2026-01-04T03:01:37.023444Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72075186234409546, cookie: 281474976720658 FAKE_COORDINATOR: Add transaction: 102 at step: 5000039 FAKE_COORDINATOR: advance: minStep5000039 State->FrontStep: 5000038 FAKE_COORDINATOR: Send Plan to tablet 72057594046678944 for txId: 102 at step: 5000039 2026-01-04T03:01:37.023752Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 5000039, transactions count in step: 1, at schemeshard: 72057594046678944 2026-01-04T03:01:37.023892Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 102 Coordinator: 72057594046316545 AckTo { RawX1: 136 RawX2: 4294969454 } } Step: 5000039 MediatorID: 0 TabletID: 72057594046678944, at schemeshard: 72057594046678944 2026-01-04T03:01:37.023941Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:715: TTxOperationPlanStep Execute operation part is already done, operationId: 102:0 2026-01-04T03:01:37.023995Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:1, at tablet# 72057594046678944 
2026-01-04T03:01:37.024318Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:1 128 -> 240 2026-01-04T03:01:37.024381Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 102:1, at tablet# 72057594046678944 2026-01-04T03:01:37.024514Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 8 2026-01-04T03:01:37.024664Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:580: DoUpdateTenant no hasChanges, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], tenantLink: TSubDomainsLinks::TLink { DomainKey: [OwnerId: 72057594046678944, LocalPathId: 38], Generation: 2, ActorId:[1:813:2777], EffectiveACLVersion: 0, SubdomainVersion: 3, UserAttributesVersion: 1, TenantHive: 72075186233409546, TenantSysViewProcessor: 18446744073709551615, TenantStatisticsAggregator: 18446744073709551615, TenantGraphShard: 72075186234409549, TenantRootACL: }, subDomain->GetVersion(): 3, actualEffectiveACLVersion: 0, actualUserAttrsVersion: 1, tenantHive: 72075186233409546, tenantSysViewProcessor: 18446744073709551615, at schemeshard: 72057594046678944 FAKE_COORDINATOR: Erasing txId 102 2026-01-04T03:01:37.027094Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T03:01:37.027140Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046678944, txId: 102, path id: [OwnerId: 72057594046678944, LocalPathId: 38] 2026-01-04T03:01:37.027329Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T03:01:37.027379Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:66: TTxPublishToSchemeBoard Send, to populator: [1:210:2211], at schemeshard: 72057594046678944, txId: 102, path id: 38 2026-01-04T03:01:37.027462Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2026-01-04T03:01:37.027511Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_alter_extsubdomain.cpp:760: [72057594046678944] TSyncHive, operationId 102:1, ProgressState, NeedSyncHive: 0 2026-01-04T03:01:37.027544Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 102:1 240 -> 240 2026-01-04T03:01:37.028528Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:6274: Handle TEvUpdateAck, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T03:01:37.028602Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:96: TTxAckPublishToSchemeBoard Execute, at schemeshard: 72057594046678944, msg: Owner: 72057594046678944 Generation: 2 LocalPathId: 38 Version: 5 PathOwnerId: 72057594046678944, cookie: 102 2026-01-04T03:01:37.028645Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:108: Operation in-flight, at schemeshard: 72057594046678944, txId: 102 2026-01-04T03:01:37.028689Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:194: AckPublish, at schemeshard: 
72057594046678944, txId: 102, pathId: [OwnerId: 72057594046678944, LocalPathId: 38], version: 5 2026-01-04T03:01:37.028731Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove publishing for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 9 2026-01-04T03:01:37.028802Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 1/2, is published: true 2026-01-04T03:01:37.031562Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 102:1, at schemeshard: 72057594046678944 2026-01-04T03:01:37.031618Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:497: [72057594046678944] TDone opId# 102:1 ProgressState 2026-01-04T03:01:37.031707Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:1 progress is 2/2 2026-01-04T03:01:37.031732Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2026-01-04T03:01:37.031762Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_side_effects.cpp:928: Part operation is done id#102:1 progress is 2/2 2026-01-04T03:01:37.031787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2026-01-04T03:01:37.031821Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 102, ready parts: 2/2, is published: true 2026-01-04T03:01:37.031852Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1714: TOperation IsReadyToDone TxId: 102 ready parts: 2/2 2026-01-04T03:01:37.031922Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:0 2026-01-04T03:01:37.031966Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:0 2026-01-04T03:01:37.032113Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 8 2026-01-04T03:01:37.032158Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_side_effects.cpp:995: Operation and all the parts is done, operation id: 102:1 2026-01-04T03:01:37.032193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5577: RemoveTx for txid 102:1 2026-01-04T03:01:37.032281Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove txstate target path for pathId [OwnerId: 72057594046678944, LocalPathId: 38] was 7 2026-01-04T03:01:37.032986Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:175: TTxAckPublishToSchemeBoard Complete, at schemeshard: 72057594046678944, cookie: 102 TestModificationResult got TxId: 102, wait until txId: 102 TestWaitNotification wait txId: 102 2026-01-04T03:01:37.034667Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:213: tests -- TTxNotificationSubscriber for txId 102: send EvNotifyTxCompletion 2026-01-04T03:01:37.034728Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:259: tests -- TTxNotificationSubscriber, SendToSchemeshard, txId 102 2026-01-04T03:01:37.035130Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 102, at schemeshard: 72057594046678944 2026-01-04T03:01:37.035249Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:230: 
tests -- TTxNotificationSubscriber for txId 102: got EvNotifyTxCompletionResult 2026-01-04T03:01:37.035292Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: test_env.cpp:239: tests -- TTxNotificationSubscriber for txId 102: satisfy waiter [1:1010:2942] TestWaitNotification: OK eventTxId 102 2026-01-04T03:01:37.037150Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:31: TTxDescribeScheme DoExecute, record: Path: "/MyRoot/db1" Options { ReturnPartitioningInfo: false ReturnPartitionConfig: false BackupInfo: false ReturnBoundaries: false ShowPrivateTable: false }, at schemeshard: 72057594046678944 2026-01-04T03:01:37.037358Z node 1 :SCHEMESHARD_DESCRIBE INFO: schemeshard__describe_scheme.cpp:50: Tablet 72057594046678944 describe path "/MyRoot/db1" took 228us result status StatusSuccess 2026-01-04T03:01:37.038741Z node 1 :SCHEMESHARD_DESCRIBE DEBUG: schemeshard__describe_scheme.cpp:56: TTxDescribeScheme DoComplete, result: Status: StatusSuccess Path: "/MyRoot/db1" PathDescription { Self { Name: "db1" PathId: 38 SchemeshardId: 72057594046678944 PathType: EPathTypeExtSubDomain CreateFinished: true CreateTxId: 101 CreateStep: 5000038 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 5 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 5 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 1 SubDomainVersion: 3 SecurityStateVersion: 0 } } DomainDescription { SchemeShardId_Depricated: 72057594046678944 PathId_Depricated: 1 ProcessingParams { Version: 3 PlanResolution: 50 Coordinators: 72075186234409547 TimeCastBucketsPerMediator: 2 Mediators: 72075186234409548 SchemeShard: 72075186234409546 Hive: 72075186233409546 GraphShard: 72075186234409549 } DomainKey { SchemeShard: 72057594046678944 PathId: 38 } StoragePools { Name: "pool-1" Kind: "hdd" } PathsInside: 0 PathsLimit: 10000 ShardsInside: 5 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046678944 PathId: 38 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SecurityState { } SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046678944, at schemeshard: 72057594046678944 |98.7%| [TS] {RESULT} ydb/core/graph/shard/ut/unittest |98.7%| [TS] {BAZEL_UPLOAD} ydb/core/graph/shard/ut/unittest >> TTxDataShardBuildFulltextDictScan::BadRequest >> Coordinator::RestoreDomainConfiguration [GOOD] >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-false >> test_cte.py::TestCte::test_toplevel >> TSequence::CreateTableWithDefaultFromSequenceFromSelect [GOOD] >> TSequence::CreateTableWithDefaultFromSequenceBadRequest >> test.py::TestViewer::test_whoami_root [GOOD] >> test.py::TestViewer::test_whoami_database |98.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgint4_0_UNIQUE_SYNC-pk_types2-all_types2-index2-pgint4-UNIQUE-SYNC] [GOOD] |98.7%| [TS] {asan, default-linux-x86_64, release} 
ydb/tests/tools/pq_read/test/py3test >> test_timeout.py::TestTimeout::test_timeout [GOOD] >> test.py::TestViewer::test_whoami_database [GOOD] >> test.py::TestViewer::test_whoami_viewer [GOOD] >> test.py::TestViewer::test_whoami_monitoring [GOOD] >> test.py::TestViewer::test_counter |98.7%| [TS] {RESULT} ydb/tests/tools/pq_read/test/py3test |98.7%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |98.8%| [TS] {BAZEL_UPLOAD} ydb/tests/tools/pq_read/test/py3test |98.8%| [TA] $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} >> DataShardSnapshotIsolation::ReadWriteConflictOnCommitWithEffects [GOOD] >> DataShardSnapshotIsolation::ReadWriteConflictOnCommitAfterAnotherCommit >> KeyValueGRPCService::TestSimpleExecuteTransactionV1 [GOOD] >> KeyValueGRPCService::TestSimpleExecuteTransactionV2 >> TopicSessionTests::SessionWithPredicateAndSessionWithoutPredicate [GOOD] >> test.py::TestViewer::test_counter [GOOD] >> test.py::TestViewer::test_viewer_nodelist |98.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... results_accumulator.log} >> test.py::TestViewer::test_viewer_nodelist [GOOD] >> test.py::TestViewer::test_viewer_nodes >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected >> test.py::TestViewer::test_viewer_nodes [GOOD] >> test_yt_reading.py::TestYtReading::test_partitioned_reading [GOOD] >> test.py::TestViewer::test_viewer_nodes_all >> test_yt_reading.py::TestYtReading::test_block_reading >> test.py::TestViewer::test_viewer_nodes_all [GOOD] >> test.py::TestViewer::test_viewer_storage_nodes_no_database [GOOD] >> test.py::TestViewer::test_viewer_storage_nodes_no_database_filter_node_id [GOOD] >> test.py::TestViewer::test_viewer_storage_nodes >> test.py::TestViewer::test_viewer_storage_nodes [GOOD] >> test.py::TestViewer::test_viewer_storage_nodes_all >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__SYNC-pk_types24-all_types24-index24-Uint64--SYNC] [GOOD] |98.8%| [TA] {RESULT} $(B)/ydb/tests/functional/sqs/cloud/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default_not_null [GOOD] >> test.py::TestViewer::test_viewer_storage_nodes_all [GOOD] >> test.py::TestViewer::test_viewer_nodes_group >> test.py::TestViewer::test_viewer_nodes_group [GOOD] >> test.py::TestViewer::test_storage_groups [GOOD] >> test.py::TestViewer::test_viewer_groups_group_by_pool_name [GOOD] >> test.py::TestViewer::test_viewer_groups_group_by_capacity_alert [GOOD] >> test.py::TestViewer::test_viewer_sysinfo [GOOD] >> test.py::TestViewer::test_viewer_vdiskinfo [GOOD] >> test.py::TestViewer::test_viewer_pdiskinfo [GOOD] >> test.py::TestViewer::test_viewer_bsgroupinfo [GOOD] >> test.py::TestViewer::test_viewer_tabletinfo >> test.py::TestViewer::test_viewer_tabletinfo [GOOD] >> test.py::TestViewer::test_viewer_describe >> TFetchRequestTests::HappyWay >> test.py::TestViewer::test_viewer_describe [GOOD] >> test.py::TestViewer::test_viewer_cluster [GOOD] >> test.py::TestViewer::test_viewer_tenantinfo [GOOD] >> test.py::TestViewer::test_viewer_tenantinfo_db >> TMLPChangerTests::TopicNotExists [GOOD] >> TMLPChangerTests::ConsumerNotExists >> Discovery::DelayedNameserviceResponse >> test.py::TestViewer::test_viewer_tenantinfo_db [GOOD] >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource >> test.py::TestViewer::test_viewer_healthcheck >> test.py::TestViewer::test_viewer_healthcheck [GOOD] >> test.py::TestViewer::test_viewer_acl >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSource [GOOD] >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSourcesAndWithChannel >> test.py::TestViewer::test_viewer_acl [GOOD] >> test.py::TestViewer::test_viewer_acl_write >> TCheckpointCoordinatorTests::ShouldTriggerCheckpointWithSourcesAndWithChannel [GOOD] >> TCheckpointCoordinatorTests::ShouldAllSnapshots >> test.py::TestViewer::test_viewer_acl_write [GOOD] >> test.py::TestViewer::test_viewer_autocomplete >> TCheckpointCoordinatorTests::ShouldAllSnapshots [GOOD] >> TCheckpointCoordinatorTests::Should2Increments1Snapshot [GOOD] >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved >> test.py::TestViewer::test_viewer_autocomplete [GOOD] >> test.py::TestViewer::test_viewer_check_access >> DataShardBackgroundCompaction::ShouldCompact [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed >> TTxDataShardBuildFulltextDictScan::BadRequest [GOOD] >> TTxDataShardBuildFulltextDictScan::Build-SkipFirst-SkipLast >> TCheckpointCoordinatorTests::ShouldAbortPreviousCheckpointsIfNodeStateCantBeSaved [GOOD] >> TCheckpointCoordinatorTests::ShouldDoNothingIfNoIngressTasks >> test.py::TestViewer::test_viewer_check_access [GOOD] >> test.py::TestViewer::test_viewer_query >> DataShardStats::OneChannelStatsCorrect [GOOD] >> DataShardStats::MultipleChannelsStatsCorrect >> test.py::TestViewer::test_viewer_query [GOOD] >> test.py::TestViewer::test_viewer_query_from_table >> test.py::TestViewer::test_viewer_query_from_table [GOOD] >> test.py::TestViewer::test_viewer_query_from_table_different_schemas |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1__ASYNC-pk_types16-all_types16-index16-DyNumber--ASYNC] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> test_example.py::TestExample::test_example2 >> test.py::TestViewer::test_viewer_query_from_table_different_schemas [GOOD] >> test.py::TestViewer::test_viewer_query_issue_13757 >> 
TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD] >> KeyValueGRPCService::TestSimpleExecuteTransactionV2 [GOOD] >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGenerationV1 >> test_cte.py::TestCte::test_toplevel [GOOD] >> test.py::TestViewer::test_viewer_query_issue_13757 [GOOD] >> test.py::TestViewer::test_viewer_query_issue_13945 >> DataShardFollowers::FollowerKeepsWorkingAfterMvccReadTable [GOOD] >> DataShardFollowers::FollowerStaleRo |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/plan2svg/py3test >> test_cte.py::TestCte::test_toplevel [GOOD] |98.8%| [TM] {RESULT} ydb/tests/functional/kqp/plan2svg/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/kqp/plan2svg/py3test >> test.py::TestViewer::test_viewer_query_issue_13945 [GOOD] >> test.py::TestViewer::test_pqrb_tablet >> DataShardSnapshotIsolation::ReadWriteConflictOnCommitAfterAnotherCommit [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertTwiceThenCommit >> test.py::TestViewer::test_pqrb_tablet [GOOD] >> test.py::TestViewer::test_viewer_nodes_issue_14992 [GOOD] >> test.py::TestViewer::test_operations_list [GOOD] >> test.py::TestViewer::test_operations_list_page >> test.py::TestViewer::test_operations_list_page [GOOD] >> test.py::TestViewer::test_operations_list_page_bad [GOOD] >> test.py::TestViewer::test_scheme_directory >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0_UNIQUE_SYNC-pk_types11-all_types11-index11-pgtimestamp-UNIQUE-SYNC] [GOOD] >> test.py::TestViewer::test_scheme_directory [GOOD] >> test.py::TestViewer::test_topic_data ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_sequence/unittest >> TSequence::CreateTableWithDefaultFromSequenceBadRequest [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T03:01:21.589945Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:01:21.883218Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:01:21.883763Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:01:21.883820Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:01:22.445892Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:22.446056Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:22.599853Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495677950074 != 1767495677950077 2026-01-04T03:01:22.626249Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:22.682822Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:22.864268Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:01:23.253859Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:01:23.271172Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:01:23.418811Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:24.056104Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:983:2832], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:24.056230Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:993:2837], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:24.056307Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:24.059323Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:997:2840], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:24.059504Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:24.068476Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T03:01:24.204447Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:998:2841], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T03:01:24.390223Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:1055:2879] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_value: 5 } items { uint32_value: 5 } }, { items { int64_value: 6 } items { uint32_value: 6 } }, { items { int64_value: 7 } items { uint32_value: 7 } }, { items { int64_value: 8 } items { uint32_value: 8 } }, { items { int64_value: 9 } items { uint32_value: 9 } } ... waiting for SysViewsRoster update finished 2026-01-04T03:01:30.337989Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:01:30.344948Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T03:01:30.349388Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:01:30.349672Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:01:30.349903Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:01:30.611446Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:30.611582Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:30.642771Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767495687101939 != 1767495687101943 2026-01-04T03:01:30.648571Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:30.694047Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:30.776005Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:01:31.070244Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:01:31.084669Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:01:31.192999Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:31.497317Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1030:2869], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:31.497461Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1041:2874], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:31.497558Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:31.498754Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:1044:2877], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:31.498859Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:31.503513Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T03:01:31.637792Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:1045:2878], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T03:01:31.673854Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:1103:2917] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { int64_value: 1 } items { uint32_value: 1 } }, { items { int64_value: 2 } items { uint32_value: 2 } }, { items { int64_value: 3 } items { uint32_value: 3 } }, { items { int64_value: 4 } items { uint32_value: 4 } }, { items { int64_ ... IVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:37.879592Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:01:38.174683Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:01:38.189049Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:01:38.297641Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:38.604053Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:983:2832], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:38.604179Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:994:2837], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:38.604263Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:38.605011Z node 3 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [3:997:2840], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:38.605113Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:38.608833Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T03:01:38.735190Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [3:998:2841], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T03:01:38.770248Z node 3 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [3:1055:2879] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } { items { int64_value: 1 } items { uint32_value: 303 } } { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } } { items { int64_value: 1 } items { uint32_value: 303 } }, { items { int64_value: 2 } items { uint32_value: 303 } }, { items { int64_value: 3 } items { uint32_value: 303 } } ... waiting for SysViewsRoster update finished 2026-01-04T03:01:43.513041Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:01:43.524096Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:303:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:01:43.524540Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:01:43.524630Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:01:43.787915Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:43.788064Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:43.815861Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:34:2081] 1767495700384566 != 1767495700384569 2026-01-04T03:01:43.820833Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:43.865151Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:43.934651Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:01:44.235971Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:01:44.250484Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:01:44.356541Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:44.679154Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:983:2832], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:44.679287Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:993:2837], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:44.679373Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:44.680426Z node 4 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [4:997:2840], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:44.680512Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:44.686661Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976715658:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T03:01:44.819369Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [4:998:2841], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976715658 completed, doublechecking } 2026-01-04T03:01:44.855073Z node 4 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [4:1055:2879] txid# 281474976715659, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 43], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T03:01:44.962149Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:1065:2888], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:98: Error: Key columns are not specified., code: 2017 2026-01-04T03:01:44.964829Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=ZGJiNDRhOGYtNmRhNDIyZTktMTgwZTdjMy03Njg5YTViMA==, ActorId: [4:980:2829], ActorState: ExecuteState, LegacyTraceId: 01ke3f8834cadwh4ej6b3cg3t5, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 98 } message: "Key columns are not specified." end_position { row: 1 column: 98 } issue_code: 2017 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 98 } message: "Key columns are not specified." end_position { row: 1 column: 98 } issue_code: 2017 severity: 1 } } tx_id# trace_id# 2026-01-04T03:01:45.011363Z node 4 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [4:1087:2904], status: BAD_REQUEST, issues:
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017
: Error: Execution, code: 1060
:1:103: Error: Key columns are not specified., code: 2017 2026-01-04T03:01:45.014451Z node 4 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=4&id=YWQxMzIwNzAtNTBlYjQzNzQtODBiODM3OTktNzRkMzg2NTc=, ActorId: [4:1079:2896], ActorState: ExecuteState, LegacyTraceId: 01ke3f88c96xvz9vq9qv0wj0j6, ReplyQueryCompileError, remove tx status# BAD_REQUEST issues# { message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 103 } message: "Key columns are not specified." end_position { row: 1 column: 103 } issue_code: 2017 severity: 1 } }{ message: "Execution" issue_code: 1060 severity: 1 issues { position { row: 1 column: 103 } message: "Key columns are not specified." end_position { row: 1 column: 103 } issue_code: 2017 severity: 1 } } tx_id# trace_id# |98.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_sequence/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_sequence/unittest >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__ASYNC-pk_types10-all_types10-index10-Timestamp--ASYNC] [GOOD] >> TTxDataShardBuildFulltextDictScan::Build-SkipFirst-SkipLast [GOOD] >> TTxDataShardBuildFulltextDictScan::Build+SkipFirst-SkipLast >> test_yt_reading.py::TestYtReading::test_block_reading [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/yt/kqp_yt_import/py3test >> test_yt_reading.py::TestYtReading::test_block_reading [GOOD] |98.8%| [TM] {RESULT} ydb/tests/fq/yt/kqp_yt_import/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/yt/kqp_yt_import/py3test >> DataShardBackgroundCompaction::ShouldNotCompactWhenBorrowed [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable >> test_ttl.py::TestTTL::test_ttl[table_Date_0_UNIQUE_SYNC-pk_types32-all_types32-index32-Date-UNIQUE-SYNC] [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_0__ASYNC-pk_types31-all_types31-index31-Date--ASYNC] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] >> TopicSessionTests::SecondSessionWithoutOffsetsAfterSessionConnected [GOOD] >> TopicSessionTests::TwoSessionsWithOffsets >> MediatorTest::BasicTimecastUpdates >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGenerationV1 [GOOD] >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGenerationV2 >> DataShardDiskQuotas::DiskQuotaExceeded >> test.py::TestViewer::test_topic_data [GOOD] >> test.py::TestViewer::test_topic_data_cdc >> test_http_api.py::TestHttpApi::test_stop_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_restart_idempotency >> test_example.py::TestExample::test_example2 [GOOD] >> test_query_cache.py::TestQueryCache::test >> Discovery::DelayedNameserviceResponse [GOOD] >> Discovery::SecondDiscovererRequestBeforeNameserviceResponse >> TTxDataShardBuildFulltextDictScan::Build+SkipFirst-SkipLast [GOOD] >> TTxDataShardBuildFulltextDictScan::Build-SkipFirst+SkipLast >> DataShardFollowers::FollowerStaleRo [GOOD] >> DataShardFollowers::FollowerRebootAfterSysCompaction >> test.py::TestViewer::test_topic_data_cdc [GOOD] >> test.py::TestViewer::test_async_replication_describe >> test.py::TestViewer::test_async_replication_describe [GOOD] >> test.py::TestViewer::test_transfer_describe >> DataShardSnapshotIsolation::ReadWriteUpsertTwiceThenCommit [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertAgainOnCommitNoConflict >> 
TSentinelUnstableTests::BSControllerCantChangeStatus |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint32-pk_types14-all_types14-index14-Uint32--] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test.py::TestViewer::test_transfer_describe [GOOD] >> test.py::TestViewer::test_viewer_query_long >> MediatorTest::BasicTimecastUpdates [GOOD] >> TMLPChangerTests::ConsumerNotExists [GOOD] >> TMLPChangerTests::PartitionNotExists >> KeyValueGRPCService::SimpleExecuteTransactionWithWrongGenerationV2 [GOOD] >> KeyValueGRPCService::SimpleAcquireLockV1 >> MediatorTest::MultipleTablets >> DataShardBackgroundCompaction::ShouldNotCompactWhenCopyTable [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable >> TTxDataShardTestInit::TestGetShardStateAfterInitialization |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0_UNIQUE_SYNC-pk_types11-all_types11-index11-pgtimestamp-UNIQUE-SYNC] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> DataShardStats::MultipleChannelsStatsCorrect [GOOD] >> DataShardStats::HistogramStatsCorrect |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/add_column/py3test >> test_add_column.py::TestYdbAddColumnWorkload::test_add_column_default_not_null [GOOD] |98.8%| [TM] {RESULT} ydb/tests/datashard/add_column/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/add_column/py3test >> TFetchRequestTests::HappyWay [GOOD] >> TFetchRequestTests::CDC >> TTxDataShardTestInit::TestGetShardStateAfterInitialization [GOOD] >> TTxDataShardTestInit::TestTableHasPath >> TTxDataShardBuildFulltextDictScan::Build-SkipFirst+SkipLast [GOOD] >> TTxDataShardBuildFulltextDictScan::Build+SkipFirst+SkipLast >> TopicSessionTests::TwoSessionsWithOffsets [GOOD] >> test.py::TestViewer::test_viewer_query_long [GOOD] >> test.py::TestViewer::test_viewer_query_long_multipart >> TopicSessionTests::BadDataSessionError >> DataShardSnapshotIsolation::ReadWriteUpsertAgainOnCommitNoConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertAgainTwiceOnCommitNoConflict >> test_example.py::TestExample::test_linked_with_testcase >> test_query_cache.py::TestQueryCache::test [GOOD] >> MediatorTest::MultipleTablets [GOOD] >> test.py::TestViewer::test_viewer_query_long_multipart [GOOD] >> test.py::TestViewer::test_viewer_query_event_stream >> test.py::TestViewer::test_viewer_query_event_stream [GOOD] >> test.py::TestViewer::test_security >> MediatorTest::TabletAckBeforePlanComplete >> DataShardFollowers::FollowerRebootAfterSysCompaction [GOOD] >> DataShardFollowers::FollowerAfterSysCompaction >> test.py::TestViewer::test_security [GOOD] >> test.py::TestViewer::test_storage_stats >> KeyValueGRPCService::SimpleAcquireLockV1 [GOOD] >> KeyValueGRPCService::SimpleAcquireLockV2 >> DataShardBackgroundCompaction::ShouldNotCompactEmptyTable [GOOD] >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime >> Discovery::SecondDiscovererRequestBeforeNameserviceResponse [GOOD] >> DataShardDiskQuotas::DiskQuotaExceeded [GOOD] >> DataShardDiskQuotas::ShardRestartOnCreateTable >> TTxDataShardBuildFulltextDictScan::Build+SkipFirst+SkipLast [GOOD] >> TTxDataShardBuildFulltextIndexScan::BadRequest >> test.py::TestViewer::test_storage_stats [GOOD] >> test.py::TestViewer::test_viewer_peers >> test.py::TestViewer::test_viewer_peers [GOOD] >> 
test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Uint64_0__SYNC-pk_types24-all_types24-index24-Uint64--SYNC] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> TDqSolomonWriteActorTest::TestWriteFormat >> TTxDataShardTestInit::TestTableHasPath [GOOD] >> TTxDataShardTestInit::TestResolvePathAfterRestart ------- [TM] {asan, default-linux-x86_64, release} ydb/core/kqp/ut/discovery/unittest >> Discovery::SecondDiscovererRequestBeforeNameserviceResponse [GOOD] Test command err: Trying to start YDB, gRPC: 29350, MsgBus: 7579 2026-01-04T03:01:43.405132Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591336241636667735:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:01:43.406033Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T03:01:43.686092Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T03:01:43.733614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:43.733776Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:43.752859Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:43.814849Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:43.887321Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T03:01:44.022933Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T03:01:44.023042Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T03:01:44.023051Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T03:01:44.023166Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T03:01:44.413496Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:01:44.653451Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:01:44.660425Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T03:01:44.732484Z node 1 
:FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:44.877655Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:45.047852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:45.117027Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:46.659214Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336254521571456:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:46.659317Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:46.662215Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336254521571465:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:46.662286Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:47.340042Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:47.373088Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:47.402388Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:47.432592Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:47.461721Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:47.515820Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:47.548495Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:47.597239Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:47.694307Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336258816539636:2484], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:47.694428Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:47.694810Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336258816539642:2488], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:47.694867Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336258816539641:2487], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:47.694969Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:47.701494Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T03:01:47.714897Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591336258816539645:2489], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T03:01:47.802273Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591336258816539696:3772] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T03:01:48.404424Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591336241636667735:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:01:48.404522Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; Trying to start YDB, gRPC: 29275, MsgBus: 25035 2026-01-04T03:01:51.431832Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591336275749456941:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:01:51.431932Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T03:01:51.445117Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T03:01:51.512043Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:7591336275749456898:2081] 1767495711430933 != 1767495711430936 2026-01-04T03:01:51.514927Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:51.552117Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:51.552211Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:51.557499Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:51.608750Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T03:01:51.608783Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T03:01:51.608793Z node 2 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T03:01:51.608874Z node 2 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T03:01:51.739099Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T03:01:51.995344Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 
2026-01-04T03:01:52.012069Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T03:01:52.030932Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710658:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:52.094722Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:52.249991Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710660:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:52.318288Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:52.460732Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:01:54.696223Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591336288634360665:2399], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:54.696303Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:54.696612Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591336288634360675:2400], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:54.696654Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:54.752446Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:54.790091Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710663:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:54.823028Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:54.857797Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:54.889343Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:54.932424Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710667:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:54.978610Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710668:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:55.033304Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710669:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:55.145778Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591336292929328845:2482], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:55.145896Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:55.145938Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591336292929328850:2485], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:55.146177Z node 2 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [2:7591336292929328852:2486], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:55.146247Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:55.150730Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710670:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T03:01:55.165776Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [2:7591336292929328853:2487], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710670 completed, doublechecking } 2026-01-04T03:01:55.256425Z node 2 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [2:7591336292929328905:3776] txid# 281474976710671, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 53], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T03:01:56.432341Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[2:7591336275749456941:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:01:56.432423Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; |98.8%| [TM] {RESULT} ydb/core/kqp/ut/discovery/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/ut/discovery/unittest >> MediatorTest::TabletAckBeforePlanComplete [GOOD] >> ServerRestartTest::RestartOnGetSession |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3_UNIQUE_SYNC-pk_types1-all_types1-index1--UNIQUE-SYNC] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_example.py::TestExample::test_linked_with_testcase [GOOD] >> MediatorTest::TabletAckWhenDead >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-false [GOOD] >> Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-true >> TopicSessionTests::BadDataSessionError [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertAgainTwiceOnCommitNoConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertBlockedByVolatileConflict >> TMLPChangerTests::PartitionNotExists [GOOD] >> TMLPChangerTests::CommitTest >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0_UNIQUE_SYNC-pk_types14-all_types14-index14-DyNumber-UNIQUE-SYNC] [GOOD] >> TopicSessionTests::WrongFieldType >> TTxDataShardBuildFulltextIndexScan::BadRequest [GOOD] >> TTxDataShardBuildFulltextIndexScan::Build >> KeyValueGRPCService::SimpleAcquireLockV2 [GOOD] >> KeyValueGRPCService::SimpleRenameUnexistedKeyV1 >> test_http_api.py::TestHttpApi::test_restart_idempotency [GOOD] >> test_http_api.py::TestHttpApi::test_simple_streaming_query >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD] >> DataShardFollowers::FollowerAfterSysCompaction [GOOD] >> DataShardFollowers::FollowerAfterDataCompaction |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Timestamp_1__ASYNC-pk_types10-all_types10-index10-Timestamp--ASYNC] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> TTxDataShardTestInit::TestResolvePathAfterRestart [GOOD] >> test_http_api.py::TestHttpApi::test_simple_streaming_query [GOOD] >> test_http_api.py::TestHttpApi::test_integral_results >> BasicExample::BasicExample ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_background_compaction/unittest >> DataShardBackgroundCompaction::ShouldNotCompactSecondTime [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T03:01:40.284391Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:01:40.469253Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:01:40.470010Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:01:40.470077Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:01:40.899527Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:40.899717Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:40.985165Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495697301000 != 1767495697301003 2026-01-04T03:01:40.999935Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:41.051795Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:41.166856Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:01:41.500891Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:01:41.518495Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:01:41.631944Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-04T03:01:41.632049Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T03:01:41.633021Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-04T03:01:41.748330Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T03:01:41.748461Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T03:01:41.750368Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T03:01:41.750520Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-04T03:01:41.750968Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T03:01:41.751175Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T03:01:41.751348Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T03:01:41.755501Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:41.756107Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T03:01:41.756961Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T03:01:41.757049Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T03:01:41.811912Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T03:01:41.812910Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T03:01:41.814260Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-04T03:01:41.814572Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T03:01:41.857994Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T03:01:41.858691Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T03:01:41.858795Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T03:01:41.861305Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T03:01:41.861407Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T03:01:41.861474Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T03:01:41.863535Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T03:01:41.863736Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T03:01:41.863851Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T03:01:41.874804Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T03:01:41.923374Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T03:01:41.924551Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T03:01:41.924717Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T03:01:41.924766Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T03:01:41.924806Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T03:01:41.924878Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T03:01:41.925044Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T03:01:41.925094Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T03:01:41.926312Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T03:01:41.926434Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T03:01:41.926561Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T03:01:41.926607Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T03:01:41.926751Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T03:01:41.926876Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T03:01:41.926935Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T03:01:41.926972Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T03:01:41.927043Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T03:01:41.927237Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T03:01:41.927282Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T03:01:41.927351Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T03:01:41.927878Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-04T03:01:41.927927Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T03:01:41.928053Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T03:01:41.928468Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T03:01:41.928535Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T03:01:41.929263Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T03:01:41.929390Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status ... Z node 5 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-04T03:02:04.160665Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T03:02:04.160700Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteWrite 2026-01-04T03:02:04.160741Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-04T03:02:04.160788Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T03:02:04.160879Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T03:02:04.160917Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-04T03:02:04.160960Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T03:02:04.161002Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-01-04T03:02:04.161049Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-04T03:02:04.161082Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T03:02:04.161132Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-01-04T03:02:04.172037Z node 5 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-04T03:02:04.172137Z node 5 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T03:02:04.172204Z node 5 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 2 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-04T03:02:04.172489Z node 5 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T03:02:04.174951Z node 5 :TX_PROXY DEBUG: proxy_impl.cpp:402: actor# [5:68:2115] Handle TEvNavigate describe path 
/Root/table-1 2026-01-04T03:02:04.175132Z node 5 :TX_PROXY DEBUG: describe.cpp:270: Actor# [5:1058:2876] HANDLE EvNavigateScheme /Root/table-1 2026-01-04T03:02:04.175724Z node 5 :TX_PROXY DEBUG: describe.cpp:354: Actor# [5:1058:2876] HANDLE EvNavigateKeySetResult TDescribeReq marker# P5 ErrorCount# 0 2026-01-04T03:02:04.175892Z node 5 :TX_PROXY DEBUG: describe.cpp:433: Actor# [5:1058:2876] SEND to# 72057594046644480 shardToRequest NKikimrSchemeOp.TDescribePath Path: "/Root/table-1" Options { ShowPrivateTable: true } 2026-01-04T03:02:04.177302Z node 5 :TX_PROXY DEBUG: describe.cpp:446: Actor# [5:1058:2876] Handle TEvDescribeSchemeResult Forward to# [5:829:2724] Cookie: 0 TEvDescribeSchemeResult: NKikimrScheme.TEvDescribeSchemeResult PreSerializedData size# 20 Record# Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 2000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 PartitionConfig { CompactionPolicy { InMemSizeToSnapshot: 4194304 InMemStepsToSnapshot: 300 InMemForceStepsToSnapshot: 500 InMemForceSizeToSnapshot: 16777216 InMemCompactionBrokerQueue: 0 ReadAheadHiThreshold: 67108864 ReadAheadLoThreshold: 16777216 MinDataPageSize: 7168 SnapBrokerQueue: 0 Generation { GenerationId: 0 SizeToCompact: 0 CountToCompact: 8 ForceCountToCompact: 8 ForceSizeToCompact: 134217728 CompactionBrokerQueue: 4294967295 KeepInCache: true BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen1" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 0 UpliftPartSize: 0 } Generation { GenerationId: 1 SizeToCompact: 41943040 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 536870912 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen2" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 8388608 UpliftPartSize: 8388608 } Generation { GenerationId: 2 SizeToCompact: 419430400 CountToCompact: 5 ForceCountToCompact: 16 ForceSizeToCompact: 17179869184 CompactionBrokerQueue: 4294967295 KeepInCache: false BackgroundCompactionPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } ResourceBrokerTask: "compaction_gen3" ExtraCompactionPercent: 10 ExtraCompactionMinSize: 16384 ExtraCompactionExpPercent: 110 ExtraCompactionExpMaxSize: 83886080 UpliftPartSize: 83886080 } BackupBrokerQueue: 1 DefaultTaskPriority: 5 BackgroundSnapshotPolicy { Threshold: 101 PriorityBase: 100 TimeFactor: 1 ResourceBrokerTask: "background_compaction" } InMemResourceBrokerTask: "compaction_gen0" SnapshotResourceBrokerTask: 
"compaction_gen0" BackupResourceBrokerTask: "scan" LogOverheadSizeToSnapshot: 16777216 LogOverheadCountToSnapshot: 500 DroppedRowsPercentToCompact: 50 MinBTreeIndexNodeSize: 7168 MinBTreeIndexNodeKeys: 6 } PartitioningPolicy { MinPartitionsCount: 1 } ColumnFamilies { Id: 0 Name: "default" StorageConfig { SysLog { PreferredPoolKind: "test" } Log { PreferredPoolKind: "test" } Data { PreferredPoolKind: "test" } } } } TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 41 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046644480 2026-01-04T03:02:04.178432Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [5:1062:2880], Recipient [5:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T03:02:04.178503Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T03:02:04.178562Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [5:1061:2879], serverId# [5:1062:2880], sessionId# [0:0:0] 2026-01-04T03:02:04.178789Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553169, Sender [5:1060:2878], Recipient [5:886:2765]: NKikimrTxDataShard.TEvGetInfoRequest 2026-01-04T03:02:04.179865Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [5:1065:2883], Recipient [5:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T03:02:04.179952Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T03:02:04.180008Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [5:1064:2882], serverId# [5:1065:2883], sessionId# [0:0:0] 2026-01-04T03:02:04.180206Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [5:1063:2881], 
Recipient [5:886:2765]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 38 } CompactBorrowed: false 2026-01-04T03:02:04.180351Z node 5 :TX_DATASHARD INFO: datashard__compaction.cpp:141: Started background compaction# 1 of 72075186224037888 tableId# 38 localTid# 1001, requested from [5:1063:2881], partsCount# 0, memtableSize# 728, memtableWaste# 3880, memtableRows# 3 2026-01-04T03:02:04.253323Z node 5 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 1, ts 1970-01-01T00:00:02.529698Z 2026-01-04T03:02:04.253443Z node 5 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 1, front# 1 2026-01-04T03:02:04.253513Z node 5 :TX_DATASHARD DEBUG: datashard__compaction.cpp:260: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001 sending TEvCompactTableResult to# [5:1063:2881]pathId# [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-04T03:02:04.254459Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [5:877:2759], Recipient [5:886:2765]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-04T03:02:04.254973Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [5:1072:2889], Recipient [5:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T03:02:04.255046Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T03:02:04.255103Z node 5 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [5:1071:2888], serverId# [5:1072:2889], sessionId# [0:0:0] 2026-01-04T03:02:04.255350Z node 5 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553210, Sender [5:1070:2887], Recipient [5:886:2765]: NKikimrTxDataShard.TEvCompactTable PathId { OwnerId: 72057594046644480 LocalId: 38 } CompactBorrowed: false 2026-01-04T03:02:04.255482Z node 5 :TX_DATASHARD DEBUG: datashard__compaction.cpp:118: Background compaction of tablet# 72075186224037888 of path# [OwnerId: 72057594046644480, LocalPathId: 38], requested from# [5:1070:2887] is not needed |98.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_background_compaction/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_background_compaction/unittest >> TFetchRequestTests::CDC [GOOD] >> TFetchRequestTests::SmallBytesRead ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_init/unittest >> TTxDataShardTestInit::TestResolvePathAfterRestart [GOOD] Test command err: 2026-01-04T03:01:55.178916Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:106:2139], Recipient [1:112:2143]: NKikimr::TEvTablet::TEvBoot 2026-01-04T03:01:55.225262Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:106:2139], Recipient [1:112:2143]: NKikimr::TEvTablet::TEvRestored 2026-01-04T03:01:55.234052Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 9437184 actor [1:112:2143] 2026-01-04T03:01:55.235957Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T03:01:55.299853Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:106:2139], Recipient [1:112:2143]: NKikimr::TEvTablet::TEvFollowerSyncComplete 
2026-01-04T03:01:55.320224Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T03:01:55.320388Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T03:01:55.323512Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 9437184 2026-01-04T03:01:55.323604Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 9437184 2026-01-04T03:01:55.323705Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 9437184 2026-01-04T03:01:55.326576Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T03:01:55.327137Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T03:01:55.327204Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 9437184 persisting started state actor id [1:136:2143] in generation 2 2026-01-04T03:01:55.371436Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T03:01:55.400416Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 9437184 2026-01-04T03:01:55.401912Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 9437184 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T03:01:55.402038Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 9437184, actorId: [1:141:2163] 2026-01-04T03:01:55.402071Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 9437184 2026-01-04T03:01:55.402107Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 9437184, state: WaitScheme 2026-01-04T03:01:55.402137Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 9437184 2026-01-04T03:01:55.402259Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:112:2143], Recipient [1:112:2143]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T03:01:55.403246Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T03:01:55.404279Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 9437184 2026-01-04T03:01:55.404408Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 9437184 2026-01-04T03:01:55.404460Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 9437184 2026-01-04T03:01:55.404519Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 9437184 active 0 active planned 0 immediate 0 planned 0 2026-01-04T03:01:55.404570Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 9437184 2026-01-04T03:01:55.404603Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 9437184 has no attached operations 2026-01-04T03:01:55.404633Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 9437184 2026-01-04T03:01:55.404678Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 9437184 TxInFly 0 2026-01-04T03:01:55.404724Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 9437184 2026-01-04T03:01:55.407717Z 
node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269551617, Sender [1:103:2137], Recipient [1:112:2143]: NKikimrTxDataShard.TEvGetShardState Source { RawX1: 103 RawX2: 4294969433 } 2026-01-04T03:01:55.407786Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3156: StateWork, processing event TEvDataShard::TEvGetShardState ... waiting for SysViewsRoster update finished 2026-01-04T03:01:58.840400Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:01:58.969734Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T03:01:58.981160Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:01:58.981526Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:01:58.981662Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:01:59.331678Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:59.331839Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:59.437303Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767495715791001 != 1767495715791005 2026-01-04T03:01:59.454211Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:59.504666Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:59.615105Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:01:59.949782Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:01:59.964119Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:02:00.134172Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:02:00.185745Z node 2 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [2:886:2765] 2026-01-04T03:02:00.186716Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T03:02:00.243346Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T03:02:00.243494Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T03:02:00.245139Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T03:02:00.245218Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T03:02:00.245275Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T03:02:00.245609Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T03:02:00.245734Z node 2 :TX_DATASHARD 
DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T03:02:00.245923Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [2:901:2765] in generation 1 2026-01-04T03:02:00.256843Z node 2 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T03:02:00.256951Z node 2 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T03:02:00.257072Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T03:02:00.257185Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [2:903:2775] 2026-01-04T03:02:00.257232Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T03:02:00.257272Z node 2 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T03:02:00.257310Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T03:02:00.257792Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T03:02:00.257962Z node 2 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T03:02:00.258052Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T03:02:00.258142Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T03:02:00.258185Z node 2 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T03:02:00.258233Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T03:02:00.258333Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [2:882:2762], serverId# [2:888:2766], sessionId# [0:0:0] 2026-01-04T03:02:00.258788Z node 2 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T03:02:00.259058Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T03:02:00.259179Z node 2 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T03:02:00.261467Z node 2 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T03:02: ... 
planned 0 immediate 0 planned 1 2026-01-04T03:02:05.079881Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [2000:281474976715657] in PlanQueue unit at 72075186224037888 2026-01-04T03:02:05.080125Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 2000:281474976715657 keys extracted: 0 2026-01-04T03:02:05.080246Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T03:02:05.081250Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T03:02:05.081323Z node 3 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T03:02:05.081727Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T03:02:05.082164Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T03:02:05.083815Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 0 2026-01-04T03:02:05.083858Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T03:02:05.084755Z node 3 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 2000} 2026-01-04T03:02:05.084819Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T03:02:05.085730Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T03:02:05.085773Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T03:02:05.085960Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T03:02:05.086020Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [2000 : 281474976715657] from 72075186224037888 at tablet 72075186224037888 send result to client [3:400:2399], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T03:02:05.086068Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T03:02:05.086143Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T03:02:05.087237Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T03:02:05.088914Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T03:02:05.089922Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T03:02:05.089982Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 
72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T03:02:05.096002Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-04T03:02:05.097451Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-04T03:02:05.130375Z node 3 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [3:956:2812] 2026-01-04T03:02:05.130626Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T03:02:05.138554Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T03:02:05.139182Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T03:02:05.141640Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T03:02:05.141722Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T03:02:05.141763Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T03:02:05.145987Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T03:02:05.146819Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T03:02:05.146913Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [3:971:2812] in generation 2 2026-01-04T03:02:05.168509Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T03:02:05.168674Z node 3 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state Ready tabletId 72075186224037888 2026-01-04T03:02:05.168795Z node 3 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976715657 state Ready TxInFly 0 2026-01-04T03:02:05.168922Z node 3 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T03:02:05.169005Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4210: Resolve path at 72075186224037888: reason# empty path 2026-01-04T03:02:05.169135Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [3:975:2822] 2026-01-04T03:02:05.169192Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T03:02:05.169245Z node 3 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T03:02:05.169283Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T03:02:05.169605Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:742: TxInitSchemaDefaults.Execute 2026-01-04T03:02:05.169921Z node 3 :TX_DATASHARD DEBUG: datashard__init.cpp:754: TxInitSchemaDefaults.Complete 2026-01-04T03:02:05.170906Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:5937: Got TEvDataShard::TEvSchemaChanged for unknown txId 281474976715657 message# Source { RawX1: 956 RawX2: 12884904700 } Origin: 72075186224037888 State: 2 TxId: 281474976715657 Step: 0 Generation: 2 2026-01-04T03:02:05.171274Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 
72075186224037888 2026-01-04T03:02:05.171381Z node 3 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T03:02:05.171619Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 2000 2026-01-04T03:02:05.171675Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T03:02:05.172129Z node 3 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T03:02:05.172254Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T03:02:05.172300Z node 3 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T03:02:05.172343Z node 3 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T03:02:05.172389Z node 3 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T03:02:05.172632Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715657 datashard 72075186224037888 state Ready 2026-01-04T03:02:05.202446Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:4271: Got scheme resolve result at 72075186224037888: Status: StatusSuccess Path: "/Root/table-1" PathDescription { Self { Name: "table-1" PathId: 38 SchemeshardId: 72057594046644480 PathType: EPathTypeTable CreateFinished: true CreateTxId: 281474976715657 CreateStep: 2000 ParentPathId: 1 PathState: EPathStateNoChanges Owner: "root@builtin" ACL: "" EffectiveACL: "" PathVersion: 3 PathSubType: EPathSubTypeEmpty Version { GeneralVersion: 3 ACLVersion: 0 EffectiveACLVersion: 0 UserAttrsVersion: 1 ChildrenVersion: 0 TableSchemaVersion: 1 TablePartitionVersion: 1 } ChildrenExist: false } Table { Name: "table-1" Columns { Name: "key" Type: "Uint32" TypeId: 2 Id: 1 NotNull: false IsBuildInProgress: false } Columns { Name: "value" Type: "Uint32" TypeId: 2 Id: 2 NotNull: false IsBuildInProgress: false } KeyColumnNames: "key" KeyColumnIds: 1 TableSchemaVersion: 1 IsBackup: false IsRestore: false } TableStats { DataSize: 0 RowCount: 0 IndexSize: 0 LastAccessTime: 0 LastUpdateTime: 0 ImmediateTxCompleted: 0 PlannedTxCompleted: 0 TxRejectedByOverload: 0 TxRejectedBySpace: 0 TxCompleteLagMsec: 0 InFlightTxCount: 0 RowUpdates: 0 RowDeletes: 0 RowReads: 0 RangeReads: 0 PartCount: 1 RangeReadRows: 0 StoragePools { } ByKeyFilterSize: 0 HasSchemaChanges: false LocksAcquired: 0 LocksWholeShard: 0 LocksBroken: 0 } TabletMetrics { CPU: 0 Memory: 0 Network: 0 Storage: 0 ReadThroughput: 0 WriteThroughput: 0 ReadIops: 0 WriteIops: 0 } DomainDescription { SchemeShardId_Depricated: 72057594046644480 PathId_Depricated: 1 ProcessingParams { Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } DomainKey { SchemeShard: 72057594046644480 PathId: 1 } PathsInside: 37 PathsLimit: 10000 ShardsInside: 1 ShardsLimit: 200000 ResourcesDomainKey { SchemeShard: 72057594046644480 PathId: 1 } DiskSpaceUsage { Tables { TotalSize: 0 DataSize: 0 IndexSize: 0 } Topics { ReserveSize: 0 AccountSize: 0 DataSize: 0 UsedReserveSize: 0 } } PQPartitionsInside: 0 PQPartitionsLimit: 
1000000 SchemeLimits { MaxDepth: 32 MaxPaths: 10000 MaxChildrenInDir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 38 PathOwnerId: 72057594046644480 2026-01-04T03:02:05.202830Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 2000 2026-01-04T03:02:05.202975Z node 3 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T03:02:05.203329Z node 3 :TX_DATASHARD DEBUG: datashard__store_table_path.cpp:20: TTxStoreTablePath::Execute at 72075186224037888 2026-01-04T03:02:05.204538Z node 3 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [3:979:2826], serverId# [3:981:2827], sessionId# [0:0:0] 2026-01-04T03:02:05.218588Z node 3 :TX_DATASHARD DEBUG: datashard__store_table_path.cpp:39: TTxStoreTablePath::Complete at 72075186224037888 |98.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_init/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_init/unittest >> test_http_api.py::TestHttpApi::test_integral_results [GOOD] >> test_http_api.py::TestHttpApi::test_optional_results >> TDescriberTests::TopicExists >> KesusProxyTest::ReconnectsWithKesusWhenNotConnected >> DataShardDiskQuotas::ShardRestartOnCreateTable [GOOD] >> DataShardDiskQuotas::ShardRestartOnSplitDst >> KesusProxyTest::ReconnectsWithKesusWhenNotConnected [GOOD] >> KesusProxyTest::ReconnectsWithKesusWhenPipeDestroyed [GOOD] >> KesusProxyTest::ReconnectsWithKesusAfterSeveralRetries [GOOD] >> KesusProxyTest::RejectsNotCanonizedResourceName [GOOD] >> KesusProxyTest::SubscribesOnResource [GOOD] >> KesusProxyTest::SubscribesOnResourcesWhenReconnected [GOOD] >> KesusProxyTest::ProxyRequestDuringDisconnection >> test_http_api.py::TestHttpApi::test_optional_results [GOOD] >> test_http_api.py::TestHttpApi::test_pg_results >> KesusProxyTest::ProxyRequestDuringDisconnection [GOOD] >> KesusProxyTest::DeactivateSessionWhenResourceClosed [GOOD] >> KesusProxyTest::SendsProxySessionOnceOnSuccess [GOOD] >> KesusProxyTest::SendsProxySessionOnceOnFailure [GOOD] >> KesusProxyTest::AnswersWithSessionWhenResourceIsAlreadyKnown [GOOD] >> KesusProxyTest::SendsBrokenUpdateWhenKesusPassesError [GOOD] >> KesusProxyTest::AllocatesResourceWithKesus [GOOD] >> KesusProxyTest::DisconnectsDuringActiveSession [GOOD] >> KesusProxyTest::AllocatesResourceOffline [GOOD] >> KesusProxyTest::ConnectsDuringOfflineAllocation >> KesusProxyTest::ConnectsDuringOfflineAllocation [GOOD] >> KesusResourceAllocationStatisticsTest::ReturnsDefaultValues [GOOD] >> KesusResourceAllocationStatisticsTest::CalculatesAverage [GOOD] >> KesusResourceAllocationStatisticsTest::TakesBestStat [GOOD] >> TQuoterServiceTest::StaticRateLimiter >> TTxDataShardBuildFulltextIndexScan::Build [GOOD] >> TTxDataShardBuildFulltextIndexScan::BuildWithData >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] >> test_http_api.py::TestHttpApi::test_pg_results [GOOD] >> test_http_api.py::TestHttpApi::test_set_result >> 
MediatorTest::TabletAckWhenDead [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types4-all_types4-index4---SYNC] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertBlockedByVolatileConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertBlockedByVolatileNoConflict >> KeyValueGRPCService::SimpleRenameUnexistedKeyV1 [GOOD] >> KeyValueGRPCService::SimpleRenameUnexistedKeyV2 >> MediatorTest::PlanStepAckToReconnectedMediator >> TListAllTopicsTests::PlainList >> test.py::test[solomon-BadDownsamplingAggregation-] >> BasicExample::BasicExample [GOOD] >> test_http_api.py::TestHttpApi::test_set_result [GOOD] >> test_http_api.py::TestHttpApi::test_complex_results >> TopicSessionTests::WrongFieldType [GOOD] >> TDqSolomonWriteActorTest::TestWriteFormat [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/query_cache/py3test >> test_query_cache.py::TestQueryCache::test [GOOD] |98.8%| [TM] {RESULT} ydb/tests/functional/query_cache/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/query_cache/py3test >> TopicSessionTests::RestartSessionIfNewClientWithOffset >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues >> test_http_api.py::TestHttpApi::test_complex_results [GOOD] >> test_http_api.py::TestHttpApi::test_result_offset_limit >> DataShardFollowers::FollowerAfterDataCompaction [GOOD] >> DataShardFollowers::FollowerDuringSysPartSwitch |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestPgAsyncReplication::test_async_replication[table_index_0__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> TQuoterServiceTest::StaticRateLimiter [GOOD] >> TQuoterServiceTest::StaticMultipleAndResources >> TTxDataShardBuildFulltextIndexScan::BuildWithData [GOOD] >> TTxDataShardBuildFulltextIndexScan::BuildWithTextData >> MediatorTest::PlanStepAckToReconnectedMediator [GOOD] >> test_http_api.py::TestHttpApi::test_result_offset_limit [GOOD] >> test_http_api.py::TestHttpApi::test_openapi_spec |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/core/viewer/tests/py3test >> test.py::TestViewer::test_viewer_peers [GOOD] |98.8%| [TM] {RESULT} ydb/core/viewer/tests/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__SYNC-pk_types3-all_types3-index3-Datetime--SYNC] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/viewer/tests/py3test >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] >> MediatorTest::WatcherReconnect |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/basic_example/gtest >> BasicExample::BasicExample [GOOD] |98.8%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/basic_example/gtest |98.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/basic_example/gtest |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_ttl_Uint64-pk_types15-all_types15-index15-Uint64--] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> KeyValueGRPCService::SimpleRenameUnexistedKeyV2 [GOOD] >> KeyValueGRPCService::SimpleConcatUnexistedKeyV1 >> 
ServerRestartTest::RestartOnGetSession [GOOD] >> TDqPqRdReadActorTests::TestReadFromTopic2 [GOOD] >> test_example.py::TestExample::test_skipped_with_issue [SKIPPED] >> TDqPqRdReadActorTests::IgnoreUndeliveredWithWrongGeneration [GOOD] >> test.py::test[solomon-BadDownsamplingAggregation-] [GOOD] >> test.py::test[solomon-BadDownsamplingDisabled-] >> TDqPqRdReadActorTests::SessionError [GOOD] >> TQuoterServiceTest::StaticMultipleAndResources [GOOD] >> TQuoterServiceTest::StaticDeadlines >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertBlockedByVolatileNoConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitBlockedByVolatileConflict >> TMLPChangerTests::CommitTest [GOOD] >> TMLPChangerTests::ReadAndReleaseTest |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/example/py3test >> test_example.py::TestExample::test_skipped_with_issue [SKIPPED] |98.8%| [TM] {RESULT} ydb/tests/example/py3test |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/example/py3test >> TDqPqRdReadActorTests::ReadWithFreeSpace [GOOD] >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1_UNIQUE_SYNC-pk_types17-all_types17-index17-DyNumber-UNIQUE-SYNC] [GOOD] >> DataShardStats::HistogramStatsCorrect [GOOD] >> DataShardStats::BlobsStatsCorrect >> TTxDataShardBuildFulltextIndexScan::BuildWithTextData [GOOD] >> TTxDataShardBuildFulltextIndexScan::BuildWithTextFromKey >> MediatorTest::WatcherReconnect [GOOD] >> TDqPqRdReadActorTests::TestSaveLoadPqRdRead [GOOD] >> TDqPqRdReadActorTests::CoordinatorChanged >> DataShardReplication::SimpleApplyChanges >> MediatorTest::MultipleSteps >> DataShardDiskQuotas::ShardRestartOnSplitDst [GOOD] >> TDescriberTests::TopicExists [GOOD] >> TDescriberTests::TopicNotExists |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/server_restart/gtest >> ServerRestartTest::RestartOnGetSession [GOOD] |98.8%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/server_restart/gtest |98.8%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/server_restart/gtest >> DataShardFollowers::FollowerDuringSysPartSwitch [GOOD] >> DataShardFollowers::FollowerDuringDataPartSwitch >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] >> TQuoterServiceTest::StaticDeadlines [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_disk_quotas/unittest >> DataShardDiskQuotas::ShardRestartOnSplitDst [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T03:01:52.645684Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:01:52.813917Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:01:52.814524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:01:52.814584Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:01:53.256569Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:53.256701Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:53.360761Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495709826953 != 1767495709826956 2026-01-04T03:01:53.378070Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:53.427929Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:53.539165Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) ... Setting hard disk quota to 1 byte 2026-01-04T03:01:53.884011Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:833:2727], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T03:01:53.884087Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5345: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T03:01:53.884133Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6167: Pipe server connected, at tablet: 72057594046644480 2026-01-04T03:01:53.884230Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271122432, Sender [1:830:2725], Recipient [1:399:2398]: {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T03:01:53.884271Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5259: StateWork, processing event TEvSchemeShard::TEvModifySchemeTransaction 2026-01-04T03:01:54.036066Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } DatabaseQuotas { data_size_hard_quota: 1 } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-04T03:01:54.036305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-04T03:01:54.037757Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-04T03:01:54.037872Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 
281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-04T03:01:54.038663Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T03:01:54.038831Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:01:54.038953Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T03:01:54.039746Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-04T03:01:54.039899Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T03:01:54.039951Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T03:01:54.039988Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 281474976715657:0 2026-01-04T03:01:54.040272Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T03:01:54.040315Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T03:01:54.040398Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-04T03:01:54.040457Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-04T03:01:54.040509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2026-01-04T03:01:54.040552Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 2026-01-04T03:01:54.040670Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T03:01:54.041204Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [1:833:2727], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-04T03:01:54.041246Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-04T03:01:54.041280Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046644480 2026-01-04T03:01:54.041424Z node 1 :FLAT_TX_SCHEMESHARD TRACE: 
schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T03:01:54.041463Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 281474976715657:0 2026-01-04T03:01:54.041583Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T03:01:54.041626Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T03:01:54.041689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-04T03:01:54.041736Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T03:01:54.041777Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 2026-01-04T03:01:54.041867Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T03:01:54.042265Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 2026-01-04T03:01:54.042296Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:285: Activate send for 281474976715657:0 2026-01-04T03:01:54.042393Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435072, Sender [1:399:2398], Recipient [1:399:2398]: NKikimr::NSchemeShard::TEvPrivate::TEvProgressOperation 2026-01-04T03:01:54.042422Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5265: StateWork, processing event TEvPrivate::TEvProgressOperation 2026-01-04T03:01:54.042523Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-04T03:01:54.042569Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-04T03:01:54.042618Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-04T03:01:54.042653Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:173: TSideEffects ApplyOnExecute at tablet# 72057594046644480 2026-01-04T03:01:54.042689Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2026-01-04T03:01:54.046681Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T03:01:54.047218Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard__operation_side_effects.cpp:216: TSideEffects ApplyOnComplete at tablet# 72057594046644480 
2026-01-04T03:01:54.047266Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2026-01-04T03:01:54.047396Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2026-01-04T03:01:54.047674Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269091328, Sender [1:395:2394], Recipient [1:399:2398]: NKikimrTx.TEvProposeTransactionStatus Status: 16 StepId: 1500 TxId: 281474976715657 2026-01-04T03:01:54.049250Z node 1 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877761, Sender [1:839:2733], Recipient [1:399:2398]: NKikimr::TEvTabletPipe::TE ... .h:3153: StateWork, received event# 268829696, Sender [3:915:2777], Recipient [3:924:2783]: NKikimr::TEvTablet::TEvTabletDead 2026-01-04T03:02:17.455025Z node 3 :TX_DATASHARD INFO: datashard.cpp:256: OnTabletDead: 72075186224037888 2026-01-04T03:02:17.455185Z node 3 :TX_DATASHARD INFO: datashard.cpp:1310: Change sender killed: at tablet: 72075186224037888 2026-01-04T03:02:17.456574Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 2146435084, Sender [3:400:2399], Recipient [3:400:2399]: NKikimr::NSchemeShard::TEvPrivate::TEvCleanDroppedPaths 2026-01-04T03:02:17.456617Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5439: StateWork, processing event TEvPrivate::TEvCleanDroppedPaths 2026-01-04T03:02:17.456667Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:123: TTxCleanDroppedPaths Execute, 1 paths in candidate queue, at schemeshard: 72057594046644480 2026-01-04T03:02:17.456706Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__clean_pathes.cpp:137: TTxCleanDroppedPaths: PersistRemovePath for PathId# [OwnerId: 72057594046644480, LocalPathId: 38], at schemeshard: 72057594046644480 2026-01-04T03:02:17.456781Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:611: DecrementPathDbRefCount reason remove path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 4 2026-01-04T03:02:17.457619Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877764, Sender [3:1246:3046], Recipient [3:400:2399]: NKikimr::TEvTabletPipe::TEvServerDisconnected 2026-01-04T03:02:17.457663Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5346: StateWork, processing event TEvTabletPipe::TEvServerDisconnected 2026-01-04T03:02:17.457688Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6215: Server pipe is reset, at schemeshard: 72057594046644480 2026-01-04T03:02:17.458165Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877763, Sender [3:921:2781], Recipient [3:400:2399]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72075186224037888 ClientId: [3:921:2781] ServerId: [3:925:2784] } 2026-01-04T03:02:17.458206Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-04T03:02:17.458247Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6177: Client pipe, to tablet: 72075186224037888, from:72057594046644480 is reset 2026-01-04T03:02:17.458628Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:176: Deleted shardIdx 
72057594046644480:1 2026-01-04T03:02:17.458703Z node 3 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__delete_tablet_reply.cpp:182: Close pipe to deleted shardIdx 72057594046644480:1 tabletId 72075186224037888 2026-01-04T03:02:17.458908Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 269877763, Sender [3:1540:3271], Recipient [3:400:2399]: NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594037968897 ClientId: [3:1540:3271] ServerId: [3:1541:3272] } 2026-01-04T03:02:17.458933Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5344: StateWork, processing event TEvTabletPipe::TEvClientDestroyed 2026-01-04T03:02:17.458954Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:6177: Client pipe, to tablet: 72057594037968897, from:72057594046644480 is reset 2026-01-04T03:02:17.459194Z node 3 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__clean_pathes.cpp:160: TTxCleanDroppedPaths Complete, done PersistRemovePath for 1 paths, skipped 0, left 0 candidates, at schemeshard: 72057594046644480 2026-01-04T03:02:17.459288Z node 3 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 3, TabletId: 72075186224037888 not found 2026-01-04T03:02:17.754867Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271125000, Sender [0:0:0], Recipient [3:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T03:02:17.754949Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5256: StateWork, processing event TEvSchemeShard::TEvWakeupToMeasureSelfResponseTime 2026-01-04T03:02:17.755032Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5248: StateWork, received event# 271124999, Sender [3:400:2399], Recipient [3:400:2399]: NKikimr::NSchemeShard::TEvSchemeShard::TEvMeasureSelfResponseTime 2026-01-04T03:02:17.755060Z node 3 :FLAT_TX_SCHEMESHARD TRACE: schemeshard_impl.cpp:5255: StateWork, processing event TEvSchemeShard::TEvMeasureSelfResponseTime ... 
Inserting the 4th row 2026-01-04T03:02:17.939268Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [3:1573:3286], Recipient [3:1394:3158]: NKikimrDataEvents.TEvWrite Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2026-01-04T03:02:17.939343Z node 3 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037890 2026-01-04T03:02:17.939486Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [3:1394:3158], Recipient [3:1394:3158]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2026-01-04T03:02:17.939532Z node 3 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-04T03:02:17.939609Z node 3 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037890 2026-01-04T03:02:17.939773Z node 3 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037890, record: Operations { Type: OPERATION_UPSERT TableId { OwnerId: 72057594046644480 TableId: 39 SchemaVersion: 1 } ColumnIds: 1 ColumnIds: 2 PayloadIndex: 0 PayloadFormat: FORMAT_CELLVEC DefaultFilledColumnCount: 0 } TxMode: MODE_IMMEDIATE OverloadSubscribe: 1 2026-01-04T03:02:17.939865Z node 3 :TX_DATASHARD TRACE: datashard_write_operation.cpp:256: Table /Root/table2, shard: 72075186224037890, write point (Uint32 : 4) 2026-01-04T03:02:17.939923Z node 3 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint32 : 4) table: [72057594046644480:39:1] 2026-01-04T03:02:17.940019Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit CheckWrite 2026-01-04T03:02:17.940078Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2026-01-04T03:02:17.940125Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit CheckWrite 2026-01-04T03:02:17.940173Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit BuildAndWaitDependencies 2026-01-04T03:02:17.940212Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit BuildAndWaitDependencies 2026-01-04T03:02:17.940259Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037890 CompleteEdge# v2500/281474976715659 IncompleteEdge# v{min} UnprotectedReadEdge# v23000/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-04T03:02:17.940325Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037890 2026-01-04T03:02:17.940372Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2026-01-04T03:02:17.940404Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit BuildAndWaitDependencies 2026-01-04T03:02:17.940430Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit BlockFailPoint 2026-01-04T03:02:17.940455Z node 3 :TX_DATASHARD TRACE: 
datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit BlockFailPoint 2026-01-04T03:02:17.940483Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2026-01-04T03:02:17.940508Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit BlockFailPoint 2026-01-04T03:02:17.940531Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit ExecuteWrite 2026-01-04T03:02:17.940559Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit ExecuteWrite 2026-01-04T03:02:17.940592Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037890 2026-01-04T03:02:17.940647Z node 3 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037890 CompleteEdge# v2500/281474976715659 IncompleteEdge# v{min} UnprotectedReadEdge# v23000/18446744073709551615 ImmediateWriteEdge# v{min} ImmediateWriteEdgeReplied# v{min} 2026-01-04T03:02:17.940791Z node 3 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037890, row count=1 2026-01-04T03:02:17.940854Z node 3 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-04T03:02:17.940929Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is ExecutedNoMoreRestarts 2026-01-04T03:02:17.940961Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit ExecuteWrite 2026-01-04T03:02:17.941003Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit FinishProposeWrite 2026-01-04T03:02:17.941047Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit FinishProposeWrite 2026-01-04T03:02:17.941138Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is DelayCompleteNoMoreRestarts 2026-01-04T03:02:17.941171Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit FinishProposeWrite 2026-01-04T03:02:17.941217Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037890 to execution unit CompletedOperations 2026-01-04T03:02:17.941247Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037890 on unit CompletedOperations 2026-01-04T03:02:17.941290Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037890 is Executed 2026-01-04T03:02:17.941310Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037890 executing on unit CompletedOperations 2026-01-04T03:02:17.941338Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037890 has finished 2026-01-04T03:02:17.952812Z node 3 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037890 2026-01-04T03:02:17.952906Z node 3 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:2] at 72075186224037890 on unit FinishProposeWrite 2026-01-04T03:02:17.952973Z node 3 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction 
complete txid 2 at tablet 72075186224037890 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-04T03:02:17.953083Z node 3 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 |98.8%| [TM] {RESULT} ydb/core/tx/datashard/ut_disk_quotas/unittest |98.8%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_disk_quotas/unittest >> TDqPqRdReadActorTests::CoordinatorChanged [GOOD] >> KeyValueGRPCService::SimpleConcatUnexistedKeyV1 [GOOD] >> KeyValueGRPCService::SimpleConcatUnexistedKeyV2 >> TDqPqRdReadActorTests::Backpressure >> TabletService_ChangeSchema::Basics >> TListAllTopicsTests::PlainList [GOOD] >> TListAllTopicsTests::RecursiveList >> TFetchRequestTests::SmallBytesRead [GOOD] >> TFetchRequestTests::EmptyTopic >> test.py::test[solomon-BadDownsamplingDisabled-] [GOOD] >> test.py::test[solomon-BadDownsamplingFill-] >> TopicSessionTests::RestartSessionIfNewClientWithOffset [GOOD] >> DataShardCompaction::CompactBorrowed |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1__SYNC-pk_types3-all_types3-index3-Datetime--SYNC] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> TTxDataShardBuildFulltextIndexScan::BuildWithTextFromKey [GOOD] >> TTxDataShardBuildFulltextIndexScan::BuildWithRelevance >> TopicSessionTests::ReadNonExistentTopic >> test.py::test_order_conflict [GOOD] >> test.py::test_missing_value [GOOD] >> test.py::test_unexpected_value [GOOD] >> test.py::test_local |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_0_UNIQUE_SYNC-pk_types14-all_types14-index14-DyNumber-UNIQUE-SYNC] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> TDqSolomonWriteActorTest::TestWriteBigBatchMonitoring [GOOD] >> TDqSolomonWriteActorTest::TestWriteBigBatchSolomon >> MediatorTest::MultipleSteps [GOOD] >> BulkUpsert::BulkUpsert >> MediatorTest::WatchesBeforeFirstStep >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitBlockedByVolatileConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitBlockedByVolatileNoConflict |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__SYNC-pk_types3-all_types3-index3---SYNC] [GOOD] |98.8%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> DataShardReplication::SimpleApplyChanges [GOOD] >> DataShardReplication::SplitMergeChanges >> QuoterWithKesusTest::ForbidsNotCanonizedQuoterPath [GOOD] >> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] >> TCloudEventsProcessorTests::TestCreateCloudEventProcessor >> KeyValueGRPCService::SimpleConcatUnexistedKeyV2 [GOOD] >> KeyValueGRPCService::SimpleCopyUnexistedKeyV1 >> TabletService_ChangeSchema::Basics [GOOD] >> TabletService_ChangeSchema::OnlyAdminsAllowed >> test.py::test[solomon-BadDownsamplingFill-] [GOOD] >> test.py::test[solomon-BadDownsamplingInterval-] >> KqpTpch::Query01 >> MediatorTest::WatchesBeforeFirstStep [GOOD] >> DataShardFollowers::FollowerDuringDataPartSwitch [GOOD] >> DataShardFollowers::FollowerReadDuringSplit >> MediatorTest::RebootTargetTablets >> TTxDataShardBuildFulltextIndexScan::BuildWithRelevance [GOOD] >> TTxDataShardBuildIndexScan::BadRequest >> 
Coordinator::RestoreTenantConfiguration-AlterDatabaseCreateHiveFirst-true [GOOD] >> Coordinator::LastEmptyStepResent >> DataShardStats::BlobsStatsCorrect [GOOD] >> DataShardStats::SharedCacheGarbage >> TopicSessionTests::ReadNonExistentTopic [GOOD] >> TDescriberTests::TopicNotExists [GOOD] >> TDescriberTests::TopicNotTopic >> TopicSessionTests::SlowSession >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split >> TMemoryController::Counters >> QuoterWithKesusTest::ForbidsNotCanonizedResourcePath [GOOD] >> QuoterWithKesusTest::HandlesNonExistentResource >> GenericProviderLookupActor::Lookup >> GenericProviderLookupActor::Lookup [GOOD] >> GenericProviderLookupActor::LookupMultiMatches [GOOD] >> GenericProviderLookupActor::LookupWithErrors >> GenericProviderLookupActor::LookupWithErrors [GOOD] |98.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_DyNumber_1_UNIQUE_SYNC-pk_types17-all_types17-index17-DyNumber-UNIQUE-SYNC] [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitBlockedByVolatileNoConflict [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertNotBlockedByOlderVolatile >> DataShardReplication::SplitMergeChanges [GOOD] >> DataShardReplication::SplitMergeChangesReboots ------- [TS] {asan, default-linux-x86_64, release} ydb/library/yql/providers/generic/actors/ut/unittest >> GenericProviderLookupActor::LookupWithErrors [GOOD] Test command err: 2026-01-04 03:02:29.478 INFO ydb-library-yql-providers-generic-actors-ut(pid=1075108, tid=0x00007F55B5832FC0) [generic] yql_generic_lookup_actor.cpp:153: New generic proivider lookup source actor(ActorId=[1:4:2051]) for kind=YDB, endpoint=host: "some_host" port: 2135, database=some_db, use_tls=1, protocol=NATIVE, table=lookup_test 2026-01-04 03:02:29.496 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1075108, tid=0x00007F55B5832FC0) [generic] yql_generic_lookup_actor.cpp:301: ActorId=[1:4:2051] Got LookupRequest for 3 keys Call ListSplits. 
selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 GENERIC-CONNECTOR-MOCK Expected: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ 
left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } ... " } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 GENERIC-CONNECTOR-MOCK Actual: selects { data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } what { items { column { name: "id" type { type_id: UINT64 } } } items { column { name: "optional_id" type { optional_type { item { type_id: UINT64 } } } } } items { column { name: "string_value" type { optional_type { item { type_id: STRING } } } } } } from { table: "lookup_test" } where { filter_typed { disjunction { operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 1 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 101 } } } } } } } operands { conjunction { operands { comparison { 
operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 0 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 100 } } } } } } } operands { conjunction { operands { comparison { operation: EQ left_value { column: "id" } right_value { typed_value { type { type_id: UINT64 } value { uint64_value: 2 } } } } } operands { comparison { operation: EQ left_value { column: "optional_id" } right_value { typed_value { type { optional_type { item { type_id: UINT64 } } } value { uint64_value: 102 } } } } } } } } } } } max_split_count: 1 ListSplits result. GRpcStatusCode: 0 2026-01-04 03:02:29.595 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1075108, tid=0x00007B55B0956640) [generic] yql_generic_lookup_actor.cpp:332: ActorId=[3:7591336439921275807:2051] Got TListSplitsStreamIterator 2026-01-04 03:02:29.595 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1075108, tid=0x00007B55B0956640) [generic] yql_generic_lookup_actor.cpp:200: ActorId=[3:7591336439921275807:2051] Got TListSplitsResponse from Connector Call ReadSplits. data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY GENERIC-CONNECTOR-MOCK Expected: data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY GENERIC-CONNECTOR-MOCK Actual: data_source_instance { kind: YDB endpoint { host: "some_host" port: 2135 } database: "some_db" credentials { token { type: "IAM" value: "token_value" } } use_tls: true protocol: NATIVE } splits { select { from { table: "example_1" } } description: "Actual split info is not important" } format: ARROW_IPC_STREAMING filtering: FILTERING_MANDATORY ReadSplits result. 
GRpcStatusCode: 0 2026-01-04 03:02:29.595 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1075108, tid=0x00007B55B0956640) [generic] yql_generic_lookup_actor.cpp:233: ActorId=[3:7591336439921275807:2051] Got ReadSplitsStreamIterator from Connector 2026-01-04 03:02:29.595 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1075108, tid=0x00007B55B0956640) [generic] yql_generic_lookup_actor.cpp:354: ActorId=[3:7591336439921275807:2051] Got DataChunk 2026-01-04 03:02:29.596 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1075108, tid=0x00007B55B0956640) [generic] yql_generic_lookup_actor.cpp:365: ActorId=[3:7591336439921275807:2051] Got EOF 2026-01-04 03:02:29.596 DEBUG ydb-library-yql-providers-generic-actors-ut(pid=1075108, tid=0x00007B55B0956640) [generic] yql_generic_lookup_actor.cpp:419: Sending lookup results for 3 keys |98.9%| [TS] {RESULT} ydb/library/yql/providers/generic/actors/ut/unittest >> test_canonical_requests.py::Test::test_essential [FAIL] |98.9%| [TS] {BAZEL_UPLOAD} ydb/library/yql/providers/generic/actors/ut/unittest >> TMLPChangerTests::ReadAndReleaseTest [GOOD] >> TMLPChangerTests::CapacityTest [GOOD] >> TMLPReaderTests::TopicNotExists >> TabletService_ChangeSchema::OnlyAdminsAllowed [GOOD] >> TabletService_ExecuteMiniKQL::BasicMiniKQLRead >> MediatorTest::RebootTargetTablets [GOOD] >> TListAllTopicsTests::RecursiveList [GOOD] >> TListAllTopicsTests::ListLimitAndPaging >> MediatorTest::ResendSubset >> test.py::test[solomon-BadDownsamplingInterval-] [GOOD] >> test.py::test[solomon-Basic-default.txt] >> KeyValueGRPCService::SimpleCopyUnexistedKeyV1 [GOOD] >> KeyValueGRPCService::SimpleCopyUnexistedKeyV2 |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/http_api/py3test >> test_http_api.py::TestHttpApi::test_openapi_spec [GOOD] |98.9%| [TM] {RESULT} ydb/tests/fq/http_api/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/http_api/py3test >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution >> KqpTpch::Query01 [GOOD] >> KqpTpch::Query02 >> TDqSolomonWriteActorTest::TestWriteBigBatchSolomon [GOOD] >> TDqSolomonWriteActorTest::TestWriteWithTimeseries >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] >> Coordinator::LastEmptyStepResent [GOOD] >> CoordinatorVolatile::PlanResentOnReboots >> DataShardFollowers::FollowerReadDuringSplit [GOOD] >> TTxDataShardBuildIndexScan::BadRequest [GOOD] >> TTxDataShardBuildIndexScan::RunScan >> QuoterWithKesusTest::HandlesNonExistentResource [GOOD] >> QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource >> TabletService_ExecuteMiniKQL::BasicMiniKQLRead [GOOD] >> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_followers/unittest >> DataShardFollowers::FollowerReadDuringSplit [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T03:01:40.623392Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:01:40.813380Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:01:40.814059Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:01:40.814132Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:01:41.294982Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:41.295145Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:41.388300Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495697742291 != 1767495697742294 2026-01-04T03:01:41.409439Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:41.459457Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:41.563454Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:01:41.886470Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:01:41.901215Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:01:42.016594Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-04T03:01:42.016678Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T03:01:42.017523Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-04T03:01:42.181008Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 PartitionConfig { FollowerGroups { FollowerCount: 1 AllowLeaderPromotion: false } } } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T03:01:42.181128Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T03:01:42.182909Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T03:01:42.183047Z node 1 
:TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-04T03:01:42.183494Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T03:01:42.183819Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T03:01:42.184020Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T03:01:42.189249Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:42.189862Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T03:01:42.190688Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T03:01:42.190788Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T03:01:42.243805Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T03:01:42.244876Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T03:01:42.246091Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:886:2765] 2026-01-04T03:01:42.246386Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T03:01:42.292375Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:886:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T03:01:42.293185Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T03:01:42.293287Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T03:01:42.295834Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T03:01:42.295956Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T03:01:42.296028Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T03:01:42.298779Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T03:01:42.299013Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: 
TDataShard::TTxInitRestored::Execute 2026-01-04T03:01:42.299147Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T03:01:42.310113Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T03:01:42.348928Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T03:01:42.350086Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T03:01:42.350240Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T03:01:42.350282Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T03:01:42.350330Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T03:01:42.350381Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T03:01:42.350564Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:886:2765], Recipient [1:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T03:01:42.351436Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T03:01:42.352757Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T03:01:42.352879Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T03:01:42.352982Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T03:01:42.353023Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T03:01:42.353091Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T03:01:42.353135Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T03:01:42.353185Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T03:01:42.353241Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T03:01:42.353289Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T03:01:42.353535Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T03:01:42.353589Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T03:01:42.353651Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:882:2762], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T03:01:42.354156Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, 
received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-04T03:01:42.354208Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T03:01:42.354329Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T03:01:42.354716Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T03:01:42.354808Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T03:01:42.354914Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T03:01:4 ... }, { items { uint32_value: 2 } items { uint32_value: 22 } } ... reading from the right follower 2026-01-04T03:02:33.803402Z node 8 :TX_PROXY DEBUG: proxy_impl.cpp:352: actor# [8:68:2115] Handle TEvExecuteKqpTransaction 2026-01-04T03:02:33.803500Z node 8 :TX_PROXY DEBUG: proxy_impl.cpp:341: actor# [8:68:2115] TxId# 281474976710665 ProcessProposeKqpTransaction 2026-01-04T03:02:33.805060Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269877761, Sender [8:1309:3063], Recipient [8:1282:3045]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T03:02:33.805154Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3303: StateWorkAsFollower, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T03:02:33.805236Z node 8 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at follower 1 tablet# 72075186224037890, clientId# [8:1307:3062], serverId# [8:1309:3063], sessionId# [0:0:0] 2026-01-04T03:02:33.807605Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553215, Sender [8:1313:3064], Recipient [8:1282:3045]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 RangesSize: 1 2026-01-04T03:02:33.807679Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3305: StateWorkAsFollower, processing event TEvDataShard::TEvRead 2026-01-04T03:02:33.807820Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} queued, type NKikimr::NDataShard::TDataShard::TTxReadViaPipeline 2026-01-04T03:02:33.807896Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T03:02:33.808090Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2026-01-04T03:02:33.808191Z node 8 :TX_DATASHARD DEBUG: datashard__init.cpp:837: Updating sys metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=6, epoch=1} 2026-01-04T03:02:33.808895Z node 8 :TX_DATASHARD DEBUG: datashard__init.cpp:854: Updating tables metadata on follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=4, epoch=1} 2026-01-04T03:02:33.809461Z node 8 :TX_DATASHARD DEBUG: datashard__init.cpp:925: Updating snapshots metadata on 
follower, tabletId 72075186224037890 prev TChangeCounter{serial=0, epoch=0} current TChangeCounter{serial=0, epoch=1} 2026-01-04T03:02:33.809579Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037890 changed HEAD read to repeatable v2500/18446744073709551615 2026-01-04T03:02:33.809671Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit CheckRead 2026-01-04T03:02:33.809791Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-04T03:02:33.809870Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit CheckRead 2026-01-04T03:02:33.809928Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit BuildAndWaitDependencies 2026-01-04T03:02:33.809976Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit BuildAndWaitDependencies 2026-01-04T03:02:33.810022Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:1] at 72075186224037890 2026-01-04T03:02:33.810078Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-04T03:02:33.810106Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit BuildAndWaitDependencies 2026-01-04T03:02:33.810132Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit ExecuteRead 2026-01-04T03:02:33.810159Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2026-01-04T03:02:33.810323Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037890 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2026-01-04T03:02:33.810615Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Restart 2026-01-04T03:02:33.810654Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Restart at tablet# 72075186224037890 2026-01-04T03:02:33.810747Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 1 -> retry Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T03:02:33.810835Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} pin 0 (0 b) load 1 (65 b) 2026-01-04T03:02:33.810905Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} took 8388608b of static mem, Memory{8388608 dyn 0} 2026-01-04T03:02:33.811004Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} request page collection [72075186224037888:1:23:1:12288:190:0] pages [ 0 ] 2026-01-04T03:02:33.811090Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} postponed, loading 1 pages, 65 bytes, newly pinned 0 pages, 0 bytes 
2026-01-04T03:02:33.811332Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} got result TEvResult{1 pages [72075186224037888:1:23:1:12288:190:0] ok OK}, type 1 2026-01-04T03:02:33.811442Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} activated 2026-01-04T03:02:33.811531Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037890, FollowerId 1 2026-01-04T03:02:33.811572Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit ExecuteRead 2026-01-04T03:02:33.811685Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037890 Execute read# 2, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 MaxRowsInResult: 1 Reverse: false TotalRowsLimit: 1001 } 2026-01-04T03:02:33.811904Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037890 Complete read# {[8:1313:3064], 0} after executionsCount# 2 2026-01-04T03:02:33.811984Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037890 read iterator# {[8:1313:3064], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 1, total queries# 1, firstUnprocessed# 0 2026-01-04T03:02:33.812099Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-04T03:02:33.812129Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit ExecuteRead 2026-01-04T03:02:33.812158Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:1] at 72075186224037890 to execution unit CompletedOperations 2026-01-04T03:02:33.812188Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:1] at 72075186224037890 on unit CompletedOperations 2026-01-04T03:02:33.812242Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:1] at 72075186224037890 is Executed 2026-01-04T03:02:33.812268Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:1] at 72075186224037890 executing on unit CompletedOperations 2026-01-04T03:02:33.812306Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:1] at 72075186224037890 has finished 2026-01-04T03:02:33.812352Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037890 2026-01-04T03:02:33.812447Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} hope 2 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T03:02:33.812518Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{1, NKikimr::NDataShard::TDataShard::TTxReadViaPipeline} release 8388608b of static, Memory{0 dyn 0} 2026-01-04T03:02:33.812587Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037890 2026-01-04T03:02:33.812758Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553217, Sender [8:1282:3045], Recipient [8:1282:3045]: NKikimr::TEvDataShard::TEvReadContinue 2026-01-04T03:02:33.812806Z node 8 :TX_DATASHARD TRACE: 
datashard_impl.h:3306: StateWorkAsFollower, processing event TEvDataShard::TEvReadContinue 2026-01-04T03:02:33.812897Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} queued, type NKikimr::NDataShard::TDataShard::TTxReadContinue 2026-01-04T03:02:33.812972Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} took 4194304b of static mem, Memory{4194304 dyn 0} 2026-01-04T03:02:33.813063Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3081: 72075186224037890 ReadContinue for iterator# {[8:1313:3064], 0}, firstUnprocessedQuery# 0 2026-01-04T03:02:33.813145Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3174: 72075186224037890 ReadContinue: iterator# {[8:1313:3064], 0}, FirstUnprocessedQuery# 0 2026-01-04T03:02:33.813271Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3338: 72075186224037890 readContinue iterator# {[8:1313:3064], 0} sends rowCount# 0, bytes# 0, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T03:02:33.813355Z node 8 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:3362: 72075186224037890 read iterator# {[8:1313:3064], 0} finished in ReadContinue 2026-01-04T03:02:33.813469Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 2026-01-04T03:02:33.813546Z node 8 :TABLET_EXECUTOR DEBUG: Follower{72075186224037890:1:7} Tx{2, NKikimr::NDataShard::TDataShard::TTxReadContinue} release 4194304b of static, Memory{0 dyn 0} 2026-01-04T03:02:33.814400Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553219, Sender [8:1313:3064], Recipient [8:1282:3045]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T03:02:33.814458Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3308: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2026-01-04T03:02:33.814525Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037890 ReadCancel: { ReadId: 0 } { items { uint32_value: 3 } items { uint32_value: 33 } } >> TMemoryController::Counters [GOOD] >> TMemoryController::Counters_HardLimit >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32--2147483648-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] |98.9%| [TM] {RESULT} ydb/core/tx/datashard/ut_followers/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_followers/unittest >> TCloudEventsProcessorTests::TestCreateCloudEventProcessor [GOOD] >> MediatorTest::ResendSubset [GOOD] >> ProtoTests::CreateQueueFiller [GOOD] >> ProtoTests::UpdateQueueFiller [GOOD] >> ProtoTests::DeleteQueueFiller [GOOD] >> TestFilterSet::FilterGroup >> MediatorTest::ResendNotSubset ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/tools/kqprun/tests/py3test >> test_kqprun_recipe.py::TestKqprunRecipe::test_query_execution [GOOD] Test command err: contrib/python/ydb/py3/ydb/__init__.py:43: UserWarning: Used deprecated behavior, for fix ADD PEERDIR kikimr/public/sdk/python/ydb_v3_new_behavior contrib/python/ydb/py3/ydb/global_settings.py:22: UserWarning: Global allow split transaction is deprecated behaviour. contrib/python/ydb/py3/ydb/global_settings.py:12: UserWarning: Global allow truncated response is deprecated behaviour. 
>> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedUpsertNotBlockedByOlderVolatile [GOOD] >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitNotBlockedByOlderVolatile |98.9%| [TM] {RESULT} ydb/tests/tools/kqprun/tests/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/tools/kqprun/tests/py3test >> DataShardReplication::SplitMergeChangesReboots [GOOD] >> DataShardReplication::ReplicatedTable+UseSink >> TFetchRequestTests::EmptyTopic [GOOD] >> TFetchRequestTests::BadTopicName >> TopicSessionTests::SlowSession [GOOD] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/ymq/actor/cloud_events/cloud_events_ut/unittest >> ProtoTests::DeleteQueueFiller [GOOD] Test command err: 2026-01-04T03:02:25.540115Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591336424818959193:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:02:25.542800Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T03:02:25.791116Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T03:02:25.800226Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:02:25.800336Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:02:25.821459Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:02:25.902238Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:02:25.904404Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591336424818959085:2081] 1767495745528851 != 1767495745528854 2026-01-04T03:02:25.973745Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T03:02:25.973766Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T03:02:25.973773Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T03:02:25.973880Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T03:02:25.999679Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T03:02:26.230732Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:02:26.238041Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: 
NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T03:02:26.552713Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:02:28.700407Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336437703861926:2324], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:28.700551Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:28.701243Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336437703861938:2327], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:28.701339Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336437703861939:2328], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:28.701437Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:28.709921Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710660:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T03:02:28.724531Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591336437703861942:2329], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710660 completed, doublechecking } 2026-01-04T03:02:28.742305Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710661:1, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:02:28.833279Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591336437703862045:2610] txid# 281474976710662, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } ===Execute query: UPSERT INTO`/Root/SQS/CreateCloudEventProcessor/.CloudEventsYmq` (CreatedAt,Id,QueueName,Type,CloudId,FolderId,ResourceId,UserSID,MaskedToken,AuthType,PeerName,RequestId,Labels)VALUES(1767495749933,18163742482231817770,'queue1','CreateMessageQueue','cloud1','folder1','/Root/sqs/folder/queue1','username','maskedToken123','authtype','localhost:8000','req1','{"k1" : "v1"}'); End execute query=== ===Execute query: UPSERT INTO`/Root/SQS/CreateCloudEventProcessor/.CloudEventsYmq` (CreatedAt,Id,QueueName,Type,CloudId,FolderId,ResourceId,UserSID,MaskedToken,AuthType,PeerName,RequestId,Labels)VALUES(1767495750220,17700572643399689625,'queue1','UpdateMessageQueue','cloud1','folder1','/Root/sqs/folder/queue1','username','maskedToken123','authtype','localhost:8000','req1','{"k1" : "v1"}'); End execute query=== ===Execute query: UPSERT INTO`/Root/SQS/CreateCloudEventProcessor/.CloudEventsYmq` (CreatedAt,Id,QueueName,Type,CloudId,FolderId,ResourceId,UserSID,MaskedToken,AuthType,PeerName,RequestId,Labels)VALUES(1767495750334,8831270094876912851,'queue1','DeleteMessageQueue','cloud1','folder1','/Root/sqs/folder/queue1','username','maskedToken123','authtype','localhost:8000','req1','{"k1" : "v1"}'); End execute query=== 2026-01-04T03:02:30.541995Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591336424818959193:2145];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:02:30.542088Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; 2026-01-04T03:02:25.865708Z: component=schemeshard, tx_id=281474976715657, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE DIRECTORY, paths=[/Root/.sys], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.866391Z: component=schemeshard, tx_id=281474976715658, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_permissions], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.866580Z: component=schemeshard, tx_id=281474976715659, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_effective_permissions], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.866722Z: component=schemeshard, tx_id=281474976715660, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_users], status=SUCCESS, 
detailed_status=StatusAccepted 2026-01-04T03:02:25.866828Z: component=schemeshard, tx_id=281474976715661, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/pg_tables], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.866996Z: component=schemeshard, tx_id=281474976715662, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_pdisks], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.867145Z: component=schemeshard, tx_id=281474976715663, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_group_members], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.867226Z: component=schemeshard, tx_id=281474976715664, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.867333Z: component=schemeshard, tx_id=281474976715665, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_request_units_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.867482Z: component=schemeshard, tx_id=281474976715666, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_vslots], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.867661Z: component=schemeshard, tx_id=281474976715667, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_by_tli_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.867775Z: component=schemeshard, tx_id=281474976715668, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/nodes], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.867857Z: component=schemeshard, tx_id=281474976715669, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_cpu_time_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.868026Z: component=schemeshard, tx_id=281474976715670, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_read_bytes_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.868143Z: component=schemeshard, tx_id=281474976715671, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_read_bytes_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.868243Z: component=schemeshard, tx_id=281474976715672, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/streaming_queries], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.868305Z: component=schemeshard, tx_id=281474976715673, remote_address={none}, subject=metadata@system, 
sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/hive_tablets], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.868385Z: component=schemeshard, tx_id=281474976715674, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_storage_pools], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.868443Z: component=schemeshard, tx_id=281474976715675, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_by_tli_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.868592Z: component=schemeshard, tx_id=281474976715676, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/resource_pool_classifiers], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.868838Z: component=schemeshard, tx_id=281474976715677, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/compile_cache_queries], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.868960Z: component=schemeshard, tx_id=281474976715678, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_owners], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.869155Z: component=schemeshard, tx_id=281474976715679, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/query_sessions], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.869294Z: component=schemeshard, tx_id=281474976715680, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_duration_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.869424Z: component=schemeshard, tx_id=281474976715681, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/pg_class], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.869527Z: component=schemeshard, tx_id=281474976715682, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/tables], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.869635Z: component=schemeshard, tx_id=281474976715683, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_cpu_time_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.869836Z: component=schemeshard, tx_id=281474976715684, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_request_units_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.869969Z: component=schemeshard, tx_id=281474976715685, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/partition_stats], status=SUCCESS, detailed_status=StatusAccepted 
2026-01-04T03:02:25.870084Z: component=schemeshard, tx_id=281474976715686, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_groups], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.870201Z: component=schemeshard, tx_id=281474976715687, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/auth_groups], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.870366Z: component=schemeshard, tx_id=281474976715688, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/resource_pools], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.870463Z: component=schemeshard, tx_id=281474976715689, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/query_metrics_one_minute], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.870540Z: component=schemeshard, tx_id=281474976715690, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/ds_storage_stats], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.870635Z: component=schemeshard, tx_id=281474976715691, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_partitions_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:25.870783Z: component=schemeshard, tx_id=281474976715692, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE SYSTEM VIEW, paths=[/Root/.sys/top_queries_by_duration_one_hour], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:26.231425Z: component=schemeshard, tx_id=281474976710657, remote_address={none}, subject={none}, sanitized_token={none}, database={none}, operation=ALTER DATABASE, paths=[//Root], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:26.242894Z: component=schemeshard, tx_id=281474976710658, remote_address={none}, subject={none}, sanitized_token={none}, database=/Root, operation=CREATE DIRECTORY, paths=[/Root/SQS], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:26.466781Z: component=schemeshard, tx_id=281474976710659, remote_address={none}, subject={none}, sanitized_token={none}, database=/Root, operation=CREATE DIRECTORY, paths=[/Root/SQS/Root/SQS/CreateCloudEventProcessor], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:28.711591Z: component=schemeshard, tx_id=281474976710660, remote_address={none}, subject=metadata@system, sanitized_token={none}, database=/Root, operation=CREATE RESOURCE POOL, paths=[.metadata/workload_manager/pools/default], status=SUCCESS, detailed_status=StatusAccepted, new_owner=metadata@system, acl_add=[+(SR|DS):all-users@well-known, +(SR|DS):root@builtin] 2026-01-04T03:02:28.743119Z: component=schemeshard, tx_id=281474976710661, remote_address=::1, subject={none}, sanitized_token={none}, database=/Root, operation=CREATE TABLE, paths=[/Root/SQS/CreateCloudEventProcessor/.CloudEventsYmq], status=SUCCESS, detailed_status=StatusAccepted 2026-01-04T03:02:28.833015Z: component=schemeshard, tx_id=281474976710662, remote_address={none}, subject=metadata@system, sanitized_token={none}, 
database=/Root, operation=CREATE RESOURCE POOL, paths=[default], status=SUCCESS, detailed_status=StatusAlreadyExists, reason=Check failed: path: '/Root/.metadata/workload_manager/pools/default', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges), new_owner=metadata@system, acl_add=[+(SR|DS):all-users@well-known, +(SR|DS):root@builtin] 2026-01-04T03:02:31.976080Z: component=ymq, id=18163742482231817770$CreateMessageQueue$2026-01-04T03:02:31.975742Z, operation=CreateMessageQueue, status=SUCCESS, remote_address=localhost:8000, subject=username, masked_token=maskedToken123, auth_type=authtype, permission=ymq.queues.create, created_at=2026-01-04T03:02:29.933000Z, cloud_id=cloud1, folder_id=folder1, resource_id=/Root/sqs/folder/queue1, request_id=req1, idempotency_id=18163742482231817770$CreateMessageQueue$2026-01-04T03:02:29.933000Z, queue=queue1, labels={"k1" : "v1"} 2026-01-04T03:02:31.982939Z: component=ymq, id=17700572643399689625$UpdateMessageQueue$2026-01-04T03:02:31.975841Z, operation=UpdateMessageQueue, status=SUCCESS, remote_address=localhost:8000, subject=username, masked_token=maskedToken123, auth_type=authtype, permission=ymq.queues.setAttributes, created_at=2026-01-04T03:02:30.220000Z, cloud_id=cloud1, folder_id=folder1, resource_id=/Root/sqs/folder/queue1, request_id=req1, idempotency_id=17700572643399689625$UpdateMessageQueue$2026-01-04T03:02:30.220000Z, queue=queue1, labels={"k1" : "v1"} 2026-01-04T03:02:31.983142Z: component=ymq, id=8831270094876912851$DeleteMessageQueue$2026-01-04T03:02:31.975897Z, operation=DeleteMessageQueue, status=SUCCESS, remote_address=localhost:8000, subject=username, masked_token=maskedToken123, auth_type=authtype, permission=ymq.queues.delete, created_at=2026-01-04T03:02:30.334000Z, cloud_id=cloud1, folder_id=folder1, resource_id=/Root/sqs/folder/queue1, request_id=req1, idempotency_id=8831270094876912851$DeleteMessageQueue$2026-01-04T03:02:30.334000Z, queue=queue1, labels={"k1" : "v1"} |98.9%| [TS] {RESULT} ydb/core/ymq/actor/cloud_events/cloud_events_ut/unittest |98.9%| [TS] {BAZEL_UPLOAD} ydb/core/ymq/actor/cloud_events/cloud_events_ut/unittest >> TopicSessionTests::TwoSessionsWithDifferentSchemes >> test.py::test[solomon-Basic-default.txt] [GOOD] >> test.py::test[solomon-BasicExtractMembers-default.txt] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] >> TDescriberTests::TopicNotTopic [GOOD] >> TDescriberTests::CDC >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int32-2147483647-False] [GOOD] >> KeyValueGRPCService::SimpleCopyUnexistedKeyV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadV1 >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] >> TabletService_ExecuteMiniKQL::ParamsMiniKQLRead [GOOD] >> TabletService_ExecuteMiniKQL::MalformedParams >> CoordinatorVolatile::PlanResentOnReboots [GOOD] >> CoordinatorVolatile::MediatorReconnectPlanRace >> DataShardCompaction::CompactBorrowed [GOOD] >> DataShardCompaction::CompactBorrowedTxStatus >> QuoterWithKesusTest::HandlesAllRequestsForNonExistentResource [GOOD] >> QuoterWithKesusTest::GetsQuota >> TestFilterSet::FilterGroup [GOOD] >> TMemoryController::Counters_HardLimit [GOOD] >> TMemoryController::Counters_NoHardLimit >> TestFilterSet::DuplicationValidation >> 
test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] >> TTxDataShardBuildIndexScan::RunScan [GOOD] >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction >> MediatorTest::ResendNotSubset [GOOD] >> TListAllTopicsTests::ListLimitAndPaging [GOOD] >> TMLPReaderTests::TopicNotExists [GOOD] >> TMLPReaderTests::TopicWithoutConsumer >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] >> test_canonical_requests.py::Test::test_pdisk_set_status_inactive >> MediatorTest::OneCoordinatorResendTxNotLost >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] >> KqpTpch::Query02 [GOOD] >> KqpTpch::Query03 >> SdkCredProvider::PingFromProviderSyncDiscovery ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/public/list_topics/ut/unittest >> TListAllTopicsTests::ListLimitAndPaging [GOOD] Test command err: 2026-01-04T03:02:10.340472Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591336358514102724:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:02:10.340524Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T03:02:10.386055Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:02:10.392485Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T03:02:10.643330Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:02:10.645585Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591336358514102684:2081] 1767495730335026 != 1767495730335029 2026-01-04T03:02:10.658329Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T03:02:10.664253Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:02:10.664426Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:02:10.697084Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:02:10.914136Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T03:02:10.934092Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/0038a1/r3tmp/yandexYqELYq.tmp 2026-01-04T03:02:10.934188Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/0038a1/r3tmp/yandexYqELYq.tmp 
2026-01-04T03:02:10.935482Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/0038a1/r3tmp/yandexYqELYq.tmp 2026-01-04T03:02:10.935599Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T03:02:11.127923Z INFO: TTestServer started on Port 1904 GrpcPort 8952 PQClient connected to localhost:8952 2026-01-04T03:02:11.329760Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:02:11.353223Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:02:11.402756Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:02:11.550588Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T03:02:13.202938Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336371399005608:2329], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:13.206872Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:13.207505Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336371399005619:2332], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:13.207593Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:13.208122Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336371399005623:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:13.215940Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T03:02:13.232027Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591336371399005625:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T03:02:13.290400Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591336371399005689:2644] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T03:02:13.715275Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591336371399005697:2341], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T03:02:13.720837Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YzRkZTE5YTEtYzE2ZjZhN2YtZmQ3OThmMjgtYmE3YTAyZjI=, ActorId: [1:7591336371399005606:2328], ActorState: ExecuteState, LegacyTraceId: 01ke3f93yh1cnxqzvsjhb9ghbp, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T03:02:13.725176Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T03:02:13.760863Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:02:13.789160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:02:13.873093Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. 
Subcribe to ClusterTracker from [1:7591336375693973285:2822] 2026-01-04T03:02:15.340714Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591336358514102724:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:02:15.340814Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-04T03:02:20.240079Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:196: new Create topic request 2026-01-04T03:02:20.242195Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:196: new Create topic request 2026-01-04T03:02:20.292143Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037894] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T03:02:20.292143Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T03:02:20.300871Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7591336401463777307:2940] connected; active server actors: 1 2026-01-04T03:02:20.301097Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037895][] pipe [1:7591336401463777324:2949] connected; active server actors: 1 2026-01-04T03:02:20.301867Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][topic1] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-04T03:02:20.302480Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037895][topic2] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-04T03:02:20.303528Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][topic1] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-04T0 ... 
e] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:41.279903Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T03:02:41.279915Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037896][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T03:02:41.279917Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:41.279930Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T03:02:41.279948Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037894][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T03:02:41.279963Z node 3 :PERSQUEUE DEBUG: partition.cpp:1433: [72075186224037896][Partition][0][StateIdle] Handle TEvPQ::TEvTxCommit Step 1767495761325, TxId 281474976710676 2026-01-04T03:02:41.279980Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T03:02:41.279992Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 1, PendingWrites: 0 2026-01-04T03:02:41.280001Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T03:02:41.280022Z node 3 :PERSQUEUE DEBUG: partition.cpp:2480: [72075186224037896][Partition][0][StateIdle] TPartition::ProcessUserActionAndTxPendingCommit(TTransaction[ProposeConfig]) 2026-01-04T03:02:41.280188Z node 3 :PERSQUEUE DEBUG: partition.cpp:3950: [72075186224037896][Partition][0][StateIdle] Schedule reply tx done 281474976710676 2026-01-04T03:02:41.280215Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 1 2026-01-04T03:02:41.280227Z node 3 :PERSQUEUE DEBUG: partition.cpp:2342: [72075186224037896][Partition][0][StateIdle] Batch completed (1) 2026-01-04T03:02:41.280242Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-04T03:02:41.280310Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72075186224037894] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic2" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/dir1/topic2" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } 
MonitoringProjectId: "" 2026-01-04T03:02:41.280363Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T03:02:41.280426Z node 3 :PERSQUEUE DEBUG: read.h:275: [72075186224037896][PQCacheProxy]CacheProxy. Passthrough write request to KV 2026-01-04T03:02:41.280478Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037894] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-04T03:02:41.281356Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037894] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-04T03:02:41.281509Z node 3 :PERSQUEUE DEBUG: partition.cpp:2153: [72075186224037896][Partition][0][StateIdle] Received TEvKeyValue::TEvResponse 2026-01-04T03:02:41.281699Z node 3 :PERSQUEUE DEBUG: partition_mlp.cpp:122: [72075186224037896][Partition][0][StateIdle] Initializing MLP Consumers: 0 2026-01-04T03:02:41.281750Z node 3 :PERSQUEUE DEBUG: partition_write.cpp:572: [72075186224037896][Partition][0][StateIdle] TPartition::HandleWriteResponse writeNewSize# 0 WriteNewSizeFromSupportivePartitions# 0 2026-01-04T03:02:41.281766Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T03:02:41.281778Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:41.281788Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T03:02:41.281819Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:41.281830Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-04T03:02:41.281847Z node 3 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037896][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T03:02:41.282151Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:399: [PQ: 72075186224037896] Apply new config PartitionConfig { MaxCountInPartition: 2147483647 MaxSizeInPartition: 9223372036854775807 LifetimeSeconds: 64800 SourceIdLifetimeSeconds: 1382400 WriteSpeedInBytesPerSecond: 2097152 BurstSize: 2097152 TotalPartitions: 1 ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } ExplicitChannelProfiles { PoolKind: "test" } SourceIdMaxCounts: 6000000 } PartitionIds: 0 TopicName: "topic3" Version: 0 RequireAuthWrite: true RequireAuthRead: true FormatVersion: 0 Codecs { } TopicPath: "/Root/dir2/topic3" YcCloudId: "" YcFolderId: "" YdbDatabaseId: "" YdbDatabasePath: "/Root" Partitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } AllPartitions { PartitionId: 0 Status: Active CreateVersion: 1 TabletId: 0 } MonitoringProjectId: "" 2026-01-04T03:02:41.282201Z node 3 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037896] disable metering: reason# billing is not enabled in BillingMeteringConfig 
2026-01-04T03:02:41.282310Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:3626: [PQ: 72075186224037896] Send TEvKeyValue::TEvRequest (WRITE_TX_COOKIE) 2026-01-04T03:02:41.283572Z node 3 :PERSQUEUE DEBUG: pq_impl.cpp:1059: [PQ: 72075186224037896] Handle TEvKeyValue::TEvResponse (WRITE_TX_COOKIE) 2026-01-04T03:02:41.317944Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T03:02:41.317981Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:41.317999Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T03:02:41.318021Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:41.318035Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T03:02:41.379964Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 2026-01-04T03:02:41.380009Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:41.380021Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T03:02:41.380034Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:41.380056Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T03:02:41.380099Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T03:02:41.380106Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:41.380111Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T03:02:41.380118Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:41.380124Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist 2026-01-04T03:02:41.425951Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T03:02:41.425992Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:41.426010Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T03:02:41.426032Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:41.426047Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T03:02:41.480903Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][0][StateIdle] Process user action and tx events 
2026-01-04T03:02:41.480938Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:41.480960Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T03:02:41.480979Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:41.480995Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][0][StateIdle] Try persist 2026-01-04T03:02:41.481052Z node 3 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][0][StateIdle] Process user action and tx events 2026-01-04T03:02:41.481062Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:41.481069Z node 3 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T03:02:41.481079Z node 3 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:41.481095Z node 3 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][0][StateIdle] Try persist |98.9%| [TM] {RESULT} ydb/core/persqueue/public/list_topics/ut/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/public/list_topics/ut/unittest >> test.py::test_local [GOOD] >> TDqSolomonWriteActorTest::TestWriteWithTimeseries [GOOD] >> TDqSolomonWriteActorTest::TestCheckpoints |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_1__ASYNC-pk_types5-all_types5-index5---ASYNC] [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] >> TestFilterSet::DuplicationValidation [GOOD] >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> TestFilterSet::CompilationValidation >> TDqSolomonWriteActorTest::TestCheckpoints [GOOD] >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v2-client0] [GOOD] >> SdkCredProvider::PingFromProviderSyncDiscovery [GOOD] >> SdkCredProvider::PingFromProviderAsyncDiscovery >> TabletService_ExecuteMiniKQL::MalformedParams [GOOD] >> TabletService_ExecuteMiniKQL::MalformedProgram >> KqpTpch::Query03 [GOOD] >> KqpTpch::Query04 >> DataShardSnapshotIsolation::ReadWriteUpsertOnCommitNotBlockedByOlderVolatile [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertBlockedByOlderVolatile >> test.py::test[solomon-BasicExtractMembers-default.txt] [GOOD] >> test.py::test[solomon-Downsampling-default.txt] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] >> DataShardReplication::ReplicatedTable+UseSink [GOOD] >> DataShardReplication::ReplicatedTable-UseSink >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[UInt32-4294967295-False] [GOOD] >> 
test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] >> CoordinatorVolatile::MediatorReconnectPlanRace [GOOD] >> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx >> Graph::CreateGraphShard >> KeyValueGRPCService::SimpleWriteReadV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadV2 >> KqpTpch::Query04 [GOOD] >> KqpTpch::Query05 >> TQuotaTracker::TestSmallMessages [GOOD] >> TQuotaTracker::TestBigMessages [GOOD] >> TWriteQuoterTests::WaitDeduplicationIdQuota >> TMemoryController::Counters_NoHardLimit [GOOD] >> TMemoryController::Config_ConsumerLimits >> TopicSessionTests::TwoSessionsWithDifferentSchemes [GOOD] >> MediatorTest::OneCoordinatorResendTxNotLost [GOOD] >> QuoterWithKesusTest::GetsQuota [GOOD] >> QuoterWithKesusTest::GetsBigQuota >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes >> test_postgres.py::TestPostgresSuite::test_postgres_suite[select_1] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] >> TestFilterSet::CompilationValidation [GOOD] >> TestFilterSet::Watermark >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[text] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-True] [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serializable/py3test >> test.py::test_local [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] |98.9%| [TM] {RESULT} ydb/tests/functional/serializable/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serializable/py3test >> Graph::CreateGraphShard [GOOD] >> Graph::UseGraphShard >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64--9223372036854775808-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/mediator/ut/unittest >> MediatorTest::OneCoordinatorResendTxNotLost [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T03:01:52.554148Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:01:52.723674Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:01:52.724383Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:01:52.724445Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:01:53.168496Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:53.168652Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:53.257723Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495709769043 != 1767495709769046 2026-01-04T03:01:53.273320Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:53.323765Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:53.429490Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:01:53.743071Z node 1 :TX_MEDIATOR INFO: mediator__schema.cpp:23: tablet# 72057594047365120 TTxSchema Complete 2026-01-04T03:01:53.743708Z node 1 :TX_MEDIATOR INFO: mediator__init.cpp:88: tablet# 72057594047365120 CreateTxInit wait TEvMediatorConfiguration for switching to StateWork from external 2026-01-04T03:01:53.744451Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594047365120 server# [1:853:2738] connected 2026-01-04T03:01:53.744612Z node 1 :TX_MEDIATOR NOTICE: mediator_impl.cpp:133: tablet# 72057594047365120 actor# [1:836:2728] HANDLE TEvMediatorConfiguration Version# 1 2026-01-04T03:01:53.745243Z node 1 :TX_MEDIATOR DEBUG: mediator__configure.cpp:77: tablet# 72057594047365120 version# 1 TTxConfigure Complete 2026-01-04T03:01:53.745425Z node 1 :TX_MEDIATOR INFO: mediator__init.cpp:64: tablet# 72057594047365120 CreateTxInit Complete ... waiting for watcher to connect 2026-01-04T03:01:53.746058Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594047365120 server# [1:859:2743] connected 2026-01-04T03:01:53.746154Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:308: tablet# 72057594047365120 FORWARD Watch from# [1:857:2742] to# [1:855:2740] ExecQueue 2026-01-04T03:01:53.746203Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:175: Actor# [1:855:2740] MediatorId# 72057594047365120 HANDLE TEvGranularWatch from# [1:857:2742] bucket# 0 ... 
waiting for watcher to connect (done) 2026-01-04T03:01:53.746457Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:308: tablet# 72057594047365120 FORWARD Watch from# [1:857:2742] to# [1:855:2740] ExecQueue 2026-01-04T03:01:53.746523Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:159: Actor# [1:855:2740] MediatorId# 72057594047365120 HANDLE TEvWatch 2026-01-04T03:01:53.746581Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:164: Actor# [1:855:2740] MediatorId# 72057594047365120 SEND TEvWatchBucket to# [1:856:2741] bucket.ActiveActor 2026-01-04T03:01:53.746651Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:380: Actor# [1:856:2741] Mediator# 72057594047365120 HANDLE {TEvWatchBucket Source# [1:857:2742]} 2026-01-04T03:01:53.746733Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:391: Actor# [1:856:2741] Mediator# 72057594047365120 SEND to# [1:857:2742] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 0} 2026-01-04T03:01:53.757687Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594047365120 server# [1:863:2747] connected 2026-01-04T03:01:53.757853Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:139: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2026-01-04T03:01:53.757971Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:83: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [1:861:2745] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 951 Coordinator# 72057594046316545 2026-01-04T03:01:53.758317Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [1:855:2740] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1000Steps: {{TCoordinatorStep step# 1000 PrevStep# 0}}} marker# M1 2026-01-04T03:01:53.758372Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [1:855:2740] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:856:2741] bucket.ActiveActor step# 1000 2026-01-04T03:01:53.758460Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [1:856:2741] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1000} 2026-01-04T03:01:53.758665Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:171: Actor# [1:856:2741] Mediator# 72057594047365120 SEND to# [1:857:2742] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1000} ... 
waiting for blocked plan step 2026-01-04T03:01:53.778471Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2026-01-04T03:01:53.778540Z node 1 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [1] 2026-01-04T03:01:53.778644Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:205: tablet# 72057594047365120 SEND EvCommitStep to# [1:855:2740] ExecQueue {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:861:2745]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M0 2026-01-04T03:01:53.778781Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [1:855:2740] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 1000 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [1:861:2745]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}}}}} marker# M1 2026-01-04T03:01:53.778843Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 1 marker# M2 2026-01-04T03:01:53.778902Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [1:855:2740] MediatorId# 72057594047365120 SEND Ev to# [1:856:2741] step# 1010 forTablet# 72057594047365121 txid# 1 marker# M3 2026-01-04T03:01:53.778966Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [1:855:2740] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [1:856:2741] bucket.ActiveActor step# 1010 2026-01-04T03:01:53.779056Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [1:856:2741] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [1:861:2745]}}} marker# M4 2026-01-04T03:01:53.779213Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [1:856:2741] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2026-01-04T03:01:53.780615Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [1:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [1:885:2759] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-04T03:01:53.780716Z node 1 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2026-01-04T03:01:53.780766Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [1:856:2741] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 ... waiting for blocked plan step (done) ... waiting for no pending commands 2026-01-04T03:01:53.781214Z node 1 :TX_MEDIATOR DEBUG: mediator_impl.cpp:308: tablet# 72057594047365120 FORWARD Watch from# [1:857:2742] to# [1:855:2740] ExecQueue 2026-01-04T03:01:53.781285Z node 1 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:189: Actor# [1:855:2740] MediatorId# 72057594047365120 HANDLE TEvGranularWatchModify from# [1:857:2742] bucket# 0 ... 
waiting for no pending commands (done) ... unblocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet ... waiting for watch updates 2026-01-04T03:01:53.781617Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:342: Actor# [1:856:2741] Mediator# 72057594047365120 HANDLE {TEvPlanStepAccepted TabletId# 72057594047365121 step# 1010} 2026-01-04T03:01:53.781692Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:415: Actor# [1:856:2741] Mediator# 72057594047365120 SEND to# [1:861:2745] {TEvPlanStepAck TabletId# 72057594047365121 step# 1010 txid# 1} 2026-01-04T03:01:53.781835Z node 1 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:171: Actor# [1:856:2741] Mediator# 72057594047365120 SEND to# [1:857:2742] {TEvUpdate Mediator# 72057594047365120 Bucket# 0 TimeBarrier# 1010} ... waiting for watch updates (done) ... waiting for SysViewsRoster update finished 2026-01-04T03:01:57.517154Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:01:57.523154Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T03:01:57.527365Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:01:57.527659Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:01:57.527835Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:01:57.783678Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:57.783787Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:57.811727Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767495714300782 != 1767495714300786 2026-01-04T03:01:57.817788Z node 2 :IMPOR ... EDIATOR DEBUG: mediator_impl.cpp:83: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:901:2767] Cookie# 1 CompleteStep# 0 LatestKnownStep# 0 SubjectiveTime# 950 Coordinator# 72057594046316546 2026-01-04T03:02:46.948493Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 1010 2026-01-04T03:02:46.948545Z node 12 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316545], step# [1010] transactions [1] 2026-01-04T03:02:46.948621Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316546 step# 1010 2026-01-04T03:02:46.948650Z node 12 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316546], step# [1010] transactions [1] 2026-01-04T03:02:46.948774Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:205: tablet# 72057594047365120 SEND EvCommitStep to# [12:855:2740] ExecQueue {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [12:898:2764]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}}}{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [12:901:2767]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}}}} marker# M0 2026-01-04T03:02:46.948903Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [12:855:2740] MediatorId# 72057594047365120 HANDLE TEvCommitStep {TMediateStep From 0 To# 1010Steps: {{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 1 AckTo# [12:898:2764]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 1}{tablet# 72057594047365122 txid# 1}}}{TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [12:901:2767]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}}}} marker# M1 2026-01-04T03:02:46.948957Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. 
TxIds: txid# 1 txid# 2 marker# M2 2026-01-04T03:02:46.949011Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [12:855:2740] MediatorId# 72057594047365120 SEND Ev to# [12:856:2741] step# 1010 forTablet# 72057594047365121 txid# 1 txid# 2 marker# M3 2026-01-04T03:02:46.949061Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. TxIds: txid# 1 txid# 2 marker# M2 2026-01-04T03:02:46.949090Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [12:855:2740] MediatorId# 72057594047365120 SEND Ev to# [12:856:2741] step# 1010 forTablet# 72057594047365122 txid# 1 txid# 2 marker# M3 2026-01-04T03:02:46.949129Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [12:855:2740] MediatorId# 72057594047365120 SEND TEvStepPlanComplete to# [12:856:2741] bucket.ActiveActor step# 1010 2026-01-04T03:02:46.949220Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [12:898:2764]}{TTx Moderator# 0 txid# 2 AckTo# [12:901:2767]}}} marker# M4 2026-01-04T03:02:46.949414Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE {TEvCommitTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 1 AckTo# [12:898:2764]}{TTx Moderator# 0 txid# 2 AckTo# [12:901:2767]}}} marker# M4 2026-01-04T03:02:46.949665Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE {TEvStepPlanComplete step# 1010} 2026-01-04T03:02:46.950541Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [12:909:2773] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-04T03:02:46.950610Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2026-01-04T03:02:46.950650Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 2, marker M5lu 2026-01-04T03:02:46.950694Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [12:856:2741] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 2026-01-04T03:02:46.951073Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365122 Status: OK ServerId: [12:910:2774] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-04T03:02:46.951118Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 1, marker M5lu 2026-01-04T03:02:46.951146Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 2, marker M5lu 2026-01-04T03:02:46.951178Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [12:856:2741] Mediator# 72057594047365120 SEND to# 72057594047365122 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365122} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 2026-01-04T03:02:46.962488Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594047365120 server# [12:913:2777] connected 2026-01-04T03:02:46.962621Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:139: tablet# 72057594047365120 HANDLE EvCoordinatorSync 2026-01-04T03:02:46.962668Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:83: tablet# 72057594047365120 SEND EvCoordinatorSyncResult to# [12:911:2775] Cookie# 2 CompleteStep# 1010 LatestKnownStep# 1010 SubjectiveTime# 951 Coordinator# 72057594046316546 2026-01-04T03:02:46.962910Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594047365120 HANDLE EvCoordinatorStep coordinator# 72057594046316546 step# 1010 2026-01-04T03:02:46.962960Z node 12 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594047365120], Coordinator [72057594046316546], step# [1010] transactions [1] 2026-01-04T03:02:46.963036Z node 12 :TX_MEDIATOR DEBUG: mediator_impl.cpp:223: tablet# 72057594047365120 SEND EvRequestLostAcks to# [12:855:2740] ExecQueue step {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}} 2026-01-04T03:02:46.963141Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:130: Actor# [12:855:2740] MediatorId# 72057594047365120 HANDLE TEvRequestLostAcks {TCoordinatorStep step# 1010 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 2 AckTo# [0:0:0]}}TabletsToTransaction: {{tablet# 72057594047365121 txid# 2}{tablet# 72057594047365122 txid# 2}}} AckTo# [12:911:2775] 2026-01-04T03:02:46.963188Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365121]. TxIds: txid# 2 marker# M2 2026-01-04T03:02:46.963246Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [12:855:2740] MediatorId# 72057594047365120 SEND Ev to# [12:856:2741] step# 1010 forTablet# 72057594047365121 txid# 2 marker# M3 2026-01-04T03:02:46.963299Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: execute_queue.cpp:44: Mediator exec queue [72057594047365120], step# 1010 for tablet [72057594047365122]. 
TxIds: txid# 2 marker# M2 2026-01-04T03:02:46.963350Z node 12 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [12:855:2740] MediatorId# 72057594047365120 SEND Ev to# [12:856:2741] step# 1010 forTablet# 72057594047365122 txid# 2 marker# M3 2026-01-04T03:02:46.963430Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:222: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365121 Transactions {{TTx Moderator# 0 txid# 2 AckTo# [12:911:2775]}}} 2026-01-04T03:02:46.963499Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:222: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE {TEvOoOTabletStep step# 1010 TabletId# 72057594047365122 Transactions {{TTx Moderator# 0 txid# 2 AckTo# [12:911:2775]}}} 2026-01-04T03:02:46.978458Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:294: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594047365121 ClientId: [12:905:2771] ServerId: [12:909:2773] } 2026-01-04T03:02:46.998609Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365121 Status: OK ServerId: [12:934:2787] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2026-01-04T03:02:46.998705Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 1, marker M5lu 2026-01-04T03:02:46.998745Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365121, step# 1010, txid# 2, marker M5lu 2026-01-04T03:02:46.998795Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [12:856:2741] Mediator# 72057594047365120 SEND to# 72057594047365121 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365121} ... blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 2026-01-04T03:02:47.011369Z node 12 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:02:47.051553Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:294: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientDestroyed { TabletId: 72057594047365122 ClientId: [12:906:2772] ServerId: [12:910:2774] } 2026-01-04T03:02:47.067760Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [12:856:2741] Mediator# 72057594047365120 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365122 Status: OK ServerId: [12:973:2803] Leader: 1 Dead: 0 Generation: 3 VersionInfo: } 2026-01-04T03:02:47.067854Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 1, marker M5lu 2026-01-04T03:02:47.067897Z node 12 :TX_MEDIATOR_PRIVATE DEBUG: tablet_queue.cpp:117: Send from 72057594047365120 to tablet 72057594047365122, step# 1010, txid# 2, marker M5lu 2026-01-04T03:02:47.067948Z node 12 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [12:856:2741] Mediator# 72057594047365120 SEND to# 72057594047365122 {TEvPlanStep step# 1010 MediatorId# 72057594047365120 TabletID 72057594047365122} ... 
blocking NKikimr::TEvTxProcessing::TEvPlanStep from TX_MEDIATOR_TABLET_QUEUE_ACTOR to NKikimr::NTxMediator::NTestSuiteMediatorTest::TTargetTablet cookie 0 |98.9%| [TM] {RESULT} ydb/core/tx/mediator/ut/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/mediator/ut/unittest >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown >> TFetchRequestTests::BadTopicName [GOOD] >> TFetchRequestTests::CheckAccess >> TabletService_ExecuteMiniKQL::MalformedProgram [GOOD] >> TabletService_ExecuteMiniKQL::DryRunEraseRow |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> TDescriberTests::CDC [GOOD] >> KqpTpch::Query05 [GOOD] >> KqpTpch::Query06 >> test_ttl.py::TestTTL::test_ttl[table_Date_0__ASYNC-pk_types31-all_types31-index31-Date--ASYNC] [GOOD] >> TestFilterSet::Watermark [GOOD] >> Graph::UseGraphShard [GOOD] >> Graph::MemoryBackendFullCycle >> TestFilterSet::WatermarkWhere >> test.py::test[solomon-Downsampling-default.txt] [GOOD] >> test.py::test[solomon-DownsamplingValidSettings-default.txt] >> KqpTpch::Query06 [GOOD] >> KqpTpch::Query07 >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] ------- [TS] {asan, default-linux-x86_64, release} ydb/core/persqueue/public/describer/ut/unittest >> TDescriberTests::CDC [GOOD] Test command err: 2026-01-04T03:02:07.805042Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591336343956633628:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:02:07.805118Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T03:02:07.868332Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T03:02:08.124207Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:02:08.124291Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:02:08.171820Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T03:02:08.178696Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591336343956633595:2081] 1767495727802330 != 1767495727802333 2026-01-04T03:02:08.183503Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:02:08.186482Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:02:08.347396Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T03:02:08.397425Z node 1 :NET_CLASSIFIER WARN: 
net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/0024df/r3tmp/yandexHlaKHl.tmp 2026-01-04T03:02:08.397504Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/0024df/r3tmp/yandexHlaKHl.tmp 2026-01-04T03:02:08.398684Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/0024df/r3tmp/yandexHlaKHl.tmp 2026-01-04T03:02:08.398828Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T03:02:08.596624Z INFO: TTestServer started on Port 29274 GrpcPort 19977 PQClient connected to localhost:19977 2026-01-04T03:02:08.812773Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:02:08.819332Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:02:08.824868Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T03:02:08.911340Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:02:10.694984Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336356841536516:2330], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:10.695156Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336356841536528:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:10.695241Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:10.695991Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336356841536532:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:10.696082Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:10.702160Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T03:02:10.714113Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591336356841536531:2335], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T03:02:10.799798Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591336356841536597:2639] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T03:02:11.103652Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591336356841536612:2342], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T03:02:11.109237Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=NTM4MTMzNGMtOWM0Yjc1OTUtZTg2MTQ4MzYtMjQ1MDNlOTU=, ActorId: [1:7591336356841536514:2329], ActorState: ExecuteState, LegacyTraceId: 01ke3f91fe50g8j4cqnwe977w4, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T03:02:11.112920Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T03:02:11.145693Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:02:11.176730Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:02:11.264598Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. 
Subcribe to ClusterTracker from [1:7591336361136504198:2819] 2026-01-04T03:02:12.807305Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591336343956633628:2069];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:02:12.807392Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-04T03:02:17.659699Z :TopicExists INFO: TTopicSdkTestSetup started 2026-01-04T03:02:17.694029Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-04T03:02:17.724502Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7591336386906308217:2930] connected; active server actors: 1 2026-01-04T03:02:17.725422Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-04T03:02:17.728860Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-04T03:02:17.732174Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-04T03:02:17.752905Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T03:02:17.765162Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T03:02:17.766278Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-04T03:02:17.766374Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T03:02:17.766429Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72075186224037892] doesn't have tx wr ... 
Dir: 100000 MaxAclBytesSize: 10240 MaxTableColumns: 200 MaxTableColumnNameLength: 255 MaxTableKeyColumns: 30 MaxTableIndices: 20 MaxShards: 200000 MaxShardsInPath: 35000 MaxConsistentCopyTargets: 10000 MaxPathElementLength: 255 ExtraPathSymbolsAllowed: "!\"#$%&\'()*+,-.:;<=>?@[\\]^_`{|}~" MaxPQPartitions: 1000000 MaxTableCdcStreams: 5 MaxExports: 10 MaxImports: 10 MaxColumnTableColumns: 10000 } } } PathId: 52 PathOwnerId: 72057594046644480 }, by path# { Subscriber: { Subscriber: [4:7591336526834372761:3091] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 4 TableKind: 0 Created: 1 CreateStep: 1767495769438 PathId: [OwnerId: 72057594046644480, LocalPathId: 52] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 1 IsVirtual: 0 SchemaVersion: 0 }, by pathId# { Subscriber: { Subscriber: [4:7591336526834372761:3091] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 4 TableKind: 0 Created: 1 CreateStep: 1767495769438 PathId: [OwnerId: 72057594046644480, LocalPathId: 52] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 1 IsVirtual: 0 SchemaVersion: 0 } 2026-01-04T03:02:49.415736Z node 4 :PQ_DESCRIBER DEBUG: describer.cpp:30: [[4:7591336526834372771:3094]] Create request [/Root/table1/feed] with SyncVersion=0 2026-01-04T03:02:49.415911Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7591336483884698132:2119], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 1 ResultSet [{ Path: Root/table1/feed TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T03:02:49.416026Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7591336483884698132:2119], cacheItem# { Subscriber: { Subscriber: [4:7591336526834372746:3088] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 13 TableKind: 0 Created: 1 CreateStep: 1767495769438 PathId: [OwnerId: 72057594046644480, LocalPathId: 51] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/table1/feed TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T03:02:49.416208Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7591336526834372772:3095], recipient# [4:7591336526834372771:3094], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 1 ResultSet [{ Path: Root/table1/feed TableId: [72057594046644480:51:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindCdcStream DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } Children [streamImpl] }] } 2026-01-04T03:02:49.416240Z node 4 :PQ_DESCRIBER DEBUG: describer.cpp:54: [[4:7591336526834372771:3094]] Handle 
TEvTxProxySchemeCache::TEvNavigateKeySetResult 2026-01-04T03:02:49.416261Z node 4 :PQ_DESCRIBER DEBUG: describer.cpp:84: [[4:7591336526834372771:3094]] Path '/Root/table1/feed' is a CDC 2026-01-04T03:02:49.416298Z node 4 :PQ_DESCRIBER DEBUG: describer.cpp:30: [[4:7591336526834372771:3094]] Create request [/Root/table1/feed/streamImpl] with SyncVersion=0 2026-01-04T03:02:49.416449Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7591336483884698132:2119], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 1 ResultSet [{ Path: Root/table1/feed/streamImpl TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T03:02:49.416532Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7591336483884698132:2119], cacheItem# { Subscriber: { Subscriber: [4:7591336526834372761:3091] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusSuccess Kind: 4 TableKind: 0 Created: 1 CreateStep: 1767495769438 PathId: [OwnerId: 72057594046644480, LocalPathId: 52] DomainId: [OwnerId: 72057594046644480, LocalPathId: 1] IsPrivatePath: 1 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/table1/feed/streamImpl TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T03:02:49.416650Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7591336526834372773:3096], recipient# [4:7591336526834372771:3094], result# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 1 ResultSet [{ Path: Root/table1/feed/streamImpl TableId: [72057594046644480:52:0] RequestType: ByPath Operation: OpList RedirectRequired: true ShowPrivatePath: true SyncVersion: false Status: Ok Kind: KindTopic DomainInfo { DomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] ResourcesDomainKey: [OwnerId: 72057594046644480, LocalPathId: 1] Params { Version: 1 PlanResolution: 7 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 } ServerlessComputeResourcesMode: (empty maybe) Users: [] Groups: [] } }] } 2026-01-04T03:02:49.416673Z node 4 :PQ_DESCRIBER DEBUG: describer.cpp:54: [[4:7591336526834372771:3094]] Handle TEvTxProxySchemeCache::TEvNavigateKeySetResult 2026-01-04T03:02:49.416702Z node 4 :PQ_DESCRIBER DEBUG: describer.cpp:103: [[4:7591336526834372771:3094]] Path '/Root/table1/feed/streamImpl' SUCCESS 2026-01-04T03:02:49.418707Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T03:02:49.418732Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:49.418750Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T03:02:49.418772Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:49.418786Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 
2026-01-04T03:02:49.497995Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-04T03:02:49.498037Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:49.498055Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T03:02:49.498081Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:49.498097Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-04T03:02:49.519163Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T03:02:49.519205Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:49.519223Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T03:02:49.519249Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:49.519265Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T03:02:49.568908Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:2810: Handle TEvTxProxySchemeCache::TEvNavigateKeySet: self# [4:7591336483884698132:2119], request# { ErrorCount: 0 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }] } 2026-01-04T03:02:49.569090Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:1892: FillEntry for TNavigate: self# [4:7591336483884698132:2119], cacheItem# { Subscriber: { Subscriber: [4:7591336488179666120:2593] DomainOwnerId: 72057594046644480 Type: 2 SyncCookie: 0 } Filled: 1 Status: StatusPathDoesNotExist Kind: 0 TableKind: 0 Created: 0 CreateStep: 0 PathId: DomainId: IsPrivatePath: 0 IsVirtual: 0 SchemaVersion: 0 }, entry# { Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: Unknown Kind: KindUnknown DomainInfo }, props# { Cookie: 0 IsSync: false Partial: 0 } 2026-01-04T03:02:49.569691Z node 4 :TX_PROXY_SCHEME_CACHE DEBUG: cache.cpp:268: Send result: self# [4:7591336526834372776:3097], recipient# [4:7591336526834372775:2486], result# { ErrorCount: 1 DatabaseName: /Root DomainOwnerId: 0 Instant: 0 ResultSet [{ Path: Root/.metadata/initialization/migrations TableId: [18446744073709551615:18446744073709551615:0] RequestType: ByPath Operation: OpPath RedirectRequired: true ShowPrivatePath: false SyncVersion: false Status: PathErrorUnknown Kind: KindUnknown DomainInfo }] } 2026-01-04T03:02:49.602120Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][0][StateIdle] Process user action and tx events 2026-01-04T03:02:49.602160Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: 
[72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:49.602177Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T03:02:49.602209Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:49.602227Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][0][StateIdle] Try persist 2026-01-04T03:02:49.619534Z node 4 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T03:02:49.619564Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:49.619591Z node 4 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T03:02:49.619613Z node 4 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:02:49.619627Z node 4 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |98.9%| [TS] {RESULT} ydb/core/persqueue/public/describer/ut/unittest |98.9%| [TS] {BAZEL_UPLOAD} ydb/core/persqueue/public/describer/ut/unittest >> CoordinatorVolatile::CoordinatorMigrateUncommittedVolatileTx [GOOD] >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep >> ReadUpdateWrite::Load >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Int64-9223372036854775807-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] >> DataShardReplication::ReplicatedTable-UseSink [GOOD] >> DataShardReplication::ApplyChangesToReplicatedTable >> TMemoryController::Config_ConsumerLimits [GOOD] >> TMemoryController::SharedCache >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertBlockedByOlderVolatile [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertDuplicateKeyAtSnapshot |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest >> SdkCredProvider::PingFromProviderAsyncDiscovery [GOOD] |98.9%| [TM] {RESULT} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sdk/cpp/sdk_credprovider/unittest >> test_postgres.py::TestPostgresSuite::test_postgres_suite[withtable] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestPgYdbS3TTL::test_s3[table_index_0_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] >> KeyValueGRPCService::SimpleWriteReadV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPathV1 >> QuoterWithKesusTest::GetsBigQuota [GOOD] >> QuoterWithKesusTest::GetsBigQuotaWithDeadline >> TMLPReaderTests::TopicWithoutConsumer [GOOD] >> TMLPReaderTests::EmptyTopic |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] >> TestFilterSet::WatermarkWhere [GOOD] >> TabletService_ExecuteMiniKQL::DryRunEraseRow [GOOD] >> TabletService_ExecuteMiniKQL::OnlyAdminsAllowed >> 
TestFilterSet::WatermarkWhereFalse >> TopicSessionTests::TwoSessionsWithDifferentColumnTypes [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-True] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] >> TopicSessionTests::RestartSessionIfQueryStopped >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-0-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/library/yql/providers/solomon/actors/ut/unittest >> TDqSolomonWriteActorTest::TestShouldReturnAfterCheckpoint [GOOD] Test command err: 2026-01-04T03:02:00.868921Z node 1 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2026-01-04T03:02:00.870753Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-04T03:02:00.872079Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2026-01-04T03:02:00.873358Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2026-01-04T03:02:00.873403Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-04T03:02:00.880252Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sun, 04 Jan 2026 03:02:00 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1} 2026-01-04T03:02:00.883061Z node 1 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-04T03:02:11.073260Z node 2 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2026-01-04T03:02:11.079724Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 7500 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-04T03:02:11.095276Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2026-01-04T03:02:11.106966Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2026-01-04T03:02:11.119005Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2026-01-04T03:02:11.133088Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2026-01-04T03:02:11.144724Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2026-01-04T03:02:11.155516Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. 
Push 109013 bytes of data to buffer 2026-01-04T03:02:11.167421Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2026-01-04T03:02:11.173640Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 54513 bytes of data to buffer 2026-01-04T03:02:11.173986Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 107903 bytes to solomon 2026-01-04T03:02:11.174304Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-04T03:02:11.174675Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-04T03:02:11.174697Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-04T03:02:11.274210Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Sun, 04 Jan 2026 03:02:11 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-04T03:02:11.274614Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-04T03:02:11.274641Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-04T03:02:11.322246Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Sun, 04 Jan 2026 03:02:11 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-04T03:02:11.322768Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-04T03:02:11.322794Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-04T03:02:11.374363Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Sun, 04 Jan 2026 03:02:11 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-04T03:02:11.374792Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-04T03:02:11.374872Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-04T03:02:11.470363Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[4]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Sun, 04 Jan 2026 03:02:11 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-04T03:02:11.470839Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. 
Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-04T03:02:11.470860Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-04T03:02:11.521202Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[3]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Sun, 04 Jan 2026 03:02:11 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-04T03:02:11.521483Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 500 metrics with size of 54513 bytes to solomon 2026-01-04T03:02:11.521508Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer MaxRequestsInflight 2026-01-04T03:02:11.619427Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[5]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Sun, 04 Jan 2026 03:02:11 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-04T03:02:11.619555Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-04T03:02:11.649283Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[7]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Sun, 04 Jan 2026 03:02:11 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 500} 2026-01-04T03:02:11.649407Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-04T03:02:11.704826Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[6]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Sun, 04 Jan 2026 03:02:11 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-04T03:02:11.704968Z node 2 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-04T03:02:22.302824Z node 3 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2026-01-04T03:02:22.331144Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 7500 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-04T03:02:22.345213Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 79903 bytes of data to buffer 2026-01-04T03:02:22.357023Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2026-01-04T03:02:22.368537Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2026-01-04T03:02:22.378399Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2026-01-04T03:02:22.388105Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2026-01-04T03:02:22.395869Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. 
Push 81013 bytes of data to buffer 2026-01-04T03:02:22.404830Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 81013 bytes of data to buffer 2026-01-04T03:02:22.408492Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 40513 bytes of data to buffer 2026-01-04T03:02:22.408771Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 79903 bytes to solomon 2026-01-04T03:02:22.409075Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2026-01-04T03:02:22.409409Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2026-01-04T03:02:22.409448Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-04T03:02:22.498009Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sun, 04 Jan 2026 03:02:22 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-04T03:02:22.498446Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2026-01-04T03:02:22.498462Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-04T03:02:22.589066Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sun, 04 Jan 2026 03:02:22 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-04T03:02:22.589519Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2026-01-04T03:02:22.589540Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-04T03:02:22.673273Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sun, 04 Jan 2026 03:02:22 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-04T03:02:22.673663Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 81013 bytes to solomon 2026-01-04T03:02:22.673678Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-04T03:02:22.757546Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[3]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sun, 04 Jan 2026 03:02:22 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-04T03:02:22.757925Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. 
Sent 1000 metrics with size of 81013 bytes to solomon 2026-01-04T03:02:22.757955Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-04T03:02:22.842333Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[4]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sun, 04 Jan 2026 03:02:22 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-04T03:02:22.842559Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 500 metrics with size of 40513 bytes to solomon 2026-01-04T03:02:22.842576Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer MaxRequestsInflight 2026-01-04T03:02:22.926025Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[5]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sun, 04 Jan 2026 03:02:22 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-04T03:02:22.926156Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-04T03:02:23.020178Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[6]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sun, 04 Jan 2026 03:02:23 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 1000} 2026-01-04T03:02:23.020308Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-04T03:02:23.063108Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[7]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 25 Date: Sun, 04 Jan 2026 03:02:23 GMT Server: Python/3.12 aiohttp/3.10.6 {"sensorsProcessed": 500} 2026-01-04T03:02:23.063222Z node 3 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-04T03:02:33.640128Z node 4 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2026-01-04T03:02:33.640318Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 10 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-04T03:02:33.640727Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 579 bytes of data to buffer 2026-01-04T03:02:33.640891Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 10 metrics with size of 579 bytes to solomon 2026-01-04T03:02:33.640905Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-04T03:02:33.647339Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. 
Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 27 Date: Sun, 04 Jan 2026 03:02:33 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 10} 2026-01-04T03:02:33.647457Z node 4 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-04T03:02:43.928374Z node 5 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2026-01-04T03:02:43.930703Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 2400 items to send. Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-04T03:02:43.947264Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 107903 bytes of data to buffer 2026-01-04T03:02:43.963859Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 109013 bytes of data to buffer 2026-01-04T03:02:43.970699Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 43613 bytes of data to buffer 2026-01-04T03:02:43.971024Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 107903 bytes to solomon 2026-01-04T03:02:43.971287Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1000 metrics with size of 109013 bytes to solomon 2026-01-04T03:02:43.971367Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 400 metrics with size of 43613 bytes to solomon 2026-01-04T03:02:43.971384Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: MaxRequestsInflight 2026-01-04T03:02:44.003885Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[2]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 28 Date: Sun, 04 Jan 2026 03:02:44 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 400} 2026-01-04T03:02:44.004013Z node 5 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:373: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 2 2026-01-04T03:02:44.097020Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Sun, 04 Jan 2026 03:02:44 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-04T03:02:44.097146Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: CheckpointInProgress Empty buffer 2026-01-04T03:02:44.138354Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 29 Date: Sun, 04 Jan 2026 03:02:44 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1000} 2026-01-04T03:02:44.138501Z node 5 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-04T03:02:44.742026Z node 6 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:117: TxId: TxId-42, Solomon sink. Init 2026-01-04T03:02:44.742366Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 1 items to send. 
Checkpoint: 1. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-04T03:02:44.742513Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2026-01-04T03:02:44.742663Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2026-01-04T03:02:44.742688Z node 6 :KQP_COMPUTE DEBUG: dq_solomon_write_actor.cpp:373: TxId: TxId-42, Solomon sink. Process checkpoint. Inflight before checkpoint: 1 2026-01-04T03:02:44.756188Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[0]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sun, 04 Jan 2026 03:02:44 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1} 2026-01-04T03:02:44.756344Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-04T03:02:44.756475Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:135: TxId: TxId-42, Solomon sink. Got 1 items to send. Checkpoint: 0. Send queue: 0. Inflight: 0. Checkpoint in progress: 0 2026-01-04T03:02:44.756613Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:292: TxId: TxId-42, Solomon sink. Push 121 bytes of data to buffer 2026-01-04T03:02:44.756742Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:363: TxId: TxId-42, Solomon sink. Sent 1 metrics with size of 121 bytes to solomon 2026-01-04T03:02:44.756760Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. Reason: Empty buffer 2026-01-04T03:02:44.761846Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:386: TxId: TxId-42, Solomon sink. Solomon response[1]: HTTP/1.1 200 OK Content-Type: application/json; charset=utf-8 Content-Length: 26 Date: Sun, 04 Jan 2026 03:02:44 GMT Server: Python/3.12 aiohttp/3.10.6 {"writtenMetricsCount": 1} 2026-01-04T03:02:44.761993Z node 6 :KQP_COMPUTE TRACE: dq_solomon_write_actor.cpp:345: TxId: TxId-42, Solomon sink. Skip sending to solomon. 
Reason: Empty buffer |98.9%| [TM] {BAZEL_UPLOAD} ydb/library/yql/providers/solomon/actors/ut/unittest |98.9%| [TM] {RESULT} ydb/library/yql/providers/solomon/actors/ut/unittest >> test_liveness_wardens.py::TestLivenessWarden::test_hive_liveness_warden_reports_issues [GOOD] >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions >> test_canonical_requests.py::Test::test_pdisk_set_status_inactive [GOOD] >> KqpTpch::Query07 [GOOD] >> KqpTpch::Query08 >> test.py::test[solomon-DownsamplingValidSettings-default.txt] [GOOD] >> test.py::test[solomon-HistResponse-default.txt] >> DataShardCompaction::CompactBorrowedTxStatus [GOOD] >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] >> DataShardReplication::ApplyChangesToReplicatedTable [GOOD] >> DataShardReplication::ApplyChangesToCommonTable >> TestFilterSet::WatermarkWhereFalse [GOOD] >> TestFormatHandler::ManyJsonClients >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-True] [GOOD] >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] >> test_kv.py::TestYdbKvWorkload::test_minimal_maximal_values[Uint64-18446744073709551615-False] [GOOD] >> test_kv.py::TestYdbKvWorkload::test_dynumber ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_compaction/unittest >> DataShardCompaction::CompactBorrowedTxStatus [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T03:02:25.009937Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:02:25.218899Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:02:25.219699Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:02:25.219769Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:02:25.658065Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:02:25.658203Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:02:25.752793Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495741903523 != 1767495741903526 2026-01-04T03:02:25.773181Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:02:25.822963Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:02:25.950055Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:02:26.257000Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:02:26.270766Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:02:26.380501Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-04T03:02:26.380588Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T03:02:26.381711Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-04T03:02:26.538105Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T03:02:26.538233Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T03:02:26.543287Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 2026-01-04T03:02:26.543458Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 
TEvNavigateKeySet requested from SchemeCache 2026-01-04T03:02:26.543961Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T03:02:26.544160Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T03:02:26.544388Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T03:02:26.544764Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T03:02:26.548807Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:02:26.550025Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T03:02:26.550098Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T03:02:26.591562Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T03:02:26.592452Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T03:02:26.593628Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T03:02:26.593896Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T03:02:26.638394Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T03:02:26.639281Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T03:02:26.639407Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T03:02:26.642097Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T03:02:26.642212Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T03:02:26.642286Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T03:02:26.644250Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T03:02:26.644395Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T03:02:26.644507Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T03:02:26.655519Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T03:02:26.689975Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T03:02:26.691286Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T03:02:26.691446Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T03:02:26.691476Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T03:02:26.691505Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T03:02:26.691534Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T03:02:26.691709Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T03:02:26.691768Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T03:02:26.693173Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T03:02:26.693310Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T03:02:26.693401Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T03:02:26.693442Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T03:02:26.693531Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T03:02:26.693580Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T03:02:26.693610Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T03:02:26.693635Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T03:02:26.693684Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T03:02:26.693840Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T03:02:26.693872Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T03:02:26.693939Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T03:02:26.694291Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 
2026-01-04T03:02:26.694346Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T03:02:26.694502Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T03:02:26.694800Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T03:02:26.694852Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T03:02:26.695514Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T03:02:26.695694Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status ... 1474976715661] at 72075186224037892 is DelayComplete 2026-01-04T03:02:57.465615Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037892 executing on unit CompleteOperation 2026-01-04T03:02:57.465644Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [72500:281474976715661] at 72075186224037892 to execution unit CompletedOperations 2026-01-04T03:02:57.465677Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037892 on unit CompletedOperations 2026-01-04T03:02:57.465712Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037892 is Executed 2026-01-04T03:02:57.465737Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037892 executing on unit CompletedOperations 2026-01-04T03:02:57.465765Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [72500:281474976715661] at 72075186224037892 has finished 2026-01-04T03:02:57.465797Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037892 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T03:02:57.466030Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037892 2026-01-04T03:02:57.466065Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037892 has no attached operations 2026-01-04T03:02:57.466093Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037892 2026-01-04T03:02:57.477026Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-01-04T03:02:57.477103Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037892 2026-01-04T03:02:57.477150Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [72500:281474976715661] at 72075186224037892 on unit CompleteOperation 2026-01-04T03:02:57.477217Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [72500 : 281474976715661] from 72075186224037892 at tablet 72075186224037892 send result to client [2:1647:3440], exec latency: 0 ms, propose latency: 1 ms 2026-01-04T03:02:57.477266Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037892 2026-01-04T03:02:57.477433Z node 2 :TX_PROXY DEBUG: 
datareq.cpp:2286: Actor# [2:1647:3440] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037892 marker# P12 2026-01-04T03:02:57.477479Z node 2 :TX_PROXY DEBUG: datareq.cpp:2968: Send stream clearance, shard: 72075186224037890, txid: 281474976715661, cleared: 1 2026-01-04T03:02:57.477616Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269287940, Sender [2:1647:3440], Recipient [2:983:2833]: NKikimrTx.TEvStreamClearanceResponse TxId: 281474976715661 Cleared: true 2026-01-04T03:02:57.477658Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3174: StateWork, processing event TEvTxProcessing::TEvStreamClearanceResponse 2026-01-04T03:02:57.477740Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:983:2833], Recipient [2:983:2833]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T03:02:57.477786Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T03:02:57.477889Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-04T03:02:57.477928Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2026-01-04T03:02:57.477969Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [72500:281474976715661] at 72075186224037890 for WaitForStreamClearance 2026-01-04T03:02:57.478002Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit WaitForStreamClearance 2026-01-04T03:02:57.478041Z node 2 :TX_DATASHARD TRACE: wait_for_stream_clearance_unit.cpp:156: Got stream clearance for [72500:281474976715661] at 72075186224037890 2026-01-04T03:02:57.478078Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037890 is Executed 2026-01-04T03:02:57.478111Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037890 executing on unit WaitForStreamClearance 2026-01-04T03:02:57.478142Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [72500:281474976715661] at 72075186224037890 to execution unit ReadTableScan 2026-01-04T03:02:57.478172Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit ReadTableScan 2026-01-04T03:02:57.478406Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037890 is Continue 2026-01-04T03:02:57.478448Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T03:02:57.478490Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037890 2026-01-04T03:02:57.478529Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2026-01-04T03:02:57.478556Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037890 2026-01-04T03:02:57.479029Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435082, Sender [2:1679:3469], Recipient [2:983:2833]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvRegisterScanActor 2026-01-04T03:02:57.479092Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3183: StateWork, processing event TEvPrivate::TEvRegisterScanActor 2026-01-04T03:02:57.479263Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:514: Got quota for read table scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2026-01-04T03:02:57.479362Z node 2 :TX_DATASHARD DEBUG: read_table_scan.cpp:718: Finish scan ShardId: 72075186224037890, TxId: 281474976715661, MessageQuota: 1 2026-01-04T03:02:57.542290Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4486: FullScan complete at 72075186224037890 2026-01-04T03:02:57.542364Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:4492: Found op: cookie: 281474976715661, at: 72075186224037890 2026-01-04T03:02:57.542625Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [2:983:2833], Recipient [2:983:2833]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T03:02:57.542677Z node 2 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T03:02:57.542746Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037890 2026-01-04T03:02:57.542786Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 active 1 active planned 1 immediate 0 planned 1 2026-01-04T03:02:57.542834Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:307: Found ready candidate operation [72500:281474976715661] at 72075186224037890 for ReadTableScan 2026-01-04T03:02:57.542866Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit ReadTableScan 2026-01-04T03:02:57.542901Z node 2 :TX_DATASHARD TRACE: read_table_scan_unit.cpp:158: ReadTable scan complete for [72500:281474976715661] at 72075186224037890 error: , IsFatalError: 0 2026-01-04T03:02:57.542946Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037890 is Executed 2026-01-04T03:02:57.542980Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037890 executing on unit ReadTableScan 2026-01-04T03:02:57.543012Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [72500:281474976715661] at 72075186224037890 to execution unit CompleteOperation 2026-01-04T03:02:57.543042Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit CompleteOperation 2026-01-04T03:02:57.543262Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 72075186224037890 is DelayComplete 2026-01-04T03:02:57.543304Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037890 executing on unit CompleteOperation 2026-01-04T03:02:57.543338Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [72500:281474976715661] at 72075186224037890 to execution unit CompletedOperations 2026-01-04T03:02:57.543375Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [72500:281474976715661] at 72075186224037890 on unit CompletedOperations 2026-01-04T03:02:57.543419Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [72500:281474976715661] at 
72075186224037890 is Executed 2026-01-04T03:02:57.543449Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [72500:281474976715661] at 72075186224037890 executing on unit CompletedOperations 2026-01-04T03:02:57.543477Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [72500:281474976715661] at 72075186224037890 has finished 2026-01-04T03:02:57.543510Z node 2 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037890 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T03:02:57.543545Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:327: Check candidate unit PlanQueue at 72075186224037890 2026-01-04T03:02:57.543577Z node 2 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037890 has no attached operations 2026-01-04T03:02:57.543604Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:341: Unit PlanQueue has no ready operations at 72075186224037890 2026-01-04T03:02:57.554628Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-04T03:02:57.554701Z node 2 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037890 2026-01-04T03:02:57.554742Z node 2 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [72500:281474976715661] at 72075186224037890 on unit CompleteOperation 2026-01-04T03:02:57.554804Z node 2 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [72500 : 281474976715661] from 72075186224037890 at tablet 72075186224037890 send result to client [2:1647:3440], exec latency: 1 ms, propose latency: 1 ms 2026-01-04T03:02:57.554853Z node 2 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037890 2026-01-04T03:02:57.555031Z node 2 :TX_PROXY DEBUG: datareq.cpp:2286: Actor# [2:1647:3440] txid# 281474976715661 HANDLE Plan TEvProposeTransactionResult TDataReq GetStatus# COMPLETE shard id 72075186224037890 marker# P12 2026-01-04T03:02:57.555123Z node 2 :TX_PROXY INFO: datareq.cpp:834: Actor# [2:1647:3440] txid# 281474976715661 RESPONSE Status# ExecComplete prepare time: 0.000500s execute time: 0.001500s total time: 0.002000s marker# P13 |98.9%| [TM] {RESULT} ydb/core/tx/datashard/ut_compaction/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_compaction/unittest >> Graph::MemoryBackendFullCycle [GOOD] >> Graph::LocalBackendFullCycle |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> TWriteQuoterTests::WaitDeduplicationIdQuota [GOOD] >> test_kv.py::TestYdbKvWorkload::test_dynumber [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/coordinator/ut/unittest >> CoordinatorVolatile::CoordinatorRestartWithEnqueuedVolatileStep [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T03:01:08.519597Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:01:08.520748Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:01:08.531585Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:685:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T03:01:08.673952Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T03:01:08.685503Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T03:01:08.686786Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:299:2225], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:01:08.687125Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:01:08.687377Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:01:08.689335Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:01:08.689402Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:01:09.271609Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:09.412104Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:09.412237Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:09.412747Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:09.412833Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:09.482429Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T03:01:09.484162Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:09.485392Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:09.614428Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T03:01:09.628865Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) ... waiting for the first mediator step 2026-01-04T03:01:10.395112Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:01:10.396544Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... found first step to be 1500 2026-01-04T03:01:10.731486Z node 1 :TX_COORDINATOR DEBUG: coordinator__acquire_read_step.cpp:97: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 1500 ... waiting for the next mediator step ... found second step to be 2000 ... read step subscribe result: [1500, 2000] ... 
read step subscribe update: 3000 2026-01-04T03:01:11.807544Z node 1 :TX_COORDINATOR DEBUG: coordinator__acquire_read_step.cpp:97: tablet# 72057594046316545 HANDLE TEvAcquireReadStep ... acquired read step 3000 ... read step subscribe result: [3000, 3000] ... read step subscribe update: 3500 ... read step subscribe update: 3500 ... read step subscribe update: 4000 ... read step subscribe update: 5000 ... read step subscribe update: 6000 ... read step subscribe update: 7000 ... read step subscribe result: [3000, 7000] 2026-01-04T03:01:14.662921Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037936129] NodeDisconnected NodeId# 2 2026-01-04T03:01:14.662989Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037936131] NodeDisconnected NodeId# 2 2026-01-04T03:01:14.663029Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594046447617] NodeDisconnected NodeId# 2 2026-01-04T03:01:14.663072Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037968897] NodeDisconnected NodeId# 2 2026-01-04T03:01:14.663102Z node 1 :PIPE_SERVER ERROR: tablet_pipe_server.cpp:228: [72057594037932033] NodeDisconnected NodeId# 2 2026-01-04T03:01:14.663279Z node 2 :BS_NODE ERROR: {NW42@node_warden_pipe.cpp:60} Handle(TEvTabletPipe::TEvClientDestroyed) ClientId# [2:103:2092] ServerId# [1:1131:2699] TabletId# 72057594037932033 PipeClientId# [2:103:2092] 2026-01-04T03:01:14.663616Z node 2 :TX_PROXY WARN: proxy_impl.cpp:226: actor# [2:258:2138] HANDLE TEvClientDestroyed from tablet# 72057594046447617 2026-01-04T03:01:14.664070Z node 1 :HIVE WARN: hive_impl.cpp:824: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeDisconnected, NodeId 2 2026-01-04T03:01:14.664191Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connected -> Disconnecting 2026-01-04T03:01:14.665110Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnecting -> Disconnected 2026-01-04T03:01:14.674246Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:14.698933Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T03:01:14.700244Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected ... read step subscribe update: 8000 ... read step subscribe update: 9000 ... read step subscribe update: 10000 ... read step subscribe update: 11000 ... read step subscribe update: 12000 ... waiting for SysViewsRoster update finished 2026-01-04T03:01:27.096163Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:01:27.096691Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:680:2347], Scheduled retry for error: {
: Error: Scheme service not found } 2026-01-04T03:01:27.097975Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:01:27.115768Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T03:01:27.122490Z node 4 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T03:01:27.124571Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:685:2406], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:01:27.125397Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:01:27.125483Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:01:27.132466Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:01:27.132556Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:01:27.545976Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:27.667060Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:27.667217Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:27.667855Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:27.667943Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:27.733234Z node 3 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 4 Cookie 4 2026-01-04T03:01:27.733908Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:27.734374Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:27.832238Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T03:01:27.849598Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:01:28.570644Z node 3 :TX_COORDINATOR DEBUG: coordinator__last_step_subscriptions.cpp:52: Processing TEvSubscribeLastStep from [4:1493:2374] at coordinator 72057594046316545 with seqNo 123 and cookie 234 2026-01-04T03:01:28.626773Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:01:28.626938Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:01:28.983713Z node 3 :TX_COORDINATOR DEBUG: coordinator__last_step_subscriptions.cpp:52: Processing TEvSubs ... 
ut responses 2026-01-04T03:02:57.584358Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 10000000 HANDLE EvProposeTransaction marker# C0 2026-01-04T03:02:57.584458Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 10000000 step# 200 Status# 16 SEND to# [20:829:2724] Proxy marker# C1 ... coordinator 72057594046316545 gen 2 is planning step 200 2026-01-04T03:02:57.595375Z node 20 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 10000000 has been planned 2026-01-04T03:02:57.595521Z node 20 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 10000000 for mediator 72057594046382081 tablet 72057594047365120 ... blocking put [72057594046316545:2:10:1:24576:168:0] response ... waiting for planning for the required step ... coordinator 72057594046316545 gen 2 is planning step 250 ... starting a new coordinator instance ... waiting for migrated state 2026-01-04T03:02:57.637551Z node 20 :TX_COORDINATOR INFO: coordinator_impl.cpp:615: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2026-01-04T03:02:57.638000Z node 20 :TX_COORDINATOR INFO: coordinator_impl.cpp:615: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2026-01-04T03:02:57.645735Z node 20 :TX_COORDINATOR INFO: coordinator__init.cpp:120: tablet# 72057594046316545 CreateTxInit Complete ... blocking state response from [20:535:2394] to [20:873:2744] LastSentStep: 150 LastAcquiredStep: 0 LastConfirmedStep: 150 ... unblocking put responses and requests 2026-01-04T03:02:57.646519Z node 20 :TX_COORDINATOR INFO: coordinator_impl.cpp:615: OnTabletStop: 72057594046316545 reason = ReasonDemoted 2026-01-04T03:02:57.646835Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 10000000 stepId# 200 Status# 17 SEND EvProposeTransactionStatus to# [20:829:2724] Proxy 2026-01-04T03:02:57.648790Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:322: tablet# 72057594046382081 server# [20:543:2482] disconnnected 2026-01-04T03:02:57.649230Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:201: Actor# [20:557:2490] MediatorId# 72057594046382081 HANDLE TEvServerDisconnected server# [20:543:2482] ... trying to plan tx 10000011 ... waiting for planned another persistent tx 2026-01-04T03:02:57.676268Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:316: tablet# 72057594046382081 server# [20:880:2754] connected 2026-01-04T03:02:57.676726Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:139: tablet# 72057594046382081 HANDLE EvCoordinatorSync 2026-01-04T03:02:57.676809Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:83: tablet# 72057594046382081 SEND EvCoordinatorSyncResult to# [20:876:2752] Cookie# 1 CompleteStep# 150 LatestKnownStep# 150 SubjectiveTime# 102 Coordinator# 72057594046316545 2026-01-04T03:02:57.677001Z node 20 :TX_COORDINATOR NOTICE: coordinator_impl.cpp:412: tablet# 72057594046316545 HANDLE EvMediatorQueueRestart MediatorId# 72057594046382081 2026-01-04T03:02:57.677063Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 200, txid# 10000000 marker# C2 ... 
observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000000 } Step: 200 PrevStep: 0 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2026-01-04T03:02:57.690132Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 200 2026-01-04T03:02:57.690242Z node 20 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [200] transactions [1] 2026-01-04T03:02:57.690423Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:205: tablet# 72057594046382081 SEND EvCommitStep to# [20:557:2490] ExecQueue {TMediateStep From 150 To# 200Steps: {{TCoordinatorStep step# 200 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [20:876:2752]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M0 2026-01-04T03:02:57.690664Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [20:557:2490] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 150 To# 200Steps: {{TCoordinatorStep step# 200 PrevStep# 0Transactions: {{TTx Moderator# 0 txid# 10000000 AckTo# [20:876:2752]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000000}}}}} marker# M1 2026-01-04T03:02:57.690759Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND Ev to# [20:558:2491] step# 200 forTablet# 72057594047365120 txid# 10000000 marker# M3 2026-01-04T03:02:57.690850Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:558:2491] bucket.ActiveActor step# 200 2026-01-04T03:02:57.690906Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:559:2492] bucket.ActiveActor step# 200 2026-01-04T03:02:57.691039Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep step# 200 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000000 AckTo# [20:876:2752]}}} marker# M4 2026-01-04T03:02:57.691209Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:559:2492] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 200} 2026-01-04T03:02:57.691359Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:183: tablet# 72057594046316545 txid# 10000011 HANDLE EvProposeTransaction marker# C0 2026-01-04T03:02:57.691430Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:29: tablet# 72057594046316545 txid# 10000011 step# 250 Status# 16 SEND to# [20:829:2724] Proxy marker# C1 2026-01-04T03:02:57.691621Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 200} 2026-01-04T03:02:57.692280Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:248: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE NKikimr::TEvTabletPipe::TEvClientConnected { TabletId: 72057594047365120 Status: OK ServerId: [20:884:2757] Leader: 1 Dead: 0 Generation: 2 VersionInfo: } 2026-01-04T03:02:57.692388Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [20:558:2491] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 200 MediatorId# 72057594046382081 TabletID 72057594047365120} ... 
observed tablet step: Transactions { TxId: 10000000 AckTo { RawX1: 0 RawX2: 0 } } Step: 200 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 ... coordinator 72057594046316545 gen 3 is planning step 250 2026-01-04T03:02:57.705515Z node 20 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:184: Transaction 10000011 has been planned 2026-01-04T03:02:57.705663Z node 20 :TX_COORDINATOR DEBUG: coordinator__plan_step.cpp:197: Planned transaction 10000011 for mediator 72057594046382081 tablet 72057594047365120 2026-01-04T03:02:57.706688Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:580: Send from# 72057594046316545 to mediator# 72057594046382081, step# 250, txid# 10000011 marker# C2 2026-01-04T03:02:57.706789Z node 20 :TX_COORDINATOR DEBUG: coordinator_impl.cpp:424: tablet# 72057594046316545 txid# 10000011 stepId# 250 Status# 17 SEND EvProposeTransactionStatus to# [20:829:2724] Proxy ... observed step: Transactions { AffectedSet: 72057594047365120 TxId: 10000011 } Step: 250 PrevStep: 200 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2026-01-04T03:02:57.707153Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:280: tablet# 72057594046382081 HANDLE EvCoordinatorStep coordinator# 72057594046316545 step# 250 2026-01-04T03:02:57.707225Z node 20 :TX_MEDIATOR INFO: mediator_impl.cpp:287: Coordinator step: Mediator [72057594046382081], Coordinator [72057594046316545], step# [250] transactions [1] 2026-01-04T03:02:57.707386Z node 20 :TX_MEDIATOR DEBUG: mediator_impl.cpp:205: tablet# 72057594046382081 SEND EvCommitStep to# [20:557:2490] ExecQueue {TMediateStep From 200 To# 250Steps: {{TCoordinatorStep step# 250 PrevStep# 200Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [20:876:2752]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M0 2026-01-04T03:02:57.707566Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [20:557:2490] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 200 To# 250Steps: {{TCoordinatorStep step# 250 PrevStep# 200Transactions: {{TTx Moderator# 0 txid# 10000011 AckTo# [20:876:2752]}}TabletsToTransaction: {{tablet# 72057594047365120 txid# 10000011}}}}} marker# M1 2026-01-04T03:02:57.707678Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:54: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND Ev to# [20:558:2491] step# 250 forTablet# 72057594047365120 txid# 10000011 marker# M3 2026-01-04T03:02:57.707770Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:558:2491] bucket.ActiveActor step# 250 2026-01-04T03:02:57.707826Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:559:2492] bucket.ActiveActor step# 250 2026-01-04T03:02:57.707946Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:183: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE {TEvCommitTabletStep step# 250 TabletId# 72057594047365120 Transactions {{TTx Moderator# 0 txid# 10000011 AckTo# [20:876:2752]}}} marker# M4 2026-01-04T03:02:57.708040Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:120: Actor# [20:558:2491] Mediator# 72057594046382081 SEND to# 72057594047365120 {TEvPlanStep step# 250 MediatorId# 72057594046382081 TabletID 72057594047365120} 2026-01-04T03:02:57.708164Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: 
tablet_queue.cpp:319: Actor# [20:559:2492] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 250} 2026-01-04T03:02:57.708308Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 250} ... observed tablet step: Transactions { TxId: 10000011 AckTo { RawX1: 0 RawX2: 0 } } Step: 250 MediatorID: 72057594046382081 TabletID: 72057594047365120 ... blocked accept from 72057594047365120 ... coordinator 72057594046316545 gen 3 is planning step 300 ... observed step: Step: 300 PrevStep: 250 MediatorID: 72057594046382081 CoordinatorID: 72057594046316545 ActiveCoordinatorGeneration: 3 2026-01-04T03:02:57.722701Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:72: Actor# [20:557:2490] MediatorId# 72057594046382081 HANDLE TEvCommitStep {TMediateStep From 250 To# 300Steps: {{TCoordinatorStep step# 300 PrevStep# 250}}} marker# M1 2026-01-04T03:02:57.722764Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:558:2491] bucket.ActiveActor step# 300 2026-01-04T03:02:57.722831Z node 20 :TX_MEDIATOR_EXEC_QUEUE DEBUG: execute_queue.cpp:119: Actor# [20:557:2490] MediatorId# 72057594046382081 SEND TEvStepPlanComplete to# [20:559:2492] bucket.ActiveActor step# 300 2026-01-04T03:02:57.722901Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:558:2491] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 300} 2026-01-04T03:02:57.722947Z node 20 :TX_MEDIATOR_TABLETQUEUE DEBUG: tablet_queue.cpp:319: Actor# [20:559:2492] Mediator# 72057594046382081 HANDLE {TEvStepPlanComplete step# 300} |98.9%| [TM] {RESULT} ydb/core/tx/coordinator/ut/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/tx/coordinator/ut/unittest >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertDuplicateKeyAtSnapshot [GOOD] >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertMissingKeyAtSnapshot |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_0__ASYNC-pk_types11-all_types11-index11---ASYNC] [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> TabletService_ExecuteMiniKQL::OnlyAdminsAllowed [GOOD] >> TabletService_Restart::Basics >> TTxDataShardBuildIndexScan::ShadowBorrowCompaction [GOOD] >> TTxDataShardFilterKMeansScan::BadRequest >> test_postgres.py::TestPostgresSuite::test_postgres_suite[horology] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] >> QuoterWithKesusTest::GetsBigQuotaWithDeadline [GOOD] >> QuoterWithKesusTest::FailsToGetBigQuota |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPathV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPathV2 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/pqtablet/quota/ut/unittest >> TWriteQuoterTests::WaitDeduplicationIdQuota [GOOD] Test command err: processed_blobs=41800 quoted_time=9.980000s 2026-01-04T03:02:46.792801Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591336513875165961:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:02:46.793338Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path 
existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T03:02:46.860181Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T03:02:47.217889Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T03:02:47.280009Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:02:47.280151Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:02:47.293021Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:02:47.408410Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:02:47.409839Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591336513875165924:2081] 1767495766785845 != 1767495766785848 2026-01-04T03:02:47.497901Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T03:02:47.718444Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/003874/r3tmp/yandexeqMvWG.tmp 2026-01-04T03:02:47.718467Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/003874/r3tmp/yandexeqMvWG.tmp 2026-01-04T03:02:47.720855Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/003874/r3tmp/yandexeqMvWG.tmp 2026-01-04T03:02:47.720969Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T03:02:47.810311Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:02:48.170522Z INFO: TTestServer started on Port 14592 GrpcPort 11876 PQClient connected to localhost:11876 2026-01-04T03:02:48.479823Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:02:48.501172Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T03:02:48.510830Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710658, at schemeshard: 72057594046644480 2026-01-04T03:02:48.610040Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at 
ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:02:48.797785Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710660, at schemeshard: 72057594046644480 2026-01-04T03:02:48.806741Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__notify.cpp:123: NotifyTxCompletion, unknown transaction, txId: 281474976710661, at schemeshard: 72057594046644480 2026-01-04T03:02:50.102262Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336531055036159:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:50.102381Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336531055036149:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:50.102721Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:50.103368Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336531055036165:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:50.103425Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:02:50.114482Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T03:02:50.125851Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591336531055036164:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T03:02:50.228557Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591336531055036230:2646] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T03:02:50.686584Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591336531055036238:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T03:02:50.695935Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=Y2FjMWQ4ZTItNzI5NzgwNGEtZGFhZmNlMzYtNDVhOWUzOGQ=, ActorId: [1:7591336531055036147:2330], ActorState: ExecuteState, LegacyTraceId: 01ke3fa7zjb3cxg6bjzbbznrk3, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T03:02:50.700708Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T03:02:50.769281Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:02:50.821488Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:02:50.970250Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. 
Subcribe to ClusterTracker from [1:7591336535350003822:2824] 2026-01-04T03:02:51.794126Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591336513875165961:2070];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:02:51.794235Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-04T03:02:57.509427Z :TEST_CASE_NAME INFO: TTopicSdkTestSetup started |98.9%| [TM] {RESULT} ydb/core/persqueue/pqtablet/quota/ut/unittest |98.9%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/pqtablet/quota/ut/unittest >> TestFormatHandler::ManyJsonClients [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TestFormatHandler::ManyRawClients >> test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] >> DataShardReplication::ApplyChangesToCommonTable [GOOD] >> DataShardReplication::ApplyChangesWithConcurrentTx >> TTopicWriterTests::TestEnterMessage_ZeroSymbol_Delimited [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 >> test.py::test[solomon-HistResponse-default.txt] [GOOD] >> test.py::test[solomon-InvalidProject-] >> test_canonical_requests.py::Test::test_vdisk_ready_stable_period |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_NewlineDelimited [GOOD] |98.9%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadOneMessage |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |98.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TTxDataShardFilterKMeansScan::BadRequest [GOOD] >> TTxDataShardFilterKMeansScan::BuildToPosting >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success >> TestFormatHandler::ManyRawClients [GOOD] >> TestFormatHandler::ClientValidation >> KqpTpch::Query08 [GOOD] >> KqpTpch::Query09 >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float8] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] >> TabletService_Restart::Basics [GOOD] >> TabletService_Restart::OnlyAdminsAllowed >> TMLPReaderTests::EmptyTopic [GOOD] >> TMLPReaderTests::TopicWithData >> test_postgres.py::TestPostgresSuite::test_postgres_suite[roles] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[char] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges >> DataShardSnapshotIsolation::ReadWriteUncommittedInsertMissingKeyAtSnapshot [GOOD] >> DataShardSnapshotIsolation::ReadWriteObserveOwnChanges >> QuoterWithKesusTest::FailsToGetBigQuota [GOOD] >> QuoterWithKesusTest::PrefetchCoefficient >> 
TFetchRequestTests::CheckAccess [GOOD] >> Graph::LocalBackendFullCycle [GOOD] >> Graph::MemoryBordersOnGet >> test_postgres.py::TestPostgresSuite::test_postgres_suite[float4] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] >> KeyValueGRPCService::SimpleWriteReadWithIncorreectPathV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutTokenV1 |98.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_0__ASYNC-pk_types31-all_types31-index31-Date--ASYNC] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> TestFormatHandler::ClientValidation [GOOD] >> TopicSessionTests::RestartSessionIfQueryStopped [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/public/fetcher/ut/unittest >> TFetchRequestTests::CheckAccess [GOOD] Test command err: 2026-01-04T03:01:43.275879Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591336244369824796:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:01:43.276648Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T03:01:43.334771Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T03:01:43.518030Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T03:01:43.641994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:43.642102Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:43.690421Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:43.699483Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:43.700829Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:7591336244369824759:2081] 1767495703271997 != 1767495703272000 2026-01-04T03:01:43.756999Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T03:01:43.930554Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/0024b6/r3tmp/yandexBv2QSB.tmp 2026-01-04T03:01:43.930660Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/0024b6/r3tmp/yandexBv2QSB.tmp 2026-01-04T03:01:43.934015Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/0024b6/r3tmp/yandexBv2QSB.tmp 2026-01-04T03:01:43.934158Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T03:01:44.121542Z INFO: TTestServer started on Port 11821 GrpcPort 25688 2026-01-04T03:01:44.282752Z node 1 :TX_CONVEYOR ERROR: 
log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:25688 2026-01-04T03:01:44.335060Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:01:44.402215Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:46.530597Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336257254727684:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:46.530722Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336257254727697:2335], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:46.530852Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:46.531237Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336257254727700:2337], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:46.531329Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:46.538929Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T03:01:46.546082Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336257254727737:2340], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:46.546174Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:46.547591Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336257254727742:2341], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:46.548114Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:46.550833Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591336257254727699:2336], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T03:01:46.622725Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591336257254727759:2642] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T03:01:47.024420Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591336257254727775:2344], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T03:01:47.030357Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=YzI0YzM3OTktMjU5YWY4NDQtMTRjY2Y1ODYtNzU2MTNlMGE=, ActorId: [1:7591336257254727682:2330], ActorState: ExecuteState, LegacyTraceId: 01ke3f89wafxaf40epg0ksednw, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T03:01:47.035255Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T03:01:47.063271Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:47.096467Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:47.179706Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. 
Subcribe to ClusterTracker from [1:7591336261549695361:2822] 2026-01-04T03:01:48.277946Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591336244369824796:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:01:48.278051Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-04T03:01:53.564112Z :HappyWay INFO: TTopicSdkTestSetup started 2026-01-04T03:01:53.586516Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-04T03:01:53.618679Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7591336287319499374:2930] connected; active server actors: 1 2026-01-04T03:01:53.619484Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-04T03:01:53.623019Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893 ... 892][Partition][0][StateIdle] Try persist 2026-01-04T03:03:05.632572Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037895][Partition][2][StateIdle] No data for blobs compaction 2026-01-04T03:03:05.635093Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037897][Partition][4][StateIdle] No data for blobs compaction 2026-01-04T03:03:05.642305Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037894][Partition][1][StateIdle] No data for blobs compaction 2026-01-04T03:03:05.642551Z node 6 :PQ_FETCH_REQUEST DEBUG: fetch_request_actor.cpp:330: [[6:7591336572491487934:3125]] Handle TEvPersQueue::TEvHasDataInfoResponse EndOffset: 0 Cookie: 0 SizeLag: 0 WriteTimestampEstimateMS: 0 ReadingFinished: false 2026-01-04T03:03:05.642572Z node 6 :PQ_FETCH_REQUEST DEBUG: fetch_request_actor.cpp:336: [[6:7591336572491487934:3125]] Partition 0 status is 2 2026-01-04T03:03:05.642603Z node 6 :PQ_FETCH_REQUEST DEBUG: fetch_request_actor.cpp:274: [[6:7591336572491487934:3125]] Processing 0/1 2026-01-04T03:03:05.642617Z node 6 :PQ_FETCH_REQUEST DEBUG: fetch_request_actor.cpp:282: [[6:7591336572491487934:3125]] Skip partition 0 because status is DataReceived 2026-01-04T03:03:05.642625Z node 6 :PQ_FETCH_REQUEST DEBUG: fetch_request_actor.cpp:274: [[6:7591336572491487934:3125]] Processing 1/1 2026-01-04T03:03:05.642736Z node 6 :PQ_FETCH_REQUEST DEBUG: fetch_request_actor.cpp:540: [[6:7591336572491487934:3125]] Reply to [6:7591336572491487919:3117]: PartResult { Topic: "/Root/topic1" Partition: 1 ReadResult { MaxOffset: 0 ErrorCode: READ_ERROR_TOO_BIG_OFFSET } } 2026-01-04T03:03:05.643124Z node 6 :PQ_FETCH_REQUEST INFO: fetch_request_actor.cpp:156: [[6:7591336593966324597:3197]] Fetch request actor boostrapped. 
Request is valid: 1 2026-01-04T03:03:05.643152Z node 6 :PQ_FETCH_REQUEST DEBUG: fetch_request_actor.cpp:171: [[6:7591336593966324597:3197]] DescribeTopics 2026-01-04T03:03:05.643267Z node 6 :PERSQUEUE DEBUG: pq_impl.cpp:2809: [PQ: 72075186224037894] server disconnected, pipe [6:7591336572491487944:3125] destroyed 2026-01-04T03:03:05.643447Z node 6 :PQ_FETCH_REQUEST DEBUG: fetch_request_actor.cpp:186: [[6:7591336593966324597:3197]] Handle NDescriber::TEvDescribeTopicsResponse 2026-01-04T03:03:05.643519Z node 6 :PQ_FETCH_REQUEST DEBUG: fetch_request_actor.cpp:540: [[6:7591336593966324597:3197]] Reply to [6:7591336572491487919:3117]: 2026-01-04T03:03:05.645044Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037896][Partition][3][StateIdle] No data for blobs compaction 2026-01-04T03:03:05.648033Z node 6 :PERSQUEUE DEBUG: partition_compaction.cpp:183: [72075186224037898][Partition][0][StateIdle] No data for blobs compaction 2026-01-04T03:03:05.676995Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][3][StateIdle] Process user action and tx events 2026-01-04T03:03:05.676995Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037897][Partition][4][StateIdle] Process user action and tx events 2026-01-04T03:03:05.677026Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.677033Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.677048Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037897][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-04T03:03:05.677049Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-04T03:03:05.677074Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.677074Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.677095Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037897][Partition][4][StateIdle] Try persist 2026-01-04T03:03:05.677095Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][3][StateIdle] Try persist 2026-01-04T03:03:05.677152Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][2][StateIdle] Process user action and tx events 2026-01-04T03:03:05.677154Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2026-01-04T03:03:05.677160Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.677167Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T03:03:05.677176Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.677177Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.677184Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: 
[72075186224037895][Partition][2][StateIdle] Try persist 2026-01-04T03:03:05.677187Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T03:03:05.677200Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.677218Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037894][Partition][1][StateIdle] Try persist 2026-01-04T03:03:05.682577Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037898][Partition][0][StateIdle] Process user action and tx events 2026-01-04T03:03:05.682615Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037898][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.682640Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037898][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T03:03:05.682665Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037898][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.682685Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037898][Partition][0][StateIdle] Try persist 2026-01-04T03:03:05.721496Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T03:03:05.721535Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.721554Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T03:03:05.721587Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.721607Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist 2026-01-04T03:03:05.778410Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037896][Partition][3][StateIdle] Process user action and tx events 2026-01-04T03:03:05.778454Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.778473Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037896][Partition][3][StateIdle] Process user action and tx pending commits 2026-01-04T03:03:05.778494Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037896][Partition][3][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.778512Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037896][Partition][3][StateIdle] Try persist 2026-01-04T03:03:05.778572Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037894][Partition][1][StateIdle] Process user action and tx events 2026-01-04T03:03:05.778583Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.778607Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037894][Partition][1][StateIdle] Process user action and tx pending commits 2026-01-04T03:03:05.778620Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037894][Partition][1][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.778630Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: 
[72075186224037894][Partition][1][StateIdle] Try persist 2026-01-04T03:03:05.778671Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037897][Partition][4][StateIdle] Process user action and tx events 2026-01-04T03:03:05.778682Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.778691Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037897][Partition][4][StateIdle] Process user action and tx pending commits 2026-01-04T03:03:05.778703Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037897][Partition][4][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.778714Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037897][Partition][4][StateIdle] Try persist 2026-01-04T03:03:05.778741Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037895][Partition][2][StateIdle] Process user action and tx events 2026-01-04T03:03:05.778751Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.778760Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037895][Partition][2][StateIdle] Process user action and tx pending commits 2026-01-04T03:03:05.778771Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037895][Partition][2][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.778780Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037895][Partition][2][StateIdle] Try persist 2026-01-04T03:03:05.782929Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037898][Partition][0][StateIdle] Process user action and tx events 2026-01-04T03:03:05.782977Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037898][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.782996Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037898][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T03:03:05.783018Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037898][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.783036Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037898][Partition][0][StateIdle] Try persist 2026-01-04T03:03:05.821963Z node 6 :PERSQUEUE DEBUG: partition.cpp:2329: [72075186224037892][Partition][0][StateIdle] Process user action and tx events 2026-01-04T03:03:05.822009Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.822031Z node 6 :PERSQUEUE DEBUG: partition.cpp:2337: [72075186224037892][Partition][0][StateIdle] Process user action and tx pending commits 2026-01-04T03:03:05.822053Z node 6 :PERSQUEUE DEBUG: partition.cpp:2388: [72075186224037892][Partition][0][StateIdle] Events: 0, PendingCommits: 0, PendingWrites: 0 2026-01-04T03:03:05.822072Z node 6 :PERSQUEUE DEBUG: partition.cpp:2347: [72075186224037892][Partition][0][StateIdle] Try persist |99.0%| [TM] {RESULT} ydb/core/persqueue/public/fetcher/ut/unittest |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/public/fetcher/ut/unittest >> TestFormatHandler::ClientError >> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD] >> TopicSessionTests::WrongJson |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test.py::test[solomon-InvalidProject-] [GOOD] >> 
Graph::MemoryBordersOnGet [GOOD] >> Graph::LocalBordersOnGet |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test.py::test[solomon-LabelColumnAliases-default.txt] >> KqpTpch::Query09 [GOOD] >> KqpTpch::Query10 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_replication/unittest >> DataShardReplication::ApplyChangesWithConcurrentTx [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T03:02:21.032365Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:02:21.275825Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:02:21.276501Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:02:21.276585Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:02:21.690410Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:02:21.690561Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:02:21.783887Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495738299072 != 1767495738299075 2026-01-04T03:02:21.800103Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:02:21.849576Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:02:21.968741Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:02:22.290665Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:02:22.306534Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:02:22.422163Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:314: actor# [1:68:2115] Handle TEvProposeTransaction 2026-01-04T03:02:22.422263Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:237: actor# [1:68:2115] TxId# 281474976715657 ProcessProposeTransaction 2026-01-04T03:02:22.423166Z node 1 :TX_PROXY DEBUG: proxy_impl.cpp:256: actor# [1:68:2115] Cookie# 0 userReqId# "" txid# 281474976715657 SEND to# [1:861:2744] 2026-01-04T03:02:22.584538Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1822: Actor# [1:861:2744] txid# 281474976715657 Bootstrap EvSchemeRequest record: Transaction { ModifyScheme { WorkingDir: "/Root" OperationType: ESchemeOpCreateTable CreateTable { Name: "table-1" Columns { Name: "key" Type: "Uint32" FamilyName: "" NotNull: false } Columns { Name: "value" Type: "Uint32" FamilyName: "" NotNull: false } KeyColumnNames: "key" UniformPartitionsCount: 1 ReplicationConfig { Mode: REPLICATION_MODE_READ_ONLY ConsistencyLevel: CONSISTENCY_LEVEL_GLOBAL } } } } ExecTimeoutPeriod: 18446744073709551615 2026-01-04T03:02:22.584660Z node 1 :TX_PROXY DEBUG: schemereq.cpp:647: Actor# [1:861:2744] txid# 281474976715657 Bootstrap, UserSID: CheckAdministrator: 0 CheckDatabaseAdministrator: 0 2026-01-04T03:02:22.586647Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1887: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P6 
2026-01-04T03:02:22.586838Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1877: Actor# [1:861:2744] txid# 281474976715657 TEvNavigateKeySet requested from SchemeCache 2026-01-04T03:02:22.587257Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1631: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvNavigateKeySetResult TFlatSchemeReq marker# P5 ErrorCount# 0 2026-01-04T03:02:22.587467Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1666: Actor# [1:861:2744] HANDLE EvNavigateKeySetResult, txid# 281474976715657 shardToRequest# 72057594046644480 DomainKey# [OwnerId: 72057594046644480, LocalPathId: 1] DomainInfo.Params# Version: 1 PlanResolution: 500 Coordinators: 72057594046316545 TimeCastBucketsPerMediator: 2 Mediators: 72057594046382081 RedirectRequired# true 2026-01-04T03:02:22.587704Z node 1 :TX_PROXY DEBUG: schemereq.cpp:117: Actor# [1:861:2744] txid# 281474976715657 SEND to# 72057594046644480 shardToRequest {TEvModifySchemeTransaction txid# 281474976715657 TabletId# 72057594046644480} 2026-01-04T03:02:22.588081Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1533: Actor# [1:861:2744] txid# 281474976715657 HANDLE EvClientConnected 2026-01-04T03:02:22.592204Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:02:22.594326Z node 1 :TX_PROXY DEBUG: schemereq.cpp:1555: Actor# [1:861:2744] txid# 281474976715657 Status StatusAccepted HANDLE {TEvModifySchemeTransactionResult Status# StatusAccepted txid# 281474976715657} 2026-01-04T03:02:22.594414Z node 1 :TX_PROXY DEBUG: schemereq.cpp:627: Actor# [1:861:2744] txid# 281474976715657 SEND to# [1:829:2724] Source {TEvProposeTransactionStatus txid# 281474976715657 Status# 53} 2026-01-04T03:02:22.645161Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T03:02:22.646278Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T03:02:22.647551Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T03:02:22.647883Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T03:02:22.696257Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T03:02:22.696847Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T03:02:22.696936Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T03:02:22.699441Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T03:02:22.699569Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T03:02:22.699628Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T03:02:22.701472Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T03:02:22.701668Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T03:02:22.701779Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T03:02:22.712891Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T03:02:22.751009Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T03:02:22.756123Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T03:02:22.756349Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T03:02:22.756399Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T03:02:22.756435Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T03:02:22.756470Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T03:02:22.756618Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T03:02:22.756657Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T03:02:22.759466Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T03:02:22.759587Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T03:02:22.759672Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T03:02:22.759706Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T03:02:22.759800Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T03:02:22.759855Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T03:02:22.759894Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T03:02:22.759930Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T03:02:22.759982Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T03:02:22.760172Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T03:02:22.760222Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T03:02:22.760292Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T03:02:22.760674Z node 1 :TX_DATASHARD TRACE: 
datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-04T03:02:22.760712Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T03:02:22.760793Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T03:02:22.761125Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T03:02:22.761219Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T03:02:22.762040Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026 ... :09.136679Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:2] at 72075186224037888 2026-01-04T03:03:09.136735Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-04T03:03:09.136766Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T03:03:09.136792Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T03:03:09.136819Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit ExecuteRead 2026-01-04T03:03:09.137018Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976710660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-04T03:03:09.137397Z node 8 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976710660, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-04T03:03:09.137505Z node 8 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2026-01-04T03:03:09.137573Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[8:1069:2888], 0} after executionsCount# 1 2026-01-04T03:03:09.137635Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[8:1069:2888], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T03:03:09.137736Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[8:1069:2888], 0} finished in read 2026-01-04T03:03:09.137856Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-04T03:03:09.137888Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T03:03:09.137916Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:2] at 72075186224037888 to 
execution unit CompletedOperations 2026-01-04T03:03:09.137944Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:2] at 72075186224037888 on unit CompletedOperations 2026-01-04T03:03:09.138010Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:2] at 72075186224037888 is Executed 2026-01-04T03:03:09.138042Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:2] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T03:03:09.138072Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:2] at 72075186224037888 has finished 2026-01-04T03:03:09.138121Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T03:03:09.138246Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-04T03:03:09.138554Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 275709965, Sender [8:70:2117], Recipient [8:886:2765]: NKikimrLongTxService.TEvLockStatus LockId: 281474976710660 LockNode: 8 Status: STATUS_SUBSCRIBED 2026-01-04T03:03:09.139380Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [8:1069:2888], Recipient [8:886:2765]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T03:03:09.139451Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } } 2026-01-04T03:03:09.145109Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [8:1074:2893], Recipient [8:886:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T03:03:09.145194Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T03:03:09.145261Z node 8 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [8:1073:2892], serverId# [8:1074:2893], sessionId# [0:0:0] 2026-01-04T03:03:09.145559Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549570, Sender [8:1072:2891], Recipient [8:886:2765]: NKikimrTxDataShard.TEvApplyReplicationChanges TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Source: "my-source" Changes { SourceOffset: 1 WriteTxId: 0 Key: "\001\000\004\000\000\000\001\000\000\000" Upsert { Tags: 2 Data: "\001\000\004\000\000\000\025\000\000\000" } } 2026-01-04T03:03:09.145765Z node 8 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2000/281474976710657 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2000/18446744073709551615 ImmediateWriteEdgeReplied# v2000/18446744073709551615 2026-01-04T03:03:09.145943Z node 8 :TX_DATASHARD TRACE: locks.cpp:194: Lock 281474976710660 marked broken at v{min} 2026-01-04T03:03:09.158885Z node 8 :TX_DATASHARD DEBUG: datashard.cpp:2572: Waiting for PlanStep# 2501 from mediator time cast 2026-01-04T03:03:09.159868Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 270270977, Sender [8:26:2073], Recipient [8:886:2765]: {TEvNotifyPlanStep TabletId# 72075186224037888 PlanStep# 2501} 2026-01-04T03:03:09.159929Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3193: StateWork, processing event 
TEvMediatorTimecast::TEvNotifyPlanStep 2026-01-04T03:03:09.159995Z node 8 :TX_DATASHARD DEBUG: datashard.cpp:3810: Notified by mediator time cast with PlanStep# 2501 at tablet 72075186224037888 2026-01-04T03:03:09.160068Z node 8 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T03:03:09.406296Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553215, Sender [8:1097:2910], Recipient [8:886:2765]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976710660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC RangesSize: 1 2026-01-04T03:03:09.406506Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 0 2026-01-04T03:03:09.406618Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CheckRead 2026-01-04T03:03:09.406770Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-04T03:03:09.406823Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CheckRead 2026-01-04T03:03:09.406873Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T03:03:09.406923Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T03:03:09.406999Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:3] at 72075186224037888 2026-01-04T03:03:09.407061Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-04T03:03:09.407090Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T03:03:09.407116Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T03:03:09.407143Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit ExecuteRead 2026-01-04T03:03:09.407294Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 Columns: 2 Snapshot { Step: 2500 TxId: 18446744073709551615 } LockTxId: 281474976710660 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false LockNodeId: 8 TotalRowsLimit: 1001 LockMode: OPTIMISTIC } 2026-01-04T03:03:09.407602Z node 8 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037888 Acquired lock# 281474976710660, counter# 18446744073709551612 for [OwnerId: 72057594046644480, LocalPathId: 38] 2026-01-04T03:03:09.407685Z node 8 :TX_DATASHARD TRACE: datashard.cpp:2488: PromoteImmediatePostExecuteEdges at 72075186224037888 promoting UnprotectedReadEdge to v2500/18446744073709551615 2026-01-04T03:03:09.407756Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# 
{[8:1097:2910], 0} after executionsCount# 1 2026-01-04T03:03:09.407823Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[8:1097:2910], 0} sends rowCount# 1, bytes# 32, quota rows left# 1000, quota bytes left# 5242848, hasUnreadQueries# 0, total queries# 1, firstUnprocessed# 0 2026-01-04T03:03:09.407925Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[8:1097:2910], 0} finished in read 2026-01-04T03:03:09.408002Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-04T03:03:09.408035Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T03:03:09.408063Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:3] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T03:03:09.408093Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:3] at 72075186224037888 on unit CompletedOperations 2026-01-04T03:03:09.408145Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:3] at 72075186224037888 is Executed 2026-01-04T03:03:09.408172Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:3] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T03:03:09.408209Z node 8 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:3] at 72075186224037888 has finished 2026-01-04T03:03:09.408258Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T03:03:09.408387Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-04T03:03:09.409271Z node 8 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553219, Sender [8:1097:2910], Recipient [8:886:2765]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T03:03:09.409348Z node 8 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 1 } items { uint32_value: 11 } } |99.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_replication/unittest |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_replication/unittest >> TabletService_Restart::OnlyAdminsAllowed [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows >> Graph::LocalBordersOnGet [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_3__SYNC-pk_types1-all_types1-index1---SYNC] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> test_insert.py::TestInsertOperations::test_several_inserts_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success >> DataShardStats::SharedCacheGarbage [GOOD] >> DataShardStats::CollectStatsForSeveralParts ------- [TM] {asan, default-linux-x86_64, release} ydb/core/graph/ut/unittest >> Graph::LocalBordersOnGet [GOOD] Test command err: Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] IGNORE Leader for TabletID 72057594046678944 is [0:0:0] sender: [1:118:2058] recipient: [1:112:2143] Leader for TabletID 
72057594046678944 is [1:126:2151] sender: [1:128:2058] recipient: [1:112:2143] 2026-01-04T03:02:47.144901Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7938: BackgroundCompactionQueue configured: Timeout# 600.000000s, compact single parted# no, Rate# 1, WakeupInterval# 60.000000s, RoundInterval# 172800.000000s, InflightLimit# 1, MinCompactionRepeatDelaySeconds# 600.000000s, MaxRate# 1 2026-01-04T03:02:47.145135Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7966: BorrowedCompactionQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T03:02:47.145205Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7852: StatsBatching config: StatsBatchTimeout# 0.100000s, StatsMaxBatchSize# 100, StatsMaxExecuteTime# 0.010000s 2026-01-04T03:02:47.145254Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7868: OperationsProcessing config: using default configuration 2026-01-04T03:02:47.145305Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxSplitTablePartition, limit 10000 2026-01-04T03:02:47.145347Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7874: OperationsProcessing config: type TxMergeTablePartition, limit 10000 2026-01-04T03:02:47.145420Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:7998: BackgroundCleaningQueue configured: Timeout# 15.000000s, Rate# 0, WakeupInterval# 1.000000s, InflightLimit# 10 2026-01-04T03:02:47.145501Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:39: [RootShredManager] Created: Timeout# 600, Rate# 0, InflightLimit# 10, ShredInterval# 604800.000000s, ShredBSCInterval# 600.000000s, CurrentWakeupInterval# 604800.000000s, IsManualStartup# false 2026-01-04T03:02:47.146463Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_impl.cpp:8069: ExternalSources configured: HostnamePatterns# , AvailableExternalDataSources# 2026-01-04T03:02:47.147985Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:14: TxInitSchema.Execute 2026-01-04T03:02:47.248216Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7755: Cannot subscribe to console configs 2026-01-04T03:02:47.248282Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:02:47.280316Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__init_schema.cpp:20: TxInitSchema.Complete 2026-01-04T03:02:47.285314Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:96: TTxUpgradeSchema.Execute 2026-01-04T03:02:47.287726Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__upgrade_schema.cpp:44: UpgradeInitState as Uninitialized, schemeshardId: 72057594046678944 2026-01-04T03:02:47.298509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__upgrade_schema.cpp:122: TTxUpgradeSchema.Complete 2026-01-04T03:02:47.298870Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__background_cleaning.cpp:464: Clear TempDirsState with owners number: 0 2026-01-04T03:02:47.302061Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init.cpp:1383: TTxInit, SS hasn't been configured yet, state: 1, at schemeshard: 72057594046678944 2026-01-04T03:02:47.304815Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:33: TTxInitRoot DoExecute, path: MyRoot, pathId: [OwnerId: 72057594046678944, LocalPathId: 1], at schemeshard: 72057594046678944 2026-01-04T03:02:47.334631Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__init_root.cpp:160: TTxInitRoot DoComplete, at schemeshard: 72057594046678944 2026-01-04T03:02:47.338144Z node 1 
:FLAT_TX_SCHEMESHARD NOTICE: schemeshard__root_shred_manager.cpp:84: [RootShredManager] Stop 2026-01-04T03:02:47.361878Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046678944 2026-01-04T03:02:47.361965Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594046678944 2026-01-04T03:02:47.362104Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:28: TTxServerlessStorageBilling.Execute 2026-01-04T03:02:47.362166Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__serverless_storage_billing.cpp:38: TTxServerlessStorageBilling: unable to make a bill, domain is not a serverless db, schemeshardId: 72057594046678944, domainId: [OwnerId: 72057594046678944, LocalPathId: 1] 2026-01-04T03:02:47.362325Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__serverless_storage_billing.cpp:213: TTxServerlessStorageBilling.Complete 2026-01-04T03:02:47.363107Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:7111: Handle: TEvAllocateResult: Cookie# 0, at schemeshard: 72057594046678944 2026-01-04T03:02:47.541509Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot" OperationType: ESchemeOpMkDir MkDir { Name: ".sys" } Internal: true FailOnExist: false } TxId: 281474976710657 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:02:47.546741Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_permissions" Type: EAuthPermissions } } TxId: 281474976710658 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:02:47.546975Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_effective_permissions" Type: EAuthEffectivePermissions } } TxId: 281474976710659 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:02:47.547110Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_users" Type: EAuthUsers } } TxId: 281474976710660 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:02:47.547188Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "pg_tables" Type: EPgTables } } TxId: 281474976710661 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:02:47.547350Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_pdisks" Type: EPDisks } } TxId: 281474976710662 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:02:47.547424Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "auth_group_members" Type: EAuthGroupMembers } } TxId: 281474976710663 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:02:47.547499Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_one_minute" Type: ETopPartitionsByCpuOneMinute } } TxId: 281474976710664 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:02:47.547709Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_request_units_one_hour" Type: ETopQueriesByRequestUnitsOneHour } } TxId: 281474976710665 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:02:47.547809Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "ds_vslots" Type: EVSlots } } TxId: 281474976710666 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:02:47.547914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_partitions_by_tli_one_hour" Type: ETopPartitionsByTliOneHour } } TxId: 281474976710667 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:02:47.548006Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "nodes" Type: ENodes } } TxId: 281474976710668 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:02:47.548090Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: 
ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_cpu_time_one_hour" Type: ETopQueriesByCpuTimeOneHour } } TxId: 281474976710669 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:02:47.548191Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_hour" Type: ETopQueriesByReadBytesOneHour } } TxId: 281474976710670 Owner: "metadata@system" UserToken: "\n\017metadata@system\022\000" 2026-01-04T03:02:47.548267Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_sysviews_update.cpp:127: SysViewsRosterUpdate# [1:212:2213] at schemeshard: 72057594046678944 Send TEvModifySchemeTransaction: Transaction { WorkingDir: "/MyRoot/.sys" OperationType: ESchemeOpCreateSysView Internal: true FailOnExist: false CreateSysView { Name: "top_queries_by_read_bytes_one_minute" Type: ETopQueriesByR ... 4T03:03:12.220198Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-04T03:03:12.220233Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-04T03:03:12.220316Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 109 } Time: 109 2026-01-04T03:03:12.220344Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-04T03:03:12.220376Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-04T03:03:12.220409Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-04T03:03:12.220491Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 110 } Time: 110 2026-01-04T03:03:12.220516Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-04T03:03:12.220545Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-04T03:03:12.220578Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-04T03:03:12.220659Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 111 } Time: 111 2026-01-04T03:03:12.220686Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-04T03:03:12.220717Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-04T03:03:12.220751Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-04T03:03:12.220841Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 112 } Time: 112 2026-01-04T03:03:12.220869Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-04T03:03:12.220901Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-04T03:03:12.220933Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-04T03:03:12.221013Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 113 } Time: 113 2026-01-04T03:03:12.221040Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-04T03:03:12.221069Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-04T03:03:12.221102Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-04T03:03:12.221165Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: 
"test.metric0" Value: 114 } Time: 114 2026-01-04T03:03:12.221191Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-04T03:03:12.221219Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-04T03:03:12.221250Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-04T03:03:12.221339Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 115 } Time: 115 2026-01-04T03:03:12.221367Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-04T03:03:12.221396Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-04T03:03:12.221427Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-04T03:03:12.221511Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 116 } Time: 116 2026-01-04T03:03:12.221537Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-04T03:03:12.221566Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-04T03:03:12.221601Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-04T03:03:12.221680Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 117 } Time: 117 2026-01-04T03:03:12.221706Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-04T03:03:12.221735Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-04T03:03:12.221770Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-04T03:03:12.221862Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 118 } Time: 118 2026-01-04T03:03:12.221893Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-04T03:03:12.221922Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-04T03:03:12.221957Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-04T03:03:12.222042Z node 6 :GRAPH TRACE: shard_impl.cpp:192: SHARD Metrics { Name: "test.metric0" Value: 119 } Time: 119 2026-01-04T03:03:12.222069Z node 6 :GRAPH TRACE: shard_impl.cpp:197: SHARD Executing direct TxStoreMetrics 2026-01-04T03:03:12.222100Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:20: SHARD TTxStoreMetrics::Execute 2026-01-04T03:03:12.222140Z node 6 :GRAPH TRACE: backends.cpp:329: DB Stored metrics 2026-01-04T03:03:12.222216Z node 6 :GRAPH TRACE: shard_impl.cpp:226: SHARD Handle TEvGraph::TEvGetMetrics from [6:1012:2944] 2026-01-04T03:03:12.222295Z node 6 :GRAPH DEBUG: tx_get_metrics.cpp:20: SHARD TTxGetMetrics::Execute 2026-01-04T03:03:12.222352Z node 6 :GRAPH DEBUG: backends.cpp:352: DB Querying from 0 to 119 2026-01-04T03:03:12.241220Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241324Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241358Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241386Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241415Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241443Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241469Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241495Z 
node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241522Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241551Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241578Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241603Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241629Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241657Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241682Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241710Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241734Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241760Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241791Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241840Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241866Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241893Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241920Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241946Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.241973Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242001Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242025Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242054Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242081Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242107Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242133Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242161Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242186Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242211Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242237Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242262Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242287Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242310Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242339Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242369Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242394Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD 
TTxStoreMetrics::Complete 2026-01-04T03:03:12.242419Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242448Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242474Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242499Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242525Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242550Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242579Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242604Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242630Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242656Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242682Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242707Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242732Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242758Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242787Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242813Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242839Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242865Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242893Z node 6 :GRAPH DEBUG: tx_store_metrics.cpp:25: SHARD TTxStoreMetrics::Complete 2026-01-04T03:03:12.242929Z node 6 :GRAPH DEBUG: tx_get_metrics.cpp:25: SHARD TTxGetMetric::Complete 2026-01-04T03:03:12.242991Z node 6 :GRAPH TRACE: tx_get_metrics.cpp:26: SHARD TxGetMetrics returned 60 points for request 3 2026-01-04T03:03:12.243236Z node 6 :GRAPH TRACE: service_impl.cpp:201: SVC TEvMetricsResult 3 2026-01-04T03:03:12.243299Z node 6 :GRAPH TRACE: service_impl.cpp:204: SVC TEvMetricsResult found request 3 resending to [6:1013:2945] |99.0%| [TM] {RESULT} ydb/core/graph/ut/unittest >> TTxDataShardFilterKMeansScan::BuildToPosting [GOOD] >> TTxDataShardFilterKMeansScan::BuildToBuild >> TestFormatHandler::ClientError [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/graph/ut/unittest >> test_insert.py::TestInsertOperations::test_insert_plus_update_per_transaction_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case ------- [TM] {asan, default-linux-x86_64, release} ydb/core/grpc_services/tablet/ut/unittest >> TabletService_Restart::OnlyAdminsAllowed [GOOD] Test command err: ... 
waiting for SysViewsRoster update finished 2026-01-04T03:02:23.513452Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:02:23.698163Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:02:23.698781Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:02:23.698844Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:02:24.111720Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:02:24.111879Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:02:24.208749Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495740704045 != 1767495740704048 2026-01-04T03:02:24.225749Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:02:24.277228Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:02:24.399571Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:02:24.706725Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:02:24.720904Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... reading schema ... changing schema (dry run) ... reading schema ... changing schema ... reading schema ... waiting for SysViewsRoster update finished 2026-01-04T03:02:29.275297Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:02:29.280976Z node 2 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T03:02:29.284790Z node 2 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [2:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:02:29.285036Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:02:29.285168Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:02:29.530663Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:02:29.530780Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:02:29.558821Z node 2 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [2:34:2081] 1767495746474600 != 1767495746474604 2026-01-04T03:02:29.565146Z node 2 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:02:29.614008Z node 2 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:02:29.682933Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:02:29.979461Z node 2 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:02:29.992617Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... reading schema (without token) ... reading schema (non-admin token) ... reading schema (admin token) ... waiting for SysViewsRoster update finished 2026-01-04T03:02:33.872352Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:02:33.878454Z node 3 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T03:02:33.882836Z node 3 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [3:114:2161], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:02:33.883085Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:02:33.883289Z node 3 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:02:34.212589Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:02:34.212726Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:02:34.239234Z node 3 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [3:34:2081] 1767495750912970 != 1767495750912974 2026-01-04T03:02:34.250592Z node 3 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:02:34.303211Z node 3 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(3, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:02:34.392606Z node 3 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:02:34.701836Z node 3 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:02:34.719705Z node 3 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-04T03:02:38.905450Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:02:38.916592Z node 4 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [4:303:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:02:38.917033Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:02:38.917120Z node 4 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:02:39.197384Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:02:39.197547Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:02:39.227129Z node 4 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [4:34:2081] 1767495755562849 != 1767495755562852 2026-01-04T03:02:39.232231Z node 4 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:02:39.280527Z node 4 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(4, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:02:39.355820Z node 4 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:02:39.723472Z node 4 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:02:39.738306Z node 4 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-04T03:02:43.576166Z node 5 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:02:43.582786Z node 5 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T03:02:43.587179Z node 5 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [5:317:2360], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:02:43.587463Z node 5 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:02:43.587629Z node 5 :METADATA_PROVIDER ERROR: log.cpp:84 ... etadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:02:49.213764Z node 6 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:02:49.228640Z node 6 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-04T03:02:53.715191Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:02:53.727364Z node 7 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T03:02:53.730962Z node 7 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [7:303:2347], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:02:53.731290Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:02:53.731420Z node 7 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:02:54.088802Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:02:54.088970Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:02:54.096053Z node 7 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [7:34:2081] 1767495770113507 != 1767495770113510 2026-01-04T03:02:54.103218Z node 7 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:02:54.155239Z node 7 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(7, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:02:54.253048Z node 7 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:02:54.559268Z node 7 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:02:54.574380Z node 7 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-04T03:02:59.206847Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:02:59.221290Z node 8 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [8:315:2358], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:02:59.221447Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:02:59.221512Z node 8 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:02:59.586492Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:02:59.586660Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:02:59.621986Z node 8 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [8:34:2081] 1767495775323859 != 1767495775323863 2026-01-04T03:02:59.626236Z node 8 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:02:59.673792Z node 8 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(8, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:02:59.756721Z node 8 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:03:00.064613Z node 8 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:03:00.084786Z node 8 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... waiting for SysViewsRoster update finished 2026-01-04T03:03:04.596917Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:03:04.604459Z node 9 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T03:03:04.609555Z node 9 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [9:114:2161], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:03:04.609959Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:03:04.610246Z node 9 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:03:04.917719Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:03:04.918004Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:03:04.954315Z node 9 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [9:34:2081] 1767495780897346 != 1767495780897350 2026-01-04T03:03:04.960832Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:03:05.010101Z node 9 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(9, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:03:05.099919Z node 9 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:03:05.399267Z node 9 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:03:05.418387Z node 9 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... restarting tablet 72057594046644480 2026-01-04T03:03:05.605923Z node 9 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded ... waiting for SysViewsRoster update finished 2026-01-04T03:03:10.040933Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:03:10.049634Z node 10 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T03:03:10.052203Z node 10 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [10:299:2343], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:03:10.052441Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:03:10.052566Z node 10 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:03:10.340128Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:03:10.340278Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:03:10.370503Z node 10 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [10:34:2081] 1767495786405682 != 1767495786405685 2026-01-04T03:03:10.376310Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:03:10.427334Z node 10 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(10, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:03:10.511301Z node 10 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:03:10.812225Z node 10 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:03:10.827405Z node 10 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; ... restarting tablet 72057594046644480 (without token) ... restarting tablet 72057594046644480 (non-admin token) ... 
restarting tablet 72057594046644480 (admin token) 2026-01-04T03:03:11.199335Z node 10 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |99.0%| [TM] {RESULT} ydb/core/grpc_services/tablet/ut/unittest |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/grpc_services/tablet/ut/unittest >> TestFormatHandler::ClientErrorWithEmptyFilter |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_insert.py::TestInsertOperations::test_update_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case >> TMemoryController::SharedCache [GOOD] >> TMemoryController::SharedCache_ConfigLimit >> test_insert.py::TestInsertOperations::test_upsert_plus_insert_per_transaction_are_success_prepared_case [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success >> DataShardSnapshotIsolation::ReadWriteObserveOwnChanges [GOOD] >> DataShardSnapshotIsolation::ReadWriteCommitConflictThenRead >> QuoterWithKesusTest::PrefetchCoefficient [GOOD] >> QuoterWithKesusTest::GetsQuotaAfterPause >> KqpTpch::Query10 [GOOD] >> KqpTpch::Query11 |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> ReadUpdateWrite::Load [GOOD] >> test_insert.py::TestInsertOperations::test_insert_plus_upsert_are_success [GOOD] >> test_insert.py::TestInsertOperations::test_insert_revert_basis >> test_read_table.py::TestReadTableSuccessStories::test_read_table_only_specified_ranges [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range >> KeyValueGRPCService::SimpleWriteReadWithoutTokenV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutTokenV2 >> test.py::test[solomon-LabelColumnAliases-default.txt] [GOOD] >> test.py::test[solomon-LabelColumns-default.txt] >> test_insert.py::TestInsertOperations::test_insert_revert_basis [GOOD] >> test_insert.py::TestInsertOperations::test_query_pairs >> test_postgres.py::TestPostgresSuite::test_postgres_suite[numeric] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[name] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] >> TestFormatHandler::ClientErrorWithEmptyFilter [GOOD] >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD] >> TestFormatHandler::Watermark >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[int2] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] >> TTopicReaderTests::TestRun_ReadOneMessage [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_1__SYNC-pk_types8-all_types8-index8---SYNC] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[comments] [GOOD] >> 
TMLPReaderTests::TopicWithData [GOOD] >> TMLPReaderTests::TopicWithManyIterationsData >> TTopicReaderTests::TestRun_ReadMessages_Output_Base64 [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_With_Offset >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_constructed_key_range [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns >> test_read_table.py::TestReadTableSuccessStories::test_read_table_reads_only_specified_columns [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> test_read_table.py::TestReadTableSuccessStories::test_read_table_without_data_has_snapshot [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/olap/high_load/unittest >> ReadUpdateWrite::Load [GOOD] Test command err: Step 1. only write Was written: 0 MiB, Speed: 0 MiB/s Write: 10% Write: 10% 1.152987s 30% 1.152987s 50% 1.152987s 90% 1.152987s 99% 1.152987s 1.039605s 30% 1.039605s 50% 1.039605s 90% 1.039605s 99% 1.039605s Write: 10% 0.789990s 30% 0.789990s 50% 0.789990s 90% 0.789990s 99% 0.789990s Write: 10% 1.239364s 30% 1.239364s 50% 1.239364s 90% 1.239364s 99% 1.239364s Write: 10% 1.247856s 30% 1.247856s 50% 1.247856s 90% 1.247856s 99% 1.247856s Write: 10% 1.212471s 30% 1.212471s 50% 1.212471s 90% 1.212471s 99% 1.212471s Write: 10% 1.262168s 30% 1.262168s 50% Write: 10% 1.023858s 30% 1.023858s 50% 1.262168s 90% 1.262168s 99% 1.262168s 1.023858s 90% 1.023858s 99% 1.023858sWrite: 10% 0.811232s 30% 0.811232s 50% 0.811232s 90% 0.811232s 99% 0.811232s Write: 10% 1.138769s 30% 1.138769s 50% 1.138769s 90% 1.138769s 99% 1.138769s Write: 10% 0.849708s 30% 0.849708s 50% 0.849708s 90% 0.849708s 99% 0.849708s Write: 10% 1.142474s 30% 1.142474s 50% 1.142474s 90% 1.142474s 99% 1.142474s Write: 10% 1.127566s 30% 1.127566s 50% 1.127566s 90% 1.127566s 99% 1.127566s Write: 10% 0.902754s 30% 0.902754s 50% 0.902754s 90% 0.902754s 99% 0.902754s Write: 10% 1.075795s 30% 1.075795s 50% 1.075795s 90% 1.075795s 99% 1.075795s Write: 10% 1.393587s 30% 1.393587s 50% 1.393587s 90% 1.393587s 99% 1.393587s Write: 10% 1.237859s 30% 1.237859s 50% 1.237859s 90% 1.237859s 99% 1.237859s Write: 10% 0.802313s 30% 0.802313s 50% 0.802313s 90% 0.802313s 99% 0.802313s Write: 10% 1.403123s 30% 1.403123s 50% 1.403123s 90% 1.403123s 99% 1.403123s Write: 10% 1.557552s 30% 1.557552s 50% 1.557552s 90% 1.557552s 99% 1.557552s Write: 10% 0.769654s 30% 0.769654s 50% Write: 10% 1.455537s 30% 1.455537s 50% 1.455537s 90% 1.455537s 99% 1.455537s 0.769654s 90% 0.769654s 99% 0.769654s Write: 10% 1.607564s 30% 1.607564s 50% 1.607564s 90% 1.607564s 99% 1.607564s Write: 10% 1.224834s 30% 1.224834s 50% 1.224834s 90% 1.224834s 99% 1.224834s Write: 10% 1.289050s 30% 1.289050s 50% 1.289050s 90% 1.289050s 99% 1.289050s Write: 10% 1.392498s 30% 1.392498s 50% 1.392498s 90% 1.392498s 99% 1.392498s Write: 10% 1.506847s 30% 1.506847s 50% 1.506847s 90% 1.506847s 99% 1.506847s Write: 10% 1.103275s 30% 1.103275s 50% 1.103275s 90% 1.103275s 99% 1.103275s Write: 10% 1.280698s 30% 1.280698s 50% 1.280698s 90% 1.280698s 99% 1.280698s Write: 10% 1.709572s 30% 1.709572s 50% 1.709572s 90% 1.709572s 99% 1.709572s Write: 10% 1.272230s 30% 1.272230s 50% 1.272230s 90% 1.272230s 99% 1.272230s Write: 10% 1.177432s 30% 1.177432s 50% 1.177432s 90% 1.177432s 99% 1.177432s Write: 10% 1.321063s 30% 1.321063s 
50% 1.321063s 90% 1.321063s 99% 1.321063s Write: 10% 0.901835s 30% 0.901835s 50% 0.901835s 90% 0.901835s 99% 0.901835s Write: 10% 1.652546s 30% 1.652546s 50% 1.652546s 90% 1.652546s 99% 1.652546s Write: 10% 0.700733s 30% 0.700733s 50% 0.700733s 90% 0.700733s 99% 0.700733s Write: 10% 1.655379s 30% 1.655379s 50% 1.655379s 90% 1.655379s 99% 1.655379s Write: 10% 0.758954s 30% 0.758954s 50% 0.758954s 90% 0.758954s 99% 0.758954s Write: 10% 1.302769s 30% 1.302769s 50% 1.302769s 90% 1.302769s 99% 1.302769s Write: 10% 0.747465s 30% 0.747465s 50% 0.747465s 90% 0.747465s 99% 0.747465s Write: 10% 1.513832s 30% 1.513832s 50% 1.513832s 90% 1.513832s 99% 1.513832s Write: 10% 1.141131s 30% 1.141131s 50% 1.141131s 90% 1.141131s 99% 1.141131s Write: 10% 1.549331s 30% 1.549331s 50% 1.549331s 90% 1.549331s 99% 1.549331s Write: 10% 0.958252s 30% 0.958252s 50% 0.958252s 90% 0.958252s 99% 0.958252s Write: 10% 0.835149s 30% 0.835149s 50% 0.835149s 90% 0.835149s 99% 0.835149s Write: 10% 0.807203s 30% 0.807203s 50% 0.807203s 90% 0.807203s 99% 0.807203s Write: 10% 0.823300s 30% 0.823300s 50% 0.823300s 90% 0.823300s 99% 0.823300s Write: 10% 0.655877s 30% 0.655877s 50% 0.655877s 90% 0.655877s 99% 0.655877s Write: 10% 1.331500s 30% 1.331500s 50% 1.331500s 90% 1.331500s 99% 1.331500s Write: 10% 1.631934s 30% 1.631934s 50% 1.631934s 90% 1.631934s 99% 1.631934s Write: 10% 0.905183s 30% 0.905183s 50% 0.905183s 90% 0.905183s 99% 0.905183s Write: 10% 0.888266s 30% 0.888266s 50% 0.888266s 90% 0.888266s 99% 0.888266sWrite: 10% 0.781966s 30% 0.781966s 50% 0.781966s 90% 0.781966s 99% 0.781966s Write: 10% 0.936573s 30% 0.936573s 50% 0.936573s 90% 0.936573s 99% 0.936573s Write: 10% 1.199724s 30% 1.199724s 50% 1.199724s 90% 1.199724s 99% 1.199724s Write: 10% 0.800069s 30% 0.800069s 50% 0.800069s 90% 0.800069s 99% 0.800069s Write: 10% 1.194547s 30% 1.194547s 50% 1.194547s 90% 1.194547s 99% 1.194547s Write: 10% 1.371477s 30% 1.371477s 50% 1.371477s 90% 1.371477s 99% 1.371477s Write: 10% 0.952988s 30% 0.952988s 50% 0.952988s 90% 0.952988s 99% 0.952988s Write: 10% 1.207856s 30% 1.207856s 50% 1.207856s 90% 1.207856s 99% 1.207856s Write: 10% 0.790259s 30% 0.790259s 50% 0.790259s 90% 0.790259s 99% 0.790259s Write: 10% 1.051211s 30% 1.051211s 50% 1.051211s 90% 1.051211s 99% 1.051211sWrite: 10% 1.379949s 30% 1.379949s 50% 1.379949s 90% 1.379949s 99% Write: 10% 1.223601s 30% 1.223601s 50% 1.379949s 1.223601s 90% 1.223601s 99% 1.223601s Step 2. 
read write Write: 10% 0.423688s 30% 0.423688s 50% 0.423688s 90% 0.423688s 99% 0.423688s Write: 10% 0.501295s 30% 0.501295s 50% 0.501295s 90% 0.501295s 99% 0.501295s Write: 10% 0.446311s 30% 0.446311s 50% Write: 10% 0.473495s 30% 0.473495s 50% Write: 10% 0.520102s 30% 0.520102s 50% 0.520102s 90% 0.520102s 99% 0.520102s 0.473495s 90% 0.473495s 99% 0.473495s0.446311s 90% 0.446311s 99% 0.446311s Write: 10% 0.516234s 30% 0.516234s 50% Write: 10% 0.512798s 30% 0.512798s 50% 0.512798s 90% 0.512798s 99% 0.512798s 0.516234s 90% 0.516234s 99% 0.516234sWrite: 10% 0.538971s 30% 0.538971s 50% 0.538971s 90% 0.538971s 99% 0.538971s Write: 10% 0.578536s 30% 0.578536s 50% 0.578536s 90% 0.578536s 99% 0.578536s Write: 10% 0.682364s 30% 0.682364s 50% 0.682364s 90% 0.682364s 99% 0.682364s Write: 10% 0.665867s 30% 0.665867s 50% 0.665867s 90% 0.665867s 99% 0.665867s Write: 10% 0.653058s 30% 0.653058s 50% Write: 10% 0.652714s 30% 0.652714s 50% 0.653058s 90% 0.653058s 99% 0.653058s 0.652714s 90% 0.652714s 99% 0.652714sWrite: 10% 0.668313s 30% 0.668313s 50% 0.668313s 90% 0.668313s 99% 0.668313s Write: 10% 0.712106s 30% 0.712106s 50% Write: 10% 0.758208s 30% 0.758208s 50% 0.758208s 90% 0.758208s 99% 0.758208s 0.712106s 90% 0.712106s 99% 0.712106s Write: 10% 0.766108s 30% 0.766108s 50% 0.766108s 90% 0.766108s 99% 0.766108s Write: 10% 0.726678s 30% 0.726678s 50% Write: 10% 0.570340s 30% 0.570340s 50% 0.570340s 90% 0.570340s 99% 0.570340s 0.726678s 90% 0.726678s 99% 0.726678sWrite: 10% 0.775185s 30% 0.775185s 50% Write: 10% 0.775345s 30% 0.775345s 50% 0.775185s 90% 0.775185s 99% 0.775185sWrite: 10% 0.762161s 30% 0.762161s 50% 0.762161s 90% 0.762161s 99% 0.762161s 0.775345s 90% 0.775345s 99% 0.775345s Write: 10% 0.843150s 30% 0.843150s 50% 0.843150s 90% 0.843150s 99% 0.843150s Write: 10% 0.790641sWrite: 10% 0.832934s 30% 0.832934s 50% 30% 0.790641s 50% 0.832934s 90% 0.832934s 99% 0.832934s 0.790641s 90% 0.790641s 99% 0.790641s Write: 10% 0.538604s 30% 0.538604s 50% 0.538604s 90% 0.538604s 99% 0.538604s Write: 10% 0.796536s 30% 0.796536s 50% 0.796536s 90% 0.796536s 99% 0.796536s Write: 10% 0.803855s 30% 0.803855s 50% 0.803855s 90% 0.803855s 99% 0.803855s Write: 10% 0.845754s 30% 0.845754s 50% 0.845754s 90% 0.845754s 99% 0.845754s Write: 10% 0.782187s 30% 0.782187s 50% 0.782187s 90% 0.782187s 99% 0.782187s Write: 10% 0.524736s 30% 0.524736s 50% 0.524736s 90% 0.524736s 99% 0.524736s Write: 10% 0.630338s 30% 0.630338s 50% 0.630338s 90% 0.630338s 99% 0.630338s Write: 10% 0.901022s 30% 0.901022s 50% 0.901022s 90% 0.901022s 99% 0.901022s Write: 10% 0.615421s 30% 0.615421s 50% 0.615421s 90% 0.615421s 99% 0.615421s Write: 10% 0.686344s 30% 0.686344s 50% 0.686344s 90% 0.686344s 99% 0.686344s Write: 10% 0.635231s 30% 0.635231s 50% 0.635231s 90% 0.635231s 99% 0.635231s Write: 10% 0.352439s 30% 0.352439s 50% 0.352439s 90% 0.352439s 99% 0.352439s Write: 10% 0.328945s 30% 0.328945s 50% 0.328945s 90% 0.328945s 99% 0.328945s Write: 10% 0.949396s 30% 0.949396s 50% 0.949396s 90% 0.949396s 99% 0.949396s Write: 10% 0.488393s 30% 0.488393s 50% 0.488393s 90% 0.488393s 99% 0.488393s Write: 10% 0.651597s 30% 0.651597s 50% 0.651597s 90% 0.651597s 99% 0.651597s Write: 10% 0.723493s 30% 0.723493s 50% 0.723493s 90% 0.723493s 99% 0.723493s Write: 10% 0.409834s 30% 0.409834s 50% 0.409834s 90% 0.409834s 99% 0.409834s Write: 10% 0.741041s 30% 0.741041s 50% 0.741041s 90% 0.741041s 99% 0.741041s Write: 10% 0.451430s 30% 0.451430s 50% 0.451430s 90% 0.451430s 99% 0.451430s Write: 10% 0.843829s 30% 0.843829s 50% 0.843829s 90% 0.843829s 99% 0.843829s Write: 
10% 0.756768s 30% 0.756768s 50% 0.756768s 90% 0.756768s 99% 0.756768s Write: 10% 0.714207s 30% 0.714207s 50% 0.714207s 90% 0.714207s 99% 0.714207s Write: 10% 0.798234s 30% 0.798234s 50% 0.798234s 90% 0.798234s 99% 0.798234s Write: 10% 0.810890s 30% 0.810890s 50% 0.810890s 90% 0.810890s 99% 0.810890s Write: 10% 0.354319s 30% 0.354319s 50% 0.354319s 90% 0.354319s 99% 0.354319s Write: 10% 0.551637s 30% 0.551637s 50% 0.551637s 90% 0.551637s 99% 0.551637s Write: 10% 0.354203s 30% 0.354203s 50% 0.354203s 90% 0.354203s 99% 0.354203s Write: 10% 0.688591s 30% 0.688591s 50% 0.688591s 90% 0.688591s 99% 0.688591s Write: 10% 0.390046s 30% 0.390046s 50% 0.390046s 90% 0.390046s 99% 0.390046s Write: 10% 0.289034s 30% 0.289034s 50% 0.289034s 90% 0.289034s 99% 0.289034s Write: 10% 0.326564s 30% 0.326564s 50% 0.326564s 90% 0.326564s 99% 0.326564s Write: 10% 0.386002s 30% 0.386002s 50% 0.386002s 90% 0.386002s 99% 0.386002s Write: 10% 0.369907s 30% 0.369907s 50% Write: 10% 0.261687s 30% 0.261687s 50% 0.261687s 90% 0.261687s 99% 0.261687s 0.369907s 90% 0.369907s 99% 0.369907s Write: 10% 0.323465s 30% 0.323465s 50% 0.323465s 90% 0.323465s 99% 0.323465s Write: 10% 0.512622s 30% 0.512622s 50% 0.512622s 90% 0.512622s 99% 0.512622s Write: 10% 0.387235s 30% 0.387235s 50% 0.387235s 90% 0.387235s 99% 0.387235s Write: 10% 0.390411s 30% 0.390411s 50% 0.390411s 90% 0.390411s 99% 0.390411s Read: 10% 1.696408s 30% 1.696408s 50% 1.696408s 90% 1.696408s 99% 1.696408s Step 3. write modify Write: 10% 0.323820s 30% 0.323820s 50% 0.323820s 90% 0.323820s 99% 0.323820s Write: 10% 0.182828s 30% 0.182828s 50% 0.182828s 90% 0.182828s 99% 0.182828s Write: 10% 0.305485s 30% 0.305485s 50% 0.305485s 90% 0.305485s 99% 0.305485s Write: 10% 0.215284s 30% 0.215284s 50% 0.215284s 90% 0.215284s 99% 0.215284s Write: 10% 0.226068s 30% 0.226068s 50% 0.226068s 90% 0.226068s 99% 0.226068s Write: 10% 0.211737s 30% 0.211737s 50% Write: 10% 0.231208s 30% 0.231208s 50% 0.231208s 90% 0.231208s 99% 0.231208s0.211737s 90% 0.211737s 99% 0.211737s Write: 10% 0.329436s 30% 0.329436s 50% 0.329436s 90% 0.329436s 99% 0.329436s Write: 10% 0.231724s 30% 0.231724s 50% 0.231724s 90% 0.231724s 99% 0.231724sWrite: 10% Write: 10% 0.378130s 30% 0.381168s0.378130s 30% 0.381168s 50% 50% Write: 10% 0.322887s 30% 0.322887s 50% 0.322887s 90% 0.322887s 99% 0.322887s0.381168s 90% 0.378130s 90% 0.378130s 99% 0.378130s0.381168s 99% 0.381168s Write: 10% 0.318982s 30% 0.318982s 50% 0.318982s 90% 0.318982s 99% 0.318982s Write: 10% 0.326868s 30% 0.326868s 50% 0.326868s 90% 0.326868s 99% 0.326868s Write: 10% 0.384895s 30% 0.384895s 50% 0.384895s 90% 0.384895s 99% 0.384895s Write: 10% 0.410249s 30% 0.410249s 50% Write: 10% 0.372366s 30% 0.372366s 50% 0.372366s 90% 0.372366s 99% 0.372366s 0.410249s 90% 0.410249s 99% 0.410249sWrite: 10% 0.494174s 30% 0.494174s 50% 0.494174s 90% 0.494174s 99% 0.494174s Write: 10% 0.303544s 30% 0.303544s 50% Write: 10% 0.423889s 30% 0.423889s 50% Write: 10% 0.433249s 30% 0.433249s 50% 0.433249s 90% 0.433249s 99% 0.433249s Write: 10% 0.413190s 30% 0.413190s 50% 0.423889s 90% 0.423889s 99% 0.423889s0.303544s 90% 0.303544s 99% 0.303544s 0.413190s 90% 0.413190s 99% 0.413190s Write: 10% 0.529994s 30% 0.529994s 50% 0.529994s 90% 0.529994s 99% 0.529994sWrite: 10% 0.411504s 30% 0.411504s 50% 0.411504s 90% 0.411504s 99% 0.411504s Write: 10% 0.580918s 30% 0.580918s 50% 0.580918s 90% 0.580918s 99% 0.580918s Write: 10% 0.200083s 30% 0.200083s 50% 0.200083s 90% 0.200083s 99% 0.200083s Write: 10% 0.150562s 30% 0.150562s 50% 0.150562s 90% 0.150562s 99% 0.150562s Write: 
10% 0.489436s 30% 0.489436s 50% Write: 10% 0.507892s 30% 0.507892s 50% 0.507892s 90% 0.507892s 99% 0.507892s 0.489436s 90% 0.489436s 99% 0.489436s Write: 10% 0.188394s 30% 0.188394s 50% 0.188394s 90% 0.188394s 99% 0.188394sWrite: 10% 0.282329s 30% 0.282329s 50% Write: 10% 0.486696s 30% 0.486696s 50% Write: 10% 0.244764s 30% 0.244764s 50% 0.282329s 90% 0.282329s 99% 0.282329sWrite: 10% 0.202938s 30% 0.202938s 50% 0.244764s 90% 0.244764s 99% 0.244764s 0.486696s 90% 0.486696s 99% 0.486696sWrite: 10% 0.561978s 30% 0.561978s 50% 0.561978s 90% 0.561978s 99% 0.561978s 0.202938s 90% 0.202938s 99% 0.202938s Write: 10% 0.235321s 30% 0.235321s 50% 0.235321s 90% 0.235321s 99% 0.235321s Write: 10% 0.201899s 30% 0.201899s 50% 0.201899s 90% 0.201899s 99% 0.201899s Write: 10% 0.169956s 30% 0.169956s 50% 0.169956s 90% 0.169956s 99% 0.169956s Write: 10% 0.199234s 30% 0.199234s 50% 0.199234s 90% 0.199234s 99% 0.199234s Write: 10% 0.223746s 30% 0.223746s 50% 0.223746s 90% 0.223746s 99% 0.223746s Write: 10% 0.180913s 30% 0.180913s 50% 0.180913s 90% 0.180913s 99% 0.180913s Write: 10% 0.222240s 30% 0.222240s 50% 0.222240s 90% 0.222240s 99% 0.222240s Write: 10% 0.138946s 30% 0.138946s 50% 0.138946s 90% 0.138946s 99% 0.138946s Write: 10% 0.120423s 30% 0.120423s 50% 0.120423s 90% 0.120423s 99% 0.120423s Write: 10% 0.202490s 30% 0.202490s 50% 0.202490s 90% 0.202490s 99% 0.202490s Write: 10% 0.256499s 30% 0.256499s 50% 0.256499s 90% 0.256499s 99% 0.256499s Write: 10% 0.106045s 30% 0.106045s 50% 0.106045s 90% 0.106045s 99% 0.106045s Write: 10% 0.153317s 30% 0.153317s 50% 0.153317s 90% 0.153317s 99% 0.153317s Write: 10% 0.127461s 30% 0.127461s 50% 0.127461s 90% 0.127461s 99% 0.127461s Write: 10% 0.272842s 30% 0.272842s 50% 0.272842s 90% 0.272842s 99% 0.272842s Write: 10% 0.151436s 30% 0.151436s 50% 0.151436s 90% 0.151436s 99% 0.151436s Write: 10% 0.283066s 30% 0.283066s 50% 0.283066s 90% 0.283066s 99% 0.283066s Write: 10% 0.219207s 30% 0.219207s 50% 0.219207s 90% 0.219207s 99% 0.219207s Write: 10% 0.191342s 30% 0.191342s 50% 0.191342s 90% 0.191342s 99% 0.191342s Write: 10% 0.289727s 30% 0.289727s 50% 0.289727s 90% 0.289727s 99% 0.289727s Write: 10% 0.290849s 30% 0.290849s 50% 0.290849s 90% 0.290849s 99% 0.290849s Write: 10% 0.494684s 30% 0.494684s 50% 0.494684s 90% 0.494684s 99% 0.494684s Write: 10% 0.209038s 30% 0.209038s 50% 0.209038s 90% 0.209038s 99% 0.209038s Write: 10% 0.223980s 30% 0.223980s 50% 0.223980s 90% 0.223980s 99% 0.223980s Write: 10% 0.245158s 30% 0.245158s 50% 0.245158s 90% 0.245158s 99% 0.245158s Write: 10% Write: 10% 0.225519s 30% 0.225519s 50% 0.227593s 30% 0.227593s0.225519s 90% 0.225519s 99% 0.225519s 50% 0.227593s 90% 0.227593s 99% 0.227593s Write: 10% 0.251808s 30% 0.251808s 50% 0.251808s 90% 0.251808s 99% 0.251808s Write: 10% 0.236135s 30% 0.236135s 50% 0.236135s 90% 0.236135s 99% 0.236135s Update: 10% 0.257810s 30% 0.257810s 50% 0.257810s 90% 0.257810s 99% 0.257810s Step 4. 
read modify write Write: 10% 0.220207s 30% 0.220207s 50% 0.220207s 90% 0.220207s 99% 0.220207s Write: 10% 0.176680s 30% 0.176680s 50% 0.176680s 90% 0.176680s 99% 0.176680s Write: 10% 0.568601s 30% 0.568601s 50% 0.568601s 90% 0.568601s 99% 0.568601s Write: 10% 0.488951s 30% 0.488951s 50% 0.488951s 90% 0.488951s 99% 0.488951s Write: 10% 0.444787s 30% 0.444787s 50% 0.444787s 90% 0.444787s 99% 0.444787s Write: 10% 1.080587s 30% 1.080587s 50% 1.080587s 90% 1.080587s 99% 1.080587s Write: 10% 2.304870s 30% 2.304870s 50% 2.304870s 90% 2.304870s 99% 2.304870s Write: 10% 3.943656s 30% 3.943656s 50% 3.943656s 90% 3.943656s 99% 3.943656s Write: 10% 3.826991s 30% 3.826991s 50% 3.826991s 90% 3.826991s 99% 3.826991s Write: 10% 3.967495s 30% 3.967495s 50% 3.967495s 90% 3.967495s 99% 3.967495s Write: 10% 4.045451s 30% 4.045451s 50% 4.045451s 90% 4.045451s 99% 4.045451s Write: 10% 4.149026s 30% 4.149026s 50% 4.149026s 90% 4.149026s 99% 4.149026s Write: 10% 4.074884s 30% 4.074884s 50% 4.074884s 90% 4.074884s 99% 4.074884s Write: 10% 4.273216s 30% 4.273216s 50% 4.273216s 90% 4.273216s 99% 4.273216s Write: 10% 4.193144s 30% 4.193144s 50% 4.193144s 90% 4.193144s 99% 4.193144s Write: 10% 4.275093s 30% 4.275093s 50% 4.275093s 90% 4.275093s 99% 4.275093s Write: 10% 4.280691s 30% 4.280691s 50% 4.280691s 90% 4.280691s 99% 4.280691s Write: 10% 4.340440s 30% 4.340440s 50% 4.340440s 90% 4.340440s 99% 4.340440s Was written: 87 MiB, Speed: 8 MiB/s Write: 10% 4.413432s 30% 4.413432s 50% 4.413432s 90% 4.413432s 99% 4.413432s Write: 10% 4.309002s 30% 4.309002s 50% 4.309002s 90% 4.309002s 99% 4.309002s Write: 10% 4.315653s 30% 4.315653s 50% 4.315653s 90% 4.315653s 99% 4.315653s Write: 10% 4.451573s 30% 4.451573s 50% 4.451573s 90% 4.451573s 99% 4.451573s Write: 10% 4.259767s 30% 4.259767s 50% 4.259767s 90% 4.259767s 99% 4.259767s Write: 10% 4.447317s 30% 4.447317s 50% 4.447317s 90% 4.447317s 99% 4.447317s Write: 10% 4.525442s 30% 4.525442s 50% 4.525442s 90% 4.525442s 99% 4.525442s Write: 10% 4.596541s 30% 4.596541s 50% 4.596541s 90% 4.596541s 99% 4.596541s Write: 10% 4.583657s 30% 4.583657s 50% 4.583657s 90% 4.583657s 99% 4.583657s Write: 10% 4.724445s 30% 4.724445s 50% 4.724445s 90% 4.724445s 99% 4.724445s Write: 10% 4.732388s 30% 4.732388s 50% 4.732388s 90% 4.732388s 99% 4.732388s Write: 10% 4.730058s 30% 4.730058s 50% 4.730058s 90% 4.730058s 99% 4.730058s Write: 10% 4.755131s 30% 4.755131s 50% 4.755131s 90% 4.755131s 99% 4.755131s Write: 10% 4.922106s 30% 4.922106s 50% 4.922106s 90% 4.922106s 99% 4.922106s Write: 10% 4.914224s 30% 4.914224s 50% 4.914224s 90% 4.914224s 99% 4.914224s Write: 10% 4.903799s 30% 4.903799s 50% 4.903799s 90% 4.903799s 99% 4.903799s Write: 10% 4.892228s 30% 4.892228s 50% 4.892228s 90% 4.892228s 99% 4.892228s Write: 10% 4.937963s 30% 4.937963s 50% 4.937963s 90% 4.937963s 99% 4.937963s Write: 10% 4.941832s 30% 4.941832s 50% 4.941832s 90% 4.941832s 99% 4.941832s Write: 10% 4.990479s 30% 4.990479s 50% 4.990479s 90% 4.990479s 99% 4.990479s Write: 10% 5.008163s 30% 5.008163s 50% 5.008163s 90% 5.008163s 99% 5.008163s Write: 10% 5.011136s 30% 5.011136s 50% 5.011136s 90% 5.011136s 99% 5.011136s Write: 10% 5.036958s 30% 5.036958s 50% 5.036958s 90% 5.036958s 99% 5.036958s Write: 10% 5.066851s 30% 5.066851s 50% 5.066851s 90% 5.066851s 99% 5.066851s Write: 10% 5.066337s 30% 5.066337s 50% 5.066337s 90% 5.066337s 99% 5.066337s Write: 10% 5.093536s 30% 5.093536s 50% 5.093536s 90% 5.093536s 99% 5.093536s Write: 10% 5.387565s 30% 5.387565s 50% 5.387565s 90% 5.387565s 99% 5.387565s Write: 10% 5.550211s 30% 5.550211s
50% 5.550211s 90% 5.550211s 99% 5.550211s Write: 10% 5.607258s 30% 5.607258s 50% 5.607258s 90% 5.607258s 99% 5.607258s Write: 10% 5.655502s 30% 5.655502s 50% 5.655502s 90% 5.655502s 99% 5.655502s Write: 10% 5.665482s 30% 5.665482s 50% 5.665482s 90% 5.665482s 99% 5.665482s Write: 10% 5.735457s 30% 5.735457s 50% 5.735457s 90% 5.735457s 99% 5.735457s Write: 10% 5.740931s 30% 5.740931s 50% 5.740931s 90% 5.740931s 99% 5.740931s Write: 10% 5.720602s 30% 5.720602s 50% 5.720602s 90% 5.720602s 99% 5.720602s Write: 10% 5.770561s 30% 5.770561s 50% 5.770561s 90% 5.770561s 99% 5.770561s Write: 10% 5.800543s 30% 5.800543s 50% 5.800543s 90% 5.800543s 99% 5.800543s Write: 10% 5.830485s 30% 5.830485s 50% 5.830485s 90% 5.830485s 99% 5.830485s Write: 10% 5.791245s 30% 5.791245s 50% 5.791245s 90% 5.791245s 99% 5.791245s Write: 10% 5.819694s 30% 5.819694s 50% 5.819694s 90% 5.819694s 99% 5.819694s Write: 10% 5.812899s 30% 5.812899s 50% 5.812899s 90% 5.812899s 99% 5.812899s Write: 10% 5.842639s 30% 5.842639s 50% 5.842639s 90% 5.842639s 99% 5.842639s Write: 10% 5.850002s 30% 5.850002s 50% 5.850002s 90% 5.850002s 99% 5.850002s Write: 10% 5.891514s 30% 5.891514s 50% 5.891514s 90% 5.891514s 99% 5.891514s Write: 10% 5.936582s 30% 5.936582s 50% 5.936582s 90% 5.936582s 99% 5.936582s Write: 10% 5.958386s 30% 5.958386s 50% 5.958386s 90% 5.958386s 99% 5.958386s Write: 10% 5.951234s 30% 5.951234s 50% 5.951234s 90% 5.951234s 99% 5.951234s Update: 10% 5.964085s 30% 5.964085s 50% 5.964085s 90% 5.964085s 99% 5.964085s Read: 10% 0.900485s 30% 0.900485s 50% 1.833821s 90% 4.389780s 99% 4.389780s |99.0%| [TM] {RESULT} ydb/tests/olap/high_load/unittest |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/olap/high_load/unittest >> TMemoryController::SharedCache_ConfigLimit [GOOD] >> TMemoryController::MemTable |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> TestFormatHandler::Watermark [GOOD] >> KqpTpch::Query11 [GOOD] >> KqpTpch::Query12 >> TTopicReaderTests::TestRun_ReadTwoMessages_With_Limit_1 [GOOD] >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent >> DataShardStats::CollectStatsForSeveralParts [GOOD] >> DataShardStats::NoData >> TestFormatHandler::WatermarkWhere >> TTxDataShardFilterKMeansScan::BuildToBuild [GOOD] >> TTxDataShardLocalKMeansScan::BadRequest >> test.py::test[solomon-LabelColumns-default.txt] [GOOD] >> test.py::test[solomon-Subquery-default.txt] [SKIPPED] >> test.py::test[solomon-UnknownSetting-] >> QuoterWithKesusTest::GetsQuotaAfterPause [GOOD] >> QuoterWithKesusTest::GetsSeveralQuotas >> DataShardSnapshotIsolation::ReadWriteCommitConflictThenRead [GOOD] >> DataShardSnapshotIsolation::ReadWriteCommitConflictWhileReading >> test_postgres.py::TestPostgresSuite::test_postgres_suite[boolean] [GOOD] >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] >> KqpTpch::Query12 [GOOD] >> KqpTpch::Query13 >> KeyValueGRPCService::SimpleWriteReadWithoutTokenV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1V1 |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_4__SYNC-pk_types0-all_types0-index0---SYNC] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test >> TestFormatHandler::WatermarkWhere [GOOD] >> TestFormatHandler::WatermarkWhereFalse |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/wardens/py3test >> 
test_liveness_wardens.py::TestLivenessWarden::test_scheme_shard_has_no_in_flight_transactions [GOOD] |99.0%| [TM] {RESULT} ydb/tests/functional/wardens/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] >> TopicSessionTests::WrongJson [GOOD] >> KqpTpch::Query13 [GOOD] >> KqpTpch::Query14 |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/wardens/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> TopicSessionTests::WrongJsonOffset >> TTxDataShardLocalKMeansScan::BadRequest [GOOD] >> TTxDataShardLocalKMeansScan::TooManyClusters >> KqpTpch::Query14 [GOOD] >> KqpTpch::Query15 >> test.py::test[solomon-UnknownSetting-] [GOOD] >> TestFormatHandler::WatermarkWhereFalse [GOOD] >> TestJsonParser::Simple1 |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/common/py3test >> test_unknown_data_source.py::TestUnknownDataSource::test_should_fail_unknown_data_source[v1-client0] [GOOD] |99.0%| [TM] {RESULT} ydb/tests/fq/common/py3test >> TestJsonParser::Simple1 [GOOD] >> TestJsonParser::Simple2 [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/common/py3test >> TestJsonParser::Simple3 >> TestJsonParser::Simple3 [GOOD] >> TestJsonParser::Simple4 >> TestJsonParser::Simple4 [GOOD] >> TestJsonParser::LargeStrings >> TestJsonParser::LargeStrings [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, pic, release} ydb/tests/fq/solomon/py3test >> test.py::test[solomon-UnknownSetting-] [GOOD] |99.0%| [TM] {RESULT} ydb/tests/fq/solomon/py3test >> TestJsonParser::ManyValues |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/solomon/py3test >> TestJsonParser::ManyValues [GOOD] >> TestJsonParser::MissingFields [GOOD] >> TestJsonParser::NestedTypes >> TestJsonParser::NestedTypes [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] >> TestJsonParser::SimpleBooleans >> TestJsonParser::SimpleBooleans [GOOD] >> TestJsonParser::ChangeParserSchema >> TestJsonParser::ChangeParserSchema [GOOD] >> TestJsonParser::ManyBatches >> TestJsonParser::ManyBatches [GOOD] >> QuoterWithKesusTest::GetsSeveralQuotas [GOOD] >> QuoterWithKesusTest::KesusRecreation >> TestJsonParser::LittleBatches [GOOD] >> TestJsonParser::MissingFieldsValidation [GOOD] >> TestJsonParser::TypeKindsValidation >> TestJsonParser::TypeKindsValidation [GOOD] >> TestJsonParser::NumbersValidation [GOOD] >> TestJsonParser::StringsValidation |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> TestJsonParser::StringsValidation [GOOD] >> TestJsonParser::NestedJsonValidation >> KqpTpch::Query15 [GOOD] >> KqpTpch::Query16 |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> TestJsonParser::NestedJsonValidation [GOOD] >> TestJsonParser::BoolsValidation >> TestJsonParser::BoolsValidation [GOOD] >> TestJsonParser::JsonStructureValidation >> TestJsonParser::JsonStructureValidation [GOOD] >> TestJsonParser::SkipErrors_Simple1 >> TestJsonParser::SkipErrors_Simple1 [GOOD] >> TestJsonParser::SkipErrors_StringValidation >> TestJsonParser::SkipErrors_StringValidation [GOOD] >> TestJsonParser::SkipErrors_NoField >> DataShardSnapshotIsolation::ReadWriteCommitConflictWhileReading [GOOD] >> TestJsonParser::SkipErrors_NoField [GOOD] >> TestJsonParser::SkipErrors_NoJson >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1V1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1V2 >> 
TestJsonParser::SkipErrors_NoJson [GOOD] >> TestJsonParser::SkipErrors_Optional >> TestJsonParser::SkipErrors_Optional [GOOD] >> TestJsonParser::SkipErrors1JsonIn2Messages >> TestJsonParser::SkipErrors1JsonIn2Messages [GOOD] >> TestPurecalcFilter::Simple1 >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_rows [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NewlineDelimited [GOOD] >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_snapshot_isolation/unittest >> DataShardSnapshotIsolation::ReadWriteCommitConflictWhileReading [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T03:01:26.200376Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:01:26.364364Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:01:26.365091Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:01:26.365154Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:01:26.769005Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:26.769138Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:26.846107Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495682807067 != 1767495682807070 2026-01-04T03:01:26.873514Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:26.927087Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:27.076766Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:01:27.491708Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:01:27.506149Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:01:28.206100Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:865:2748], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:28.206318Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:28.206857Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:883:2753], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:28.206939Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:28.300670Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:28.351847Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:904:2773], Recipient [1:912:2779]: NKikimr::TEvTablet::TEvBoot 2026-01-04T03:01:28.352929Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:904:2773], Recipient [1:912:2779]: NKikimr::TEvTablet::TEvRestored 2026-01-04T03:01:28.353231Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:912:2779] 2026-01-04T03:01:28.353761Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T03:01:28.397311Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:904:2773], Recipient [1:912:2779]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T03:01:28.398273Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T03:01:28.398447Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T03:01:28.401127Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T03:01:28.401226Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T03:01:28.401291Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T03:01:28.403198Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T03:01:28.403400Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T03:01:28.403521Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:928:2779] in generation 1 2026-01-04T03:01:28.404090Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T03:01:28.449420Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T03:01:28.450663Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T03:01:28.450825Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:930:2789] 2026-01-04T03:01:28.450892Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T03:01:28.450938Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T03:01:28.450975Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T03:01:28.451211Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:912:2779], Recipient [1:912:2779]: 
NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T03:01:28.452042Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T03:01:28.453086Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T03:01:28.453212Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T03:01:28.453290Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T03:01:28.453328Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T03:01:28.453378Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T03:01:28.453419Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T03:01:28.453454Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T03:01:28.453496Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T03:01:28.453553Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T03:01:28.453664Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:919:2783], Recipient [1:912:2779]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T03:01:28.453701Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T03:01:28.453742Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:911:2778], serverId# [1:919:2783], sessionId# [0:0:0] 2026-01-04T03:01:28.453856Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:919:2783] 2026-01-04T03:01:28.453920Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T03:01:28.454056Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T03:01:28.454497Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T03:01:28.454565Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T03:01:28.455195Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T03:01:28.455363Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T03:01:28.455404Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T03:01:28.455440Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 
2026-01-04T03:01:28.455474Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T03:01:28.455804Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T03:01:28.455877Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-04T03:01:28.455913Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-04T03:01:28.455944Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-04T03:01:28.455992Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-04T03:01:28.456019Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPr ... 86224037888 read iterator# {[19:1127:2934], 0} exhausted 2026-01-04T03:03:30.251544Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-04T03:03:30.251582Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T03:03:30.251615Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:6] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T03:03:30.251665Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:6] at 72075186224037888 on unit CompletedOperations 2026-01-04T03:03:30.251723Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:6] at 72075186224037888 is Executed 2026-01-04T03:03:30.251753Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:6] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T03:03:30.251790Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:6] at 72075186224037888 has finished 2026-01-04T03:03:30.251861Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T03:03:30.252062Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-04T03:03:30.523687Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [19:1137:2944], Recipient [19:918:2782]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T03:03:30.523825Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T03:03:30.523945Z node 19 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [19:1136:2943], serverId# [19:1137:2944], sessionId# [0:0:0] 2026-01-04T03:03:30.524385Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 278003712, Sender [19:1135:2942], Recipient [19:918:2782]: NKikimrDataEvents.TEvWrite TxId: 0 TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 
Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true } SendingShards: 72075186224037888 ReceivingShards: 72075186224037888 Op: Commit } LockMode: OPTIMISTIC_SNAPSHOT_ISOLATION 2026-01-04T03:03:30.524441Z node 19 :TX_DATASHARD TRACE: datashard__write.cpp:182: Handle TTxWrite: at tablet# 72075186224037888 2026-01-04T03:03:30.524694Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435074, Sender [19:918:2782], Recipient [19:918:2782]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvDelayedProposeTransaction 2026-01-04T03:03:30.524757Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3180: StateWork, processing event TEvPrivate::TEvDelayedProposeTransaction 2026-01-04T03:03:30.524917Z node 19 :TX_DATASHARD TRACE: datashard__write.cpp:28: TTxWrite:: execute at tablet# 72075186224037888 2026-01-04T03:03:30.525170Z node 19 :TX_DATASHARD TRACE: datashard_write_operation.cpp:72: Parsing write transaction for 0 at 72075186224037888, record: TxId: 0 TxMode: MODE_IMMEDIATE Locks { Locks { LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true } SendingShards: 72075186224037888 ReceivingShards: 72075186224037888 Op: Commit } LockMode: OPTIMISTIC_SNAPSHOT_ISOLATION 2026-01-04T03:03:30.525364Z node 19 :TX_DATASHARD TRACE: key_validator.cpp:33: -- AddReadRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-04T03:03:30.525519Z node 19 :TX_DATASHARD TRACE: key_validator.cpp:54: -- AddWriteRange: (Uint64 : 281474976715661, Uint64 : 72075186224037888, Uint64 : 72057594046644480, Uint64 : 38) table: [1:997:0] 2026-01-04T03:03:30.525680Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CheckWrite 2026-01-04T03:03:30.525779Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T03:03:30.526570Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CheckWrite 2026-01-04T03:03:30.526657Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T03:03:30.526719Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T03:03:30.526796Z node 19 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-01-04T03:03:30.526910Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:7] at 72075186224037888 2026-01-04T03:03:30.526998Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T03:03:30.527040Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T03:03:30.527092Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit BlockFailPoint 2026-01-04T03:03:30.527129Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 
72075186224037888 on unit BlockFailPoint 2026-01-04T03:03:30.527168Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T03:03:30.527197Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit BlockFailPoint 2026-01-04T03:03:30.527226Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit ExecuteWrite 2026-01-04T03:03:30.527256Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit ExecuteWrite 2026-01-04T03:03:30.527319Z node 19 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:7] at 72075186224037888 2026-01-04T03:03:30.527455Z node 19 :TX_DATASHARD TRACE: datashard.cpp:2377: GetMvccTxVersion at 72075186224037888 CompleteEdge# v2500/0 IncompleteEdge# v{min} UnprotectedReadEdge# v2500/18446744073709551615 ImmediateWriteEdge# v2500/18446744073709551615 ImmediateWriteEdgeReplied# v2500/18446744073709551615 2026-01-04T03:03:30.527667Z node 19 :TX_DATASHARD TRACE: datashard_kqp.cpp:834: KqpCommitLock LockId: 281474976715661 DataShard: 72075186224037888 Generation: 1 Counter: 0 SchemeShard: 72057594046644480 PathId: 38 HasWrites: true 2026-01-04T03:03:30.527767Z node 19 :TX_DATASHARD TRACE: datashard_user_db.cpp:517: Committing changes lockId# 281474976715661 in localTid# 1001 shard# 72075186224037888 2026-01-04T03:03:30.527926Z node 19 :TX_DATASHARD DEBUG: execute_write_unit.cpp:469: Skip empty write operation for [0:7] at 72075186224037888 2026-01-04T03:03:30.528035Z node 19 :TX_DATASHARD TRACE: locks.cpp:194: Lock 281474976715662 marked broken at v{min} 2026-01-04T03:03:30.528359Z node 19 :TX_DATASHARD TRACE: execute_write_unit.cpp:48: add locks to result: 0 2026-01-04T03:03:30.528493Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T03:03:30.528554Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit ExecuteWrite 2026-01-04T03:03:30.528629Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit FinishProposeWrite 2026-01-04T03:03:30.528686Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T03:03:30.528802Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T03:03:30.528869Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit FinishProposeWrite 2026-01-04T03:03:30.528963Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:7] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T03:03:30.529029Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:7] at 72075186224037888 on unit CompletedOperations 2026-01-04T03:03:30.529100Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:7] at 72075186224037888 is Executed 2026-01-04T03:03:30.529135Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:7] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T03:03:30.529191Z node 19 :TX_DATASHARD 
TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:7] at 72075186224037888 has finished 2026-01-04T03:03:30.530221Z node 19 :TX_DATASHARD TRACE: datashard__write.cpp:150: TTxWrite complete: at tablet# 72075186224037888 2026-01-04T03:03:30.530328Z node 19 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:7] at 72075186224037888 on unit FinishProposeWrite 2026-01-04T03:03:30.530422Z node 19 :TX_DATASHARD TRACE: finish_propose_write_unit.cpp:163: Propose transaction complete txid 7 at tablet 72075186224037888 send to client, propose latency: 0 ms, status: STATUS_COMPLETED 2026-01-04T03:03:30.530564Z node 19 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T03:03:30.531356Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553218, Sender [19:1127:2934], Recipient [19:918:2782]: NKikimrTxDataShard.TEvReadAck ReadId: 0 SeqNo: 1 MaxRows: 1 MaxBytes: 4294967295 2026-01-04T03:03:30.531600Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3660: 72075186224037888 ReadAck for read iterator# {[19:1127:2934], 0}: { ReadId: 0 SeqNo: 1 MaxRows: 1 MaxBytes: 4294967295 }, read continued, bytesLeft# 4294967295, rowsLeft# 1 2026-01-04T03:03:30.531774Z node 19 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269553217, Sender [19:918:2782], Recipient [19:918:2782]: NKikimr::TEvDataShard::TEvReadContinue 2026-01-04T03:03:30.531912Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3081: 72075186224037888 ReadContinue for iterator# {[19:1127:2934], 0}, firstUnprocessedQuery# 0 2026-01-04T03:03:30.532046Z node 19 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3174: 72075186224037888 ReadContinue: iterator# {[19:1127:2934], 0}, FirstUnprocessedQuery# 0 2026-01-04T03:03:30.532419Z node 19 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:3277: 72075186224037888 read iterator# {[19:1127:2934], 0} TTxReadContinue::Execute() found broken lock# 281474976715662 2026-01-04T03:03:30.532738Z node 19 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:3321: 72075186224037888 read iterator# {[19:1127:2934], 0} TTxReadContinue::Execute() finished with error, aborting: ReadId: 0 SeqNo: 2 Status { Code: ABORTED Issues { message: "Read conflict with concurrent transaction" severity: 1 } } BrokenTxLocks { LockId: 281474976715662 DataShard: 72075186224037888 Generation: 1 Counter: 18446744073709551615 SchemeShard: 72057594046644480 PathId: 38 } NodeId: 19 |99.0%| [TM] {RESULT} ydb/core/tx/datashard/ut_snapshot_isolation/unittest |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_snapshot_isolation/unittest >> TTxDataShardLocalKMeansScan::TooManyClusters [GOOD] >> TTxDataShardLocalKMeansScan::MainToPosting >> TopicSessionTests::WrongJsonOffset [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_With_Offset [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_With_Future_Offset |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4__SYNC-pk_types5-all_types5-index5---SYNC] [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> RowDispatcherTests::OneClientOneSession >> RowDispatcherTests::OneClientOneSession [GOOD] >> KqpTpch::Query16 [GOOD] >> KqpTpch::Query17 >> RowDispatcherTests::TwoClientOneSession >> RowDispatcherTests::TwoClientOneSession [GOOD] >> 
RowDispatcherTests::SessionError >> test_canonical_requests.py::Test::test_vdisk_ready_stable_period [GOOD] >> RowDispatcherTests::SessionError [GOOD] >> TestPurecalcFilter::Simple1 [GOOD] >> RowDispatcherTests::CoordinatorSubscribe >> RowDispatcherTests::CoordinatorSubscribe [GOOD] >> TestPurecalcFilter::Simple2 >> TTopicReaderTests::TestRun_Read_Less_Messages_Than_Sent [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged >> RowDispatcherTests::CoordinatorSubscribeBeforeCoordinatorChanged [GOOD] >> RowDispatcherTests::TwoClients4Sessions >> RowDispatcherTests::TwoClients4Sessions [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> RowDispatcherTests::ReinitConsumerIfNewGeneration |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> RowDispatcherTests::ReinitConsumerIfNewGeneration [GOOD] >> RowDispatcherTests::HandleTEvUndelivered >> RowDispatcherTests::HandleTEvUndelivered [GOOD] >> RowDispatcherTests::TwoClientTwoConnection >> RowDispatcherTests::TwoClientTwoConnection [GOOD] >> RowDispatcherTests::ProcessNoSession ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimited_With_Two_Delimiters_In_A_Row [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2026-01-04T03:03:07.606080Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591336604702738783:2090];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:03:07.616880Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T03:03:07.677547Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591336604392660680:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:03:07.677587Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T03:03:07.681427Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T03:03:07.748971Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T03:03:07.959747Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T03:03:08.036449Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T03:03:08.080994Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:03:08.081076Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:03:08.082992Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 
2026-01-04T03:03:08.083061Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:03:08.122633Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:03:08.132081Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T03:03:08.132577Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:03:08.152478Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:03:08.190702Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T03:03:08.273985Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T03:03:08.315485Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001c2b/r3tmp/yandex6yF5Fd.tmp 2026-01-04T03:03:08.315510Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001c2b/r3tmp/yandex6yF5Fd.tmp 2026-01-04T03:03:08.316653Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001c2b/r3tmp/yandex6yF5Fd.tmp 2026-01-04T03:03:08.316755Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T03:03:08.361184Z INFO: TTestServer started on Port 5176 GrpcPort 24972 PQClient connected to localhost:24972 === TenantModeEnabled() = 0 === Init PQ - start server on port 24972 2026-01-04T03:03:08.630487Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:03:08.738087Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:03:08.774193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-04T03:03:08.774343Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T03:03:08.774517Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-04T03:03:08.774534Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-04T03:03:08.774707Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: 
IgniteOperation, opId: 281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T03:03:08.774746Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:03:08.778877Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-04T03:03:08.778986Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T03:03:08.779139Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T03:03:08.779177Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-04T03:03:08.779193Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2026-01-04T03:03:08.779203Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 2 -> 3 2026-01-04T03:03:08.780682Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T03:03:08.780704Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2026-01-04T03:03:08.780724Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T03:03:08.782885Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T03:03:08.782921Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T03:03:08.782936Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 3 -> 128 2026-01-04T03:03:08.786824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T03:03:08.786857Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T03:03:08.786873Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T03:03:08.786898Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: 
TOperation IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2026-01-04T03:03:08.806322Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T03:03:08.811820Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2026-01-04T03:03:08.811922Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2026-01-04T03:03:08.814607Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767495788856, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T03:03:08.814749Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767495788856 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-04T03:03:08.815047Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T03:03:08.816076Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 128 -> 240 2026-01-04T03:03:08.816121Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T03:03:08.816263Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-04T03:03:08.816311Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-04T03:03:08.818839Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T03:03:08.818864Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_schem ... 
-01-04T03:03:34.235896Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:984: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [3:7591336720924257791:2571] 2026-01-04T03:03:34.236065Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: shared/user_3_1_14462987084646807272_v1:1 with generation 1 2026-01-04T03:03:34.239400Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 3 WriteTimestampMS: 1767495814107 CreateTimestampMS: 1767495814104 SizeLag: 280 WriteTimestampEstimateMS: 1767495814210 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2026-01-04T03:03:34.239448Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:695: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 2026-01-04T03:03:34.239518Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 sending to client partition status 2026-01-04T03:03:34.240308Z :INFO: [] [] [7620286-4c23b626-e2141a54-c1b9b104] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. Read offset: (NULL) 2026-01-04T03:03:34.240731Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2026-01-04T03:03:34.240841Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:551: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2026-01-04T03:03:34.240890Z node 3 :PQ_READ_PROXY INFO: partition_actor.cpp:1024: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2026-01-04T03:03:34.240916Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:970: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 3 2026-01-04T03:03:34.240972Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2343: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 3, WTime# 1767495814107, sizeLag# 280 2026-01-04T03:03:34.240988Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2354: session cookie 1 consumer shared/user session 
shared/user_3_1_14462987084646807272_v1TEvPartitionReady. Aval parts: 1 2026-01-04T03:03:34.241029Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2277: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 performing read request: guid# 33db85f9-3a8c243-87de7d6e-60170ecf, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 3, size# 336, partitionsAsked# 1, maxTimeLag# 0ms 2026-01-04T03:03:34.241175Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1396: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 3 maxSize 336 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 3 ClientCommitOffset 0 committedOffset 0 Guid 33db85f9-3a8c243-87de7d6e-60170ecf 2026-01-04T03:03:34.243027Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 3 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1767495814107 CreateTimestampMS: 1767495814104 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1767495814139 CreateTimestampMS: 1767495814105 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1767495814139 CreateTimestampMS: 1767495814105 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 2 SizeLag: 18446744073709551530 RealReadOffset: 2 WaitQuotaTimeMs: 0 EndOffset: 3 StartOffset: 0 } Cookie: 0 } 2026-01-04T03:03:34.243181Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 3 2026-01-04T03:03:34.243218Z node 3 :PQ_READ_PROXY DEBUG: partition_actor.cpp:902: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 3 ReadOffset 3 ReadGuid 33db85f9-3a8c243-87de7d6e-60170ecf has messages 1 2026-01-04T03:03:34.243287Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1951: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 read done: guid# 33db85f9-3a8c243-87de7d6e-60170ecf, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 366 2026-01-04T03:03:34.243310Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2113: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 response to read: guid# 33db85f9-3a8c243-87de7d6e-60170ecf 2026-01-04T03:03:34.243903Z :DEBUG: [] [] [7620286-4c23b626-e2141a54-c1b9b104] [] Got ReadResponse, serverBytesSize = 366, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428434 2026-01-04T03:03:34.244011Z :DEBUG: [] [] [7620286-4c23b626-e2141a54-c1b9b104] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428434 2026-01-04T03:03:34.243480Z node 3 :PQ_READ_PROXY DEBUG: 
read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 Process answer. Aval parts: 0 2026-01-04T03:03:34.245146Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-2) 2026-01-04T03:03:34.245192Z :DEBUG: [] [] [7620286-4c23b626-e2141a54-c1b9b104] [] Returning serverBytesSize = 366 to budget 2026-01-04T03:03:34.245236Z :DEBUG: [] [] [7620286-4c23b626-e2141a54-c1b9b104] [] In ContinueReadingDataImpl, ReadSizeBudget = 366, ReadSizeServerDelta = 52428434 2026-01-04T03:03:34.245459Z :DEBUG: [] [] [7620286-4c23b626-e2141a54-c1b9b104] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2026-01-04T03:03:34.245617Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2026-01-04T03:03:34.245664Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2026-01-04T03:03:34.245691Z :DEBUG: [] Take Data. Partition 0. Read: {1, 1} (2-2) 2026-01-04T03:03:34.245660Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 grpc read done: success# 1, data# { read_request { bytes_size: 366 } } 2026-01-04T03:03:34.245738Z :DEBUG: [] [] [7620286-4c23b626-e2141a54-c1b9b104] [] The application data is transferred to the client. Number of messages 3, size 24 bytes 2026-01-04T03:03:34.245781Z :DEBUG: [] [] [7620286-4c23b626-e2141a54-c1b9b104] [] Returning serverBytesSize = 0 to budget 2026-01-04T03:03:34.245772Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 got read request: guid# bf5b9415-bd5923ea-7460d5f4-5a414251 2026-01-04T03:03:34.245903Z :DEBUG: [] [] [7620286-4c23b626-e2141a54-c1b9b104] [] Requesting status for partition stream id: 1 2026-01-04T03:03:34.246145Z :INFO: [] [] [7620286-4c23b626-e2141a54-c1b9b104] Closing read session. Close timeout: 0.000000s 2026-01-04T03:03:34.246201Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:2:0 2026-01-04T03:03:34.246246Z :INFO: [] [] [7620286-4c23b626-e2141a54-c1b9b104] Counters: { Errors: 0 CurrentSessionLifetimeMs: 36 BytesRead: 24 MessagesRead: 3 BytesReadCompressed: 24 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T03:03:34.246345Z :NOTICE: [] [] [7620286-4c23b626-e2141a54-c1b9b104] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-04T03:03:34.246387Z :DEBUG: [] [] [7620286-4c23b626-e2141a54-c1b9b104] [] Abort session to cluster 2026-01-04T03:03:34.246828Z :NOTICE: [] [] [7620286-4c23b626-e2141a54-c1b9b104] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T03:03:34.246601Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 grpc read done: success# 1, data# { partition_session_status_request { partition_session_id: 1 } } 2026-01-04T03:03:34.246992Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 sending to client partition status 2026-01-04T03:03:34.248088Z node 3 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 grpc read done: success# 0, data# { } 2026-01-04T03:03:34.248106Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 grpc read failed 2026-01-04T03:03:34.248125Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 grpc closed 2026-01-04T03:03:34.248155Z node 3 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_3_1_14462987084646807272_v1 is DEAD 2026-01-04T03:03:34.249601Z node 4 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037898][rt3.dc1--topic1] pipe [3:7591336720924257789:2568] disconnected. 2026-01-04T03:03:34.249636Z node 4 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037898][rt3.dc1--topic1] pipe [3:7591336720924257789:2568] disconnected; active server actors: 1 2026-01-04T03:03:34.249654Z node 4 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037898][rt3.dc1--topic1] pipe [3:7591336720924257789:2568] client user disconnected session shared/user_3_1_14462987084646807272_v1 2026-01-04T03:03:34.250419Z node 3 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_3_1_14462987084646807272_v1 |99.0%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> RowDispatcherTests::ProcessNoSession [GOOD] >> RowDispatcherTests::IgnoreWrongPartitionId >> RowDispatcherTests::IgnoreWrongPartitionId [GOOD] >> QuoterWithKesusTest::KesusRecreation [GOOD] >> QuoterWithKesusTest::AllocationStatistics >> RowDispatcherTests::SessionFatalError >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_all_types-pk_types7-all_types7-index7---] [GOOD] >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] >> RowDispatcherTests::SessionFatalError [GOOD] >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] >> KqpTpch::Query17 [GOOD] >> KqpTpch::Query18 >> TMLPReaderTests::TopicWithManyIterationsData [GOOD] >> TMLPReaderTests::TopicWithBigMessage >> TestPurecalcFilter::Simple2 [GOOD] >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v1-client0] [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_2__SYNC-pk_types2-all_types2-index2---SYNC] [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> 
KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration1V2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2V1 >> TestPurecalcFilter::ManyValues >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform_Invalid_Encode [GOOD] >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_With_Base64_Transform [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets >> TTxDataShardLocalKMeansScan::MainToPosting [GOOD] >> TTxDataShardLocalKMeansScan::MainToPostingWithOverlap >> test_canonical_requests.py::Test::test_cluster_get_set >> KqpTpch::Query18 [GOOD] >> KqpTpch::Query19 >> TMemoryController::MemTable [GOOD] >> TMemoryController::ResourceBroker >> test_inserts.py::TestYdbInsertsOperations::test_concurrent_inserts [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/ut/unittest >> RowDispatcherTests::SessionFatalError [GOOD] Test command err: 2026-01-04T03:01:10.241442Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [1:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2026-01-04T03:01:10.242462Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 3, AssignedNodes: 0, 1, 2 2026-01-04T03:01:10.242602Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [1:25:2054] 2026-01-04T03:01:10.242646Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [1:25:2054] 2026-01-04T03:01:10.242682Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [1:25:2054] 2026-01-04T03:01:10.242717Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [2:26:2054] 2026-01-04T03:01:10.242737Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [2:26:2054] 2026-01-04T03:01:10.242775Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-04T03:01:10.242806Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [2:26:2054] 2026-01-04T03:01:10.242840Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [3:27:2054] 2026-01-04T03:01:10.242861Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [3:27:2054] 2026-01-04T03:01:10.242884Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-04T03:01:10.242914Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2026-01-04T03:01:10.242939Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [3:27:2054] 2026-01-04T03:01:10.243044Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2026-01-04T03:01:10.243178Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2026-01-04T03:01:10.243342Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: 
TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 0 2026-01-04T03:01:10.243395Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2026-01-04T03:01:10.252643Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2026-01-04T03:01:10.252779Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2026-01-04T03:01:10.252960Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [2:32:2055] 2026-01-04T03:01:10.253007Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [2:32:2055] 2026-01-04T03:01:10.253070Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:313: Coordinator: Move all Locations from old actor [2:26:2054] to new [2:32:2055] 2026-01-04T03:01:10.253118Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [2:32:2055] 2026-01-04T03:01:10.253181Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [2:33:2056] 2026-01-04T03:01:10.253219Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [2:33:2056] 2026-01-04T03:01:10.253258Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:313: Coordinator: Move all Locations from old actor [2:32:2055] to new [2:33:2056] 2026-01-04T03:01:10.253289Z node 1 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [2:33:2056] 2026-01-04T03:01:10.253368Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [1:28:2055], topic1, partIds: 0 2026-01-04T03:01:10.253418Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:28:2055] 2026-01-04T03:01:10.253553Z node 1 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [1:29:2056], topic1, partIds: 1 2026-01-04T03:01:10.253624Z node 1 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [1:29:2056] 2026-01-04T03:01:10.352203Z node 5 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [5:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2026-01-04T03:01:10.352918Z node 5 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 3, AssignedNodes: 0, 1, 2 2026-01-04T03:01:10.357254Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [5:25:2054] 2026-01-04T03:01:10.357330Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [5:25:2054] 2026-01-04T03:01:10.357376Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [5:25:2054] 2026-01-04T03:01:10.357446Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [6:26:2054] 2026-01-04T03:01:10.357468Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [6:26:2054] 2026-01-04T03:01:10.357511Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-04T03:01:10.357546Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [6:26:2054] 2026-01-04T03:01:10.357588Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [7:27:2054] 2026-01-04T03:01:10.357629Z node 
5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [7:27:2054] 2026-01-04T03:01:10.357661Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-04T03:01:10.357684Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2026-01-04T03:01:10.357712Z node 5 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [7:27:2054] 2026-01-04T03:01:10.357858Z node 5 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [5:28:2055], topic1, partIds: 0, 1, 2 2026-01-04T03:01:10.358025Z node 5 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [5:28:2055] 2026-01-04T03:01:10.358218Z node 5 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [5:29:2056], topic1, partIds: 3 2026-01-04T03:01:10.358345Z node 5 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [5:29:2056] 2026-01-04T03:01:10.458390Z node 9 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [9:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2026-01-04T03:01:10.458833Z node 9 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 3, AssignedNodes: 0, 1, 2 2026-01-04T03:01:10.458899Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [10:26:2054] 2026-01-04T03:01:10.458938Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [10:26:2054] 2026-01-04T03:01:10.458969Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-04T03:01:10.459017Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [10:26:2054] 2026-01-04T03:01:10.459142Z node 9 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [9:28:2055], topic1, partIds: 0 2026-01-04T03:01:10.459219Z node 9 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:514: Coordinator: Not all nodes connected, nodes count: 3, known rd count: 2, add request into pending queue 2026-01-04T03:01:11.461935Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [11:27:2054] 2026-01-04T03:01:11.462041Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [11:27:2054] 2026-01-04T03:01:11.462080Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-04T03:01:11.462134Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2026-01-04T03:01:11.462263Z node 9 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [9:28:2055] 2026-01-04T03:01:11.462342Z node 9 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [11:27:2054] 2026-01-04T03:01:11.592359Z node 13 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [13:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2026-01-04T03:01:11.592544Z node 13 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 3, AssignedNodes: 0, 1, 2 2026-01-04T03:01:11.592606Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: 
TEvPing received, [13:25:2054] 2026-01-04T03:01:11.592645Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [13:25:2054] 2026-01-04T03:01:11.592694Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [13:25:2054] 2026-01-04T03:01:11.592755Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [14:26:2054] 2026-01-04T03:01:11.592779Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [14:26:2054] 2026-01-04T03:01:11.592809Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-04T03:01:11.592858Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [14:26:2054] 2026-01-04T03:01:11.592896Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:356: Coordinator: TEvPing received, [15:27:2054] 2026-01-04T03:01:11.592917Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [15:27:2054] 2026-01-04T03:01:11.592935Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-04T03:01:11.592957Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2026-01-04T03:01:11.592984Z node 13 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:359: Coordinator: Send TEvPong to [15:27:2054] 2026-01-04T03:01:11.593094Z node 13 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [13:28:2055], topic1, partIds: 0, 1, 2 2026-01-04T03:01:11.593204Z node 13 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [13:28:2055] 2026-01-04T03:01:11.593401Z node 13 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:507: Coordinator: TEvCoordinatorRequest from [13:28:2055], topic1, partIds: 0, 1, 2 2026-01-04T03:01:11.593522Z node 13 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:562: Coordinator: Send TEvCoordinatorResult to [13:28:2055] 2026-01-04T03:01:11.691739Z node 17 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [17:31:2058], NodesManagerId [0:0:0], rebalancing timeout 1.000000s 2026-01-04T03:01:11.692615Z node 17 :FQ_ROW_DISPATCHER INFO: coordinator.cpp:591: Coordinator: Updated node info, node count: 1, AssignedNodes: 0 2026-01-04T03:01:11.692668Z node 17 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:700: Coordinator: Change global state to Started (by nodes count) 2026-01-04T03:01:11.692720Z ... topic part id 100 query id QueryId cookie 42 2026-01-04T03:03:37.750555Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 100 2026-01-04T03:03:37.754449Z node 60 :FQ_ROW_DISPATCHER ERROR: schema.cpp:160: Create coordination node "YDB_DATABASE/RowDispatcher/Tenant" error: TRANSPORT_UNAVAILABLE [ {
: Error: GRpc error: (14): DNS resolution failed for YDB_ENDPOINT: C-ares status is not ARES_SUCCESS qtype=A name=YDB_ENDPOINT is_balancer=0: DNS server returned general failure } {
: Error: Grpc error response on endpoint YDB_ENDPOINT } ] 2026-01-04T03:03:37.755533Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1052: RowDispatcher: TEvTryConnect to node id 61 2026-01-04T03:03:37.760307Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:587: RowDispatcher: EvNodeConnected, node id 61 2026-01-04T03:03:37.761203Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [60:22:2063] to [61:16:2053] query id QueryId 2026-01-04T03:03:37.761606Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [61:16:2053] part id 100 query id QueryId 2026-01-04T03:03:37.761749Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [60:22:2063] to [61:16:2053] query id QueryId 2026-01-04T03:03:37.762605Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:967: RowDispatcher: Received TEvNoSession from [61:16:2053], generation 41 2026-01-04T03:03:37.762735Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [60:22:2063] to [61:16:2053] query id QueryId 2026-01-04T03:03:37.763141Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [61:16:2053] part id 100 query id QueryId 2026-01-04T03:03:37.763272Z node 60 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [60:22:2063] to [61:16:2053] query id QueryId 2026-01-04T03:03:37.763506Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:967: RowDispatcher: Received TEvNoSession from [61:16:2053], generation 42 2026-01-04T03:03:37.763570Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1016: RowDispatcher: DeleteConsumer, readActorId [61:16:2053] query id QueryId, partitions size 1 2026-01-04T03:03:37.763684Z node 60 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1036: RowDispatcher: Session is not used, sent TEvPoisonPill to [60:22:2063] 2026-01-04T03:03:38.052536Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:544: RowDispatcher: Successfully bootstrapped row dispatcher, id [62:17:2058], tenant Tenant 2026-01-04T03:03:38.052650Z node 62 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [62:17:2058] 2026-01-04T03:03:38.052682Z node 62 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-04T03:03:38.052914Z node 62 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [62:18:2059], NodesManagerId [0:0:0], rebalancing timeout 120.000000s 2026-01-04T03:03:38.052971Z node 62 :FQ_ROW_DISPATCHER DEBUG: leader_election.cpp:232: TLeaderElection [62:19:2060] Successfully bootstrapped, local coordinator id [62:18:2059], tenant id Tenant, local mode 0, coordination node path YDB_DATABASE/RowDispatcher/Tenant, endpoint YDB_ENDPOINT 2026-01-04T03:03:38.066421Z node 62 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:71: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2026-01-04T03:03:38.066476Z node 62 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:113: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2026-01-04T03:03:38.066501Z node 62 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:411: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2026-01-04T03:03:38.066768Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:637: RowDispatcher: TEvCoordinatorChangesSubscribe from [62:18:2059] 
2026-01-04T03:03:38.067474Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [62:14:2056], read group connection_id1, topicPath topic part id 100 query id QueryId cookie 1 2026-01-04T03:03:38.067643Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 100 2026-01-04T03:03:38.067990Z node 62 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [62:22:2063] to [62:14:2056] query id QueryId 2026-01-04T03:03:38.068089Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1001: RowDispatcher: Received TEvStopSession from [62:14:2056] topic topic query id QueryId 2026-01-04T03:03:38.068150Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1016: RowDispatcher: DeleteConsumer, readActorId [62:14:2056] query id QueryId, partitions size 1 2026-01-04T03:03:38.068235Z node 62 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1036: RowDispatcher: Session is not used, sent TEvPoisonPill to [62:22:2063] 2026-01-04T03:03:38.436573Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:544: RowDispatcher: Successfully bootstrapped row dispatcher, id [64:17:2058], tenant Tenant 2026-01-04T03:03:38.436712Z node 64 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:299: Coordinator: UpdateKnownRowDispatchers [64:17:2058] 2026-01-04T03:03:38.436751Z node 64 :FQ_ROW_DISPATCHER TRACE: coordinator.cpp:329: Coordinator: Add new row dispatcher to map (state 1) 2026-01-04T03:03:38.437066Z node 64 :FQ_ROW_DISPATCHER DEBUG: coordinator.cpp:293: Coordinator: Successfully bootstrapped coordinator, id [64:18:2059], NodesManagerId [0:0:0], rebalancing timeout 120.000000s 2026-01-04T03:03:38.437160Z node 64 :FQ_ROW_DISPATCHER DEBUG: leader_election.cpp:232: TLeaderElection [64:19:2060] Successfully bootstrapped, local coordinator id [64:18:2059], tenant id Tenant, local mode 0, coordination node path YDB_DATABASE/RowDispatcher/Tenant, endpoint YDB_ENDPOINT 2026-01-04T03:03:38.455213Z node 64 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:71: Run create coordination node "YDB_DATABASE/RowDispatcher/Tenant" actor 2026-01-04T03:03:38.455287Z node 64 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:113: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2026-01-04T03:03:38.455326Z node 64 :FQ_ROW_DISPATCHER DEBUG: schema.cpp:411: Call create coordination node "YDB_DATABASE/RowDispatcher/Tenant" 2026-01-04T03:03:38.466440Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:637: RowDispatcher: TEvCoordinatorChangesSubscribe from [64:18:2059] 2026-01-04T03:03:38.470856Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [64:14:2056], read group connection_id1, topicPath topic part id 100,101 query id QueryId cookie 1 2026-01-04T03:03:38.471212Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 100 2026-01-04T03:03:38.471478Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 101 2026-01-04T03:03:38.471963Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [64:15:2057], read group connection_id1, topicPath topic part id 100,101 query id QueryId cookie 1 2026-01-04T03:03:38.472389Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1120: RowDispatcher: Forward TEvSessionError from 
[64:22:2063] to [64:14:2056] query id QueryId 2026-01-04T03:03:38.472464Z node 64 :FQ_ROW_DISPATCHER WARN: row_dispatcher.cpp:1140: RowDispatcher: Fatal session error, remove session [64:22:2063] 2026-01-04T03:03:38.472542Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1016: RowDispatcher: DeleteConsumer, readActorId [64:14:2056] query id QueryId, partitions size 2 2026-01-04T03:03:38.473117Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:23:2064] to [64:15:2057] query id QueryId 2026-01-04T03:03:38.473258Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [64:15:2057] part id 101 query id QueryId 2026-01-04T03:03:38.473370Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:23:2064] to [64:15:2057] query id QueryId 2026-01-04T03:03:38.473517Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [64:14:2056], read group connection_id1, topicPath topic part id 100,101 query id QueryId cookie 1 2026-01-04T03:03:38.473641Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:892: RowDispatcher: Create new session: read group connection_id1 topic topic part id 100 2026-01-04T03:03:38.474273Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1120: RowDispatcher: Forward TEvSessionError from [64:22:2063] to [64:15:2057] query id QueryId 2026-01-04T03:03:38.474396Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1016: RowDispatcher: DeleteConsumer, readActorId [64:15:2057] query id QueryId, partitions size 2 2026-01-04T03:03:38.474507Z node 64 :FQ_ROW_DISPATCHER ERROR: row_dispatcher.cpp:1033: RowDispatcher: Wrong readActorId [64:15:2057], no such consumer 2026-01-04T03:03:38.474571Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:1036: RowDispatcher: Session is not used, sent TEvPoisonPill to [64:22:2063] 2026-01-04T03:03:38.476867Z node 64 :FQ_ROW_DISPATCHER DEBUG: row_dispatcher.cpp:853: RowDispatcher: Received TEvStartSession from [64:15:2057], read group connection_id1, topicPath topic part id 100,101 query id QueryId cookie 1 2026-01-04T03:03:38.484282Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:24:2065] to [64:14:2056] query id QueryId 2026-01-04T03:03:38.486375Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [64:14:2056] part id 100 query id QueryId 2026-01-04T03:03:38.486558Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:24:2065] to [64:14:2056] query id QueryId 2026-01-04T03:03:38.486686Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:24:2065] to [64:15:2057] query id QueryId 2026-01-04T03:03:38.486827Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [64:15:2057] part id 100 query id QueryId 2026-01-04T03:03:38.486962Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:24:2065] to [64:15:2057] query id QueryId 2026-01-04T03:03:38.487080Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:23:2064] to [64:14:2056] query id QueryId 2026-01-04T03:03:38.487192Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received 
TEvGetNextBatch from [64:14:2056] part id 101 query id QueryId 2026-01-04T03:03:38.487304Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:23:2064] to [64:14:2056] query id QueryId 2026-01-04T03:03:38.487408Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1081: RowDispatcher: Forward TEvNewDataArrived from [64:23:2064] to [64:15:2057] query id QueryId 2026-01-04T03:03:38.487521Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:940: RowDispatcher: Received TEvGetNextBatch from [64:15:2057] part id 101 query id QueryId 2026-01-04T03:03:38.487644Z node 64 :FQ_ROW_DISPATCHER TRACE: row_dispatcher.cpp:1100: RowDispatcher: Forward TEvMessageBatch from [64:23:2064] to [64:15:2057] query id QueryId |99.0%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/ut/unittest |99.0%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/row_dispatcher/ut/unittest >> TTopicWriterTests::TestEnterMessage_1KiB_No_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_1KiB_Newline_Delimiter [GOOD] >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] >> TestPurecalcFilter::ManyValues [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_Custom_Delimiter_Delimited [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> TestPurecalcFilter::NullValues >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/postgresql/py3test >> test_postgres.py::TestPostgresSuite::test_postgres_suite[strings] [GOOD] |99.0%| [TM] {RESULT} ydb/tests/functional/postgresql/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/postgresql/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] >> DataShardStats::NoData [GOOD] >> DataShardStats::Follower >> QuoterWithKesusTest::AllocationStatistics [GOOD] >> QuoterWithKesusTest::UpdatesCountersForParentResources >> TTopicWriterTests::TestEnterMessage_EmptyInput [GOOD] >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] >> KqpTpch::Query19 [GOOD] >> KqpTpch::Query20 >> test_inserts.py::TestYdbInsertsOperations::test_transactional_update [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_No_Base64_Transform [GOOD] |99.0%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> TestPurecalcFilter::NullValues [GOOD] >> TestPurecalcFilter::PartialPush >> TTxDataShardLocalKMeansScan::MainToPostingWithOverlap [GOOD] >> TTxDataShardLocalKMeansScan::MainToBuild >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2V1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2V2 >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] >> TTopicReaderTests::TestRun_ReadMessages_With_Future_Offset [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_simple_acquire [GOOD] >> 
test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_2 >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_keep_alive [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_3 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_4 >> test_session_pool.py::TestSessionPool::test_session_pool_no_race_after_future_cancel_case_4 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_release_logic [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_1 >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_1 [GOOD] >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure >> TMemoryController::ResourceBroker [GOOD] >> TMemoryController::ResourceBroker_ConfigLimit |99.0%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.0%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TCheckpointCoordinatorTests::ShouldDoNothingIfNoIngressTasks [GOOD] >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMoreMessagesThanLimit_Without_Wait_NoDelimiter [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2026-01-04T03:03:05.051805Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591336593847421405:2079];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:03:05.051860Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T03:03:05.095162Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T03:03:05.095778Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591336593497143731:2153];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:03:05.095994Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T03:03:05.107342Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T03:03:05.294589Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T03:03:05.298726Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T03:03:05.367526Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> 
Disconnected 2026-01-04T03:03:05.367658Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:03:05.374100Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:03:05.374178Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:03:05.380938Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:03:05.409863Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T03:03:05.413843Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:03:05.483780Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:03:05.564150Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T03:03:05.597902Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T03:03:05.600461Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001c2d/r3tmp/yandexfNnyiX.tmp 2026-01-04T03:03:05.600516Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001c2d/r3tmp/yandexfNnyiX.tmp 2026-01-04T03:03:05.600710Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001c2d/r3tmp/yandexfNnyiX.tmp 2026-01-04T03:03:05.600815Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T03:03:05.638886Z INFO: TTestServer started on Port 18138 GrpcPort 17629 PQClient connected to localhost:17629 === TenantModeEnabled() = 0 === Init PQ - start server on port 17629 2026-01-04T03:03:06.003914Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976710657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-04T03:03:06.004116Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T03:03:06.004294Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-04T03:03:06.004324Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976710657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-04T03:03:06.005258Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 
281474976710657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T03:03:06.005314Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:03:06.012450Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976710657, response: Status: StatusAccepted TxId: 281474976710657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-04T03:03:06.012628Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976710657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T03:03:06.012824Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T03:03:06.012890Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976710657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-04T03:03:06.012924Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976710657:0 ProgressState no shards to create, do next state 2026-01-04T03:03:06.012945Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 2 -> 3 2026-01-04T03:03:06.015366Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T03:03:06.015393Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976710657, ready parts: 0/1, is published: true 2026-01-04T03:03:06.015432Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976710657, at schemeshard: 72057594046644480 2026-01-04T03:03:06.016780Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T03:03:06.016822Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976710657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T03:03:06.016838Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 3 -> 128 2026-01-04T03:03:06.024426Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T03:03:06.024472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976710657:0, at schemeshard: 72057594046644480 2026-01-04T03:03:06.024504Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T03:03:06.024529Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation 
IsReadyToPropose , TxId: 281474976710657 ready parts: 1/1 2026-01-04T03:03:06.051810Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976710657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T03:03:06.053777Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976710657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976710657 msg type: 269090816 2026-01-04T03:03:06.053893Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976710657, partId: 4294967295, tablet: 72057594046316545 2026-01-04T03:03:06.055991Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767495786098, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T03:03:06.056127Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767495786098 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-04T03:03:06.056156Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T03:03:06.056392Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976710657:0 128 -> 240 2026-01-04T03:03:06.056431Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976710657:0, at tablet# 72057594046644480 2026-01-04T03:03:06.056591Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-04T03:03:06.056644Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-04T03:03:06.059021Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T03:03:06.059052Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_scheme_board.cpp:40: TTxPublishToSchemeBoard DescribePath, at schemeshard: 72057594046644480, txId: 281474976710657, path id: [OwnerId: 72057594046644480, LocalPathId: 1] 2026-01-04T03:03:06.059260Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:54: TTxPublishToSchemeBoard DoComplete, at schemeshard: 72057594 ... 
start attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [5:7591336772688769799:2572] 2026-01-04T03:03:46.889880Z node 5 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: shared/user_5_1_6686084643998263946_v1:1 with generation 1 2026-01-04T03:03:46.894212Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 4 WriteTimestampMS: 1767495826743 CreateTimestampMS: 1767495826741 SizeLag: 549 WriteTimestampEstimateMS: 1767495826889 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2026-01-04T03:03:46.894258Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:695: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 readOffset 0 committedOffset 0 2026-01-04T03:03:46.894325Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 sending to client partition status 2026-01-04T03:03:46.896586Z :INFO: [] [] [147a7b8c-9800324e-f542f09f-75a54c3d] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. Read offset: (NULL) 2026-01-04T03:03:46.897352Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 } } 2026-01-04T03:03:46.897506Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:551: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, commitOffset# (empty maybe) 2026-01-04T03:03:46.897584Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:1024: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 0 2026-01-04T03:03:46.897619Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:970: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) ready for read with readOffset 0 endOffset 4 2026-01-04T03:03:46.897679Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2343: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 partition ready for read: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 0, endOffset# 4, WTime# 1767495826743, sizeLag# 549 2026-01-04T03:03:46.897699Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2354: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1TEvPartitionReady. 
Aval parts: 1 2026-01-04T03:03:46.897745Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2277: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 performing read request: guid# 94f4df69-a95ba2cd-1d4c351e-7a37d446, from# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), count# 4, size# 658, partitionsAsked# 1, maxTimeLag# 0ms 2026-01-04T03:03:46.897894Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1396: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 READ FROM TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1)maxCount 4 maxSize 658 maxTimeLagMs 0 readTimestampMs 0 readOffset 0 EndOffset 4 ClientCommitOffset 0 committedOffset 0 Guid 94f4df69-a95ba2cd-1d4c351e-7a37d446 2026-01-04T03:03:46.899616Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 1 event { CmdReadResult { MaxOffset: 4 Result { Offset: 0 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 1 WriteTimestampMS: 1767495826743 CreateTimestampMS: 1767495826741 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 1 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 2 WriteTimestampMS: 1767495826748 CreateTimestampMS: 1767495826742 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 2 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 3 WriteTimestampMS: 1767495826788 CreateTimestampMS: 1767495826742 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } Result { Offset: 3 Data: "... 79 bytes ..." SourceId: "\000source1" SeqNo: 4 WriteTimestampMS: 1767495826794 CreateTimestampMS: 1767495826742 UncompressedSize: 8 PartitionKey: "" ExplicitHash: "" } BlobsFromDisk: 0 BlobsFromCache: 3 SizeLag: 61 RealReadOffset: 3 WaitQuotaTimeMs: 0 EndOffset: 4 StartOffset: 0 } Cookie: 0 } 2026-01-04T03:03:46.899842Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 4 2026-01-04T03:03:46.899895Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:902: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 after read state TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 4 ReadOffset 4 ReadGuid 94f4df69-a95ba2cd-1d4c351e-7a37d446 has messages 1 2026-01-04T03:03:46.900003Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1951: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 read done: guid# 94f4df69-a95ba2cd-1d4c351e-7a37d446, partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), size# 650 2026-01-04T03:03:46.900036Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2113: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 response to read: guid# 94f4df69-a95ba2cd-1d4c351e-7a37d446 2026-01-04T03:03:46.900238Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:2156: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 Process answer. 
Aval parts: 0 2026-01-04T03:03:46.900750Z :DEBUG: [] [] [147a7b8c-9800324e-f542f09f-75a54c3d] [] Got ReadResponse, serverBytesSize = 650, now ReadSizeBudget = 0, ReadSizeServerDelta = 52428150 2026-01-04T03:03:46.900872Z :DEBUG: [] [] [147a7b8c-9800324e-f542f09f-75a54c3d] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52428150 2026-01-04T03:03:46.901094Z :DEBUG: [] Decompression task done. Partition/PartitionSessionId: 1 (0-3) 2026-01-04T03:03:46.901145Z :DEBUG: [] [] [147a7b8c-9800324e-f542f09f-75a54c3d] [] Returning serverBytesSize = 650 to budget 2026-01-04T03:03:46.901202Z :DEBUG: [] [] [147a7b8c-9800324e-f542f09f-75a54c3d] [] In ContinueReadingDataImpl, ReadSizeBudget = 650, ReadSizeServerDelta = 52428150 2026-01-04T03:03:46.901474Z :DEBUG: [] [] [147a7b8c-9800324e-f542f09f-75a54c3d] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2026-01-04T03:03:46.901725Z :DEBUG: [] Take Data. Partition 0. Read: {0, 0} (0-0) 2026-01-04T03:03:46.901793Z :DEBUG: [] Take Data. Partition 0. Read: {1, 0} (1-1) 2026-01-04T03:03:46.901848Z :DEBUG: [] Take Data. Partition 0. Read: {2, 0} (2-2) 2026-01-04T03:03:46.901876Z :DEBUG: [] Take Data. Partition 0. Read: {3, 0} (3-3) 2026-01-04T03:03:46.901923Z :DEBUG: [] [] [147a7b8c-9800324e-f542f09f-75a54c3d] [] The application data is transferred to the client. Number of messages 4, size 32 bytes 2026-01-04T03:03:46.901725Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 grpc read done: success# 1, data# { read_request { bytes_size: 650 } } 2026-01-04T03:03:46.901864Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 got read request: guid# 86864342-ba1f2d55-cd42b102-46d990c1 2026-01-04T03:03:46.901973Z :DEBUG: [] [] [147a7b8c-9800324e-f542f09f-75a54c3d] [] Returning serverBytesSize = 0 to budget 2026-01-04T03:03:46.902138Z :DEBUG: [] [] [147a7b8c-9800324e-f542f09f-75a54c3d] [] Requesting status for partition stream id: 1 2026-01-04T03:03:46.904039Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 grpc read done: success# 1, data# { partition_session_status_request { partition_session_id: 1 } } 2026-01-04T03:03:46.904213Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 sending to client partition status 2026-01-04T03:03:47.003054Z :INFO: [] [] [147a7b8c-9800324e-f542f09f-75a54c3d] Closing read session. Close timeout: 0.000000s 2026-01-04T03:03:47.003139Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:3:0 2026-01-04T03:03:47.003211Z :INFO: [] [] [147a7b8c-9800324e-f542f09f-75a54c3d] Counters: { Errors: 0 CurrentSessionLifetimeMs: 148 BytesRead: 32 MessagesRead: 4 BytesReadCompressed: 32 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T03:03:47.003326Z :NOTICE: [] [] [147a7b8c-9800324e-f542f09f-75a54c3d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-04T03:03:47.003379Z :DEBUG: [] [] [147a7b8c-9800324e-f542f09f-75a54c3d] [] Abort session to cluster 2026-01-04T03:03:47.003952Z :NOTICE: [] [] [147a7b8c-9800324e-f542f09f-75a54c3d] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T03:03:47.005030Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 grpc read done: success# 0, data# { } 2026-01-04T03:03:47.005059Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 grpc read failed 2026-01-04T03:03:47.005086Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 grpc closed 2026-01-04T03:03:47.005127Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_5_1_6686084643998263946_v1 is DEAD 2026-01-04T03:03:47.007627Z node 5 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_5_1_6686084643998263946_v1 2026-01-04T03:03:47.007298Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037898][rt3.dc1--topic1] pipe [5:7591336772688769797:2569] disconnected. 2026-01-04T03:03:47.007339Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037898][rt3.dc1--topic1] pipe [5:7591336772688769797:2569] disconnected; active server actors: 1 2026-01-04T03:03:47.007360Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037898][rt3.dc1--topic1] pipe [5:7591336772688769797:2569] client user disconnected session shared/user_5_1_6686084643998263946_v1 |99.0%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest ------- [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicReaderTests::TestRun_ReadMessages_With_Future_Offset [GOOD] Test command err: === Starting PQ server === Server->StartServer(false); 2026-01-04T03:03:04.486421Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591336590413705312:2077];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:03:04.486505Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T03:03:04.548034Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T03:03:04.550316Z node 2 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[2:7591336589359696453:2081];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:03:04.551422Z node 2 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T03:03:04.558807Z node 2 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T03:03:04.690504Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T03:03:04.707287Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T03:03:04.893613Z node 
1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:03:04.893779Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:03:04.896587Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:03:04.896715Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:03:04.904195Z node 1 :HIVE WARN: hive_impl.cpp:814: HIVE#72057594037968897 Handle TEvInterconnect::TEvNodeConnected, NodeId 2 Cookie 2 2026-01-04T03:03:04.904394Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:03:04.905240Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(2, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:03:04.991881Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:03:04.996166Z node 2 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T03:03:05.002095Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T03:03:05.206545Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/001d12/r3tmp/yandexaXoU4U.tmp 2026-01-04T03:03:05.206664Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/001d12/r3tmp/yandexaXoU4U.tmp 2026-01-04T03:03:05.209093Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/001d12/r3tmp/yandexaXoU4U.tmp 2026-01-04T03:03:05.209240Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T03:03:05.434761Z INFO: TTestServer started on Port 31977 GrpcPort 15514 2026-01-04T03:03:05.496285Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:03:05.558284Z node 2 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; PQClient connected to localhost:15514 === TenantModeEnabled() = 0 === Init PQ - start server on port 15514 2026-01-04T03:03:05.843037Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:377: TTxOperationPropose Execute, message: Transaction { WorkingDir: "/" OperationType: ESchemeOpAlterSubDomain SubDomain { Name: "Root" StoragePools { Name: "/Root:test" Kind: "test" } } } TxId: 281474976715657 TabletId: 72057594046644480 PeerName: "" , at schemeshard: 72057594046644480 2026-01-04T03:03:05.843745Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation_alter_subdomain.cpp:89: TAlterSubDomain Propose, path: //Root, opId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-04T03:03:05.843981Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount 
reason transaction target path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 1 2026-01-04T03:03:05.844013Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:5553: CreateTx for txid 281474976715657:0 type: TxAlterSubDomain target path: [OwnerId: 72057594046644480, LocalPathId: 1] source path: 2026-01-04T03:03:05.844888Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:127: IgniteOperation, opId: 281474976715657:1, propose status:StatusAccepted, reason: , at schemeshard: 72057594046644480 2026-01-04T03:03:05.844963Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:03:05.847787Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:460: TTxOperationPropose Complete, txId: 281474976715657, response: Status: StatusAccepted TxId: 281474976715657 SchemeshardId: 72057594046644480 PathId: 1, at schemeshard: 72057594046644480 2026-01-04T03:03:05.847950Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard_audit_log.cpp:222: AUDIT: txId: 281474976715657, subject: , status: StatusAccepted, operation: ALTER DATABASE, path: //Root 2026-01-04T03:03:05.848186Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-04T03:03:05.848251Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common.cpp:315: TCreateParts opId# 281474976715657:0 ProgressState, operation type: TxAlterSubDomain, at tablet# 72057594046644480 2026-01-04T03:03:05.848271Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common.cpp:369: TCreateParts opId# 281474976715657:0 ProgressState no shards to create, do next state 2026-01-04T03:03:05.848282Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 2 -> 3 2026-01-04T03:03:05.851318Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-04T03:03:05.851361Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:141: NSubDomainState::TConfigureParts operationId# 281474976715657:0 ProgressState, at schemeshard: 72057594046644480 2026-01-04T03:03:05.851377Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 3 -> 128 2026-01-04T03:03:05.853216Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:494: TTxOperationProgress Execute, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-04T03:03:05.853266Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:373: NSubDomainState::TPropose ProgressState, operationId: 281474976715657:0, at schemeshard: 72057594046644480 2026-01-04T03:03:05.853282Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:389: NSubDomainState::TPropose ProgressState leave, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-04T03:03:05.853313Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1723: TOperation IsReadyToPropose , TxId: 281474976715657 ready parts: 1/1 2026-01-04T03:03:05.857738Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: 
schemeshard__operation.cpp:1792: TOperation DoPropose send propose to coordinator: 72057594046316545 message:Transaction { AffectedSet { TabletId: 72057594046644480 Flags: 2 } ExecLevel: 0 TxId: 281474976715657 MinStep: 0 MaxStep: 18446744073709551615 IgnoreLowDiskSpace: true } CoordinatorID: 72057594046316545 2026-01-04T03:03:05.858419Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__notify.cpp:31: NotifyTxCompletion operation in-flight, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T03:03:05.858442Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1679: TOperation IsReadyToNotify, TxId: 281474976715657, ready parts: 0/1, is published: true 2026-01-04T03:03:05.858472Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__notify.cpp:132: NotifyTxCompletion transaction is registered, txId: 281474976715657, at schemeshard: 72057594046644480 2026-01-04T03:03:05.860257Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:665: Send tablet strongly msg operationId: 281474976715657:4294967295 from tablet: 72057594046644480 to tablet: 72057594046316545 cookie: 0:281474976715657 msg type: 269090816 2026-01-04T03:03:05.860359Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:1824: TOperation RegisterRelationByTabletId, TxId: 281474976715657, partId: 4294967295, tablet: 72057594046316545 2026-01-04T03:03:05.862951Z node 1 :FLAT_TX_SCHEMESHARD NOTICE: schemeshard__operation.cpp:685: TTxOperationPlanStep Execute, stepId: 1767495785909, transactions count in step: 1, at schemeshard: 72057594046644480 2026-01-04T03:03:05.863143Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation.cpp:689: TTxOperationPlanStep Execute, message: Transactions { TxId: 281474976715657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767495785909 MediatorID: 72057594046382081 TabletID: 72057594046644480, at schemeshard: 72057594046644480 2026-01-04T03:03:05.863187Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__operation_common_subdomain.cpp:303: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-04T03:03:05.863522Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard_impl.cpp:2700: Change state for txid 281474976715657:0 128 -> 240 2026-01-04T03:03:05.863838Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_common_subdomain.cpp:362: NSubDomainState::TPropose HandleReply TEvOperationPlan, operationId 281474976715657:0, at tablet# 72057594046644480 2026-01-04T03:03:05.864035Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard_impl.cpp:600: IncrementPathDbRefCount reason publish path for pathId [OwnerId: 72057594046644480, LocalPathId: 1] was 2 2026-01-04T03:03:05.864073Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__operation_side_effects.cpp:412: DoUpdateTenant no IsExternalSubDomainRoot, pathId: : [OwnerId: 72057594046644480, LocalPathId: 1], at schemeshard: 72057594046644480 2026-01-04T03:03:05.866062Z node 1 :FLAT_TX_SCHEMESHARD INFO: schemeshard__publish_to_scheme_board.cpp:28: TTxPublishToSchemeBoard DoExecute, at schemeshard: 72057594046644480 2026-01-04T03:03:05.866087Z node 1 :FLAT_TX_SCHEMESHARD DEBUG: schemeshard__publish_to_sche ... 
ession 2026-01-04T03:03:47.884506Z node 5 :PQ_READ_PROXY DEBUG: grpc_pq_read.h:109: new grpc connection 2026-01-04T03:03:47.884529Z node 5 :PQ_READ_PROXY DEBUG: grpc_pq_read.h:131: new session created cookie 1 2026-01-04T03:03:47.886050Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer session grpc read done: success# 1, data# { init_request { topics_read_settings { path: "rt3.dc1--topic1" } consumer: "user" } } 2026-01-04T03:03:47.886269Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:941: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 read init: from# ipv6:[::1]:56896, request# { init_request { topics_read_settings { path: "rt3.dc1--topic1" } consumer: "user" } } 2026-01-04T03:03:47.886670Z node 5 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:41: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 auth for : user 2026-01-04T03:03:47.887209Z node 5 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:131: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 Handle describe topics response 2026-01-04T03:03:47.887353Z node 5 :PQ_READ_PROXY DEBUG: read_init_auth_actor.cpp:68: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 auth is DEAD 2026-01-04T03:03:47.887452Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:1058: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 auth ok: topics# 1, initDone# 0 2026-01-04T03:03:47.888649Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:1229: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 register session: topic# rt3.dc1--topic1 2026-01-04T03:03:47.889033Z :INFO: [] [] [6c2c99eb-e27aebbd-af4dfb96-918e6e9f] [] Got InitResponse. 
ReadSessionId: shared/user_5_1_10753993518068878016_v1 2026-01-04T03:03:47.889078Z :DEBUG: [] [] [6c2c99eb-e27aebbd-af4dfb96-918e6e9f] [] In ContinueReadingDataImpl, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T03:03:47.889267Z :DEBUG: [] [] [6c2c99eb-e27aebbd-af4dfb96-918e6e9f] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2026-01-04T03:03:47.889216Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037898][rt3.dc1--topic1] pipe [5:7591336773607329524:2568] connected; active server actors: 1 2026-01-04T03:03:47.889633Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1708: [72075186224037898][rt3.dc1--topic1] consumer "user" register session for pipe [5:7591336773607329524:2568] session shared/user_5_1_10753993518068878016_v1 2026-01-04T03:03:47.889690Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:639: [72075186224037898][rt3.dc1--topic1] consumer user register readable partition 0 2026-01-04T03:03:47.889754Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:669: [72075186224037898][rt3.dc1--topic1] consumer user family created family=1 (Status=Free, Partitions=[0]) 2026-01-04T03:03:47.889821Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:871: [72075186224037898][rt3.dc1--topic1] consumer user register reading session ReadingSession "shared/user_5_1_10753993518068878016_v1" (Sender=[5:7591336773607329521:2568], Pipe=[5:7591336773607329524:2568], Partitions=[], ActiveFamilyCount=0) 2026-01-04T03:03:47.889857Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1187: [72075186224037898][rt3.dc1--topic1] consumer user rebalancing was scheduled 2026-01-04T03:03:47.889908Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1259: [72075186224037898][rt3.dc1--topic1] consumer user balancing. Sessions=1, Families=1, UnreadableFamilies=1 [1 (0), ], RequireBalancing=0 [] 2026-01-04T03:03:47.889978Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1306: [72075186224037898][rt3.dc1--topic1] consumer user balancing family=1 (Status=Free, Partitions=[0]) for ReadingSession "shared/user_5_1_10753993518068878016_v1" (Sender=[5:7591336773607329521:2568], Pipe=[5:7591336773607329524:2568], Partitions=[], ActiveFamilyCount=0) 2026-01-04T03:03:47.890045Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:549: [72075186224037898][rt3.dc1--topic1] consumer user family 1 status Active partitions [0] session "shared/user_5_1_10753993518068878016_v1" sender [5:7591336773607329521:2568] lock partition 0 for ReadingSession "shared/user_5_1_10753993518068878016_v1" (Sender=[5:7591336773607329521:2568], Pipe=[5:7591336773607329524:2568], Partitions=[], ActiveFamilyCount=1) generation 1 step 1 2026-01-04T03:03:47.890113Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1326: [72075186224037898][rt3.dc1--topic1] consumer user start rebalancing. 
familyCount=1, sessionCount=1, desiredFamilyCount=1, allowPlusOne=0 2026-01-04T03:03:47.890143Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1404: [72075186224037898][rt3.dc1--topic1] consumer user balancing duration: 0.000210s 2026-01-04T03:03:47.889756Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 grpc read done: success# 1, data# { read_request { bytes_size: 52428800 } } 2026-01-04T03:03:47.889909Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1850: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 got read request: guid# 7c3445bc-deb8add-6e015806-62174984 2026-01-04T03:03:47.891214Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:1347: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 assign: record# { Partition: 0 TabletId: 72075186224037897 Topic: "rt3.dc1--topic1" Generation: 1 Step: 1 Session: "shared/user_5_1_10753993518068878016_v1" ClientId: "user" PipeClient { RawX1: 7591336773607329524 RawX2: 4503621102209544 } Path: "/Root/PQ/rt3.dc1--topic1" } 2026-01-04T03:03:47.891332Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:1144: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 INITING TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) 2026-01-04T03:03:47.891677Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:984: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) pipe restart attempt 0 pipe creation result: OK TabletId: 72075186224037897 Generation: 1, pipe: [5:7591336773607329526:2571] 2026-01-04T03:03:47.892544Z node 5 :PQ_READ_PROXY DEBUG: caching_service.cpp:283: Direct read cache: registered server session: shared/user_5_1_10753993518068878016_v1:1 with generation 1 2026-01-04T03:03:47.894802Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:664: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) initDone 0 event { CmdGetClientOffsetResult { Offset: 0 EndOffset: 6 WriteTimestampMS: 1767495827748 CreateTimestampMS: 1767495827746 SizeLag: 280 WriteTimestampEstimateMS: 1767495827871 ClientHasAnyCommits: false } Cookie: 18446744073709551615 } 2026-01-04T03:03:47.894872Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:695: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 INIT DONE TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 6 readOffset 0 committedOffset 0 2026-01-04T03:03:47.894954Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:1446: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 sending to client partition status 2026-01-04T03:03:47.895874Z :INFO: [] [] [6c2c99eb-e27aebbd-af4dfb96-918e6e9f] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "/topic1". Partition: 0. 
Read offset: 10 2026-01-04T03:03:47.896320Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 grpc read done: success# 1, data# { start_partition_session_response { partition_session_id: 1 read_offset: 10 } } 2026-01-04T03:03:47.896454Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:551: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 got StartRead from client: partition# TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1), readOffset# 10, commitOffset# (empty maybe) 2026-01-04T03:03:47.896517Z node 5 :PQ_READ_PROXY INFO: partition_actor.cpp:1024: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 Start reading TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) EndOffset 6 readOffset 0 committedOffset 0 clientCommitOffset (empty maybe) clientReadOffset 10 2026-01-04T03:03:47.896560Z node 5 :PQ_READ_PROXY DEBUG: partition_actor.cpp:1278: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 TopicId: Topic rt3.dc1--topic1 in dc dc1 in database: Root, partition 0(assignId:1) wait data in partition inited, cookie 1 from offset 10 2026-01-04T03:03:47.977042Z :INFO: [] [] [6c2c99eb-e27aebbd-af4dfb96-918e6e9f] Closing read session. Close timeout: 0.000000s 2026-01-04T03:03:47.977142Z :INFO: [] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:/topic1:0:1:0:0 2026-01-04T03:03:47.977206Z :INFO: [] [] [6c2c99eb-e27aebbd-af4dfb96-918e6e9f] Counters: { Errors: 0 CurrentSessionLifetimeMs: 123 BytesRead: 0 MessagesRead: 0 BytesReadCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T03:03:47.977335Z :NOTICE: [] [] [6c2c99eb-e27aebbd-af4dfb96-918e6e9f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-04T03:03:47.977399Z :DEBUG: [] [] [6c2c99eb-e27aebbd-af4dfb96-918e6e9f] [] Abort session to cluster 2026-01-04T03:03:47.978384Z :NOTICE: [] [] [6c2c99eb-e27aebbd-af4dfb96-918e6e9f] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T03:03:47.980484Z node 5 :PQ_READ_PROXY DEBUG: read_session_actor.cpp:122: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 grpc read done: success# 0, data# { } 2026-01-04T03:03:47.980548Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:125: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 grpc read failed 2026-01-04T03:03:47.980579Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:92: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 grpc closed 2026-01-04T03:03:47.980622Z node 5 :PQ_READ_PROXY INFO: read_session_actor.cpp:383: session cookie 1 consumer shared/user session shared/user_5_1_10753993518068878016_v1 is DEAD 2026-01-04T03:03:47.983177Z node 6 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1661: [72075186224037898][rt3.dc1--topic1] pipe [5:7591336773607329524:2568] disconnected. 2026-01-04T03:03:47.983220Z node 6 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1672: [72075186224037898][rt3.dc1--topic1] pipe [5:7591336773607329524:2568] disconnected; active server actors: 1 2026-01-04T03:03:47.983244Z node 6 :PERSQUEUE_READ_BALANCER NOTICE: read_balancer__balancing.cpp:1681: [72075186224037898][rt3.dc1--topic1] pipe [5:7591336773607329524:2568] client user disconnected session shared/user_5_1_10753993518068878016_v1 2026-01-04T03:03:47.984382Z node 5 :PQ_READ_PROXY DEBUG: caching_service.cpp:139: Direct read cache: server session deregistered: shared/user_5_1_10753993518068878016_v1 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/checkpointing/ut/unittest >> TCheckpointCoordinatorTests::ShouldDoNothingIfNoIngressTasks [GOOD] Test command err: 2026-01-04T03:01:43.425229Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2026-01-04T03:01:43.425408Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2026-01-04T03:01:43.425464Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2026-01-04T03:01:43.425851Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:131: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2026-01-04T03:01:43.425887Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:140: [my-graph-id.42] Successfully registered in storage 2026-01-04T03:01:43.425938Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:141: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2026-01-04T03:01:43.427116Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:149: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2026-01-04T03:01:43.435358Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:186: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2026-01-04T03:01:43.435446Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:212: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 
2026-01-04T03:01:43.435483Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-01-04T03:01:43.447478Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2026-01-04T03:01:43.447574Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2026-01-04T03:01:43.448306Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:445: [my-graph-id.42] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2026-01-04T03:01:43.448596Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-04T03:01:43.448652Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2026-01-04T03:01:43.448694Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-04T03:01:43.448733Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2026-01-04T03:01:43.448806Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-04T03:01:43.448851Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2026-01-04T03:01:43.448881Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:489: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2026-01-04T03:01:43.449009Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:501: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2026-01-04T03:01:43.449046Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:516: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2026-01-04T03:01:43.449239Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2026-01-04T03:01:43.449276Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:1] State committed [1:6:2053], need 1 more acks 2026-01-04T03:01:43.449332Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 2026-01-04T03:01:43.449371Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:1] State committed [1:8:2055], need 0 more acks 2026-01-04T03:01:43.449405Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2026-01-04T03:01:43.449480Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: 
checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2026-01-04T03:01:43.449511Z node 1 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:579: [my-graph-id.42] [42:1] Checkpoint completed 2026-01-04T03:01:43.449557Z node 1 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:649: [my-graph-id.42] Got TEvRunGraph 2026-01-04T03:01:43.534592Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2026-01-04T03:01:43.534699Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2026-01-04T03:01:43.534740Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2026-01-04T03:01:43.534989Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:131: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2026-01-04T03:01:43.535018Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:140: [my-graph-id.42] Successfully registered in storage 2026-01-04T03:01:43.535043Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:141: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2026-01-04T03:01:43.535095Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:149: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2026-01-04T03:01:43.535257Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:186: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2026-01-04T03:01:43.535292Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:212: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2026-01-04T03:01:43.535324Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-01-04T03:01:43.535469Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2026-01-04T03:01:43.535532Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2026-01-04T03:01:43.535590Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:445: [my-graph-id.42] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2026-01-04T03:01:43.535768Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-04T03:01:43.535804Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2026-01-04T03:01:43.535846Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-04T03:01:43.535935Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: 
checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 1 more acks 2026-01-04T03:01:43.535982Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-04T03:01:43.536017Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 0 more acks 2026-01-04T03:01:43.536052Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:489: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2026-01-04T03:01:43.536141Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:501: [my-graph-id.42] [42:1] Got TEvSetCheckpointPendingCommitStatusResponse 2026-01-04T03:01:43.536183Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:516: [my-graph-id.42] [42:1] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2026-01-04T03:01:43.536333Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 1 2026-01-04T03:01:43.536380Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:1] State committed [2:6:2053], need 1 more acks 2026-01-04T03:01:43.536420Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:1] Got TEvStateCommitted; task: 3 2026-01-04T03:01:43.536456Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:1] State committed [2:8:2055], need 0 more acks 2026-01-04T03:01:43.536498Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:1] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2026-01-04T03:01:43.536567Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:1] Got TEvCompleteCheckpointResponse 2026-01-04T03:01:43.536610Z node 2 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:579: [my-graph-id.42] [42:1] Checkpoint completed 2026-01-04T03:01:43.536652Z node 2 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:649: [my-graph-id.42] Got TEvRunGraph 2026-01-04T03:01:43.615183Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2026-01-04T03:01:43.615275Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2026-01-04T03:01:43.615313Z node 3 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Wa ... 
-01-04T03:01:43.705582Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:3] Task state saved, need 1 more acks 2026-01-04T03:01:43.705617Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:3] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-04T03:01:43.705641Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:3] Task state saved, need 0 more acks 2026-01-04T03:01:43.705674Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:489: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2026-01-04T03:01:43.705725Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:501: [my-graph-id.42] [42:3] Got TEvSetCheckpointPendingCommitStatusResponse 2026-01-04T03:01:43.705762Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:516: [my-graph-id.42] [42:3] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2026-01-04T03:01:43.705878Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 1 2026-01-04T03:01:43.705910Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:3] State committed [4:6:2053], need 1 more acks 2026-01-04T03:01:43.705939Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:3] Got TEvStateCommitted; task: 3 2026-01-04T03:01:43.705989Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:3] State committed [4:8:2055], need 0 more acks 2026-01-04T03:01:43.706020Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:3] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2026-01-04T03:01:43.706077Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:3] Got TEvCompleteCheckpointResponse 2026-01-04T03:01:43.706108Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:579: [my-graph-id.42] [42:3] Checkpoint completed 2026-01-04T03:01:43.706137Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:373: [my-graph-id.42] Got TEvScheduleCheckpointing 2026-01-04T03:01:43.706177Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:4] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-01-04T03:01:43.706222Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:4] Got TEvCreateCheckpointResponse 2026-01-04T03:01:43.706248Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:4] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 2026-01-04T03:01:43.706317Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-04T03:01:43.706356Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:4] Task 
state saved, need 2 more acks 2026-01-04T03:01:43.706397Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-04T03:01:43.706422Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:4] Task state saved, need 1 more acks 2026-01-04T03:01:43.706454Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:4] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-04T03:01:43.706478Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:4] Task state saved, need 0 more acks 2026-01-04T03:01:43.706501Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:489: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'PendingCommit' Waiting for TEvSetCheckpointPendingCommitStatusRequest (storage) 2026-01-04T03:01:43.706541Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:501: [my-graph-id.42] [42:4] Got TEvSetCheckpointPendingCommitStatusResponse 2026-01-04T03:01:43.706609Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:516: [my-graph-id.42] [42:4] Checkpoint status changed to 'PendingCommit', committing states to 2 actor(s) Waiting for TEvCommitChanges (ingress) Waiting for TEvCommitChanges (egress) 2026-01-04T03:01:43.706710Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 1 2026-01-04T03:01:43.706737Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:4] State committed [4:6:2053], need 1 more acks 2026-01-04T03:01:43.706778Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:532: [my-graph-id.42] [42:4] Got TEvStateCommitted; task: 3 2026-01-04T03:01:43.706811Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:541: [my-graph-id.42] [42:4] State committed [4:8:2055], need 0 more acks 2026-01-04T03:01:43.706836Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:543: [my-graph-id.42] [42:4] Got all acks, changing checkpoint status to 'Completed' Waiting for TEvCompleteCheckpointRequest (storage) 2026-01-04T03:01:43.706882Z node 4 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:564: [my-graph-id.42] [42:4] Got TEvCompleteCheckpointResponse 2026-01-04T03:01:43.706907Z node 4 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:579: [my-graph-id.42] [42:4] Checkpoint completed 2026-01-04T03:01:43.790688Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2026-01-04T03:01:43.790818Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 1, ActorsToNotify count: 2, ActorsToWaitFor count: 3 2026-01-04T03:01:43.790871Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:114: [my-graph-id.42] Send TEvRegisterCoordinatorRequest Waiting for TEvRegisterCoordinatorRequest (storage) 2026-01-04T03:01:43.791151Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:131: [my-graph-id.42] Got TEvRegisterCoordinatorResponse; issues: 2026-01-04T03:01:43.791184Z node 5 
:STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:140: [my-graph-id.42] Successfully registered in storage 2026-01-04T03:01:43.791220Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:141: [my-graph-id.42] Send TEvNewCheckpointCoordinator to 3 actor(s) 2026-01-04T03:01:43.791301Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:149: [my-graph-id.42] Send TEvGetCheckpointsMetadataRequest; state load mode: FROM_LAST_CHECKPOINT; load graph: 0 Waiting for TEvGetCheckpointsMetadataRequest (storage) 2026-01-04T03:01:43.791482Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:186: [my-graph-id.42] Got TEvGetCheckpointsMetadataResponse 2026-01-04T03:01:43.791516Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:212: [my-graph-id.42] Found no checkpoints to restore from, creating a 'zero' checkpoint 2026-01-04T03:01:43.791550Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:1] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-01-04T03:01:43.791702Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:1] Got TEvCreateCheckpointResponse 2026-01-04T03:01:43.791742Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:1] Checkpoint successfully created, going to inject barriers to 1 actor(s) 2026-01-04T03:01:43.791799Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:445: [my-graph-id.42] Send TEvRun to all actors Waiting for TEvInjectCheckpointBarrier (ingress) 2026-01-04T03:01:43.791973Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: OK, size: 100 2026-01-04T03:01:43.792016Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:476: [my-graph-id.42] [42:1] Task state saved, need 2 more acks 2026-01-04T03:01:43.792096Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2026-01-04T03:01:43.792133Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: checkpoint_coordinator.cpp:479: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2026-01-04T03:01:43.792185Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:466: [my-graph-id.42] [42:1] Got TEvSaveTaskStateResult; task 0, status: STORAGE_ERROR, size: 0 2026-01-04T03:01:43.792229Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: checkpoint_coordinator.cpp:479: [my-graph-id.42] [42:1] StorageError: can't save node state, aborting checkpoint 2026-01-04T03:01:43.792261Z node 5 :STREAMS_CHECKPOINT_COORDINATOR ERROR: checkpoint_coordinator.cpp:484: [my-graph-id.42] [42:1] Got all acks for aborted checkpoint, aborting in storage Waiting for TEvAbortCheckpointRequest (storage) 2026-01-04T03:01:43.792328Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:590: [my-graph-id.42] [42:1] Got TEvAbortCheckpointResponse 2026-01-04T03:01:43.792364Z node 5 :STREAMS_CHECKPOINT_COORDINATOR WARN: checkpoint_coordinator.cpp:596: [my-graph-id.42] [42:1] Checkpoint aborted 2026-01-04T03:01:43.792410Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:373: [my-graph-id.42] Got TEvScheduleCheckpointing 2026-01-04T03:01:43.792449Z node 5 
:STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:349: [my-graph-id.42] [42:2] Registering new checkpoint in storage Waiting for TEvCreateCheckpointRequest (storage) 2026-01-04T03:01:43.792519Z node 5 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:389: [my-graph-id.42] [42:2] Got TEvCreateCheckpointResponse 2026-01-04T03:01:43.792559Z node 5 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:435: [my-graph-id.42] [42:2] Checkpoint successfully created, going to inject barriers to 1 actor(s) Waiting for TEvInjectCheckpointBarrier (ingress) 2026-01-04T03:01:43.883363Z node 6 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:70: [my-graph-id.42] TEvReadyState, streaming disposition { }, state load mode FROM_LAST_CHECKPOINT, checkpointing period 3600.000000s 2026-01-04T03:01:43.883491Z node 6 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:104: [my-graph-id.42] AllActors count: 3, ActorsToTrigger count: 0, ActorsToNotify count: 1, ActorsToWaitFor count: 2 2026-01-04T03:01:43.883529Z node 6 :STREAMS_CHECKPOINT_COORDINATOR DEBUG: checkpoint_coordinator.cpp:107: [my-graph-id.42] No ingress tasks, coordinator was disabled 2026-01-04T03:01:43.883562Z node 6 :STREAMS_CHECKPOINT_COORDINATOR INFO: checkpoint_coordinator.cpp:445: [my-graph-id.42] Send TEvRun to all actors |99.0%| [TM] {RESULT} ydb/core/fq/libs/checkpointing/ut/unittest |99.1%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/checkpointing/ut/unittest >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] >> KqpTpch::Query20 [GOOD] >> KqpTpch::Query21 >> TestPurecalcFilter::PartialPush [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] >> TestPurecalcFilter::CompilationValidation >> TMLPReaderTests::TopicWithBigMessage [GOOD] >> TMLPReaderTests::TopicWithKeepMessageOrder |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TTopicWriterTests::TestEnterMessage_OnlyDelimiters [GOOD] >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple >> TTopicWriterTests::TestTopicWriterParams_No_Delimiter [GOOD] >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestEnterMessage_SomeBinaryData [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest >> QuoterWithKesusTest::UpdatesCountersForParentResources [GOOD] >> QuoterWithKesusTest::CanDeleteResourceWhenUsingIt |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_InvalidDelimiter [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TTxDataShardLocalKMeansScan::MainToBuild [GOOD] >> TTxDataShardLocalKMeansScan::MainToBuildWithOverlap >> TestPurecalcFilter::CompilationValidation [GOOD] >> 
TestPurecalcFilter::Emtpy >> TestPurecalcFilter::Emtpy [GOOD] >> TestPurecalcFilter::Watermark >> TMemoryController::ResourceBroker_ConfigLimit [GOOD] >> TMemoryController::ResourceBroker_ConfigCS >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_same_values_simple [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data >> test_canonical_requests.py::Test::test_cluster_get_set [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_insert.py::TestInsertOperations::test_query_pairs [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> KeyValueGRPCService::SimpleWriteReadWithoutLockGeneration2V2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatusV1 |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_hive_it_will_be_restarted_and_can_create_tablets [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> DataShardStats::Follower [GOOD] >> DataShardStats::Tli >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_with_valid_and_invalid_data [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[async_read_table] [GOOD] >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_3__SYNC-pk_types6-all_types6-index6---SYNC] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> TestPurecalcFilter::Watermark [GOOD] >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TestPurecalcFilter::WatermarkWhere >> KqpTpch::Query21 [GOOD] >> KqpTpch::Query22 >> test_canonical_records.py::test_create_drop_and_alter_database [GOOD] >> test_public_api.py::TestExplain::test_explain_data_query >> QuoterWithKesusTest::CanDeleteResourceWhenUsingIt [GOOD] >> QuoterWithKesusTest::CanKillKesusWhenUsingIt >> TTxDataShardLocalKMeansScan::MainToBuildWithOverlap [GOOD] >> TTxDataShardLocalKMeansScan::BuildToPosting >> TFlatTableExecutor_TryKeepInMemory::TestOnceSharedCache >> TRowVersionRangesTest::SimpleInserts [GOOD] >> TRowVersionRangesTest::MergeFailLeft [GOOD] >> TRowVersionRangesTest::MergeFailRight [GOOD] >> TRowVersionRangesTest::MergeFailOuter [GOOD] >> TRowVersionRangesTest::MergeFailInner [GOOD] >> TRowVersionRangesTest::MergeExtendRightInner [GOOD] >> TRowVersionRangesTest::MergeHoleExact [GOOD] >> TRowVersionRangesTest::MergeHoleInner [GOOD] >> TRowVersionRangesTest::MergeHoleOuter [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorOrder [GOOD] >> TRowVersionRangesTest::SteppedCookieAllocatorLowerBound [GOOD] >> TS3FIFOCache::Touch [GOOD] >> TS3FIFOCache::Touch_MainQueue [GOOD] >> TS3FIFOCache::EvictNext [GOOD] >> TS3FIFOCache::UpdateLimit [GOOD] >> 
TS3FIFOCache::Erase [GOOD] >> TS3FIFOCache::Random [GOOD] >> TS3FIFOCache::InsertUntouched [GOOD] >> TS3FIFOCache::EnsureLimits [GOOD] >> TS3FIFOGhostQueue::Basics [GOOD] >> TScheme::Shapshot [GOOD] >> TScheme::Delta [GOOD] >> TScheme::Policy [GOOD] >> TScreen::Cuts [GOOD] >> TScreen::Join [GOOD] >> TScreen::Sequential >> TFlatTableExecutor_TryKeepInMemory::TestOnceSharedCache [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemory >> TFlatTableExecutor_LongTx::MemTableLongTx >> TMemoryController::ResourceBroker_ConfigCS [GOOD] >> TFlatTableExecutor_LongTx::MemTableLongTx [GOOD] >> TFlatTableExecutor_LongTx::LongTxBorrow [GOOD] >> TFlatTableExecutor_LongTx::MemTableLongTxRead >> TMemoryController::GroupedMemoryLimiter_ConfigCS >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemory [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryMain >> TFlatTableExecutor_LongTx::MemTableLongTxRead [GOOD] >> TFlatTableExecutor_LongTx::MergeSkewedCommitted >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryMain [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_FlatIndex >> TFlatTableExecutor_LongTx::MergeSkewedCommitted [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::SmallValues >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_FlatIndex [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_BTreeIndex >> TFlatTableExecutor_LongTxAndBlobs::SmallValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::OuterBlobValues [GOOD] >> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues >> TScreen::Sequential [GOOD] >> TScreen::Random >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAlt_BTreeIndex [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAll >> TFlatTableExecutor_LongTxAndBlobs::ExternalBlobValues [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestEnqueueCancel [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriority >> TFlatTableExecutor_TryKeepInMemory::TestTryKeepInMemoryAll [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyTryKeepInMemory >> TFlatTableExecutor_LowPriorityTxs::TestLowPriority [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityCancel [GOOD] >> TFlatTableExecutor_LowPriorityTxs::TestLowPriorityAllocatingCancel [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyTryKeepInMemory [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyPartiallyTryKeepInMemory >> TFlatTableExecutor_MoveTableData::TestMoveSnapshot [GOOD] >> TFlatTableExecutor_MoveTableData::TestMoveSnapshotFollower [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScan >> TFlatTableExecutor_TryKeepInMemory::TestAlterAddFamilyPartiallyTryKeepInMemory [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryAll >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryAll [GOOD] >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryPartially >> TFlatTableExecutor_PostponedScan::TestPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan >> TFlatTableExecutor_TryKeepInMemory::TestAlterFamilyDisableTryKeepInMemoryPartially [GOOD] >> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs >> TFlatTableExecutor_PostponedScan::TestCancelFinishedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan >> TFlatTableExecutor_VersionedLargeBlobs::TestMultiVersionCompactionLargeBlobs [GOOD] >> 
TFlatTableExecutor_VersionedRows::TestVersionedRows >> TestPurecalcFilter::WatermarkWhere [GOOD] >> TFlatTableExecutor_PostponedScan::TestCancelRunningPostponedScan [GOOD] >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC >> TFlatTableExecutor_PostponedScan::TestPostponedScanSnapshotMVCC [GOOD] >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign >> TFlatTableExecutor_VersionedRows::TestVersionedRows [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs >> TFlatTableExecutor_Reboot::TestSchemeGcAfterReassign [GOOD] >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability >> TScreen::Random [GOOD] >> TScreen::Shrink [GOOD] >> TScreen::Cook [GOOD] >> TSharedPageCache::Limits >> TestPurecalcFilter::WatermarkWhereFalse >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/dstool/py3test >> test_canonical_requests.py::Test::test_cluster_get_set [GOOD] |99.1%| [TM] {RESULT} ydb/tests/functional/dstool/py3test >> TFlatTableExecutor_RejectProbability::MaxedOutRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::SomeRejectProbability >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatusV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatusV2 |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/dstool/py3test >> TFlatTableExecutor_RejectProbability::SomeRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability >> TSharedPageCache::Limits [GOOD] >> TSharedPageCache::Limits_Config >> TFlatTableExecutor_RejectProbability::ZeroRejectProbability [GOOD] >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables >> TMLPReaderTests::TopicWithKeepMessageOrder [GOOD] >> TMLPWriterTests::TopicNotExists >> TFlatTableExecutor_RejectProbability::ZeroRejectProbabilityMultipleTables [GOOD] >> TFlatTableExecutor_Reschedule::TestExecuteReschedule [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorSetResourceProfile [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestTxData >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestTxData [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorStaticMemoryLimits [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory >> TSharedPageCache::Limits_Config [GOOD] >> TSharedPageCache::S3FIFO >> TFlatTableExecutor_ResourceProfile::TestExecutorReuseStaticMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxDataLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestPages [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded >> KqpTpch::Query22 [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPageLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory >> test_read_table.py::TestReadTableTruncatedResults::test_truncated_results[read_table] [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemory [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower >> test_public_api.py::TestExplain::test_explain_data_query [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorRequestMemoryFollower [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorMemoryLimitExceeded [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData >> TFlatTableExecutor_ResourceProfile::TestExecutorPreserveTxData [GOOD] >> 
TFlatTableExecutor_ResourceProfile::TestExecutorTxDataGC [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxPartialDataHold [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldAndUse [GOOD] >> TFlatTableExecutor_ResourceProfile::TestExecutorTxHoldOnRelease [GOOD] >> TFlatTableExecutor_ResourceProfile::TestUpdateConfig >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/s3/py3test >> test_s3.py::TestYdbS3TTL::test_s3[table_index_4_UNIQUE_SYNC-pk_types0-all_types0-index0--UNIQUE-SYNC] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/s3/py3test >> TFlatTableExecutor_ResourceProfile::TestUpdateConfig [GOOD] >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan >> TSharedPageCache::S3FIFO [GOOD] >> TSharedPageCache::BigCache_BTreeIndex >> TSharedPageCache::BigCache_BTreeIndex [GOOD] >> TSharedPageCache::BigCache_FlatIndex >> TestPurecalcFilter::WatermarkWhereFalse [GOOD] |99.1%| [TA] $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... results_accumulator.log} >> TSharedPageCache::BigCache_FlatIndex [GOOD] >> TSharedPageCache::MiddleCache_BTreeIndex |99.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... results_accumulator.log} |99.1%| [TA] {RESULT} $(B)/ydb/tests/datashard/s3/test-results/py3test/{meta.json ... results_accumulator.log} >> TestRawParser::Simple [GOOD] >> TestRawParser::ManyValues [GOOD] >> TSharedPageCache::MiddleCache_BTreeIndex [GOOD] >> TSharedPageCache::MiddleCache_FlatIndex >> TestRawParser::ChangeParserSchema [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/sqs/merge_split_common_table/std/py3test >> test.py::TestSqsSplitMergeStdTables::test_std_merge_split [GOOD] >> TestRawParser::TypeKindsValidation [GOOD] |99.1%| [TM] {RESULT} ydb/tests/functional/sqs/merge_split_common_table/std/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/sqs/merge_split_common_table/std/py3test >> TTxDataShardLocalKMeansScan::BuildToPosting [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuild >> TSharedPageCache::MiddleCache_FlatIndex [GOOD] >> TSharedPageCache::ZeroCache_BTreeIndex >> DataShardStats::Tli [GOOD] >> DataShardStats::HasSchemaChanges_BTreeIndex >> TFlatTableExecutor_SliceOverlapScan::TestSliceOverlapScan [GOOD] >> TFlatTableExecutor_SnapshotWithCommits::SnapshotWithCommits [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex >> TFlatTableExecutor_StickyPages::TestNonSticky_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex >> TFlatTableExecutor_StickyPages::TestNonSticky_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestSticky >> TMemoryController::GroupedMemoryLimiter_ConfigCS [GOOD] >> TMemoryController::ColumnShardCaches_Config >> TFlatTableExecutor_StickyPages::TestSticky [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex >> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex >> TSharedPageCache::ZeroCache_BTreeIndex [GOOD] >> TSharedPageCache::ZeroCache_FlatIndex >> TFlatTableExecutor_StickyPages::TestNonStickyGroup_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyMain |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> 
test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_simple_table-False] [GOOD] ------- [TM] {asan, default-linux-x86_64, pic, release} ydb/core/kqp/tests/kikimr_tpch/unittest >> KqpTpch::Query22 [GOOD] Test command err: -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 5 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 20 -- result -- rowIndex: 0 rowIndex: 10 -- result -- rowIndex: 0 rowIndex: 2 -- result -- rowIndex: 0 rowIndex: 28 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 37 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 4 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 1 -- result -- rowIndex: 0 rowIndex: 5 >> TFlatTableExecutor_StickyPages::TestStickyMain [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex >> TFlatTableExecutor_StickyPages::TestStickyAlt_FlatIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex >> TSharedPageCache::ZeroCache_FlatIndex [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Basics |99.1%| [TM] {RESULT} ydb/core/kqp/tests/kikimr_tpch/unittest |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/kqp/tests/kikimr_tpch/unittest |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> TFlatTableExecutor_StickyPages::TestStickyAlt_BTreeIndex [GOOD] >> TFlatTableExecutor_StickyPages::TestStickyAll ------- [TM] {asan, default-linux-x86_64, release} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest >> TestRawParser::TypeKindsValidation [GOOD] Test command err: 2026-01-04T03:02:37.582200Z node 1 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(col_0 == "str1", FALSE) AS _filter, _offset FROM Input; 2026-01-04T03:02:37.585237Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2026-01-04T03:02:37.585260Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(col_0 == "str1", FALSE) AS _filter, _offset FROM Input; ' (client id: [0:0:0]) 2026-01-04T03:02:37.585319Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 1 2026-01-04T03:02:37.585474Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 1 from [1:7591336474119428075:2051] 2026-01-04T03:02:40.450321Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [1:7591336474119428075:2051] [id 1]: Started compile request 2026-01-04T03:02:41.159416Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [1:7591336474119428075:2051] [id 1]: Compilation completed for request 2026-01-04T03:02:41.159548Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 1 from [1:7591336474119428075:2051] 2026-01-04T03:02:41.160105Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:57: TTopicFilters: Got compile response for request with id 1 2026-01-04T03:02:41.160139Z node 1 
:FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-04T03:02:41.160202Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:110: TTopicFilters: Start program with client id [0:0:0] 2026-01-04T03:02:41.160267Z node 1 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(col_1 == "str2", FALSE) AS _filter, _offset FROM Input; 2026-01-04T03:02:41.160512Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [1:0:0] 2026-01-04T03:02:41.160527Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(col_1 == "str2", FALSE) AS _filter, _offset FROM Input; ' (client id: [1:0:0]) 2026-01-04T03:02:41.160551Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 2 2026-01-04T03:02:41.160608Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 2 from [1:7591336474119428075:2051] 2026-01-04T03:02:41.160677Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [1:7591336474119428075:2051] [id 2]: Started compile request 2026-01-04T03:02:41.192682Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [1:7591336474119428075:2051] [id 2]: Compilation completed for request 2026-01-04T03:02:41.192780Z node 1 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 2 from [1:7591336474119428075:2051] 2026-01-04T03:02:41.192901Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:57: TTopicFilters: Got compile response for request with id 2 2026-01-04T03:02:41.192919Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-04T03:02:41.192944Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:110: TTopicFilters: Start program with client id [1:0:0] 2026-01-04T03:02:41.192996Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:439: GenerateSql: No sql was generated 2026-01-04T03:02:41.193012Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [2:0:0] 2026-01-04T03:02:41.193053Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:110: TTopicFilters: Start program with client id [2:0:0] 2026-01-04T03:02:41.193132Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:34: TTopicFilters: ProcessData for 3 clients, number rows: 3 2026-01-04T03:02:41.193150Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:223: TTopicFilters: Pass 3 rows to purecalc filter (client id: [1:0:0]) 2026-01-04T03:02:41.193157Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 3 rows 2026-01-04T03:02:41.195431Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:223: TTopicFilters: Pass 3 rows to purecalc filter (client id: [2:0:0]) 2026-01-04T03:02:41.195453Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 3 rows 2026-01-04T03:02:41.195463Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:411: TProgramRunHandler: Add 3 rows to client [2:0:0] without processing 2026-01-04T03:02:41.195479Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:223: TTopicFilters: Pass 3 rows to purecalc filter (client id: [0:0:0]) 2026-01-04T03:02:41.195483Z node 1 
:FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 3 rows 2026-01-04T03:02:41.195554Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:94: TTopicFilters: Remove program with client id [2:0:0] 2026-01-04T03:02:41.195590Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:34: TTopicFilters: ProcessData for 2 clients, number rows: 1 2026-01-04T03:02:41.195605Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:223: TTopicFilters: Pass 1 rows to purecalc filter (client id: [1:0:0]) 2026-01-04T03:02:41.195610Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 1 rows 2026-01-04T03:02:41.195625Z node 1 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:223: TTopicFilters: Pass 1 rows to purecalc filter (client id: [0:0:0]) 2026-01-04T03:02:41.195629Z node 1 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 1 rows 2026-01-04T03:02:41.451188Z node 2 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a1 = "str1", FALSE) AS _filter, _offset FROM Input; 2026-01-04T03:02:41.451403Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2026-01-04T03:02:41.451419Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a1 = "str1", FALSE) AS _filter, _offset FROM Input; ' (client id: [0:0:0]) 2026-01-04T03:02:41.451450Z node 2 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 1 2026-01-04T03:02:41.452194Z node 2 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 1 from [2:7591336489722352397:2051] 2026-01-04T03:02:44.172570Z node 2 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [2:7591336489722352397:2051] [id 1]: Started compile request 2026-01-04T03:02:44.191553Z node 2 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [2:7591336489722352397:2051] [id 1]: Compilation completed for request 2026-01-04T03:02:44.191659Z node 2 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 1 from [2:7591336489722352397:2051] 2026-01-04T03:02:44.191870Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:57: TTopicFilters: Got compile response for request with id 1 2026-01-04T03:02:44.191923Z node 2 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-04T03:02:44.191970Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:110: TTopicFilters: Start program with client id [0:0:0] 2026-01-04T03:02:44.192007Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2026-01-04T03:02:44.192079Z node 2 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:94: TTopicFilters: Remove program with client id [0:0:0] 2026-01-04T03:02:44.588289Z node 3 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 ... 
50, FALSE) AS _filter, _offset FROM Input; 2026-01-04T03:02:44.588509Z node 3 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2026-01-04T03:02:44.588531Z node 3 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 ... 50, FALSE) AS _filter, _offset FROM Input; ' (client id: [0:0:0]) 2026-01-04T03:02:44.588582Z node 3 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 1 2026-01-04T03:02:44.588668Z node 3 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 1 from [3:7591336505139187368:2051] 2026-01-04T03:02:47.533928Z node 3 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [3:7591336505139187368:2051] [id 1]: Started compile request 2026-01-04T03:02:47.869920Z node 4 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(TRUE, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) - Interval("PT5S")) AS _watermark FROM Input; 2026-01-04T03:02:47.870173Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:116: TTopicFilters: Create program with client id [0:0:0] 2026-01-04T03:02:47.870185Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:123: TTopicFilters: Create purecalc program for query ' PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(TRUE, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) - Interval("PT5S")) AS _watermark FROM Input; ' (client id: [0:0:0]) 2026-01-04T03:02:47.870216Z node 4 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 1 2026-01-04T03:02:47.870295Z node 4 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 1 from [4:7591336515918092965:2051] 2026-01-04T03:02:51.082210Z node 4 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [4:7591336515918092965:2051] [id 1]: Started compile request 2026-01-04T03:02:51.152985Z node 4 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [4:7591336515918092965:2051] [id 1]: Compilation completed for request 2026-01-04T03:02:51.153109Z node 4 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 1 from [4:7591336515918092965:2051] 2026-01-04T03:02:51.153244Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:57: TTopicFilters: Got compile response for request with id 1 2026-01-04T03:02:51.153275Z node 4 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-04T03:02:51.153320Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:110: TTopicFilters: Start program with client id [0:0:0] 2026-01-04T03:02:51.153379Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:34: TTopicFilters: ProcessData for 1 clients, number rows: 2 2026-01-04T03:02:51.153397Z node 4 :FQ_ROW_DISPATCHER TRACE: filters_set.cpp:223: TTopicFilters: Pass 2 rows to purecalc filter (client id: [0:0:0]) 2026-01-04T03:02:51.153420Z no ... 
PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 > 100, FALSE) AS _filter, _offset FROM Input; 2026-01-04T03:03:40.264281Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-04T03:03:40.264355Z node 42 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [42:7591336744489647704:2051] 2026-01-04T03:03:43.861050Z node 42 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [42:7591336744489647704:2051] [id 0]: Started compile request 2026-01-04T03:03:43.894427Z node 42 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [42:7591336744489647704:2051] [id 0]: Compilation completed for request 2026-01-04T03:03:43.894528Z node 42 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [42:7591336744489647704:2051] 2026-01-04T03:03:43.894700Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-04T03:03:43.894822Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-04T03:03:43.894916Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-04T03:03:43.894938Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-04T03:03:43.894975Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-04T03:03:43.894996Z node 42 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-04T03:03:44.351616Z node 43 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a1 is null, FALSE) AS _filter, _offset FROM Input; 2026-01-04T03:03:44.351932Z node 43 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-04T03:03:44.352054Z node 43 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [43:7591336763297911318:2051] 2026-01-04T03:03:47.796744Z node 43 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [43:7591336763297911318:2051] [id 0]: Started compile request 2026-01-04T03:03:48.295580Z node 44 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 > 50, FALSE) AS _filter, _offset FROM Input; 2026-01-04T03:03:48.297800Z node 44 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-04T03:03:48.298074Z node 44 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [44:7591336780107171918:2051] 2026-01-04T03:03:52.052016Z node 44 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [44:7591336780107171918:2051] [id 0]: Started compile request 2026-01-04T03:03:52.078786Z node 44 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [44:7591336780107171918:2051] [id 0]: Compilation completed for request 2026-01-04T03:03:52.078890Z node 44 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [44:7591336780107171918:2051] 
2026-01-04T03:03:52.079039Z node 44 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-04T03:03:52.079168Z node 44 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 1 rows 2026-01-04T03:03:52.624497Z node 45 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(a2 ... 50, FALSE) AS _filter, _offset FROM Input; 2026-01-04T03:03:52.624785Z node 45 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-04T03:03:52.625214Z node 45 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [45:7591336798119658670:2051] 2026-01-04T03:03:56.039564Z node 45 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [45:7591336798119658670:2051] [id 0]: Started compile request 2026-01-04T03:03:56.043125Z node 45 :FQ_ROW_DISPATCHER ERROR: compile_service.cpp:67: TPurecalcCompileActor [45:7591336798119658670:2051] [id 0]: Compilation failed for request 2026-01-04T03:03:56.043201Z node 45 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [45:7591336798119658670:2051] 2026-01-04T03:03:56.580986Z node 46 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:439: GenerateSql: No sql was generated 2026-01-04T03:03:56.581099Z node 46 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 1 rows 2026-01-04T03:03:56.581126Z node 46 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:411: TProgramRunHandler: Add 1 rows to client [0:0:0] without processing 2026-01-04T03:03:56.581175Z node 46 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 1 rows 2026-01-04T03:03:56.581190Z node 46 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:411: TProgramRunHandler: Add 1 rows to client [0:0:0] without processing 2026-01-04T03:03:56.670911Z node 47 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(TRUE, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) 
- Interval("PT5S")) AS _watermark FROM Input; 2026-01-04T03:03:56.671230Z node 47 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-04T03:03:56.671327Z node 47 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [47:7591336812148977051:2051] 2026-01-04T03:04:00.624033Z node 47 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [47:7591336812148977051:2051] [id 0]: Started compile request 2026-01-04T03:04:00.656538Z node 47 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [47:7591336812148977051:2051] [id 0]: Compilation completed for request 2026-01-04T03:04:00.656687Z node 47 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [47:7591336812148977051:2051] 2026-01-04T03:04:00.656789Z node 47 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-04T03:04:00.656907Z node 47 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-04T03:04:00.657047Z node 47 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-04T03:04:01.115982Z node 48 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(pass > 0, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) - Interval("PT5S")) AS _watermark FROM Input; 2026-01-04T03:04:01.116373Z node 48 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-04T03:04:01.116451Z node 48 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [48:7591336834569853381:2051] 2026-01-04T03:04:04.237903Z node 48 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [48:7591336834569853381:2051] [id 0]: Started compile request 2026-01-04T03:04:04.279147Z node 48 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [48:7591336834569853381:2051] [id 0]: Compilation completed for request 2026-01-04T03:04:04.279283Z node 48 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [48:7591336834569853381:2051] 2026-01-04T03:04:04.279396Z node 48 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-04T03:04:04.279527Z node 48 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-04T03:04:04.279721Z node 48 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-04T03:04:04.895579Z node 49 :FQ_ROW_DISPATCHER DEBUG: purecalc_filter.cpp:461: GenerateSql: Generated sql: PRAGMA config.flags("LLVM", "OFF"); SELECT COALESCE(FALSE, FALSE) AS _filter, _offset, (CAST(`ts` AS Timestamp?) 
- Interval("PT5S")) AS _watermark FROM Input; 2026-01-04T03:04:04.895911Z node 49 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:341: TProgramCompileHandler: Send compile request with id 0 2026-01-04T03:04:04.895987Z node 49 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:131: TPurecalcCompileService: Add to compile queue request with id 0 from [49:7591336849546873624:2051] 2026-01-04T03:04:08.156684Z node 49 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:52: TPurecalcCompileActor [49:7591336849546873624:2051] [id 0]: Started compile request 2026-01-04T03:04:08.179276Z node 49 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:70: TPurecalcCompileActor [49:7591336849546873624:2051] [id 0]: Compilation completed for request 2026-01-04T03:04:08.179363Z node 49 :FQ_ROW_DISPATCHER TRACE: compile_service.cpp:151: TPurecalcCompileService: Compile finished for request with id 0 from [49:7591336849546873624:2051] 2026-01-04T03:04:08.179450Z node 49 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:370: TProgramCompileHandler: Program compilation finished 2026-01-04T03:04:08.179526Z node 49 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-04T03:04:08.179597Z node 49 :FQ_ROW_DISPATCHER TRACE: purecalc_filter.cpp:408: TProgramRunHandler: ProcessData for 2 rows 2026-01-04T03:04:08.586523Z node 50 :FQ_ROW_DISPATCHER TRACE: raw_parser.cpp:56: TRawParser: Add 1 messages to parse 2026-01-04T03:04:08.586558Z node 50 :FQ_ROW_DISPATCHER TRACE: raw_parser.cpp:93: TRawParser: Do parsing, first offset: 42, value: {"a1": "hello1__large_str", "a2": 101, "event": "event1"} 2026-01-04T03:04:08.691405Z node 51 :FQ_ROW_DISPATCHER TRACE: raw_parser.cpp:56: TRawParser: Add 3 messages to parse 2026-01-04T03:04:08.691453Z node 51 :FQ_ROW_DISPATCHER TRACE: raw_parser.cpp:93: TRawParser: Do parsing, first offset: 42, value: {"a1": "hello1", "a2": "101", "event": "event1"} 2026-01-04T03:04:08.691552Z node 51 :FQ_ROW_DISPATCHER TRACE: raw_parser.cpp:93: TRawParser: Do parsing, first offset: 43, value: {"a1": "hello1", "a2": "101", "event": "event2"} 2026-01-04T03:04:08.691582Z node 51 :FQ_ROW_DISPATCHER TRACE: raw_parser.cpp:93: TRawParser: Do parsing, first offset: 44, value: {"a2": "101", "a1": "hello1", "event": "event3"} 2026-01-04T03:04:08.758251Z node 52 :FQ_ROW_DISPATCHER TRACE: raw_parser.cpp:56: TRawParser: Add 1 messages to parse 2026-01-04T03:04:08.758310Z node 52 :FQ_ROW_DISPATCHER TRACE: raw_parser.cpp:93: TRawParser: Do parsing, first offset: 42, value: {"a1": "hello1__large_str", "a2": 101, "event": "event1"} 2026-01-04T03:04:08.758578Z node 52 :FQ_ROW_DISPATCHER TRACE: raw_parser.cpp:56: TRawParser: Add 1 messages to parse 2026-01-04T03:04:08.758634Z node 52 :FQ_ROW_DISPATCHER TRACE: raw_parser.cpp:93: TRawParser: Do parsing, first offset: 42, value: {"a1": "hello2__large_str", "a2": 101, "event": "event2"} |99.1%| [TM] {RESULT} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/fq/libs/row_dispatcher/format_handler/ut/unittest >> TFlatTableExecutor_StickyPages::TestStickyAll [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky >> TFlatTableExecutor_StickyPages::TestAlterAddFamilySticky [GOOD] >> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky >> TFlatTableExecutor_StickyPages::TestAlterAddFamilyPartiallySticky [GOOD] >> TFlatTableExecutor_Truncate::Truncate [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWrite [GOOD] >> TFlatTableExecutor_Truncate::TruncateWhileCompacting >> 
TFlatTableExecutor_Truncate::TruncateWhileCompacting [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWriteWhileCompacting [GOOD] >> TFlatTableExecutor_Truncate::CompactThenTruncate >> TFlatTableExecutor_Truncate::CompactThenTruncate [GOOD] >> TFlatTableExecutor_Truncate::CompactThenTruncateAndWrite [GOOD] >> TFlatTableExecutor_Truncate::TruncateAtFollower >> test_isolation.py::TestTransactionIsolation::test_prevents_write_cycles_g0 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a >> TSharedPageCache::TryKeepInMemoryMode_Basics [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Enabling >> TFlatTableExecutor_Truncate::TruncateAtFollower [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWriteAtFollower [GOOD] >> TFlatTableExecutor_Truncate::TruncateAndWriteThenAttachFollower >> TFlatTableExecutor_Truncate::TruncateAndWriteThenAttachFollower [GOOD] >> TFlatTableExecutor_Truncate::PartiallyCommitThenTruncateAndWrite [GOOD] >> TFlatTableExecutor_TryKeepInMemory::FollowerPromoteToLeaderWithTryKeepInMemoryEnabled [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/quoter/ut/unittest >> QuoterWithKesusTest::CanKillKesusWhenUsingIt [GOOD] Test command err: 2026-01-04T03:02:08.222221Z node 1 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2026-01-04T03:02:08.223954Z node 1 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2026-01-04T03:02:08.228098Z node 1 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2026-01-04T03:02:08.228163Z node 1 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-04T03:02:08.250766Z node 2 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2026-01-04T03:02:08.250890Z node 2 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2026-01-04T03:02:08.251042Z node 2 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-04T03:02:08.251121Z node 2 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:824: [/Path/KesusName]: Disconnected from tablet 2026-01-04T03:02:08.251147Z node 2 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-04T03:02:08.251384Z node 2 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-04T03:02:08.269651Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2026-01-04T03:02:08.269778Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2026-01-04T03:02:08.270134Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2026-01-04T03:02:08.270171Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-04T03:02:08.270448Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-04T03:02:08.270547Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. 
Status: ERROR 2026-01-04T03:02:08.270576Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-04T03:02:08.270731Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-04T03:02:08.270772Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2026-01-04T03:02:08.270790Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-04T03:02:08.270940Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-04T03:02:08.270994Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2026-01-04T03:02:08.271019Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-04T03:02:08.271177Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-04T03:02:08.271232Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:809: [/Path/KesusName]: Failed to connect to tablet. Status: ERROR 2026-01-04T03:02:08.271256Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-04T03:02:08.271441Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-04T03:02:08.271482Z node 3 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:824: [/Path/KesusName]: Disconnected from tablet 2026-01-04T03:02:08.271503Z node 3 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-04T03:02:08.271721Z node 3 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-04T03:02:08.289595Z node 4 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2026-01-04T03:02:08.289710Z node 4 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2026-01-04T03:02:08.290125Z node 4 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "/resource" 2026-01-04T03:02:08.290176Z node 4 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:493: [/Path/KesusName]: Resource "/resource" has incorrect name. Maybe this was some error on client side. 2026-01-04T03:02:08.290215Z node 4 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:356: [/Path/KesusName]: ProxySession("/resource", Error: GenericError) 2026-01-04T03:02:08.290276Z node 4 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-04T03:02:08.290338Z node 4 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "resource//resource" 2026-01-04T03:02:08.290383Z node 4 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:493: [/Path/KesusName]: Resource "resource//resource" has incorrect name. Maybe this was some error on client side. 2026-01-04T03:02:08.290426Z node 4 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:356: [/Path/KesusName]: ProxySession("resource//resource", Error: GenericError) 2026-01-04T03:02:08.298326Z node 5 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. 
Tablet id: 100500 2026-01-04T03:02:08.298438Z node 5 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2026-01-04T03:02:08.298591Z node 5 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "res" 2026-01-04T03:02:08.300133Z node 5 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-04T03:02:08.316139Z node 5 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 100 } } } }) 2026-01-04T03:02:08.316229Z node 5 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:848: [/Path/KesusName]: Initialized new session with resource "res" 2026-01-04T03:02:08.316278Z node 5 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:382: [/Path/KesusName]: ProxySession("res", 42) 2026-01-04T03:02:08.316363Z node 5 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res", Normal, {0: Front(20, 2)} }]) 2026-01-04T03:02:08.324112Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1091: [/Path/KesusName]: Created kesus quoter proxy. Tablet id: 100500 2026-01-04T03:02:08.324201Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:1150: [/Path/KesusName]: Connecting to kesus 2026-01-04T03:02:08.324490Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "res0" 2026-01-04T03:02:08.324637Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-04T03:02:08.324899Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2026-01-04T03:02:08.324947Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:848: [/Path/KesusName]: Initialized new session with resource "res0" 2026-01-04T03:02:08.324999Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:382: [/Path/KesusName]: ProxySession("res0", 42) 2026-01-04T03:02:08.325114Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res0", Normal, {0: Front(1, 2)} }]) 2026-01-04T03:02:08.325225Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "res1" 2026-01-04T03:02:08.325310Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:517: [/Path/KesusName]: Subscribe on resource "res1" 2026-01-04T03:02:08.325524Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2026-01-04T03:02:08.325558Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:848: [/Path/KesusName]: Initialized new session with resource "res1" 2026-01-04T03:02:08.325593Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:382: [/Path/KesusName]: ProxySession("res1", 43) 2026-01-04T03:02:08.325634Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2026-01-04T03:02:08.325747Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:486: [/Path/KesusName]: ProxyRequest "res2" 2026-01-04T03:02:08.325855Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:517: [/Path/KesusName]: 
Subscribe on resource "res2" 2026-01-04T03:02:08.326061Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } }) 2026-01-04T03:02:08.326105Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:848: [/Path/KesusName]: Initialized new session with resource "res2" 2026-01-04T03:02:08.326137Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:382: [/Path/KesusName]: ProxySession("res2", 44) 2026-01-04T03:02:08.326201Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res2", Normal, {0: Front(1, 2)} }]) 2026-01-04T03:02:08.326430Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:732: [/Path/KesusName]: ProxyStats([{"res1", Consumed: 0, Queue: 5}]) 2026-01-04T03:02:08.326472Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:753: [/Path/KesusName]: Set info for resource "res1": { Available: 1, QueueWeight: 5 } 2026-01-04T03:02:08.326496Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:662: [/Path/KesusName]: Activate session to "res1". Connected: 1 2026-01-04T03:02:08.327252Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:585: [/Path/KesusName]: UpdateConsumptionState({ ResourcesInfo { ResourceId: 43 ConsumeResource: true Amount: inf } ActorID { RawX1: 5 RawX2: 25769805828 } }) 2026-01-04T03:02:08.327329Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/Path/KesusName]: ProxyUpdate(Normal, [{ "res1", Normal, {0: Front(1, 2)} }]) 2026-01-04T03:02:08.327625Z node 6 :QUOTER_PROXY WARN: kesus_quoter_proxy.cpp:824: [/Path/KesusName]: Disconnected from tablet 2026-01-04T03:02:08.327674Z node 6 :QUOTER_PROXY INFO: kesus_quoter_proxy.cpp:1148: [/Path/KesusName]: Reconnecting to kesus 2026-01-04T03:02:08.327805Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:640: [/Path/KesusName]: Mark "res1" for offline allocation. Connected: 0, SessionIsActive: 1, AverageDuration: 0.100000s, AverageAmount: 0.5 2026-01-04T03:02:08.327849Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:614: [/Path/KesusName]: Schedule offline allocation in 0.000000s: [{ "res1", 0.5 }] 2026-01-04T03:02:08.328036Z node 6 :QUOTER_PROXY DEBUG: kesus_quoter_proxy.cpp:800: [/Path/KesusName]: Successfully connected to tablet 2026-01-04T03:02:08.328289Z node 6 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:839: [/Path/KesusName]: SubscribeOnResourceResult({ Results { ResourceId: 42 Error { Status: SUCCESS } EffectiveProps { ResourceId: 42 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 43 Error { Status: SUCCESS } EffectiveProps { ResourceId: 43 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: 5 } } } Results { ResourceId: 44 Error { Status: SUCCESS } EffectiveProps { ResourceId: 44 HierarchicalDRRResourceConfig { MaxUnitsPerSecond: ... : [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -3.000100566, QueueWeight: 5 } 2026-01-04T03:04:04.061029Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-04T03:04:04.061144Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-04T03:04:04.100879Z node 51 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72075186224037888] Send TEvResourcesAllocated to [50:7591336845515327044:2290]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2026-01-04T03:04:04.101323Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:873: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2026-01-04T03:04:04.101392Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:882: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2026-01-04T03:04:04.101441Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-04T03:04:04.101491Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-04T03:04:04.160543Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1303: Feed resource "Resource". Balance: 0. FreeBalance: 0 2026-01-04T03:04:04.160585Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:346: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2026-01-04T03:04:04.260000Z 2026-01-04T03:04:04.160608Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1338: Allocate resource "Resource" 2026-01-04T03:04:04.160790Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:732: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2026-01-04T03:04:04.160826Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:753: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -2.000100566, QueueWeight: 5 } 2026-01-04T03:04:04.160875Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-04T03:04:04.160923Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-04T03:04:04.201046Z node 51 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72075186224037888] Send TEvResourcesAllocated to [50:7591336845515327044:2290]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2026-01-04T03:04:04.202035Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:873: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2026-01-04T03:04:04.202084Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:882: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2026-01-04T03:04:04.202136Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-04T03:04:04.202412Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-04T03:04:04.261461Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1303: Feed resource "Resource". Balance: 0. FreeBalance: 0 2026-01-04T03:04:04.261504Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:346: Schedule next tick for "Resource". Tick size: 0.100000s. 
Time: 2026-01-04T03:04:04.360000Z 2026-01-04T03:04:04.261532Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1338: Allocate resource "Resource" 2026-01-04T03:04:04.261689Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:732: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2026-01-04T03:04:04.261730Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:753: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -1.000100566, QueueWeight: 5 } 2026-01-04T03:04:04.261780Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-04T03:04:04.261946Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-04T03:04:04.300536Z node 51 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72075186224037888] Send TEvResourcesAllocated to [50:7591336845515327044:2290]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2026-01-04T03:04:04.300972Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:873: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2026-01-04T03:04:04.301019Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:882: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2026-01-04T03:04:04.301067Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-04T03:04:04.301103Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-04T03:04:04.360699Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1303: Feed resource "Resource". Balance: 0. FreeBalance: 0 2026-01-04T03:04:04.360753Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:346: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2026-01-04T03:04:04.460000Z 2026-01-04T03:04:04.360777Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1338: Allocate resource "Resource" 2026-01-04T03:04:04.360918Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:732: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 0, Queue: 5}]) 2026-01-04T03:04:04.360954Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:753: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -0.0001005662369, QueueWeight: 5 } 2026-01-04T03:04:04.361006Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-04T03:04:04.361248Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-04T03:04:04.400918Z node 51 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72075186224037888] Send TEvResourcesAllocated to [50:7591336845515327044:2290]. Cookie: 0. 
Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2026-01-04T03:04:04.401353Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:873: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2026-01-04T03:04:04.401403Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:882: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2026-01-04T03:04:04.401488Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Front(0.9998994338, 2)} }]) 2026-01-04T03:04:04.401721Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-04T03:04:04.461698Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1303: Feed resource "Resource". Balance: 0.9998994338. FreeBalance: 0.9998994338 2026-01-04T03:04:04.461740Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:346: Schedule next tick for "Resource". Tick size: 0.100000s. Time: 2026-01-04T03:04:04.560000Z 2026-01-04T03:04:04.461764Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1338: Allocate resource "Resource" 2026-01-04T03:04:04.461833Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:275: Charge "Resource" for 5. Balance: 0.9998994338. FreeBalance: 0.9998994338. TicksToFullfill: 5.000502882. DurationToFullfillInUs: 500050.2882. TimeToFullfill: 2026-01-04T03:04:04.057378Z. Now: 2026-01-04T03:04:04.461591Z. LastAllocated: 2026-01-04T03:04:03.557328Z 2026-01-04T03:04:04.462101Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:732: [/dc-1/KesusQuoter]: ProxyStats([{"Resource", Consumed: 5, Queue: 0}]) 2026-01-04T03:04:04.462138Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:753: [/dc-1/KesusQuoter]: Set info for resource "Resource": { Available: -4.000100566, QueueWeight: 0 } 2026-01-04T03:04:04.462186Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-04T03:04:04.462252Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-04T03:04:04.500718Z node 51 :KESUS_TABLET TRACE: quoter_runtime.cpp:93: [72075186224037888] Send TEvResourcesAllocated to [50:7591336845515327044:2290]. Cookie: 0. Data: { ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } } 2026-01-04T03:04:04.501317Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:873: [/dc-1/KesusQuoter]: ResourcesAllocated({ ResourcesInfo { ResourceId: 1 Amount: 1 StateNotification { Status: SUCCESS } } }) 2026-01-04T03:04:04.501380Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:882: [/dc-1/KesusQuoter]: Kesus allocated {"Resource", 1} 2026-01-04T03:04:04.501428Z node 50 :QUOTER_PROXY TRACE: kesus_quoter_proxy.cpp:1027: [/dc-1/KesusQuoter]: ProxyUpdate(Normal, [{ "Resource", Normal, {0: Sustained(0, 0)} }]) 2026-01-04T03:04:04.501517Z node 50 :QUOTER_SERVICE DEBUG: quoter_service.cpp:1087: ProxyUpdate for quoter /dc-1/KesusQuoter 2026-01-04T03:04:04.561413Z node 50 :QUOTER_SERVICE TRACE: quoter_service.cpp:1303: Feed resource "Resource". Balance: 0. 
FreeBalance: 0 2026-01-04T03:04:04.955919Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7591336845515326536:2394] 2026-01-04T03:04:04.955955Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7591336845515326536:2394] 2026-01-04T03:04:05.953186Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7591336845515326536:2394] 2026-01-04T03:04:05.953227Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7591336845515326536:2394] 2026-01-04T03:04:06.954954Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7591336845515326536:2394] 2026-01-04T03:04:06.954990Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7591336845515326536:2394] 2026-01-04T03:04:07.801399Z node 50 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[50:7591336841220358596:2076];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:04:07.801511Z node 50 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//dc-1/.metadata/initialization/migrations;error=timeout; 2026-01-04T03:04:07.808735Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037968897] send [50:7591336841220358686:2111] 2026-01-04T03:04:07.808765Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037968897] push event to server [50:7591336841220358686:2111] 2026-01-04T03:04:07.953429Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594046382081] send [50:7591336845515326536:2394] 2026-01-04T03:04:07.953477Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594046382081] push event to server [50:7591336845515326536:2394] 2026-01-04T03:04:08.016187Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:154: TClient[72057594037968897] send [50:7591336841220358686:2111] 2026-01-04T03:04:08.016223Z node 50 :PIPE_CLIENT DEBUG: tablet_pipe_client.cpp:645: TClient[72057594037968897] push event to server [50:7591336841220358686:2111] |99.1%| [TM] {RESULT} ydb/core/quoter/ut/unittest |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/quoter/ut/unittest >> test_isolation.py::TestTransactionIsolation::test_prevents_aborted_reads_g1a [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b >> TSharedPageCache::TryKeepInMemoryMode_Enabling [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Disabling >> test_isolation.py::TestTransactionIsolation::test_prevents_intermediate_reads_g1b [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c >> BulkUpsert::BulkUpsert [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_Disabling [GOOD] >> TSharedPageCache::TryKeepInMemoryMode_AfterCompaction >> TFlatTableExecutor_VersionedRows::TestVersionedRowsSmallBlobs [GOOD] >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutor_TryKeepInMemory::FollowerPromoteToLeaderWithTryKeepInMemoryEnabled [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:04:03.178979Z 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| 
FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.021 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.023 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.023 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.023 II| FAKE_ENV: DS.0 gone, left {525b, 8}, put {545b, 9} 00000.023 II| FAKE_ENV: DS.1 gone, left {582b, 8}, put {582b, 8} 00000.023 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.023 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.023 II| FAKE_ENV: All BS storage groups are stopped 00000.023 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.023 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:04:03.211361Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.008 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.009 II| FAKE_ENV: Starting storage for BS group 2 00000.009 II| FAKE_ENV: Starting storage for BS group 3 00000.010 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.010 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} hope 1 -> done Change{2, redo 0b alter 84b annex 0, ~{ } -{ }, 0 gb} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxInitSchema} release 4194304b of static, Memory{0 dyn 0} 00000.013 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.013 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 2, state Free, final id 0, final level 0 00000.013 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u> 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} hope 1 -> done Change{2, redo 78b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} release 4194304b of static, Memory{0 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u> 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} hope 1 -> done Change{3, redo 78b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} release 4194304b of static, Memory{0 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u> 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} hope 1 -> done Change{4, redo 86b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} release 4194304b of static, Memory{0 dyn 0} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u> 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} hope 1 -> done Change{5, redo 86b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} release 4194304b of static, Memory{0 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u> 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} hope 1 -> done Change{6, redo 86b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<3u>} release 4194304b of static, Memory{0 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 ...compacting 00000.019 DD| TABLET_EXECUTOR: TCompactionLogic PrepareForceCompaction for 1 table 101, mode Mem, forced state None, forced mode Full 00000.019 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 1, edge 9223372036854775807/0, generation 0 00000.019 II| TABLET_EXECUTOR: Leader{1:2:8} starting compaction 00000.019 II| TABLET_EXECUTOR: Leader{1:2:9} starting Scan{1 on 101, Compact{1.2.8, eph 1}} 00000.019 II| TABLET_EXECUTOR: Leader{1:2:9} started compaction 1 00000.019 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 1 generation 0 00000.020 DD| OPS_COMPACT: Compact{1.2.8, eph 1} saving [1:2:8:1:69632:397:0] left 397b 00000.020 DD| OPS_COMPACT: Compact{1.2.8, eph 1} saving [1:2:8:1:12288:211:0] left 608b 00000.021 DD| OPS_COMPACT: Compact{1.2.8, eph 1} put [1:2:8:1:69632:397:0] result OK flags { Valid } left 211b 00000.021 DD| OPS_COMPACT: Compact{1.2.8, eph 1} put [1:2:8:1:12288:211:0] result OK flags { Valid } left 0b 00000.022 II| OPS_COMPACT: Compact{1.2.8, eph 1} end=Done, 2 blobs 3r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (397 0 0)b }, ecr=1.000 00000.023 II| TABLET_EXECUTOR: Leader{1:2:9} Compact 1 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 8, product {1 parts epoch 2} done 00000.023 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 1, generation 0 00000.023 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.023 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 8 for step 8 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 3 for step 9 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:10} switch applied on followers, step 9 ...waiting until compacted 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 1 -> done Change{8, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} release 4194304b of static, Memory{0 dyn 0} 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} hope 1 -> done Change{8, redo 72b alter 0b annex 0, ~{ 
101 } -{ }, 0 gb} 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCommitLongTx} release 4194304b of static, Memory{0 dyn 0} 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.026 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows 00000.026 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.026 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 1 -> done Change{9, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.026 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} release 4194304b of static, Memory{0 dyn 0} ...making snapshot with concurrent commit 00000.027 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxM ... mory{0 dyn 0} 00000.015 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:7} switch applied on followers, step 6 ...checking rows (expecting new data and no metadata for old transactions) 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} hope 1 -> done Change{5, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} release 4194304b of static, Memory{0 dyn 0} ...restarting tablet 00000.016 II| TABLET_EXECUTOR: Leader{1:2:7} suiciding, Waste{2:0, 357b +(4, 602b), 6 trc, -602b acc} 00000.019 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 35 bytes, 35 total, blobs: { [1:2:1:1:28672:35:0] } 00000.019 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 401 bytes, 401 total, blobs: { [1:2:2:1:8192:209:0], [1:2:5:1:32768:130:0], [1:2:6:1:32768:62:0] } 00000.019 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 271 bytes, 271 total, blobs: { [1:2:3:1:24576:123:0], [1:2:6:1:24576:148:0] } 00000.020 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.020 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 177b, wait} done, Waste{2:0, 357b +(4, 602b), 6 trc} 00000.020 DD| TABLET_EXECUTOR: Leader{1:3:2} commited cookie 2 for step 1 ...checking rows (expecting new) 00000.021 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric 00000.021 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.021 
DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} hope 1 -> done Change{5, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.021 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} release 4194304b of static, Memory{0 dyn 0} ...restarting tablet 00000.021 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 357b +(0, 0b), 1 trc, -602b acc} 00000.023 DD| TABLET_EXECUTOR: Leader{1:4:-} sending TEvGet batch 177 bytes, 177 total, blobs: { [1:3:1:1:28672:177:0] } 00000.023 DD| TABLET_EXECUTOR: Leader{1:4:-} sending TEvGet batch 209 bytes, 209 total, blobs: { [1:2:2:1:8192:209:0] } 00000.023 DD| TABLET_EXECUTOR: Leader{1:4:-} sending TEvGet batch 148 bytes, 148 total, blobs: { [1:2:6:1:24576:148:0] } 00000.023 II| TABLET_EXECUTOR: Leader{1:4:0} activating executor 00000.023 II| TABLET_EXECUTOR: LSnap{1:4, on 4:1, 177b, wait} done, Waste{2:0, 357b +(0, 0b), 1 trc} 00000.024 DD| TABLET_EXECUTOR: Leader{1:4:2} commited cookie 2 for step 1 ...checking rows (expecting new) 00000.024 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric 00000.024 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.024 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} hope 1 -> done Change{5, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.024 DD| TABLET_EXECUTOR: Leader{1:4:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_Truncate::TTxLambdaGeneric} release 4194304b of static, Memory{0 dyn 0} 00000.024 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.025 II| TABLET_EXECUTOR: Leader{1:4:2} suiciding, Waste{2:0, 357b +(0, 0b), 1 trc, -602b acc} 00000.025 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.025 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.025 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {736b, 11} 00000.025 II| FAKE_ENV: DS.1 gone, left {534b, 3}, put {1540b, 11} 00000.025 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.025 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.025 II| FAKE_ENV: All BS storage groups are stopped 00000.025 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.025 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 80}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:04:10.806742Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.007 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.007 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 ... 
initializing schema 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.008 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} hope 1 -> done Change{2, redo 64b alter 11b annex 0, ~{ 101 } -{ }, 0 gb} 00000.009 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} release 4194304b of static, Memory{0 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 ... inserting rows 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{3, redo 512b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.010 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 ...compacting table 00000.010 DD| TABLET_EXECUTOR: TCompactionLogic PrepareForceCompaction for 1 table 101, mode Full, forced state None, forced mode Full 00000.011 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 1, edge 9223372036854775807/0, generation 0 00000.011 II| TABLET_EXECUTOR: Leader{1:2:5} starting compaction 00000.011 II| TABLET_EXECUTOR: Leader{1:2:6} starting Scan{1 on 101, Compact{1.2.5, eph 1}} 00000.011 II| TABLET_EXECUTOR: Leader{1:2:6} started compaction 1 00000.011 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 1 generation 0 00000.020 II| TABLET_EXECUTOR: Leader{1:2:6} Compact 1 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 5, product {1 parts epoch 2} done 00000.020 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 1, generation 0 ... 
observed TEvAttach from [63:30:2062] (leader) 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 8 for step 5 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:7} got result TEvResult{3 pages [1:2:5:1:12288:161:0] ok OK}, type 5 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 3 for step 6 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:7} switch applied on followers, step 6 ... starting follower ... wait first follower EvAttach ... waiting for NKikimr::NSharedCache::TEvAttach 00000.021 II| TABLET_EXECUTOR: LSnap{1:2, on 2:7, 251b, wait} done, Waste{2:0, 770b +(2, 226b), 6 trc} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 2 for step 7 ... observed TEvTablet::TEvFBoot, followerSysActor: [63:51:2087] 00000.022 DD| TABLET_EXECUTOR: Follower{1:2:-} sending TEvGet batch 220 bytes, 220 total, blobs: { [1:2:2:1:8192:209:0], [1:2:3:1:8192:11:0] } 00000.022 DD| TABLET_EXECUTOR: Follower{1:2:-} sending TEvGet batch 161 bytes, 161 total, blobs: { [1:2:5:1:12288:161:0] } 00000.023 II| TABLET_EXECUTOR: Follower{1:2:0} activating executor ... observed TEvAttach from [63:58:2091] (follower) ... waiting for NKikimr::NSharedCache::TEvAttach (done) ... stopping leader 00000.023 DD| TABLET_EXECUTOR: Follower{1:2:0} got result TEvResult{3 pages [1:2:5:1:12288:161:0] ok OK}, type 5 00000.023 II| TABLET_EXECUTOR: Leader{1:2:8} suiciding, Waste{2:0, 770b +(0, 0b), 1 trc, -226b acc} ... promoting follower ... wait secont follower EvAttach ... waiting for NKikimr::NSharedCache::TEvAttach 00000.025 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 251 bytes, 251 total, blobs: { [1:2:7:1:28672:251:0] } 00000.026 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 220 bytes, 220 total, blobs: { [1:2:2:1:8192:209:0], [1:2:3:1:8192:11:0] } 00000.026 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.027 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 251b, wait} done, Waste{2:0, 770b +(0, 0b), 1 trc} ... observed TEvAttach from [63:58:2091] (follower) ... 
waiting for NKikimr::NSharedCache::TEvAttach (done) 00000.027 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.027 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 770b +(0, 0b), 1 trc, -226b acc} 00000.028 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 27b} miss {0 0b} in-memory miss {0 0b} 00000.028 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.028 II| FAKE_ENV: DS.0 gone, left {119b, 4}, put {673b, 11} 00000.028 II| FAKE_ENV: DS.1 gone, left {1637b, 10}, put {1637b, 10} 00000.028 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.028 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.028 II| FAKE_ENV: All BS storage groups are stopped 00000.028 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.028 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 57}, stopped |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_isolation.py::TestTransactionIsolation::test_prevents_circular_information_flow_g1c [GOOD] >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example >> KeyValueGRPCService::SimpleWriteReadWithGetChannelStatusV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadOverrunV1 >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_select_with_repetitions[10-64] [GOOD] >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] >> test_isolation.py::TestTransactionIsolation::test_isolation_mailing_list_example [GOOD] >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv >> TSharedPageCache::TryKeepInMemoryMode_AfterCompaction [GOOD] >> TSharedPageCache_Actor::Attach_Basics >> TSharedPageCache_Actor::Attach_Basics [GOOD] >> TSharedPageCache_Actor::Attach_Request >> TSharedPageCache_Actor::Attach_Request [GOOD] >> TSharedPageCache_Actor::Detach_Basics >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario >> TSharedPageCache_Actor::Detach_Basics [GOOD] >> TSharedPageCache_Actor::Detach_Cached [GOOD] >> TSharedPageCache_Actor::Detach_Expired >> TSharedPageCache_Actor::Detach_Expired [GOOD] >> TSharedPageCache_Actor::Detach_InFly >> TSharedPageCache_Actor::Detach_InFly [GOOD] >> TSharedPageCache_Actor::Detach_Queued [GOOD] >> TSharedPageCache_Actor::InMemory_Basics >> TSharedPageCache_Actor::InMemory_Basics [GOOD] >> TSharedPageCache_Actor::InMemory_NotEnoughMemory >> TSharedPageCache_Actor::InMemory_NotEnoughMemory [GOOD] >> TSharedPageCache_Actor::InMemory_Enabling >> TSharedPageCache_Actor::InMemory_Enabling [GOOD] >> TSharedPageCache_Actor::InMemory_Enabling_AllRequested |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> TSharedPageCache_Actor::InMemory_Enabling_AllRequested [GOOD] >> TSharedPageCache_Actor::InMemory_Disabling >> test_isolation.py::TestTransactionIsolation::test_prevents_observed_transaction_vanishes_otv [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp >> TSharedPageCache_Actor::InMemory_Disabling [GOOD] >> TSharedPageCache_Actor::InMemory_Detach |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> TSharedPageCache_Actor::InMemory_Detach [GOOD] >> TSharedPageCache_Actor::InMemory_MoveEvictedToInMemory >> TSharedPageCache_Actor::InMemory_MoveEvictedToInMemory [GOOD] >> 
TSharedPageCache_Actor::InMemory_MoveEvictedToRegular >> TSharedPageCache_Actor::InMemory_MoveEvictedToRegular [GOOD] >> TSharedPageCache_Actor::InMemory_AttachRepeated >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp [GOOD] >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates >> TSharedPageCache_Actor::InMemory_AttachRepeated [GOOD] >> TSharedPageCache_Actor::GC_Manual |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/audit/py3test >> test_canonical_records.py::test_create_drop_and_alter_database [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/audit/py3test >> TSharedPageCache_Actor::GC_Manual [GOOD] >> TSharedPageCache_Actor::GC_Scheduled >> TSharedPageCache_Actor::GC_Scheduled [GOOD] >> TSharedPageCache_Actor::Evict_Active >> test_isolation.py::TestTransactionIsolation::test_does_not_prevent_predicate_many_preceders_pmp_for_write_predicates [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 >> TSharedPageCache_Actor::Evict_Active [GOOD] >> TSharedPageCache_Actor::Evict_Passive >> TSharedPageCache_Actor::Evict_Passive [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuild [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuildWithOverlap >> TMemoryController::ColumnShardCaches_Config [GOOD] >> TMemTableMemoryConsumersCollection::Empty [GOOD] >> TMemTableMemoryConsumersCollection::Destruction [GOOD] >> TMemTableMemoryConsumersCollection::Register [GOOD] >> TMemTableMemoryConsumersCollection::Unregister [GOOD] >> TMemTableMemoryConsumersCollection::SetConsumption [GOOD] >> TMemTableMemoryConsumersCollection::CompactionComplete [GOOD] >> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD] |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest >> BulkUpsert::BulkUpsert [GOOD] |99.1%| [TM] {RESULT} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest |99.1%| [TM] {BAZEL_UPLOAD} ydb/public/sdk/cpp/tests/integration/bulk_upsert/gtest >> test_isolation.py::TestTransactionIsolation::test_lost_update_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index >> test_ttl.py::TestTTL::test_ttl[table_Date_1__SYNC-pk_types33-all_types33-index33-Date--SYNC] [FAIL] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TSharedPageCache_Actor::Evict_Passive [GOOD] Test command err: 0.29236 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:04:04.749010Z 00000.008 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.009 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.011 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.011 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.012 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} queued, type NKikimr::NSharedCache::TTxInitSchema 
00000.013 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} hope 1 -> done Change{2, redo 0b alter 500b annex 0, ~{ } -{ }, 0 gb} 00000.013 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NSharedCache::TTxInitSchema} release 4194304b of static, Memory{0 dyn 0} 00000.013 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.013 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 2, state Free, final id 0, final level 0 00000.013 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.014 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.015 NN| TABLET_SAUSAGECACHE: Update config MemoryLimit: 8388608 00000.016 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.016 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{2, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.017 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{3, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{3, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.018 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{4, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{4, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.019 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{5, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{5, NKikimr::NSharedCache::TTxWriteRow} release 
4194304b of static, Memory{0 dyn 0} 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.020 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{6, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 00000.021 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{7, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{7, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.022 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 1 for step 8 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{8, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.023 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{9, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.024 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{10, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{10, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.026 DD| TABLET_EXECUTOR: Leader{1:2:12} commited cookie 1 for step 11 00000.026 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.026 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, 
NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.026 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{11, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.026 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{11, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.027 DD| TABLET_EXECUTOR: Leader{1:2:13} commited cookie 1 for step 12 00000.027 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.027 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.027 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{12, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.027 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{12, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.028 DD| TABLET_EXECUTOR: Leader{1:2:14} commited cookie 1 for step 13 00000.028 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.028 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.028 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{13, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.028 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{13, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.029 DD| TABLET_EXECUTOR: Leader{1:2:15} commited cookie 1 for step 14 00000.029 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.029 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.029 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{14, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.029 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{14, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.030 DD| TABLET_EXECUTOR: Leader{1:2:16} commited cookie 1 for step 15 00000.030 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.030 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.030 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{15, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.030 DD| TABLET_EXECUTOR: Leader{1:2:16} Tx{15, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.030 DD| TABLET_EXECUTOR: Leader{1:2:17} commited cookie 1 for step 16 00000.031 DD| TABLET_EXECUTOR: Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.031 DD| TABLET_EXECUTOR: Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.032 DD| TABLET_EXECUTOR: Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{16, redo 102475b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.032 DD| TABLET_EXECUTOR: 
Leader{1:2:17} Tx{16, NKikimr::NSharedCache::TTxWriteRow} release 4194304b of static, Memory{0 dyn 0} 00000.032 DD| TABLET_EXECUTOR: Leader{1:2:18} commited cookie 1 for step 17 00000.033 DD| TABLET_EXECUTOR: Leader{1:2:18} Tx{17, NKikimr::NSharedCache::TTxWriteRow} queued, type NKikimr::NSharedCache::TTxWriteRow 00000.033 DD| TABLET_EXECUTOR: Leader{1:2:18} Tx{17, NKikimr::NSharedCache::TTxWriteRow} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.033 DD| TABLET_EXECUTOR: Leader{1:2:18} Tx{17, NKikimr::NSharedCache::TTxWriteRow} hope 1 -> done Change{17, r ... :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #5 (done) Checking results#5 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 5 Pages: [ 13 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 5 Pages: [ 13 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-04T03:04:14.767944Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 6 class Online from cache [ ] already requested [ ] to request [ 14 ] 2026-01-04T03:04:14.767977Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 13 ] owner [33:5:2052] 2026-01-04T03:04:14.768001Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #6 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #6 (done) Checking fetches#6 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 14 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 14 ] ... waiting for results #6 2026-01-04T03:04:14.768079Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 14 ] 2026-01-04T03:04:14.768095Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 14 ] cookie 6 2026-01-04T03:04:14.768125Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #6 (done) Checking results#6 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 6 Pages: [ 14 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 6 Pages: [ 14 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-04T03:04:14.768197Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 7 class Online from cache [ ] already requested [ ] to request [ 15 ] 2026-01-04T03:04:14.768229Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 14 ] owner [33:5:2052] 2026-01-04T03:04:14.768249Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #7 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... 
waiting for fetches #7 (done) Checking fetches#7 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 15 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 15 ] ... waiting for results #7 2026-01-04T03:04:14.768336Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 15 ] 2026-01-04T03:04:14.768364Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 15 ] cookie 7 2026-01-04T03:04:14.768387Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #7 (done) Checking results#7 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 7 Pages: [ 15 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 7 Pages: [ 15 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-04T03:04:14.768491Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 8 class Online from cache [ ] already requested [ ] to request [ 16 ] 2026-01-04T03:04:14.768525Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 15 ] owner [33:5:2052] 2026-01-04T03:04:14.768545Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #8 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #8 (done) Checking fetches#8 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 16 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 16 ] ... waiting for results #8 2026-01-04T03:04:14.768638Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 16 ] 2026-01-04T03:04:14.768655Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 16 ] cookie 8 2026-01-04T03:04:14.768679Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #8 (done) Checking results#8 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 8 Pages: [ 16 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 8 Pages: [ 16 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-04T03:04:14.768829Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 9 class Online from cache [ ] already requested [ ] to request [ 17 ] 2026-01-04T03:04:14.768879Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 16 ] owner [33:5:2052] 2026-01-04T03:04:14.768902Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #9 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... 
waiting for fetches #9 (done) Checking fetches#9 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 17 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 17 ] ... waiting for results #9 2026-01-04T03:04:14.768996Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 17 ] 2026-01-04T03:04:14.769014Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 17 ] cookie 9 2026-01-04T03:04:14.769038Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #9 (done) Checking results#9 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 9 Pages: [ 17 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 9 Pages: [ 17 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-04T03:04:14.769122Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 10 class Online from cache [ ] already requested [ ] to request [ 18 ] 2026-01-04T03:04:14.769152Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 17 ] owner [33:5:2052] 2026-01-04T03:04:14.769172Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #10 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #10 (done) Checking fetches#10 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] ... waiting for results #10 2026-01-04T03:04:14.769283Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 18 ] 2026-01-04T03:04:14.769301Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 18 ] cookie 10 2026-01-04T03:04:14.769323Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #10 (done) Checking results#10 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 18 ] ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-04T03:04:14.769430Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [33:5:2052] cookie 11 class Online from cache [ ] already requested [ ] to request [ 19 ] 2026-01-04T03:04:14.769470Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 18 ] owner [33:5:2052] 2026-01-04T03:04:14.769493Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 366B LoadInFly: 122B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #11 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... 
waiting for fetches #11 (done) Checking fetches#11 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 19 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 10 Pages: [ 19 ] ... waiting for results #11 2026-01-04T03:04:14.769574Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 19 ] 2026-01-04T03:04:14.769591Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [33:5:2052] class Online pages [ 19 ] cookie 11 2026-01-04T03:04:14.769611Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for results #11 (done) Checking results#11 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 11 Pages: [ 19 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 11 Pages: [ 19 ] ... waiting for NActors::TEvents::TEvWakeup 2026-01-04T03:04:14.769690Z node 33 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:865: Wakeup DoGCManual 2026-01-04T03:04:14.769738Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 366B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for NActors::TEvents::TEvWakeup (done) ... waiting for NActors::TEvents::TEvWakeup 2026-01-04T03:04:14.769853Z node 33 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:865: Wakeup DoGCManual 2026-01-04T03:04:14.769921Z node 33 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:956: Drop page collection [1:0:256:0:0:0:1] pages [ 1 2 3 ] owner [33:5:2052] 2026-01-04T03:04:14.769970Z node 33 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 122B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... waiting for NActors::TEvents::TEvWakeup (done) |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> test_crud.py::TestCreateAndUpsertWithRepetitions::test_create_and_upsert_data_with_repetitions[10-64] [GOOD] >> test_isolation.py::TestTransactionIsolation::test_lost_update_on_value_with_upsert_p4 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single ------- [TM] {asan, default-linux-x86_64, release} ydb/core/memory_controller/ut/unittest >> TMemTableMemoryConsumersCollection::SelectForCompaction [GOOD] Test command err: ResourceBrokerSelfConfig: LimitBytes: 0B ... 
waiting for SysViewsRoster update finished 2026-01-04T03:02:29.671991Z node 1 :RESOURCE_BROKER DEBUG: resource_broker.cpp:1115: TResourceBrokerActor bootstrap 2026-01-04T03:02:29.677412Z node 1 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1479: Bootstrap with config MemoryLimit: 33554432 2026-01-04T03:02:32.580919Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:235: Periodic memory stats: AnonRss: none CGroupLimit: none MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 200MiB SoftLimit: 150MiB TargetUtilization: 100MiB ActivitiesLimitBytes: 60MiB ConsumersConsumption: 0B OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 100MiB ResultingConsumersConsumption: 6MiB Coefficient: 0.9999990463 2026-01-04T03:02:32.582635Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer MemTable state: Consumption: 0B Limit: 6MiB Min: 2MiB Max: 6MiB 2026-01-04T03:02:32.582778Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesPortionsMetaDataCache state: Consumption: 0B Limit: 12.5MiB Min: 12.5MiB Max: 12.5MiB 2026-01-04T03:02:32.583533Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:443: Consumer QueryExecution state: Consumption: 0B Limit: 40MiB 2026-01-04T03:02:32.583646Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:458: Apply ResourceBroker config: LimitBytes: 60MiB queue_cs_general: 7.5MiB queue_cs_indexation: 2.5MiB queue_cs_normalizer: 7.5MiB queue_cs_ttl: 2.5MiB queue_kqp_resource_manager: 40MiB 2026-01-04T03:02:32.584483Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:161: Bootstrapped with config HardLimitBytes: 209715200 2026-01-04T03:02:32.584935Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer SharedCache [1:20:2067] registered 2026-01-04T03:02:32.586494Z node 1 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:238: Register memory consumer 2026-01-04T03:02:32.586561Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 2026-01-04T03:02:32.589243Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:02:32.594539Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesBlobCache [1:21:2068] registered 2026-01-04T03:02:32.750019Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesDataAccessorCache [1:22:2069] registered 2026-01-04T03:02:32.751232Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1189: New config diff: Queues { Name: "queue_cs_general" Limit { Memory: 7864320 } } Queues { Name: "queue_cs_indexation" Limit { Memory: 2621440 } } Queues { Name: "queue_cs_normalizer" Limit { Memory: 7864320 } } Queues { Name: "queue_cs_ttl" Limit { Memory: 2621440 } } Queues { Name: "queue_kqp_resource_manager" Limit { Memory: 41943040 } } ResourceLimit { Memory: 62914560 } 2026-01-04T03:02:32.752028Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1195: New config: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { 
Name: "queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 2621440 } } Queues { Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 2621440 } } Queues { Name: "queue_cs_general" Weight: 100 Limit { Cpu: 3 Memory: 7864320 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 7864320 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: "queue_scan" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 41943040 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_system_tablet_backup" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: "queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: "queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: "queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 5000000 } 
Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: "queue_statistics_scan" DefaultDuration: 600000000 } Tasks { Name: "system_tablet_backup" QueueName: "queue_system_tablet_backup" DefaultDuration: 60000000 } ResourceLimit { Cpu: 256 Memory: 62914560 } 2026-01-04T03:02:32.753276Z node 1 :RESOURCE_BROKER INFO: resource_broker.cpp:1240: Configure result: Success: true 2026-01-04T03:02:32.753694Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 2026-01-04T03:02:32.754269Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesColumnDataCache [1:23:2070] registered 2026-01-04T03:02:32.765180Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesScanGroupedMemory [1:51:2098] registered 2026-01-04T03:02:32.765413Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesCompGroupedMemory [1:52:2099] registered 2026-01-04T03:02:32.765874Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:305: Consumer ColumnTablesDeduplicationGroupedMemory [1:53:2100] registered 2026-01-04T03:02:32.767329Z node 1 :MEMORY_CONTROLLER INFO: memory_controller.cpp:335: ResourceBroker configure result Success: true 2026-01-04T03:02:32.787297Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:125: [WorkloadService] [Service] Failed to discover tenant nodes 2026-01-04T03:02:32.809526Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:317:2361], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:02:32.810002Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:02:32.810203Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:02:32.969989Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:427:2393] 1 registered 2026-01-04T03:02:32.987717Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:436:2395] 0 registered 2026-01-04T03:02:32.987906Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:436:2395] 2 registered 2026-01-04T03:02:32.988148Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:436:2395] 4 registered 2026-01-04T03:02:32.988568Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:436:2395] 5 registered 2026-01-04T03:02:32.994380Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:452:2397] 1 registered 2026-01-04T03:02:32.998208Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:452:2397] 2 registered 2026-01-04T03:02:33.084128Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:461:2399] 1 registered 2026-01-04T03:02:33.084523Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:461:2399] 2 registered 2026-01-04T03:02:33.084713Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:461:2399] 3 registered 2026-01-04T03:02:33.085051Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:461:2399] 4 registered 2026-01-04T03:02:33.085159Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:461:2399] 5 registered 2026-01-04T03:02:33.085274Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:461:2399] 6 registered 2026-01-04T03:02:33.086372Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:461:2399] 7 registered 2026-01-04T03:02:33.086901Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:461:2399] 8 registered 2026-01-04T03:02:33.088554Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:461:2399] 9 registered 2026-01-04T03:02:33.096849Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:461:2399] 10 registered 2026-01-04T03:02:33.124982Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:461:2399] 11 registered 2026-01-04T03:02:33.125501Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:461:2399] 12 registered 2026-01-04T03:02:33.126696Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: MemTable [1:461:2399] 13 registered 2026-01-04T03:02:33.128493Z node 1 :MEMORY_CONTROLLER TRACE: memory_controller.cpp:312: Me ... 
in: 50MiB Max: 50MiB 2026-01-04T03:04:14.269304Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesBlobCache state: Consumption: 0B Limit: 6.25MiB Min: 6.25MiB Max: 6.25MiB 2026-01-04T03:04:14.269327Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDataAccessorCache state: Consumption: 0B Limit: 6.25MiB Min: 6.25MiB Max: 6.25MiB 2026-01-04T03:04:14.269352Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesColumnDataCache state: Consumption: 0B Limit: 6.25MiB Min: 6.25MiB Max: 6.25MiB 2026-01-04T03:04:14.269379Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDeduplicationGroupedMemory state: Consumption: 0B Limit: 50MiB Min: 50MiB Max: 50MiB 2026-01-04T03:04:14.269423Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesPortionsMetaDataCache state: Consumption: 0B Limit: 31.3MiB Min: 31.3MiB Max: 31.3MiB 2026-01-04T03:04:14.269459Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:443: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 100MiB 2026-01-04T03:04:14.269549Z node 12 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:248: Limit memory consumer with 236MiB 2026-01-04T03:04:14.269576Z node 12 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 2026-01-04T03:04:14.459815Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:235: Periodic memory stats: AnonRss: none CGroupLimit: 1.95GiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 1.95GiB SoftLimit: 1.46GiB TargetUtilization: 1000MiB ActivitiesLimitBytes: 600MiB ConsumersConsumption: 172KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 1000MiB ResultingConsumersConsumption: 1000MiB Coefficient: 0.90625 2026-01-04T03:04:14.460405Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer SharedCache state: Consumption: 0B Limit: 944MiB Min: 400MiB Max: 1000MiB 2026-01-04T03:04:14.460465Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer MemTable state: Consumption: 172KiB Limit: 56.3MiB Min: 20MiB Max: 60MiB 2026-01-04T03:04:14.460502Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesScanGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2026-01-04T03:04:14.460538Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesCompGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2026-01-04T03:04:14.460563Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesBlobCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2026-01-04T03:04:14.460590Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDataAccessorCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2026-01-04T03:04:14.460615Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesColumnDataCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2026-01-04T03:04:14.460642Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDeduplicationGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2026-01-04T03:04:14.460676Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesPortionsMetaDataCache state: Consumption: 0B Limit: 
125MiB Min: 125MiB Max: 125MiB 2026-01-04T03:04:14.460725Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:443: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 400MiB 2026-01-04T03:04:14.460813Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:458: Apply ResourceBroker config: LimitBytes: 600MiB queue_cs_general: 75MiB queue_cs_indexation: 25MiB queue_cs_normalizer: 75MiB queue_cs_ttl: 25MiB queue_kqp_resource_manager: 400MiB 2026-01-04T03:04:14.461075Z node 12 :RESOURCE_BROKER INFO: resource_broker.cpp:1189: New config diff: Queues { Name: "queue_cs_general" Limit { Memory: 78643200 } } Queues { Name: "queue_cs_indexation" Limit { Memory: 26214400 } } Queues { Name: "queue_cs_normalizer" Limit { Memory: 78643200 } } Queues { Name: "queue_cs_ttl" Limit { Memory: 26214400 } } Queues { Name: "queue_kqp_resource_manager" Limit { Memory: 419430400 } } ResourceLimit { Memory: 629145600 } 2026-01-04T03:04:14.461790Z node 12 :RESOURCE_BROKER INFO: resource_broker.cpp:1195: New config: Queues { Name: "queue_default" Weight: 30 Limit { Cpu: 2 } } Queues { Name: "queue_compaction_gen0" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_compaction_gen1" Weight: 100 Limit { Cpu: 6 } } Queues { Name: "queue_compaction_gen2" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_gen3" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_compaction_borrowed" Weight: 100 Limit { Cpu: 3 } } Queues { Name: "queue_cs_indexation" Weight: 100 Limit { Cpu: 3 Memory: 26214400 } } Queues { Name: "queue_cs_ttl" Weight: 100 Limit { Cpu: 3 Memory: 26214400 } } Queues { Name: "queue_cs_general" Weight: 100 Limit { Cpu: 3 Memory: 78643200 } } Queues { Name: "queue_cs_scan_read" Weight: 100 Limit { Cpu: 3 Memory: 3221225472 } } Queues { Name: "queue_cs_normalizer" Weight: 100 Limit { Cpu: 3 Memory: 78643200 } } Queues { Name: "queue_transaction" Weight: 100 Limit { Cpu: 4 } } Queues { Name: "queue_background_compaction" Weight: 10 Limit { Cpu: 1 } } Queues { Name: "queue_scan" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_backup" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_restore" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_kqp_resource_manager" Weight: 30 Limit { Cpu: 4 Memory: 419430400 } } Queues { Name: "queue_build_index" Weight: 100 Limit { Cpu: 10 } } Queues { Name: "queue_ttl" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_datashard_build_stats" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_cdc_initial_scan" Weight: 100 Limit { Cpu: 2 } } Queues { Name: "queue_statistics_scan" Weight: 100 Limit { Cpu: 1 } } Queues { Name: "queue_system_tablet_backup" Weight: 100 Limit { Cpu: 1 } } Tasks { Name: "unknown" QueueName: "queue_default" DefaultDuration: 60000000 } Tasks { Name: "compaction_gen0" QueueName: "queue_compaction_gen0" DefaultDuration: 10000000 } Tasks { Name: "compaction_gen1" QueueName: "queue_compaction_gen1" DefaultDuration: 30000000 } Tasks { Name: "compaction_gen2" QueueName: "queue_compaction_gen2" DefaultDuration: 120000000 } Tasks { Name: "compaction_gen3" QueueName: "queue_compaction_gen3" DefaultDuration: 600000000 } Tasks { Name: "compaction_borrowed" QueueName: "queue_compaction_borrowed" DefaultDuration: 600000000 } Tasks { Name: "CS::TTL" QueueName: "queue_cs_ttl" DefaultDuration: 600000000 } Tasks { Name: "CS::INDEXATION" QueueName: "queue_cs_indexation" DefaultDuration: 600000000 } Tasks { Name: "CS::GENERAL" QueueName: "queue_cs_general" DefaultDuration: 600000000 } Tasks { Name: "CS::SCAN_READ" QueueName: 
"queue_cs_scan_read" DefaultDuration: 600000000 } Tasks { Name: "CS::NORMALIZER" QueueName: "queue_cs_normalizer" DefaultDuration: 600000000 } Tasks { Name: "transaction" QueueName: "queue_transaction" DefaultDuration: 600000000 } Tasks { Name: "background_compaction" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen0" QueueName: "queue_background_compaction" DefaultDuration: 10000000 } Tasks { Name: "background_compaction_gen1" QueueName: "queue_background_compaction" DefaultDuration: 20000000 } Tasks { Name: "background_compaction_gen2" QueueName: "queue_background_compaction" DefaultDuration: 60000000 } Tasks { Name: "background_compaction_gen3" QueueName: "queue_background_compaction" DefaultDuration: 300000000 } Tasks { Name: "scan" QueueName: "queue_scan" DefaultDuration: 300000000 } Tasks { Name: "backup" QueueName: "queue_backup" DefaultDuration: 300000000 } Tasks { Name: "restore" QueueName: "queue_restore" DefaultDuration: 300000000 } Tasks { Name: "kqp_query" QueueName: "queue_kqp_resource_manager" DefaultDuration: 600000000 } Tasks { Name: "build_index" QueueName: "queue_build_index" DefaultDuration: 600000000 } Tasks { Name: "ttl" QueueName: "queue_ttl" DefaultDuration: 300000000 } Tasks { Name: "datashard_build_stats" QueueName: "queue_datashard_build_stats" DefaultDuration: 5000000 } Tasks { Name: "cdc_initial_scan" QueueName: "queue_cdc_initial_scan" DefaultDuration: 600000000 } Tasks { Name: "statistics_scan" QueueName: "queue_statistics_scan" DefaultDuration: 600000000 } Tasks { Name: "system_tablet_backup" QueueName: "queue_system_tablet_backup" DefaultDuration: 60000000 } ResourceLimit { Cpu: 256 Memory: 629145600 } 2026-01-04T03:04:14.463275Z node 12 :RESOURCE_BROKER INFO: resource_broker.cpp:1240: Configure result: Success: true 2026-01-04T03:04:14.463680Z node 12 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:248: Limit memory consumer with 944MiB 2026-01-04T03:04:14.463749Z node 12 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B 2026-01-04T03:04:14.464087Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:335: ResourceBroker configure result Success: true 2026-01-04T03:04:14.599467Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:235: Periodic memory stats: AnonRss: none CGroupLimit: 1.95GiB MemTotal: none MemAvailable: none AllocatedMemory: 0B AllocatorCachesMemory: 0B HardLimit: 1.95GiB SoftLimit: 1.46GiB TargetUtilization: 1000MiB ActivitiesLimitBytes: 600MiB ConsumersConsumption: 172KiB OtherConsumption: 0B ExternalConsumption: 0B TargetConsumersConsumption: 1000MiB ResultingConsumersConsumption: 1000MiB Coefficient: 0.90625 2026-01-04T03:04:14.599924Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer SharedCache state: Consumption: 0B Limit: 944MiB Min: 400MiB Max: 1000MiB 2026-01-04T03:04:14.599966Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer MemTable state: Consumption: 172KiB Limit: 56.3MiB Min: 20MiB Max: 60MiB 2026-01-04T03:04:14.599988Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesScanGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2026-01-04T03:04:14.600015Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesCompGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2026-01-04T03:04:14.600038Z node 12 :MEMORY_CONTROLLER 
INFO: memory_controller.cpp:280: Consumer ColumnTablesBlobCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2026-01-04T03:04:14.600060Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDataAccessorCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2026-01-04T03:04:14.600085Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesColumnDataCache state: Consumption: 0B Limit: 25MiB Min: 25MiB Max: 25MiB 2026-01-04T03:04:14.600108Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesDeduplicationGroupedMemory state: Consumption: 0B Limit: 200MiB Min: 200MiB Max: 200MiB 2026-01-04T03:04:14.600132Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:280: Consumer ColumnTablesPortionsMetaDataCache state: Consumption: 0B Limit: 125MiB Min: 125MiB Max: 125MiB 2026-01-04T03:04:14.600164Z node 12 :MEMORY_CONTROLLER INFO: memory_controller.cpp:443: Consumer QueryExecution state: Consumption: 1.94MiB Limit: 400MiB 2026-01-04T03:04:14.600265Z node 12 :TABLET_SAUSAGECACHE INFO: shared_sausagecache.cpp:248: Limit memory consumer with 944MiB 2026-01-04T03:04:14.600295Z node 12 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 32MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B |99.1%| [TM] {RESULT} ydb/core/memory_controller/ut/unittest |99.1%| [TM] {BAZEL_UPLOAD} ydb/core/memory_controller/ut/unittest >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps >> TBtreeIndexBuilder::NoNodes [GOOD] >> TBtreeIndexBuilder::OneNode [GOOD] >> TBtreeIndexBuilder::FewNodes [GOOD] >> TBtreeIndexBuilder::SplitBySize [GOOD] >> TBtreeIndexNode::TIsNullBitmap [GOOD] >> TBtreeIndexNode::CompareTo [GOOD] >> TBtreeIndexNode::Basics [GOOD] >> TBtreeIndexNode::Group [GOOD] >> TBtreeIndexNode::History [GOOD] >> TBtreeIndexNode::OneKey [GOOD] >> TBtreeIndexNode::Reusable [GOOD] >> TBtreeIndexNode::CutKeys [GOOD] >> TBtreeIndexTPart::Conf [GOOD] >> TBtreeIndexTPart::NoNodes [GOOD] >> TBtreeIndexTPart::OneNode [GOOD] >> TBtreeIndexTPart::FewNodes [GOOD] >> TBtreeIndexTPart::Erases [GOOD] >> TBtreeIndexTPart::Groups >> TBtreeIndexTPart::Groups [GOOD] >> TBtreeIndexTPart::History >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_predicate_deps [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate >> TBtreeIndexTPart::History [GOOD] >> TBtreeIndexTPart::External >> TMLPWriterTests::TopicNotExists [GOOD] >> TMLPWriterTests::EmptyWrite >> TBtreeIndexTPart::External [GOOD] >> TChargeBTreeIndex::NoNodes >> TChargeBTreeIndex::NoNodes [GOOD] >> TChargeBTreeIndex::NoNodes_Groups |99.1%| [TA] $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_async_table-True] [GOOD] >> test_isolation.py::TestTransactionIsolation::test_read_skew_g_single_write_predicate [GOOD] >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> TFlatTableExecutor_VersionedRows::TestVersionedRowsLargeBlobs [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2NoRestart [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2 >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1 [GOOD] >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 |99.1%| [TA] {RESULT} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} |99.1%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/audit/test-results/py3test/{meta.json ... results_accumulator.log} >> TFlatTableRenameTableAndColumn::TestSchema1ToSchema2ToSchema1ToSchema2 [GOOD] >> TGenCompaction::OverloadFactorDuringForceCompaction >> TChargeBTreeIndex::NoNodes_Groups [GOOD] >> TChargeBTreeIndex::NoNodes_History >> TChargeBTreeIndex::NoNodes_History [GOOD] >> TChargeBTreeIndex::NoNodes_Groups_History >> test_isolation.py::TestTransactionIsolation::test_write_skew_g2_item [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 >> TGenCompaction::OverloadFactorDuringForceCompaction [GOOD] >> TGenCompaction::ForcedCompactionNoGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithGenerations [GOOD] >> TGenCompaction::ForcedCompactionWithFinalParts [GOOD] >> TGenCompaction::ForcedCompactionByDeletedRows [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccData [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataRestart [GOOD] >> TGenCompaction::ForcedCompactionByUnreachableMvccDataBorrowed [GOOD] >> TIterator::Basics >> TIterator::Basics [GOOD] >> TIterator::External [GOOD] >> TIterator::Single >> KeyValueGRPCService::SimpleWriteReadOverrunV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadOverrunV2 >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2 [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] >> TIterator::Single [GOOD] >> TIterator::SingleReverse |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> TChargeBTreeIndex::NoNodes_Groups_History [GOOD] >> TChargeBTreeIndex::OneNode >> TTxDataShardLocalKMeansScan::BuildToBuildWithOverlap [GOOD] >> TTxDataShardLocalKMeansScan::BuildToBuild_Ranges >> test_session_pool.py::TestSessionPool::test_no_cluster_endpoints_no_failure [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 >> test_session_pool.py::TestSessionPool::test_session_pool_close_basic_logic_case_2 [GOOD] >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] >> TChargeBTreeIndex::OneNode [GOOD] >> TChargeBTreeIndex::OneNode_Groups >> 
test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session >> TIterator::SingleReverse [GOOD] >> TIterator::Mixed >> test_discovery.py::TestDiscoveryExtEndpoint::test_scenario [GOOD] >> BuildStatsHistogram::Ten_Mixed |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_async_table-True] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> test_inserts.py::TestYdbInsertsOperations::test_bulk_upsert_parallel [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows >> TChargeBTreeIndex::OneNode_Groups [GOOD] >> TChargeBTreeIndex::OneNode_History >> TChargeBTreeIndex::OneNode_History [GOOD] >> TChargeBTreeIndex::OneNode_Groups_History |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[substitute_table-create_indexed_table-True] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> TIterator::Mixed [GOOD] >> TIterator::MixedReverse |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_simple_table-False] [GOOD] |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> BuildStatsHistogram::Ten_Mixed [GOOD] >> BuildStatsHistogram::Ten_Serial >> TDqPqRdReadActorTests::Backpressure [GOOD] >> DataShardStats::HasSchemaChanges_BTreeIndex [GOOD] >> DataShardStats::HasSchemaChanges_ByKeyFilter >> TChargeBTreeIndex::OneNode_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.1%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> TChargeBTreeIndex::FewNodes [GOOD] >> TChargeBTreeIndex::FewNodes_Groups >> KeyValueGRPCService::SimpleWriteReadOverrunV2 [GOOD] >> KeyValueGRPCService::SimpleWriteReadRangeV1 >> test_session_grace_shutdown.py::Test::test_grace_shutdown_of_session [GOOD] >> TDqPqRdReadActorTests::RowDispatcherIsRestarted2 [GOOD] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_1__SYNC-pk_types33-all_types33-index33-Date--SYNC] [FAIL] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/rename/py3test >> test_rename.py::test_client_gets_retriable_errors_when_rename[replace_table-create_indexed_table-True] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/rename/py3test >> TChargeBTreeIndex::FewNodes_Groups [GOOD] >> TChargeBTreeIndex::FewNodes_History |99.1%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> TDqPqRdReadActorTests::TwoPartitionsRowDispatcherIsRestarted [GOOD] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] >> 
TDqPqRdReadActorTests::IgnoreMessageIfNoSessions [GOOD] >> BuildStatsHistogram::Ten_Serial [GOOD] >> BuildStatsHistogram::Ten_Crossed |99.2%| [TA] $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} >> TChargeBTreeIndex::FewNodes_History [GOOD] >> TChargeBTreeIndex::FewNodes_Sticky >> TIterator::MixedReverse [GOOD] >> TIterator::Serial |99.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} |99.2%| [TA] {RESULT} $(B)/ydb/tests/functional/rename/test-results/py3test/{meta.json ... results_accumulator.log} >> TDqPqRdReadActorTests::MetadataFields [GOOD] >> TChargeBTreeIndex::FewNodes_Sticky [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test >> TMLPWriterTests::EmptyWrite [GOOD] >> TMLPWriterTests::WriteOneMessage |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TIterator::Serial [GOOD] >> TIterator::SerialReverse |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TDqPqRdReadActorTests::IgnoreCoordinatorResultIfWrongState [GOOD] >> BuildStatsHistogram::Ten_Crossed [GOOD] >> BuildStatsHistogram::Ten_Mixed_Log >> TDqPqRdReadActorTests::TestReadFromTopicFirstWatermark [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_indexes.py::TestSecondaryIndexes::test_create_table_with_global_index [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> TDqPqRdReadActorTests::TestReadFromTopicWatermarks1 >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them >> TIterator::SerialReverse [GOOD] >> TIterator::GetKey [GOOD] >> TIterator::GetKeyWithEraseCache >> TIterator::GetKeyWithEraseCache [GOOD] >> TIterator::GetKeyWithVersionSkips [GOOD] >> TLegacy::IndexIter >> TChargeBTreeIndex::FewNodes_Groups_History [GOOD] >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[async_read_table] [GOOD] >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] >> KeyValueGRPCService::SimpleWriteReadRangeV1 [GOOD] >> KeyValueGRPCService::SimpleWriteReadRangeV2 >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline >> TLegacy::IndexIter [GOOD] >> TLegacy::ScreenedIndexIter [GOOD] >> TLegacy::StatsIter >> TTxDataShardLocalKMeansScan::BuildToBuild_Ranges [GOOD] >> TTxDataShardPrefixKMeansScan::BadRequest >> TLegacy::StatsIter [GOOD] >> TPageHandleTest::Uninitialized >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] >> TPageHandleTest::Uninitialized [GOOD] >> TPageHandleTest::NormalUse [GOOD] >> TPageHandleTest::HandleRef [GOOD] >> TPageHandleTest::PinnedRef [GOOD] >> TPageHandleTest::PinnedRefPure [GOOD] >> TPart::Basics [GOOD] >> TPart::BasicColumnGroups [GOOD] >> TPart::CellDefaults [GOOD] >> 
TPart::Matter [GOOD] >> TPart::External [GOOD] >> TPart::Outer [GOOD] >> TPart::MassCheck >> TPart::MassCheck [GOOD] >> TPart::PageFailEnv |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TDqPqRdReadActorTests::TestReadFromTopicWatermarks1 [GOOD] >> test_inserts.py::TestYdbInsertsOperations::test_insert_multiple_empty_rows [GOOD] >> TDqPqRdReadActorTests::TestWatermarksWhere >> BuildStatsHistogram::Ten_Mixed_Log [GOOD] >> BuildStatsHistogram::Ten_Serial_Log |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TPart::PageFailEnv [GOOD] >> TPart::ForwardEnv |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TPart::ForwardEnv [GOOD] >> TPart::ForwardEnvColumnGroups >> TPart::ForwardEnvColumnGroups [GOOD] >> TPart::ManyVersions [GOOD] >> TPart::ManyDeltas [GOOD] >> TPart::CutKeys_Lz4 [GOOD] >> TPart::CutKeys_Seek [GOOD] >> TPart::CutKeys_SeekPages [GOOD] >> TPart::CutKeys_SeekSlices [GOOD] >> TPart::CutKeys_CutString [GOOD] >> TPart::CutKeys_CutUtf8String >> TPart::CutKeys_CutUtf8String [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase0::test_effective_acls_are_too_large [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TPart::CutKeys_CutUtf8String [GOOD] Test command err: 00000.001 II| FAKE_ENV: Born at 2026-01-04T03:04:02.875214Z 00000.048 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.057 II| FAKE_ENV: Starting storage for BS group 0 00000.058 II| FAKE_ENV: Starting storage for BS group 1 00000.058 II| FAKE_ENV: Starting storage for BS group 2 00000.058 II| FAKE_ENV: Starting storage for BS group 3 00000.073 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.076 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.083 DD| TABLET_EXECUTOR: Leader{1:2:2} commited cookie 2 for step 1 ... waiting for NKikimr::NMemory::TEvConsumerLimit 00000.084 II| TABLET_SAUSAGECACHE: Limit memory consumer with 8MiB 00000.084 TT| TABLET_SAUSAGECACHE: GC has finished with Limit: 8MiB Active: 0B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... 
waiting for NKikimr::NMemory::TEvConsumerLimit (done) 00000.093 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema 00000.093 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.097 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} hope 1 -> done Change{2, redo 0b alter 209b annex 0, ~{ } -{ }, 0 gb} 00000.097 DD| TABLET_EXECUTOR: Leader{1:2:2} Tx{1, NKikimr::NTabletFlatExecutor::TRowsModel::TTxSchema} release 4194304b of static, Memory{0 dyn 0} 00000.098 DD| TABLET_EXECUTOR: Leader{1:2:3} commited cookie 1 for step 2 00000.098 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily 00000.098 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.098 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} hope 1 -> done Change{2, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.098 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{2, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_TryKeepInMemory::TTxCachingFamily} release 4194304b of static, Memory{0 dyn 0} 00000.099 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.099 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.100 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{2, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.100 DD| TABLET_EXECUTOR: Leader{1:2:3} Tx{3, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.100 DD| TABLET_EXECUTOR: Leader{1:2:4} commited cookie 1 for step 3 00000.100 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.100 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.100 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{3, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.100 DD| TABLET_EXECUTOR: Leader{1:2:4} Tx{4, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.101 DD| TABLET_EXECUTOR: Leader{1:2:5} commited cookie 1 for step 4 00000.101 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.101 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.101 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{4, redo 1041b 
alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.101 DD| TABLET_EXECUTOR: Leader{1:2:5} Tx{5, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.101 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 1 for step 5 00000.101 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.101 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.101 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{5, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.101 DD| TABLET_EXECUTOR: Leader{1:2:6} Tx{6, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.102 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 1 for step 6 00000.102 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.102 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.102 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{6, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.102 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{7, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.102 DD| TABLET_EXECUTOR: Leader{1:2:8} commited cookie 1 for step 7 00000.102 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.102 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.102 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{7, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.102 DD| TABLET_EXECUTOR: Leader{1:2:8} Tx{8, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.103 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 1 for step 8 00000.103 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.103 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.103 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{8, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.103 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{9, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.103 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.103 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.103 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.103 DD| TABLET_EXECUTOR: 
Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{9, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.103 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.103 DD| TABLET_EXECUTOR: Leader{1:2:11} commited cookie 1 for step 10 00000.103 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.103 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.103 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{10, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.103 DD| TABLET_EXECUTOR: Leader{1:2:11} Tx{11, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.104 DD| TABLET_EXECUTOR: Leader{1:2:12} commited cookie 1 for step 11 00000.104 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.104 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.104 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{11, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.104 DD| TABLET_EXECUTOR: Leader{1:2:12} Tx{12, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.104 DD| TABLET_EXECUTOR: Leader{1:2:13} commited cookie 1 for step 12 00000.104 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.104 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.105 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{12, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.105 DD| TABLET_EXECUTOR: Leader{1:2:13} Tx{13, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.105 DD| TABLET_EXECUTOR: Leader{1:2:14} commited cookie 1 for step 13 00000.105 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.105 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.105 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{13, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.105 DD| TABLET_EXECUTOR: Leader{1:2:14} Tx{14, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memory{0 dyn 0} 00000.105 DD| TABLET_EXECUTOR: Leader{1:2:15} commited cookie 1 for step 14 00000.105 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} queued, type NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows 00000.105 DD| TABLET_EXECUTOR: 
Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.105 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} hope 1 -> done Change{14, redo 1041b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.105 DD| TABLET_EXECUTOR: Leader{1:2:15} Tx{15, NKikimr::NTabletFlatExecutor::TRowsModel::TTxAddRows} release 4194304b of static, Memor ... {[1:2:3:0:0:0:0] eph 0, 420b 10r} data 1381b + FlatIndex{11} Label{3 rev 3, 375b} 11 rec | Page Row Bytes (Uint32, String) | 0 0 42b {1, aaa} | 1 1 42b {1, aba} | 2 2 42b {1, aca} | 3 3 42b {1, baa} | 4 4 42b {1, bba} | 5 5 42b {2, aaa} | 6 6 42b {2, aba} | 7 7 42b {2, aca} | 8 8 42b {2, baa} | 9 9 42b {2, bba} | 9 9 42b {2, bba} + BTreeIndex{PageId: 10 RowCount: 10 DataSize: 420 ErasedRowCount: 0} Label{13 rev 1, 557b} | PageId: 0 RowCount: 1 DataSize: 42 ErasedRowCount: 0 | > {1, aba} | PageId: 1 RowCount: 2 DataSize: 84 ErasedRowCount: 0 | > {1, aca} | PageId: 2 RowCount: 3 DataSize: 126 ErasedRowCount: 0 | > {1, baa} | PageId: 3 RowCount: 4 DataSize: 168 ErasedRowCount: 0 | > {1, bba} | PageId: 4 RowCount: 5 DataSize: 210 ErasedRowCount: 0 | > {2, aaa} | PageId: 5 RowCount: 6 DataSize: 252 ErasedRowCount: 0 | > {2, aba} | PageId: 6 RowCount: 7 DataSize: 294 ErasedRowCount: 0 | > {2, aca} | PageId: 7 RowCount: 8 DataSize: 336 ErasedRowCount: 0 | > {2, baa} | PageId: 8 RowCount: 9 DataSize: 378 ErasedRowCount: 0 | > {2, bba} | PageId: 9 RowCount: 10 DataSize: 420 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 316b + FlatIndex{3} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccccd} | 1 1 41b {ccccccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 83b 2r} data 320b + FlatIndex{3} Label{3 rev 3, 109b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 43b {ccccccd} | 1 1 43b {ccccccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 83 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccccd} | PageId: 1 RowCount: 2 DataSize: 83 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 312b + FlatIndex{3} Label{3 rev 3, 105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccccd} | 1 1 40b {cccccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 82b 2r} data 316b + FlatIndex{3} Label{3 rev 3, 107b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 42b {cccccd} | 1 1 42b {cccccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 82 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccccd} | PageId: 1 RowCount: 2 DataSize: 82 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 79b 2r} data 308b + FlatIndex{3} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 39b {ccccd} | 1 1 39b {ccccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 79 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 79 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 81b 2r} data 312b + FlatIndex{3} Label{3 rev 3, 
105b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 41b {ccccd} | 1 1 41b {ccccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 81 ErasedRowCount: 0} Label{13 rev 1, 107b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {ccccd} | PageId: 1 RowCount: 2 DataSize: 81 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 78b 2r} data 304b + FlatIndex{3} Label{3 rev 3, 101b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 38b {cccd} | 1 1 38b {cccd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 78 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 78 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{3} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 40b {cccd} | 1 1 40b {cccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 75b 2r} data 292b + FlatIndex{3} Label{3 rev 3, 95b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 75 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 75 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 77b 2r} data 296b + FlatIndex{3} Label{3 rev 3, 97b} 3 rec | Page Row Bytes (String) | 0 0 40b {cccccc} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 77 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 77 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{3} Label{3 rev 3, 89b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{3} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 71 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 69b 2r} data 280b + FlatIndex{3} Label{3 rev 3, 89b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 35b {d} | 1 1 35b {d} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 69 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 69 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 71b 2r} data 284b + FlatIndex{3} Label{3 rev 3, 91b} 3 rec | Page Row Bytes (String) | 0 0 34b {} | 1 1 37b {d} | 1 1 37b {ddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 71 ErasedRowCount: 0} Label{13 rev 1, 103b} | PageId: 0 RowCount: 1 DataSize: 34 ErasedRowCount: 0 | > {d} | PageId: 1 RowCount: 2 DataSize: 71 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 280b 2r} data 904b + FlatIndex{3} Label{3 rev 3, 401b} 3 rec | Page Row Bytes (String) | 0 0 140b {____________________________________________________________________________________________________cccddd} | 1 1 140b 
{____________________________________________________________________________________________________cd} | 1 1 140b {____________________________________________________________________________________________________cddddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 280 ErasedRowCount: 0} Label{13 rev 1, 204b} | PageId: 0 RowCount: 1 DataSize: 140 ErasedRowCount: 0 | > {____________________________________________________________________________________________________cd} | PageId: 1 RowCount: 2 DataSize: 280 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 80b 2r} data 308b + FlatIndex{3} Label{3 rev 3, 103b} 3 rec | Page Row Bytes (Utf8) | 0 0 40b {cccccc} | 1 1 40b {cccd} | 1 1 40b {cccddd} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 80 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {cccd} | PageId: 1 RowCount: 2 DataSize: 80 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 98b 2r} data 350b + FlatIndex{3} Label{3 rev 3, 124b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x89\x91} | 1 1 49b {abc\xF0\x9F\x89\x91\xF0\x9F\x89\x91\xF0\x9F\x89\x91} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 98 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abc\xF0\x9F\x89\x91} | PageId: 1 RowCount: 2 DataSize: 98 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 95b 2r} data 342b + FlatIndex{3} Label{3 rev 3, 120b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 46b {abc\xE2\x9A\xAB} | 1 1 46b {abc\xE2\x9A\xAB\xE2\x9A\xAB\xE2\x9A\xAB} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 95 ErasedRowCount: 0} Label{13 rev 1, 108b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abc\xE2\x9A\xAB} | PageId: 1 RowCount: 2 DataSize: 95 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 95b 2r} data 344b + FlatIndex{3} Label{3 rev 3, 121b} 3 rec | Page Row Bytes (Utf8) | 0 0 46b {abc\xE2\x9A\xAB\xE2\x9A\xAB\xE2\x9A\xAB} | 1 1 49b {abc\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 95 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 46 ErasedRowCount: 0 | > {abc\xF0\x9F\x98\x94} | PageId: 1 RowCount: 2 DataSize: 95 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 89b 2r} data 332b + FlatIndex{3} Label{3 rev 3, 115b} 3 rec | Page Row Bytes (Utf8) | 0 0 40b {abcxxx} | 1 1 49b {abc\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 89 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 40 ErasedRowCount: 0 | > {abc\xF0\x9F\x98\x94} | PageId: 1 RowCount: 2 DataSize: 89 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 89b 2r} data 326b + FlatIndex{3} Label{3 rev 3, 112b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 40b {abcx} | 1 1 40b {abcxxx} + BTreeIndex{PageId: 2 RowCount: 2 DataSize: 89 ErasedRowCount: 0} Label{13 rev 1, 106b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abcx} | PageId: 1 RowCount: 2 DataSize: 89 ErasedRowCount: 0 Part{[1:2:3:0:0:0:0] eph 0, 98b 2r} data 350b + FlatIndex{3} Label{3 rev 3, 124b} 3 rec | Page Row Bytes (Utf8) | 0 0 49b {abc\xF0\x9F\x98\x94\xF0\x9F\x98\x94\xF0\x9F\x98\x94} | 1 1 49b {abc\xF0\x9F\x98\x96} | 1 1 49b {abc\xF0\x9F\x98\x96\xF0\x9F\x98\x96\xF0\x9F\x98\x96} + BTreeIndex{PageId: 2 RowCount: 2 
DataSize: 98 ErasedRowCount: 0} Label{13 rev 1, 109b} | PageId: 0 RowCount: 1 DataSize: 49 ErasedRowCount: 0 | > {abc\xF0\x9F\x98\x96} | PageId: 1 RowCount: 2 DataSize: 98 ErasedRowCount: 0 |99.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success >> TChargeBTreeIndex::FewNodes_Groups_History_Sticky [GOOD] >> TCompaction::OneMemtable [GOOD] >> TCompaction::ManyParts >> BuildStatsHistogram::Ten_Serial_Log [GOOD] >> BuildStatsHistogram::Ten_Crossed_Log >> test_public_api.py::TestCRUDOperations::test_prepared_query_pipeline [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops >> TCompaction::ManyParts [GOOD] >> TCompaction::BootAbort |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_success [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column >> test_public_api.py::TestCRUDOperations::test_scheme_client_ops [GOOD] >> test_public_api.py::TestCRUDOperations::test_scheme_operation_errors_handle [GOOD] >> test_public_api.py::TestCRUDOperations::test_none_values >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_new_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_session_pool.py::TestSessionPool::test_session_pool_min_size_feature [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_change_column_type [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column >> TCompaction::BootAbort [GOOD] >> TCompaction::Defaults [GOOD] >> TCompaction::Merges [GOOD] >> TCompactionMulti::ManyParts >> test_public_api.py::TestCRUDOperations::test_none_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_list_type >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_column [GOOD] >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key >> test_public_api.py::TestCRUDOperations::test_parse_list_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_parse_tuple >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_add_to_key [GOOD] >> TTxDataShardPrefixKMeansScan::BadRequest [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToPosting >> test_public_api.py::TestCRUDOperations::test_parse_tuple [GOOD] >> test_public_api.py::TestCRUDOperations::test_dict_type >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> test_public_api.py::TestCRUDOperations::test_dict_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/hive/py3test >> test_public_api.py::TestCRUDOperations::test_struct_type [GOOD] >> test_public_api.py::TestCRUDOperations::test_data_types >> TCompactionMulti::ManyParts [GOOD] >> TCompactionMulti::MainPageCollectionEdge >> TDqPqRdReadActorTests::TestWatermarksWhere [GOOD] >> TDqPqRdReadActorTests::TestWatermarksWhereFalse [GOOD] >> TDqPqRdReadActorTests::WatermarkCheckpointWithItemsInReadyBuffer >> test_public_api.py::TestCRUDOperations::test_data_types [GOOD] >> test_public_api.py::TestCRUDOperations::test_struct_type_parameter [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> KeyValueGRPCService::SimpleWriteReadRangeV2 [GOOD] >> KeyValueGRPCService::SimpleWriteListRangeV1 >> test_public_api.py::TestCRUDOperations::test_bulk_prepared_insert_many_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_bulk_upsert >> TCompactionMulti::MainPageCollectionEdge [GOOD] >> TCompactionMulti::MainPageCollectionEdgeMany >> BuildStatsHistogram::Ten_Crossed_Log [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels >> test_public_api.py::TestCRUDOperations::test_bulk_upsert [GOOD] >> test_public_api.py::TestCRUDOperations::test_all_enums_are_presented_as_exceptions [GOOD] >> test_public_api.py::TestCRUDOperations::test_type_builders_str_methods [GOOD] >> test_public_api.py::TestCRUDOperations::test_create_and_delete_session_then_use_it_again [GOOD] >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error >> BuildStatsHistogram::Single_Small_2_Levels [GOOD] >> BuildStatsHistogram::Single_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels >> BuildStatsHistogram::Three_Mixed_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_1_Level >> TMLPWriterTests::WriteOneMessage [GOOD] >> TMLPWriterTests::WriteTwoMessage_OnePartition >> BuildStatsHistogram::Three_Mixed_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Mixed_Small_0_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels >> BuildStatsHistogram::Three_Serial_Small_2_Levels [GOOD] >> BuildStatsHistogram::Three_Serial_Small_2_Levels_3_Buckets [GOOD] >> BuildStatsHistogram::Three_Serial_Small_1_Level >> TCompactionMulti::MainPageCollectionEdgeMany [GOOD] >> TCompactionMulti::MainPageCollectionOverflow >> test_public_api.py::TestCRUDOperations::test_locks_invalidated_error [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_tcl_3 [GOOD] >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak >> BuildStatsHistogram::Three_Serial_Small_1_Level [GOOD] >> BuildStatsHistogram::Three_Serial_Small_0_Levels [GOOD] >> BuildStatsMixedIndex::Single >> TCompactionMulti::MainPageCollectionOverflow [GOOD] >> TCompactionMulti::MainPageCollectionOverflowSmallRefs [GOOD] >> TCompactionMulti::MainPageCollectionOverflowLargeRefs [GOOD] >> TExecutorDb::FullScan >> BuildStatsMixedIndex::Single [GOOD] >> BuildStatsMixedIndex::Single_Slices >> TDqPqRdReadActorTests::WatermarkCheckpointWithItemsInReadyBuffer [GOOD] >> BuildStatsMixedIndex::Single_Slices [GOOD] >> BuildStatsMixedIndex::Single_History |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| 
[TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> BuildStatsMixedIndex::Single_History [GOOD] >> BuildStatsMixedIndex::Single_History_Slices >> TDqPqRdReadActorTests::RebalanceAfterDistributionReset |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> BuildStatsMixedIndex::Single_History_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups >> test_crud.py::TestCRUDOperations::test_create_table_and_drop_table_success [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed1 [GOOD] >> test_crud.py::TestCRUDOperations::test_create_table_wrong_primary_key_failed2 [GOOD] >> BuildStatsMixedIndex::Single_Groups [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices >> BuildStatsMixedIndex::Single_Groups_Slices [GOOD] >> BuildStatsMixedIndex::Single_Groups_History |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_isolation.py::TestTransactionIsolation::test_anti_dependency_cycles_g2_two_edges [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> BuildStatsMixedIndex::Single_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_Slices |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TExecutorDb::RandomOps >> BuildStatsMixedIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsMixedIndex::Mixed >> BuildStatsMixedIndex::Mixed [GOOD] >> BuildStatsMixedIndex::Mixed_Groups >> BuildStatsMixedIndex::Mixed_Groups [GOOD] >> BuildStatsMixedIndex::Mixed_Groups_History >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] >> TDqPqRdReadActorTests::RebalanceAfterDistributionReset [GOOD] >> BuildStatsMixedIndex::Mixed_Groups_History [GOOD] >> BuildStatsMixedIndex::Serial >> BuildStatsMixedIndex::Serial [GOOD] >> BuildStatsMixedIndex::Serial_Groups >> TDqPqReadActorTest::TestReadFromTopic >> DataShardStats::HasSchemaChanges_ByKeyFilter [GOOD] >> DataShardStats::HasSchemaChanges_Columns >> BuildStatsMixedIndex::Serial_Groups [GOOD] >> BuildStatsMixedIndex::Serial_Groups_History >> BuildStatsMixedIndex::Serial_Groups_History [GOOD] >> BuildStatsMixedIndex::Single_LowResolution >> BuildStatsMixedIndex::Single_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Slices_LowResolution >> BuildStatsMixedIndex::Single_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_LowResolution >> BuildStatsMixedIndex::Single_Groups_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution >> TieredCache::Touch [GOOD] >> TieredCache::Erase [GOOD] >> TieredCache::EvictNext [GOOD] >> TieredCache::UpdateLimit [GOOD] >> TieredCache::InsertUntouched [GOOD] >> TieredCache::EnsureLimits [GOOD] >> TSharedPageCache_Actor::Request_Basics [GOOD] >> TSharedPageCache_Actor::Request_Failed |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> BuildStatsMixedIndex::Single_Groups_Slices_LowResolution [GOOD] >> BuildStatsMixedIndex::Single_Groups_History_LowResolution >> TSharedPageCache_Actor::Request_Failed [GOOD] >> TSharedPageCache_Actor::Request_Queue [GOOD] >> TSharedPageCache_Actor::Request_Queue_Failed >> TSharedPageCache_Actor::Request_Queue_Failed [GOOD] >> TSharedPageCache_Actor::Request_Queue_Fast >> BuildStatsMixedIndex::Single_Groups_History_LowResolution [GOOD] >> 
BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution >> TSharedPageCache_Actor::Request_Queue_Fast [GOOD] >> TSharedPageCache_Actor::Request_Sequential [GOOD] >> TSharedPageCache_Actor::Request_Cached >> TSharedPageCache_Actor::Request_Cached [GOOD] >> TSharedPageCache_Actor::Request_Different_Collections [GOOD] >> TSharedPageCache_Actor::Request_Different_Pages >> BuildStatsMixedIndex::Single_Groups_History_Slices_LowResolution [GOOD] >> Charge::Lookups [GOOD] >> Charge::ByKeysBasics [GOOD] >> Charge::ByKeysGroups [GOOD] >> Charge::ByKeysGroupsLimits [GOOD] >> Charge::ByKeysLimits [GOOD] >> Charge::ByKeysReverse [GOOD] >> Charge::ByKeysHistory [GOOD] >> Charge::ByKeysIndex [GOOD] >> Charge::ByRows [GOOD] >> Charge::ByRowsReverse >> TSharedPageCache_Actor::Request_Different_Pages [GOOD] >> TSharedPageCache_Actor::Request_Different_Pages_Reversed >> Charge::ByRowsReverse [GOOD] >> Charge::ByRowsLimits [GOOD] >> Charge::ByRowsLimitsReverse [GOOD] >> DBase::Basics [GOOD] >> DBase::Affects [GOOD] >> DBase::Annex [GOOD] >> DBase::AnnexRollbackChanges [GOOD] >> DBase::AlterAndUpsertChangesVisibility [GOOD] >> DBRowLocks::LockSurvivesCompactions [GOOD] >> DBRowLocks::LockOverCompactedErase [GOOD] >> DBRowLocks::CommitTxAfterLockThenCompact [GOOD] >> DBRowLocks::CommitLockThenCompactRowVersion [GOOD] >> DBRowLocks::OverwriteLockThenCompact [GOOD] >> DBRowLocks::LockOpenTxAndTxDataAccounting [GOOD] >> DBRowLocks::MultipleCommittedRowLocks [GOOD] >> DBRowLocks::LocksCommittedRemovedIteration [GOOD] >> DBRowLocks::LocksReplay [GOOD] >> DBRowLocks::LocksMvccCompact [GOOD] >> TSharedPageCache_Actor::Request_Different_Pages_Reversed [GOOD] >> TSharedPageCache_Actor::Request_Subset [GOOD] >> TSharedPageCache_Actor::Request_Subset_Shuffled >> TSharedPageCache_Actor::Request_Subset_Shuffled [GOOD] >> TSharedPageCache_Actor::Request_Superset [GOOD] >> TSharedPageCache_Actor::Request_Superset_Reversed >> TSharedPageCache_Actor::Request_Superset_Reversed [GOOD] >> TSharedPageCache_Actor::Request_Crossing >> TSharedPageCache_Actor::Request_Crossing [GOOD] >> TSharedPageCache_Actor::Request_Crossing_Reversed [GOOD] >> TSharedPageCache_Actor::Request_Crossing_Shuffled >> TSharedPageCache_Actor::Request_Crossing_Shuffled [GOOD] >> TSharedPageCache_Actor::Unregister_Basics |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TSharedPageCache_Actor::Unregister_Basics [GOOD] >> TSharedPageCache_Actor::Unregister_Cached [GOOD] >> TSharedPageCache_Actor::Unregister_Expired >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] >> TSharedPageCache_Actor::Unregister_Expired [GOOD] >> TSharedPageCache_Actor::Unregister_InFly |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TSharedPageCache_Actor::Unregister_InFly [GOOD] >> TSharedPageCache_Actor::Unregister_Queued >> TSharedPageCache_Actor::Unregister_Queued [GOOD] >> TSharedPageCache_Actor::Unregister_Queued_Pending >> TSharedPageCache_Actor::Unregister_Queued_Pending [GOOD] >> TSharedPageCache_Actor::InMemory_Preemption |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TSharedPageCache_Actor::InMemory_Preemption [GOOD] >> TSharedPageCache_Actor::InMemory_Unregister >> TSharedPageCache_Actor::InMemory_Unregister [GOOD] >> 
TSharedPageCache_Actor::InMemory_ReloadPages >> TSharedPageCache_Actor::InMemory_ReloadPages [GOOD] >> TSharedPageCache_Actor::InMemory_ReloadPagesLimitedInFly ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> DBRowLocks::LocksMvccCompact [GOOD] Test command err: 10 parts: [0:0:1:0:0:0:0] 23928 rows, 1015 pages, 5 levels: (157741, 52588) (315424, 105149) (479998, 160007) (639733, 213252) (799132, 266385) [0:0:2:0:0:0:0] 24216 rows, 1025 pages, 5 levels: (158968, 52997) (323176, 107733) (478777, 159600) (638131, 212718) (798961, 266328) [0:0:3:0:0:0:0] 23857 rows, 1008 pages, 5 levels: (161719, 53914) (324091, 108038) (482023, 160682) (640741, 213588) (798478, 266167) [0:0:4:0:0:0:0] 24184 rows, 1023 pages, 5 levels: (160366, 53463) (321823, 107282) (478882, 159635) (641413, 213812) (799024, 266349) [0:0:5:0:0:0:0] 23945 rows, 1019 pages, 5 levels: (160006, 53343) (321943, 107322) (483100, 161041) (641107, 213710) (799117, 266380) [0:0:6:0:0:0:0] 23619 rows, 1005 pages, 5 levels: (161371, 53798) (319855, 106626) (480928, 160317) (636934, 212319) (799258, 266427) [0:0:7:0:0:0:0] 23988 rows, 1019 pages, 5 levels: (154531, 51518) (314071, 104698) (475438, 158487) (636523, 212182) (798766, 266263) [0:0:8:0:0:0:0] 23770 rows, 1013 pages, 5 levels: (160948, 53657) (318202, 106075) (477640, 159221) (640657, 213560) (799090, 266371) [0:0:9:0:0:0:0] 24256 rows, 1029 pages, 5 levels: (157747, 52590) (320038, 106687) (482770, 160931) (638905, 212976) (799195, 266406) [0:0:10:0:0:0:0] 24237 rows, 1026 pages, 5 levels: (162409, 54144) (317659, 105894) (477673, 159232) (637528, 212517) (798748, 266257) Checking BTree: Touched 1% bytes, 37 pages RowCountHistogram: 9% (actual 6%) key = (54346, 18123) value = 23288 (actual 16627 - 2% error) 10% (actual 12%) key = (155473, 51832) value = 47414 (actual 46964 - 0% error) 10% (actual 6%) key = (208177, 69400) value = 71513 (actual 62823 - 3% error) 10% (actual 12%) key = (310048, 103357) value = 95735 (actual 93347 - 0% error) 10% (actual 11%) key = (400444, 133489) value = 119910 (actual 120422 - 0% error) 10% (actual 7%) key = (460861, 153628) value = 144061 (actual 138588 - 2% error) 10% (actual 12%) key = (563395, 187806) value = 168190 (actual 169409 - 0% error) 10% (actual 5%) key = (611236, 203753) value = 192378 (actual 183755 - 3% error) 9% (actual 11%) key = (706417, 235480) value = 215613 (actual 212448 - 1% error) 10% (actual 11%) DataSizeHistogram: 9% (actual 6%) key = (54346, 18123) value = 1986792 (actual 1422570 - 2% error) 10% (actual 12%) key = (155473, 51832) value = 4036041 (actual 4004054 - 0% error) 10% (actual 6%) key = (208177, 69400) value = 6085370 (actual 5348583 - 3% error) 10% (actual 12%) key = (310048, 103357) value = 8135866 (actual 7931163 - 1% error) 10% (actual 11%) key = (400444, 133489) value = 10187497 (actual 10227908 - 0% error) 10% (actual 7%) key = (460861, 153628) value = 12238449 (actual 11773611 - 2% error) 10% (actual 12%) key = (563395, 187806) value = 14287401 (actual 14387497 - 0% error) 10% (actual 6%) key = (611236, 203753) value = 16340389 (actual 15610901 - 3% error) 9% (actual 11%) key = (706417, 235480) value = 18309634 (actual 18041898 - 1% error) 10% (actual 11%) Checking Flat: Touched 100% bytes, 10 pages RowCountHistogram: 10% (actual 10%) key = (79777, 26600) value = 24001 (actual 24257 - 0% error) 10% (actual 10%) key = (159688, 53237) value = 48010 (actual 48277 - 0% error) 10% (actual 10%) key = (239839, 79954) value = 72013 (actual 72278 - 0% error) 10% (actual 
9%) key = (319807, 106610) value = 96022 (actual 96277 - 0% error) 10% (actual 10%) key = (399964, 133329) value = 120041 (actual 120304 - 0% error) 10% (actual 10%) key = (479791, 159938) value = 144061 (actual 144321 - 0% error) 10% (actual 10%) key = (559867, 186630) value = 168077 (actual 168330 - 0% error) 10% (actual 10%) key = (639661, 213228) value = 192085 (actual 192333 - 0% error) 10% (actual 10%) key = (719458, 239827) value = 216091 (actual 216348 - 0% error) 9% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79318, 26447) value = 2038035 (actual 2060169 - 0% error) 10% (actual 10%) key = (159028, 53017) value = 4076033 (actual 4098046 - 0% error) 10% (actual 10%) key = (239581, 79868) value = 6115440 (actual 6137485 - 0% error) 10% (actual 10%) key = (319516, 106513) value = 8153742 (actual 8175567 - 0% error) 10% (actual 10%) key = (399841, 133288) value = 10191957 (actual 10213746 - 0% error) 10% (actual 10%) key = (479734, 159919) value = 12230556 (actual 12252749 - 0% error) 10% (actual 10%) key = (559552, 186525) value = 14269383 (actual 14291350 - 0% error) 10% (actual 10%) key = (639193, 213072) value = 16307737 (actual 16329710 - 0% error) 10% (actual 10%) key = (719326, 239783) value = 18346896 (actual 18369051 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 43 pages RowCountHistogram: 10% (actual 6%) key = (50749, 16924) value = 24065 (actual 15550 - 3% error) 10% (actual 12%) key = (148027, 49350) value = 48098 (actual 44756 - 1% error) 10% (actual 7%) key = (205003, 68342) value = 72300 (actual 61833 - 4% error) 10% (actual 12%) key = (301159, 100394) value = 96516 (actual 90698 - 2% error) 10% (actual 11%) key = (396778, 132267) value = 120685 (actual 119332 - 0% error) 10% (actual 7%) key = (454192, 151405) value = 144842 (actual 136562 - 3% error) 10% (actual 11%) key = (548890, 182971) value = 168942 (actual 165043 - 1% error) 10% (actual 7%) key = (609940, 203321) value = 193089 (actual 183462 - 4% error) 10% (actual 11%) key = (701434, 233819) value = 218665 (actual 210950 - 3% error) 8% (actual 12%) DataSizeHistogram: 10% (actual 6%) key = (50749, 16924) value = 2051869 (actual 1330161 - 3% error) 10% (actual 12%) key = (148027, 49350) value = 4100433 (actual 3812568 - 1% error) 10% (actual 7%) key = (205003, 68342) value = 6148888 (actual 5264750 - 4% error) 10% (actual 11%) key = (301159, 100394) value = 8200933 (actual 7706870 - 2% error) 10% (actual 11%) key = (396778, 132267) value = 10251926 (actual 10135710 - 0% error) 10% (actual 7%) key = (454192, 151405) value = 12302580 (actual 11601475 - 3% error) 10% (actual 11%) key = (548890, 182971) value = 14351377 (actual 14019410 - 1% error) 10% (actual 7%) key = (609940, 203321) value = 16401437 (actual 15584938 - 4% error) 10% (actual 11%) key = (701434, 233819) value = 18568091 (actual 17915901 - 3% error) 8% (actual 12%) 10 parts: [0:0:1:0:0:0:0] 24000 rows, 1023 pages, 5 levels: (15913, 5312) (32008, 10677) (48025, 16016) (64045, 21356) (79984, 26669) [0:0:2:0:0:0:0] 24000 rows, 1022 pages, 5 levels: (95941, 31988) (111937, 37320) (127807, 42610) (143950, 47991) (159964, 53329) [0:0:3:0:0:0:0] 24000 rows, 1017 pages, 5 levels: (175939, 58654) (191968, 63997) (208006, 69343) (224077, 74700) (240169, 80064) [0:0:4:0:0:0:0] 24000 rows, 1018 pages, 5 levels: (256087, 85370) (272023, 90682) (288022, 96015) (304021, 101348) (320014, 106679) [0:0:5:0:0:0:0] 24000 rows, 1017 pages, 5 levels: (336010, 112011) (352159, 117394) (368092, 122705) (384097, 128040) (400177, 133400) 
[0:0:6:0:0:0:0] 24000 rows, 1020 pages, 5 levels: (416161, 138728) (432139, 144054) (447946, 149323) (463885, 154636) (479950, 159991) [0:0:7:0:0:0:0] 24000 rows, 1018 pages, 5 levels: (495844, 165289) (511843, 170622) (527917, 175980) (543799, 181274) (559849, 186624) [0:0:8:0:0:0:0] 24000 rows, 1020 pages, 5 levels: (575899, 191974) (591895, 197306) (607765, 202596) (623614, 207879) (639565, 213196) [0:0:9:0:0:0:0] 24000 rows, 1016 pages, 5 levels: (655633, 218552) (671707, 223910) (687631, 229218) (703516, 234513) (719437, 239820) [0:0:10:0:0:0:0] 24000 rows, 1015 pages, 5 levels: (735415, 245146) (751432, 250485) (767404, 255809) (783427, 261150) (799303, 266442) Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 10% (actual 10%) key = (80038, 26687) value = 24000 (actual 24433 - 0% error) 10% (actual 9%) key = (160003, 53342) value = 48000 (actual 48386 - 0% error) 10% (actual 9%) key = (240184, 80069) value = 72000 (actual 72338 - 0% error) 10% (actual 9%) key = (320035, 106686) value = 96000 (actual 96293 - 0% error) 10% (actual 9%) key = (400189, 133404) value = 120000 (actual 120247 - 0% error) 10% (actual 9%) key = (479956, 159993) value = 144000 (actual 144195 - 0% error) 10% (actual 9%) key = (559891, 186638) value = 168000 (actual 168146 - 0% error) 10% (actual 9%) key = (639643, 213222) value = 192000 (actual 192095 - 0% error) 10% (actual 9%) key = (719461, 239828) value = 216000 (actual 216050 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (80038, 26687) value = 2046023 (actual 2082181 - 0% error) 10% (actual 10%) key = (160003, 53342) value = 4091184 (actual 4123481 - 0% error) 9% (actual 9%) key = (240184, 80069) value = 6122543 (actual 6150789 - 0% error) 9% (actual 9%) key = (320035, 106686) value = 8157907 (actual 8182094 - 0% error) 9% (actual 9%) key = (400189, 133404) value = 10192958 (actual 10213133 - 0% error) 10% (actual 9%) key = (479956, 159993) value = 12231984 (actual 12248119 - 0% error) 9% (actual 9%) key = (559891, 186638) value = 14269160 (actual 14281266 - 0% error) 10% (actual 9%) key = (639643, 213222) value = 16310625 (actual 16318673 - 0% error) 9% (actual 9%) key = (719461, 239828) value = 18343487 (actual 18347516 - 0% error) 9% (actual 9%) Checking Flat: Touched 100% bytes, 10 pages RowCountHistogram: 10% (actual 10%) key = (80041, 26688) value = 24000 (actual 24433 - 0% error) 10% (actual 9%) key = (160006, 53343) value = 48000 (actual 48386 - 0% error) 10% (actual 9%) key = (240187, 80070) value = 72000 (actual 72338 - 0% error) 10% (actual 9%) key = (320038, 106687) value = 96000 (actual 96293 - 0% error) 10% (actual 9%) key = (400192, 133405) value = 120000 (actual 120247 - 0% error) 10% (actual 9%) key = (479959, 159994) value = 144000 (actual 144195 - 0% error) 10% (actual 9%) key = (559894, 186639) value = 168000 (actual 168146 - 0% error) 10% (actual 9%) key = (639649, 213224) value = 192000 (actual 192095 - 0% error) 10% (actual 9%) key = (719467, 239830) value = 216000 (actual 216050 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 10%) key = (79732, 26585) value = 2038706 (actual 2078803 - 0% error) 10% (actual 9%) key = (159427, 53150) value = 4076220 (actual 4112556 - 0% error) 10% (actual 9%) key = (239872, 79965) value = 6113960 (actual 6146198 - 0% error) 10% (actual 9%) key = (319849, 106624) value = 8153334 (actual 8181530 - 0% error) 10% (actual 9%) key = (400177, 133400) value = 10192636 (actual 10213133 - 0% error) 10% (actual 9%) key = (479950, 159991) value = 12231529 (actual 
12248119 - 0% error) 10% (actual 9%) key = (559894, 186639) value = 14269160 (actual 14281266 - 0% error) 10% (actual 9%) key = (639481, 213168) value = 16306978 (actual 16318673 - 0% error) 10% (actual 9%) key = (719554, 239859) value = 18345472 (actual 18349551 - 0% error) 9% (actual 9%) Checking Mixed: Touched 1% bytes, 40 pages RowCountHistogram: 10% (actual 4%) key = (34876, 11633) value = 24122 (actual 10941 - 5% error) 10% (actual 10%) key = (120064, 40029) value = 48205 (actual 36464 - 4% error) 10% (actual 10%) key = (205300, 68441) value = 72229 (actual 61965 - 4% error) 10% (actual 10%) key = (290449, 96824) value = 96245 (actual 87436 - 3% error) 10% (actual 11%) key = (380554, 126859) value = 121759 (actual 114432 - 3% error) 10% (actual 11%) ... 85 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error) 4% (actual 4%) key = (1087, 370) value = 27765 (actual 27678 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 5% (actual 5%) key = (91, 38) value = 25 (actual 25 - 0% error) 5% (actual 5%) key = (166, 63) value = 50 (actual 50 - 0% error) 4% (actual 4%) key = (253, 92) value = 74 (actual 74 - 0% error) 4% (actual 4%) key = (325, 116) value = 96 (actual 96 - 0% error) 4% (actual 4%) key = (394, 139) value = 119 (actual 119 - 0% error) 5% (actual 5%) key = (481, 168) value = 144 (actual 144 - 0% error) 4% (actual 4%) key = (556, 193) value = 167 (actual 167 - 0% error) 4% (actual 4%) key = (631, 218) value = 191 (actual 191 - 0% error) 4% (actual 4%) key = (709, 244) value = 215 (actual 215 - 0% error) 3% (actual 3%) key = (766, 263) value = 234 (actual 234 - 0% error) 5% (actual 5%) key = (853, 292) value = 261 (actual 261 - 0% error) 4% (actual 4%) key = (934, 319) value = 285 (actual 285 - 0% error) 4% (actual 4%) key = (1006, 343) value = 309 (actual 309 - 0% error) 4% (actual 4%) key = (1087, 370) value = 332 (actual 332 - 0% error) 0% (actual 0%) key = (1090, 371) value = 333 (actual 333 - 0% error) 4% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error) 4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 4% (actual 4%) key = (91, 38) value = 1974 (actual 1974 - 0% error) 4% (actual 4%) key = (166, 63) value = 3992 (actual 3992 - 0% error) 4% (actual 
4%) key = (253, 92) value = 5889 (actual 5889 - 0% error) 4% (actual 4%) key = (325, 116) value = 7868 (actual 7868 - 0% error) 4% (actual 4%) key = (394, 139) value = 9910 (actual 9910 - 0% error) 4% (actual 4%) key = (481, 168) value = 11938 (actual 11938 - 0% error) 4% (actual 4%) key = (556, 193) value = 13685 (actual 13685 - 0% error) 4% (actual 4%) key = (631, 218) value = 15674 (actual 15674 - 0% error) 4% (actual 4%) key = (709, 244) value = 17709 (actual 17709 - 0% error) 4% (actual 4%) key = (766, 263) value = 19664 (actual 19664 - 0% error) 4% (actual 4%) key = (853, 292) value = 21673 (actual 21673 - 0% error) 4% (actual 4%) key = (934, 319) value = 23712 (actual 23712 - 0% error) 4% (actual 4%) key = (1006, 343) value = 25687 (actual 25687 - 0% error) 4% (actual 4%) key = (1087, 370) value = 27678 (actual 27678 - 0% error) 0% (actual 0%) key = (1090, 371) value = 27765 (actual 27765 - 0% error) 4% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) Checking Mixed: Touched 100% bytes, 3 pages RowCountHistogram: 14% (actual 5%) key = (91, 38) value = 70 (actual 25 - 9% error) 5% (actual 5%) key = (166, 63) value = 95 (actual 50 - 9% error) 4% (actual 4%) key = (253, 92) value = 119 (actual 74 - 9% error) 4% (actual 4%) key = (325, 116) value = 141 (actual 96 - 9% error) 4% (actual 4%) key = (394, 139) value = 164 (actual 119 - 9% error) 5% (actual 5%) key = (481, 168) value = 189 (actual 144 - 9% error) 4% (actual 9%) key = (631, 218) value = 212 (actual 191 - 4% error) 4% (actual 4%) key = (709, 244) value = 236 (actual 215 - 4% error) 3% (actual 3%) key = (766, 263) value = 255 (actual 234 - 4% error) 5% (actual 5%) key = (853, 292) value = 282 (actual 261 - 4% error) 4% (actual 4%) key = (934, 319) value = 306 (actual 285 - 4% error) 4% (actual 4%) key = (1006, 343) value = 330 (actual 309 - 4% error) 4% (actual 4%) key = (1087, 370) value = 353 (actual 332 - 4% error) 0% (actual 4%) key = (1156, 393) value = 354 (actual 354 - 0% error) 5% (actual 5%) key = (1246, 423) value = 380 (actual 380 - 0% error) 4% (actual 4%) key = (1324, 449) value = 404 (actual 404 - 0% error) 4% (actual 4%) key = (1396, 473) value = 426 (actual 426 - 0% error) 4% (actual 4%) key = (1471, 498) value = 448 (actual 448 - 0% error) 4% (actual 4%) key = (1543, 522) value = 470 (actual 470 - 0% error) 5% (actual 5%) key = (1633, 552) value = 496 (actual 496 - 0% error) 0% (actual 0%) DataSizeHistogram: 14% (actual 4%) key = (91, 38) value = 5939 (actual 1974 - 9% error) 4% (actual 4%) key = (166, 63) value = 7957 (actual 3992 - 9% error) 4% (actual 4%) key = (253, 92) value = 9854 (actual 5889 - 9% error) 4% (actual 4%) key = (325, 116) value = 11833 (actual 7868 - 9% error) 4% (actual 4%) key = (394, 139) value = 13875 (actual 9910 - 9% error) 4% (actual 4%) key = (481, 168) value = 15903 (actual 11938 - 9% error) 4% (actual 8%) key = (631, 218) value = 17650 (actual 15674 - 4% error) 4% (actual 4%) key = (709, 244) value = 19685 (actual 17709 - 4% error) 4% (actual 4%) key = (766, 263) value = 21640 (actual 19664 - 4% error) 4% (actual 
4%) key = (853, 292) value = 23649 (actual 21673 - 4% error) 4% (actual 4%) key = (934, 319) value = 25688 (actual 23712 - 4% error) 4% (actual 4%) key = (1006, 343) value = 27663 (actual 25687 - 4% error) 4% (actual 4%) key = (1087, 370) value = 29654 (actual 27678 - 4% error) 0% (actual 4%) key = (1156, 393) value = 29741 (actual 29741 - 0% error) 4% (actual 4%) key = (1246, 423) value = 31726 (actual 31726 - 0% error) 4% (actual 4%) key = (1324, 449) value = 33698 (actual 33698 - 0% error) 4% (actual 4%) key = (1396, 473) value = 35700 (actual 35700 - 0% error) 4% (actual 4%) key = (1471, 498) value = 37620 (actual 37620 - 0% error) 4% (actual 4%) key = (1543, 522) value = 39641 (actual 39641 - 0% error) 4% (actual 4%) key = (1633, 552) value = 41669 (actual 41669 - 0% error) 0% (actual 0%) 3 parts: [0:0:1:0:0:0:0] 167 rows, 1 pages, 0 levels: () () () () () [0:0:2:0:0:0:0] 166 rows, 1 pages, 0 levels: () () () () () [0:0:3:0:0:0:0] 167 rows, 1 pages, 0 levels: () () () () () Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 33% (actual 33%) key = (553, 192) value = 167 (actual 166 - 0% error) 33% (actual 33%) key = (1087, 370) value = 333 (actual 332 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (553, 192) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1087, 370) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Flat: Touched 100% bytes, 3 pages RowCountHistogram: 33% (actual 33%) key = (556, 193) value = 167 (actual 167 - 0% error) 33% (actual 33%) key = (1090, 371) value = 333 (actual 333 - 0% error) 33% (actual 33%) DataSizeHistogram: 32% (actual 32%) key = (556, 193) value = 13565 (actual 13565 - 0% error) 33% (actual 33%) key = (1090, 371) value = 27505 (actual 27505 - 0% error) 33% (actual 33%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) Got : 24000 2106439 49449 38 44 Expected: 24000 2106439 49449 38 44 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 49449 20 23 Expected: 12816 1121048 49449 20 23 Got : 24000 3547100 81694 64 44 Expected: 24000 3547100 81694 64 44 { [1012, 1475), [1682, 1985), [2727, 3553), [3599, 3992), [5397, 7244), [9181, 9807), [9993, 10178), [12209, 14029), [15089, 15342), [16198, 16984), [17238, 18436), [21087, 21876), [23701, 23794) } Got : 9582 1425198 81694 26 17 Expected: 9582 1425198 81694 26 17 Got : 24000 2460139 23760 42 41 Expected: 24000 2460139 23760 42 41 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 23760 18 18 Expected: 10440 1060798 23760 18 18 Got : 24000 4054050 46562 68 43 Expected: 24000 4054050 46562 68 43 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2277890 46562 38 24 Expected: 13570 2277890 46562 38 24 Got : 24000 2106459 49449 38 44 Expected: 24000 2106459 49449 38 44 Got : 24000 2460219 23555 41 41 Expected: 24000 2460219 23555 41 41 Got : 24000 4054270 46543 66 43 Expected: 24000 4054270 46543 66 43 Got : 24000 2106479 49555 38 44 Expected: 24000 2106479 49555 38 44 Got : 24000 2460259 
23628 41 41 Expected: 24000 2460259 23628 41 41 Got : 24000 4054290 46640 65 43 Expected: 24000 4054290 46640 65 43 Got : 24000 2106439 66674 3 4 Expected: 24000 2106439 66674 3 4 { [2455, 2599), [2798, 3624), [4540, 4713), [5654, 7161), [8509, 8794), [8936, 9973), [11888, 14280), [14337, 14882), [15507, 16365), [17368, 19451), [19536, 20135), [20790, 21503), [21589, 23243) } Got : 12816 1121048 66674 2 2 Expected: 12816 1121048 66674 2 2 Got : 24000 2460139 33541 4 4 Expected: 24000 2460139 33541 4 4 { [1296, 2520), [3888, 4320), [5040, 6840), [6912, 7272), [10872, 11160), [11520, 12096), [12096, 13824), [15192, 15624), [17064, 17856), [18216, 19296), [19800, 20160), [20736, 21096), [21096, 22104) } Got : 10440 1060798 33541 1 1 Expected: 10440 1060798 33541 1 1 Got : 24000 4054050 64742 7 4 Expected: 24000 4054050 64742 7 4 { [460, 1518), [2300, 2484), [2760, 4002), [4600, 5842), [6302, 9752), [11178, 12328), [14582, 14858), [16790, 18032), [18216, 18446), [18722, 19504), [19504, 19964), [20378, 20470), [21344, 23506) } Got : 13570 2234982 64742 4 2 Expected: 13570 2234982 64742 4 2 |99.2%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> TExecutorDb::FullScan [GOOD] >> TSharedPageCache_Actor::InMemory_ReloadPagesLimitedInFly [GOOD] >> TExecutorDb::CoordinatorSimulation >> TSharedPageCache_Actor::IncrementFrequency_Active >> TSharedPageCache_Actor::IncrementFrequency_Active [GOOD] >> TSharedPageCache_Actor::IncrementFrequency_Passive [GOOD] >> TSharedPageCache_Transactions::One_Transaction_One_Key >> TSharedPageCache_Transactions::One_Transaction_One_Key [GOOD] >> TSharedPageCache_Transactions::One_Transaction_Two_Keys |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_crud.py::TestYdbCrudOperations::test_crud_operations >> TSharedPageCache_Transactions::One_Transaction_Two_Keys [GOOD] >> TSharedPageCache_Transactions::One_Transaction_Two_Keys_Many_Parts >> TSharedPageCache_Transactions::One_Transaction_Two_Keys_Many_Parts [GOOD] >> TSharedPageCache_Transactions::Two_Transactions_One_Key >> TSharedPageCache_Transactions::Two_Transactions_One_Key [GOOD] >> TSharedPageCache_Transactions::Two_Transactions_Two_Keys >> test_public_api.py::TestCRUDOperations::test_reuse_session_to_tx_leak [GOOD] >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TSharedPageCache_Transactions::Two_Transactions_Two_Keys [GOOD] >> TSharedPageCache_Transactions::Compaction >> TSharedPageCache_Transactions::Compaction [GOOD] >> Vacuum::StartVacuumNoTables [GOOD] >> Vacuum::StartVacuumNoTablesWithRestart [GOOD] >> Vacuum::StartVacuumLog >> Vacuum::StartVacuumLog [GOOD] >> Vacuum::StartVacuum [GOOD] >> Vacuum::StartVacuumMultipleFamilies >> Vacuum::StartVacuumMultipleFamilies [GOOD] >> Vacuum::StartVacuumMultipleTables [GOOD] >> Vacuum::StartVacuumWithFollowers [GOOD] >> Vacuum::StartVacuumMultipleTimes >> Vacuum::StartVacuumMultipleTimes [GOOD] >> Vacuum::StartVacuumEmptyTable [GOOD] >> Vacuum::StartVacuumWithRestarts [GOOD] >> Vacuum::StartVacuumRetryWithNotGreaterGenerations >> test_public_api.py::TestCRUDOperations::test_direct_leak_tx_but_no_actual_leak_by_best_efforts [GOOD] >> test_public_api.py::TestCRUDOperations::test_presented_in_cache [GOOD] >> KeyValueGRPCService::SimpleWriteListRangeV1 [GOOD] >> 
KeyValueGRPCService::SimpleWriteListRangeV2 >> Vacuum::StartVacuumRetryWithNotGreaterGenerations [GOOD] >> Vacuum::StartVacuumWithTabletGCErrors [GOOD] >> Vacuum::StartVacuumWithSysTabletGCErrors >> test_public_api.py::TestCRUDOperations::test_decimal_values_negative_stories [GOOD] >> test_public_api.py::TestCRUDOperations::test_decimal_values >> Vacuum::StartVacuumWithSysTabletGCErrors [GOOD] >> TVersions::WreckHead >> test_public_api.py::TestCRUDOperations::test_decimal_values [GOOD] >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children >> test_public_api.py::TestCRUDOperations::test_list_directory_with_children [GOOD] >> test_public_api.py::TestCRUDOperations::test_validate_describe_path_result >> test_public_api.py::TestCRUDOperations::test_validate_describe_path_result [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modifications_1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_acl_modification_2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success >> TDqPqReadActorTest::TestReadFromTopic [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_public_api.py::TestCRUDOperations::test_can_execute_valid_statement_after_invalid_success [GOOD] >> test_public_api.py::TestCRUDOperations::test_modify_permissions_3 [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_public_api.py::TestCRUDOperations::test_directory_that_doesnt_exists [GOOD] >> test_public_api.py::TestCRUDOperations::test_crud_acl_actions [GOOD] >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions >> TDqPqReadActorTest::TestReadFromTopicFromNow >> test_public_api.py::TestCRUDOperations::test_too_many_pending_transactions [GOOD] >> test_public_api.py::TestCRUDOperations::test_query_set1 |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_public_api.py::TestCRUDOperations::test_query_set1 [GOOD] >> test_public_api.py::TestCRUDOperations::test_queries_set2 |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/limits/py3test >> test_schemeshard_limits.py::TestSchemeShardLimitsCase1::test_too_large_acls [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/limits/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TDqPqReadActorTest::TestReadFromTopicFromNow [GOOD] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_read_table.py::TestReadTableWithTabletKills::test_read_table_async_simple[read_table] [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> DBase::Select [GOOD] >> DBase::Defaults [GOOD] >> DBase::Subsets [GOOD] >> DBase::Garbage [GOOD] >> DBase::WideKey >> test_drain.py::TestHive::test_drain_on_stop >> TDqPqReadActorTest::ReadWithFreeSpace >> test_public_api.py::TestCRUDOperations::test_queries_set2 [GOOD] >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure >> DBase::WideKey [GOOD] >> DBase::Outer [GOOD] >> DBase::VersionBasics 
>> DBase::VersionBasics [GOOD] >> DBase::VersionPureMem |99.2%| [TA] $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} >> DBase::VersionPureMem [GOOD] >> DBase::VersionPureParts |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |99.2%| [TA] {RESULT} $(B)/ydb/tests/functional/limits/test-results/py3test/{meta.json ... results_accumulator.log} |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC] [FAIL] |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_then_can_lookup_them [GOOD] |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TExecutorDb::CoordinatorSimulation [GOOD] >> TExecutorDb::RandomCoordinatorSimulation >> DBase::VersionPureParts [GOOD] >> DBase::VersionCompactedMem >> TTxDataShardPrefixKMeansScan::BuildToPosting [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToPostingWithOverlap >> TMLPWriterTests::WriteTwoMessage_OnePartition [GOOD] >> TMLPWriterTests::WriteTwoMessage_TwoPartition |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TExecutorDb::RandomOps [GOOD] >> TFlatCxxDatabaseTest::BasicSchemaTest [GOOD] >> TFlatCxxDatabaseTest::RenameColumnSchemaTest [GOOD] >> TFlatCxxDatabaseTest::SchemaFillerTest [GOOD] >> TFlatDatabaseDecimal::UpdateRead [GOOD] >> TFlatEraseCacheTest::BasicUsage [GOOD] >> TFlatEraseCacheTest::BasicUsageReverse [GOOD] >> TFlatEraseCacheTest::CacheEviction [GOOD] >> TFlatEraseCacheTest::StressGarbageCollection >> TFlatEraseCacheTest::StressGarbageCollection [GOOD] >> TFlatEraseCacheTest::StressGarbageCollectionWithStrings [GOOD] >> TFlatExecutorLeases::Basics >> DBase::VersionCompactedMem [GOOD] >> DBase::VersionCompactedParts >> TDqPqReadActorTest::ReadWithFreeSpace [GOOD] >> test_ttl.py::TestTTL::test_ttl[table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC] [FAIL] >> DBase::VersionCompactedParts [GOOD] >> DBase::KIKIMR_15506_MissingSnapshotKeys [GOOD] >> DBase::EraseCacheWithUncommittedChanges [GOOD] >> DBase::EraseCacheWithUncommittedChangesCompacted [GOOD] >> DBase::UncommittedChangesVisibility [GOOD] >> DBase::UncommittedChangesCommitWithUpdates [GOOD] >> DBase::ReplayNewTable [GOOD] >> DBase::SnapshotNewTable [GOOD] >> DBase::DropModifiedTable [GOOD] >> DBase::KIKIMR_15598_Many_MemTables |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.2%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TDqPqReadActorTest::ReadNonExistentTopic [GOOD] >> TDqPqReadActorTest::TestSaveLoadPqRead >> test_crud.py::TestYdbCrudOperations::test_crud_operations [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TFlatExecutorLeases::Basics [GOOD] >> TFlatExecutorLeases::BasicsLeaseTimeout >> 
KeyValueGRPCService::SimpleWriteListRangeV2 [GOOD] >> KeyValueGRPCService::SimpleGetStorageChannelStatusV1 |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TFlatExecutorLeases::BasicsLeaseTimeout [GOOD] >> TFlatExecutorLeases::BasicsInitialLease |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> DBase::KIKIMR_15598_Many_MemTables [GOOD] >> Memtable::Basics [GOOD] >> Memtable::BasicsReverse [GOOD] >> Memtable::Markers [GOOD] >> Memtable::Overlap [GOOD] >> Memtable::Wreck |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> Memtable::Wreck [GOOD] >> Memtable::Erased |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> Memtable::Erased [GOOD] >> NFwd_TBlobs::MemTableTest [GOOD] >> NFwd_TBlobs::Lower [GOOD] >> NFwd_TBlobs::Sieve [GOOD] >> NFwd_TBlobs::SieveFiltered [GOOD] >> NFwd_TBlobs::Basics [GOOD] >> NFwd_TBlobs::Simple [GOOD] >> NFwd_TBlobs::Shuffle [GOOD] >> NFwd_TBlobs::Grow [GOOD] >> NFwd_TBlobs::Trace [GOOD] >> NFwd_TBlobs::Filtered [GOOD] >> NFwd_TBTreeIndexCache::Basics [GOOD] >> NFwd_TBTreeIndexCache::IndexPagesLocator [GOOD] >> NFwd_TBTreeIndexCache::GetTwice [GOOD] >> NFwd_TBTreeIndexCache::ForwardTwice [GOOD] >> NFwd_TBTreeIndexCache::Forward_OnlyUsed [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done [GOOD] >> NFwd_TBTreeIndexCache::Skip_Done_None [GOOD] >> NFwd_TBTreeIndexCache::Skip_Keep [GOOD] >> NFwd_TBTreeIndexCache::Skip_Wait >> NFwd_TBTreeIndexCache::Skip_Wait [GOOD] >> NFwd_TBTreeIndexCache::Trace_BTree [GOOD] >> NFwd_TBTreeIndexCache::Trace_Data [GOOD] >> NFwd_TBTreeIndexCache::End [GOOD] >> NFwd_TBTreeIndexCache::Slices [GOOD] >> NFwd_TBTreeIndexCache::ManyApplies [GOOD] >> NFwd_TFlatIndexCache::Basics [GOOD] >> NFwd_TFlatIndexCache::IndexPagesLocator [GOOD] >> NFwd_TFlatIndexCache::GetTwice [GOOD] >> NFwd_TFlatIndexCache::ForwardTwice [GOOD] >> NFwd_TFlatIndexCache::Skip_Done [GOOD] >> NFwd_TFlatIndexCache::Skip_Done_None [GOOD] >> NFwd_TFlatIndexCache::Skip_Keep [GOOD] >> NFwd_TFlatIndexCache::Skip_Wait [GOOD] >> NFwd_TFlatIndexCache::Trace [GOOD] >> NFwd_TFlatIndexCache::End [GOOD] >> NFwd_TFlatIndexCache::Slices [GOOD] >> NFwd_TLoadedPagesCircularBuffer::Basics [GOOD] >> NOther::Blocks [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TFlatExecutorLeases::BasicsInitialLease [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> NOther::Blocks [GOOD] Test command err: Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 
0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | 
ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | > {34} | | | PageId: 21 RowCount: 36 DataSize: 900 ErasedRowCount: 0 | | | > {36} | | | PageId: 24 RowCount: 38 DataSize: 950 ErasedRowCount: 0 | | | > {38} | | | PageId: 25 RowCount: 40 DataSize: 1000 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} 
Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{7} Label{74 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{8} Label{84 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{9} Label{94 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{11} Label{114 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{12} Label{124 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{13} Label{134 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{15} Label{154 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{16} Label{164 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{17} Label{174 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{19} Label{194 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{20} Label{204 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{21} Label{214 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{24} Label{244 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{25} Label{254 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 2358b + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 98b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 150 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 0 RowCount: 2 DataSize: 50 ErasedRowCount: 0 | | | > {2} | | | PageId: 1 RowCount: 4 DataSize: 100 ErasedRowCount: 0 | | | > {4} | | | PageId: 2 RowCount: 6 DataSize: 150 ErasedRowCount: 0 | | > {6} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 300 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 3 RowCount: 8 DataSize: 200 ErasedRowCount: 0 | | | > {8} | | | PageId: 4 RowCount: 10 DataSize: 250 ErasedRowCount: 0 | | | > {10} | | | PageId: 5 RowCount: 12 DataSize: 300 ErasedRowCount: 0 | | > {12} | | + BTreeIndex{PageId: 14 RowCount: 18 DataSize: 450 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 7 RowCount: 14 DataSize: 350 ErasedRowCount: 0 | | | > {14} | | | PageId: 8 RowCount: 16 DataSize: 400 ErasedRowCount: 0 | | | > {16} | | | PageId: 9 RowCount: 18 DataSize: 450 ErasedRowCount: 0 | > {18} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 143b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 600 ErasedRowCount: 0} Label{13 rev 1, 143b} 
| | | PageId: 11 RowCount: 20 DataSize: 500 ErasedRowCount: 0 | | | > {20} | | | PageId: 12 RowCount: 22 DataSize: 550 ErasedRowCount: 0 | | | > {22} | | | PageId: 13 RowCount: 24 DataSize: 600 ErasedRowCount: 0 | | > {24} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 750 ErasedRowCount: 0} Label{13 rev 1, 143b} | | | PageId: 15 RowCount: 26 DataSize: 650 ErasedRowCount: 0 | | | > {26} | | | PageId: 16 RowCount: 28 DataSize: 700 ErasedRowCount: 0 | | | > {28} | | | PageId: 17 RowCount: 30 DataSize: 750 ErasedRowCount: 0 | | > {30} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 1000 ErasedRowCount: 0} Label{13 rev 1, 233b} | | | PageId: 19 RowCount: 32 DataSize: 800 ErasedRowCount: 0 | | | > {32} | | | PageId: 20 RowCount: 34 DataSize: 850 ErasedRowCount: 0 | | | ... 3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + 
Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 
Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} Part{[1:2:3:0:0:0:0] eph 0, 1000b 40r} data 1479b + FlatIndex{20} Label{3 rev 3, 453b} 21 rec | Page Row Bytes (Uint32) | 0 0 50b {0} | 1 2 50b {2} | 2 4 50b {4} | 3 6 50b {6} | 4 8 50b {8} | 5 10 50b {10} | 6 12 50b {12} | 7 14 50b {14} | 8 16 50b {16} | 9 18 50b {18} | 10 20 50b {20} | 11 22 50b {22} | 12 24 50b {24} | 13 26 50b {26} | 14 28 50b {28} | 15 30 50b {30} | 16 32 50b {32} | 17 34 50b {34} | 18 36 50b {36} | 19 38 50b {38} | 19 39 50b {39} + Rows{0} Label{04 rev 1, 50b}, [0, +2)row | ERowOp 1: {0} {Set 1 Uint32 : 0} | ERowOp 1: {1} {Set 1 Uint32 : 100} + Rows{1} Label{14 rev 1, 50b}, [2, +2)row | ERowOp 1: {2} {Set 1 Uint32 : 200} | ERowOp 1: {3} {Set 1 Uint32 : 300} + Rows{2} Label{24 rev 1, 50b}, [4, +2)row | ERowOp 1: {4} {Set 1 Uint32 : 400} | ERowOp 1: {5} {Set 1 Uint32 : 500} + Rows{3} Label{34 rev 1, 50b}, [6, +2)row | ERowOp 1: {6} {Set 1 Uint32 : 600} | ERowOp 1: {7} {Set 1 Uint32 : 700} + Rows{4} Label{44 rev 1, 50b}, [8, +2)row | ERowOp 1: {8} {Set 1 Uint32 : 800} | ERowOp 1: {9} {Set 1 Uint32 : 900} + Rows{5} Label{54 rev 1, 50b}, [10, +2)row | ERowOp 1: {10} {Set 1 Uint32 : 1000} | ERowOp 1: {11} {Set 1 Uint32 : 1100} + Rows{6} Label{64 rev 1, 50b}, [12, +2)row | ERowOp 1: {12} {Set 1 Uint32 : 1200} | ERowOp 1: {13} {Set 1 Uint32 : 1300} + Rows{7} Label{74 rev 1, 50b}, [14, +2)row | ERowOp 1: {14} {Set 1 Uint32 : 1400} | ERowOp 1: {15} {Set 1 Uint32 : 1500} + Rows{8} Label{84 rev 1, 50b}, [16, +2)row | ERowOp 1: {16} {Set 1 Uint32 : 1600} | ERowOp 1: {17} {Set 1 Uint32 : 1700} + Rows{9} Label{94 rev 1, 50b}, [18, +2)row | ERowOp 1: {18} {Set 1 Uint32 : 1800} | ERowOp 1: {19} {Set 1 Uint32 : 1900} + Rows{10} Label{104 rev 1, 50b}, [20, +2)row | ERowOp 1: {20} {Set 1 Uint32 : 2000} | ERowOp 1: {21} {Set 1 Uint32 : 2100} + Rows{11} Label{114 rev 1, 50b}, [22, +2)row | ERowOp 1: {22} {Set 1 Uint32 : 2200} | ERowOp 1: {23} {Set 1 Uint32 : 2300} + Rows{12} Label{124 rev 1, 50b}, [24, +2)row | ERowOp 1: {24} {Set 1 Uint32 : 2400} | ERowOp 1: {25} {Set 1 Uint32 : 
2500} + Rows{13} Label{134 rev 1, 50b}, [26, +2)row | ERowOp 1: {26} {Set 1 Uint32 : 2600} | ERowOp 1: {27} {Set 1 Uint32 : 2700} + Rows{14} Label{144 rev 1, 50b}, [28, +2)row | ERowOp 1: {28} {Set 1 Uint32 : 2800} | ERowOp 1: {29} {Set 1 Uint32 : 2900} + Rows{15} Label{154 rev 1, 50b}, [30, +2)row | ERowOp 1: {30} {Set 1 Uint32 : 3000} | ERowOp 1: {31} {Set 1 Uint32 : 3100} + Rows{16} Label{164 rev 1, 50b}, [32, +2)row | ERowOp 1: {32} {Set 1 Uint32 : 3200} | ERowOp 1: {33} {Set 1 Uint32 : 3300} + Rows{17} Label{174 rev 1, 50b}, [34, +2)row | ERowOp 1: {34} {Set 1 Uint32 : 3400} | ERowOp 1: {35} {Set 1 Uint32 : 3500} + Rows{18} Label{184 rev 1, 50b}, [36, +2)row | ERowOp 1: {36} {Set 1 Uint32 : 3600} | ERowOp 1: {37} {Set 1 Uint32 : 3700} + Rows{19} Label{194 rev 1, 50b}, [38, +2)row | ERowOp 1: {38} {Set 1 Uint32 : 3800} | ERowOp 1: {39} {Set 1 Uint32 : 3900} |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> TVersions::WreckHead [GOOD] >> TVersions::WreckHeadReverse >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] >> TFlatExecutorLeases::BasicsInitialLeaseTimeout [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseSleep |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_public_api.py::TestCRUDOperations::test_when_result_set_is_large_then_issue_occure [GOOD] >> TExecutorDb::RandomCoordinatorSimulation [GOOD] >> TExecutorDb::MultiPage >> TTxDataShardPrefixKMeansScan::BuildToPostingWithOverlap [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToBuild |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> DataShardStats::HasSchemaChanges_Columns [GOOD] >> DataShardStats::HasSchemaChanges_Families |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TExecutorDb::MultiPage [GOOD] >> TExecutorDb::EncodedPage |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_execute_scheme.py::TestExecuteSchemeOperations::test_create_table_if_it_is_created_fail_remove_from_key [GOOD] >> TExecutorDb::EncodedPage [GOOD] >> NPage::Encoded [GOOD] >> NPage::ABI_002 |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> test_drain.py::TestHive::test_drain_tablets >> NPage::ABI_002 [GOOD] >> NPage::GroupIdEncoding [GOOD] >> NPageCollection::Align [GOOD] >> NPageCollection::Meta |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC] [FAIL] >> NPageCollection::Meta [GOOD] >> NPageCollection::PagesToBlobsConverter [GOOD] >> NPageCollection::Grow [GOOD] >> NPageCollection::Groups [GOOD] >> NPageCollection::Chop [GOOD] >> NPageCollection::CookieAllocator [GOOD] >> 
NProto::LargeGlobId [GOOD] >> Redo::ABI_008 [GOOD] >> Self::Literals [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> TFlatExecutorLeases::BasicsInitialLeaseSleep [GOOD] >> TFlatExecutorLeases::BasicsInitialLeaseSleepTimeout |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/sql/py3test >> test_crud.py::TestYdbCrudOperations::test_crud_operations [GOOD] >> KeyValueGRPCService::SimpleGetStorageChannelStatusV1 [GOOD] >> KeyValueGRPCService::SimpleGetStorageChannelStatusV2 |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/sql/py3test |99.3%| [TM] {RESULT} ydb/tests/sql/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> Self::Literals [GOOD] Test command err: + BTreeIndex{PageId: 0 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385, 13 rev 1, 683b} | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | > {0, a, false, 0} | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | > {1, b, true, 10} | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | > {2, c, false, 20} | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | > {3, d, true, 30} | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | > {4, e, false, 40} | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | > {5, f, true, 50} | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | > {6, g, false, 60} | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | > {7, h, true, 70} | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > {8, i, false, 80} | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | > {9, j, true, 90} | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 + BTreeIndex{PageId: 9 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 116b} | + BTreeIndex{PageId: 5 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | + BTreeIndex{PageId: 0 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93, 13 rev 1, 179b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, a, false, 0} | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, b, true, 10} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | > {2, c, false, 20} | | + BTreeIndex{PageId: 1 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195, 13 rev 1, 179b} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, d, true, 30} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, e, false, 40} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | > {5, f, true, 50} | | + BTreeIndex{PageId: 2 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306, 13 rev 1, 179b} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | 
| | > {6, g, false, 60} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, h, true, 70} | | | PageId: 10008 RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | > {8, i, false, 80} | + BTreeIndex{PageId: 8 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 242b} | | + BTreeIndex{PageId: 3 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 179b} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, j, true, 90} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, k, false, 100} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, l, true, 110} | | + BTreeIndex{PageId: 4 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555, 13 rev 1, 179b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, m, false, 120} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, n, true, 130} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | > {14, o, false, 140} | | + BTreeIndex{PageId: 6 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693, 13 rev 1, 179b} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, p, true, 150} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, q, false, 160} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | > {17, r, true, 170} | | + BTreeIndex{PageId: 7 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840, 13 rev 1, 179b} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, s, false, 180} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, t, true, 190} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 + BTreeIndex{PageId: 15 RowCount: 15150 DataSize: 106050 GroupDataSize: 207050 ErasedRowCount: 8080, 13 rev 1, 174b} | + BTreeIndex{PageId: 12 RowCount: 9078 DataSize: 70278 GroupDataSize: 138278 ErasedRowCount: 4318, 13 rev 1, 690b} | | + BTreeIndex{PageId: 0 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426, 13 rev 1, 702b} | | | PageId: 10000 RowCount: 100 DataSize: 1000 GroupDataSize: 2000 ErasedRowCount: 30 | | | > {0, x, NULL, NULL} | | | PageId: 10001 RowCount: 201 DataSize: 2001 GroupDataSize: 4001 ErasedRowCount: 61 | | | > {1, xx, NULL, NULL} | | | PageId: 10002 RowCount: 303 DataSize: 3003 GroupDataSize: 6003 ErasedRowCount: 93 | | | > {2, xxx, NULL, NULL} | | | PageId: 10003 RowCount: 406 DataSize: 4006 GroupDataSize: 8006 ErasedRowCount: 126 | | | > {3, xxxx, NULL, NULL} | | | PageId: 10004 RowCount: 510 DataSize: 5010 GroupDataSize: 10010 ErasedRowCount: 160 | | | > {4, xxxxx, NULL, NULL} | | | PageId: 10005 RowCount: 615 DataSize: 6015 GroupDataSize: 12015 ErasedRowCount: 195 | | | > {5, xxxxxx, NULL, NULL} | | | PageId: 10006 RowCount: 721 DataSize: 7021 GroupDataSize: 14021 ErasedRowCount: 231 | | | > {6, xxxxxxx, NULL, NULL} | | | PageId: 10007 RowCount: 828 DataSize: 8028 GroupDataSize: 16028 ErasedRowCount: 268 | | | > {7, xxxxxxxx, NULL, NULL} | | | PageId: 10008 
RowCount: 936 DataSize: 9036 GroupDataSize: 18036 ErasedRowCount: 306 | | | > {8, xxxxxxxxx, NULL, NULL} | | | PageId: 10009 RowCount: 1045 DataSize: 10045 GroupDataSize: 20045 ErasedRowCount: 345 | | | > {9, xxxxxxxxxx, NULL, NULL} | | | PageId: 10010 RowCount: 1155 DataSize: 11055 GroupDataSize: 22055 ErasedRowCount: 385 | | | > {10, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10011 RowCount: 1266 DataSize: 12066 GroupDataSize: 24066 ErasedRowCount: 426 | | > {11, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 1 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891, 13 rev 1, 683b} | | | PageId: 10012 RowCount: 1378 DataSize: 13078 GroupDataSize: 26078 ErasedRowCount: 468 | | | > {12, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10013 RowCount: 1491 DataSize: 14091 GroupDataSize: 28091 ErasedRowCount: 511 | | | > {13, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10014 RowCount: 1605 DataSize: 15105 GroupDataSize: 30105 ErasedRowCount: 555 | | | > {14, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10015 RowCount: 1720 DataSize: 16120 GroupDataSize: 32120 ErasedRowCount: 600 | | | > {15, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10016 RowCount: 1836 DataSize: 17136 GroupDataSize: 34136 ErasedRowCount: 646 | | | > {16, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10017 RowCount: 1953 DataSize: 18153 GroupDataSize: 36153 ErasedRowCount: 693 | | | > {17, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10018 RowCount: 2071 DataSize: 19171 GroupDataSize: 38171 ErasedRowCount: 741 | | | > {18, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10019 RowCount: 2190 DataSize: 20190 GroupDataSize: 40190 ErasedRowCount: 790 | | | > {19, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10020 RowCount: 2310 DataSize: 21210 GroupDataSize: 42210 ErasedRowCount: 840 | | | > {20, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10021 RowCount: 2431 DataSize: 22231 GroupDataSize: 44231 ErasedRowCount: 891 | | > {21, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 2 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395, 13 rev 1, 689b} | | | PageId: 10022 RowCount: 2553 DataSize: 23253 GroupDataSize: 46253 ErasedRowCount: 943 | | | > {22, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10023 RowCount: 2676 DataSize: 24276 GroupDataSize: 48276 ErasedRowCount: 996 | | | > {23, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10024 RowCount: 2800 DataSize: 25300 GroupDataSize: 50300 ErasedRowCount: 1050 | | | > {24, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10025 RowCount: 2925 DataSize: 26325 GroupDataSize: 52325 ErasedRowCount: 1105 | | | > {25, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10026 RowCount: 3051 DataSize: 27351 GroupDataSize: 54351 ErasedRowCount: 1161 | | | > {26, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10027 RowCount: 3178 DataSize: 28378 GroupDataSize: 56378 ErasedRowCount: 1218 | | | > {27, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10028 RowCount: 3306 DataSize: 29406 GroupDataSize: 58406 ErasedRowCount: 1276 | | | > {28, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10029 RowCount: 3435 DataSize: 30435 GroupDataSize: 60435 ErasedRowCount: 1335 | | | > {29, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10030 RowCount: 3565 DataSize: 31465 GroupDataSize: 62465 ErasedRowCount: 1395 | | > {30, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 3 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911, 13 rev 1, 669b} | | | PageId: 10031 RowCount: 3696 DataSize: 32496 GroupDataSize: 64496 ErasedRowCount: 1456 | | | > {31, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10032 RowCount: 3828 DataSize: 33528 GroupDataSize: 66528 ErasedRowCount: 
1518 | | | > {32, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10033 RowCount: 3961 DataSize: 34561 GroupDataSize: 68561 ErasedRowCount: 1581 | | | > {33, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10034 RowCount: 4095 DataSize: 35595 GroupDataSize: 70595 ErasedRowCount: 1645 | | | > {34, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10035 RowCount: 4230 DataSize: 36630 GroupDataSize: 72630 ErasedRowCount: 1710 | | | > {35, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10036 RowCount: 4366 DataSize: 37666 GroupDataSize: 74666 ErasedRowCount: 1776 | | | > {36, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10037 RowCount: 4503 DataSize: 38703 GroupDataSize: 76703 ErasedRowCount: 1843 | | | > {37, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10038 RowCount: 4641 DataSize: 39741 GroupDataSize: 78741 ErasedRowCount: 1911 | | > {38, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 4 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491, 13 rev 1, 725b} | | | PageId: 10039 RowCount: 4780 DataSize: 40780 GroupDataSize: 80780 ErasedRowCount: 1980 | | | > {39, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10040 RowCount: 4920 DataSize: 41820 GroupDataSize: 82820 ErasedRowCount: 2050 | | | > {40, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10041 RowCount: 5061 DataSize: 42861 GroupDataSize: 84861 ErasedRowCount: 2121 | | | > {41, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10042 RowCount: 5203 DataSize: 43903 GroupDataSize: 86903 ErasedRowCount: 2193 | | | > {42, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10043 RowCount: 5346 DataSize: 44946 GroupDataSize: 88946 ErasedRowCount: 2266 | | | > {43, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10044 RowCount: 5490 DataSize: 45990 GroupDataSize: 90990 ErasedRowCount: 2340 | | | > {44, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10045 RowCount: 5635 DataSize: 47035 GroupDataSize: 93035 ErasedRowCount: 2415 | | | > {45, xxxxxxxxxx.., NULL, NULL} | | | PageId: 10046 RowCount: 5781 DataSize: 48081 GroupDataSize: 95081 ErasedRowCount: 2491 | | > {46, xxxxxxxxxx.., NULL, NULL} | | + BTreeIndex{PageId: 5 RowCount: 6831 DataSize: 55431 GroupDataSize: 109431 ErasedRowCount: 3051, 13 ... 
ams{2: gen 0 epoch +inf, 0 parts} step 3, product {1 parts epoch 2} done 00000.155 II| TABLET_EXECUTOR: Leader{1:2:6} starting compaction 00000.155 II| TABLET_EXECUTOR: Leader{1:2:7} starting Scan{3 on 2, Compact{1.2.6, eph 1}} 00000.155 II| TABLET_EXECUTOR: Leader{1:2:7} started compaction 3 00000.155 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 1}} begin on TSubset{head 0, 0m 1p 0c} 00000.156 II| TABLET_EXECUTOR: Leader{1:2:7} starting compaction 00000.156 II| TABLET_EXECUTOR: Leader{1:2:8} starting Scan{5 on 2, Compact{1.2.7, eph 2}} 00000.156 II| TABLET_EXECUTOR: Leader{1:2:8} started compaction 5 00000.156 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.7, eph 2}} begin on TSubset{head 3, 1m 0p 0c} 00000.181 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.7, eph 2}} end=Done, 1r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 0 of 0 ~1p 00000.182 II| OPS_COMPACT: Compact{1.2.7, eph 2} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.203 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 1}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.203 II| OPS_COMPACT: Compact{1.2.6, eph 1} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.327 II| TABLET_EXECUTOR: Leader{1:2:9} Compact 5 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 7, product {1 parts epoch 3} done 00000.329 II| TABLET_EXECUTOR: Leader{1:2:10} Compact 3 on TGenCompactionParams{2: gen 1 epoch 0, 1 parts} step 6, product {1 parts epoch 0} done 00000.329 II| TABLET_EXECUTOR: Leader{1:2:11} starting compaction 00000.329 II| TABLET_EXECUTOR: Leader{1:2:12} starting Scan{7 on 2, Compact{1.2.11, eph 3}} 00000.329 II| TABLET_EXECUTOR: Leader{1:2:12} started compaction 7 00000.329 II| TABLET_OPS_HOST: Scan{7 on 2, Compact{1.2.11, eph 3}} begin on TSubset{head 4, 1m 0p 0c} 00000.347 II| TABLET_EXECUTOR: Leader{1:2:12} starting compaction 00000.347 II| TABLET_EXECUTOR: Leader{1:2:13} starting Scan{9 on 2, Compact{1.2.12, eph 2}} 00000.347 II| TABLET_EXECUTOR: Leader{1:2:13} started compaction 9 00000.347 II| TABLET_OPS_HOST: Scan{7 on 2, Compact{1.2.11, eph 3}} end=Done, 1r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 0 of 0 ~1p 00000.347 II| OPS_COMPACT: Compact{1.2.11, eph 3} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.352 II| TABLET_OPS_HOST: Scan{9 on 2, Compact{1.2.12, eph 2}} begin on TSubset{head 0, 0m 1p 0c} 00000.352 II| TABLET_EXECUTOR: Leader{1:2:13} starting compaction 00000.352 II| TABLET_EXECUTOR: Leader{1:2:14} starting Scan{11 on 2, Compact{1.2.13, eph 1}} 00000.352 II| TABLET_EXECUTOR: Leader{1:2:14} started compaction 11 00000.352 II| TABLET_OPS_HOST: Scan{11 on 2, Compact{1.2.13, eph 1}} begin on TSubset{head 0, 0m 1p 0c} 00000.389 II| TABLET_EXECUTOR: Leader{1:2:14} Compact 7 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 11, product {1 parts epoch 4} done 00000.411 II| TABLET_OPS_HOST: Scan{11 on 2, Compact{1.2.13, eph 1}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 
of 0 ~2p 00000.412 II| OPS_COMPACT: Compact{1.2.13, eph 1} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.414 II| TABLET_OPS_HOST: Scan{9 on 2, Compact{1.2.12, eph 2}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.415 II| OPS_COMPACT: Compact{1.2.12, eph 2} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.421 II| TABLET_EXECUTOR: Leader{1:2:15} Compact 11 on TGenCompactionParams{2: gen 2 epoch 0, 1 parts} step 13, product {1 parts epoch 0} done 00000.422 II| TABLET_EXECUTOR: Leader{1:2:16} Compact 9 on TGenCompactionParams{2: gen 1 epoch 0, 1 parts} step 12, product {1 parts epoch 0} done 00000.423 II| TABLET_EXECUTOR: Leader{1:2:17} starting compaction 00000.423 II| TABLET_EXECUTOR: Leader{1:2:18} starting Scan{13 on 2, Compact{1.2.17, eph 3}} 00000.423 II| TABLET_EXECUTOR: Leader{1:2:18} started compaction 13 00000.423 II| TABLET_OPS_HOST: Scan{13 on 2, Compact{1.2.17, eph 3}} begin on TSubset{head 0, 0m 1p 0c} 00000.423 II| TABLET_EXECUTOR: Leader{1:2:18} starting compaction 00000.423 II| TABLET_EXECUTOR: Leader{1:2:19} starting Scan{15 on 2, Compact{1.2.18, eph 2}} 00000.423 II| TABLET_EXECUTOR: Leader{1:2:19} started compaction 15 00000.423 II| TABLET_OPS_HOST: Scan{15 on 2, Compact{1.2.18, eph 2}} begin on TSubset{head 0, 0m 2p 0c} 00000.460 II| TABLET_OPS_HOST: Scan{13 on 2, Compact{1.2.17, eph 3}} end=Done, 1r seen, TFwd{fetch=9.54MiB,saved=9.54MiB,usage=9.54MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.460 II| OPS_COMPACT: Compact{1.2.17, eph 3} end=Done, 5 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (10000197 10000008 0)b }, ecr=1.000 00000.496 II| TABLET_EXECUTOR: Leader{1:2:19} Compact 13 on TGenCompactionParams{2: gen 1 epoch 0, 1 parts} step 17, product {1 parts epoch 0} done 00000.518 II| TABLET_OPS_HOST: Scan{15 on 2, Compact{1.2.18, eph 2}} end=Done, 2r seen, TFwd{fetch=19.1MiB,saved=19.1MiB,usage=19.1MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=5}, trace 0 of 0 ~3p 00000.518 II| OPS_COMPACT: Compact{1.2.18, eph 2} end=Done, 6 blobs 2r (max 2), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (20000244 20000016 0)b }, ecr=1.000 00000.525 II| TABLET_EXECUTOR: Leader{1:2:20} Compact 15 on TGenCompactionParams{2: gen 2 epoch 0, 2 parts} step 18, product {1 parts epoch 0} done 00000.526 II| TABLET_EXECUTOR: Leader{1:2:21} starting compaction 00000.526 II| TABLET_EXECUTOR: Leader{1:2:22} starting Scan{17 on 2, Compact{1.2.21, eph 3}} 00000.526 II| TABLET_EXECUTOR: Leader{1:2:22} started compaction 17 00000.526 II| TABLET_OPS_HOST: Scan{17 on 2, Compact{1.2.21, eph 3}} begin on TSubset{head 0, 0m 2p 0c} 00000.591 II| TABLET_OPS_HOST: Scan{17 on 2, Compact{1.2.21, eph 3}} end=Done, 2r seen, TFwd{fetch=19.1MiB,saved=19.1MiB,usage=19.1MiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=4}, trace 0 of 0 ~3p 00000.591 II| OPS_COMPACT: Compact{1.2.21, eph 3} end=Done, 6 blobs 2r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 0 +0, (20000244 20000016 0)b }, ecr=1.000 00000.594 II| TABLET_EXECUTOR: Leader{1:2:22} suiciding, Waste{2:0, 30001493b +(33, 60120792b), 21 trc, -60120792b acc} 00000.594 II| 
TABLET_EXECUTOR: Leader{1:2:22} cancelling compaction 17 00000.601 II| FAKE_ENV: Model starts soft shutdown on level 8 of 8, left 2 actors 00000.601 NN| TABLET_SAUSAGECACHE: Poison cache serviced 23 reqs hit {6 10000223b} miss {17 90000462b} in-memory miss {0 0b} 00000.601 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.601 II| FAKE_ENV: DS.0 gone, left {3075b, 21}, put {3095b, 22} 00000.601 II| FAKE_ENV: DS.1 gone, left {122826b, 31}, put {122826b, 31} 00000.606 II| FAKE_ENV: DS.2 gone, left {110001012b, 29}, put {110001012b, 29} 00000.630 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.630 II| FAKE_ENV: All BS storage groups are stopped 00000.630 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.630 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 82}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:05:02.350782Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: TNanny initiates TDummy tablet 72057594037927937 birth 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.008 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.008 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.074 II| TABLET_EXECUTOR: Leader{1:2:3} starting compaction 00000.075 II| TABLET_EXECUTOR: Leader{1:2:4} starting Scan{1 on 2, Compact{1.2.3, eph 1}} 00000.075 II| TABLET_EXECUTOR: Leader{1:2:4} started compaction 1 00000.075 II| TABLET_OPS_HOST: Scan{1 on 2, Compact{1.2.3, eph 1}} begin on TSubset{head 2, 1m 0p 0c} 00000.087 II| TABLET_OPS_HOST: Scan{1 on 2, Compact{1.2.3, eph 1}} end=Done, 1r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 0 of 0 ~1p 00000.088 II| OPS_COMPACT: Compact{1.2.3, eph 1} end=Done, 2 blobs 1r (max 1), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (39360 0 0)b }, ecr=0.004 00000.092 II| TABLET_EXECUTOR: Leader{1:2:4} Compact 1 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 3, product {1 parts epoch 2} done 00000.153 II| TABLET_EXECUTOR: Leader{1:2:6} starting compaction 00000.153 II| TABLET_EXECUTOR: Leader{1:2:7} starting Scan{3 on 2, Compact{1.2.6, eph 2}} 00000.153 II| TABLET_EXECUTOR: Leader{1:2:7} started compaction 3 00000.154 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 2}} begin on TSubset{head 3, 1m 1p 0c} 00000.208 II| TABLET_OPS_HOST: Scan{3 on 2, Compact{1.2.6, eph 2}} end=Done, 2r seen, TFwd{fetch=38.3KiB,saved=38.3KiB,usage=38.3KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=2}, trace 0 of 0 ~2p 00000.208 II| OPS_COMPACT: Compact{1.2.6, eph 2} end=Done, 2 blobs 2r (max 2), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (78660 0 0)b }, ecr=0.004 00000.209 II| TABLET_EXECUTOR: Leader{1:2:7} Compact 3 on TGenCompactionParams{2: gen 0 epoch +inf, 1 parts} step 6, product {1 parts epoch 3} done 00000.288 II| TABLET_EXECUTOR: Leader{1:2:9} starting compaction 00000.288 II| TABLET_EXECUTOR: Leader{1:2:10} starting Scan{5 on 2, Compact{1.2.9, eph 3}} 00000.288 II| TABLET_EXECUTOR: Leader{1:2:10} started compaction 5 00000.288 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.9, eph 3}} begin on TSubset{head 4, 1m 1p 0c} 00000.369 II| TABLET_OPS_HOST: Scan{5 
on 2, Compact{1.2.9, eph 3}} end=Done, 2r seen, TFwd{fetch=76.8KiB,saved=76.8KiB,usage=76.8KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=3}, trace 0 of 0 ~2p 00000.369 II| OPS_COMPACT: Compact{1.2.9, eph 3} end=Done, 2 blobs 2r (max 3), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 1 pk, lobs 0 +0, (78660 0 0)b }, ecr=0.004 00000.375 II| TABLET_EXECUTOR: Leader{1:2:10} Compact 5 on TGenCompactionParams{2: gen 0 epoch +inf, 1 parts} step 9, product {1 parts epoch 4} done 00000.431 II| TABLET_EXECUTOR: Leader{1:2:11} suiciding, Waste{2:0, 79404b +(7, 236400b), 10 trc, -236400b acc} 00000.437 II| FAKE_ENV: Model starts soft shutdown on level 8 of 8, left 2 actors 00000.437 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {4 117917b} miss {0 0b} in-memory miss {0 0b} 00000.437 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.437 II| FAKE_ENV: DS.0 gone, left {1034b, 10}, put {1054b, 11} 00000.437 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.437 II| FAKE_ENV: DS.1 gone, left {316163b, 14}, put {316163b, 14} 00000.442 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.442 II| FAKE_ENV: All BS storage groups are stopped 00000.442 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.442 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 40}, stopped |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_crud.py::TestSelect::test_advanced_select_failed[select distinct b, a from (select a, b from t1 union all select b, a from t1 order by b) order by B-Column B is not in source column set.*] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_crud.py::TestSelect::test_advanced_select_failed[select count(a, b) from t1-Aggregation function Count requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(a, b) from t1-Aggregation function Min requires exactly 1 argument] [GOOD] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/ttl/py3test >> test_ttl.py::TestTTL::test_ttl[table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC] [FAIL] >> test_crud.py::TestSelect::test_advanced_select_failed[select min(*) from t1-.*is not allowed here] [GOOD] |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/ttl/py3test >> TFlatExecutorLeases::BasicsInitialLeaseSleepTimeout [GOOD] >> TFlatTableDatetime::TestDate >> TFlatTableDatetime::TestDate [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundSnapshot [GOOD] >> 
TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen1 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionToRegular [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 >> TMLPWriterTests::WriteTwoMessage_TwoPartition [GOOD] >> TMLPWriterTests::WriteTwoMessage_Deduplicated >> TFlatTableExecutor_BackgroundCompactions::TestRunBackgroundCompactionGen2 [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundSnapshotPriorityByTime [GOOD] >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TFlatTableExecutor_BackgroundCompactions::TestChangeBackgroundCompactionPriorityByTime [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default |99.3%| [TA] $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... results_accumulator.log} >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_Default [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True |99.3%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... results_accumulator.log} |99.3%| [TA] {RESULT} $(B)/ydb/tests/datashard/ttl/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> TVersions::WreckHeadReverse [GOOD] >> TVersions::Wreck2 |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_session_grace_shutdown.py::TestIdle::test_idle_shutdown_of_session [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> KeyValueGRPCService::SimpleGetStorageChannelStatusV2 [GOOD] >> KeyValueGRPCService::SimpleCreateAlterDropVolume >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_False_EnableLocalDBFlatIndex_False [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff |99.3%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_TurnOff [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] >> 
test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] >> TFlatTableExecutor_BTreeIndex::EnableLocalDBBtreeIndex_True_Generations [GOOD] >> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache >> TFlatTableExecutor_CachePressure::TestNotEnoughLocalCache [GOOD] >> TFlatTableExecutor_Cold::ColdBorrowScan >> TFlatTableExecutor_Cold::ColdBorrowScan [GOOD] >> TFlatTableExecutor_ColumnGroups::TestManyRows |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> TFlatTableExecutor_ColumnGroups::TestManyRows [GOOD] >> TFlatTableExecutor_CompactionScan::TestCompactionScan >> TTxDataShardPrefixKMeansScan::BuildToBuild [GOOD] >> TTxDataShardPrefixKMeansScan::BuildToBuildWithOverlap |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TFlatTableExecutor_CompactionScan::TestCompactionScan [GOOD] >> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows >> TFlatTableExecutor_CompressedSelectRows::TestCompressedSelectRows [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionDirect [GOOD] >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue >> TFlatTableExecutor_Exceptions::TestTabletExecuteExceptionEnqueue [GOOD] >> TFlatTableExecutor_ExecutorTxLimit::TestExecutorTxLimit [GOOD] >> TFlatTableExecutor_Follower::BasicFollowerRead [GOOD] >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles >> TFlatTableExecutor_Follower::FollowerEarlyRebootHoles [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot >> TMLPWriterTests::WriteTwoMessage_Deduplicated [GOOD] >> TMLPWriterTests::Deduplicated_Reboot >> TFlatTableExecutor_Follower::FollowerAttachOnTxQueueScanSnapshot [GOOD] >> TFlatTableExecutor_Follower::FollowerAttachAfterLoan [GOOD] >> TFlatTableExecutor_Follower::FollowerPromoteToLeaderWhileLoadingPages >> TFlatTableExecutor_Follower::FollowerPromoteToLeaderWhileLoadingPages [GOOD] >> TFlatTableExecutor_Gc::TestFailedGcAfterReboot >> TFlatTableExecutor_Gc::TestFailedGcAfterReboot [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex >> test_public_api.py::TestSessionNotFound::test_session_not_found |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TFlatTableExecutor_IndexLoading::CalculateReadSize_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex >> 
TFlatTableExecutor_IndexLoading::CalculateReadSize_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> KeyValueGRPCService::SimpleCreateAlterDropVolume [GOOD] >> KeyValueGRPCService::SimpleListPartitions [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Keep::test_no_degraded_groups_after_shutdown [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet ------- [TM] {asan, default-linux-x86_64, release} ydb/services/keyvalue/ut/unittest >> KeyValueGRPCService::SimpleListPartitions [GOOD] Test command err: 2026-01-04T03:01:37.287333Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591336215693823067:2078];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:01:37.287413Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T03:01:37.504929Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T03:01:37.684614Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:37.684736Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:37.703704Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:37.752810Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:37.754282Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T03:01:37.788957Z node 1 :GRPC_SERVER NOTICE: grpc_request_proxy.cpp:372: Grpc request proxy started, nodeid# 1, serve as static node 2026-01-04T03:01:37.789477Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:603: Subscribe to /Root 2026-01-04T03:01:37.789763Z node 1 :GRPC_SERVER NOTICE: grpc_request_proxy.cpp:372: Grpc request proxy started, nodeid# 1, serve as static node 2026-01-04T03:01:37.789938Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:603: Subscribe to /Root 2026-01-04T03:01:37.791913Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:408: Subscribed for config changes 2026-01-04T03:01:37.791978Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:416: Updated app config 2026-01-04T03:01:37.792053Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:408: Subscribed for config changes 2026-01-04T03:01:37.792065Z node 1 :GRPC_SERVER INFO: grpc_request_proxy.cpp:416: Updated app config 
2026-01-04T03:01:37.796494Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:425: Got proxy service configuration 2026-01-04T03:01:37.796526Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:425: Got proxy service configuration 2026-01-04T03:01:37.799094Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-04T03:01:37.799142Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-04T03:01:37.799150Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-04T03:01:37.799160Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-04T03:01:37.862407Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-04T03:01:37.862541Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-04T03:01:37.862553Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-04T03:01:37.862627Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-04T03:01:38.071871Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: (empty maybe) 2026-01-04T03:01:38.071895Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: (empty maybe) 2026-01-04T03:01:38.071963Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:204: failed to initialize from file: (empty maybe) 2026-01-04T03:01:38.072072Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T03:01:38.297417Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:01:38.518922Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:01:38.542599Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-04T03:01:38.542681Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-04T03:01:38.542691Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-04T03:01:38.542733Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-04T03:01:38.595979Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# MakeDirectoryRequest, traceId# 01ke3f8251bj91a0790t2xyn01, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:37532, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-04T03:01:38.610335Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-04T03:01:38.610459Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-04T03:01:38.610472Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-04T03:01:38.610526Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for 
/Root - no PublicKeys 2026-01-04T03:01:38.619219Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-04T03:01:38.619310Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-04T03:01:38.619328Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-04T03:01:38.619405Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-04T03:01:38.628557Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# CreateVolumeRequest, traceId# 01ke3f8261c1pnf86mpntrgwn4, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:37532, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-04T03:01:38.633852Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateSolomonVolume, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_solomon.cpp:380) 2026-01-04T03:01:38.640146Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-04T03:01:38.640241Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-04T03:01:38.640260Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:535: SchemeBoardUpdate /Root 2026-01-04T03:01:38.640295Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:564: Can't update SecurityState for /Root - no PublicKeys 2026-01-04T03:01:38.675162Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268828672 event# NKikimr::TEvTablet::TEvBoot 2026-01-04T03:01:38.676108Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# ListDirectoryRequest, traceId# 01ke3f827k07f93b2wmk0715dt, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:37532, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-04T03:01:38.678256Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268828673 event# NKikimr::TEvTablet::TEvRestored 2026-01-04T03:01:38.679021Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:65: KeyValue# 72075186224037888 TTxInit flat Execute 2026-01-04T03:01:38.679038Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:68: KeyValue# 72075186224037888 TTxInit flat BuildScheme 2026-01-04T03:01:38.681149Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:93: KeyValue# 72075186224037888 TTxInit flat Execute returns true 2026-01-04T03:01:38.681886Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268828684 event# NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T03:01:38.681980Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 1 Channel# 2 Status# OK} 2026-01-04T03:01:38.682008Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 1 Channel# 2 Status# OK} 2026-01-04T03:01:38.682482Z node 1 :KEYVALUE DEBUG: 
keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-04T03:01:38.682519Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-04T03:01:38.683896Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 1 Channel# 2 Status# OK} 2026-01-04T03:01:38.683925Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 1 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-04T03:01:38.683982Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037888 Handle TEvCollectGarbageResult Cookie# 2 Marker# KV52 2026-01-04T03:01:38.684013Z node 1 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 2 Marker# KV50 2026-01-04T03:01:38.684033Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037888 Handle TEvCollectGarbageResult Cookie# 1 Marker# KV52 2026-01-04T03:01:38.684042Z node 1 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 1 Marker# KV50 2026-01-04T03:01:38.684051Z node 1 :KEYVALUE DEBUG: keyvalue_state.cpp:719: KeyValue# 72075186224037888 SendCutHistory Marker# KV51 2026-01-04T03:01:38.684083Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:324: KeyValue# 72075186224037888 TTxRegisterInitialGCCompletion Execute Wait iteration# 0 2026-01-04T03:01:38.690611Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:332: KeyValue# 72075186224037888 TTxRegisterInitialGCCompletion Complete 2026-01-04T03:01:38.692806Z node 1 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# ExecuteTransactionRequest, traceId# 01ke3f8284010cggdajex4rxkk, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:37532, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-04T03:01:38.695582Z node 1 :KEYVALUE DEBUG: keyvalue_flat_impl.h:646: KeyValue# ... 
h:65: KeyValue# 72075186224037889 TTxInit flat Execute 2026-01-04T03:05:15.434150Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:78: KeyValue# 72075186224037889 TTxInit flat ReadDb Tree 2026-01-04T03:05:15.434347Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:93: KeyValue# 72075186224037889 TTxInit flat Execute returns true 2026-01-04T03:05:15.435964Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2026-01-04T03:05:15.436050Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2026-01-04T03:05:15.436398Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-04T03:05:15.436433Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-04T03:05:15.436457Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2026-01-04T03:05:15.436484Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2026-01-04T03:05:15.436508Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037888 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2026-01-04T03:05:15.436525Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037888 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2026-01-04T03:05:15.439111Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2026-01-04T03:05:15.439159Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-04T03:05:15.439183Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2026-01-04T03:05:15.439209Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037888 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037888 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 
2026-01-04T03:05:15.439304Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037888 Handle TEvCollectGarbageResult Cookie# 2 Marker# KV52 2026-01-04T03:05:15.439337Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 4 Marker# KV50 2026-01-04T03:05:15.439349Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037888 Handle TEvCollectGarbageResult Cookie# 1 Marker# KV52 2026-01-04T03:05:15.439361Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 3 Marker# KV50 2026-01-04T03:05:15.439376Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037888 Handle TEvCollectGarbageResult Cookie# 1 Marker# KV52 2026-01-04T03:05:15.439385Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 2 Marker# KV50 2026-01-04T03:05:15.439394Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037888 Handle TEvCollectGarbageResult Cookie# 2 Marker# KV52 2026-01-04T03:05:15.439405Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037888 InitialCollectsSent# 1 Marker# KV50 2026-01-04T03:05:15.439430Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:719: KeyValue# 72075186224037888 SendCutHistory Marker# KV51 2026-01-04T03:05:15.439499Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:324: KeyValue# 72075186224037888 TTxRegisterInitialGCCompletion Execute 2026-01-04T03:05:15.440387Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-04T03:05:15.440464Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037889 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-04T03:05:15.440514Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2026-01-04T03:05:15.440541Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037889 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2026-01-04T03:05:15.440572Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2026-01-04T03:05:15.440598Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037889 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2026-01-04T03:05:15.440637Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:616: KeyValue# 72075186224037889 StateInit flat event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2026-01-04T03:05:15.440674Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:384: KeyValue# 72075186224037889 Enqueue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 
72075186224037889 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2026-01-04T03:05:15.440853Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037889 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 0 Channel# 2 Status# OK} 2026-01-04T03:05:15.440900Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037889 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 1 Channel# 3 Status# OK} 2026-01-04T03:05:15.440934Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037889 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 2 Channel# 2 Status# OK} 2026-01-04T03:05:15.440961Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:672: KeyValue# 72075186224037889 Dequeue, event type# 268632582 event# TEvCollectGarbageResult {TabletId# 72075186224037889 RecordGeneration# 2 PerGenerationCounter# 3 Channel# 3 Status# OK} 2026-01-04T03:05:15.441023Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037889 Handle TEvCollectGarbageResult Cookie# 1 Marker# KV52 2026-01-04T03:05:15.441043Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037889 InitialCollectsSent# 4 Marker# KV50 2026-01-04T03:05:15.441058Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037889 Handle TEvCollectGarbageResult Cookie# 1 Marker# KV52 2026-01-04T03:05:15.441069Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037889 InitialCollectsSent# 3 Marker# KV50 2026-01-04T03:05:15.441079Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037889 Handle TEvCollectGarbageResult Cookie# 2 Marker# KV52 2026-01-04T03:05:15.441086Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037889 InitialCollectsSent# 2 Marker# KV50 2026-01-04T03:05:15.441100Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:482: KeyValue# 72075186224037889 Handle TEvCollectGarbageResult Cookie# 2 Marker# KV52 2026-01-04T03:05:15.441108Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:696: KeyValue# 72075186224037889 InitialCollectsSent# 1 Marker# KV50 2026-01-04T03:05:15.441131Z node 65 :KEYVALUE DEBUG: keyvalue_state.cpp:719: KeyValue# 72075186224037889 SendCutHistory Marker# KV51 2026-01-04T03:05:15.441189Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:324: KeyValue# 72075186224037889 TTxRegisterInitialGCCompletion Execute 2026-01-04T03:05:15.442455Z node 65 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# DescribeVolumeRequest, traceId# 01ke3fenxj874mxfcbc3gvqfmw, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:53322, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-04T03:05:15.444624Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:332: KeyValue# 72075186224037888 TTxRegisterInitialGCCompletion Complete 2026-01-04T03:05:15.444718Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:332: KeyValue# 72075186224037889 TTxRegisterInitialGCCompletion Complete 2026-01-04T03:05:15.446851Z node 65 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# DropVolumeRequest, traceId# 01ke3fenxpe901qhctajrad4j9, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:53322, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), 
timeout# undef 2026-01-04T03:05:15.449369Z node 65 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpDropSolomonVolume, opId: 281474976710662:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_drop_solomon.cpp:184) 2026-01-04T03:05:15.537518Z node 65 :GRPC_SERVER DEBUG: grpc_request_proxy.cpp:641: Got grpc request# ListDirectoryRequest, traceId# 01ke3fep0hfksztyska7wc75zq, sdkBuildInfo# undef, state# AS_NOT_PERFORMED, database# undef, peer# ipv6:[::1]:53322, grpcInfo# grpc-c++/1.54.3 grpc-c/31.0.0 (linux; chttp2), timeout# undef 2026-01-04T03:05:15.539692Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:365: KeyValue# 72075186224037889 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2026-01-04T03:05:15.540408Z node 65 :KEYVALUE DEBUG: keyvalue_flat_impl.h:365: KeyValue# 72075186224037888 OnTabletDead NKikimr::TEvTablet::TEvTabletDead 2026-01-04T03:05:15.540498Z node 65 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 65, TabletId: 72075186224037889 not found 2026-01-04T03:05:15.540537Z node 65 :HIVE WARN: hive_impl.cpp:519: HIVE#72057594037968897 Handle TEvLocal::TEvTabletStatus from node 65, TabletId: 72075186224037888 not found |99.4%| [TM] {RESULT} ydb/services/keyvalue/ut/unittest |99.4%| [TM] {BAZEL_UPLOAD} ydb/services/keyvalue/ut/unittest >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> TVersions::Wreck2 [GOOD] >> TVersions::Wreck2Reverse >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] >> DataShardStats::HasSchemaChanges_Families [GOOD] >> DataShardStats::BackupTableStatsReportInterval |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_then_kill_system_tablets_and_it_increases_generation [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TTxDataShardPrefixKMeansScan::BuildToBuildWithOverlap [GOOD] >> TTxDataShardRecomputeKMeansScan::BadRequest >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] >> TDqPqReadActorTest::TestSaveLoadPqRead [GOOD] >> TDqPqReadActorTest::LoadCorruptedState >> TDqPqReadActorTest::LoadCorruptedState [GOOD] >> TDqPqReadActorTest::TestLoadFromSeveralStates |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_kill_tablets.py::TestKillTablets::test_when_kill_keyvalue_tablet_it_will_be_restarted [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TTxDataShardRecomputeKMeansScan::BadRequest [GOOD] >> TTxDataShardRecomputeKMeansScan::MainTable >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_create_tablets.py::TestHive::test_when_create_tablets_after_bs_groups_and_kill_hive_then_tablets_start [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test >> TVersions::Wreck2Reverse [GOOD] >> TVersions::Wreck1 >> TDqPqReadActorTest::TestLoadFromSeveralStates [GOOD] >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex >> TMLPWriterTests::Deduplicated_Reboot [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/restarts/py3test >> test_insert_restarts.py::TestS3::test_atomic_upload_commit[v2-client0] [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/restarts/py3test |99.4%| [TM] {RESULT} ydb/tests/fq/restarts/py3test >> test_discovery.py::TestDiscoveryFaultInjectionSlotStop::test_scenario [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/persqueue/public/mlp/ut/unittest >> TMLPWriterTests::Deduplicated_Reboot [GOOD] Test command err: 2026-01-04T03:01:31.806248Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591336189076101459:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:01:31.806336Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T03:01:31.941612Z node 1 :PQ_READ_PROXY DEBUG: caching_service.cpp:44: Direct read cache: : Created 2026-01-04T03:01:32.109949Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T03:01:32.307719Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T03:01:32.322282Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:32.322406Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:32.324391Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:32.441357Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:32.821769Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:01:32.892093Z node 1 
:KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T03:01:32.911528Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:190: distributable config is empty, broken or outdated, will use file: /home/runner/.ya/build/build_root/c5zo/0036cb/r3tmp/yandexynlexM.tmp 2026-01-04T03:01:32.911603Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:196: will try to initialize from file: /home/runner/.ya/build/build_root/c5zo/0036cb/r3tmp/yandexynlexM.tmp 2026-01-04T03:01:32.917532Z node 1 :NET_CLASSIFIER WARN: net_classifier.cpp:202: successfully initialized from file: /home/runner/.ya/build/build_root/c5zo/0036cb/r3tmp/yandexynlexM.tmp 2026-01-04T03:01:32.917725Z node 1 :NET_CLASSIFIER ERROR: net_classifier.cpp:228: got bad distributable configuration 2026-01-04T03:01:33.257894Z INFO: TTestServer started on Port 28769 GrpcPort 1634 PQClient connected to localhost:1634 2026-01-04T03:01:33.519535Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:01:33.617309Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710659:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:35.159499Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336206255971656:2331], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:35.161050Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:35.161597Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336206255971667:2334], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:35.161730Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:263: [WorkloadService] [Service] Failed to fetch pool default, DatabaseId: /Root, status: NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:35.162041Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336206255971671:2336], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:35.170925Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateResourcePool, opId: 281474976710662:3, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_resource_pool.cpp:179) 2026-01-04T03:01:35.183076Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TPoolCreatorActor] ActorId: [1:7591336206255971673:2337], DatabaseId: /Root, PoolId: default, Scheduled retry for error: {
: Error: Transaction 281474976710662 completed, doublechecking } 2026-01-04T03:01:35.248382Z node 1 :TX_PROXY ERROR: schemereq.cpp:624: Actor# [1:7591336206255971737:2647] txid# 281474976710663, issues: { message: "Check failed: path: \'/Root/.metadata/workload_manager/pools/default\', error: path exist, request accepts it (id: [OwnerId: 72057594046644480, LocalPathId: 45], type: EPathTypeResourcePool, state: EPathStateNoChanges)" severity: 1 } 2026-01-04T03:01:35.681239Z node 1 :KQP_COMPILE_ACTOR ERROR: kqp_compile_actor.cpp:625: Compilation failed, self: [1:7591336206255971745:2343], status: SCHEME_ERROR, issues:
: Error: Type annotation, code: 1030
:3:16: Error: At function: KiReadTable!
:3:16: Error: Cannot find table 'db.[/Root/PQ/Config/V2/Cluster]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 2026-01-04T03:01:35.688638Z node 1 :KQP_SESSION WARN: {KQPSA@kqp_session_actor.cpp:2679} SessionId: ydb://session/3?node_id=1&id=ZjBjZDcxNGMtMzBkYWI5NDMtNDA0NGM4NWUtMjZlYWFhMjY=, ActorId: [1:7591336206255971654:2330], ActorState: ExecuteState, LegacyTraceId: 01ke3f7yspcaf0p119xjgy4r56, ReplyQueryCompileError, remove tx status# SCHEME_ERROR issues# { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } tx_id# trace_id# 2026-01-04T03:01:35.693647Z node 1 :PERSQUEUE_CLUSTER_TRACKER ERROR: cluster_tracker.cpp:167: failed to list clusters: { Response { QueryIssues { message: "Type annotation" issue_code: 1030 severity: 1 issues { position { row: 3 column: 16 } message: "At function: KiReadTable!" end_position { row: 3 column: 16 } severity: 1 issues { position { row: 3 column: 16 } message: "Cannot find table \'db.[/Root/PQ/Config/V2/Cluster]\' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions." end_position { row: 3 column: 16 } issue_code: 2003 severity: 1 } } } TxMeta { } } YdbStatus: SCHEME_ERROR ConsumedRu: 1 } 2026-01-04T03:01:35.765526Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710664:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:35.805067Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710665:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:35.889374Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710666:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) === Init DC: UPSERT INTO `/Root/PQ/Config/V2/Cluster` (name, balancer, local, enabled, weight) VALUES ("dc1", "localhost", true, true, 1000), ("dc2", "dc2.logbroker.yandex.net", false, true, 1000); === CheckClustersList. 
Subcribe to ClusterTracker from [1:7591336210550939328:2824] 2026-01-04T03:01:36.806682Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=table_exists.cpp:59;actor=TTableExistsActor;event=timeout;self_id=[1:7591336189076101459:2073];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:01:36.806736Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=timeout; === CheckClustersList. Ok 2026-01-04T03:01:42.325073Z :TODO INFO: TTopicSdkTestSetup started 2026-01-04T03:01:42.346139Z node 1 :PQ_READ_PROXY DEBUG: grpc_pq_schema.cpp:133: new create topic request 2026-01-04T03:01:42.419521Z node 1 :PERSQUEUE_READ_BALANCER INFO: read_balancer__balancing.cpp:1657: [72075186224037893][] pipe [1:7591336236320743353:2937] connected; active server actors: 1 2026-01-04T03:01:42.420237Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer__balancing.cpp:1521: [72075186224037893][test-topic] updating configuration. Deleted partitions []. Added partitions [0] 2026-01-04T03:01:42.424311Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:756: [72075186224037893][test-topic] Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at RB 72075186224037893 2026-01-04T03:01:42.426380Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:142: [72075186224037893][test-topic] BALANCER INIT DONE for test-topic: (0, 72075186224037892) 2026-01-04T03:01:42.431681Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3023: [PQ: 72075186224037892] Handle TEvInterconnect::TEvNodeInfo 2026-01-04T03:01:42.438708Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:3055: [PQ: 72075186224037892] Transactions request. From tx_00000000000000000000, To tx_18446744073709551615 2026-01-04T03:01:42.439915Z node 1 :PERSQUEUE DEBUG: pq_impl.cpp:788: [PQ: 72075186224037892] no config, start with empty partitions and default config 2026-01-04T03:01:42.439958Z node 1 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037892] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T03:01:42.440000Z node 1 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72075186224037892] doesn't have tx writes info 2026-01-04T03:01:42.440276Z node 1 :PERSQUEUE_READ_BALANCER DEBUG: read_balancer.cpp:394: [72075186224037893][test-topic] TEvClientConnec ... 
ed in BillingMeteringConfig 2026-01-04T03:05:36.413328Z node 19 :PERSQUEUE INFO: pq_impl.cpp:613: [PQ: 72075186224037894] doesn't have tx writes info 2026-01-04T03:05:36.424214Z node 19 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037894][Partition][0][StateInit] bootstrapping 0 [19:7591337244095145639:2479] 2026-01-04T03:05:36.434824Z node 19 :PERSQUEUE INFO: partition.cpp:712: [72075186224037894][Partition][0][StateInit] init complete for topic 'topic1' partition 0 generation 1 [19:7591337244095145639:2479] 2026-01-04T03:05:36.434976Z node 19 :PERSQUEUE INFO: partition_mlp.cpp:146: [72075186224037894][Partition][0][StateIdle] Creating MLP consumer 'mlp-consumer' 2026-01-04T03:05:36.436128Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:105: [72075186224037894][0][MLP][mlp-consumer] Start MLP consumer mlp-consumer 2026-01-04T03:05:36.436318Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:356: [72075186224037894][0][MLP][mlp-consumer] Update config: RetentionPeriod: 86400.000000s Name: "mlp-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: true DeadLetterPolicy: DEAD_LETTER_POLICY_DELETE MaxProcessingAttempts: 10 DefaultProcessingTimeoutSeconds: 0 2026-01-04T03:05:36.440440Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:185: [72075186224037894][0][MLP][mlp-consumer] HandleOnInit TEvKeyValue::TEvResponse 2026-01-04T03:05:36.440488Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:225: [72075186224037894][0][MLP][mlp-consumer] Initializing new consumer 2026-01-04T03:05:36.440519Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:278: [72075186224037894][0][MLP][mlp-consumer] Initializing new consumer 2026-01-04T03:05:36.440545Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:347: [72075186224037894][0][MLP][mlp-consumer] Try commit offset: 0 vs 0 2026-01-04T03:05:36.440578Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 0 vs 0 2026-01-04T03:05:36.440609Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:295: [72075186224037894][0][MLP][mlp-consumer] Initialized 2026-01-04T03:05:36.440641Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-04T03:05:36.440673Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-04T03:05:36.440703Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-04T03:05:36.442798Z node 19 :PERSQUEUE INFO: partition_mlp.cpp:127: [72075186224037894][Partition][0][StateIdle] Updateing MLP consumer 'mlp-consumer' config 2026-01-04T03:05:36.443417Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:356: [72075186224037894][0][MLP][mlp-consumer] Update config: RetentionPeriod: 86400.000000s Name: "mlp-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: true DeadLetterPolicy: DEAD_LETTER_POLICY_DELETE MaxProcessingAttempts: 10 DefaultProcessingTimeoutSeconds: 0 2026-01-04T03:05:36.443746Z node 19 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T03:05:36.462367Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:30: 
[[19:7591337244095145654:3041]] Start describe 2026-01-04T03:05:36.465086Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:41: [[19:7591337244095145654:3041]] Handle NDescriber::TEvDescribeTopicsResponse 2026-01-04T03:05:36.465126Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:116: [[19:7591337244095145654:3041]] Start write 2026-01-04T03:05:36.470848Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:386: [72075186224037894][0][MLP][mlp-consumer] Handle TEvPQ::TEvEndOffsetChanged. Offset: 1 2026-01-04T03:05:36.470894Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:175: [[19:7591337244095145654:3041]] Handle TEvPersQueue::TEvResponse 2026-01-04T03:05:36.470897Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:694: [72075186224037894][0][MLP][mlp-consumer] Fetching 100 messages from offset 0 from [19:7591337244095145639:2479] >>>>>> reload PQ tablet 2026-01-04T03:05:36.473044Z node 19 :PERSQUEUE ERROR: pq_impl.cpp:203: Answer error topic: 'topic1' partition: 0 messageNo: 0 requestId: error: tablet will be restarted right now 2026-01-04T03:05:36.479056Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:713: [72075186224037894][0][MLP][mlp-consumer] Handle TEvPersQueue::TEvResponse 2026-01-04T03:05:36.480409Z node 19 :PQ_MLP_CONSUMER WARN: mlp_consumer.cpp:718: [72075186224037894][0][MLP][mlp-consumer] Fetch messages failed: Status: 128 ErrorReason: "tablet will be restarted right now" ErrorCode: INITIALIZING 2026-01-04T03:05:36.490527Z node 19 :PERSQUEUE NOTICE: pq_impl.cpp:927: [PQ: 72075186224037894] disable metering: reason# billing is not enabled in BillingMeteringConfig 2026-01-04T03:05:36.490575Z node 19 :PERSQUEUE INFO: pq_impl.cpp:600: [PQ: 72075186224037894] has a tx writes info 2026-01-04T03:05:36.491384Z node 19 :PERSQUEUE INFO: partition_init.cpp:1155: [72075186224037894][Partition][0][StateInit] bootstrapping 0 [19:7591337244095145699:2482] 2026-01-04T03:05:36.495966Z node 19 :PERSQUEUE INFO: partition_init.cpp:1016: [topic1:0:TInitEndWriteTimestampStep] Initializing EndWriteTimestamp skipped because already initialized. 
2026-01-04T03:05:36.496490Z node 19 :PERSQUEUE INFO: partition.cpp:712: [72075186224037894][Partition][0][StateInit] init complete for topic 'topic1' partition 0 generation 2 [19:7591337244095145699:2482] 2026-01-04T03:05:36.496648Z node 19 :PERSQUEUE INFO: partition_mlp.cpp:146: [72075186224037894][Partition][0][StateIdle] Creating MLP consumer 'mlp-consumer' 2026-01-04T03:05:36.497305Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:105: [72075186224037894][0][MLP][mlp-consumer] Start MLP consumer mlp-consumer 2026-01-04T03:05:36.497492Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:356: [72075186224037894][0][MLP][mlp-consumer] Update config: RetentionPeriod: 86400.000000s Name: "mlp-consumer" ReadFromTimestampsMs: 0 FormatVersion: 0 Codec { } ServiceType: "data-streams" Version: 0 Generation: 0 Type: CONSUMER_TYPE_MLP KeepMessageOrder: false DeadLetterPolicyEnabled: true DeadLetterPolicy: DEAD_LETTER_POLICY_DELETE MaxProcessingAttempts: 10 DefaultProcessingTimeoutSeconds: 0 2026-01-04T03:05:36.499254Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:185: [72075186224037894][0][MLP][mlp-consumer] HandleOnInit TEvKeyValue::TEvResponse 2026-01-04T03:05:36.499295Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:225: [72075186224037894][0][MLP][mlp-consumer] Initializing new consumer 2026-01-04T03:05:36.499322Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:278: [72075186224037894][0][MLP][mlp-consumer] Initializing new consumer 2026-01-04T03:05:36.499345Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:347: [72075186224037894][0][MLP][mlp-consumer] Try commit offset: 0 vs 0 2026-01-04T03:05:36.499385Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:694: [72075186224037894][0][MLP][mlp-consumer] Fetching 100 messages from offset 0 from [19:7591337244095145699:2482] 2026-01-04T03:05:36.500963Z node 19 :PERSQUEUE WARN: pq_l2_cache.cpp:94: PQ Cache (L2). Same blob insertion. 
Tablet '72075186224037894' partition 0 offset 0 partno 0 count 1 parts 0 suffix '63' size 152 2026-01-04T03:05:36.502991Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:707: [72075186224037894][0][MLP][mlp-consumer] Initialized 2026-01-04T03:05:36.503031Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:713: [72075186224037894][0][MLP][mlp-consumer] Handle TEvPersQueue::TEvResponse 2026-01-04T03:05:36.503177Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:763: [72075186224037894][0][MLP][mlp-consumer] Fetched 1 messages 2026-01-04T03:05:36.503212Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 1 vs 1 2026-01-04T03:05:36.503243Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-04T03:05:36.503282Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-04T03:05:36.503568Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:644: [72075186224037894][0][MLP][mlp-consumer] Delete old WAL: c0000000000wmlp-consumer|0000000000000000 - c0000000000wmlp-consumer|0000000000000000 2026-01-04T03:05:36.503618Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:654: [72075186224037894][0][MLP][mlp-consumer] Write Snapshot Count: 1 Size: 69 cookie: 3 2026-01-04T03:05:36.505459Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:302: [72075186224037894][0][MLP][mlp-consumer] HandleOnWrite TEvKeyValue::TEvResponse Status: 1 Cookie: 3 DeleteRangeResult { Status: 0 } WriteResult { Status: 0 StatusFlags: 1 } 2026-01-04T03:05:36.505495Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:327: [72075186224037894][0][MLP][mlp-consumer] TX write finished 2026-01-04T03:05:36.505521Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:347: [72075186224037894][0][MLP][mlp-consumer] Try commit offset: 0 vs 0 2026-01-04T03:05:36.505545Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-04T03:05:36.505571Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-04T03:05:36.505594Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-04T03:05:36.505621Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 1 vs 1 2026-01-04T03:05:37.439130Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:694: [72075186224037894][0][MLP][mlp-consumer] Fetching 100 messages from offset 0 from [19:7591337244095145639:2479] 2026-01-04T03:05:37.439204Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-04T03:05:37.439233Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-04T03:05:37.439258Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-04T03:05:37.474207Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:30: [[19:7591337248390113034:3077]] Start describe 2026-01-04T03:05:37.474609Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:41: [[19:7591337248390113034:3077]] Handle NDescriber::TEvDescribeTopicsResponse 2026-01-04T03:05:37.474645Z node 19 :PQ_MLP_WRITER DEBUG: mlp_writer.cpp:116: [[19:7591337248390113034:3077]] Start write 2026-01-04T03:05:37.481929Z node 19 
:PQ_MLP_WRITER DEBUG: mlp_writer.cpp:175: [[19:7591337248390113034:3077]] Handle TEvPersQueue::TEvResponse 2026-01-04T03:05:37.497874Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:675: [72075186224037894][0][MLP][mlp-consumer] Skip fetch: partition end offset is reached: 1 vs 1 2026-01-04T03:05:37.497919Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:499: [72075186224037894][0][MLP][mlp-consumer] ProcessEventQueue 2026-01-04T03:05:37.497949Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:574: [72075186224037894][0][MLP][mlp-consumer] Persist 2026-01-04T03:05:37.497981Z node 19 :PQ_MLP_CONSUMER DEBUG: mlp_consumer.cpp:580: [72075186224037894][0][MLP][mlp-consumer] Batch is empty 2026-01-04T03:05:37.845222Z node 19 :FLAT_TX_SCHEMESHARD WARN: schemeshard_impl.cpp:7747: Cannot get console configs 2026-01-04T03:05:37.845252Z node 19 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded |99.4%| [TM] {RESULT} ydb/core/persqueue/public/mlp/ut/unittest |99.4%| [TM] {BAZEL_UPLOAD} ydb/core/persqueue/public/mlp/ut/unittest >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> TDqPqReadActorTest::TestReadFromTopicFirstWatermark [GOOD] >> test_crud.py::TestClientTimeouts::test_can_set_timeouts_on_query [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TTxDataShardRecomputeKMeansScan::MainTable [GOOD] >> TTxDataShardRecomputeKMeansScan::BuildTable+WithForeign >> DataShardStats::BackupTableStatsReportInterval [GOOD] >> DataShardStats::CollectKeySampleLeader |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_in_new_channel_then_can_read_from_tablet [GOOD] |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> TDqPqReadActorTest::TestReadFromTopicWatermarks1 [GOOD] >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TTxDataShardRecomputeKMeansScan::BuildTable+WithForeign [GOOD] >> 
TTxDataShardRecomputeKMeansScan::BuildTable-WithForeign |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestSessionNotFound::test_session_not_found [GOOD] >> TDqPqReadActorTest::WatermarkCheckpointWithItemsInReadyBuffer [GOOD] >> TFlatTableExecutor_IndexLoading::PrechargeAndSeek_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex >> TPqWriterTest::TestWriteToTopic >> TPqWriterTest::TestWriteToTopic [GOOD] >> test_vdisks.py::TestTinyVDisks::test_disabled >> TPqWriterTest::TestWriteToTopicMultiBatch >> TPqWriterTest::TestWriteToTopicMultiBatch [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex >> TPqWriterTest::TestDeferredWriteToTopic >> TPqWriterTest::TestDeferredWriteToTopic [GOOD] >> TTxDataShardRecomputeKMeansScan::BuildTable-WithForeign [GOOD] >> TTxDataShardRecomputeKMeansScan::EmptyCluster |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TPqWriterTest::WriteNonExistentTopic [GOOD] >> TPqWriterTest::TestCheckpoints >> TFlatTableExecutor_IndexLoading::Scan_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic >> TPqWriterTest::TestCheckpoints [GOOD] >> TPqWriterTest::TestCheckpointWithEmptyBatch >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex >> TVersions::Wreck1 [GOOD] >> TVersions::Wreck1Reverse |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.4%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/fq/pq_async_io/ut/unittest >> TPqWriterTest::TestCheckpointWithEmptyBatch [GOOD] Test command err: 2026-01-04T03:02:15.289223Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:557: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [1:7591336378558270998:2054], metadatafields: , partitions: 666, skip json errors: 0 2026-01-04T03:02:15.303628Z node 1 :KQP_COMPUTE DEBUG: dq_pq_read_actor_base.cpp:153: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. SessionId: empty Watermarks enabled: 1 granularity: 0.000100s late arrival delay: 0.000000s idle: 0 idle timeout: 86400.000000s 2026-01-04T03:02:15.303977Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:767: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. 
GetAsyncInputData freeSpace = 12345 2026-01-04T03:02:15.304145Z node 1 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1515: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. Switch to single-cluster mode 2026-01-04T03:02:15.304164Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:608: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([1:7591336378558270998:2054]) 2026-01-04T03:02:15.304219Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:635: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local row dispatcher, self id [1:7591336378558271004:2048] 2026-01-04T03:02:15.304450Z node 1 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1009: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvCoordinatorChanged, new coordinator [1:7591336378558270999:2055] 2026-01-04T03:02:15.304519Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:655: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorRequest to coordinator [1:7591336378558270999:2055], partIds: 666 cookie 1 2026-01-04T03:02:15.304873Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1053: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvCoordinatorResult from [1:7591336378558270999:2055], cookie 1 2026-01-04T03:02:15.304912Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1352: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. UpdateSessions, Sessions size 0 2026-01-04T03:02:15.304930Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1355: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. Distribution is changed, remove sessions 2026-01-04T03:02:15.304976Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1374: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. Create session to [1:7591336378558271001:2057], generation 1 2026-01-04T03:02:15.305045Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:719: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvStartSession to [1:7591336378558271001:2057], connection id 1 partitions offsets (666 / ), 2026-01-04T03:02:15.305490Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:864: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvStartSessionAck from [1:7591336378558271001:2057], seqNo 0, ConfirmedSeqNo 0, generation 1 2026-01-04T03:02:15.306752Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:945: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [1:7591336378558271001:2057], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2026-01-04T03:02:15.319503Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1123: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [1:7591336378558271001:2057], seqNo 0, ConfirmedSeqNo 0 generation 1 2026-01-04T03:02:15.320870Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1171: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
TEvMessageBatch NextOffset 1 2026-01-04T03:02:15.320887Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1171: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 2 2026-01-04T03:02:15.320918Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:767: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. GetAsyncInputData freeSpace = 1000 2026-01-04T03:02:15.321050Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:807: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 2 2026-01-04T03:02:15.321066Z node 1 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:811: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. Return 2 rows, watermark (empty maybe), buffer size 0, free space 948, result size 52 2026-01-04T03:02:15.322374Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:744: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. PassAway 2026-01-04T03:02:15.322494Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1242: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. State: used buffer size 0 ready buffer event size 0 state 5 InFlyAsyncInputData 0 Counters: CoordinatorChanged 1 CoordinatorResult 1 MessageBatch 1 StartSessionAck 1 NewDataArrived 1 SessionError 0 Statistics 0 NodeDisconnected 0 NodeConnected 0 Undelivered 0 Retry 0 PrivateHeartbeat 0 SessionClosed 0 Pong 0 Heartbeat 0 PrintState 0 ProcessState 0 GetAsyncInputData 2 NotifyCA 1 [1:7591336378558271001:2057] status 2 is waiting ack 0 connection id 1 id 1, LocalRecipient partitions 666 offsets 666=2 has pending data 2026-01-04T03:02:15.322533Z node 1 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:736: SelfId: [1:7591336378558271004:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send StopSession to [1:7591336378558271001:2057] generation 1 2026-01-04T03:02:15.646215Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:557: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. Start read actor, local row dispatcher [2:7591336379101664593:2054], metadatafields: , partitions: 666, skip json errors: 0 2026-01-04T03:02:15.646712Z node 2 :KQP_COMPUTE DEBUG: dq_pq_read_actor_base.cpp:153: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. SessionId: empty Watermarks enabled: 1 granularity: 0.000100s late arrival delay: 0.000000s idle: 0 idle timeout: 86400.000000s 2026-01-04T03:02:15.646899Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:767: SelfId: [0:0:0], TxId: query_1, task: 0. PQ source. GetAsyncInputData freeSpace = 12345 2026-01-04T03:02:15.646954Z node 2 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1515: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. Switch to single-cluster mode 2026-01-04T03:02:15.646967Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:608: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorChangesSubscribe to local RD ([2:7591336379101664593:2054]) 2026-01-04T03:02:15.647017Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:635: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
Send TEvCoordinatorChangesSubscribe to local row dispatcher, self id [2:7591336379101664599:2048] 2026-01-04T03:02:15.647217Z node 2 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1009: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvCoordinatorChanged, new coordinator [2:7591336379101664594:2055] 2026-01-04T03:02:15.647252Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:655: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvCoordinatorRequest to coordinator [2:7591336379101664594:2055], partIds: 666 cookie 1 2026-01-04T03:02:15.647417Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1053: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvCoordinatorResult from [2:7591336379101664594:2055], cookie 1 2026-01-04T03:02:15.647434Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1352: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. UpdateSessions, Sessions size 0 2026-01-04T03:02:15.647441Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1355: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. Distribution is changed, remove sessions 2026-01-04T03:02:15.647477Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:1374: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. Create session to [2:7591336379101664596:2057], generation 1 2026-01-04T03:02:15.647525Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:719: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. Send TEvStartSession to [2:7591336379101664596:2057], connection id 1 partitions offsets (666 / ), 2026-01-04T03:02:15.647771Z node 2 :KQP_COMPUTE INFO: dq_pq_rd_read_actor.cpp:864: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvStartSessionAck from [2:7591336379101664596:2057], seqNo 0, ConfirmedSeqNo 0, generation 1 2026-01-04T03:02:15.647928Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:945: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [2:7591336379101664596:2057], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2026-01-04T03:02:15.648694Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1123: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [2:7591336379101664596:2057], seqNo 0, ConfirmedSeqNo 0 generation 1 2026-01-04T03:02:15.648728Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1171: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 1 2026-01-04T03:02:15.648739Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1171: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. TEvMessageBatch NextOffset 2 2026-01-04T03:02:15.648857Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:767: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. GetAsyncInputData freeSpace = 1000 2026-01-04T03:02:15.648986Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:807: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. NextOffset 2 2026-01-04T03:02:15.649004Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:811: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. 
Return 2 rows, watermark (empty maybe), buffer size 0, free space 948, result size 52 2026-01-04T03:02:15.650049Z node 2 :KQP_COMPUTE DEBUG: dq_pq_rd_read_actor.cpp:1097: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvUndelivered, TSystem::Undelivered from [2:7591336379101664596:2057], reason Disconnected, cookie 999 2026-01-04T03:02:15.650123Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:945: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvNewDataArrived from [2:7591336379101664596:2057], partition 666, seqNo 0, ConfirmedSeqNo 0 generation 1 2026-01-04T03:02:15.650260Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:963: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvRetry, EventQueueId 1 2026-01-04T03:02:15.650711Z node 2 :KQP_COMPUTE TRACE: dq_pq_rd_read_actor.cpp:1123: SelfId: [2:7591336379101664599:2048], TxId: query_1, task: 0, Cluster: . PQ source. Received TEvMessageBatch from [2:7591336379101664596:2057 ... Error: Aborted " } 2026-01-04T03:05:57.614225Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [54395558-65109a40-70f1ba19-75bfc10b|a621ffc1-fa3b1d94-186bccb7-7d59a3b0_0] PartitionId [0] Generation [1] Write session: close. Timeout 0.000000s 2026-01-04T03:05:57.614273Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [54395558-65109a40-70f1ba19-75bfc10b|a621ffc1-fa3b1d94-186bccb7-7d59a3b0_0] PartitionId [0] Generation [1] Write session will now close 2026-01-04T03:05:57.614331Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [54395558-65109a40-70f1ba19-75bfc10b|a621ffc1-fa3b1d94-186bccb7-7d59a3b0_0] PartitionId [0] Generation [1] Write session: aborting 2026-01-04T03:05:57.614773Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [54395558-65109a40-70f1ba19-75bfc10b|a621ffc1-fa3b1d94-186bccb7-7d59a3b0_0] PartitionId [0] Generation [1] Write session: gracefully shut down, all writes complete 2026-01-04T03:05:57.614813Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [54395558-65109a40-70f1ba19-75bfc10b|a621ffc1-fa3b1d94-186bccb7-7d59a3b0_0] PartitionId [0] Generation [1] Write session: destroy 2026-01-04T03:05:58.048760Z node 46 :KQP_COMPUTE DEBUG: dq_pq_write_actor.cpp:268: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Load state: { SourceId: "54395558-65109a40-70f1ba19-75bfc10b" ConfirmedSeqNo: 3 EgressBytes: 3 } 2026-01-04T03:05:58.049032Z node 46 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:195: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. SendData. Batch: 2. Checkpoint: 0. Finished: 0 2026-01-04T03:05:58.075776Z node 46 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:221: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. Received data for sending: 4 2026-01-04T03:05:58.075847Z node 46 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:221: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. 
Received data for sending: 5 2026-01-04T03:05:58.084053Z :INFO: [local] OnFederationDiscovery fall back to single mode, database=local [] [] Start federated write session to database '' (previous was ) FederationState: { Status: SUCCESS SelfLocation: "" DbInfos: [ { path: "local" endpoint: "localhost:11266" status: AVAILABLE weight: 100 } ] ControlPlaneEndpoint: localhost:11266 }2026-01-04T03:05:58.084523Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [54395558-65109a40-70f1ba19-75bfc10b] Write session: try to update token 2026-01-04T03:05:58.085284Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [54395558-65109a40-70f1ba19-75bfc10b] Start write session. Will connect to nodeId: 0 2026-01-04T03:05:58.086876Z :INFO: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] Starting read session 2026-01-04T03:05:58.086943Z :DEBUG: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] Starting single session 2026-01-04T03:05:58.088170Z :DEBUG: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] [] In Reconnect, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T03:05:58.088265Z :DEBUG: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] [] New values: ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T03:05:58.088357Z :DEBUG: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] [] Reconnecting session to cluster in 0.000000s 2026-01-04T03:05:58.096369Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [54395558-65109a40-70f1ba19-75bfc10b] Write session: write to message_group: 54395558-65109a40-70f1ba19-75bfc10b 2026-01-04T03:05:58.096511Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [54395558-65109a40-70f1ba19-75bfc10b] Write session: send init request: init_request { path: "Checkpoints" producer_id: "54395558-65109a40-70f1ba19-75bfc10b" message_group_id: "54395558-65109a40-70f1ba19-75bfc10b" } 2026-01-04T03:05:58.096551Z :TRACE: [local] TRACE_EVENT InitRequest 2026-01-04T03:05:58.096671Z :DEBUG: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] [] Successfully connected. Initializing session 2026-01-04T03:05:58.096878Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [54395558-65109a40-70f1ba19-75bfc10b] Write session: OnWriteDone gRpcStatusCode: 0 2026-01-04T03:05:58.099819Z :INFO: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] [] Got InitResponse. ReadSessionId: test_client_1_22_16230100304439605459_v1 2026-01-04T03:05:58.099892Z :DEBUG: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] [] In ContinueReadingDataImpl, ReadSizeBudget = 52428800, ReadSizeServerDelta = 0 2026-01-04T03:05:58.100012Z :DEBUG: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2026-01-04T03:05:58.104432Z :INFO: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] [] Confirm partition stream create. Partition stream id: 1. Cluster: "-". Topic: "Checkpoints". Partition: 0. 
Read offset: (NULL) 2026-01-04T03:05:58.108434Z :DEBUG: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] [] Got ReadResponse, serverBytesSize = 938, now ReadSizeBudget = 0, ReadSizeServerDelta = 52427862 2026-01-04T03:05:58.108606Z :DEBUG: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] [] In ContinueReadingDataImpl, ReadSizeBudget = 0, ReadSizeServerDelta = 52427862 2026-01-04T03:05:58.108926Z :DEBUG: [local] Decompression task done. Partition/PartitionSessionId: 1 (0-4) 2026-01-04T03:05:58.109027Z :DEBUG: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] [] Returning serverBytesSize = 938 to budget 2026-01-04T03:05:58.109083Z :DEBUG: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] [] In ContinueReadingDataImpl, ReadSizeBudget = 938, ReadSizeServerDelta = 52427862 2026-01-04T03:05:58.109436Z :DEBUG: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] [] After sending read request: ReadSizeBudget = 0, ReadSizeServerDelta = 52428800 2026-01-04T03:05:58.109530Z :DEBUG: [local] Take Data. Partition 0. Read: {0, 0} (0-0) 2026-01-04T03:05:58.109581Z :DEBUG: [local] Take Data. Partition 0. Read: {1, 0} (1-1) 2026-01-04T03:05:58.109618Z :DEBUG: [local] Take Data. Partition 0. Read: {1, 1} (2-2) 2026-01-04T03:05:58.109663Z :DEBUG: [local] Take Data. Partition 0. Read: {2, 0} (3-3) 2026-01-04T03:05:58.109716Z :DEBUG: [local] Take Data. Partition 0. Read: {3, 0} (4-4) 2026-01-04T03:05:58.109934Z :DEBUG: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] [] The application data is transferred to the client. Number of messages 5, size 5 bytes 2026-01-04T03:05:58.109963Z :INFO: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] Closing read session. Close timeout: 0.000000s 2026-01-04T03:05:58.110012Z :DEBUG: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] [] Returning serverBytesSize = 0 to budget 2026-01-04T03:05:58.110079Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2026-01-04T03:05:58.110172Z :INFO: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] Counters: { Errors: 0 CurrentSessionLifetimeMs: 23 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T03:05:58.110371Z :NOTICE: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Close with zero timeout " } 2026-01-04T03:05:58.110451Z :DEBUG: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] [] Abort session to cluster 2026-01-04T03:05:58.111544Z :INFO: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] Closing read session. Close timeout: 0.000000s 2026-01-04T03:05:58.111643Z :INFO: [local] Read/commit by partition streams (cluster:topic:partition:stream-id:read-offset:committed-offset): -:Checkpoints:0:1:4:0 2026-01-04T03:05:58.111728Z :INFO: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] Counters: { Errors: 0 CurrentSessionLifetimeMs: 24 BytesRead: 5 MessagesRead: 5 BytesReadCompressed: 5 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T03:05:58.111917Z :NOTICE: [local] [local] [99e1801d-2f51d5fe-3e773913-4cee7c4] Aborting read session. Description: SessionClosed { Status: ABORTED Issues: "
: Error: Aborted " } 2026-01-04T03:05:58.117607Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [54395558-65109a40-70f1ba19-75bfc10b] Write session: OnReadDone gRpcStatusCode: 0 2026-01-04T03:05:58.117797Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [54395558-65109a40-70f1ba19-75bfc10b] Counters: { Errors: 0 CurrentSessionLifetimeMs: 1767495958117 BytesWritten: 0 MessagesWritten: 0 BytesWrittenCompressed: 0 BytesInflightUncompressed: 0 BytesInflightCompressed: 0 BytesInflightTotal: 0 MessagesInflight: 0 } 2026-01-04T03:05:58.117951Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [] MessageGroupId [54395558-65109a40-70f1ba19-75bfc10b] Write session established. Init response: last_seq_no: 5 session_id: "54395558-65109a40-70f1ba19-75bfc10b|15b93420-50a34539-898e75d1-d7738602_0" 2026-01-04T03:05:58.118015Z :TRACE: [local] TRACE_EVENT InitResponse partition_id=0 session_id=54395558-65109a40-70f1ba19-75bfc10b|15b93420-50a34539-898e75d1-d7738602_0 2026-01-04T03:05:58.118063Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [54395558-65109a40-70f1ba19-75bfc10b|15b93420-50a34539-898e75d1-d7738602_0] MessageGroupId [54395558-65109a40-70f1ba19-75bfc10b] Write session: set DirectWriteToPartitionId 0 2026-01-04T03:05:58.118277Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [54395558-65109a40-70f1ba19-75bfc10b|15b93420-50a34539-898e75d1-d7738602_0] PartitionId [0] Generation [0] Write session: close. Timeout 0.000000s 2026-01-04T03:05:58.118317Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [54395558-65109a40-70f1ba19-75bfc10b|15b93420-50a34539-898e75d1-d7738602_0] PartitionId [0] Generation [0] Write session will now close 2026-01-04T03:05:58.118386Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [54395558-65109a40-70f1ba19-75bfc10b|15b93420-50a34539-898e75d1-d7738602_0] PartitionId [0] Generation [0] Write session: aborting 2026-01-04T03:05:58.118881Z :INFO: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [54395558-65109a40-70f1ba19-75bfc10b|15b93420-50a34539-898e75d1-d7738602_0] PartitionId [0] Generation [0] Write session: gracefully shut down, all writes complete 2026-01-04T03:05:58.119025Z :DEBUG: [local] TraceId [SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. ] SessionId [54395558-65109a40-70f1ba19-75bfc10b|15b93420-50a34539-898e75d1-d7738602_0] PartitionId [0] Generation [0] Write session: destroy 2026-01-04T03:05:58.789371Z node 47 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:195: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. SendData. Batch: 0. Checkpoint: 1. Finished: 0 2026-01-04T03:05:58.804663Z node 47 :KQP_COMPUTE DEBUG: dq_pq_write_actor.cpp:240: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. [Checkpoint 0.0] Send checkpoint state immediately 2026-01-04T03:05:58.804912Z node 47 :KQP_COMPUTE TRACE: dq_pq_write_actor.cpp:418: SelfId: [0:0:0], TxId: query_1, TaskId: 0, PQ sink. 
Save checkpoint { Id: 0 Generation: 0 } state: { SourceId: "8d5bc60d-e0966032-5cae1a6a-4db4cf51" } >> TTxDataShardRecomputeKMeansScan::EmptyCluster [GOOD] >> TTxDataShardReshuffleKMeansScan::BadRequest |99.5%| [TM] {RESULT} ydb/tests/fq/pq_async_io/ut/unittest |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/fq/pq_async_io/ut/unittest >> TFlatTableExecutor_IndexLoading::Scan_History_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex >> test_drain.py::TestHive::test_drain_on_stop [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_FlatIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool |99.4%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex [GOOD] >> TFlatTableExecutor_IndexLoading::Scan_Groups_BTreeIndex_Empty [GOOD] >> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers >> TFlatTableExecutor_KeepEraseMarkers::TestKeepEraseMarkers [GOOD] >> TFlatTableExecutor_LongTx::CompactUncommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactCommittedLongTx [GOOD] >> TFlatTableExecutor_LongTx::CompactedLongTxRestart >> TFlatTableExecutor_LongTx::CompactedLongTxRestart [GOOD] >> TFlatTableExecutor_LongTx::CompactMultipleChanges [GOOD] >> TFlatTableExecutor_LongTx::CompactedTxIdReuse >> TFlatTableExecutor_LongTx::CompactedTxIdReuse [GOOD] >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] >> DataShardStats::CollectKeySampleLeader [GOOD] >> DataShardStats::CollectKeySampleFollower ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TFlatTableExecutorGC::TestGCVectorDeduplicaton [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:04:43.878978Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: TNanny initiates TDummy tablet 72057594037927937 birth 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.008 II| TABLET_EXECUTOR: Leader{1:2:0} activating executor 00000.008 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1, 35b, wait} done, Waste{2:0, 0b +(0, 0b), 0 trc} 00000.109 II| TABLET_EXECUTOR: Leader{1:2:276} starting compaction 00000.109 II| TABLET_EXECUTOR: Leader{1:2:277} starting Scan{1 on 2, Compact{1.2.276, eph 1}} 00000.109 II| TABLET_EXECUTOR: Leader{1:2:277} started compaction 1 00000.109 II| TABLET_OPS_HOST: Scan{1 on 2, Compact{1.2.276, eph 1}} begin on TSubset{head 2, 1m 0p 0c} 00000.112 II| TABLET_OPS_HOST: Scan{1 on 2, Compact{1.2.276, eph 1}} end=Done, 115r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 19 of 23 ~1p 00000.113 II| OPS_COMPACT: Compact{1.2.276, eph 1} end=Done, 10 blobs 78r (max 115), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 19 +6, (133887 27242 228753)b }, ecr=1.000 00000.126 II| 
TABLET_EXECUTOR: Leader{1:2:278} Compact 1 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 276, product {1 parts epoch 2} done 00000.142 II| TABLET_EXECUTOR: LSnap{1:2, on 2:301, 6040b, wait} done, Waste{2:0, 925823b +(69, 41450b), 300 trc} 00000.172 II| TABLET_EXECUTOR: Leader{1:2:361} starting compaction 00000.172 II| TABLET_EXECUTOR: Leader{1:2:362} starting Scan{3 on 3, Compact{1.2.361, eph 1}} 00000.172 II| TABLET_EXECUTOR: Leader{1:2:362} started compaction 3 00000.173 II| TABLET_OPS_HOST: Scan{3 on 3, Compact{1.2.361, eph 1}} begin on TSubset{head 2, 1m 0p 0c} 00000.174 II| TABLET_OPS_HOST: Scan{3 on 3, Compact{1.2.361, eph 1}} end=Done, 105r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 18 of 22 ~1p 00000.174 II| OPS_COMPACT: Compact{1.2.361, eph 1} end=Done, 11 blobs 75r (max 105), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 18 +7, (139364 6978 227525)b }, ecr=1.000 00000.176 II| TABLET_EXECUTOR: Leader{1:2:363} Compact 3 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 361, product {1 parts epoch 2} done 00000.311 II| TABLET_EXECUTOR: LSnap{1:2, on 2:601, 8109b, wait} done, Waste{2:0, 1768882b +(76, 42660b), 300 trc} 00000.328 II| TABLET_EXECUTOR: Leader{1:2:636} starting compaction 00000.328 II| TABLET_EXECUTOR: Leader{1:2:637} starting Scan{5 on 2, Compact{1.2.636, eph 2}} 00000.328 II| TABLET_EXECUTOR: Leader{1:2:637} started compaction 5 00000.328 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.636, eph 2}} begin on TSubset{head 3, 1m 0p 0c} 00000.330 II| TABLET_OPS_HOST: Scan{5 on 2, Compact{1.2.636, eph 2}} end=Done, 107r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 22 of 24 ~1p 00000.330 II| OPS_COMPACT: Compact{1.2.636, eph 2} end=Done, 8 blobs 107r (max 107), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 22 +4, (165592 33932 248406)b }, ecr=1.000 00000.341 II| TABLET_EXECUTOR: Leader{1:2:638} Compact 5 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 636, product {1 parts epoch 3} done 00000.341 II| TABLET_EXECUTOR: Leader{1:2:639} starting compaction 00000.341 II| TABLET_EXECUTOR: Leader{1:2:640} starting Scan{7 on 2, Compact{1.2.639, eph 2}} 00000.341 II| TABLET_EXECUTOR: Leader{1:2:640} started compaction 7 00000.341 II| TABLET_OPS_HOST: Scan{7 on 2, Compact{1.2.639, eph 2}} begin on TSubset{head 0, 0m 2p 0c} 00000.345 II| TABLET_OPS_HOST: Scan{7 on 2, Compact{1.2.639, eph 2}} end=Done, 158r seen, TFwd{fetch=291KiB,saved=291KiB,usage=284KiB,after=6.83KiB,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=7}, trace 40 of 51 ~3p 00000.345 II| OPS_COMPACT: Compact{1.2.639, eph 2} end=Done, 4 blobs 130r (max 185), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 40 +0, (265207 54175 379191)b }, ecr=1.000 00000.347 II| TABLET_EXECUTOR: Leader{1:2:640} Compact 7 on TGenCompactionParams{2: gen 1 epoch 0, 2 parts} step 639, product {1 parts epoch 0} done 00000.403 II| TABLET_EXECUTOR: Leader{1:2:751} starting compaction 00000.403 II| TABLET_EXECUTOR: Leader{1:2:752} starting Scan{9 on 3, Compact{1.2.751, eph 2}} 00000.403 II| TABLET_EXECUTOR: Leader{1:2:752} started compaction 9 00000.403 II| TABLET_OPS_HOST: Scan{9 on 3, Compact{1.2.751, eph 2}} begin on TSubset{head 3, 1m 0p 0c} 00000.405 II| TABLET_OPS_HOST: Scan{9 on 3, Compact{1.2.751, eph 2}} end=Done, 124r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio 
Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 18 of 20 ~1p 00000.406 II| OPS_COMPACT: Compact{1.2.751, eph 2} end=Done, 10 blobs 124r (max 124), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 18 +6, (163711 20029 212264)b }, ecr=1.000 00000.418 II| TABLET_EXECUTOR: Leader{1:2:753} Compact 9 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 751, product {1 parts epoch 3} done 00000.418 II| TABLET_EXECUTOR: Leader{1:2:754} starting compaction 00000.418 II| TABLET_EXECUTOR: Leader{1:2:755} starting Scan{11 on 3, Compact{1.2.754, eph 2}} 00000.418 II| TABLET_EXECUTOR: Leader{1:2:755} started compaction 11 00000.419 II| TABLET_OPS_HOST: Scan{11 on 3, Compact{1.2.754, eph 2}} begin on TSubset{head 0, 0m 2p 0c} 00000.422 II| TABLET_OPS_HOST: Scan{11 on 3, Compact{1.2.754, eph 2}} end=Done, 168r seen, TFwd{fetch=288KiB,saved=288KiB,usage=288KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=6}, trace 39 of 49 ~3p 00000.422 II| OPS_COMPACT: Compact{1.2.754, eph 2} end=Done, 4 blobs 129r (max 199), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 39 +0, (247459 20029 352126)b }, ecr=1.000 00000.424 II| TABLET_EXECUTOR: Leader{1:2:758} Compact 11 on TGenCompactionParams{3: gen 1 epoch 0, 2 parts} step 754, product {1 parts epoch 0} done 00000.510 II| TABLET_EXECUTOR: LSnap{1:2, on 2:901, 10212b, wait} done, Waste{2:0, 2384919b +(183, 850777b), 300 trc} 00000.528 II| TABLET_EXECUTOR: Leader{1:2:945} starting compaction 00000.528 II| TABLET_EXECUTOR: Leader{1:2:946} starting Scan{13 on 2, Compact{1.2.945, eph 3}} 00000.528 II| TABLET_EXECUTOR: Leader{1:2:946} started compaction 13 00000.529 II| TABLET_OPS_HOST: Scan{13 on 2, Compact{1.2.945, eph 3}} begin on TSubset{head 4, 1m 0p 0c} 00000.530 II| TABLET_OPS_HOST: Scan{13 on 2, Compact{1.2.945, eph 3}} end=Done, 111r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 11 of 16 ~1p 00000.530 II| OPS_COMPACT: Compact{1.2.945, eph 3} end=Done, 9 blobs 111r (max 111), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 11 +5, (192342 13254 135639)b }, ecr=1.000 00000.542 II| TABLET_EXECUTOR: Leader{1:2:947} Compact 13 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 945, product {1 parts epoch 4} done 00000.622 II| TABLET_EXECUTOR: Leader{1:2:1096} starting compaction 00000.622 II| TABLET_EXECUTOR: Leader{1:2:1097} starting Scan{15 on 3, Compact{1.2.1096, eph 3}} 00000.622 II| TABLET_EXECUTOR: Leader{1:2:1097} started compaction 15 00000.622 II| TABLET_OPS_HOST: Scan{15 on 3, Compact{1.2.1096, eph 3}} begin on TSubset{head 4, 1m 0p 0c} 00000.624 II| TABLET_OPS_HOST: Scan{15 on 3, Compact{1.2.1096, eph 3}} end=Done, 111r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 18 of 20 ~1p 00000.624 II| OPS_COMPACT: Compact{1.2.1096, eph 3} end=Done, 9 blobs 111r (max 111), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 18 +5, (150400 27056 207602)b }, ecr=1.000 00000.626 II| TABLET_EXECUTOR: Leader{1:2:1097} Compact 15 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 1096, product {1 parts epoch 4} done 00000.700 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1201, 11353b, wait} done, Waste{2:0, 3253270b +(151, 78368b), 300 trc} 00000.745 II| TABLET_EXECUTOR: Leader{1:2:1279} starting compaction 00000.745 II| TABLET_EXECUTOR: Leader{1:2:1280} starting Scan{17 on 2, Compact{1.2.1279, eph 4}} 00000.746 II| TABLET_EXECUTOR: 
Leader{1:2:1280} started compaction 17 00000.746 II| TABLET_OPS_HOST: Scan{17 on 2, Compact{1.2.1279, eph 4}} begin on TSubset{head 5, 1m 0p 0c} 00000.748 II| TABLET_OPS_HOST: Scan{17 on 2, Compact{1.2.1279, eph 4}} end=Done, 118r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 18 of 19 ~1p 00000.748 II| OPS_COMPACT: Compact{1.2.1279, eph 4} end=Done, 9 blobs 118r (max 118), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 18 +5, (172142 13104 202965)b }, ecr=1.000 00000.758 II| TABLET_EXECUTOR: Leader{1:2:1281} Compact 17 on TGenCompactionParams{2: gen 0 epoch +inf, 0 parts} step 1279, product {1 parts epoch 5} done 00000.759 II| TABLET_EXECUTOR: Leader{1:2:1282} starting compaction 00000.759 II| TABLET_EXECUTOR: Leader{1:2:1283} starting Scan{19 on 2, Compact{1.2.1282, eph 4}} 00000.759 II| TABLET_EXECUTOR: Leader{1:2:1283} started compaction 19 00000.759 II| TABLET_OPS_HOST: Scan{19 on 2, Compact{1.2.1282, eph 4}} begin on TSubset{head 0, 0m 2p 0c} 00000.764 II| TABLET_OPS_HOST: Scan{19 on 2, Compact{1.2.1282, eph 4}} end=Done, 182r seen, TFwd{fetch=349KiB,saved=349KiB,usage=349KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=8}, trace 31 of 39 ~3p 00000.764 II| OPS_COMPACT: Compact{1.2.1282, eph 4} end=Done, 4 blobs 182r (max 229), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 31 +0, (275996 19770 270979)b }, ecr=1.000 00000.766 II| TABLET_EXECUTOR: Leader{1:2:1284} Compact 19 on TGenCompactionParams{2: gen 1 epoch 0, 2 parts} step 1282, product {1 parts epoch 0} done 00000.840 II| TABLET_EXECUTOR: Leader{1:2:1409} starting compaction 00000.840 II| TABLET_EXECUTOR: Leader{1:2:1410} starting Scan{21 on 3, Compact{1.2.1409, eph 4}} 00000.840 II| TABLET_EXECUTOR: Leader{1:2:1410} started compaction 21 00000.840 II| TABLET_OPS_HOST: Scan{21 on 3, Compact{1.2.1409, eph 4}} begin on TSubset{head 5, 1m 0p 0c} 00000.843 II| TABLET_OPS_HOST: Scan{21 on 3, Compact{1.2.1409, eph 4}} end=Done, 103r seen, TFwd{fetch=0B,saved=0B,usage=0B,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=1}, trace 11 of 12 ~1p 00000.843 II| OPS_COMPACT: Compact{1.2.1409, eph 4} end=Done, 7 blobs 103r (max 103), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 11 +3, (170647 41019 134039)b }, ecr=1.000 00000.851 II| TABLET_EXECUTOR: Leader{1:2:1411} Compact 21 on TGenCompactionParams{3: gen 0 epoch +inf, 0 parts} step 1409, product {1 parts epoch 5} done 00000.852 II| TABLET_EXECUTOR: Leader{1:2:1412} starting compaction 00000.852 II| TABLET_EXECUTOR: Leader{1:2:1413} starting Scan{23 on 3, Compact{1.2.1412, eph 4}} 00000.852 II| TABLET_EXECUTOR: Leader{1:2:1413} started compaction 23 00000.852 II| TABLET_OPS_HOST: Scan{23 on 3, Compact{1.2.1412, eph 4}} begin on TSubset{head 0, 0m 2p 0c} 00000.856 II| TABLET_OPS_HOST: Scan{23 on 3, Compact{1.2.1412, eph 4}} end=Done, 175r seen, TFwd{fetch=306KiB,saved=306KiB,usage=299KiB,after=0B,before=0B}, bio Spent{time=0.000s,wait=0.000s,interrupts=7}, trace 30 of 37 ~3p 00000.856 II| OPS_COMPACT: Compact{1.2.1412, eph 4} end=Done, 4 blobs 175r (max 214), put Spent{time=0.000s,wait=0.000s,interrupts=1} Part{ 2 pk, lobs 30 +0, (250989 54859 274631)b }, ecr=1.000 00000.858 II| TABLET_EXECUTOR: Leader{1:2:1415} Compact 23 on TGenCompactionParams{3: gen 1 epoch 0, 2 parts} step 1412, product {1 parts epoch 0} done 00000.911 II| TABLET_EXECUTOR: LSnap{1:2, on 2:1501, 12954b, wait} done, Waste{2:0, 3 ... 
DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (1 by [59:30:2062]) to queue queue_compaction_gen0 00000.014 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 0.390625 (insert task gen0-table-101-tablet-1 (1 by [59:30:2062])) 00000.014 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1: task 1, edge 9223372036854775807/0, generation 0 00000.014 II| TABLET_EXECUTOR: Leader{1:2:5} starting compaction 00000.014 II| TABLET_EXECUTOR: Leader{1:2:6} starting Scan{1 on 101, Compact{1.2.5, eph 1}} 00000.014 II| TABLET_EXECUTOR: Leader{1:2:6} started compaction 1 00000.014 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 1 generation 0 00000.025 DD| TABLET_EXECUTOR: Leader{1:2:6} commited cookie 8 for step 5 00000.025 II| TABLET_EXECUTOR: Leader{1:2:6} Compact 1 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 5, product {tx status + 1 parts epoch 2} done 00000.025 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 1, generation 0 00000.025 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.025 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.026 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (1 by [59:30:2062]) (release resources {1, 0}) 00000.026 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.390625 to 0.000000 (remove task gen0-table-101-tablet-1 (1 by [59:30:2062])) 00000.026 DD| TABLET_EXECUTOR: Leader{1:2:7} commited cookie 3 for step 6 00000.026 DD| TABLET_EXECUTOR: Leader{1:2:7} switch applied on followers, step 6 ...waiting until compacted 00000.027 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData 00000.027 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.027 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} hope 1 -> done Change{5, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.027 DD| TABLET_EXECUTOR: Leader{1:2:7} Tx{6, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} release 4194304b of static, Memory{0 dyn 0} ...hasTxData = 1 ...compacting 00000.027 DD| TABLET_EXECUTOR: TCompactionLogic PrepareForceCompaction for 1 table 101, mode Mem, forced state None, forced mode Mem 00000.027 DD| RESOURCE_BROKER: Submitted new compaction_gen0 task gen0-table-101-tablet-1 (2 by [59:30:2062]) priority=5 resources={1, 0} 00000.027 DD| RESOURCE_BROKER: Assigning waiting task gen0-table-101-tablet-1 (2 by [59:30:2062]) to queue queue_compaction_gen0 00000.027 DD| RESOURCE_BROKER: Allocate resources {1, 0} for task gen0-table-101-tablet-1 (2 by [59:30:2062]) from queue queue_compaction_gen0 00000.027 DD| RESOURCE_BROKER: Assigning in-fly task gen0-table-101-tablet-1 (2 by [59:30:2062]) to queue queue_compaction_gen0 00000.027 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.000000 to 0.371094 (insert task gen0-table-101-tablet-1 (2 by [59:30:2062])) 00000.027 DD| TABLET_EXECUTOR: TGenCompactionStrategy 
PrepareCompaction for 1: task 2, edge 9223372036854775807/0, generation 0 00000.027 II| TABLET_EXECUTOR: Leader{1:2:7} starting compaction 00000.027 II| TABLET_EXECUTOR: Leader{1:2:8} starting Scan{3 on 101, Compact{1.2.7, eph 1}} 00000.027 II| TABLET_EXECUTOR: Leader{1:2:8} started compaction 3 00000.028 DD| TABLET_EXECUTOR: TGenCompactionStrategy PrepareCompaction for 1 started compaction 3 generation 0 00000.028 II| TABLET_EXECUTOR: Leader{1:2:8} Compact 3 on TGenCompactionParams{101: gen 0 epoch +inf, 0 parts} step 7, product {0 parts epoch 2} done 00000.028 DD| TABLET_EXECUTOR: TGenCompactionStrategy CompactionFinished for 1: compaction 3, generation 0 00000.028 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.028 DD| RESOURCE_BROKER: Finish task gen0-table-101-tablet-1 (2 by [59:30:2062]) (release resources {1, 0}) 00000.028 DD| RESOURCE_BROKER: Updated planned resource usage for queue queue_compaction_gen0 from 0.371094 to 0.000000 (remove task gen0-table-101-tablet-1 (2 by [59:30:2062])) 00000.028 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 8 for step 7 00000.028 DD| TABLET_EXECUTOR: Leader{1:2:9} commited cookie 3 for step 8 00000.028 DD| TABLET_EXECUTOR: Leader{1:2:9} switch applied on followers, step 8 ...waiting until compacted 00000.029 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData 00000.029 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.029 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} hope 1 -> done Change{6, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.029 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{7, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxHasTxData} release 4194304b of static, Memory{0 dyn 0} ...hasTxData = 0 00000.029 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u> 00000.029 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.029 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} hope 1 -> done Change{6, redo 86b alter 0b annex 0, ~{ 101 } -{ }, 0 gb} 00000.029 DD| TABLET_EXECUTOR: Leader{1:2:9} Tx{8, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWriteRow<2u>} release 4194304b of static, Memory{0 dyn 0} 00000.029 DD| TABLET_EXECUTOR: Leader{1:2:10} commited cookie 1 for step 9 00000.030 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted 00000.030 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.030 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, 
NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.030 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{9, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxWaitCompleted} release 4194304b of static, Memory{0 dyn 0} 00000.030 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows 00000.030 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.030 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 1 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.030 DD| TABLET_EXECUTOR: Leader{1:2:10} Tx{10, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} release 4194304b of static, Memory{0 dyn 0} ...restarting tablet 00000.031 II| TABLET_EXECUTOR: Leader{1:2:10} suiciding, Waste{2:0, 478b +(3, 191b), 9 trc, -191b acc} 00000.033 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 35 bytes, 35 total, blobs: { [1:2:1:1:28672:35:0] } 00000.033 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 258 bytes, 258 total, blobs: { [1:2:2:1:8192:84:0], [1:2:6:1:32768:124:0], [1:2:8:1:32768:50:0] } 00000.034 DD| TABLET_EXECUTOR: Leader{1:3:-} sending TEvGet batch 384 bytes, 384 total, blobs: { [1:2:5:1:12288:158:0], [1:2:3:1:24576:78:0], [1:2:4:1:24576:65:0], [1:2:9:1:24576:83:0] } 00000.035 II| TABLET_EXECUTOR: Leader{1:3:0} activating executor 00000.035 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 1, state Free, final id 0, final level 0 00000.035 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 2, state Free, final id 0, final level 0 00000.035 DD| TABLET_EXECUTOR: TGenCompactionStrategy CheckGeneration for 1 generation 3, state Free, final id 0, final level 0 00000.035 II| TABLET_EXECUTOR: LSnap{1:3, on 3:1, 239b, wait} done, Waste{2:0, 478b +(3, 191b), 9 trc} 00000.035 DD| TABLET_EXECUTOR: Leader{1:3:2} commited cookie 2 for step 1 ... 
checking rows 00000.036 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} queued, type NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows 00000.036 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 4194304b of static mem, Memory{4194304 dyn 0} 00000.036 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 1 -> retry Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.036 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} pin 0 (0 b) load 1 (55 b) 00000.036 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} took 8388608b of static mem, Memory{8388608 dyn 0} 00000.036 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} postponed, loading 1 pages, 55 bytes, newly pinned 0 pages, 0 bytes 00000.036 DD| TABLET_EXECUTOR: Leader{1:3:2} got result TEvResult{1 pages [1:2:5:1:12288:158:0] ok OK}, type 1 00000.036 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} activated 00000.037 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} hope 2 -> done Change{7, redo 0b alter 0b annex 0, ~{ } -{ }, 0 gb} 00000.037 DD| TABLET_EXECUTOR: Leader{1:3:2} Tx{1, NKikimr::NTabletFlatExecutor::NTestSuiteTFlatTableExecutor_LongTx::TTxCheckRows} release 8388608b of static, Memory{0 dyn 0} 00000.037 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.037 II| TABLET_EXECUTOR: Leader{1:3:2} suiciding, Waste{2:0, 478b +(0, 0b), 1 trc, -191b acc} 00000.037 NN| TABLET_SAUSAGECACHE: Poison cache serviced 2 reqs hit {2 91b} miss {0 0b} in-memory miss {0 0b} 00000.038 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.038 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {796b, 12} 00000.038 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.038 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.038 II| FAKE_ENV: DS.1 gone, left {717b, 5}, put {1117b, 11} 00000.038 II| FAKE_ENV: All BS storage groups are stopped 00000.038 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 0.000s 00000.038 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 126}, stopped |99.5%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] >> TTxDataShardReshuffleKMeansScan::BadRequest [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToPosting >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] >> test_public_api.py::TestSessionNotFoundOperations::test_session_pool [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_ok_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx >> test_public_api.py::TestSessionNotFoundOperations::test_can_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_cannot_commit_bad_tx [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit >> test_public_api.py::TestSessionNotFoundOperations::test_commit_successfully_after_success_commit 
[GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_invalid_keep_alive_example [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds >> test_public_api.py::TestSessionNotFoundOperations::test_describe_table_with_bounds [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types >> test_public_api.py::TestSessionNotFoundOperations::test_native_datetime_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestSessionNotFoundOperations::test_native_date_types [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled >> test_public_api.py::TestSessionNotFoundOperations::test_keep_in_cache_disabled [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 >> test_public_api.py::TestSessionNotFoundOperations::test_explicit_partitions_case_1 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_explict_partitions_case_2 [GOOD] >> test_public_api.py::TestSessionNotFoundOperations::test_simple_table_profile_settings [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_enabled >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] >> TTxDataShardReshuffleKMeansScan::MainToPosting [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToPostingWithOverlap |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_on_stop [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_drain.py::TestHive::test_drain_tablets [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToPostingWithOverlap [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToBuild >> TVersions::Wreck1Reverse [GOOD] >> TVersions::Wreck0 |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> DataShardStats::CollectKeySampleFollower [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/ut_stats/unittest >> DataShardStats::CollectKeySampleFollower [GOOD] Test command err: ... waiting for SysViewsRoster update finished 2026-01-04T03:01:32.549154Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:125: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, got undelivered to scheme cache: ActorUnknown 2026-01-04T03:01:32.774883Z node 1 :KQP_WORKLOAD_SERVICE WARN: helpers.h:73: [WorkloadService] [TCleanupTablesActor] ActorId: [1:315:2359], Scheduled retry for error: {
: Error: Retry LookupError for table .metadata/workload_manager/delayed_requests } 2026-01-04T03:01:32.775388Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=incorrect path status: LookupError; 2026-01-04T03:01:32.775439Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/workload_manager/classifiers/resource_pool_classifiers;error=incorrect path status: LookupError; 2026-01-04T03:01:33.274318Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:33.274473Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:33.379609Z node 1 :CONFIGS_DISPATCHER ERROR: configs_dispatcher.cpp:1374: Notification cookie mismatch for subscription [1:34:2081] 1767495689474810 != 1767495689474813 2026-01-04T03:01:33.411349Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:33.470634Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:33.617773Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions ... waiting for SysViewsRoster update finished (done) 2026-01-04T03:01:33.948287Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:01:33.962972Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:01:34.094424Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976715657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:34.174549Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828672, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvBoot 2026-01-04T03:01:34.175853Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3119: StateInit, received event# 268828673, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvRestored 2026-01-04T03:01:34.176317Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:885:2765] 2026-01-04T03:01:34.176662Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T03:01:34.224049Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3132: StateInactive, received event# 268828684, Sender [1:877:2759], Recipient [1:885:2765]: NKikimr::TEvTablet::TEvFollowerSyncComplete 2026-01-04T03:01:34.224959Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T03:01:34.225142Z node 1 :TX_DATASHARD DEBUG: 
datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T03:01:34.228361Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T03:01:34.228468Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T03:01:34.228537Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T03:01:34.231202Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T03:01:34.231425Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T03:01:34.231550Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:901:2765] in generation 1 2026-01-04T03:01:34.242634Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T03:01:34.281192Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T03:01:34.286065Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T03:01:34.286327Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:903:2775] 2026-01-04T03:01:34.286384Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T03:01:34.286427Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T03:01:34.286464Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T03:01:34.286692Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435072, Sender [1:885:2765], Recipient [1:885:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvProgressTransaction 2026-01-04T03:01:34.289903Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3178: StateWork, processing event TEvPrivate::TEvProgressTransaction 2026-01-04T03:01:34.294501Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T03:01:34.294681Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T03:01:34.294826Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T03:01:34.294872Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T03:01:34.294951Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T03:01:34.295006Z node 1 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T03:01:34.295043Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T03:01:34.295078Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037888 TxInFly 0 2026-01-04T03:01:34.295161Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 
72075186224037888 2026-01-04T03:01:34.295350Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269877761, Sender [1:888:2766], Recipient [1:885:2765]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T03:01:34.295396Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3189: StateWork, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T03:01:34.295463Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:884:2764], serverId# [1:888:2766], sessionId# [0:0:0] 2026-01-04T03:01:34.295978Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269549568, Sender [1:399:2398], Recipient [1:888:2766] 2026-01-04T03:01:34.296049Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3159: StateWork, processing event TEvDataShard::TEvProposeTransaction 2026-01-04T03:01:34.296188Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T03:01:34.296636Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit CheckSchemeTx 2026-01-04T03:01:34.296704Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976715657 ssId 72057594046644480 seqNo 2:1 2026-01-04T03:01:34.296805Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976715657 at tablet 72075186224037888 2026-01-04T03:01:34.296862Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is ExecutedNoMoreRestarts 2026-01-04T03:01:34.296905Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit CheckSchemeTx 2026-01-04T03:01:34.296946Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit StoreSchemeTx 2026-01-04T03:01:34.296981Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit StoreSchemeTx 2026-01-04T03:01:34.297296Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayCompleteNoMoreRestarts 2026-01-04T03:01:34.297394Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit StoreSchemeTx 2026-01-04T03:01:34.297433Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit FinishPropose 2026-01-04T03:01:34.297483Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 72075186224037888 on unit FinishPropose 2026-01-04T03:01:34.297556Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:281474976715657] at 72075186224037888 is DelayComplete 2026-01-04T03:01:34.297606Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:281474976715657] at 72075186224037888 executing on unit FinishPropose 2026-01-04T03:01:34.297649Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:281474976715657] at 72075186224037888 to execution unit WaitForPlan 2026-01-04T03:01:34.297684Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:281474976715657] at 
72075186224037888 on unit WaitForPlan 2026-01-04T03:01:34.297711Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1853: Operation [0:281474976715657] at 72075186224037888 is not ready to execute on unit WaitForPlan 2026-01-04T03:01:34.303289Z node 1 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 269746185, Sender [1:904:2776], Recipient [1:885:2765]: NKikimr::TEvTxProxySchemeCache::TEvWatchNotifyUpdated 2026-01-04T03:01:34.303384Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T03:01:34.314582Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T03:01:34.314698Z node 1 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1954: Complete execution for [0:281474 ... 46644480 FollowerId: 1 2026-01-04T03:06:25.521771Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [16:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-04T03:06:25.532223Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [16:924:2786]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-04T03:06:25.532294Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3312: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2026-01-04T03:06:26.269479Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [16:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-04T03:06:26.269701Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 0, tableId 38 2026-01-04T03:06:26.343753Z node 16 :TX_DATASHARD DEBUG: datashard__compaction.cpp:203: CompactionComplete of tablet# 72075186224037888, table# 1001, finished edge# 0, ts 1970-01-01T00:00:00.000000Z 2026-01-04T03:06:26.343815Z node 16 :TX_DATASHARD DEBUG: datashard__compaction.cpp:240: ReplyCompactionWaiters of tablet# 72075186224037888, table# 1001, finished edge# 0, front# 0 2026-01-04T03:06:26.344302Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 268828680, Sender [16:916:2782], Recipient [16:924:2786]: NKikimr::TEvTablet::TEvFUpdate 2026-01-04T03:06:26.344667Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 268828680, Sender [16:916:2782], Recipient [16:924:2786]: NKikimr::TEvTablet::TEvFUpdate 2026-01-04T03:06:26.345281Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 268828683, Sender [16:877:2759], Recipient [16:886:2765]: NKikimr::TEvTablet::TEvFollowerGcApplied 2026-01-04T03:06:26.355626Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [16:924:2786]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-04T03:06:26.355683Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3312: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2026-01-04T03:06:26.355836Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3473: TEvPeriodicTableStats from datashard 72075186224037888, FollowerId 1, tableId 38 2026-01-04T03:06:27.137150Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: 
StateWork, received event# 2146435073, Sender [0:0:0], Recipient [16:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvCleanupTransaction 2026-01-04T03:06:27.137215Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3179: StateWork, processing event TEvPrivate::TEvCleanupTransaction 2026-01-04T03:06:27.137281Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:214: No cleanup at 72075186224037888 outdated step 76000 last cleanup 0 2026-01-04T03:06:27.137326Z node 16 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T03:06:27.137376Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:385: Check unit PlanQueue at 72075186224037888 2026-01-04T03:06:27.137446Z node 16 :TX_DATASHARD TRACE: plan_queue_unit.cpp:59: TPlanQueueUnit at 72075186224037888 has no attached operations 2026-01-04T03:06:27.137505Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:403: Unit PlanQueue has no ready operations at 72075186224037888 2026-01-04T03:06:27.137652Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3153: StateWork, received event# 2146435079, Sender [0:0:0], Recipient [16:886:2765]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-04T03:06:27.148296Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 2146435079, Sender [0:0:0], Recipient [16:924:2786]: NKikimr::NDataShard::TDataShard::TEvPrivate::TEvPeriodicWakeup 2026-01-04T03:06:27.148353Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3312: StateWorkAsFollower, processing event TEvPrivate::TEvPeriodicWakeup 2026-01-04T03:06:28.069161Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553215, Sender [16:1636:3417], Recipient [16:924:2786]: NKikimrTxDataShard.TEvRead ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 KeysSize: 3 2026-01-04T03:06:28.069257Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3305: StateWorkAsFollower, processing event TEvDataShard::TEvRead 2026-01-04T03:06:28.069394Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 1 2026-01-04T03:06:28.069456Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2800: 72075186224037888 changed HEAD read to repeatable v2500/18446744073709551615 2026-01-04T03:06:28.069533Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CheckRead 2026-01-04T03:06:28.069646Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-04T03:06:28.069698Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CheckRead 2026-01-04T03:06:28.069759Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit BuildAndWaitDependencies 2026-01-04T03:06:28.069831Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit BuildAndWaitDependencies 2026-01-04T03:06:28.069870Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:483: Activated operation [0:5] at 72075186224037888 2026-01-04T03:06:28.069922Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 
72075186224037888 is Executed 2026-01-04T03:06:28.069943Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit BuildAndWaitDependencies 2026-01-04T03:06:28.069958Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit ExecuteRead 2026-01-04T03:06:28.069976Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2026-01-04T03:06:28.070085Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 1, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-04T03:06:28.070349Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Restart 2026-01-04T03:06:28.070381Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Restart at tablet# 72075186224037888 2026-01-04T03:06:28.070616Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2697: TTxReadViaPipeline execute: at tablet# 72075186224037888, FollowerId 1 2026-01-04T03:06:28.070641Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit ExecuteRead 2026-01-04T03:06:28.070698Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:1748: 72075186224037888 Execute read# 2, request: { ReadId: 0 TableId { OwnerId: 72057594046644480 TableId: 38 SchemaVersion: 1 } Columns: 1 ResultFormat: FORMAT_CELLVEC MaxRows: 1001 MaxBytes: 5242880 Reverse: false TotalRowsLimit: 1001 } 2026-01-04T03:06:28.070917Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2396: 72075186224037888 Complete read# {[16:1636:3417], 0} after executionsCount# 2 2026-01-04T03:06:28.070990Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2370: 72075186224037888 read iterator# {[16:1636:3417], 0} sends rowCount# 3, bytes# 48, quota rows left# 998, quota bytes left# 5242832, hasUnreadQueries# 0, total queries# 3, firstUnprocessed# 0 2026-01-04T03:06:28.071073Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2421: 72075186224037888 read iterator# {[16:1636:3417], 0} finished in read 2026-01-04T03:06:28.071141Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-04T03:06:28.071160Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit ExecuteRead 2026-01-04T03:06:28.071177Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1937: Add [0:5] at 72075186224037888 to execution unit CompletedOperations 2026-01-04T03:06:28.071194Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1848: Trying to execute [0:5] at 72075186224037888 on unit CompletedOperations 2026-01-04T03:06:28.071231Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1883: Execution status for [0:5] at 72075186224037888 is Executed 2026-01-04T03:06:28.071246Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1931: Advance execution plan for [0:5] at 72075186224037888 executing on unit CompletedOperations 2026-01-04T03:06:28.071269Z node 16 :TX_DATASHARD TRACE: datashard_pipeline.cpp:1943: Execution plan for [0:5] at 72075186224037888 has finished 2026-01-04T03:06:28.071310Z node 16 
:TX_DATASHARD TRACE: datashard__read_iterator.cpp:2932: TTxReadViaPipeline(69) Execute with status# Executed at tablet# 72075186224037888 2026-01-04T03:06:28.071407Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:2981: TTxReadViaPipeline(69) Complete: at tablet# 72075186224037888 2026-01-04T03:06:28.072300Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553219, Sender [16:1636:3417], Recipient [16:924:2786]: NKikimrTxDataShard.TEvReadCancel ReadId: 0 2026-01-04T03:06:28.072357Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3308: StateWorkAsFollower, processing event TEvDataShard::TEvReadCancel 2026-01-04T03:06:28.072423Z node 16 :TX_DATASHARD TRACE: datashard__read_iterator.cpp:3668: 72075186224037888 ReadCancel: { ReadId: 0 } { items { uint32_value: 42 } }, { items { uint32_value: 44 } }, { items { uint32_value: 46 } } TEST 9: EvGetTableStats(collectKeySample=false) after the collected key sample becomes invalid TEST Sending the EvGetTableStats message to the tablet 72075186224037888, tableId=38, collectKeySample=0, toFollower=1 2026-01-04T03:06:28.074047Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269877761, Sender [16:1639:3420], Recipient [16:924:2786]: NKikimr::TEvTabletPipe::TEvServerConnected 2026-01-04T03:06:28.074125Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3303: StateWorkAsFollower, processing event TEvTabletPipe::TEvServerConnected 2026-01-04T03:06:28.074192Z node 16 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at follower 1 tablet# 72075186224037888, clientId# [16:1638:3419], serverId# [16:1639:3420], sessionId# [0:0:0] 2026-01-04T03:06:28.074319Z node 16 :TX_DATASHARD TRACE: datashard_impl.h:3295: StateWorkAsFollower, received event# 269553160, Sender [16:829:2724], Recipient [16:924:2786]: NKikimrTxDataShard.TEvGetTableStats TableId: 38 CollectKeySample: false TEST Received the TEvGetTableStatsResult response from the tablet 72075186224037888, tableId=38, collectKeySample=0, toFollower=1 DatashardId: 72075186224037888 TableLocalId: 38 TableStats { InMemSize: 0 LastAccessTime: 80545 LastUpdateTime: 0 } TabletMetrics { CPU: 119 } FullStatsReady: false TableOwnerId: 72057594046644480 FollowerId: 1 >> TTxDataShardReshuffleKMeansScan::MainToBuild [GOOD] >> TTxDataShardReshuffleKMeansScan::MainToBuildWithOverlap |99.5%| [TM] {RESULT} ydb/core/tx/datashard/ut_stats/unittest |99.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/ut_stats/unittest |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/hive/py3test >> test_drain.py::TestHive::test_drain_tablets [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/hive/py3test |99.5%| [TA] $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |99.5%| [TA] {RESULT} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... results_accumulator.log} |99.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/hive/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [FAIL] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TTxDataShardReshuffleKMeansScan::MainToBuildWithOverlap [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToPosting |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/datashard/async_replication/py3test >> test_async_replication.py::TestAsyncReplication::test_async_replication[table_index_0__ASYNC-pk_types6-all_types6-index6---ASYNC] [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/datashard/async_replication/py3test |99.5%| [TA] $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... results_accumulator.log} >> TTxDataShardReshuffleKMeansScan::BuildToPosting [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToBuild >> test_public_api.py::TestBadSession::test_simple |99.5%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... results_accumulator.log} |99.5%| [TA] {RESULT} $(B)/ydb/tests/datashard/async_replication/test-results/py3test/{meta.json ... 
results_accumulator.log} >> TVersions::Wreck0 [GOOD] >> TVersions::Wreck0Reverse |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_disabled_enabled >> test_vdisks.py::TestTinyVDisks::test_enabled_disabled >> TTxDataShardReshuffleKMeansScan::BuildToBuild [GOOD] >> TTxDataShardReshuffleKMeansScan::BuildToBuildWithOverlap >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_discovery.py::TestMirror3DCDiscovery::test_mirror3dc_discovery_logic [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> TTxDataShardReshuffleKMeansScan::BuildToBuildWithOverlap [GOOD] >> TTxDataShardSampleKScan::BadRequest |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> TTxDataShardSampleKScan::BadRequest [GOOD] >> TTxDataShardSampleKScan::RunScan >> test_self_heal.py::TestEnableSelfHeal::test_replication >> TVersions::Wreck0Reverse [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TVersions::Wreck0Reverse [GOOD] Test command err: 2026-01-04T03:04:45.137859Z node 1 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1479: Bootstrap with config MemoryLimit: 488 AsyncQueueInFlyLimit: 19 InMemoryInFlyLimit: 1220 ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-04T03:04:45.138194Z node 1 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:1] 2026-01-04T03:04:45.138225Z node 1 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:1] owner [1:5:2052] 2026-01-04T03:04:45.138286Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [1:5:2052] cookie 1 class Online from cache [ ] already requested [ ] to request [ 1 2 3 ] 2026-01-04T03:04:45.138335Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 366B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for fetches #1 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #1 (done) Checking fetches#1 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] ... waiting for results #1 2026-01-04T03:04:45.138915Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 1 2 3 ] 2026-01-04T03:04:45.138966Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:1] owner [1:5:2052] class Online pages [ 1 2 3 ] cookie 1 2026-01-04T03:04:45.139002Z node 1 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 366B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... 
waiting for results #1 (done) Checking results#1 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ 1 2 3 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ 1 2 3 ] 2026-01-04T03:04:45.190279Z node 2 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1479: Bootstrap with config MemoryLimit: 488 AsyncQueueInFlyLimit: 19 InMemoryInFlyLimit: 1220 ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-04T03:04:45.190552Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:1] 2026-01-04T03:04:45.190578Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:1] owner [2:5:2052] 2026-01-04T03:04:45.190633Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [2:5:2052] cookie 1 class Online from cache [ ] already requested [ ] to request [ 1 2 3 ] 2026-01-04T03:04:45.190689Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 366B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 2026-01-04T03:04:45.190776Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:2] 2026-01-04T03:04:45.190799Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:2] owner [2:5:2052] 2026-01-04T03:04:45.190831Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:2] owner [2:5:2052] cookie 2 class Online from cache [ ] already requested [ ] to request [ 4 5 ] 2026-01-04T03:04:45.190888Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 610B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 2026-01-04T03:04:45.190958Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:1] owner [2:6:2053] 2026-01-04T03:04:45.190991Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [2:6:2053] cookie 3 class Online from cache [ ] already requested [ ] to request [ 5 6 ] 2026-01-04T03:04:45.191028Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 854B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-04T03:04:45.191095Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:2] owner [2:6:2053] 2026-01-04T03:04:45.191132Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:2] owner [2:6:2053] cookie 4 class Online from cache [ ] already requested [ ] to request [ 6 7 ] 2026-01-04T03:04:45.191171Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 1.07KiB EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... 
waiting for fetches #4 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 ... waiting for fetches #4 (done) Checking fetches#4 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 5 6 ] PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 6 7 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 5 6 ] PageCollection: [1:0:256:0:0:0:1] Cookie: 30 Pages: [ 1 2 3 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 20 Pages: [ 6 7 ] ... waiting for results #4 2026-01-04T03:04:45.191417Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status ERROR pages [ 1 2 3 ] 2026-01-04T03:04:45.191440Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1076: Drop page collection [1:0:256:0:0:0:1] error ERROR 2026-01-04T03:04:45.191476Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1020: Send page collection error [1:0:256:0:0:0:1] owner [2:5:2052] class Online error ERROR cookie 1 2026-01-04T03:04:45.191510Z node 2 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:1020: Send page collection error [1:0:256:0:0:0:1] owner [2:6:2053] class Online error ERROR cookie 3 2026-01-04T03:04:45.191554Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 732B EvictedInMemoryBytes: 0B ... waiting for results #4 (done) Checking results#4 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ ] PageCollection: [1:0:256:0:0:0:1] Cookie: 3 Pages: [ ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 1 Pages: [ ] PageCollection: [1:0:256:0:0:0:1] Cookie: 3 Pages: [ ] 2026-01-04T03:04:45.191719Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 5 6 ] 2026-01-04T03:04:45.191746Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 488B EvictedInMemoryBytes: 0B Checking results#4 Expected: Actual: ... waiting for results #4 2026-01-04T03:04:45.202085Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:2] status OK pages [ 6 7 ] 2026-01-04T03:04:45.202140Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:2] owner [2:6:2053] class Online pages [ 6 7 ] cookie 4 2026-01-04T03:04:45.202182Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 244B Passive: 0B LoadInFly: 244B EvictedInMemoryBytes: 0B 2026-01-04T03:04:45.202235Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:2] status OK pages [ 4 5 ] 2026-01-04T03:04:45.202254Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:998: Send page collection result [1:0:256:0:0:0:2] owner [2:5:2052] class Online pages [ 4 5 ] cookie 2 2026-01-04T03:04:45.202275Z node 2 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 488B Passive: 0B LoadInFly: 0B EvictedInMemoryBytes: 0B ... 
waiting for results #4 (done) Checking results#4 Expected: PageCollection: [1:0:256:0:0:0:2] Cookie: 2 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 4 Pages: [ 6 7 ] Actual: PageCollection: [1:0:256:0:0:0:2] Cookie: 2 Pages: [ 4 5 ] PageCollection: [1:0:256:0:0:0:2] Cookie: 4 Pages: [ 6 7 ] 2026-01-04T03:04:45.254815Z node 3 :TABLET_SAUSAGECACHE NOTICE: shared_sausagecache.cpp:1479: Bootstrap with config MemoryLimit: 488 AsyncQueueInFlyLimit: 19 InMemoryInFlyLimit: 1220 ... waiting for NKikimr::NSharedCache::TEvRequest 2026-01-04T03:04:45.255098Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:1] 2026-01-04T03:04:45.255131Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:1] owner [3:5:2052] 2026-01-04T03:04:45.255186Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:1] owner [3:5:2052] cookie 1 class AsyncLoad from cache [ ] already requested [ ] to request [ 1 2 3 4 5 ] 2026-01-04T03:04:45.255212Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:573: Request page collection [1:0:256:0:0:0:1] async queue pages [ 1 2 ] 2026-01-04T03:04:45.255279Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 244B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) ... waiting for NKikimr::NSharedCache::TEvRequest ... blocking NKikimr::NTabletFlatExecutor::NBlockIO::TEvFetch from SAUSAGE_CACHE to SAUSAGE_BIO_A cookie 0 2026-01-04T03:04:45.255379Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:833: Add page collection [1:0:256:0:0:0:2] 2026-01-04T03:04:45.255406Z node 3 :TABLET_SAUSAGECACHE DEBUG: shared_sausagecache.cpp:281: Add page collection [1:0:256:0:0:0:2] owner [3:6:2053] 2026-01-04T03:04:45.255450Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:494: Request page collection [1:0:256:0:0:0:2] owner [3:6:2053] cookie 2 class AsyncLoad from cache [ ] already requested [ ] to request [ 1 2 3 ] 2026-01-04T03:04:45.255481Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 0B Passive: 0B LoadInFly: 244B EvictedInMemoryBytes: 0B ... waiting for NKikimr::NSharedCache::TEvRequest (done) Checking fetches#2 Expected: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 1 2 ] Actual: PageCollection: [1:0:256:0:0:0:1] Cookie: 20 Pages: [ 1 2 ] ... waiting for fetches #2 2026-01-04T03:04:45.255612Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:733: Receive page collection [1:0:256:0:0:0:1] status OK pages [ 1 2 ] 2026-01-04T03:04:45.255668Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:573: Request page collection [1:0:256:0:0:0:1] async queue pages [ 3 4 ] 2026-01-04T03:04:45.255731Z node 3 :TABLET_SAUSAGECACHE TRACE: shared_sausagecache.cpp:234: GC has finished with Limit: 488B Active: 244B Passive: 0B LoadInFly: 244B EvictedI ... 
00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.015 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.016 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 102443b} miss {0 0b} in-memory miss {0 0b} 00000.016 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.016 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.016 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {751b, 11} 00000.016 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103970b, 10} 00000.016 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.016 II| FAKE_ENV: All BS storage groups are stopped 00000.016 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.016 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:04:48.640904Z 00000.007 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.008 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 ... blocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A cookie 0 00000.052 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled ... unblocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A 00000.053 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.053 NN| TABLET_SAUSAGECACHE: Poison cache serviced 11 reqs hit {18 513007b} miss {0 0b} in-memory miss {0 0b} 00000.054 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.054 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {2093b, 23} 00000.054 II| FAKE_ENV: DS.1 gone, left {774b, 4}, put {210604b, 21} 00000.054 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {205178b, 4} 00000.054 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {102690b, 4} 00000.054 II| FAKE_ENV: All BS storage groups are stopped 00000.054 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 15.00s 00000.054 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:04:48.700041Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.034 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.035 NN| TABLET_SAUSAGECACHE: Poison cache serviced 3 reqs hit {3 307329b} miss {0 0b} in-memory miss {0 0b} 00000.035 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.035 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1828b, 23} 00000.035 II| FAKE_ENV: DS.1 gone, left {1247b, 3}, put {311467b, 22} 00000.035 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.035 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.035 II| FAKE_ENV: All BS storage groups are stopped 00000.035 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.035 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:04:48.740217Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 
262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.031 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 5 actors 00000.032 NN| TABLET_SAUSAGECACHE: Poison cache serviced 4 reqs hit {8 307836b} miss {0 0b} in-memory miss {0 0b} 00000.032 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.032 II| FAKE_ENV: DS.0 gone, left {57b, 2}, put {1436b, 31} 00000.032 II| FAKE_ENV: DS.1 gone, left {629b, 3}, put {310476b, 16} 00000.033 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.033 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.033 II| FAKE_ENV: All BS storage groups are stopped 00000.033 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.033 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:04:48.779352Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.008 II| FAKE_ENV: Starting storage for BS group 2 00000.008 II| FAKE_ENV: Starting storage for BS group 3 00000.028 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.029 NN| TABLET_SAUSAGECACHE: Poison cache serviced 2 reqs hit {2 194646b} miss {0 0b} in-memory miss {0 0b} 00000.029 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.030 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1567b, 23} 00000.030 II| FAKE_ENV: DS.1 gone, left {529b, 3}, put {197610b, 21} 00000.030 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.030 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.030 II| FAKE_ENV: All BS storage groups are stopped 00000.030 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.030 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:04:48.817110Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.010 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.011 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.011 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.011 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {326b, 7} 00000.011 II| FAKE_ENV: DS.1 gone, left {418b, 4}, put {453b, 5} 00000.011 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.011 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.011 II| FAKE_ENV: All BS storage groups are stopped 00000.011 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.011 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:04:48.833341Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting 
storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 ... blocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A cookie 0 00000.043 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled ... unblocking NKikimr::TEvBlobStorage::TEvCollectGarbage from FLAT_EXECUTOR to FAKE_ENV_A 00000.043 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.044 NN| TABLET_SAUSAGECACHE: Poison cache serviced 6 reqs hit {8 410030b} miss {0 0b} in-memory miss {0 0b} 00000.044 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.044 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1492b, 23} 00000.044 II| FAKE_ENV: DS.1 gone, left {504b, 4}, put {310786b, 20} 00000.045 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.045 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.045 II| FAKE_ENV: All BS storage groups are stopped 00000.045 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 15.00s 00000.045 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:04:48.885010Z 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.017 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.018 NN| TABLET_SAUSAGECACHE: Poison cache serviced 1 reqs hit {1 102443b} miss {0 0b} in-memory miss {0 0b} 00000.018 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.018 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {753b, 11} 00000.018 II| FAKE_ENV: DS.1 gone, left {541b, 3}, put {103970b, 10} 00000.018 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.018 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.018 II| FAKE_ENV: All BS storage groups are stopped 00000.018 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.018 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 15}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:04:48.908544Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 ... blocking NKikimr::TEvBlobStorage::TEvCollectGarbageResult from FAKE_ENV_A to FLAT_EXECUTOR cookie 0 00000.044 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled ... unblocking NKikimr::TEvBlobStorage::TEvCollectGarbageResult from FAKE_ENV_A to FLAT_EXECUTOR ... waiting for NKikimr::TEvBlobStorage::TEvCollectGarbageResult ... 
waiting for NKikimr::TEvBlobStorage::TEvCollectGarbageResult (done) 00000.056 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.057 NN| TABLET_SAUSAGECACHE: Poison cache serviced 5 reqs hit {8 205278b} miss {0 0b} in-memory miss {0 0b} 00000.057 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.057 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {1353b, 17} 00000.057 II| FAKE_ENV: DS.1 gone, left {774b, 4}, put {105547b, 14} 00000.057 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {102560b, 2} 00000.057 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {116b, 2} 00000.057 II| FAKE_ENV: All BS storage groups are stopped 00000.057 II| FAKE_ENV: Model stopped, hosted 4 actors, spent 15.00s 00000.057 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:04:48.971545Z 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.229 II| TABLET_SAUSAGECACHE: Wakeup DoGCScheduled 00000.240 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.241 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.241 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.241 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {501b, 9} 00000.241 II| FAKE_ENV: DS.1 gone, left {425b, 4}, put {460b, 5} 00000.241 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.241 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.241 II| FAKE_ENV: All BS storage groups are stopped 00000.241 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 17.71s 00000.241 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 16}, stopped |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3] [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestBadSession::test_simple [GOOD] >> TTxDataShardSampleKScan::RunScan [GOOD] >> TTxDataShardSampleKScan::SkipForeign >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_pdisk_slot_size_in_units >> test_public_api.py::TestDriverCanRecover::test_driver_recovery >> TTxDataShardSampleKScan::SkipForeign [GOOD] >> TTxDataShardValidateUniqueIndexScan::BadRequest >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_self_heal.py::TestEnableSelfHeal::test_replication [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_pdisk_slot_size_in_units [GOOD] >> TTxDataShardValidateUniqueIndexScan::BadRequest [GOOD] >> TTxDataShardValidateUniqueIndexScan::RunScan |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] >> 
test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[none] [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> test_public_api.py::TestDriverCanRecover::test_driver_recovery [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_pdisk_slot_size_in_units [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] >> TTxDataShardValidateUniqueIndexScan::RunScan [GOOD] >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tx/datashard/build_index/ut/unittest >> TTxDataShardValidateUniqueIndexScan::RunScan [GOOD] Test command err: 2026-01-04T03:01:38.804090Z node 1 :METADATA_PROVIDER WARN: log.cpp:841: fline=table_exists.cpp:54;actor=TTableExistsActor;event=undelivered;self_id=[1:7591336222152504854:2074];send_to=[0:7307199536658146131:7762515]; 2026-01-04T03:01:38.804479Z node 1 :METADATA_PROVIDER ERROR: log.cpp:841: fline=accessor_snapshot_base.cpp:71;action=cannot detect path existence;path=//Root/.metadata/initialization/migrations;error=scheme_cache_undelivered_message; 2026-01-04T03:01:39.152300Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: LookupError, path: Root/.metadata/script_executions 2026-01-04T03:01:39.162895Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Unknown -> Disconnected 2026-01-04T03:01:39.163007Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Disconnected -> Connecting 2026-01-04T03:01:39.218855Z node 1 :HIVE WARN: node_info.cpp:25: HIVE#72057594037968897 Node(1, (0,0,0,0)) VolatileState: Connecting -> Connected 2026-01-04T03:01:39.298055Z node 1 :IMPORT WARN: schemeshard_import.cpp:440: Table profiles were not loaded 2026-01-04T03:01:39.319084Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpAlterSubDomain, opId: 1:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_alter_subdomain.cpp:311) 2026-01-04T03:01:39.361643Z node 1 :KQP_PROXY WARN: kqp_finalize_script_service.cpp:137: [ScriptExecutions] [TKqpFinalizeScriptService] Failed to check script execution tables existence, scheme status: PathErrorUnknown, path: Root/.metadata/script_executions 2026-01-04T03:01:39.385890Z node 1 :FLAT_TX_SCHEMESHARD WARN: schemeshard__operation.cpp:183: Operation part 
proposed ok, but propose itself is undo unsafe, suboperation type: ESchemeOpCreateTable, opId: 281474976710657:0, at schemeshard: 72057594046644480, first GetDB called at: (GetDB first called at ydb/core/tx/schemeshard/schemeshard__operation_create_table.cpp:689) 2026-01-04T03:01:39.422015Z node 1 :TX_DATASHARD INFO: datashard.cpp:374: TDataShard::OnActivateExecutor: tablet 72075186224037888 actor [1:7591336226447472914:2283] 2026-01-04T03:01:39.422313Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:661: TxInitSchema.Execute 2026-01-04T03:01:39.436847Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:727: TxInitSchema.Complete 2026-01-04T03:01:39.436934Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:50: TDataShard::TTxInit::Execute 2026-01-04T03:01:39.441839Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1324: LoadChangeRecords: QueueSize: 0, at tablet: 72075186224037888 2026-01-04T03:01:39.441920Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1380: LoadLockChangeRecords at tablet: 72075186224037888 2026-01-04T03:01:39.441948Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1429: LoadChangeRecordCommits at tablet: 72075186224037888 2026-01-04T03:01:39.444219Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:94: TDataShard::TTxInit::Complete 2026-01-04T03:01:39.444292Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:102: TDataShard::TTxInitRestored::Execute 2026-01-04T03:01:39.444329Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:142: DataShard 72075186224037888 persisting started state actor id [1:7591336226447472929:2283] in generation 1 2026-01-04T03:01:39.446801Z node 1 :TX_DATASHARD DEBUG: datashard__init.cpp:151: TDataShard::TTxInitRestored::Complete 2026-01-04T03:01:39.484811Z node 1 :TX_DATASHARD INFO: datashard.cpp:418: Switched to work state WaitScheme tabletId 72075186224037888 2026-01-04T03:01:39.485678Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:458: 72075186224037888 not sending time cast registration request in state WaitScheme: missing processing params 2026-01-04T03:01:39.485756Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1259: Change sender created: at tablet: 72075186224037888, actorId: [1:7591336226447472931:2284] 2026-01-04T03:01:39.485765Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T03:01:39.485778Z node 1 :TX_DATASHARD INFO: datashard.cpp:1281: Cannot activate change sender: at tablet: 72075186224037888, state: WaitScheme 2026-01-04T03:01:39.485788Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T03:01:39.486801Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7591336226447472912:2495], serverId# [1:7591336226447472916:2496], sessionId# [0:0:0] 2026-01-04T03:01:39.486925Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:96: TTxCheckInReadSets::Execute at 72075186224037888 2026-01-04T03:01:39.487018Z node 1 :TX_DATASHARD DEBUG: datashard__cleanup_in_rs.cpp:139: TTxCheckInReadSets::Complete found 0 read sets to remove in 72075186224037888 2026-01-04T03:01:39.487062Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T03:01:39.487076Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 0 2026-01-04T03:01:39.487147Z node 1 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 
72075186224037888 TxInFly 0 2026-01-04T03:01:39.487176Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T03:01:39.487214Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:33: TTxProposeTransactionBase::Execute at 72075186224037888 2026-01-04T03:01:39.487632Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:133: Propose scheme transaction at tablet 72075186224037888 txId 281474976710657 ssId 72057594046644480 seqNo 2:1 2026-01-04T03:01:39.488208Z node 1 :TX_DATASHARD DEBUG: check_scheme_tx_unit.cpp:221: Prepared scheme transaction txId 281474976710657 at tablet 72075186224037888 2026-01-04T03:01:39.489489Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T03:01:39.489634Z node 1 :TX_DATASHARD DEBUG: datashard__propose_tx_base.cpp:152: TTxProposeTransactionBase::Complete at 72075186224037888 2026-01-04T03:01:39.489707Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:468: 72075186224037888 not sending time cast registration request in state WaitScheme 2026-01-04T03:01:39.492739Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037888, clientId# [1:7591336226447472946:2513], serverId# [1:7591336226447472947:2514], sessionId# [0:0:0] 2026-01-04T03:01:39.498025Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:69: Planned transaction txId 281474976710657 at step 1767495699536 at tablet 72075186224037888 { Transactions { TxId: 281474976710657 AckTo { RawX1: 0 RawX2: 0 } } Step: 1767495699536 MediatorID: 72057594046382081 TabletID: 72075186224037888 } 2026-01-04T03:01:39.498065Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T03:01:39.498224Z node 1 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T03:01:39.498347Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T03:01:39.498366Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 active 0 active planned 0 immediate 0 planned 1 2026-01-04T03:01:39.498404Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:333: Found ready operation [1767495699536:281474976710657] in PlanQueue unit at 72075186224037888 2026-01-04T03:01:39.498729Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:634: LoadTxDetails at 72075186224037888 loaded tx from db 1767495699536:281474976710657 keys extracted: 0 2026-01-04T03:01:39.498893Z node 1 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 1 active planned 1 immediate 0 planned 1 2026-01-04T03:01:39.498990Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037888 2026-01-04T03:01:39.499099Z node 1 :TX_DATASHARD INFO: create_table_unit.cpp:69: Trying to CREATE TABLE at 72075186224037888 tableId# [OwnerId: 72057594046644480, LocalPathId: 38] schema version# 1 2026-01-04T03:01:39.503185Z node 1 :TX_DATASHARD INFO: datashard.cpp:476: Send registration request to time cast Ready tabletId 72075186224037888 mediators count is 1 coordinators count is 1 buckets per mediator 2 2026-01-04T03:01:39.504683Z node 1 
:TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037888 (dry run) active 0 active planned 0 immediate 0 planned 0 2026-01-04T03:01:39.506019Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3772: Got TEvMediatorTimecast::TEvRegisterTabletResult at 72075186224037888 time 1767495699535 2026-01-04T03:01:39.506037Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T03:01:39.506068Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037888 coordinator 72057594046316545 last step 0 next step 1767495699543 2026-01-04T03:01:39.506112Z node 1 :TX_DATASHARD DEBUG: datashard__plan_step.cpp:98: Sending '{TEvPlanStepAccepted TabletId# 72075186224037888 step# 1767495699536} 2026-01-04T03:01:39.506150Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T03:01:39.506206Z node 1 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037888 2026-01-04T03:01:39.506222Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037888 2026-01-04T03:01:39.506234Z node 1 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037888 2026-01-04T03:01:39.506278Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [1767495699536 : 281474976710657] from 72075186224037888 at tablet 72075186224037888 send result to client [1:7591336222152505161:2143], exec latency: 5 ms, propose latency: 7 ms 2026-01-04T03:01:39.506299Z node 1 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037888 Sending notify to schemeshard 72057594046644480 txId 281474976710657 state Ready TxInFly 0 2026-01-04T03:01:39.506350Z node 1 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037888 2026-01-04T03:01:39.511139Z node 1 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976710657 datashard 72075186224037888 state Ready 2026-01-04T03:01:39.511197Z node 1 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037888 Got TEvSchemaChangedResult from SS at 72075186224037888 2026-01-04T03:01:39.809877Z node 1 :TX_CONVEYOR ERROR: log.cpp:841: fline=service.h:53;problem=unexpected event for task executor;ev_type=NActors::TEvents::TEvWakeup; 2026-01-04T03:01:41.985858Z node 1 :KQP_WORKLOAD_SERVICE WARN: scheme_actors.cpp:225: [WorkloadService] [TPoolFetcherActor] ActorId: [1:7591336235037407629:2306], DatabaseId: /Root, PoolId: default, Failed to fetch pool info, NOT_FOUND, issues: {
: Error: Resource pool default not found or you don't have access permissions } 2026-01-04T03:01:41.985979Z node 1 :KQP_WORKLOAD_SERVICE WARN: kqp_workload_service.cpp:2 ... _DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T03:07:31.560754Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T03:07:31.560820Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:1264: Trying to activate change sender: at tablet: 72075186224037889 2026-01-04T03:07:31.560911Z node 50 :TX_DATASHARD INFO: datashard.cpp:1302: Change sender activated: at tablet: 72075186224037889 2026-01-04T03:07:31.561023Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:810: Complete [3000 : 281474976715662] from 72075186224037889 at tablet 72075186224037889 send result to client [50:400:2399], exec latency: 0 ms, propose latency: 0 ms 2026-01-04T03:07:31.561121Z node 50 :TX_DATASHARD INFO: datashard.cpp:1599: 72075186224037889 Sending notify to schemeshard 72057594046644480 txId 281474976715662 state Ready TxInFly 0 2026-01-04T03:07:31.561300Z node 50 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T03:07:31.563362Z node 50 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037889 2026-01-04T03:07:31.563581Z node 50 :TX_DATASHARD DEBUG: datashard_subdomain_path_id.cpp:180: Discovered subdomain [OwnerId: 72057594046644480, LocalPathId: 1] state, outOfSpace = 0 at datashard 72075186224037888 2026-01-04T03:07:31.564895Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:3790: Got TEvMediatorTimecast::TEvSubscribeReadStepResult at 72075186224037889 coordinator 72057594046316545 last step 0 next step 3000 2026-01-04T03:07:31.566321Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:2965: Handle TEvSchemaChangedResult 281474976715662 datashard 72075186224037889 state Ready 2026-01-04T03:07:31.566429Z node 50 :TX_DATASHARD DEBUG: datashard__schema_changed.cpp:22: 72075186224037889 Got TEvSchemaChangedResult from SS at 72075186224037889 2026-01-04T03:07:31.574995Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [50:1175:2968], serverId# [50:1176:2969], sessionId# [0:0:0] 2026-01-04T03:07:31.575308Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:246: Starting TValidateUniqueIndexScan TabletId: 72075186224037889 Id: 1 TabletId: 72075186224037889 OwnerId: 72057594046644480 PathId: 43 IndexColumns: "key_part1" IndexColumns: "key_part2" SeqNoGeneration: 42 SeqNoRound: 45 2026-01-04T03:07:31.575438Z node 50 :BUILD_INDEX INFO: unique_index.cpp:51: Create TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-04T03:07:31.576104Z node 50 :BUILD_INDEX INFO: unique_index.cpp:58: Prepare TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-04T03:07:31.576200Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:63: Seek 0 TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-04T03:07:31.576330Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:134: Exhausted TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-04T03:07:31.576506Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:124: Done TValidateUniqueIndexScan Id: 1 Status: DONE Issues: Id: 1 TabletId: 72075186224037889 Status: DONE RequestSeqNoGeneration: 42 RequestSeqNoRound: 45 MeteringStats { ReadRows: 0 
ReadBytes: 0 } 2026-01-04T03:07:31.577279Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T03:07:31.577354Z node 50 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-04T03:07:31.577413Z node 50 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-04T03:07:31.577507Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T03:07:31.721392Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [50:1197:2983], serverId# [50:1198:2984], sessionId# [0:0:0] 2026-01-04T03:07:31.722019Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:2] at 72075186224037889 2026-01-04T03:07:31.722212Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:2] at 72075186224037889, row count=2 2026-01-04T03:07:31.733508Z node 50 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T03:07:31.740111Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [50:1204:2989], serverId# [50:1205:2990], sessionId# [0:0:0] 2026-01-04T03:07:31.740428Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:246: Starting TValidateUniqueIndexScan TabletId: 72075186224037889 Id: 1 TabletId: 72075186224037889 OwnerId: 72057594046644480 PathId: 43 IndexColumns: "key_part1" IndexColumns: "key_part2" SeqNoGeneration: 42 SeqNoRound: 46 2026-01-04T03:07:31.740546Z node 50 :BUILD_INDEX INFO: unique_index.cpp:51: Create TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-04T03:07:31.741242Z node 50 :BUILD_INDEX INFO: unique_index.cpp:58: Prepare TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-04T03:07:31.741333Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:63: Seek 0 TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-04T03:07:31.741744Z node 50 :BUILD_INDEX ERROR: unique_index.cpp:126: Failed TValidateUniqueIndexScan Id: 1 Status: BUILD_ERROR Issues: {
: Error: Duplicate key found: (key_part1=1, key_part2=1) } Id: 1 TabletId: 72075186224037889 Status: BUILD_ERROR Issues { message: "Duplicate key found: (key_part1=1, key_part2=1)" severity: 1 } RequestSeqNoGeneration: 42 RequestSeqNoRound: 46 MeteringStats { ReadRows: 2 ReadBytes: 42 } 2026-01-04T03:07:31.742245Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T03:07:31.742319Z node 50 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-04T03:07:31.742409Z node 50 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-04T03:07:31.742494Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T03:07:31.966521Z node 50 :TX_DATASHARD DEBUG: datashard__read_iterator.cpp:2672: 72075186224037889 Acquired lock# 281474976715664, counter# 0 for [OwnerId: 72057594046644480, LocalPathId: 43] 2026-01-04T03:07:31.973833Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:4] at 72075186224037889 2026-01-04T03:07:31.974087Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:4] at 72075186224037889, row count=2 2026-01-04T03:07:31.985443Z node 50 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T03:07:32.128999Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:5] at 72075186224037889 2026-01-04T03:07:32.129187Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:5] at 72075186224037889, row count=2 2026-01-04T03:07:32.140645Z node 50 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T03:07:32.147770Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [50:1254:3021], serverId# [50:1255:3022], sessionId# [0:0:0] 2026-01-04T03:07:32.148145Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:246: Starting TValidateUniqueIndexScan TabletId: 72075186224037889 Id: 1 TabletId: 72075186224037889 OwnerId: 72057594046644480 PathId: 43 IndexColumns: "key_part1" IndexColumns: "key_part2" SeqNoGeneration: 42 SeqNoRound: 47 2026-01-04T03:07:32.148296Z node 50 :BUILD_INDEX INFO: unique_index.cpp:51: Create TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-04T03:07:32.149217Z node 50 :BUILD_INDEX INFO: unique_index.cpp:58: Prepare TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-04T03:07:32.149333Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:63: Seek 0 TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-04T03:07:32.149562Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:134: Exhausted TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-04T03:07:32.149832Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:124: Done TValidateUniqueIndexScan Id: 1 Status: DONE Issues: Id: 1 TabletId: 72075186224037889 Status: DONE RequestSeqNoGeneration: 42 RequestSeqNoRound: 47 MeteringStats { ReadRows: 2 ReadBytes: 37 } 2026-01-04T03:07:32.150278Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T03:07:32.150358Z node 50 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: 
GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-04T03:07:32.150446Z node 50 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-04T03:07:32.150529Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 2026-01-04T03:07:32.282157Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:262: Executing write operation for [0:6] at 72075186224037889 2026-01-04T03:07:32.282332Z node 50 :TX_DATASHARD DEBUG: execute_write_unit.cpp:465: Executed write operation for [0:6] at 72075186224037889, row count=3 2026-01-04T03:07:32.293519Z node 50 :TX_DATASHARD DEBUG: cdc_stream_heartbeat.cpp:87: [CdcStreamHeartbeat] Emit heartbeats: at tablet# 72075186224037889 2026-01-04T03:07:32.299785Z node 50 :TX_DATASHARD DEBUG: datashard.cpp:3740: Server connected at leader tablet# 72075186224037889, clientId# [50:1280:3039], serverId# [50:1281:3040], sessionId# [0:0:0] 2026-01-04T03:07:32.300125Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:246: Starting TValidateUniqueIndexScan TabletId: 72075186224037889 Id: 1 TabletId: 72075186224037889 OwnerId: 72057594046644480 PathId: 43 IndexColumns: "key_part1" IndexColumns: "key_part2" SeqNoGeneration: 42 SeqNoRound: 48 2026-01-04T03:07:32.300256Z node 50 :BUILD_INDEX INFO: unique_index.cpp:51: Create TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-04T03:07:32.300914Z node 50 :BUILD_INDEX INFO: unique_index.cpp:58: Prepare TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-04T03:07:32.301016Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:63: Seek 0 TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-04T03:07:32.301203Z node 50 :BUILD_INDEX TRACE: unique_index.cpp:134: Exhausted TValidateUniqueIndexScan Id: 1 Status: INVALID Issues: 2026-01-04T03:07:32.301419Z node 50 :BUILD_INDEX NOTICE: unique_index.cpp:124: Done TValidateUniqueIndexScan Id: 1 Status: DONE Issues: Id: 1 TabletId: 72075186224037889 Status: DONE RequestSeqNoGeneration: 42 RequestSeqNoRound: 48 MeteringStats { ReadRows: 5 ReadBytes: 45 } 2026-01-04T03:07:32.301947Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:16: TTxProgressTransaction::Execute at 72075186224037889 2026-01-04T03:07:32.302037Z node 50 :TX_DATASHARD DEBUG: datashard_pipeline.cpp:272: GetNextActiveOp at 72075186224037889 active 0 active planned 0 immediate 0 planned 0 2026-01-04T03:07:32.302112Z node 50 :TX_DATASHARD INFO: datashard__progress_tx.cpp:48: No tx to execute at 72075186224037889 TxInFly 0 2026-01-04T03:07:32.302191Z node 50 :TX_DATASHARD DEBUG: datashard__progress_tx.cpp:119: TTxProgressTransaction::Complete at 72075186224037889 |99.5%| [TM] {RESULT} ydb/core/tx/datashard/build_index/ut/unittest |99.5%| [TM] {BAZEL_UPLOAD} ydb/core/tx/datashard/build_index/ut/unittest |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[mirror-3-dc] [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> 
test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcMax::test_no_degraded_groups_after_shutdown [GOOD] |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.5%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_group_size_in_units |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_tablet_channel_migration.py::TestChannelsOps::test_when_write_and_change_tablet_channel_then_can_read_from_tablet [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestSelectAfterDropWithRepetitions::test_select_on_dropped_table_unsuccessful[10] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_group_size_in_units [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_fixtures[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_slot_size_in_units.py::TestPDiskSlotSizeInUnits::test_change_group_size_in_units [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockKeep::test_restart_as_much_as_can [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TTopicWriterTests::TestTopicWriterParams_Format_NewlineDelimited [GOOD] >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/public/lib/ydb_cli/topic/ut/unittest >> TTopicWriterTests::TestTopicWriterParams_Format_Concatenated [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/public/lib/ydb_cli/topic/ut/unittest |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TA] $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.6%| [TA] {RESULT} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TA] {BAZEL_UPLOAD} $(B)/ydb/public/lib/ydb_cli/topic/ut/test-results/unittest/{meta.json ... 
results_accumulator.log} |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] >> test_public_api.py::TestMetaDataInvalidation::test_invalidation_success [GOOD] >> TPart::State [GOOD] >> TPart::Trivials [GOOD] >> TPart::WreckPart |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_pdisk_format_info.py::TestPDiskInfo::test_read_disk_state [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TPart::WreckPart [GOOD] >> TPart::WreckPartColumnGroups |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> TPart::WreckPartColumnGroups [GOOD] >> TPart::PageFailEnvColumnGroups >> TPart::PageFailEnvColumnGroups [GOOD] >> TPart::Versions [GOOD] >> TPartBtreeIndexIteration::NoNodes |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TPartBtreeIndexIteration::NoNodes [GOOD] >> TPartBtreeIndexIteration::NoNodes_Groups |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TPartBtreeIndexIteration::NoNodes_Groups [GOOD] >> TPartBtreeIndexIteration::NoNodes_History >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [FAIL] >> TPartBtreeIndexIteration::NoNodes_History [GOOD] >> TPartBtreeIndexIteration::OneNode >> 
test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> TPartBtreeIndexIteration::OneNode [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups >> Backup::GenerationDirs >> Backup::GenerationDirs [GOOD] >> Backup::SnapshotIOError >> Backup::SnapshotIOError [GOOD] >> Backup::EmptyData [GOOD] >> Backup::SnapshotData >> Backup::SnapshotData [GOOD] >> Backup::SnapshotLargeData >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok >> TPartBtreeIndexIteration::OneNode_Groups [GOOD] >> TPartBtreeIndexIteration::OneNode_History >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown >> TPartBtreeIndexIteration::OneNode_History [GOOD] >> TPartBtreeIndexIteration::OneNode_Slices >> TPartBtreeIndexIteration::OneNode_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_Slices >> test_vdisks.py::TestTinyVDisks::test_enabled [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_History_Slices |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_enabled [GOOD] >> test_public_api.py::TestJsonExample::test_json_unexpected_failure |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TPartBtreeIndexIteration::OneNode_History_Slices [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_crud.py::TestManySelectsInRow::test_selects_in_row_success[500-500-50] [GOOD] |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test >> test_vdisks.py::TestTinyVDisks::test_disabled [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_disabled [GOOD] >> TPartBtreeIndexIteration::OneNode_Groups_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TPartBtreeIndexIteration::FewNodes [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestJsonExample::test_json_unexpected_failure [GOOD] >> test_public_api.py::TestJsonExample::test_json_success >> test_public_api.py::TestJsonExample::test_json_success [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups [GOOD] >> TPartBtreeIndexIteration::FewNodes_History |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TPartBtreeIndexIteration::FewNodes_History [GOOD] >> TPartBtreeIndexIteration::FewNodes_Sticky |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TPartBtreeIndexIteration::FewNodes_Sticky [GOOD] >> TPartBtreeIndexIteration::FewNodes_Slices |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> 
test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TPartBtreeIndexIteration::FewNodes_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_Slices |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TPartBtreeIndexIteration::FewNodes_Groups_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_History_Slices |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_replication.py::TestReplicationAfterNodesRestart::test_replication[block-4-2] [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TPartBtreeIndexIteration::FewNodes_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices |99.6%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices [GOOD] >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> TPartBtreeIndexIteration::FewNodes_Groups_History_Slices_Sticky [GOOD] >> TPartGroupBtreeIndexIter::NoNodes >> TPartGroupBtreeIndexIter::NoNodes [GOOD] >> TPartGroupBtreeIndexIter::OneNode [GOOD] >> TPartGroupBtreeIndexIter::FewNodes >> TPartGroupBtreeIndexIter::FewNodes [GOOD] >> TPartMulti::Basics [GOOD] >> TPartMulti::BasicsReverse [GOOD] >> TPartSlice::TrivialMerge [GOOD] >> TPartSlice::SimpleMerge [GOOD] >> TPartSlice::ComplexMerge [GOOD] >> TPartSlice::LongTailMerge [GOOD] >> TPartSlice::CutSingle [GOOD] >> TPartSlice::CutMulti [GOOD] >> TPartSlice::LookupBasics [GOOD] >> TPartSlice::LookupFull [GOOD] >> TPartSlice::EqualByRowId [GOOD] >> TPartSlice::SupersetByRowId [GOOD] >> TPartSlice::Subtract [GOOD] >> TPartSlice::ParallelCompactions [GOOD] >> TPartSlice::UnsplitBorrow [GOOD] >> TPartSliceLoader::RestoreMissingSlice >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup >> TPartSliceLoader::RestoreMissingSlice [GOOD] >> TPartSliceLoader::RestoreOneSlice [GOOD] >> TPartSliceLoader::RestoreMissingSliceFullScreen [GOOD] >> TPartSliceLoader::RestoreFromScreenIndexKeys [GOOD] >> TPartSliceLoader::RestoreFromScreenDataKeys [GOOD] >> TRowVersionRangesTest::MergeExtendLeft [GOOD] >> 
TRowVersionRangesTest::MergeExtendLeftInner [GOOD] >> TRowVersionRangesTest::MergeExtendLeftComplete [GOOD] >> TRowVersionRangesTest::MergeExtendRight [GOOD] >> TRowVersionRangesTest::MergeExtendRightComplete [GOOD] >> TRowVersionRangesTest::MergeExtendBoth [GOOD] >> TRowVersionRangesTest::MergeAllOuter [GOOD] >> TRowVersionRangesTest::MergeAllInner [GOOD] >> TRowVersionRangesTest::MergeAllEdges [GOOD] >> TRowVersionRangesTest::ContainsEmpty [GOOD] >> TRowVersionRangesTest::ContainsNonEmpty [GOOD] >> TRowVersionRangesTest::ContainsInvalid [GOOD] >> TRowVersionRangesTest::AdjustDown [GOOD] >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> TRowVersionRangesTest::AdjustDownSnapshot [GOOD] Test command err: Part{[1:2:3:0:0:0:0] eph 0, 2050b 40r} data 2167b + FlatIndex{1} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 0 0 2050b {0, 1} | 0 39 2050b {5, 7} + BTreeIndex{Empty, PageId: 0 RowCount: 40 DataSize: 2050 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{0} Label{04 rev 1, 2050b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | 
ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 3750b 40r} data 2915b + FlatIndex{10} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 3 0 620b {0, 1} | 3 39 620b {5, 7} + BTreeIndex{Empty, PageId: 3 RowCount: 40 DataSize: 620 GroupDataSize: 3130 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{3} Label{34 rev 1, 620b}, [0, +40)row | ERowOp 1: {0, 1} | ERowOp 1: {0, 3} | ERowOp 1: {0, 4} | ERowOp 1: {0, 6} | ERowOp 1: {0, 7} | ERowOp 1: {0, 8} | ERowOp 1: {0, 10} | ERowOp 1: {1, 1} | ERowOp 1: {1, 3} | ERowOp 1: {1, 4} | ERowOp 1: {1, 6} | ERowOp 1: {1, 7} | ERowOp 1: {1, 8} | ERowOp 1: {1, 10} | ERowOp 1: {2, 1} | ERowOp 1: {2, 3} | ERowOp 1: {2, 4} | ERowOp 1: {2, 6} | ERowOp 1: {2, 7} | ERowOp 1: {2, 8} | ERowOp 1: {2, 10} | ERowOp 1: {3, 1} | ERowOp 1: {3, 3} | ERowOp 1: {3, 4} | ERowOp 1: {3, 6} | ERowOp 1: {3, 7} | ERowOp 1: {3, 8} | ERowOp 1: {3, 10} | ERowOp 1: {4, 1} | ERowOp 1: {4, 3} | ERowOp 1: {4, 4} | ERowOp 1: {4, 6} | ERowOp 1: {4, 7} | ERowOp 1: {4, 8} | ERowOp 1: {4, 10} | ERowOp 1: {5, 1} | ERowOp 1: {5, 3} | ERowOp 1: {5, 4} | ERowOp 1: {5, 6} | ERowOp 1: {5, 7} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 5129b 40r} data 5373b + FlatIndex{3} Label{3 rev 3, 64b} 2 rec | Page Row Bytes (Uint32, Uint32) | 1 0 2466b {0, 1} | 1 39 2466b {5, 7} + BTreeIndex{Empty, PageId: 1 RowCount: 40 DataSize: 2466 GroupDataSize: 2663 ErasedRowCount: 0 LevelCount: 0 IndexSize: 0 + Rows{1} Label{14 rev 1, 2466b}, [0, +40)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : 
xxxxxxxxxx_7} | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4017b + FlatIndex{21} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 6 12 122b {1, 8} | 7 14 122b {2, NULL} | 8 16 122b {2, 4} | 9 18 122b {2, 7} | 10 20 122b {2, 10} | 11 22 122b {3, 3} | 12 24 122b {3, 6} | 13 26 122b {3, 8} | 14 28 122b {4, NULL} | 15 30 122b {4, 4} | 16 32 122b {4, 7} | 17 34 122b {4, 10} | 18 36 122b {5, 3} | 19 38 122b {5, 6} | 19 39 122b {5, 7} + BTreeIndex{PageId: 20 RowCount: 40 
DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 976b} | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | > {0, 4} | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | > {0, 7} | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | > {0, 10} | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | > {1, 3} | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | > {1, 6} | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | > {1, 8} | PageId: 6 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | > {2, NULL} | PageId: 7 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | > {2, 4} | PageId: 8 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | PageId: 9 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | > {2, 10} | PageId: 10 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | > {3, 3} | PageId: 11 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | > {3, 6} | PageId: 12 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | > {3, 8} | PageId: 13 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | > {4, NULL} | PageId: 14 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | > {4, 4} | PageId: 15 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | > {4, 7} | PageId: 16 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | > {4, 10} | PageId: 17 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | > {5, 3} | PageId: 18 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | > {5, 6} | PageId: 19 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERo ... owOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{6} Label{64 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{7} Label{74 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{8} Label{84 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 String : xxxxxxxxxx_17} + Rows{9} Label{94 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{10} Label{104 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 
Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{11} Label{114 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{12} Label{124 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{13} Label{134 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{14} Label{144 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{15} Label{154 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{16} Label{164 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{17} Label{174 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{18} Label{184 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{19} Label{194 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } Part{[1:2:3:0:0:0:0] eph 0, 2430b 40r} data 4441b + FlatIndex{29} Label{3 rev 3, 558b} 21 rec | Page Row Bytes (Uint32, Uint32) | 0 0 120b {0, 1} | 1 2 120b {0, 4} | 2 4 120b {0, 7} | 3 6 120b {0, 10} | 4 8 120b {1, 3} | 5 10 122b {1, 6} | 7 12 122b {1, 8} | 8 14 122b {2, NULL} | 9 16 122b {2, 4} | 11 18 122b {2, 7} | 12 20 122b {2, 10} | 13 22 122b {3, 3} | 15 24 122b {3, 6} | 16 26 122b {3, 8} | 17 28 122b {4, NULL} | 19 30 122b {4, 4} | 20 32 122b {4, 7} | 21 34 122b {4, 10} | 24 36 122b {5, 3} | 25 38 122b {5, 6} | 25 39 122b {5, 7} + BTreeIndex{PageId: 28 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 102b} | + BTreeIndex{PageId: 23 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 6 RowCount: 6 DataSize: 360 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 0 RowCount: 2 DataSize: 120 ErasedRowCount: 0 | | | > {0, 4} | | | PageId: 1 RowCount: 4 DataSize: 240 ErasedRowCount: 0 | | | > {0, 7} | | | PageId: 2 RowCount: 6 DataSize: 360 ErasedRowCount: 0 | | > {0, 10} | | + BTreeIndex{PageId: 10 RowCount: 12 DataSize: 722 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 3 RowCount: 8 DataSize: 480 ErasedRowCount: 0 | | | > {1, 3} | | | PageId: 4 RowCount: 10 DataSize: 600 ErasedRowCount: 0 | | | > {1, 6} | | | PageId: 5 RowCount: 12 DataSize: 722 ErasedRowCount: 0 | | > {1, 8} | | + 
BTreeIndex{PageId: 14 RowCount: 18 DataSize: 1088 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 7 RowCount: 14 DataSize: 844 ErasedRowCount: 0 | | | > {2, NULL} | | | PageId: 8 RowCount: 16 DataSize: 966 ErasedRowCount: 0 | | | > {2, 4} | | | PageId: 9 RowCount: 18 DataSize: 1088 ErasedRowCount: 0 | > {2, 7} | + BTreeIndex{PageId: 27 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 151b} | | + BTreeIndex{PageId: 18 RowCount: 24 DataSize: 1454 ErasedRowCount: 0} Label{13 rev 1, 151b} | | | PageId: 11 RowCount: 20 DataSize: 1210 ErasedRowCount: 0 | | | > {2, 10} | | | PageId: 12 RowCount: 22 DataSize: 1332 ErasedRowCount: 0 | | | > {3, 3} | | | PageId: 13 RowCount: 24 DataSize: 1454 ErasedRowCount: 0 | | > {3, 6} | | + BTreeIndex{PageId: 22 RowCount: 30 DataSize: 1820 ErasedRowCount: 0} Label{13 rev 1, 147b} | | | PageId: 15 RowCount: 26 DataSize: 1576 ErasedRowCount: 0 | | | > {3, 8} | | | PageId: 16 RowCount: 28 DataSize: 1698 ErasedRowCount: 0 | | | > {4, NULL} | | | PageId: 17 RowCount: 30 DataSize: 1820 ErasedRowCount: 0 | | > {4, 4} | | + BTreeIndex{PageId: 26 RowCount: 40 DataSize: 2430 ErasedRowCount: 0} Label{13 rev 1, 249b} | | | PageId: 19 RowCount: 32 DataSize: 1942 ErasedRowCount: 0 | | | > {4, 7} | | | PageId: 20 RowCount: 34 DataSize: 2064 ErasedRowCount: 0 | | | > {4, 10} | | | PageId: 21 RowCount: 36 DataSize: 2186 ErasedRowCount: 0 | | | > {5, 3} | | | PageId: 24 RowCount: 38 DataSize: 2308 ErasedRowCount: 0 | | | > {5, 6} | | | PageId: 25 RowCount: 40 DataSize: 2430 ErasedRowCount: 0 + Rows{0} Label{04 rev 1, 120b}, [0, +2)row | ERowOp 1: {0, 1} {Set 2 Uint32 : 0}, {Set 3 Uint64 : 0}, {Set 4 String : xxxxxxxxxx_0} | ERowOp 1: {0, 3} {Set 2 Uint32 : 1}, {Set 3 Uint64 : 1}, {Set 4 String : xxxxxxxxxx_1} + Rows{1} Label{14 rev 1, 120b}, [2, +2)row | ERowOp 1: {0, 4} {Set 2 Uint32 : 2}, {Set 3 Uint64 : 2}, {Set 4 String : xxxxxxxxxx_2} | ERowOp 1: {0, 6} {Set 2 Uint32 : 3}, {Set 3 Uint64 : 3}, {Set 4 String : xxxxxxxxxx_3} + Rows{2} Label{24 rev 1, 120b}, [4, +2)row | ERowOp 1: {0, 7} {Set 2 Uint32 : 4}, {Set 3 Uint64 : 4}, {Set 4 String : xxxxxxxxxx_4} | ERowOp 1: {0, 8} {Set 2 Uint32 : 5}, {Set 3 Uint64 : 5}, {Set 4 String : xxxxxxxxxx_5} + Rows{3} Label{34 rev 1, 120b}, [6, +2)row | ERowOp 1: {0, 10} {Set 2 Uint32 : 6}, {Set 3 Uint64 : 6}, {Set 4 String : xxxxxxxxxx_6} | ERowOp 1: {1, 1} {Set 2 Uint32 : 7}, {Set 3 Uint64 : 7}, {Set 4 String : xxxxxxxxxx_7} + Rows{4} Label{44 rev 1, 120b}, [8, +2)row | ERowOp 1: {1, 3} {Set 2 Uint32 : 8}, {Set 3 Uint64 : 8}, {Set 4 String : xxxxxxxxxx_8} | ERowOp 1: {1, 4} {Set 2 Uint32 : 9}, {Set 3 Uint64 : 9}, {Set 4 String : xxxxxxxxxx_9} + Rows{5} Label{54 rev 1, 122b}, [10, +2)row | ERowOp 1: {1, 6} {Set 2 Uint32 : 10}, {Set 3 Uint64 : 10}, {Set 4 String : xxxxxxxxxx_10} | ERowOp 1: {1, 7} {Set 2 Uint32 : 11}, {Set 3 Uint64 : 11}, {Set 4 String : xxxxxxxxxx_11} + Rows{7} Label{74 rev 1, 122b}, [12, +2)row | ERowOp 1: {1, 8} {Set 2 Uint32 : 12}, {Set 3 Uint64 : 12}, {Set 4 String : xxxxxxxxxx_12} | ERowOp 1: {1, 10} {Set 2 Uint32 : 13}, {Set 3 Uint64 : 13}, {Set 4 String : xxxxxxxxxx_13} + Rows{8} Label{84 rev 1, 122b}, [14, +2)row | ERowOp 1: {2, 1} {Set 2 Uint32 : 14}, {Set 3 Uint64 : 14}, {Set 4 String : xxxxxxxxxx_14} | ERowOp 1: {2, 3} {Set 2 Uint32 : 15}, {Set 3 Uint64 : 15}, {Set 4 String : xxxxxxxxxx_15} + Rows{9} Label{94 rev 1, 122b}, [16, +2)row | ERowOp 1: {2, 4} {Set 2 Uint32 : 16}, {Set 3 Uint64 : 16}, {Set 4 String : xxxxxxxxxx_16} | ERowOp 1: {2, 6} {Set 2 Uint32 : 17}, {Set 3 Uint64 : 17}, {Set 4 
String : xxxxxxxxxx_17} + Rows{11} Label{114 rev 1, 122b}, [18, +2)row | ERowOp 1: {2, 7} {Set 2 Uint32 : 18}, {Set 3 Uint64 : 18}, {Set 4 String : xxxxxxxxxx_18} | ERowOp 1: {2, 8} {Set 2 Uint32 : 19}, {Set 3 Uint64 : 19}, {Set 4 String : xxxxxxxxxx_19} + Rows{12} Label{124 rev 1, 122b}, [20, +2)row | ERowOp 1: {2, 10} {Set 2 Uint32 : 20}, {Set 3 Uint64 : 20}, {Set 4 String : xxxxxxxxxx_20} | ERowOp 1: {3, 1} {Set 2 Uint32 : 21}, {Set 3 Uint64 : 21}, {Set 4 String : xxxxxxxxxx_21} + Rows{13} Label{134 rev 1, 122b}, [22, +2)row | ERowOp 1: {3, 3} {Set 2 Uint32 : 22}, {Set 3 Uint64 : 22}, {Set 4 String : xxxxxxxxxx_22} | ERowOp 1: {3, 4} {Set 2 Uint32 : 23}, {Set 3 Uint64 : 23}, {Set 4 String : xxxxxxxxxx_23} + Rows{15} Label{154 rev 1, 122b}, [24, +2)row | ERowOp 1: {3, 6} {Set 2 Uint32 : 24}, {Set 3 Uint64 : 24}, {Set 4 String : xxxxxxxxxx_24} | ERowOp 1: {3, 7} {Set 2 Uint32 : 25}, {Set 3 Uint64 : 25}, {Set 4 String : xxxxxxxxxx_25} + Rows{16} Label{164 rev 1, 122b}, [26, +2)row | ERowOp 1: {3, 8} {Set 2 Uint32 : 26}, {Set 3 Uint64 : 26}, {Set 4 String : xxxxxxxxxx_26} | ERowOp 1: {3, 10} {Set 2 Uint32 : 27}, {Set 3 Uint64 : 27}, {Set 4 String : xxxxxxxxxx_27} + Rows{17} Label{174 rev 1, 122b}, [28, +2)row | ERowOp 1: {4, 1} {Set 2 Uint32 : 28}, {Set 3 Uint64 : 28}, {Set 4 String : xxxxxxxxxx_28} | ERowOp 1: {4, 3} {Set 2 Uint32 : 29}, {Set 3 Uint64 : 29}, {Set 4 String : xxxxxxxxxx_29} + Rows{19} Label{194 rev 1, 122b}, [30, +2)row | ERowOp 1: {4, 4} {Set 2 Uint32 : 30}, {Set 3 Uint64 : 30}, {Set 4 String : xxxxxxxxxx_30} | ERowOp 1: {4, 6} {Set 2 Uint32 : 31}, {Set 3 Uint64 : 31}, {Set 4 String : xxxxxxxxxx_31} + Rows{20} Label{204 rev 1, 122b}, [32, +2)row | ERowOp 1: {4, 7} {Set 2 Uint32 : 32}, {Set 3 Uint64 : 32}, {Set 4 String : xxxxxxxxxx_32} | ERowOp 1: {4, 8} {Set 2 Uint32 : 33}, {Set 3 Uint64 : 33}, {Set 4 String : xxxxxxxxxx_33} + Rows{21} Label{214 rev 1, 122b}, [34, +2)row | ERowOp 1: {4, 10} {Set 2 Uint32 : 34}, {Set 3 Uint64 : 34}, {Set 4 String : xxxxxxxxxx_34} | ERowOp 1: {5, 1} {Set 2 Uint32 : 35}, {Set 3 Uint64 : 35}, {Set 4 String : xxxxxxxxxx_35} + Rows{24} Label{244 rev 1, 122b}, [36, +2)row | ERowOp 1: {5, 3} {Set 2 Uint32 : 36}, {Set 3 Uint64 : 36}, {Set 4 String : xxxxxxxxxx_36} | ERowOp 1: {5, 4} {Set 2 Uint32 : 37}, {Set 3 Uint64 : 37}, {Set 4 String : xxxxxxxxxx_37} + Rows{25} Label{254 rev 1, 122b}, [38, +2)row | ERowOp 1: {5, 6} {Set 2 Uint32 : 38}, {Set 3 Uint64 : 38}, {Set 4 String : xxxxxxxxxx_38} | ERowOp 1: {5, 7} {Set 2 Uint32 : 39}, {Set 3 Uint64 : 39}, {Set 4 String : xxxxxxxxxx_39} Slices{ [0, 39] } |99.7%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestForPotentialDeadlock::test_deadlocked_threads_on_cleanup [GOOD] >> test_vdisks.py::TestTinyVDisks::test_disabled_enabled [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, 
default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_disabled_enabled [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestRecursiveCreation::test_mkdir |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_enabled_disabled [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_vdisks.py::TestTinyVDisks::test_enabled_disabled [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_public_api.py::TestRecursiveCreation::test_mkdir [GOOD] >> test_public_api.py::TestRecursiveCreation::test_create_table >> test_public_api.py::TestRecursiveCreation::test_create_table [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> 
test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> Backup::SnapshotLargeData [GOOD] >> Backup::SnapshotSchema >> Backup::SnapshotSchema [GOOD] >> Backup::ChangelogData >> Backup::ChangelogData [GOOD] >> Backup::ChangelogLargeData |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_public_api.py::TestAttributes::test_create_table |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> test_public_api.py::TestAttributes::test_create_table [GOOD] >> 
test_public_api.py::TestAttributes::test_copy_table >> test_public_api.py::TestAttributes::test_copy_table [GOOD] >> test_public_api.py::TestAttributes::test_create_indexed_table >> test_public_api.py::TestAttributes::test_create_indexed_table [GOOD] >> test_public_api.py::TestAttributes::test_alter_table >> test_public_api.py::TestAttributes::test_alter_table [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes0] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes1] >> test_public_api.py::TestAttributes::test_limits[attributes1] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes2] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes3] [GOOD] >> test_public_api.py::TestAttributes::test_limits[attributes4] >> test_public_api.py::TestAttributes::test_limits[attributes4] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_public_api.py::TestDocApiTables::test_create_table >> test_public_api.py::TestDocApiTables::test_create_table [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] >> test_public_api.py::TestDocApiTables::test_alter_table[None-BadRequest] [GOOD] >> test_public_api.py::TestDocApiTables::test_alter_table[settings1-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] >> test_public_api.py::TestDocApiTables::test_drop_table[None-None] [GOOD] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] >> Backup::ChangelogLargeData [GOOD] >> Backup::ChangelogManyCommits >> Backup::ChangelogManyCommits [GOOD] >> Backup::ChangelogSchema >> Backup::ChangelogSchema [GOOD] >> Backup::ChangelogSchemaAndData >> Backup::ChangelogSchemaAndData [GOOD] >> Backup::ChangelogSchemaNewColumn >> Backup::ChangelogSchemaNewColumn [GOOD] >> Backup::ChangelogTornWrite >> Backup::ChangelogTornWrite [GOOD] >> Backup::ExcludeTablet [GOOD] >> Backup::RecoveryModeKeepsData >> Backup::RecoveryModeKeepsData [GOOD] >> Backup::NoBackupTable >> Backup::NoBackupTable [GOOD] >> Backup::NoBackupColumn >> Backup::NoBackupColumn [GOOD] >> Backup::NewSnapshotChangelogSize >> Backup::NewSnapshotChangelogSize [GOOD] >> Bloom::Conf [GOOD] >> Bloom::Hashes >> Bloom::Hashes [GOOD] >> Bloom::Rater >> Bloom::Rater [GOOD] >> Bloom::Dipping >> Bloom::Dipping [GOOD] >> Bloom::Basics [GOOD] >> Bloom::Stairs >> Bloom::Stairs [GOOD] >> BuildStatsBTreeIndex::Single >> BuildStatsBTreeIndex::Single [GOOD] >> BuildStatsBTreeIndex::Single_Slices >> BuildStatsBTreeIndex::Single_Slices [GOOD] >> BuildStatsBTreeIndex::Single_History >> BuildStatsBTreeIndex::Single_History [GOOD] >> BuildStatsBTreeIndex::Single_History_Slices >> BuildStatsBTreeIndex::Single_History_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups >> BuildStatsBTreeIndex::Single_Groups [GOOD] >> BuildStatsBTreeIndex::Single_Groups_Slices >> BuildStatsBTreeIndex::Single_Groups_Slices [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> 
BuildStatsBTreeIndex::Single_Groups_History [GOOD] >> BuildStatsBTreeIndex::Single_Groups_History_Slices >> BuildStatsBTreeIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsBTreeIndex::Mixed >> BuildStatsBTreeIndex::Mixed [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups >> BuildStatsBTreeIndex::Mixed_Groups [GOOD] >> BuildStatsBTreeIndex::Mixed_Groups_History >> BuildStatsBTreeIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Single >> BuildStatsFlatIndex::Single [GOOD] >> BuildStatsFlatIndex::Single_Slices >> BuildStatsFlatIndex::Single_Slices [GOOD] >> BuildStatsFlatIndex::Single_History >> BuildStatsFlatIndex::Single_History [GOOD] >> BuildStatsFlatIndex::Single_History_Slices >> BuildStatsFlatIndex::Single_History_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups >> BuildStatsFlatIndex::Single_Groups [GOOD] >> BuildStatsFlatIndex::Single_Groups_Slices >> BuildStatsFlatIndex::Single_Groups_Slices [GOOD] >> BuildStatsFlatIndex::Single_Groups_History >> BuildStatsFlatIndex::Single_Groups_History [GOOD] >> BuildStatsFlatIndex::Single_Groups_History_Slices >> BuildStatsFlatIndex::Single_Groups_History_Slices [GOOD] >> BuildStatsFlatIndex::Mixed >> BuildStatsFlatIndex::Mixed [GOOD] >> BuildStatsFlatIndex::Mixed_Groups >> BuildStatsFlatIndex::Mixed_Groups [GOOD] >> BuildStatsFlatIndex::Mixed_Groups_History >> BuildStatsFlatIndex::Mixed_Groups_History [GOOD] >> BuildStatsFlatIndex::Serial >> BuildStatsFlatIndex::Serial [GOOD] >> BuildStatsFlatIndex::Serial_Groups >> BuildStatsFlatIndex::Serial_Groups [GOOD] >> BuildStatsFlatIndex::Serial_Groups_History >> BuildStatsFlatIndex::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Single |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> BuildStatsHistogram::Single [GOOD] >> BuildStatsHistogram::Single_Slices >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [FAIL] >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> BuildStatsHistogram::Single_Slices [GOOD] >> BuildStatsHistogram::Single_History |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/api/py3test >> test_public_api.py::TestDocApiTables::test_drop_table[settings1-None] [GOOD] |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/api/py3test |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test |99.7%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test |99.7%| [TA] $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} |99.7%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... results_accumulator.log} |99.7%| [TA] {RESULT} $(B)/ydb/tests/functional/api/test-results/py3test/{meta.json ... 
results_accumulator.log} >> BuildStatsHistogram::Single_History [GOOD] >> BuildStatsHistogram::Single_History_Slices |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> BuildStatsHistogram::Single_History_Slices [GOOD] >> BuildStatsHistogram::Five_Five_Mixed >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> BuildStatsHistogram::Five_Five_Mixed [GOOD] >> BuildStatsHistogram::Five_Five_Serial |99.7%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> BuildStatsHistogram::Five_Five_Serial [GOOD] >> BuildStatsHistogram::Five_Five_Crossed |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> BuildStatsHistogram::Five_Five_Crossed [GOOD] >> BuildStatsHistogram::Single_Small_1_Level >> BuildStatsHistogram::Single_Small_1_Level [GOOD] >> BuildStatsHistogram::Single_Small_0_Levels [GOOD] >> BuildStatsHistogram::Mixed_Groups_History >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> BuildStatsHistogram::Mixed_Groups_History [GOOD] >> BuildStatsHistogram::Serial_Groups_History >> BuildStatsHistogram::Serial_Groups_History [GOOD] >> BuildStatsHistogram::Benchmark >> BuildStatsHistogram::Benchmark [GOOD] >> BuildStatsHistogram::Many_Mixed >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes >> TSentinelUnstableTests::BSControllerCantChangeStatus [FAIL] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupMirror3dcKeep::test_no_degraded_groups_after_shutdown [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can >> test_serverless.py::test_create_table_with_alter_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> BuildStatsHistogram::Many_Mixed [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/cms/ut_sentinel_unstable/unittest >> TSentinelUnstableTests::BSControllerCantChangeStatus [FAIL] >> BuildStatsHistogram::Many_Serial Test command err: 2026-01-04T03:01:53.264319Z node 1 :CMS DEBUG: sentinel.cpp:1047: [Sentinel] [Main] UpdateConfig 2026-01-04T03:01:53.264433Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start 
ConfigUpdater 2026-01-04T03:01:53.264521Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-04T03:01:53.264555Z node 1 :CMS INFO: sentinel.cpp:987: [Sentinel] [Main] StateUpdater was delayed 2026-01-04T03:01:53.264624Z node 1 :CMS DEBUG: sentinel.cpp:549: [Sentinel] [ConfigUpdater] Request blobstorage config: attempt# 0 2026-01-04T03:01:53.264720Z node 1 :CMS DEBUG: sentinel.cpp:562: [Sentinel] [ConfigUpdater] Request CMS cluster state: attempt# 0 2026-01-04T03:01:53.268664Z node 1 :CMS DEBUG: sentinel.cpp:571: [Sentinel] [ConfigUpdater] Handle TEvCms::TEvClusterStateResponse: response# Status { Code: OK } State { Hosts { Name: "node-1" State: UNKNOWN Devices { Name: "pdisk-1-4" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-5" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-6" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-1-7" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 1 InterconnectPort: 10000 Location { Rack: "rack-1" } StartTimeSeconds: 0 } Hosts { Name: "node-2" State: UNKNOWN Devices { Name: "pdisk-2-8" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-9" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-10" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-2-11" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 2 InterconnectPort: 10000 Location { Rack: "rack-2" } StartTimeSeconds: 0 } Hosts { Name: "node-3" State: UNKNOWN Devices { Name: "pdisk-3-12" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-13" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-14" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-3-15" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 3 InterconnectPort: 10000 Location { Rack: "rack-3" } StartTimeSeconds: 0 } Hosts { Name: "node-4" State: UNKNOWN Devices { Name: "pdisk-4-16" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-17" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-18" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-4-19" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 4 InterconnectPort: 10000 Location { Rack: "rack-4" } StartTimeSeconds: 0 } Hosts { Name: "node-5" State: UNKNOWN Devices { Name: "pdisk-5-20" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-21" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-22" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-5-23" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 5 InterconnectPort: 10000 Location { Rack: "rack-5" } StartTimeSeconds: 0 } Hosts { Name: "node-6" State: UNKNOWN Devices { Name: "pdisk-6-24" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-25" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-26" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-6-27" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 6 InterconnectPort: 10000 Location { Rack: "rack-6" } StartTimeSeconds: 0 } Hosts { Name: "node-7" State: UNKNOWN Devices { Name: "pdisk-7-28" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-29" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-30" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-7-31" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 7 InterconnectPort: 10000 Location { Rack: "rack-7" } StartTimeSeconds: 0 } Hosts { Name: "node-8" State: UNKNOWN Devices { Name: "pdisk-8-32" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-33" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-34" State: DOWN Timestamp: 0 } Devices { Name: "pdisk-8-35" State: DOWN Timestamp: 0 } Timestamp: 0 NodeId: 8 InterconnectPort: 10000 Location { Rack: "rack-8" } StartTimeSeconds: 0 } } 2026-01-04T03:01:53.280637Z node 1 
:CMS DEBUG: sentinel.cpp:624: [Sentinel] [ConfigUpdater] Handle TEvBlobStorage::TEvControllerConfigResponse: response# Status { Success: true BaseConfig { PDisk { NodeId: 1 PDiskId: 4 Path: "/1/pdisk-4.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 5 Path: "/1/pdisk-5.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 6 Path: "/1/pdisk-6.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 1 PDiskId: 7 Path: "/1/pdisk-7.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 8 Path: "/2/pdisk-8.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 9 Path: "/2/pdisk-9.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 10 Path: "/2/pdisk-10.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 2 PDiskId: 11 Path: "/2/pdisk-11.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 12 Path: "/3/pdisk-12.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 13 Path: "/3/pdisk-13.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 14 Path: "/3/pdisk-14.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 3 PDiskId: 15 Path: "/3/pdisk-15.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 16 Path: "/4/pdisk-16.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 17 Path: "/4/pdisk-17.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 18 Path: "/4/pdisk-18.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 4 PDiskId: 19 Path: "/4/pdisk-19.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 20 Path: "/5/pdisk-20.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 21 Path: "/5/pdisk-21.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 22 Path: "/5/pdisk-22.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 5 PDiskId: 23 Path: "/5/pdisk-23.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 24 Path: "/6/pdisk-24.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 25 Path: "/6/pdisk-25.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 26 Path: "/6/pdisk-26.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 6 PDiskId: 27 Path: "/6/pdisk-27.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 28 Path: "/7/pdisk-28.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 29 Path: "/7/pdisk-29.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 30 Path: "/7/pdisk-30.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 7 PDiskId: 31 Path: "/7/pdisk-31.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 32 Path: "/8/pdisk-32.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 33 Path: "/8/pdisk-33.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 34 Path: "/8/pdisk-34.data" Guid: 1 DriveStatus: ACTIVE } PDisk { NodeId: 8 PDiskId: 35 Path: "/8/pdisk-35.data" Guid: 1 DriveStatus: ACTIVE } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1000 } GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 4 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 5 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 } VSlot 
{ VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 6 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 1 PDiskId: 7 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 8 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 9 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 10 VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 2 PDiskId: 11 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 1 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 12 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1002 } GroupId: 6 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 13 VSlotId: 1003 } GroupId: 7 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1000 } GroupId: 8 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1001 } GroupId: 9 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 VSlotId: 1002 } GroupId: 10 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 14 
VSlotId: 1003 } GroupId: 11 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1000 } GroupId: 12 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1001 } GroupId: 13 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1002 } GroupId: 14 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 3 PDiskId: 15 VSlotId: 1003 } GroupId: 15 GroupGeneration: 1 FailDomainIdx: 2 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1000 } GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1001 } GroupId: 1 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1002 } GroupId: 2 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 16 VSlotId: 1003 } GroupId: 3 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1000 } GroupId: 4 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 1001 } GroupId: 5 GroupGeneration: 1 FailDomainIdx: 3 } VSlot { VSlotId { NodeId: 4 PDiskId: 17 VSlotId: 100 ... 026-01-04T03:10:56.619356Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 1, wbId# [1:8388350642965737326:1634689637] 2026-01-04T03:10:56.619418Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2026-01-04T03:10:56.619468Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2026-01-04T03:10:56.619513Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2026-01-04T03:10:56.619545Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2026-01-04T03:10:56.619577Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2026-01-04T03:10:56.619611Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2026-01-04T03:10:56.619644Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# [8:8388350642965737326:1634689637] 2026-01-04T03:10:56.619988Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 1, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825060110 2026-01-04T03:10:56.620765Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 2, response# PDiskStateInfo { PDiskId: 8 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-8.data" Guid: 1 AvailableSize: 
107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 9 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-9.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 10 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-10.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 11 CreateTime: 0 ChangeTime: 0 Path: "/2/pdisk-11.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825060110 2026-01-04T03:10:56.620899Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 3, response# PDiskStateInfo { PDiskId: 12 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-12.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 13 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-13.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 14 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-14.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 15 CreateTime: 0 ChangeTime: 0 Path: "/3/pdisk-15.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825060110 2026-01-04T03:10:56.621061Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 7, response# PDiskStateInfo { PDiskId: 28 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-28.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 29 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-29.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 30 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-30.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 31 CreateTime: 0 ChangeTime: 0 Path: "/7/pdisk-31.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825060110 2026-01-04T03:10:56.621146Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 8, response# PDiskStateInfo { PDiskId: 32 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-32.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 33 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-33.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 34 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-34.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 35 CreateTime: 0 ChangeTime: 0 Path: "/8/pdisk-35.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825060110 2026-01-04T03:10:56.621228Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 4, response# PDiskStateInfo { PDiskId: 16 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-16.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 17 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-17.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: CommonLoggerInitError } PDiskStateInfo { PDiskId: 18 CreateTime: 0 ChangeTime: 0 Path: 
"/4/pdisk-18.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 19 CreateTime: 0 ChangeTime: 0 Path: "/4/pdisk-19.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825060110 2026-01-04T03:10:56.621316Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 5, response# PDiskStateInfo { PDiskId: 20 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-20.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 21 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-21.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 22 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-22.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 23 CreateTime: 0 ChangeTime: 0 Path: "/5/pdisk-23.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825060110 2026-01-04T03:10:56.621400Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 6, response# PDiskStateInfo { PDiskId: 24 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-24.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: CommonLoggerInitError } PDiskStateInfo { PDiskId: 25 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-25.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 26 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-26.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 27 CreateTime: 0 ChangeTime: 0 Path: "/6/pdisk-27.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: CommonLoggerInitError } ResponseTime: 825060110 2026-01-04T03:10:56.621447Z node 1 :CMS DEBUG: sentinel.cpp:1068: [Sentinel] [Main] State was updated in 0.000000s 2026-01-04T03:10:56.632236Z node 1 :CMS DEBUG: sentinel.cpp:1063: [Sentinel] [Main] UpdateState 2026-01-04T03:10:56.632308Z node 1 :CMS DEBUG: sentinel.cpp:992: [Sentinel] [Main] Start StateUpdater 2026-01-04T03:10:56.632437Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 1, wbId# [1:8388350642965737326:1634689637] 2026-01-04T03:10:56.632485Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 2, wbId# [2:8388350642965737326:1634689637] 2026-01-04T03:10:56.632519Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 3, wbId# [3:8388350642965737326:1634689637] 2026-01-04T03:10:56.632550Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 4, wbId# [4:8388350642965737326:1634689637] 2026-01-04T03:10:56.632580Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 5, wbId# [5:8388350642965737326:1634689637] 2026-01-04T03:10:56.632613Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 6, wbId# [6:8388350642965737326:1634689637] 2026-01-04T03:10:56.632646Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 7, wbId# [7:8388350642965737326:1634689637] 2026-01-04T03:10:56.632679Z node 1 :CMS DEBUG: sentinel.cpp:786: [Sentinel] [StateUpdater] Request pdisks state: nodeId# 8, wbId# 
[8:8388350642965737326:1634689637] 2026-01-04T03:10:56.633029Z node 1 :CMS DEBUG: sentinel.cpp:796: [Sentinel] [StateUpdater] Handle TEvWhiteboard::TEvPDiskStateResponse: nodeId# 1, response# PDiskStateInfo { PDiskId: 4 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-4.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 5 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-5.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 6 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-6.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } PDiskStateInfo { PDiskId: 7 CreateTime: 0 ChangeTime: 0 Path: "/1/pdisk-7.data" Guid: 1 AvailableSize: 107374182400 TotalSize: 214748364800 State: Normal } ResponseTime: 825070110 (TWithBackTrace) ydb/library/actors/testlib/test_runtime.cpp:1278: Dispatched 2500000 events, limit reached. TBackTrace::Capture()+28 (0x10A2B64C) TWithBackTrace::TWithBackTrace<>()+80 (0x10654EC0) NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant)+26802 (0x31048982) NActors::TTestActorRuntimeBase::DispatchEvents(NActors::TDispatchOptions const&)+49 (0x31041FB1) NKikimr::NCmsTest::TTestEnv::SetPDiskState(TSet, std::__y1::allocator> const&, NKikimrBlobStorage::TPDiskState_E, NKikimrBlobStorage::EDriveStatus)+901 (0x1068CB75) NKikimr::NCmsTest::NTestSuiteTSentinelUnstableTests::TTestCaseBSControllerCantChangeStatus::Execute_(NUnitTest::TTestContext&)+2376 (0x1068B1F8) std::__y1::__function::__func, void ()>::operator()()+280 (0x1068F058) TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool)+538 (0x10F4BB9A) NUnitTest::TTestBase::Run(std::__y1::function, TBasicString> const&, char const*, bool)+504 (0x10F23418) NKikimr::NCmsTest::NTestSuiteTSentinelUnstableTests::TCurrentTest::Execute()+1229 (0x1068E15D) NUnitTest::TTestFactory::Execute()+2176 (0x10F24BD0) NUnitTest::RunMain(int, char**)+5805 (0x10F459FD) ??+0 (0x7F9A90392D90) __libc_start_main+128 (0x7F9A90392E40) _start+41 (0xDFD5029) >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] |99.8%| [TM] {RESULT} ydb/core/cms/ut_sentinel_unstable/unittest |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorKeep::test_restart_as_much_as_can [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/core/cms/ut_sentinel_unstable/unittest |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/blobstorage/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/blobstorage/py3test |99.8%| [TA] $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleMirror3DC::test_restart_single_node_is_ok [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test |99.8%| [TA] {RESULT} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... 
results_accumulator.log} >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes |99.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/blobstorage/test-results/py3test/{meta.json ... results_accumulator.log} >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can >> BuildStatsHistogram::Many_Serial [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/tablet_flat/ut/unittest >> BuildStatsHistogram::Many_Serial [GOOD] Test command err: 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:08:27.696205Z ...starting tablet 00000.009 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.009 II| FAKE_ENV: Starting storage for BS group 0 00000.010 II| FAKE_ENV: Starting storage for BS group 1 00000.010 II| FAKE_ENV: Starting storage for BS group 2 00000.010 II| FAKE_ENV: Starting storage for BS group 3 00000.017 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmpEVOYOC/dummy/1/backup_19700101000000Z_g2_s2/snapshot 00000.019 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmpEVOYOC/dummy/1/backup_19700101000000Z_g2_s2/changelog.json ...restarting tablet 00000.024 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmpEVOYOC/dummy/1/backup_19700101000000Z_g3_s2/snapshot 00000.024 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmpEVOYOC/dummy/1/backup_19700101000000Z_g3_s2/changelog.json ...restarting tablet again 00000.028 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmpEVOYOC/dummy/1/backup_19700101000000Z_g4_s2/snapshot 00000.029 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmpEVOYOC/dummy/1/backup_19700101000000Z_g4_s2/changelog.json 00000.030 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.030 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.031 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.031 II| FAKE_ENV: DS.0 gone, left {42b, 1}, put {186b, 6} 00000.031 II| FAKE_ENV: DS.1 gone, left {105b, 3}, put {105b, 3} 00000.031 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.031 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.031 II| FAKE_ENV: All BS storage groups are stopped 00000.031 II| FAKE_ENV: Model stopped, hosted 5 actors, spent 0.000s 00000.031 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 21}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:08:27.732214Z ...starting tablet 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.007 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.009 DD| LOCAL_DB_BACKUP: 
Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmpKDg0jF/dummy/1/backup_19700101000000Z_g2_s2/snapshot 00000.011 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmpKDg0jF/dummy/1/backup_19700101000000Z_g2_s2/changelog.json 00000.086 C1| TABLET_EXECUTOR: Tablet 1 unhandled exception yexception: ydb/core/tablet_flat/flat_executor.cpp:5177: Backup snapshot failed: Failed to create snapshot dir /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmpKDg0jF/dummy/1/backup_19700101000000Z_g2_s2/snapshot: (Error 13: Permission denied) util/folder/path.cpp:424: could not create directory /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmpKDg0jF/dummy/1 ??+0 (0x1240E9AD) __cxa_throw+221 (0x1240E7CD) NKikimr::NTabletFlatExecutor::TExecutor::Handle(TAutoPtr, TDelete>&)+1049 (0x188ABA29) NKikimr::NTabletFlatExecutor::TExecutor::StateWork(TAutoPtr&)+1443 (0x18830AC3) NActors::IActor::Receive(TAutoPtr&)+744 (0x13B3D338) ??+0 (0x1240E9AD) __cxa_rethrow_primary_exception+340 (0x1240EBF4) std::rethrow_exception(std::exception_ptr)+28 (0x1245096C) NActors::IActorExceptionHandler::OnUnhandledException(std::exception_ptr const&)+183 (0x1126CCE7) ...waiting tablet death 00000.087 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 2 actors 00000.087 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.087 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.087 II| FAKE_ENV: DS.0 gone, left {62b, 2}, put {62b, 2} 00000.087 II| FAKE_ENV: DS.1 gone, left {35b, 1}, put {35b, 1} 00000.087 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.087 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.087 II| FAKE_ENV: All BS storage groups are stopped 00000.087 II| FAKE_ENV: Model stopped, hosted 3 actors, spent 0.000s 00000.087 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 1 Error 0 Left 17}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:08:27.823885Z ...starting tablet 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.007 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.008 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmp3WtB6o/dummy/1/backup_19700101000000Z_g2_s2/snapshot 00000.009 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmp3WtB6o/dummy/1/backup_19700101000000Z_g2_s2/changelog.json ...initing schema 00000.011 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...restarting tablet 00000.018 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmp3WtB6o/dummy/1/backup_19700101000000Z_g3_s2/snapshot 00000.025 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmp3WtB6o/dummy/1/backup_19700101000000Z_g3_s2/changelog.json 00000.026 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.026 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot ...restarting dummy tablet in recovery mode ...restoring backup ...restarting tablet in normal mode 00000.065 DD| LOCAL_DB_BACKUP: Bootstrap for 
/home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmp3WtB6o/dummy/1/backup_19700101000000Z_g5_s2/snapshot 00000.070 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmp3WtB6o/dummy/1/backup_19700101000000Z_g5_s2/changelog.json 00000.071 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.071 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.074 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.074 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.074 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.074 II| FAKE_ENV: DS.0 gone, left {68b, 3}, put {440b, 14} 00000.074 II| FAKE_ENV: DS.1 gone, left {395b, 2}, put {860b, 6} 00000.074 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.074 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.075 II| FAKE_ENV: All BS storage groups are stopped 00000.075 II| FAKE_ENV: Model stopped, hosted 6 actors, spent 0.001s 00000.075 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 26}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:08:27.915975Z ...starting tablet 00000.006 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.006 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.007 II| FAKE_ENV: Starting storage for BS group 2 00000.007 II| FAKE_ENV: Starting storage for BS group 3 00000.008 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmpg9UbMd/dummy/1/backup_19700101000000Z_g2_s2/snapshot 00000.009 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmpg9UbMd/dummy/1/backup_19700101000000Z_g2_s2/changelog.json ...initing schema 00000.010 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing two columns 00000.012 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.013 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing two columns simultaneously 00000.013 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...erasing row 00000.014 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.014 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...replacing row 00000.015 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.015 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing different values in one column 00000.016 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog 00000.016 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...writing composite primary key 00000.017 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteChangelog ...restarting tablet 00000.024 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmpg9UbMd/dummy/1/backup_19700101000000Z_g3_s2/snapshot 00000.028 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmpg9UbMd/dummy/1/backup_19700101000000Z_g3_s2/changelog.json 00000.029 DD| LOCAL_DB_BACKUP: Handle 
NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.029 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot ...restarting dummy tablet in recovery mode ...restoring backup ...restarting tablet in normal mode 00000.072 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmpg9UbMd/dummy/1/backup_19700101000000Z_g5_s2/snapshot 00000.076 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmpg9UbMd/dummy/1/backup_19700101000000Z_g5_s2/changelog.json 00000.077 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.077 DD| LOCAL_DB_BACKUP: Handle NKikimr::NTabletFlatExecutor::NBackup::TEvWriteSnapshot 00000.083 II| FAKE_ENV: Model starts hard shutdown on level 7 of 8, left 3 actors 00000.084 NN| TABLET_SAUSAGECACHE: Poison cache serviced 0 reqs hit {0 0b} miss {0 0b} in-memory miss {0 0b} 00000.084 II| FAKE_ENV: Shut order, stopping 4 BS groups 00000.084 II| FAKE_ENV: DS.0 gone, left {68b, 3}, put {1268b, 26} 00000.084 II| FAKE_ENV: DS.1 gone, left {668b, 4}, put {2057b, 18} 00000.084 II| FAKE_ENV: DS.2 gone, left {0b, 0}, put {0b, 0} 00000.084 II| FAKE_ENV: DS.3 gone, left {0b, 0}, put {0b, 0} 00000.084 II| FAKE_ENV: All BS storage groups are stopped 00000.084 II| FAKE_ENV: Model stopped, hosted 6 actors, spent 0.001s 00000.084 II| FAKE_ENV: Logged {Emerg 0 Alert 0 Crit 0 Error 0 Left 36}, stopped 00000.000 II| FAKE_ENV: Born at 2026-01-04T03:08:28.005911Z ...starting tablet 00000.005 NN| TABLET_SAUSAGECACHE: Bootstrap with config MemoryLimit: 8388608 ScanQueueInFlyLimit: 262144 AsyncQueueInFlyLimit: 262144 00000.005 II| FAKE_ENV: Starting storage for BS group 0 00000.006 II| FAKE_ENV: Starting storage for BS group 1 00000.006 II| FAKE_ENV: Starting storage for BS group 2 00000.006 II| FAKE_ENV: Starting storage for BS group 3 00000.007 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmpvCo1vV/dummy/1/backup_19700101000000Z_g2_s2/changelog.json 00000.007 DD| LOCAL_DB_BACKUP: Bootstrap for /home/runner/.ya/build/build_root/c5zo/0029fe/r3tmp/tmpvCo1vV/dummy/1/backup_19700101000000Z_g2_s2/snapshot ...initing schema 00000.009 DD| LOCAL_DB_BACKUP: Handle NKikimr::NT ... 
140, NULL) (311209, NULL) (311281, NULL) (311344, NULL) (311416, NULL) [0:0:935:0:0:0:0] 100 rows, 100 pages, 4 levels: (311479, NULL) (311542, NULL) (311614, NULL) (311683, NULL) (311755, NULL) [0:0:936:0:0:0:0] 100 rows, 100 pages, 4 levels: (311821, NULL) (311890, NULL) (311956, NULL) (312034, NULL) (312100, NULL) [0:0:937:0:0:0:0] 100 rows, 100 pages, 4 levels: (312172, NULL) (312232, NULL) (312301, NULL) (312370, NULL) (312439, NULL) [0:0:938:0:0:0:0] 100 rows, 100 pages, 4 levels: (312508, NULL) (312571, NULL) (312637, NULL) (312700, NULL) (312760, NULL) [0:0:939:0:0:0:0] 100 rows, 100 pages, 4 levels: (312835, NULL) (312904, NULL) (312970, NULL) (313030, NULL) (313102, NULL) [0:0:940:0:0:0:0] 100 rows, 100 pages, 4 levels: (313174, NULL) (313240, NULL) (313300, NULL) (313366, NULL) (313429, NULL) [0:0:941:0:0:0:0] 100 rows, 100 pages, 4 levels: (313498, NULL) (313573, NULL) (313639, NULL) (313699, NULL) (313768, NULL) [0:0:942:0:0:0:0] 100 rows, 100 pages, 4 levels: (313828, NULL) (313891, NULL) (313957, NULL) (314023, NULL) (314086, NULL) [0:0:943:0:0:0:0] 100 rows, 100 pages, 4 levels: (314149, NULL) (314212, NULL) (314275, NULL) (314338, NULL) (314401, NULL) [0:0:944:0:0:0:0] 100 rows, 100 pages, 4 levels: (314464, NULL) (314530, NULL) (314590, NULL) (314656, NULL) (314719, NULL) [0:0:945:0:0:0:0] 100 rows, 100 pages, 4 levels: (314788, NULL) (314854, NULL) (314920, NULL) (314983, NULL) (315046, NULL) [0:0:946:0:0:0:0] 100 rows, 100 pages, 4 levels: (315109, NULL) (315178, NULL) (315238, NULL) (315304, NULL) (315370, NULL) [0:0:947:0:0:0:0] 100 rows, 100 pages, 4 levels: (315433, NULL) (315496, NULL) (315565, NULL) (315631, NULL) (315697, NULL) [0:0:948:0:0:0:0] 100 rows, 100 pages, 4 levels: (315766, NULL) (315826, NULL) (315889, NULL) (315952, NULL) (316024, NULL) [0:0:949:0:0:0:0] 100 rows, 100 pages, 4 levels: (316087, NULL) (316156, NULL) (316222, NULL) (316288, NULL) (316357, NULL) [0:0:950:0:0:0:0] 100 rows, 100 pages, 4 levels: (316432, NULL) (316498, NULL) (316564, NULL) (316636, NULL) (316705, NULL) [0:0:951:0:0:0:0] 100 rows, 100 pages, 4 levels: (316768, NULL) (316831, NULL) (316891, NULL) (316951, NULL) (317011, NULL) [0:0:952:0:0:0:0] 100 rows, 100 pages, 4 levels: (317080, NULL) (317143, NULL) (317218, NULL) (317287, NULL) (317356, NULL) [0:0:953:0:0:0:0] 100 rows, 100 pages, 4 levels: (317422, NULL) (317497, NULL) (317563, NULL) (317632, NULL) (317701, NULL) [0:0:954:0:0:0:0] 100 rows, 100 pages, 4 levels: (317764, NULL) (317824, NULL) (317887, NULL) (317953, NULL) (318019, NULL) [0:0:955:0:0:0:0] 100 rows, 100 pages, 4 levels: (318088, NULL) (318166, NULL) (318235, NULL) (318304, NULL) (318370, NULL) [0:0:956:0:0:0:0] 100 rows, 100 pages, 4 levels: (318442, NULL) (318511, NULL) (318574, NULL) (318640, NULL) (318703, NULL) [0:0:957:0:0:0:0] 100 rows, 100 pages, 4 levels: (318772, NULL) (318838, NULL) (318898, NULL) (318970, NULL) (319036, NULL) [0:0:958:0:0:0:0] 100 rows, 100 pages, 4 levels: (319099, NULL) (319162, NULL) (319225, NULL) (319294, NULL) (319360, NULL) [0:0:959:0:0:0:0] 100 rows, 100 pages, 4 levels: (319423, NULL) (319492, NULL) (319555, NULL) (319621, NULL) (319687, NULL) [0:0:960:0:0:0:0] 100 rows, 100 pages, 4 levels: (319753, NULL) (319828, NULL) (319900, NULL) (319963, NULL) (320035, NULL) [0:0:961:0:0:0:0] 100 rows, 100 pages, 4 levels: (320104, NULL) (320164, NULL) (320233, NULL) (320299, NULL) (320365, NULL) [0:0:962:0:0:0:0] 100 rows, 100 pages, 4 levels: (320428, NULL) (320500, NULL) (320569, NULL) (320629, NULL) (320698, NULL) 
[0:0:963:0:0:0:0] 100 rows, 100 pages, 4 levels: (320764, NULL) (320833, NULL) (320893, NULL) (320959, NULL) (321019, NULL) [0:0:964:0:0:0:0] 100 rows, 100 pages, 4 levels: (321085, NULL) (321151, NULL) (321214, NULL) (321277, NULL) (321352, NULL) [0:0:965:0:0:0:0] 100 rows, 100 pages, 4 levels: (321421, NULL) (321493, NULL) (321562, NULL) (321631, NULL) (321691, NULL) [0:0:966:0:0:0:0] 100 rows, 100 pages, 4 levels: (321757, NULL) (321823, NULL) (321886, NULL) (321949, NULL) (322009, NULL) [0:0:967:0:0:0:0] 100 rows, 100 pages, 4 levels: (322081, NULL) (322159, NULL) (322225, NULL) (322294, NULL) (322363, NULL) [0:0:968:0:0:0:0] 100 rows, 100 pages, 4 levels: (322429, NULL) (322498, NULL) (322564, NULL) (322642, NULL) (322711, NULL) [0:0:969:0:0:0:0] 100 rows, 100 pages, 4 levels: (322783, NULL) (322846, NULL) (322915, NULL) (322978, NULL) (323041, NULL) [0:0:970:0:0:0:0] 100 rows, 100 pages, 4 levels: (323104, NULL) (323164, NULL) (323230, NULL) (323305, NULL) (323368, NULL) [0:0:971:0:0:0:0] 100 rows, 100 pages, 4 levels: (323434, NULL) (323506, NULL) (323569, NULL) (323632, NULL) (323707, NULL) [0:0:972:0:0:0:0] 100 rows, 100 pages, 4 levels: (323776, NULL) (323851, NULL) (323917, NULL) (323986, NULL) (324052, NULL) [0:0:973:0:0:0:0] 100 rows, 100 pages, 4 levels: (324115, NULL) (324184, NULL) (324256, NULL) (324316, NULL) (324379, NULL) [0:0:974:0:0:0:0] 100 rows, 100 pages, 4 levels: (324442, NULL) (324502, NULL) (324568, NULL) (324631, NULL) (324703, NULL) [0:0:975:0:0:0:0] 100 rows, 100 pages, 4 levels: (324769, NULL) (324838, NULL) (324904, NULL) (324973, NULL) (325033, NULL) [0:0:976:0:0:0:0] 100 rows, 100 pages, 4 levels: (325105, NULL) (325174, NULL) (325234, NULL) (325297, NULL) (325363, NULL) [0:0:977:0:0:0:0] 100 rows, 100 pages, 4 levels: (325438, NULL) (325504, NULL) (325570, NULL) (325630, NULL) (325699, NULL) [0:0:978:0:0:0:0] 100 rows, 100 pages, 4 levels: (325771, NULL) (325834, NULL) (325900, NULL) (325966, NULL) (326032, NULL) [0:0:979:0:0:0:0] 100 rows, 100 pages, 4 levels: (326101, NULL) (326170, NULL) (326233, NULL) (326296, NULL) (326359, NULL) [0:0:980:0:0:0:0] 100 rows, 100 pages, 4 levels: (326434, NULL) (326497, NULL) (326563, NULL) (326632, NULL) (326701, NULL) [0:0:981:0:0:0:0] 100 rows, 100 pages, 4 levels: (326773, NULL) (326836, NULL) (326905, NULL) (326965, NULL) (327025, NULL) [0:0:982:0:0:0:0] 100 rows, 100 pages, 4 levels: (327097, NULL) (327169, NULL) (327232, NULL) (327301, NULL) (327364, NULL) [0:0:983:0:0:0:0] 100 rows, 100 pages, 4 levels: (327430, NULL) (327496, NULL) (327559, NULL) (327622, NULL) (327682, NULL) [0:0:984:0:0:0:0] 100 rows, 100 pages, 4 levels: (327742, NULL) (327811, NULL) (327871, NULL) (327934, NULL) (327997, NULL) [0:0:985:0:0:0:0] 100 rows, 100 pages, 4 levels: (328072, NULL) (328138, NULL) (328222, NULL) (328291, NULL) (328363, NULL) [0:0:986:0:0:0:0] 100 rows, 100 pages, 4 levels: (328432, NULL) (328501, NULL) (328573, NULL) (328648, NULL) (328717, NULL) [0:0:987:0:0:0:0] 100 rows, 100 pages, 4 levels: (328783, NULL) (328849, NULL) (328915, NULL) (328978, NULL) (329044, NULL) [0:0:988:0:0:0:0] 100 rows, 100 pages, 4 levels: (329119, NULL) (329185, NULL) (329248, NULL) (329317, NULL) (329383, NULL) [0:0:989:0:0:0:0] 100 rows, 100 pages, 4 levels: (329455, NULL) (329518, NULL) (329590, NULL) (329662, NULL) (329722, NULL) [0:0:990:0:0:0:0] 100 rows, 100 pages, 4 levels: (329782, NULL) (329854, NULL) (329917, NULL) (329983, NULL) (330049, NULL) [0:0:991:0:0:0:0] 100 rows, 100 pages, 4 levels: (330118, NULL) (330187, NULL) 
(330253, NULL) (330322, NULL) (330382, NULL) [0:0:992:0:0:0:0] 100 rows, 100 pages, 4 levels: (330454, NULL) (330520, NULL) (330595, NULL) (330673, NULL) (330739, NULL) [0:0:993:0:0:0:0] 100 rows, 100 pages, 4 levels: (330808, NULL) (330874, NULL) (330940, NULL) (331003, NULL) (331072, NULL) [0:0:994:0:0:0:0] 100 rows, 100 pages, 4 levels: (331132, NULL) (331204, NULL) (331276, NULL) (331342, NULL) (331405, NULL) [0:0:995:0:0:0:0] 100 rows, 100 pages, 4 levels: (331465, NULL) (331540, NULL) (331615, NULL) (331684, NULL) (331753, NULL) [0:0:996:0:0:0:0] 100 rows, 100 pages, 4 levels: (331816, NULL) (331891, NULL) (331960, NULL) (332026, NULL) (332086, NULL) [0:0:997:0:0:0:0] 100 rows, 100 pages, 4 levels: (332152, NULL) (332215, NULL) (332284, NULL) (332350, NULL) (332419, NULL) [0:0:998:0:0:0:0] 100 rows, 100 pages, 4 levels: (332491, NULL) (332557, NULL) (332623, NULL) (332686, NULL) (332752, NULL) [0:0:999:0:0:0:0] 100 rows, 100 pages, 4 levels: (332818, NULL) (332884, NULL) (332944, NULL) (333013, NULL) (333073, NULL) [0:0:1000:0:0:0:0] 100 rows, 100 pages, 4 levels: (333148, NULL) (333214, NULL) (333274, NULL) (333340, NULL) (333403, NULL) Checking BTree: Touched 0% bytes, 0 pages RowCountHistogram: 5% (actual 6%) key = (16984, 5669) value = 5100 (actual 6998 - -1% error) 10% (actual 9%) key = (50416, 16813) value = 15100 (actual 16798 - -1% error) 10% (actual 9%) key = (83701, 27908) value = 25100 (actual 26598 - -1% error) 10% (actual 9%) key = (116986, 39003) value = 35100 (actual 36398 - -1% error) 10% (actual 9%) key = (150319, 50114) value = 45100 (actual 46198 - -1% error) 10% (actual 9%) key = (183700, 61241) value = 55100 (actual 55998 - 0% error) 10% (actual 9%) key = (217081, 72368) value = 65100 (actual 65798 - 0% error) 10% (actual 9%) key = (250486, 83503) value = 75100 (actual 75598 - 0% error) 10% (actual 9%) key = (283771, 94598) value = 85100 (actual 85398 - 0% error) 14% (actual 14%) DataSizeHistogram: 5% (actual 6%) key = (16648, 5557) value = 524891 (actual 723287 - -1% error) 10% (actual 9%) key = (50086, 16703) value = 1569936 (actual 1747238 - -1% error) 9% (actual 9%) key = (83356, 27793) value = 2610698 (actual 2767306 - -1% error) 10% (actual 9%) key = (116647, 38890) value = 3652143 (actual 3787394 - -1% error) 9% (actual 9%) key = (149656, 49893) value = 4685435 (actual 4800597 - -1% error) 10% (actual 9%) key = (183040, 61021) value = 5728420 (actual 5822785 - 0% error) 10% (actual 9%) key = (216727, 72250) value = 6776444 (actual 6848929 - 0% error) 9% (actual 9%) key = (250144, 83389) value = 7813547 (actual 7865227 - 0% error) 9% (actual 9%) key = (283444, 94489) value = 8853697 (actual 8884838 - 0% error) 14% (actual 14%) Checking Flat: Touched 100% bytes, 1000 pages RowCountHistogram: 10% (actual 11%) key = (33379, 11134) value = 10000 (actual 11800 - -1% error) 10% (actual 9%) key = (66721, 22248) value = 20000 (actual 21600 - -1% error) 10% (actual 9%) key = (100015, 33346) value = 30000 (actual 31400 - -1% error) 10% (actual 9%) key = (133258, 44427) value = 40000 (actual 41200 - -1% error) 10% (actual 9%) key = (166621, 55548) value = 50000 (actual 51000 - -1% error) 10% (actual 9%) key = (200041, 66688) value = 60000 (actual 60800 - 0% error) 10% (actual 9%) key = (233449, 77824) value = 70000 (actual 70600 - 0% error) 10% (actual 9%) key = (266824, 88949) value = 80000 (actual 80400 - 0% error) 10% (actual 9%) key = (300073, 100032) value = 90000 (actual 90200 - 0% error) 10% (actual 9%) DataSizeHistogram: 10% (actual 11%) key = (33187, NULL) 
value = 1041247 (actual 1229534 - -1% error) 10% (actual 9%) key = (66517, NULL) value = 2082456 (actual 2249844 - -1% error) 10% (actual 9%) key = (99709, NULL) value = 3123684 (actual 3270138 - -1% error) 10% (actual 9%) key = (132925, NULL) value = 4164886 (actual 4290603 - -1% error) 10% (actual 9%) key = (166246, NULL) value = 5206111 (actual 5311117 - -1% error) 10% (actual 9%) key = (199678, NULL) value = 6247321 (actual 6331068 - 0% error) 10% (actual 9%) key = (233290, NULL) value = 7288529 (actual 7350869 - 0% error) 10% (actual 9%) key = (266701, NULL) value = 8329759 (actual 8371441 - 0% error) 10% (actual 9%) key = (300052, NULL) value = 9371030 (actual 9392083 - 0% error) 9% (actual 9%) Checking Mixed: Touched 0% bytes, 0 pages RowCountHistogram: 100% (actual 100%) DataSizeHistogram: 100% (actual 100%) |99.8%| [TM] {BAZEL_UPLOAD} ydb/core/tablet_flat/ut/unittest |99.8%| [TA] $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.8%| [TA] {RESULT} $(B)/ydb/core/tablet_flat/ut/test-results/unittest/{meta.json ... results_accumulator.log} |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror3DC::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] >> 
test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [FAIL] >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterMirror34::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror3DC::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleMax::test_check_shutdown_state_storage_nodes [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [FAIL] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] |99.8%| [TM] 
{BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_discovery_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_value_workload.py::TestResultSetValueWorkload::test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleBlock42::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> test_workload.py::TestYdbTestShardWorkload::test >> test_workload_topic.py::TestYdbTopicWorkload::test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] >> test_scheme_board_workload.py::TestSchemeBoard::test_scheme_board >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsBlockMax::test_restart_as_much_as_can [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> S3PathStyleBackup::DisableVirtualAddressing >> test_encryption.py::TestEncryption::test_simple_encryption >> S3PathStyleBackup::DisableVirtualAddressing [GOOD] >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/s3_path_style/unittest >> S3PathStyleBackup::DisableVirtualAddressing [GOOD] |99.8%| [TM] {RESULT} ydb/tests/functional/backup/s3_path_style/unittest |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/backup/s3_path_style/unittest >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_workload.py::TestYdbBackupWorkload::test >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_restart.py::TestCmsStateStorageRestartsMirrorMax::test_restart_as_much_as_can [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_state_storage.py::TestCmsStateStorageSimpleKeep::test_check_shutdown_state_storage_nodes [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartClusterBlock42::test_when_create_many_tablets_and_restart_cluster_then_every_thing_is_ok [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> test_workload.py::TestYdbWorkload::test >> 
test_workload.py::TestYdbWorkload::test |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartMultipleMirror34::test_tablets_are_successfully_started_after_few_killed_nodes [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> test_workload_topic.py::TestYdbTopicWorkload::test >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [GOOD] >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] >> Transfer::BaseScenario_Local >> Transfer::BaseScenario_Local [GOOD] >> Transfer::BaseScenario_Remote >> test_workload_topic.py::TestYdbTopicWorkload::test [GOOD] >> Transfer::BaseScenario_Remote [GOOD] >> Transfer::CreateTransfer_TargetNotFound >> Transfer::CreateTransfer_TargetNotFound [GOOD] >> Transfer::ConnectionString_BadChar >> Transfer::ConnectionString_BadChar [GOOD] >> Transfer::ConnectionString_BadDNSName >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] >> Transfer::ConnectionString_BadDNSName [GOOD] >> Transfer::Create_WithPermission >> Transfer::Create_WithPermission [GOOD] >> Transfer::Create_WithoutTablePermission >> Transfer::Create_WithoutTablePermission [GOOD] >> Transfer::Create_WithoutAlterTopicPermission_AndGrant >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_seamless_migration_to_exclusive_nodes[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> Transfer::Create_WithoutAlterTopicPermission_AndGrant [GOOD] >> Transfer::LocalTopic_WithPermission |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/topic/tests/py3test >> test_workload_topic.py::TestYdbTopicWorkload::test [GOOD] |99.8%| [TM] {RESULT} ydb/tests/stress/topic/tests/py3test |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/topic/tests/py3test >> test_serverless.py::test_create_table[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [GOOD] >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/restarts/py3test >> test_restarts.py::TestRestartSingleBlock42::test_restart_single_node_is_ok [GOOD] |99.8%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/restarts/py3test |99.8%| [TA] $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} |99.8%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/cms/py3test >> test_cms_erasure.py::TestDegradedGroupBlock42Max::test_no_degraded_groups_after_shutdown [GOOD] |99.8%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/cms/py3test >> test_workload.py::TestYdbKvWorkload::test[row] >> Transfer::LocalTopic_WithPermission [GOOD] >> Transfer::LocalTopic_BigMessage |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/restarts/test-results/py3test/{meta.json ... results_accumulator.log} >> test_workload.py::TestYdbWorkload::test[row-local] |99.8%| [TA] $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {BAZEL_UPLOAD} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/cms/test-results/py3test/{meta.json ... results_accumulator.log} >> test_workload.py::TestYdbWorkload::test[row] >> Transfer::LocalTopic_BigMessage [GOOD] >> Transfer::AlterLambda >> test_workload.py::TestYdbWorkload::test [GOOD] >> Transfer::AlterLambda [GOOD] >> Transfer::EnsureError >> Transfer::EnsureError [GOOD] >> Transfer::CheckCommittedOffset_Local >> Replication::Types >> Transfer::CheckCommittedOffset_Local [GOOD] >> Transfer::CheckCommittedOffset_Remote >> Replication::Types [GOOD] >> Replication::PauseAndResumeReplication |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/viewer/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/viewer/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/viewer/tests/py3test >> test_workload.py::TestYdbTestShardWorkload::test [GOOD] >> Transfer::CheckCommittedOffset_Remote [GOOD] >> Transfer::DropTransfer >> Transfer::DropTransfer [GOOD] >> Transfer::CreateAndDropConsumer >> test_workload.py::TestYdbWorkload::test >> Transfer::CreateAndDropConsumer [GOOD] >> Transfer::DescribeError_OnLambdaCompilation >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [GOOD] >> Replication::PauseAndResumeReplication [GOOD] >> Replication::TopicAutopartitioning >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] >> Transfer::DescribeError_OnLambdaCompilation [GOOD] >> Transfer::PausedAfterError ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] 2026-01-04 03:15:24,459 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-04 03:15:25,278 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. 
Process tree before termination: pid rss ref pdirt 1155715 1.2G 1.2G 1.2G ydb-tests-functional-serverless --basetemp /home/runner/.ya/build/build_root/c5zo/00246d/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 1286366 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/c5zo/00246d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ 1287994 1.2G 1.4G 952M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/c5zo/00246d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ 1288024 1.1G 1.2G 819M ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/c5zo/00246d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ 1288317 958M 1.0G 636M └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/c5zo/00246d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 320, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 273, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 327, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 352, in pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 115, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 128, in runtestprotocol rep = call_and_report(item, "setup", log) File "contrib/python/pytest/py3/_pytest/runner.py", line 223, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 342, in from_call 
result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 263, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 158, in pytest_runtest_setup item.session._setupstate.setup(item) File "contrib/python/pytest/py3/_pytest/runner.py", line 511, in setup col.setup() File "contrib/python/pytest/py3/_pytest/python.py", line 1847, in setup self._request._fillfixtures() File "contrib/python/pytest/py3/_pytest/fixtures.py", line 689, in _fillfixtures item.funcargs[argname] = self.getfixturevalue(argname) File "contrib/python/pytest/py3/_pytest/fixtures.py", line 547, in getfixturevalue fixturedef = self._get_active_fixturedef(argname) File "contrib/python/pytest/py3/_pytest/fixtures.py", line 566, in _get_active_fixturedef self._compute_fixture_value(fixturedef) File "contrib/python/pytest/py3/_pytest/fixtures.py", line 648, in _compute_fixture_value fixturedef.execute(request=subrequest) File "contrib/python/pytest/py3/_pytest/fixtures.py", line 1087, in execute result = ihook.pytest_fixture_setup(fixturedef=self, request=request) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/fixtures.py", line 1140, in pytest_fixture_setup result = call_fixture_func(fixturefunc, request, kwargs) File "contrib/python/pytest/py3/_pytest/fixtures.py", line 910, in call_fixture_func fixture_result = next(generator) File "ydb/tests/functional/serverless/conftest.py", line 131, in ydb_disk_small_quoted_serverless_db with ydb_serverless_db_ctx(ydb_cluster, database_name, ydb_hostel_db, disk_quotas=disk_quotas): File "contrib/tools/python3/Lib/contextlib.py", line 137, in __enter__ return next(self.gen) File "ydb/tests/functional/serverless/conftest.py", line 77, in ydb_serverless_db_ctx ydb_cluster.create_serverless_database( File "ydb/tests/library/harness/kikimr_cluster_interface.py", line 307, in create_serverless_database operation = self.__wait_console_op(operation.id, timeout_seconds=timeout_seconds, token=token) File "ydb/tests/library/harness/kikimr_cluster_interface.py", line 192, in __wait_console_op time.sleep(step_seconds) File "library/python/pytest/plugins/ya.py", line 345, in _graceful_shutdown stack = traceback.format_stack() Thread 0x00007b3d8731a640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b3d86010640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File 
"contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b3d88624640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b3da906b640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b3d8bf42640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b3d9eb1a640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b3da8688640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b3dabfe8640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File 
"contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Current thread 0x00007f3ddc379940 (most recent call first): File "ydb/tests/library/common/wait_for.py", line 19 in wait_for File "ydb/tests/library/harness/kikimr_cluster_interface.py", line 401 in _remove_database_wait_tenant_gone File "ydb/tests/library/harness/kikimr_cluster_interface.py", line 365 in remove_database File "ydb/tests/functional/serverless/conftest.py", line 54 in ydb_hostel_db_ctx File "contrib/tools/python3/Lib/contextlib.py", line 144 in __exit__ File "ydb/tests/functional/serverless/conftest.py", line 64 in ydb_hostel_db File "contrib/python/pytest/py3/_pytest/fixtures.py", line 926 in _teardown_yield_fixture File "contrib/python/pytest/py3/_pytest/fixtures.py", line 1042 in finish File "contrib/python/pytest/py3/_pytest/runner.py", line 543 in teardown_exact File "contrib/python/pytest/py3/_pytest/runner.py", line 109 in pytest_sessionfinish File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512 in __call__ File "contrib/python/pytest/py3/_pytest/main.py", line 308 in wrap_session File "contrib/python/pytest/py3/_pytest/main.py", line 320 in pytest_cmdline_main File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512 in __call__ File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175 in main File "library/python/pytest/main.py", line 101 in main Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...runner/.ya/build/build_root/c5zo/00246d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/c5zo/00246d', '--source-root', '/home/runner/.ya/build/build_root/c5zo/00246d/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/c5zo/00246d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/serverless', '--test-tool-bin', '/home/runner/.ya/tools/v4/10669990147/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '10', '--modulo-index', '3', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/functional/serverless', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 
'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_serverless.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...runner/.ya/build/build_root/c5zo/00246d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/c5zo/00246d', '--source-root', '/home/runner/.ya/build/build_root/c5zo/00246d/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/c5zo/00246d/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk3/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/serverless', '--test-tool-bin', '/home/runner/.ya/tools/v4/10669990147/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '10', '--modulo-index', '3', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/functional/serverless', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_serverless.py']' stopped by 600 seconds timeout",), {}) 2026-01-04 03:15:56,409 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2026-01-04 03:15:56,410 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores 2026-01-04 03:16:13,141 WARNING libarchive: File (test_serverless.py.test_database_with_column_disk_quotas.enable_alter_database_create_hive_first--true-enable_pool_encryption--true/cluster/slot_1/logfile_zt4bsym2.log) size has changed. Can't write more data than was declared in the tar header (580310). 
(probably file was changed during archiving) |99.9%| [TM] {BAZEL_UPLOAD, SKIPPED} ydb/tests/functional/serverless/py3test >> test_encryption.py::TestEncryption::test_simple_encryption [GOOD] >> Transfer::PausedAfterError [GOOD] >> Transfer::DescribeTransferWithErrorTopicNotFound >> Transfer::DescribeTransferWithErrorTopicNotFound [GOOD] >> Transfer::CustomConsumer |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/testshard_workload/tests/py3test >> test_workload.py::TestYdbTestShardWorkload::test [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/testshard_workload/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/testshard_workload/tests/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] >> test_workload.py::TestYdbBackupWorkload::test [GOOD] >> test_workload.py::TestYdbMixedWorkload::test[row] >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-None] >> Transfer::CustomConsumer [GOOD] >> Transfer::CustomConsumer_NotExists_Remote |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/encryption/py3test >> test_encryption.py::TestEncryption::test_simple_encryption [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/encryption/py3test |99.9%| [TM] {RESULT} ydb/tests/functional/encryption/py3test >> Transfer::CustomConsumer_NotExists_Remote [GOOD] >> Transfer::CustomConsumer_NotExists_Local >> Transfer::CustomConsumer_NotExists_Local [GOOD] >> Transfer::CustomFlushInterval |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/backup/tests/py3test >> test_workload.py::TestYdbBackupWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/backup/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/backup/tests/py3test >> Transfer::CustomFlushInterval [GOOD] >> Transfer::AlterFlushInterval >> test_workload.py::TestYdbWorkload::test [GOOD] >> Transfer_ColumnTable::KeyColumnFirst >> Transfer::AlterFlushInterval [GOOD] >> Transfer::AlterBatchSize >> Transfer::AlterBatchSize [GOOD] >> Transfer::CreateTransferSourceNotExists >> Transfer::CreateTransferSourceNotExists [GOOD] >> Transfer::CreateTransferSourceNotExists_LocalTopic >> Transfer::CreateTransferSourceNotExists_LocalTopic [GOOD] >> Transfer::CreateTransferSourceDirNotExists >> Transfer::CreateTransferSourceDirNotExists [GOOD] >> Transfer::CreateTransferSourceDirNotExists_LocalTopic >> Transfer::CreateTransferSourceDirNotExists_LocalTopic [GOOD] >> Transfer::TransferSourceDropped >> test_value_workload.py::TestResultSetValueWorkload::test [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/s3_backups/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/s3_backups/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/s3_backups/tests/py3test >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-None] [GOOD] >> Transfer_ColumnTable::KeyColumnFirst [GOOD] >> Transfer_ColumnTable::KeyColumnLast >> Transfer::TransferSourceDropped [GOOD] >> Transfer::TransferSourceDropped_LocalTopic >> Transfer::TransferSourceDropped_LocalTopic [GOOD] >> Transfer::CreateTransferSourceIsNotTopic |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_create_table_using_exclusive_nodes[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} 
ydb/tests/functional/serverless/py3test >> Transfer::CreateTransferSourceIsNotTopic [GOOD] >> Transfer::CreateTransferSourceIsNotTopic_LocalTopic >> Backup::UuidValue >> test_workload.py::TestYdbWorkload::test[row-local] [GOOD] >> test_workload.py::TestYdbWorkload::test >> Transfer::CreateTransferSourceIsNotTopic_LocalTopic [GOOD] >> Transfer::CreateTransferTargetIsNotTable >> Transfer::CreateTransferTargetIsNotTable [GOOD] >> Transfer::CreateTransferTargetNotExists >> Transfer_ColumnTable::KeyColumnLast [GOOD] >> Transfer_ColumnTable::ComplexKey >> Transfer::CreateTransferTargetNotExists [GOOD] >> Transfer::PauseAndResumeTransfer >> test_arrow_workload.py::TestResultSetArrowWorkload::test >> test_workload.py::TestYdbWorkload::test[row] [GOOD] >> Backup::UuidValue [GOOD] >> test_workload.py::TestYdbWorkload::test >> test_scheme_board_workload.py::TestSchemeBoard::test_scheme_board [GOOD] >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-test_scv] >> test_workload.py::TestYdbKvWorkload::test[row] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/backup/unittest >> Backup::UuidValue [GOOD] Test command err: Found S3 object: "ProducerUuidValueBackup/data_00.csv" Found S3 object: "ProducerUuidValueBackup/data_00.csv.sha256" Found S3 object: "ProducerUuidValueBackup/metadata.json" Found S3 object: "ProducerUuidValueBackup/metadata.json.sha256" Found S3 object: "ProducerUuidValueBackup/permissions.pb" Found S3 object: "ProducerUuidValueBackup/permissions.pb.sha256" Found S3 object: "ProducerUuidValueBackup/scheme.pb" Found S3 object: "ProducerUuidValueBackup/scheme.pb.sha256" |99.9%| [TM] {RESULT} ydb/tests/functional/backup/unittest |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/backup/unittest >> Transfer_ColumnTable::ComplexKey [GOOD] >> Transfer_ColumnTable::NullableColumn >> test_workload.py::TestYdbWorkload::test >> Transfer::PauseAndResumeTransfer [GOOD] >> Transfer::TargetTableWithoutDirectory >> Transfer_ColumnTable::NullableColumn [GOOD] >> Transfer_ColumnTable::WriteNullToKeyColumn >> test_workload.py::TestYdbWorkload::test[column] >> test_workload.py::TestYdbWorkload::test[row-remote] >> Transfer::TargetTableWithoutDirectory [GOOD] >> Transfer::TargetTableWriteOutsideDirectory >> Transfer::TargetTableWriteOutsideDirectory [GOOD] >> Transfer::TargetTableWriteInsideDirectory >> Transfer_ColumnTable::WriteNullToKeyColumn [GOOD] >> Transfer_ColumnTable::WriteNullToColumn |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/scheme_board/pile_promotion/tests/py3test >> test_scheme_board_workload.py::TestSchemeBoard::test_scheme_board [GOOD] >> test_workload.py::TestYdbKvWorkload::test[column] >> Transfer::TargetTableWriteInsideDirectory [GOOD] >> Transfer::AlterTargetDirectory >> Transfer_ColumnTable::WriteNullToColumn [GOOD] >> Transfer_ColumnTable::Upsert_DifferentBatch >> Transfer::AlterTargetDirectory [GOOD] >> Transfer::WriteToNotExists |99.9%| [TM] {RESULT} ydb/tests/stress/scheme_board/pile_promotion/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/scheme_board/pile_promotion/tests/py3test >> Transfer_ColumnTable::Upsert_DifferentBatch [GOOD] >> Transfer_ColumnTable::Upsert_OneBatch >> Transfer::WriteToNotExists [GOOD] >> Transfer::WriteToNotTable >> ConsistentIndexRead::InteractiveTx >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-test_scv] [GOOD] >> Transfer_ColumnTable::Upsert_OneBatch [GOOD] >> Transfer_ColumnTable::ColumnType_Date >> Transfer::WriteToNotTable [GOOD] >> 
Transfer::AlterLambdaOnWork |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/show_create/view/tests/py3test >> test_workload.py::TestYdbWorkload::test_show_create_view_workload[30-test_scv] [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/show_create/view/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/show_create/view/tests/py3test >> test_workload_topic.py::TestYdbTopicWorkload::test [GOOD] >> Transfer::AlterLambdaOnWork [GOOD] >> Transfer::CreateAndAlterTransferInDirectory >> Transfer::CreateAndAlterTransferInDirectory [GOOD] >> Transfer::Alter_WithSecret >> test_workload.py::TestYdbWorkload::test >> Transfer_ColumnTable::ColumnType_Date [GOOD] >> Transfer_ColumnTable::ColumnType_Double >> Transfer::Alter_WithSecret [GOOD] >> Transfer::MessageField_Key >> test_workload.py::TestYdbWorkload::test [GOOD] >> Transfer::MessageField_Key [GOOD] >> Transfer::MessageField_Key_Empty >> Transfer_ColumnTable::ColumnType_Double [GOOD] >> Transfer_ColumnTable::ColumnType_Int8 |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/topic_kafka/tests/py3test >> test_workload_topic.py::TestYdbTopicWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/topic_kafka/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/topic_kafka/tests/py3test >> Transfer::MessageField_Key_Empty [GOOD] >> Transfer::ErrorInMultiLine >> Transfer::ErrorInMultiLine [GOOD] >> Transfer::ReadFromCDC_Remote >> Transfer_ColumnTable::ColumnType_Int8 [GOOD] >> Transfer_ColumnTable::ColumnType_Int16 >> Transfer::ReadFromCDC_Remote [GOOD] >> Transfer::ReadFromCDC_Local >> test_workload.py::TestDeltaProtocol::test >> Transfer::ReadFromCDC_Local [GOOD] >> Transfer::MessageField_CreateTimestamp_Remote >> Transfer_ColumnTable::ColumnType_Int16 [GOOD] >> Transfer_ColumnTable::ColumnType_Int32 >> test_workload.py::TestYdbWorkload::test [FAIL] >> Transfer::MessageField_CreateTimestamp_Remote [GOOD] >> Transfer::MessageField_CreateTimestamp_Local >> KqpQueryService::ReplyPartLimitProxyNode >> KqpQueryService::ReplyPartLimitProxyNode [GOOD] >> NodeIdDescribe::HasDistribution >> test_workload.py::TestYdbMixedWorkload::test[row] [GOOD] >> Transfer::MessageField_CreateTimestamp_Local [GOOD] >> Transfer::MessageField_WriteTimestamp_Remote >> test_workload.py::TestYdbWorkload::test[row-remote] [GOOD] >> Transfer_ColumnTable::ColumnType_Int32 [GOOD] >> Transfer_ColumnTable::ColumnType_Int64 >> Transfer::MessageField_WriteTimestamp_Remote [GOOD] >> Transfer::MessageField_WriteTimestamp_Local >> test_workload.py::TestYdbWorkload::test[column] [GOOD] >> Transfer::MessageField_WriteTimestamp_Local [GOOD] >> Transfer::MessageField_Attributes_Remote >> test_workload.py::TestYdbWorkload::test[column-local] >> Transfer::MessageField_Attributes_Remote [GOOD] >> Transfer::MessageField_Attributes_Local >> Transfer_ColumnTable::ColumnType_Int64 [GOOD] >> Transfer_ColumnTable::ColumnType_Utf8_LongValue >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD] >> Transfer::MessageField_Attributes_Local [GOOD] >> Transfer::MessageField_Partition_Remote |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/olap_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [FAIL] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/olap_workload/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/olap_workload/tests/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/simple_queue/tests/py3test >> test_workload.py::TestYdbWorkload::test[column] [GOOD] |99.9%| [TM] {BAZEL_UPLOAD, 
SKIPPED} ydb/tests/stress/simple_queue/tests/py3test >> Transfer::MessageField_Partition_Remote [GOOD] >> Transfer::MessageField_Partition_Local >> Transfer_ColumnTable::ColumnType_Utf8_LongValue [GOOD] >> Transfer_ColumnTable::MessageField_Attributes |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/kv/tests/py3test >> test_workload.py::TestYdbKvWorkload::test[column] [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/simple_queue/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/kv/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/kv/tests/py3test ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/serverless/py3test >> test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] 2026-01-04 03:19:18,226 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper execution timed out 2026-01-04 03:19:19,740 WARNING devtools.ya.test.programs.test_tool.run_test.run_test: Wrapper has overrun 600 secs timeout. Process tree before termination: pid rss ref pdirt 1210697 833M 834M 755M ydb-tests-functional-serverless --basetemp /home/runner/.ya/build/build_root/c5zo/002480/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 1300629 1.6G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/c5zo/002480/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ 1302972 1.6G 1.5G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/c5zo/002480/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ 1303254 1.5G 1.5G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/c5zo/002480/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ 1303641 1.6G 1.5G 1.1G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/.ya/build/build_root/c5zo/002480/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/ Test command err: File "library/python/pytest/main.py", line 101, in main rc = pytest.main( File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175, in main ret: Union[ExitCode, int] = config.hook.pytest_cmdline_main( File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 320, in pytest_cmdline_main return wrap_session(config, _main) File "contrib/python/pytest/py3/_pytest/main.py", line 273, in wrap_session session.exitstatus = doit(config, session) or 0 File "contrib/python/pytest/py3/_pytest/main.py", line 327, in _main config.hook.pytest_runtestloop(session=session) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/main.py", line 352, in 
pytest_runtestloop item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 115, in pytest_runtest_protocol runtestprotocol(item, nextitem=nextitem) File "contrib/python/pytest/py3/_pytest/runner.py", line 134, in runtestprotocol reports.append(call_and_report(item, "call", log)) File "contrib/python/pytest/py3/_pytest/runner.py", line 223, in call_and_report call = call_runtest_hook(item, when, **kwds) File "contrib/python/pytest/py3/_pytest/runner.py", line 262, in call_runtest_hook return CallInfo.from_call( File "contrib/python/pytest/py3/_pytest/runner.py", line 342, in from_call result: Optional[TResult] = func() File "contrib/python/pytest/py3/_pytest/runner.py", line 263, in lambda: ihook(item=item, **kwds), when=when, reraise=reraise File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "contrib/python/pytest/py3/_pytest/runner.py", line 170, in pytest_runtest_call item.runtest() File "contrib/python/pytest/py3/_pytest/python.py", line 1844, in runtest self.ihook.pytest_pyfunc_call(pyfuncitem=self) File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512, in __call__ return self._hookexec(self.name, self._hookimpls.copy(), kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120, in _hookexec return self._inner_hookexec(hook_name, methods, kwargs, firstresult) File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121, in _multicall res = hook_impl.function(*args) File "library/python/pytest/plugins/ya.py", line 591, in pytest_pyfunc_call pyfuncitem.retval = testfunction(**testargs) File "ydb/tests/functional/serverless/test_serverless.py", line 200, in test_turn_on_serverless_storage_billing time.sleep(15) File "library/python/pytest/plugins/ya.py", line 345, in _graceful_shutdown stack = traceback.format_stack() Thread 0x00007b417b687640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b4180db8640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b417f2ad640 (most recent call first): File "ydb/tests/library/common/wait_for.py", line 19 in wait_for File "ydb/tests/library/harness/daemon.py", line 198 in stop File "ydb/tests/library/harness/kikimr_runner.py", line 279 in 
stop File "ydb/tests/library/harness/kikimr_runner.py", line 693 in __stop_node File "ydb/tests/library/harness/kikimr_runner.py", line 707 in stop_node File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b41820c2640 (most recent call first): File "contrib/python/grpcio/py3/grpc/_channel.py", line 1942 in _poll_connectivity File "contrib/tools/python3/Lib/threading.py", line 1012 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b418727d640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b4183ed5640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Thread 0x00007b418bc71640 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 359 in wait File "contrib/python/grpcio/py3/grpc/_utilities.py", line 108 in _block File "contrib/python/grpcio/py3/grpc/_utilities.py", line 162 in result File "contrib/python/ydb/py3/ydb/connection.py", line 527 in ready_factory File "contrib/python/ydb/py3/ydb/resolver.py", line 177 in context_resolve File "contrib/tools/python3/Lib/contextlib.py", line 137 in __enter__ File "contrib/python/ydb/py3/ydb/pool.py", line 213 in execute_discovery File "contrib/python/ydb/py3/ydb/pool.py", line 264 in run File "contrib/tools/python3/Lib/threading.py", line 1075 in _bootstrap_inner File "contrib/tools/python3/Lib/threading.py", line 1032 in _bootstrap Current thread 0x00007f41bc249940 (most recent call first): File "contrib/tools/python3/Lib/threading.py", line 1169 in _wait_for_tstate_lock File "contrib/tools/python3/Lib/threading.py", line 1149 in join File "ydb/tests/library/harness/kikimr_runner.py", line 718 in stop File "ydb/tests/library/fixtures/__init__.py", line 59 in ydb_cluster File "contrib/python/pytest/py3/_pytest/fixtures.py", line 926 in _teardown_yield_fixture File "contrib/python/pytest/py3/_pytest/fixtures.py", line 1042 in finish File "contrib/python/pytest/py3/_pytest/runner.py", 
line 543 in teardown_exact File "contrib/python/pytest/py3/_pytest/runner.py", line 109 in pytest_sessionfinish File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512 in __call__ File "contrib/python/pytest/py3/_pytest/main.py", line 308 in wrap_session File "contrib/python/pytest/py3/_pytest/main.py", line 320 in pytest_cmdline_main File "contrib/python/pluggy/py3/pluggy/_callers.py", line 121 in _multicall File "contrib/python/pluggy/py3/pluggy/_manager.py", line 120 in _hookexec File "contrib/python/pluggy/py3/pluggy/_hooks.py", line 512 in __call__ File "contrib/python/pytest/py3/_pytest/config/__init__.py", line 175 in main File "library/python/pytest/main.py", line 101 in main Traceback (most recent call last): File "library/python/testing/yatest_common/yatest/common/process.py", line 384, in wait wait_for( File "library/python/testing/yatest_common/yatest/common/process.py", line 765, in wait_for raise TimeoutError(truncate(message, MAX_MESSAGE_LEN)) yatest.common.process.TimeoutError: ...runner/.ya/build/build_root/c5zo/002480/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/c5zo/002480', '--source-root', '/home/runner/.ya/build/build_root/c5zo/002480/environment/arcadia', '--output-dir', '/home/runner/.ya/build/build_root/c5zo/002480/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/serverless', '--test-tool-bin', '/home/runner/.ya/tools/v4/10669990147/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '10', '--modulo-index', '9', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/functional/serverless', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_serverless.py']' stopped by 600 seconds timeout During handling of the above exception, another exception occurred: Traceback (most recent call last): File "devtools/ya/test/programs/test_tool/run_test/run_test.py", line 1772, in main res.wait(check_exit_code=False, timeout=current_run_test_timeout, on_timeout=timeout_callback) File "library/python/testing/yatest_common/yatest/common/process.py", line 398, in wait raise ExecutionTimeoutError(self, str(e)) yatest.common.process.ExecutionTimeoutError: (("...runner/.ya/build/build_root/c5zo/002480/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/ytest.report.trace', '--build-root', '/home/runner/.ya/build/build_root/c5zo/002480', '--source-root', '/home/runner/.ya/build/build_root/c5zo/002480/environment/arcadia', '--output-dir', 
'/home/runner/.ya/build/build_root/c5zo/002480/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless/chunk9/testing_out_stuff', '--durations', '0', '--project-path', 'ydb/tests/functional/serverless', '--test-tool-bin', '/home/runner/.ya/tools/v4/10669990147/test_tool', '--ya-version', '2', '--collect-cores', '--sanitizer-extra-checks', '--build-type', 'release', '--tb', 'short', '--modulo', '10', '--modulo-index', '9', '--partition-mode', 'SEQUENTIAL', '--split-by-tests', '--dep-root', 'ydb/tests/functional/serverless', '--flags', 'ADD_PEERDIRS_GEN_TESTS=yes', '--flags', 'APPLE_SDK_LOCAL=yes', '--flags', 'CFLAGS=-fno-omit-frame-pointer -Wno-unknown-argument', '--flags', 'DEBUGINFO_LINES_ONLY=yes', '--flags', 'DISABLE_FLAKE8_MIGRATIONS=yes', '--flags', 'OPENSOURCE=yes', '--flags', 'SANITIZER_TYPE=address', '--flags', 'TESTS_REQUESTED=yes', '--flags', 'USE_AIO=static', '--flags', 'USE_CLANG_CL=yes', '--flags', 'USE_EAT_MY_DATA=yes', '--flags', 'USE_ICONV=static', '--flags', 'USE_IDN=static', '--flags', 'USE_PREBUILT_TOOLS=no', '--sanitize', 'address', '--test-file-filter', 'test_serverless.py']' stopped by 600 seconds timeout",), {}) 2026-01-04 03:19:50,413 WARNING library.python.cores: Core dump dir doesn't exist: /coredumps 2026-01-04 03:19:50,413 WARNING library.python.cores: Core dump dir doesn't exist: /var/tmp/cores |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/serverless/py3test >> test_workload.py::TestYdbMixedWorkload::test[column] >> test_workload.py::TestYdbWorkload::test >> Transfer::MessageField_Partition_Local [GOOD] >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-None] >> test_kafka_streams.py::TestYdbTopicWorkload::test ------- [TM] {asan, default-linux-x86_64, release} ydb/core/transfer/ut/functional/unittest >> Transfer::MessageField_Partition_Local [GOOD] Test command err: DDL: CREATE TABLE `Table_6195347959766041313` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_6195347959766041313` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_6195347959766041313` FROM `Topic_6195347959766041313` TO `Table_6195347959766041313` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_6195347959766041313` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_6195347959766041313` ORDER BY `Message` Attempt=18 count=0 >>>>> Query: SELECT `Message` FROM `Table_6195347959766041313` ORDER BY `Message` Attempt=17 count=0 >>>>> Query: SELECT `Message` FROM `Table_6195347959766041313` ORDER BY `Message` Attempt=16 count=0 >>>>> Query: SELECT `Message` FROM `Table_6195347959766041313` ORDER BY `Message` Attempt=15 count=0 >>>>> Query: SELECT `Message` FROM `Table_6195347959766041313` ORDER BY `Message` Attempt=14 count=1 DDL: DROP TRANSFER `Transfer_6195347959766041313`; DDL: DROP TABLE `Table_6195347959766041313` DDL: DROP TOPIC `Topic_6195347959766041313` DDL: CREATE TABLE `Table_13160913554830859452` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_13160913554830859452` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_13160913554830859452` FROM `Topic_13160913554830859452` TO `Table_13160913554830859452` USING $l WITH ( 
CONNECTION_STRING = 'grpc://localhost:4012/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_13160913554830859452` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_13160913554830859452` ORDER BY `Message` Attempt=18 count=0 >>>>> Query: SELECT `Message` FROM `Table_13160913554830859452` ORDER BY `Message` Attempt=17 count=0 >>>>> Query: SELECT `Message` FROM `Table_13160913554830859452` ORDER BY `Message` Attempt=16 count=0 >>>>> Query: SELECT `Message` FROM `Table_13160913554830859452` ORDER BY `Message` Attempt=15 count=0 >>>>> Query: SELECT `Message` FROM `Table_13160913554830859452` ORDER BY `Message` Attempt=14 count=1 DDL: DROP TRANSFER `Transfer_13160913554830859452`; DDL: DROP TABLE `Table_13160913554830859452` DDL: DROP TOPIC `Topic_13160913554830859452` DDL: CREATE TOPIC `Topic_14893729745577984878` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64) |> ]; }; ; CREATE TRANSFER `Transfer_14893729745577984878` FROM `Topic_14893729745577984878` TO `Table_14893729745577984878` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:4012/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: [ {
: Error: Executing ESchemeOpCreateTransfer, code: 2003 subissue: {
: Error: Path does not exist, code: 2003 } } {
: Error: Query invalidated on scheme/internal error during Scheme execution, code: 2019 } ] >>>>> EXPECTED: Path does not exist DDL: DROP TOPIC `Topic_14893729745577984878` DDL: CREATE TABLE `Table_5673026561333223556` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_5673026561333223556` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key: 1, Message:CAST("Message-1" AS Utf8) |> ]; }; CREATE TRANSFER Transfer_5673026561333223556 FROM Topic_5673026561333223556 TO Table_5673026561333223556 USING $l WITH ( CONNECTION_STRING = "grp§c://localhost:2135/?database=/Root" ) >>>>> ACTUAL: {
: Error: Discovery error: /Root/Topic_5673026561333223556: TRANSPORT_UNAVAILABLE ([ {
: Error: GRpc error: (14): DNS resolution failed for grp§c://localhost:2135: C-ares status is not ARES_SUCCESS qtype=A name=grp§c://localhost:2135 is_balancer=0: Misformatted domain name } {
: Error: Grpc error response on endpoint grp§c://localhost:2135 } ]) } >>>>> EXPECTED: DNS resolution failed for grp§c://localhost:2135 DDL: DROP TRANSFER `Transfer_5673026561333223556`; DDL: DROP TABLE `Table_5673026561333223556` DDL: DROP TOPIC `Topic_5673026561333223556` DDL: CREATE TABLE `Table_17256207322526852198` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_17256207322526852198` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key: 1, Message:CAST("Message-1" AS Utf8) |> ]; }; CREATE TRANSFER Transfer_17256207322526852198 FROM Topic_17256207322526852198 TO Table_17256207322526852198 USING $l WITH ( CONNECTION_STRING = "grpc://domain-not-exists-localhost.com.moc:2135/?database=/Root" ) >>>>> ACTUAL: {
: Error: Discovery error: /Root/Topic_17256207322526852198: TRANSPORT_UNAVAILABLE ([ {
: Error: GRpc error: (14): DNS resolution failed for domain-not-exists-localhost.com.moc:2135: C-ares status is not ARES_SUCCESS qtype=A name=domain-not-exists-localhost.com.moc is_balancer=0: Domain name not found } {
: Error: Grpc error response on endpoint domain-not-exists-localhost.com.moc:2135 } ]) } >>>>> EXPECTED: Grpc error response on endpoint domain-not-exists-localhost.com.moc:2135 DDL: DROP TRANSFER `Transfer_17256207322526852198`; DDL: DROP TABLE `Table_17256207322526852198` DDL: DROP TOPIC `Topic_17256207322526852198` DDL: CREATE USER u738 DDL: GRANT 'ydb.granular.create_table', 'ydb.granular.create_queue' ON `/local` TO `u738@builtin` DDL: CREATE TABLE `Table_12696865585305769491` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: GRANT 'ydb.generic.write', 'ydb.generic.read' ON `/local/Table_12696865585305769491` TO `u738@builtin` DDL: CREATE TOPIC `Topic_12696865585305769491` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: GRANT ALL ON `/local/Topic_12696865585305769491` TO `u738@builtin` DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_12696865585305769491` FROM `Topic_12696865585305769491` TO `Table_12696865585305769491` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:4012/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); DDL: DROP TOPIC `Topic_12696865585305769491` DDL: DROP TRANSFER `Transfer_12696865585305769491`; DDL: CREATE USER u4383 DDL: GRANT 'ydb.granular.create_table', 'ydb.granular.create_queue' ON `/local` TO `u4383@builtin` DDL: CREATE TABLE `Table_7334190441734137293` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: GRANT 'ydb.generic.read' ON `/local/Table_7334190441734137293` TO `u4383@builtin` DDL: CREATE TOPIC `Topic_7334190441734137293` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: GRANT ALL ON `/local/Topic_7334190441734137293` TO `u4383@builtin` DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_7334190441734137293` FROM `Topic_7334190441734137293` TO `Table_7334190441734137293` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:4012/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Executing ESchemeOpCreateTransfer, code: 2018 subissue: {
: Error: Access denied for scheme request, code: 2018 subissue: {
: Error: Access denied. } } } >>>>> EXPECTED: Access denied for scheme request DDL: DROP TOPIC `Topic_7334190441734137293` DDL: CREATE USER u2464 DDL: CREATE TABLE `Table_17342999858019664929` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_17342999858019664929` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: GRANT 'ydb.generic.read' ON `/local/Topic_17342999858019664929` TO `u2464@builtin` DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_17342999858019664929` FROM `Topic_17342999858019664929` TO `Table_1734299985801 ... _7898142651510093261` ( Offset Uint64 NOT NULL, CreateTimestamp Timestamp, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_7898142651510093261` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), CreateTimestamp:$x._create_timestamp |> ]; }; ; CREATE TRANSFER `Transfer_7898142651510093261` FROM `Topic_7898142651510093261` TO `Table_7898142651510093261` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:4012/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `CreateTimestamp` FROM `Table_7898142651510093261` ORDER BY `CreateTimestamp` Attempt=19 count=0 >>>>> Query: SELECT `CreateTimestamp` FROM `Table_7898142651510093261` ORDER BY `CreateTimestamp` Attempt=18 count=0 >>>>> Query: SELECT `CreateTimestamp` FROM `Table_7898142651510093261` ORDER BY `CreateTimestamp` Attempt=17 count=0 >>>>> Query: SELECT `CreateTimestamp` FROM `Table_7898142651510093261` ORDER BY `CreateTimestamp` Attempt=16 count=1 DDL: DROP TRANSFER `Transfer_7898142651510093261`; DDL: DROP TABLE `Table_7898142651510093261` DDL: DROP TOPIC `Topic_7898142651510093261` DDL: CREATE TABLE `Table_6449985091771379827` ( Offset Uint64 NOT NULL, CreateTimestamp Timestamp, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_6449985091771379827` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), CreateTimestamp:$x._create_timestamp |> ]; }; ; CREATE TRANSFER `Transfer_6449985091771379827` FROM `Topic_6449985091771379827` TO `Table_6449985091771379827` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `CreateTimestamp` FROM `Table_6449985091771379827` ORDER BY `CreateTimestamp` Attempt=19 count=0 >>>>> Query: SELECT `CreateTimestamp` FROM `Table_6449985091771379827` ORDER BY `CreateTimestamp` Attempt=18 count=0 >>>>> Query: SELECT `CreateTimestamp` FROM `Table_6449985091771379827` ORDER BY `CreateTimestamp` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_6449985091771379827`; DDL: DROP TABLE `Table_6449985091771379827` DDL: DROP TOPIC `Topic_6449985091771379827` DDL: CREATE TABLE `Table_2627213584644790997` ( Offset Uint64 NOT NULL, WriteTimestamp Timestamp, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_2627213584644790997` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), WriteTimestamp:$x._write_timestamp |> ]; }; ; CREATE TRANSFER `Transfer_2627213584644790997` FROM `Topic_2627213584644790997` TO `Table_2627213584644790997` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:4012/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `WriteTimestamp` FROM `Table_2627213584644790997` ORDER BY `WriteTimestamp` 
Attempt=19 count=0 >>>>> Query: SELECT `WriteTimestamp` FROM `Table_2627213584644790997` ORDER BY `WriteTimestamp` Attempt=18 count=0 >>>>> Query: SELECT `WriteTimestamp` FROM `Table_2627213584644790997` ORDER BY `WriteTimestamp` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_2627213584644790997`; DDL: DROP TABLE `Table_2627213584644790997` DDL: DROP TOPIC `Topic_2627213584644790997` DDL: CREATE TABLE `Table_6925602170720766263` ( Offset Uint64 NOT NULL, WriteTimestamp Timestamp, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_6925602170720766263` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), WriteTimestamp:$x._write_timestamp |> ]; }; ; CREATE TRANSFER `Transfer_6925602170720766263` FROM `Topic_6925602170720766263` TO `Table_6925602170720766263` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `WriteTimestamp` FROM `Table_6925602170720766263` ORDER BY `WriteTimestamp` Attempt=19 count=0 >>>>> Query: SELECT `WriteTimestamp` FROM `Table_6925602170720766263` ORDER BY `WriteTimestamp` Attempt=18 count=0 >>>>> Query: SELECT `WriteTimestamp` FROM `Table_6925602170720766263` ORDER BY `WriteTimestamp` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_6925602170720766263`; DDL: DROP TABLE `Table_6925602170720766263` DDL: DROP TOPIC `Topic_6925602170720766263` DDL: CREATE TABLE `Table_4181850982459497618` ( Offset Uint64 NOT NULL, Value Utf8, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_4181850982459497618` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), Value:CAST($x._attributes['attribute_key'] AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_4181850982459497618` FROM `Topic_4181850982459497618` TO `Table_4181850982459497618` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:4012/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Value` FROM `Table_4181850982459497618` ORDER BY `Value` Attempt=19 count=0 >>>>> Query: SELECT `Value` FROM `Table_4181850982459497618` ORDER BY `Value` Attempt=18 count=0 >>>>> Query: SELECT `Value` FROM `Table_4181850982459497618` ORDER BY `Value` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_4181850982459497618`; DDL: DROP TABLE `Table_4181850982459497618` DDL: DROP TOPIC `Topic_4181850982459497618` DDL: CREATE TABLE `Table_12025297312533530959` ( Offset Uint64 NOT NULL, Value Utf8, PRIMARY KEY (Offset) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_12025297312533530959` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Offset:CAST($x._offset AS Uint64), Value:CAST($x._attributes['attribute_key'] AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_12025297312533530959` FROM `Topic_12025297312533530959` TO `Table_12025297312533530959` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Value` FROM `Table_12025297312533530959` ORDER BY `Value` Attempt=19 count=0 >>>>> Query: SELECT `Value` FROM `Table_12025297312533530959` ORDER BY `Value` Attempt=18 count=0 >>>>> Query: SELECT `Value` FROM `Table_12025297312533530959` ORDER BY `Value` Attempt=17 count=0 >>>>> Query: SELECT `Value` FROM `Table_12025297312533530959` ORDER BY `Value` Attempt=16 count=1 DDL: DROP TRANSFER `Transfer_12025297312533530959`; DDL: DROP TABLE `Table_12025297312533530959` DDL: DROP TOPIC `Topic_12025297312533530959` DDL: CREATE TABLE `Table_10911420982718045691` ( 
Partition Uint32 NOT NULL, Message Utf8, PRIMARY KEY (Partition) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_10911420982718045691` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Partition:CAST($x._partition AS Uint32), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_10911420982718045691` FROM `Topic_10911420982718045691` TO `Table_10911420982718045691` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:4012/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Partition`, `Message` FROM `Table_10911420982718045691` ORDER BY `Partition`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Partition`, `Message` FROM `Table_10911420982718045691` ORDER BY `Partition`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Partition`, `Message` FROM `Table_10911420982718045691` ORDER BY `Partition`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_10911420982718045691`; DDL: DROP TABLE `Table_10911420982718045691` DDL: DROP TOPIC `Topic_10911420982718045691` DDL: CREATE TABLE `Table_2136045306054795969` ( Partition Uint32 NOT NULL, Message Utf8, PRIMARY KEY (Partition) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_2136045306054795969` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Partition:CAST($x._partition AS Uint32), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_2136045306054795969` FROM `Topic_2136045306054795969` TO `Table_2136045306054795969` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Partition`, `Message` FROM `Table_2136045306054795969` ORDER BY `Partition`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Partition`, `Message` FROM `Table_2136045306054795969` ORDER BY `Partition`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Partition`, `Message` FROM `Table_2136045306054795969` ORDER BY `Partition`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_2136045306054795969`; DDL: DROP TABLE `Table_2136045306054795969` DDL: DROP TOPIC `Topic_2136045306054795969` |99.9%| [TM] {BAZEL_UPLOAD} ydb/core/transfer/ut/functional/unittest |99.9%| [TM] {RESULT} ydb/core/transfer/ut/functional/unittest >> Transfer_ColumnTable::MessageField_Attributes [GOOD] >> Transfer_ColumnTable::MessageField_CreateTimestamp >> NodeIdDescribe::HasDistribution [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_svc/unittest >> NodeIdDescribe::HasDistribution [GOOD] |99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_svc/unittest |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/kqp/kqp_query_svc/unittest >> test_workload.py::TestYdbWorkload::test [GOOD] >> Transfer_ColumnTable::MessageField_CreateTimestamp [GOOD] >> Transfer_ColumnTable::MessageField_Partition >> Transfer_ColumnTable::MessageField_Partition [GOOD] >> Transfer_ColumnTable::MessageField_SeqNo >> KqpQuerySession::NoLocalAttach |99.9%| [TA] $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} |99.9%| [TA] {BAZEL_UPLOAD, SKIPPED} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... 
results_accumulator.log} |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/ctas/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/ctas/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/ctas/tests/py3test >> Transfer_RowTable::KeyColumnFirst |99.9%| [TA] {RESULT} $(B)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} >> Transfer_ColumnTable::MessageField_SeqNo [GOOD] >> Transfer_ColumnTable::MessageField_ProducerId >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-None] [GOOD] >> Transfer_RowTable::KeyColumnFirst [GOOD] >> Transfer_RowTable::KeyColumnLast >> test_workload.py::TestYdbWorkload::test [GOOD] >> Transfer_RowTable::KeyColumnLast [GOOD] >> Transfer_RowTable::ComplexKey >> Transfer_ColumnTable::MessageField_ProducerId [GOOD] >> Transfer_ColumnTable::MessageField_MessageGroupId >> KqpQuerySession::NoLocalAttach [GOOD] >> Transfer_RowTable::ComplexKey [GOOD] >> Transfer_RowTable::NullableColumn >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-test_sct] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_query_session/unittest >> KqpQuerySession::NoLocalAttach [GOOD] |99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_query_session/unittest |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/kqp/kqp_query_session/unittest >> Transfer_RowTable::NullableColumn [GOOD] >> Transfer_RowTable::WriteNullToKeyColumn >> Transfer_ColumnTable::MessageField_MessageGroupId [GOOD] >> Transfer_ColumnTable::MessageField_WriteTimestamp >> Transfer_RowTable::WriteNullToKeyColumn [GOOD] >> Transfer_RowTable::WriteNullToColumn >> Transfer_RowTable::WriteNullToColumn [GOOD] >> Transfer_RowTable::Upsert_DifferentBatch >> Transfer_ColumnTable::MessageField_WriteTimestamp [GOOD] >> Transfer_ColumnTable::ProcessingJsonMessage >> Transfer_RowTable::Upsert_DifferentBatch [GOOD] >> Transfer_RowTable::Upsert_OneBatch |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/oltp_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/oltp_workload/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/oltp_workload/tests/py3test >> test_workload.py::TestYdbWorkload::test[column-local] [GOOD] >> Transfer_RowTable::Upsert_OneBatch [GOOD] >> Transfer_RowTable::ColumnType_Bool >> Transfer_RowTable::ColumnType_Bool [GOOD] >> Transfer_RowTable::ColumnType_Date >> Transfer_ColumnTable::ProcessingJsonMessage [GOOD] >> Transfer_ColumnTable::ProcessingCDCMessage >> Replication::TopicAutopartitioning [GOOD] >> Transfer_RowTable::ColumnType_Date [GOOD] >> Transfer_RowTable::ColumnType_Double >> test_workload.py::TestYdbWorkload::test[column-remote] >> Transfer_RowTable::ColumnType_Double [GOOD] >> Transfer_RowTable::ColumnType_Int8 ------- [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/replication/unittest >> Replication::TopicAutopartitioning [GOOD] Test command err: DDL: CREATE TABLE `SourceTable_17052287099025455823` ( Key Uint32, Key2 Uuid, v01 Uuid, v02 Uuid NOT NULL, v03 Double, PRIMARY KEY (Key, Key2) ); >>>>> Query: UPSERT INTO `SourceTable_17052287099025455823` (Key,Key2,v01,v02,v03) VALUES ( 1, CAST("00078af5-0000-0000-6c0b-040000000000" as Uuid), CAST("00078af5-0000-0000-6c0b-040000000001" as Uuid), UNWRAP(CAST("00078af5-0000-0000-6c0b-040000000002" as Uuid)), CAST("311111111113.222222223" as Double) ); DDL: CREATE ASYNC REPLICATION 
`Replication_17052287099025455823` FOR `SourceTable_17052287099025455823` AS `Table_17052287099025455823` WITH ( CONNECTION_STRING = 'grpc://localhost:13779/?database=local' ); >>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_17052287099025455823` ORDER BY `Key2`, `v01`, `v02`, `v03` >>>>> Query error:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_17052287099025455823]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 Attempt=19 count=-1 >>>>> Query: SELECT `Key2`, `v01`, `v02`, `v03` FROM `Table_17052287099025455823` ORDER BY `Key2`, `v01`, `v02`, `v03` Attempt=18 count=1 DDL: DROP ASYNC REPLICATION `Replication_17052287099025455823`; DDL: DROP TABLE `SourceTable_17052287099025455823` DDL: CREATE TABLE `SourceTable_11346193160251480615` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ); DDL: CREATE ASYNC REPLICATION `Replication_11346193160251480615` FOR `SourceTable_11346193160251480615` AS `Table_11346193160251480615` WITH ( CONNECTION_STRING = 'grpc://localhost:13779/?database=local' ); >>>>> Query: INSERT INTO `SourceTable_11346193160251480615` (`Key`, `Message`) VALUES (1, 'Message-1'); >>>>> Query: SELECT `Message` FROM `Table_11346193160251480615` ORDER BY `Message` >>>>> Query error:
: Error: Type annotation, code: 1030
:1:1: Error: At function: KiReadTable!
:1:1: Error: Cannot find table 'db.[/local/Table_11346193160251480615]' because it does not exist or you do not have access permissions. Please check correctness of table path and user permissions., code: 2003 Attempt=19 count=-1 >>>>> Query: SELECT `Message` FROM `Table_11346193160251480615` ORDER BY `Message` Attempt=18 count=1 State: Paused DDL: ALTER ASYNC REPLICATION `Replication_11346193160251480615` SET ( STATE = "Paused" ); >>>>> Query: INSERT INTO `SourceTable_11346193160251480615` (`Key`, `Message`) VALUES (2, 'Message-2'); >>>>> Query: SELECT `Message` FROM `Table_11346193160251480615` ORDER BY `Message` Attempt=19 count=1 State: StandBy DDL: ALTER ASYNC REPLICATION `Replication_11346193160251480615` SET ( STATE = "StandBy" ); >>>>> Query: SELECT `Message` FROM `Table_11346193160251480615` ORDER BY `Message` Attempt=19 count=1 >>>>> Query: SELECT `Message` FROM `Table_11346193160251480615` ORDER BY `Message` Attempt=18 count=2 DDL: ALTER ASYNC REPLICATION `Replication_11346193160251480615` SET ( STATE = "Paused" ); DDL: ALTER ASYNC REPLICATION `Replication_11346193160251480615` SET ( STATE = "StandBy" ); DDL: DROP ASYNC REPLICATION `Replication_11346193160251480615`; DDL: DROP TABLE `SourceTable_11346193160251480615` DDL: CREATE TABLE `SourceTable_638491760243948895` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( AUTO_PARTITIONING_BY_SIZE = ENABLED, AUTO_PARTITIONING_PARTITION_SIZE_MB = 1, AUTO_PARTITIONING_BY_LOAD = ENABLED, AUTO_PARTITIONING_MIN_PARTITIONS_COUNT = 1, AUTO_PARTITIONING_MAX_PARTITIONS_COUNT = 10, UNIFORM_PARTITIONS = 1 ); DDL: CREATE ASYNC REPLICATION `Replication_638491760243948895` FOR `SourceTable_638491760243948895` AS `Table_638491760243948895` WITH ( CONNECTION_STRING = 'grpc://localhost:13779/?database=local' ); Insert much data to trigger split >>>>> Query: UPSERT INTO `SourceTable_638491760243948895` (Key, Message) VALUES (0, 
'Message-aaaa…aaaa [multi-kilobyte padding payload of repeated 'a' characters elided from this log] …aaaa'), (4999, 'Message-aaaa…aaaa [second multi-kilobyte padding payload elided; the run of 'a' characters continues on the next line]
aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'); Wait for partition split and data replication Verify data replicated from all partitions >>>>> Query: SELECT `Message` FROM `Table_638491760243948895` ORDER BY `Message` Attempt=19 count=5000 DDL: DROP ASYNC REPLICATION `Replication_638491760243948895`; DDL: DROP TABLE `SourceTable_638491760243948895` >> test_workload.py::TestDeltaProtocol::test [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/replication/unittest |99.9%| [TM] {RESULT} ydb/tests/functional/replication/unittest >> Transfer_RowTable::ColumnType_Int8 [GOOD] >> Transfer_RowTable::ColumnType_Int16 >> Transfer_ColumnTable::ProcessingCDCMessage [GOOD] >> Transfer_ColumnTable::ProcessingTargetTable >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-test_sct] [GOOD] >> Transfer_RowTable::ColumnType_Int16 [GOOD] >> Transfer_RowTable::ColumnType_Int32 |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/node_broker/tests/py3test >> test_workload.py::TestDeltaProtocol::test [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/node_broker/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/node_broker/tests/py3test >> Transfer_RowTable::ColumnType_Int32 [GOOD] >> Transfer_RowTable::ColumnType_Int64 >> Transfer_ColumnTable::ProcessingTargetTable [GOOD] >> Transfer_ColumnTable::ProcessingTargetTableOtherType >> Transfer_RowTable::ColumnType_Int64 [GOOD] >> Transfer_RowTable::ColumnType_Utf8_LongValue >> Transfer_RowTable::ColumnType_Utf8_LongValue [GOOD] >> Transfer_RowTable::ColumnType_Uuid |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/show_create/table/tests/py3test >> test_workload.py::TestYdbWorkload::test_show_create_table_workload[30-test_sct] [GOOD] >> Transfer_RowTable::ColumnType_Uuid [GOOD] >> Transfer_RowTable::MessageField_Attributes |99.9%| [TM] {RESULT} ydb/tests/stress/show_create/table/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/show_create/table/tests/py3test >> Transfer_ColumnTable::ProcessingTargetTableOtherType [GOOD] >> Transfer_ColumnTable::DropColumn >> Transfer_RowTable::MessageField_Attributes [GOOD] >> Transfer_RowTable::MessageField_CreateTimestamp >> test_workload.py::TestYdbWorkload::test [GOOD] >> Transfer_RowTable::MessageField_CreateTimestamp [GOOD] >> Transfer_RowTable::MessageField_Partition >> Transfer_RowTable::MessageField_Partition [GOOD] >> Transfer_RowTable::MessageField_SeqNo >> test_arrow_workload.py::TestResultSetArrowWorkload::test [GOOD] >> Transfer_ColumnTable::DropColumn [GOOD] >> 
Transfer_ColumnTable::BigBatchSize_Remote >> Transfer_RowTable::MessageField_SeqNo [GOOD] >> Transfer_RowTable::MessageField_ProducerId |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/cdc/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/cdc/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/cdc/tests/py3test |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/result_set_format/tests/py3test >> test_arrow_workload.py::TestResultSetArrowWorkload::test [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/result_set_format/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/result_set_format/tests/py3test >> Transfer_RowTable::MessageField_ProducerId [GOOD] >> Transfer_RowTable::MessageField_MessageGroupId >> Transfer_RowTable::MessageField_MessageGroupId [GOOD] >> Transfer_RowTable::MessageField_WriteTimestamp >> Transfer_RowTable::MessageField_WriteTimestamp [GOOD] >> Transfer_RowTable::ProcessingJsonMessage >> Transfer_RowTable::ProcessingJsonMessage [GOOD] >> Transfer_RowTable::ProcessingCDCMessage >> Transfer_ColumnTable::BigBatchSize_Remote [GOOD] >> Transfer_ColumnTable::BigBatchSize_Local >> Transfer_RowTable::ProcessingCDCMessage [GOOD] >> Transfer_RowTable::ProcessingTargetTable >> Transfer_RowTable::ProcessingTargetTable [GOOD] >> Transfer_RowTable::ProcessingTargetTableOtherType >> test_kafka_streams.py::TestYdbTopicWorkload::test [FAIL] >> Transfer_RowTable::ProcessingTargetTableOtherType [GOOD] >> Transfer_RowTable::DropColumn |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/kafka/tests/py3test >> test_kafka_streams.py::TestYdbTopicWorkload::test [FAIL] |99.9%| [TM] {RESULT} ydb/tests/stress/kafka/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/kafka/tests/py3test >> Transfer_ColumnTable::BigBatchSize_Local [GOOD] >> Transfer_RowTable::DropColumn [GOOD] >> Transfer_RowTable::TableWithSyncIndex ------- [TM] {asan, default-linux-x86_64, release} ydb/core/transfer/ut/column_table/unittest >> Transfer_ColumnTable::BigBatchSize_Local [GOOD] Test command err: DDL: CREATE TABLE `Table_17097553461482071088` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_17097553461482071088` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:Unwrap(CAST($x._data AS Utf8)) |> ]; }; ; CREATE TRANSFER `Transfer_17097553461482071088` FROM `Topic_17097553461482071088` TO `Table_17097553461482071088` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:28514/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_17097553461482071088` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_17097553461482071088` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_17097553461482071088` ORDER BY `Key`, `Message` Attempt=17 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_17097553461482071088` ORDER BY `Key`, `Message` Attempt=16 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_17097553461482071088` ORDER BY `Key`, `Message` Attempt=15 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_17097553461482071088` ORDER BY `Key`, `Message` Attempt=14 count=1 DDL: DROP TRANSFER `Transfer_17097553461482071088`; DDL: DROP TABLE `Table_17097553461482071088` DDL: DROP TOPIC 
`Topic_17097553461482071088` DDL: CREATE TABLE `Table_15706203983936963175` ( Message Utf8 NOT NULL, Key Uint64 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_15706203983936963175` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:Unwrap(CAST($x._data AS Utf8)) |> ]; }; ; CREATE TRANSFER `Transfer_15706203983936963175` FROM `Topic_15706203983936963175` TO `Table_15706203983936963175` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:28514/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_15706203983936963175` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_15706203983936963175` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_15706203983936963175` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_15706203983936963175`; DDL: DROP TABLE `Table_15706203983936963175` DDL: DROP TOPIC `Topic_15706203983936963175` DDL: CREATE TABLE `Table_1363460844530997589` ( Key1 Uint64 NOT NULL, Key3 Uint64 NOT NULL, Value1 Utf8, Key2 Uint64 NOT NULL, Value2 Utf8, Key4 Uint64 NOT NULL, ___Value3 Utf8, PRIMARY KEY (Key3, Key2, Key1, Key4) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_1363460844530997589` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key1:Unwrap(CAST(1 AS Uint64)), Key2:Unwrap(CAST(2 AS Uint64)), Value2:CAST("value-2" AS Utf8), Key4:Unwrap(CAST(4 AS Uint64)), Key3:Unwrap(CAST(3 AS Uint64)), Value1:CAST("value-1" AS Utf8), ___Value3:CAST("value-3" AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_1363460844530997589` FROM `Topic_1363460844530997589` TO `Table_1363460844530997589` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:28514/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_1363460844530997589` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=19 count=0 >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_1363460844530997589` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=18 count=0 >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_1363460844530997589` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_1363460844530997589`; DDL: DROP TABLE `Table_1363460844530997589` DDL: DROP TOPIC `Topic_1363460844530997589` DDL: CREATE TABLE `Table_16029125351508813449` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_16029125351508813449` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_16029125351508813449` FROM `Topic_16029125351508813449` TO `Table_16029125351508813449` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:28514/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_16029125351508813449` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_16029125351508813449` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT 
`Key`, `Message` FROM `Table_16029125351508813449` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_16029125351508813449`; DDL: DROP TABLE `Table_16029125351508813449` DDL: DROP TOPIC `Topic_16029125351508813449` DDL: CREATE TABLE `Table_15283069096384864534` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_15283069096384864534` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:Unwrap(Nothing(Uint64?), "The value of the 'Key' column must be non-NULL"), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_15283069096384864534` FROM `Topic_15283069096384864534` TO `Table_15283069096384864534` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:28514/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: Error transform message partition 0 offset 0: Terminate was called, reason(102): generated.sql:4:29: Failed to unwrap empty optional: The value of the 'Key' column must be non-NULL } >>>>> EXPECTED: The value of the 'Key' column must be non-NULL DDL: DROP TRANSFER `Transfer_15283069096384864534`; DDL: DROP TABLE `Table_15283069096384864534` DDL: DROP TOPIC `Topic_15283069096384864534` DDL: CREATE TABLE `Table_3272428118456085941` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_3272428118456085941` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:Unwrap(Nothing(Utf8?), "The value of the 'Message' column must be non-NULL") |> ]; }; ; CREATE TRANSFER `Transfer_3272428118456085941` FROM `Topic_3272428118456085941` TO `Table_3272428118456085941` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:28514/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: Error transform message partition 0 offset 0: Terminate was called, reason(106): generated.sql:5:33: Failed to unwrap empty optional: The value of the 'Message' column must be non-NULL } >>>>> EXPECTED: The value of the 'Message' column must be non-NULL DDL: DROP TRANSFER `Transfer_3272428118456085941`; DDL: DROP TABLE `Table_3272428118456085941` DDL: DROP TOPIC `Topic_3272428118456085941` DDL: CREATE TABLE `Table_5648025982199603733` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_5648025982199603733` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:1, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_5648025982199603733` FROM `Topic_5648025982199603733` TO `Table_5648025982199603733` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:28514/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_5648025982199603733` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_5648025982199603733` ORDER BY `Message` Attempt=18 count=0 >>>>> Query: SELECT `Message` FROM `Table_5648025982199603733` ORDER BY `Message` Attempt=17 count=1 >>>>> Query: SELECT `Message` FROM `Table_5648025982199603733` ORDER BY `Message` Attempt=19 count=1 DDL: DROP TRANSFER `Transfer_5648025982199603733`; DDL: DROP TABLE `Table_5648025982199603733` DDL: CREATE TABLE `Table_7022822673431300600` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_7022822673431300600` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: ... ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------' |>; }; return ListMap($lines, $m); }; ; CREATE TRANSFER `Transfer_5108735735122230190` FROM `Topic_5108735735122230190` TO `Table_5108735735122230190` USING $l WITH ( FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 1073741824 ); >>>>> Query: SELECT `offset`, `line` FROM `Table_5108735735122230190` ORDER BY `offset`, `line` Attempt=19 count=1802 DDL: DROP TRANSFER `Transfer_5108735735122230190`; DDL: DROP TABLE `Table_5108735735122230190` DDL: DROP TOPIC `Topic_5108735735122230190` |99.9%| [TM] {BAZEL_UPLOAD} ydb/core/transfer/ut/column_table/unittest |99.9%| [TM] {RESULT} ydb/core/transfer/ut/column_table/unittest >> test_workload.py::TestYdbWorkload::test [GOOD] >> Transfer_RowTable::TableWithSyncIndex [GOOD] >> Transfer_RowTable::TableWithAsyncIndex >> Transfer_RowTable::TableWithAsyncIndex [GOOD] >> test_workload.py::TestYdbWorkload::test[column-remote] [GOOD] ------- [TM] {asan, default-linux-x86_64, release} ydb/core/transfer/ut/row_table/unittest >> Transfer_RowTable::TableWithAsyncIndex [GOOD] Test command err: DDL: CREATE TABLE `Table_10543944371334593175` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE 
TOPIC `Topic_10543944371334593175` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:Unwrap(CAST($x._data AS Utf8)) |> ]; }; ; CREATE TRANSFER `Transfer_10543944371334593175` FROM `Topic_10543944371334593175` TO `Table_10543944371334593175` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:11626/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_10543944371334593175` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_10543944371334593175` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_10543944371334593175` ORDER BY `Key`, `Message` Attempt=17 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_10543944371334593175` ORDER BY `Key`, `Message` Attempt=16 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_10543944371334593175` ORDER BY `Key`, `Message` Attempt=15 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_10543944371334593175` ORDER BY `Key`, `Message` Attempt=14 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_10543944371334593175` ORDER BY `Key`, `Message` Attempt=13 count=1 DDL: DROP TRANSFER `Transfer_10543944371334593175`; DDL: DROP TABLE `Table_10543944371334593175` DDL: DROP TOPIC `Topic_10543944371334593175` DDL: CREATE TABLE `Table_6756631916554209232` ( Message Utf8 NOT NULL, Key Uint64 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_6756631916554209232` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:Unwrap(CAST($x._data AS Utf8)) |> ]; }; ; CREATE TRANSFER `Transfer_6756631916554209232` FROM `Topic_6756631916554209232` TO `Table_6756631916554209232` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:11626/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_6756631916554209232` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_6756631916554209232` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_6756631916554209232` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_6756631916554209232`; DDL: DROP TABLE `Table_6756631916554209232` DDL: DROP TOPIC `Topic_6756631916554209232` DDL: CREATE TABLE `Table_7910851680048853402` ( Key1 Uint64 NOT NULL, Key3 Uint64 NOT NULL, Value1 Utf8, Key2 Uint64 NOT NULL, Value2 Utf8, Key4 Uint64 NOT NULL, ___Value3 Utf8, PRIMARY KEY (Key3, Key2, Key1, Key4) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_7910851680048853402` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key1:Unwrap(CAST(1 AS Uint64)), Key2:Unwrap(CAST(2 AS Uint64)), Value2:CAST("value-2" AS Utf8), Key4:Unwrap(CAST(4 AS Uint64)), Key3:Unwrap(CAST(3 AS Uint64)), Value1:CAST("value-1" AS Utf8), ___Value3:CAST("value-3" AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_7910851680048853402` FROM `Topic_7910851680048853402` TO `Table_7910851680048853402` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:11626/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_7910851680048853402` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=19 count=0 >>>>> Query: SELECT `Key1`, 
`Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_7910851680048853402` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=18 count=0 >>>>> Query: SELECT `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` FROM `Table_7910851680048853402` ORDER BY `Key1`, `Key2`, `Key3`, `Key4`, `Value1`, `Value2`, `___Value3` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_7910851680048853402`; DDL: DROP TABLE `Table_7910851680048853402` DDL: DROP TOPIC `Topic_7910851680048853402` DDL: CREATE TABLE `Table_4624660727023573290` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_4624660727023573290` WITH ( MIN_ACTIVE_PARTITIONS = 10 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_4624660727023573290` FROM `Topic_4624660727023573290` TO `Table_4624660727023573290` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:11626/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_4624660727023573290` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_4624660727023573290` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_4624660727023573290` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_4624660727023573290`; DDL: DROP TABLE `Table_4624660727023573290` DDL: DROP TOPIC `Topic_4624660727023573290` DDL: CREATE TABLE `Table_4622465912314840969` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_4622465912314840969` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:Unwrap(Nothing(Uint64?), "The value of the 'Key' column must be non-NULL"), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_4622465912314840969` FROM `Topic_4622465912314840969` TO `Table_4622465912314840969` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:11626/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: Error transform message partition 0 offset 0: Terminate was called, reason(102): generated.sql:4:29: Failed to unwrap empty optional: The value of the 'Key' column must be non-NULL } >>>>> EXPECTED: The value of the 'Key' column must be non-NULL DDL: DROP TRANSFER `Transfer_4622465912314840969`; DDL: DROP TABLE `Table_4622465912314840969` DDL: DROP TOPIC `Topic_4622465912314840969` DDL: CREATE TABLE `Table_337639089238449305` ( Key Uint64 NOT NULL, Message Utf8 NOT NULL, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_337639089238449305` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:Unwrap(Nothing(Utf8?), "The value of the 'Message' column must be non-NULL") |> ]; }; ; CREATE TRANSFER `Transfer_337639089238449305` FROM `Topic_337639089238449305` TO `Table_337639089238449305` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:11626/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: Error transform message partition 0 offset 0: Terminate was called, reason(106): generated.sql:5:33: Failed to unwrap empty optional: The value of the 'Message' column must be non-NULL } >>>>> EXPECTED: The value of the 'Message' column must be non-NULL DDL: DROP TRANSFER `Transfer_337639089238449305`; DDL: DROP TABLE `Table_337639089238449305` DDL: DROP TOPIC `Topic_337639089238449305` DDL: CREATE TABLE `Table_16994161886137606796` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_16994161886137606796` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:1, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_16994161886137606796` FROM `Topic_16994161886137606796` TO `Table_16994161886137606796` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:11626/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_16994161886137606796` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_16994161886137606796` ORDER BY `Message` Attempt=18 count=0 >>>>> Query: SELECT `Message` FROM `Table_16994161886137606796` ORDER BY `Message` Attempt=17 count=1 >>>>> Query: SELECT `Message` FROM `Table_16994161886137606796` ORDER BY `Message` Attempt=19 count=1 DDL: DROP TRANSFER `Transfer_16994161886137606796`; DDL: DROP TABLE `Table_16994161886137606796` DDL: CREATE TABLE `Table_14782664862476987779` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_14782664862476987779` WITH ( M ... 680445905723060` DDL: DROP TOPIC `Topic_3422680445905723060` DDL: CREATE TABLE `SourceTable_17082803571057413151` ( object_id Utf8 NOT NULL, timestamp Datetime NOT NULL, operation Utf8, PRIMARY KEY (object_id, timestamp) ) WITH ( STORE = ROW ) DDL: ALTER TABLE `SourceTable_17082803571057413151` ADD CHANGEFEED `cdc_17082803571057413151` WITH ( MODE = 'UPDATES', FORMAT = 'JSON' ) DDL: CREATE TABLE `Table_17082803571057413151` ( timestamp Datetime NOT NULL, object_id Utf8 NOT NULL, operation Utf8, PRIMARY KEY (timestamp, object_id) ) WITH ( STORE = ROW ) DDL: $l = ($x) -> { $d = CAST($x._data AS JSON); return [ <| timestamp: Unwrap(DateTime::MakeDatetime(DateTime::ParseIso8601(CAST(Yson::ConvertToString($d.key[1]) AS Utf8)))), object_id: Unwrap(CAST(Yson::ConvertToString($d.key[0]) AS Utf8)), operation: CAST(Yson::ConvertToString($d.update.operation) AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_17082803571057413151` FROM `SourceTable_17082803571057413151/cdc_17082803571057413151` TO `Table_17082803571057413151` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:11626/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: INSERT INTO `SourceTable_17082803571057413151` (`object_id`, `timestamp`, `operation`) VALUES ('id_1', Datetime('2019-01-01T15:30:00Z'), 'value_1'); >>>>> Query: SELECT `operation`, `object_id`, `timestamp` FROM `Table_17082803571057413151` ORDER BY `operation`, `object_id`, `timestamp` Attempt=19 count=0 >>>>> Query: SELECT `operation`, `object_id`, `timestamp` FROM `Table_17082803571057413151` ORDER BY `operation`, `object_id`, `timestamp` Attempt=18 count=0 >>>>> Query: SELECT `operation`, `object_id`, `timestamp` FROM `Table_17082803571057413151` ORDER BY `operation`, `object_id`, `timestamp` Attempt=17 count=0 >>>>> Query: SELECT `operation`, `object_id`, `timestamp` FROM `Table_17082803571057413151` 
ORDER BY `operation`, `object_id`, `timestamp` Attempt=16 count=0 >>>>> Query: SELECT `operation`, `object_id`, `timestamp` FROM `Table_17082803571057413151` ORDER BY `operation`, `object_id`, `timestamp` Attempt=15 count=1 DDL: DROP TRANSFER `Transfer_17082803571057413151`; DDL: DROP TABLE `Table_17082803571057413151` DDL: DROP TABLE `SourceTable_17082803571057413151` DDL: CREATE TABLE `Table_14861605837449843866` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TABLE `Table_14861605837449843866_1` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TABLE `Table_14861605837449843866_2` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_14861605837449843866` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key: $x._offset, Message:CAST($x._data AS Utf8) |>, <| __ydb_table: "Table_14861605837449843866_1", Key: $x._offset, Message:CAST($x._data || "_1" AS Utf8) |>, <| __ydb_table: "Table_14861605837449843866_2", Key: $x._offset, Message:CAST($x._data || "_2" AS Utf8) |>, ]; }; ; CREATE TRANSFER `Transfer_14861605837449843866` FROM `Topic_14861605837449843866` TO `Table_14861605837449843866` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:11626/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608, DIRECTORY = '/local' ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_14861605837449843866` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_14861605837449843866` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_14861605837449843866` ORDER BY `Key`, `Message` Attempt=17 count=1 >>>>> Query: SELECT `Key`, `Message` FROM `Table_14861605837449843866_1` ORDER BY `Key`, `Message` Attempt=19 count=1 >>>>> Query: SELECT `Key`, `Message` FROM `Table_14861605837449843866_2` ORDER BY `Key`, `Message` Attempt=19 count=1 DDL: DROP TRANSFER `Transfer_14861605837449843866`; DDL: DROP TABLE `Table_14861605837449843866` DDL: DROP TOPIC `Topic_14861605837449843866` DDL: CREATE TABLE `Table_1197208496117469696` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TABLE `Table_1197208496117469696_1` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = COLUMN ); DDL: CREATE TOPIC `Topic_1197208496117469696` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key: $x._offset, Message:CAST($x._data AS Utf8) |>, <| __ydb_table: "Table_1197208496117469696_1", Key: $x._offset, Message:CAST($x._data || "_1" AS Utf8) |>, ]; }; ; CREATE TRANSFER `Transfer_1197208496117469696` FROM `Topic_1197208496117469696` TO `Table_1197208496117469696` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:11626/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608, DIRECTORY = '/local' ); >>>>> ACTUAL: {
: Error: Error in target #1: {
: Error: Bulk upsert to table '/local/Table_1197208496117469696_1' Only the OLTP table is supported } } >>>>> EXPECTED: Error: Bulk upsert to table '/local/Table_ DDL: DROP TRANSFER `Transfer_1197208496117469696`; DDL: DROP TABLE `Table_1197208496117469696` DDL: DROP TOPIC `Topic_1197208496117469696` DDL: CREATE TABLE `Table_14215348730790044441` ( Key Uint64 NOT NULL, Message Utf8, PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_14215348730790044441` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:CAST($x._offset AS Uint64), Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_14215348730790044441` FROM `Topic_14215348730790044441` TO `Table_14215348730790044441` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:11626/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Message` FROM `Table_14215348730790044441` ORDER BY `Message` Attempt=19 count=0 >>>>> Query: SELECT `Message` FROM `Table_14215348730790044441` ORDER BY `Message` Attempt=18 count=0 >>>>> Query: SELECT `Message` FROM `Table_14215348730790044441` ORDER BY `Message` Attempt=17 count=1 DDL: ALTER TABLE Table_14215348730790044441 DROP COLUMN Message >>>>> ACTUAL: {
: Error: Error in target #1: {
: Error: Bulk upsert to table 'local/Table_14215348730790044441' Unknown column: Message } } >>>>> EXPECTED: Unknown column: Message DDL: CREATE TABLE `Table_10453455383934502669` ( Key Uint64 NOT NULL, Message Utf8, INDEX `title_index` GLOBAL SYNC ON (`Message`), PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_10453455383934502669` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_10453455383934502669` FROM `Topic_10453455383934502669` TO `Table_10453455383934502669` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:11626/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> ACTUAL: {
: Error: Error in target #1: {
: Error: Bulk upsert to table 'local/Table_10453455383934502669' Only async-indexed tables are supported by BulkUpsert } } >>>>> EXPECTED: Only async-indexed tables are supported by BulkUpsert DDL: DROP TRANSFER `Transfer_10453455383934502669`; DDL: DROP TABLE `Table_10453455383934502669` DDL: DROP TOPIC `Topic_10453455383934502669` DDL: CREATE TABLE `Table_13660920691896605444` ( Key Uint64 NOT NULL, Message Utf8, INDEX `title_index` GLOBAL ASYNC ON (`Message`), PRIMARY KEY (Key) ) WITH ( STORE = ROW ); DDL: CREATE TOPIC `Topic_13660920691896605444` WITH ( MIN_ACTIVE_PARTITIONS = 1 ); DDL: $l = ($x) -> { return [ <| Key:$x._offset, Message:CAST($x._data AS Utf8) |> ]; }; ; CREATE TRANSFER `Transfer_13660920691896605444` FROM `Topic_13660920691896605444` TO `Table_13660920691896605444` USING $l WITH ( CONNECTION_STRING = 'grpc://localhost:11626/?database=local', FLUSH_INTERVAL = Interval('PT1S'), BATCH_SIZE_BYTES = 8388608 ); >>>>> Query: SELECT `Key`, `Message` FROM `Table_13660920691896605444` ORDER BY `Key`, `Message` Attempt=19 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_13660920691896605444` ORDER BY `Key`, `Message` Attempt=18 count=0 >>>>> Query: SELECT `Key`, `Message` FROM `Table_13660920691896605444` ORDER BY `Key`, `Message` Attempt=17 count=1 DDL: DROP TRANSFER `Transfer_13660920691896605444`; DDL: DROP TABLE `Table_13660920691896605444` DDL: DROP TOPIC `Topic_13660920691896605444` |99.9%| [TM] {RESULT} ydb/core/transfer/ut/row_table/unittest |99.9%| [TM] {BAZEL_UPLOAD} ydb/core/transfer/ut/row_table/unittest |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/transfer/tests/py3test >> test_workload.py::TestYdbWorkload::test[column-remote] [GOOD] |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/transfer/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/transfer/tests/py3test >> test_workload.py::TestYdbMixedWorkload::test[column] [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/streaming/tests/py3test >> test_workload.py::TestYdbWorkload::test [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/stress/mixedpy/tests/py3test >> test_workload.py::TestYdbMixedWorkload::test[column] [GOOD] |99.9%| [TM] {RESULT} ydb/tests/stress/streaming/tests/py3test |99.9%| [TM] {RESULT} ydb/tests/stress/mixedpy/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/mixedpy/tests/py3test |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/stress/streaming/tests/py3test >> ConsistentIndexRead::InteractiveTx [GOOD] >> KqpExtTest::SecondaryIndexSelectUsingScripting >> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD] |99.9%| [TM] {asan, default-linux-x86_64, release} ydb/tests/functional/kqp/kqp_indexes/unittest >> KqpExtTest::SecondaryIndexSelectUsingScripting [GOOD] |99.9%| [TM] {RESULT} ydb/tests/functional/kqp/kqp_indexes/unittest |99.9%| [TM] {BAZEL_UPLOAD} ydb/tests/functional/kqp/kqp_indexes/unittest |99.9%| CLEANING BUILD ROOT Number of suites skipped by size: 149 ------ sole chunk ran 1 test (total:104.69s - test:104.57s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 15.5G (16286492K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 1071601 54.8M 54.5M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1071820 40.7M 23.3M 10.4M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1071857 777M 780M 699M └─ functional-sqs-merge_split_common_table-std --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini 1075493 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result 1075915 2.0G 2.0G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result 1134047 2.0G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-res 1076417 1.8G 1.8G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result 1076906 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result 1134091 1.9G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-res 1077501 1.9G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result 1134128 1.9G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-res 1078305 1.7G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result 1134130 1.7G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-res 1078831 1.7G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result 1134086 1.7G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-res 1079455 1.9G 1.9G 1.4G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-result 1134092 1.9G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-res Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff Stderr: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/merge_split_common_table/std/test-results/py3test/testing_out_stuff/stderr ydb/core/persqueue/pqtablet/partition/mlp/ut [size:medium] ------ sole chunk ran 75 tests (total:623.00s - setup:0.02s test:600.09s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: TMLPConsumerTests::ReloadPQTablet (timeout) duration: 620.87s 74 tests were not launched inside chunk. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/unittest/testing_out_stuff/stderr [timeout] TMLPConsumerTests::ReloadPQTablet [default-linux-x86_64-release-asan] (620.87s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/unittest/testing_out_stuff/TMLPConsumerTests.ReloadPQTablet.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/persqueue/pqtablet/partition/mlp/ut/test-results/unittest/testing_out_stuff/TMLPConsumerTests.ReloadPQTablet.out ------ TIMEOUT: 74 - NOT_LAUNCHED, 1 - TIMEOUT ydb/core/persqueue/pqtablet/partition/mlp/ut ydb/core/kqp/ut/view [size:medium] ------ sole chunk ran 23 tests (total:386.80s - test:386.62s) [fail] TCreateAndDropViewTest::DropNonexistingView [default-linux-x86_64-release-asan] (14.98s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.DropNonexistingView.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.DropNonexistingView.out [fail] TCreateAndDropViewTest::DropViewIfExists [default-linux-x86_64-release-asan] (11.66s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.DropViewIfExists.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.DropViewIfExists.out [fail] TCreateAndDropViewTest::CallDropViewOnTable [default-linux-x86_64-release-asan] (60.79s) (NYdb::Dev::TContractViolation) Attempt to use result with not successfull status. 
TCreateSessionResult::GetSession Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.CallDropViewOnTable.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/ut/view/test-results/unittest/testing_out_stuff/TCreateAndDropViewTest.CallDropViewOnTable.out ------ FAIL: 20 - GOOD, 3 - FAIL ydb/core/kqp/ut/view ------ sole chunk ran 2 tests (total:385.32s - recipes:19.71s test:363.15s recipes:2.21s) Info: Test run has exceeded 16.0G (16777216K) memory limit with 16.6G (17363420K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 1311760 54.8M 53.2M 7.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1311852 42.2M 24.1M 12.2M ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1315366 58.7M 54.0M 32.5M │ └─ test_tool run_ut @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff/test_tool.args 1315528 2.0G 2.0G 2.0G │ └─ ydb-tests-functional-kqp-kqp_indexes --trace-path-append /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/ 1312829 2.0G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_ 1312958 2.0G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_ 1313095 1.9G 1.7G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_ 1313248 1.9G 1.7G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_ 1313559 1.9G 1.8G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_ 1313853 1.9G 1.8G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_ 1314023 1.9G 1.7G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_ 1314306 1.9G 1.7G 1.4G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_ Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_indexes/test-results/unittest/testing_out_stuff/stderr ydb/tests/functional/statistics [size:medium] nchunks:10 ------ 
[test_restarts.py 0/10] chunk ran 1 test (total:141.18s - test:141.13s) [fail] test_restarts.py::test_basic [default-linux-x86_64-release-asan] (135.30s) ydb/tests/functional/statistics/test_restarts.py:95: in test_basic assert_that(wait_for(get_base_stats_response, timeout_seconds=5), E AssertionError: base stats available after restart Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/statistics/test-results/py3test/testing_out_stuff/test_restarts.py.test_basic.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/statistics/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/tests/functional/statistics ydb/tests/functional/serverless [size:medium] nchunks:20 ------ [test_serverless.py 3/10] chunk ran 5 tests (total:649.45s - test:600.07s) Chunk exceeded 600s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal List of the tests involved in the launch: test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] (good) duration: 216.62s test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] (good) duration: 162.14s test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] (good) duration: 160.64s test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] (timeout) duration: 75.68s test_serverless.py::test_create_table_with_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] (good) duration: 13.13s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/stderr [timeout] test_serverless.py::test_database_with_column_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [default-linux-x86_64-release-asan] (75.68s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_column_disk_quotas.enable_alter_database_create_hive_first--true-enable_pool_encryption--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ [test_serverless.py 4/10] chunk ran 4 tests (total:476.39s - test:474.44s) [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] [default-linux-x86_64-release-asan] (59.46s) ydb/tests/functional/serverless/test_serverless.py:458: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? 
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:355: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:375: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:121: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:237: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false-enable_pool_encryption--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:457: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false-enable_pool_encryption--false_/dirA0/table`." 
issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false-enable_pool_encryption--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] [default-linux-x86_64-release-asan] (111.79s) ydb/tests/functional/serverless/test_serverless.py:458: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:355: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:375: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:121: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:237: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false-enable_pool_encryption--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:457: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*'): E AssertionError: Regex pattern did not match. 
E Regex: '.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--false-enable_pool_encryption--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--false-enable_pool_encryption--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [default-linux-x86_64-release-asan] (117.95s) ydb/tests/functional/serverless/test_serverless.py:458: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:355: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:375: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:121: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:237: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true-enable_pool_encryption--false_/dirA0/table`." 
issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:457: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true-enable_pool_encryption--false_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true-enable_pool_encryption--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff [fail] test_serverless.py::test_database_with_disk_quotas[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] [default-linux-x86_64-release-asan] (176.68s) ydb/tests/functional/serverless/test_serverless.py:458: in test_database_with_disk_quotas IOLoop.current().run_sync(lambda: async_write_key(path, 0, 'test', ignore_out_of_space=False)) contrib/python/tornado/tornado-4/tornado/ioloop.py:458: in run_sync return future_cell[0].result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:355: in wrapped res = yield func(*args, **kwargs) contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/python/tornado/tornado-4/tornado/concurrent.py:238: in result raise_exc_info(self._exc_info) :4: in raise_exc_info ??? 
contrib/python/tornado/tornado-4/tornado/gen.py:1064: in run yielded = self.gen.throw(*exc_info) ydb/tests/functional/serverless/test_serverless.py:375: in async_write_key yield tx.async_execute( contrib/python/tornado/tornado-4/tornado/gen.py:1056: in run value = future.result() contrib/tools/python3/Lib/concurrent/futures/_base.py:449: in result return self.__get_result() contrib/tools/python3/Lib/concurrent/futures/_base.py:401: in __get_result raise self._exception contrib/python/ydb/py3/ydb/connection.py:121: in _on_response_callback response = response if wrap_result is None else wrap_result(rpc_state, response, *wrap_args) contrib/python/ydb/py3/ydb/_session_impl.py:20: in decorator return func(rpc_state, response_pb, session_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:9: in decorator return func(rpc_state, response_pb, session_state, tx_state, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:22: in decorator return func(rpc_state, response_pb, session_state, tx_state, query, *args, **kwargs) contrib/python/ydb/py3/ydb/_tx_ctx_impl.py:165: in wrap_result_and_tx_id issues._process_response(response_pb.operation) contrib/python/ydb/py3/ydb/issues.py:237: in _process_response raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Unavailable: message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true-enable_pool_encryption--true_/dirA0/table`." issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050) During handling of the above exception, another exception occurred: ydb/tests/functional/serverless/test_serverless.py:457: in test_database_with_disk_quotas with pytest.raises(ydb.Unavailable, match=r'.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*'): E AssertionError: Regex pattern did not match. E Regex: '.*DATABASE_DISK_SPACE_QUOTA_EXCEEDED.*' E Input: 'message: "Disk space exhausted. Table `/Root/quoted_serverless/test_database_with_disk_quotas_enable_alter_database_create_hive_first--true-enable_pool_encryption--true_/dirA0/table`." 
issue_code: 2033 severity: 1 issues { message: "Cannot perform writes: database is out of disk space" issue_code: 2033 severity: 1 } (server_code: 400050)' Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_database_with_disk_quotas.enable_alter_database_create_hive_first--true-enable_pool_encryption--true.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ [test_serverless.py 9/10] chunk ran 4 tests (total:632.47s - test:600.05s) Chunk exceeded 600s timeout, failed to shutdown gracefully in 30s and was terminated using SIGQUIT signal List of the tests involved in the launch: test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--true] (good) duration: 245.20s test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] (timeout) duration: 222.29s test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--false-enable_pool_encryption--false] (good) duration: 161.75s test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--true] test was not launched inside chunk. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/stderr [timeout] test_serverless.py::test_turn_on_serverless_storage_billing[enable_alter_database_create_hive_first--true-enable_pool_encryption--false] [default-linux-x86_64-release-asan] (222.29s) Killed by timeout (600 s) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff/test_serverless.py.test_turn_on_serverless_storage_billing.enable_alter_database_create_hive_first--true-enable_pool_encryption--false.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/serverless/test-results/py3test/testing_out_stuff ------ TIMEOUT: 37 - GOOD, 4 - FAIL, 1 - NOT_LAUNCHED, 2 - TIMEOUT ydb/tests/functional/serverless ydb/tests/functional/sqs/cloud [size:medium] nchunks:40 ------ [36/40] chunk ran 2 tests (total:178.40s - test:177.94s) [fail] test_yandex_cloud_mode.py::TestSqsYandexCloudMode::test_yc_events_processor[tables_format_v0] [default-linux-x86_64-release-asan] (149.68s) ydb/tests/functional/sqs/cloud/test_yandex_cloud_mode.py:829: in test_yc_events_processor assert len(lines) >= 2, "Got only %s event lines after all attempts" % len(lines) E AssertionError: Got only 0 event lines after all attempts E assert 0 >= 2 E + where 0 = len([]) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff/test_yandex_cloud_mode.py.TestSqsYandexCloudMode.test_yc_events_processor.tables_format_v0.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/sqs/cloud/test-results/py3test/testing_out_stuff ------ FAIL: 79 - GOOD, 1 - FAIL ydb/tests/functional/sqs/cloud ydb/core/kqp/workload_service/ut [size:medium] nchunks:10 ------ [4/10] 
chunk ran 6 tests (total:417.87s - test:415.18s) [fail] KqpWorkloadServiceDistributed::TestDistributedLessConcurrentQueryLimit [default-linux-x86_64-release-asan] (210.72s) assertion failed at ydb/core/kqp/workload_service/ut/common/kqp_workload_service_ut_common.cpp:680, static void NKikimr::NKqp::NWorkload::(anonymous namespace)::TWorkloadServiceYdbSetup::CheckTableCounters(NMonitoring::TDynamicCounterPtr, const TString &): (operationSubgroup->GetCounter("FinishError", true)->Val() == 0) failed: (2 != 0) Node id: 8, unexpected vaule for operation TRefreshPoolStateQuery 0. /-S/util/system/backtrace.cpp:284: ?? @ 0x1B80405B 1. /tmp//-S/library/cpp/testing/unittest/registar.cpp:46: RaiseError @ 0x1BCF507B 2. /tmp//-S/ydb/core/kqp/workload_service/ut/common/kqp_workload_service_ut_common.cpp:680: CheckTableCounters @ 0x38D1D14A 3. /tmp//-S/ydb/core/kqp/workload_service/ut/common/kqp_workload_service_ut_common.cpp:578: ValidateWorkloadServiceCounters @ 0x38D1D14A 4. /tmp//-S/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp:37: StartTestConcurrentQueryLimit @ 0x1B330CC6 5. /tmp//-S/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp:387: operator() @ 0x1B46DA37 6. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149: __invoke<(lambda at /-S/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp:387:1) &> @ 0x1B46DA37 7. /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:224: __call<(lambda at /-S/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp:387:1) &> @ 0x1B46DA37 8. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:169: operator() @ 0x1B46DA37 9. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:314: operator() @ 0x1B46DA37 10. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:431: operator() @ 0x1BD244B9 11. /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:990: operator() @ 0x1BD244B9 12. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:526: Run @ 0x1BD244B9 13. /tmp//-S/library/cpp/testing/unittest/registar.cpp:373: Run @ 0x1BCFBD57 14. /tmp//-S/ydb/core/kqp/workload_service/ut/kqp_workload_service_ut.cpp:387: Execute @ 0x1B46CD73 15. /tmp//-S/library/cpp/testing/unittest/registar.cpp:494: Execute @ 0x1BCFD50F 16. /tmp//-S/library/cpp/testing/unittest/utmain.cpp:875: RunMain @ 0x1BD1E31C 17. ??:0: ?? @ 0x7FAA5DDEAD8F 18. ??:0: ?? @ 0x7FAA5DDEAE3F 19. ??:0: ?? 
@ 0x18A13028 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/workload_service/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/workload_service/ut/test-results/unittest/testing_out_stuff/KqpWorkloadServiceDistributed.TestDistributedLessConcurrentQueryLimit.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/kqp/workload_service/ut/test-results/unittest/testing_out_stuff/KqpWorkloadServiceDistributed.TestDistributedLessConcurrentQueryLimit.out ------ FAIL: 61 - GOOD, 1 - FAIL ydb/core/kqp/workload_service/ut ydb/core/statistics/service/ut [size:medium] nchunks:60 ------ [15/60] chunk ran 1 test (total:606.83s - test:600.02s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: ColumnStatistics::CountMinSketchServerlessStatistics (timeout) duration: 604.84s Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/stderr [timeout] ColumnStatistics::CountMinSketchServerlessStatistics [default-linux-x86_64-release-asan] (604.84s) Killed by timeout (600 s) Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ColumnStatistics.CountMinSketchServerlessStatistics.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/service/ut/test-results/unittest/testing_out_stuff/ColumnStatistics.CountMinSketchServerlessStatistics.out ------ TIMEOUT: 26 - GOOD, 1 - TIMEOUT ydb/core/statistics/service/ut ydb/core/http_proxy/ut/sqs_topic_ut [size:medium] nchunks:10 ------ [4/10] chunk ran 2 tests (total:207.58s - setup:0.02s test:207.11s) [fail] TestSqsTopicHttpProxy::TestReceiveMessageGroup [default-linux-x86_64-release-asan] (71.14s) assertion failed at ydb/core/http_proxy/ut/datastreams_fixture/datastreams_fixture.cpp:281, NJson::TJsonMap THttpProxyTestMock::SendJsonRequest(TString, NJson::TJsonMap, ui32): (res.HttpCode == expectedHttpCode) failed: (400 != 200) TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 THttpProxyTestMock::SendJsonRequest(TBasicString>, NJson::TJsonMap, unsigned int) at /-S/ydb/core/http_proxy/ut/datastreams_fixture/datastreams_fixture.cpp:281:9 ~TJsonValue at /-S/library/cpp/json/writer/json_value.h:69:13 ~TJsonValue at /-S/library/cpp/json/writer/json_value.h:69:13 operator() at /-S/ydb/core/http_proxy/ut/sqs_topic_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? 
at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/testing_out_stuff/TestSqsTopicHttpProxy.TestReceiveMessageGroup.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/http_proxy/ut/sqs_topic_ut/test-results/unittest/testing_out_stuff/TestSqsTopicHttpProxy.TestReceiveMessageGroup.out ------ FAIL: 21 - GOOD, 1 - FAIL ydb/core/http_proxy/ut/sqs_topic_ut ------ [test_discovery.py] chunk ran 3 tests (total:165.93s - test:165.87s) Info: Test run has exceeded 10.0G (10485760K) memory limit with 15.1G (15789304K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 1135929 54.9M 53.9M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1135955 40.8M 23.4M 10.5M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1135959 824M 831M 750M └─ ydb-tests-functional-api --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doct 1167238 1.5G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t 1167770 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t 1168461 1.6G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t 1168824 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t 1169217 1.5G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t 1169421 1.5G 1.5G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t 1176099 1.4G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t 1176109 1.5G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t 1176235 1.5G 1.4G 1003M └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/t Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/api/test-results/py3test/testing_out_stuff/stderr ydb/tests/functional/audit [size:medium] nchunks:500 ------ [test_canonical_records.py 5/100] chunk 
ran 1 test (total:140.57s - setup:0.02s test:140.24s) [fail] test_canonical_records.py::test_dstool_evict_vdisk_grpc [default-linux-x86_64-release-asan] (133.32s) ydb/tests/functional/audit/test_canonical_records.py:348: in test_dstool_evict_vdisk_grpc execute_dstool_grpc(ydb_cluster, TOKEN, ['vdisk', 'evict', '--vdisk-ids', vdisk_id, '--ignore-degraded-group-check', '--ignore-failure-model-group-check']) ydb/tests/functional/audit/helpers.py:60: in execute_dstool_grpc assert False, f'Command\n{full_cmd}\n finished with exit code {proc_result.exit_code}, stderr:\n\n{proc_result.std_err.decode("utf-8")}\n\nstdout:\n{proc_result.std_out.decode("utf-8")}' E AssertionError: Command E ['/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/apps/dstool/ydb-dstool', '--endpoint', 'grpc://localhost:9355', 'vdisk', 'evict', '--vdisk-ids', '[82000000:1:0:0:0]', '--ignore-degraded-group-check', '--ignore-failure-model-group-check'] E finished with exit code 1, stderr: E E error, add --verbose for more info E E E stdout: Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff/test_canonical_records.py.test_dstool_evict_vdisk_grpc.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/audit/test-results/py3test/testing_out_stuff ------ FAIL: 33 - GOOD, 1 - FAIL ydb/tests/functional/audit ydb/tests/stress/kafka/tests [size:medium] ------ sole chunk ran 1 test (total:192.08s - test:191.98s) [fail] test_kafka_streams.py::TestYdbTopicWorkload::test [default-linux-x86_64-release-asan] (183.61s) ydb/tests/stress/kafka/tests/test_kafka_streams.py:24: in test yatest.common.execute([ library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:422: in _finalise raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kafka/kafka_streams_test --endpoint grpc://localhost:3594 --database /Root --bootstrap http://localhost:24408 --source-path test-topic --target-path target-topic --consumer workload-consumer-0 --num-workers 2 --duration 120' has failed with code 255. 
E Errors: E ...erBatch.completeFutureAndFireCallbacks(ProducerBatch.java:273) E at org.apache.kafka.clients.producer.internals.ProducerBatch.abort(ProducerBatch.java:161) E at org.apache.kafka.clients.producer.internals.RecordAccumulator.abortUndrainedBatches(RecordAccumulator.java:1116) E at org.apache.kafka.clients.producer.internals.Sender.maybeSendAndPollTransactionalRequest(Sender.java:439) E at org.apache.kafka.clients.producer.internals.Sender.runOnce(Sender.java:316) E at org.apache.kafka.clients.producer.internals.Sender.run(Sender.java:243) E at java.base/java.lang.Thread.run(Thread.java:840) E Caused by: org.apache.kafka.common.errors.UnknownServerException: Unexpected termination E Traceback (most recent call last): E File "contrib/tools/python3/Lib/runpy.py", line 198, in _run_module_as_main E return _run_code(code, main_globals, None, E ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ E File "contrib/tools/python3/Lib/runpy.py", line 88, in _run_code E exec(code, run_globals) E File "ydb/tests/stress/kafka/__main__.py", line 27, in E workload.loop() E File "ydb/tests/stress/kafka/workload/__init__.py", line 125, in loop E assert totalMessCountTest == totalMessCountTarget, f"Source and target {self.target_topic_path}-{i} topics total messages count are not equal:" + \ E ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ E AssertionError: Source and target target-topic-0 topics total messages count are not equal:{totalMessCountTest} and {totalMessCountTarget} respectively. Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kafka/tests/test-results/py3test/testing_out_stuff/test_kafka_streams.py.TestYdbTopicWorkload.test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/kafka/tests/test-results/py3test/testing_out_stuff ------ FAIL: 1 - FAIL ydb/tests/stress/kafka/tests ------ sole chunk ran 2 tests (total:84.15s - recipes:26.37s test:53.05s recipes:4.24s) Info: Test run has exceeded 16.0G (16777216K) memory limit with 16.8G (17593900K) used. 
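Editorial note on the ydb/tests/stress/kafka/tests failure above: the assertion message ends with the literal placeholders {totalMessCountTest} and {totalMessCountTarget} instead of the actual counts, which suggests the continued string literal in ydb/tests/stress/kafka/workload/__init__.py is a plain string rather than an f-string. A minimal sketch of the symptom and the likely one-character fix, using variable names taken from the traceback (the counts themselves are illustrative, not values from this run):

    # Sketch only: reconstructed from the traceback above, not the actual workload source.
    target_topic_path = "target-topic"                   # from the failure message
    i = 0
    totalMessCountTest, totalMessCountTarget = 100, 98   # illustrative counts

    # As logged: the second literal has no f-prefix, so its braces are printed verbatim.
    as_logged = (f"Source and target {target_topic_path}-{i} topics total messages count are not equal:"
                 + "{totalMessCountTest} and {totalMessCountTarget} respectively.")
    print(as_logged)   # ...are not equal:{totalMessCountTest} and {totalMessCountTarget} respectively.

    # With the f-prefix added, the message would report the real numbers, making the
    # mismatch in this report actionable.
    with_fix = (f"Source and target {target_topic_path}-{i} topics total messages count are not equal:"
                + f"{totalMessCountTest} and {totalMessCountTarget} respectively.")
    print(with_fix)    # ...are not equal:100 and 98 respectively.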
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 1318733 54.8M 54.0M 7.4M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1318735 41.2M 23.7M 11.1M ├─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1327300 355M 0b 0b │ └─ ydb_recipe --build-root /home/runner/actions_runner/_work/ydb/ydb/tmp/out --source-root /home/runner/actions_runner/_work/ydb/ydb --gdb-path /home/runner/.ya/tools/v4/10 1318999 2.2G 2.2G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 1319119 2.1G 2.1G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 1319241 2.0G 2.0G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 1319426 2.2G 2.1G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 1319574 2.1G 2.1G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 1319789 2.1G 2.1G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 1320062 2.0G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou 1320372 2.1G 0b 0b └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_ou Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/kqp/kqp_query_svc/test-results/unittest/testing_out_stuff/stderr ydb/tests/datashard/ttl [size:medium] nchunks:36 ------ [0/36] chunk ran 2 tests (total:416.47s - test:416.13s) [fail] test_ttl.py::TestPgTTL::test_ttl[table_pgdate_0_UNIQUE_SYNC-pk_types8-all_types8-index8-pgdate-UNIQUE-SYNC] [default-linux-x86_64-release-asan] (317.83s) ydb/tests/datashard/ttl/test_ttl.py:418: in test_ttl self.do_test_ttl(table_name, pk_types, all_types, index, ttl, unique, sync) ydb/tests/datashard/ttl/test_ttl.py:64: in do_test_ttl self.select(table_name, pk_types, all_types, index, dml) ydb/tests/datashard/ttl/test_ttl.py:127: in select self.create_select(table_name, pk_types, all_types, index, i, 0, dml) ydb/tests/datashard/ttl/test_ttl.py:147: in create_select assert ( E AssertionError: Expected 0 rows, error when deleting 2 lines, table table_pgdate_0_UNIQUE_SYNC E assert (1 == 1 and 1 == 0) E + where 1 = len([{'count': 1}]) E + and 1 = {'count': 1}.count Log: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff/test_ttl.py.TestPgTTL.test_ttl.table_pgdate_0_UNIQUE_SYNC-pk_types8-all_types8-index8-pgdate-UNIQUE-SYNC.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff ------ [10/36] chunk ran 2 tests (total:393.12s - test:393.07s) [fail] test_ttl.py::TestTTL::test_ttl[table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC] [default-linux-x86_64-release-asan] (286.44s) ydb/tests/datashard/ttl/test_ttl.py:355: in test_ttl self.do_test_ttl(table_name, pk_types, all_types, index, ttl, unique, sync) ydb/tests/datashard/ttl/test_ttl.py:64: in do_test_ttl self.select(table_name, pk_types, all_types, index, dml) ydb/tests/datashard/ttl/test_ttl.py:127: in select self.create_select(table_name, pk_types, all_types, index, i, 0, dml) ydb/tests/datashard/ttl/test_ttl.py:147: in create_select assert ( E AssertionError: Expected 0 rows, error when deleting 4 lines, table table_Datetime_1_UNIQUE_SYNC E assert (1 == 1 and 1 == 0) E + where 1 = len([{'count': 1}]) E + and 1 = {'count': 1}.count Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff/test_ttl.py.TestTTL.test_ttl.table_Datetime_1_UNIQUE_SYNC-pk_types5-all_types5-index5-Datetime-UNIQUE-SYNC.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff ------ [25/36] chunk ran 1 test (total:309.96s - test:309.41s) [fail] test_ttl.py::TestTTL::test_ttl[table_Uint32_0__ASYNC-pk_types19-all_types19-index19-Uint32--ASYNC] [default-linux-x86_64-release-asan] (298.59s) ydb/tests/datashard/ttl/test_ttl.py:355: in test_ttl self.do_test_ttl(table_name, pk_types, all_types, index, ttl, unique, sync) ydb/tests/datashard/ttl/test_ttl.py:64: in do_test_ttl self.select(table_name, pk_types, all_types, index, dml) ydb/tests/datashard/ttl/test_ttl.py:127: in select self.create_select(table_name, pk_types, all_types, index, i, 0, dml) ydb/tests/datashard/ttl/test_ttl.py:147: in create_select assert ( E AssertionError: Expected 0 rows, error when deleting 1 lines, table table_Uint32_0__ASYNC E assert (1 == 1 and 1 == 0) E + where 1 = len([{'count': 1}]) E + and 1 = {'count': 1}.count Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff/test_ttl.py.TestTTL.test_ttl.table_Uint32_0__ASYNC-pk_types19-all_types19-index19-Uint32--ASYNC.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff ------ [5/36] chunk ran 2 tests (total:366.54s - test:366.12s) [fail] test_ttl.py::TestPgTTL::test_ttl[table_pgtimestamp_0__SYNC-pk_types9-all_types9-index9-pgtimestamp--SYNC] [default-linux-x86_64-release-asan] (253.51s) ydb/tests/datashard/ttl/test_ttl.py:418: in test_ttl self.do_test_ttl(table_name, pk_types, all_types, index, ttl, unique, sync) ydb/tests/datashard/ttl/test_ttl.py:64: in do_test_ttl self.select(table_name, pk_types, all_types, index, dml) ydb/tests/datashard/ttl/test_ttl.py:127: in select self.create_select(table_name, pk_types, all_types, index, i, 0, dml) ydb/tests/datashard/ttl/test_ttl.py:147: in create_select assert ( E AssertionError: Expected 0 rows, error when deleting 6 lines, table table_pgtimestamp_0__SYNC E assert (1 == 1 and 1 == 0) E + where 1 = len([{'count': 1}]) E + and 1 = {'count': 1}.count 
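Editorial note on the ydb/tests/datashard/ttl failures above: the repeated "assert (1 == 1 and 1 == 0)" lines are pytest's rewritten rendering of a two-part check in create_select, namely that the COUNT query returns exactly one result row and that the reported count has dropped to zero once the TTL deadline has passed; in every failing case the count is still 1. A minimal sketch of that invariant (the helper name and the plain-dict row shape are illustrative assumptions, not the actual test code):

    # Sketch of the invariant behind the failing asserts; rows is the COUNT query result.
    def rows_fully_expired(rows):
        return len(rows) == 1 and rows[0]["count"] == 0

    print(rows_fully_expired([{"count": 0}]))   # True  - what the test expects after TTL fires
    print(rows_fully_expired([{"count": 1}]))   # False - what this run observed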
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff/test_ttl.py.TestPgTTL.test_ttl.table_pgtimestamp_0__SYNC-pk_types9-all_types9-index9-pgtimestamp--SYNC.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff ------ [7/36] chunk ran 2 tests (total:387.88s - test:387.83s) [fail] test_ttl.py::TestTTL::test_ttl[table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC] [default-linux-x86_64-release-asan] (281.28s) ydb/tests/datashard/ttl/test_ttl.py:355: in test_ttl self.do_test_ttl(table_name, pk_types, all_types, index, ttl, unique, sync) ydb/tests/datashard/ttl/test_ttl.py:64: in do_test_ttl self.select(table_name, pk_types, all_types, index, dml) ydb/tests/datashard/ttl/test_ttl.py:127: in select self.create_select(table_name, pk_types, all_types, index, i, 0, dml) ydb/tests/datashard/ttl/test_ttl.py:147: in create_select assert ( E AssertionError: Expected 0 rows, error when deleting 4 lines, table table_Date_1_UNIQUE_SYNC E assert (1 == 1 and 1 == 0) E + where 1 = len([{'count': 1}]) E + and 1 = {'count': 1}.count Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff/test_ttl.py.TestTTL.test_ttl.table_Date_1_UNIQUE_SYNC-pk_types35-all_types35-index35-Date-UNIQUE-SYNC.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff ------ [8/36] chunk ran 2 tests (total:379.54s - setup:0.01s test:379.49s) [fail] test_ttl.py::TestTTL::test_ttl[table_Date_1__SYNC-pk_types33-all_types33-index33-Date--SYNC] [default-linux-x86_64-release-asan] (276.45s) ydb/tests/datashard/ttl/test_ttl.py:355: in test_ttl self.do_test_ttl(table_name, pk_types, all_types, index, ttl, unique, sync) ydb/tests/datashard/ttl/test_ttl.py:64: in do_test_ttl self.select(table_name, pk_types, all_types, index, dml) ydb/tests/datashard/ttl/test_ttl.py:127: in select self.create_select(table_name, pk_types, all_types, index, i, 0, dml) ydb/tests/datashard/ttl/test_ttl.py:147: in create_select assert ( E AssertionError: Expected 0 rows, error when deleting 6 lines, table table_Date_1__SYNC E assert (1 == 1 and 1 == 0) E + where 1 = len([{'count': 1}]) E + and 1 = {'count': 1}.count Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff/test_ttl.py.TestTTL.test_ttl.table_Date_1__SYNC-pk_types33-all_types33-index33-Date--SYNC.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/datashard/ttl/test-results/py3test/testing_out_stuff ------ FAIL: 42 - GOOD, 6 - FAIL ydb/tests/datashard/ttl ydb/tests/olap/column_family/compression [size:medium] nchunks:10 ------ [2/10] chunk ran 3 tests (total:601.63s - setup:0.01s test:600.15s) Chunk exceeded 600s timeout and was killed List of the tests involved in the launch: alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_15_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 15] (good) duration: 320.60s alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_16_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 16] (good) duration: 137.46s alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_17_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 17] (good) duration: 128.19s Killed by timeout (600 s) Log: 
/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/stderr ------ [3/10] chunk ran 2 tests (total:471.60s - test:471.05s) [fail] alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_18_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 18] [default-linux-x86_64-release-asan] (273.00s) ydb/tests/olap/column_family/compression/alter_compression.py:194: in test_all_supported_compression assert table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == expected_raw // 8 E assert 1031304 == (8000000 // 8) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAllCompression.test_all_supported_compression.zstd_18_compression-COMPRESSION.=.zstd.,.COMPRESSION_LEVEL.=.18.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff [fail] alter_compression.py::TestAllCompression::test_all_supported_compression[zstd_19_compression-COMPRESSION = "zstd", COMPRESSION_LEVEL = 19] [default-linux-x86_64-release-asan] (185.00s) ydb/tests/olap/column_family/compression/alter_compression.py:194: in test_all_supported_compression assert table.get_portion_stat_by_tier()['__DEFAULT']['Rows'] == expected_raw // 8 E assert 1015725 == (8000000 // 8) Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff/alter_compression.py.TestAllCompression.test_all_supported_compression.zstd_19_compression-COMPRESSION.=.zstd.,.COMPRESSION_LEVEL.=.19.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/column_family/compression/test-results/py3test/testing_out_stuff ------ TIMEOUT: 21 - GOOD, 2 - FAIL ydb/tests/olap/column_family/compression ydb/core/cms/ut_sentinel_unstable [size:medium] ------ sole chunk ran 1 test (total:564.13s - test:562.09s) [fail] TSentinelUnstableTests::BSControllerCantChangeStatus [default-linux-x86_64-release-asan] (546.45s) (TWithBackTrace) ydb/library/actors/testlib/test_runtime.cpp:1278: Dispatched 2500000 events, limit reached. 
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 TWithBackTrace::TWithBackTrace<>() at /-S/util/generic/yexception.h:146:5 NActors::TTestActorRuntimeBase::DispatchEventsInternal(NActors::TDispatchOptions const&, TInstant) at /-S/ydb/library/actors/testlib/test_runtime.cpp:0:48 DispatchEvents at /-S/ydb/library/actors/testlib/test_runtime.cpp:0:0 NKikimr::NCmsTest::TTestEnv::SetPDiskState(TSet, std::__y1::allocator> const&, NKikimrBlobStorage::TPDiskState_E, NKikimrBlobStorage::EDriveStatus) at /-S/ydb/core/cms/sentinel_ut_helpers.h:285:9 NKikimr::NCmsTest::NTestSuiteTSentinelUnstableTests::TTestCaseBSControllerCantChangeStatus::Execute_(NUnitTest::TTestContext&) at /-S/ydb/core/cms/sentinel_ut_unstable.cpp:0:17 operator() at /-S/ydb/core/cms/sentinel_ut_unstable.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/TSentinelUnstableTests.BSControllerCantChangeStatus.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/cms/ut_sentinel_unstable/test-results/unittest/testing_out_stuff/TSentinelUnstableTests.BSControllerCantChangeStatus.out ------ FAIL: 1 - FAIL ydb/core/cms/ut_sentinel_unstable ydb/tests/olap/scenario [size:medium] nchunks:10 ------ [0/10] chunk ran 2 tests (total:598.91s - test:597.13s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 19.3G (20213024K) used. 
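Editorial note: this is the same memory-cap warning already reported for ydb/tests/functional/api and ydb/tests/functional/kqp/kqp_query_svc, and it repeats for several ydb/tests/olap/scenario chunks below. The accompanying hint refers to the REQUIREMENTS(ram:X) macro in the test target's ya.make, which declares how much RAM the chunk needs so the CI scheduler reserves it and the limit check uses the higher threshold. A minimal sketch of such a declaration (the test source name and the chosen limit are illustrative assumptions, not taken from this run):

    PY3TEST()                       # ya.make of the affected test target (sketch)
    TEST_SRCS(test_scenario.py)     # placeholder test source
    SIZE(MEDIUM)                    # matches the [size:medium] shown in this report
    REQUIREMENTS(ram:32)            # reserve 32 GiB, above the 15-22 GiB peaks observed here
    END()

Raising the reservation only changes how the chunk is scheduled and checked on Autocheck/CI hosts; it does not reduce what the ydbd server processes actually consume.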
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 387714 54.8M 53.3M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 387909 40.7M 23.0M 10.4M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 387914 925M 917M 915M └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 393269 2.1G 2.0G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 393778 2.0G 2.0G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 394955 2.0G 2.0G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 396657 2.0G 2.0G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 398871 2.0G 2.0G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 399927 2.0G 2.0G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 400996 2.1G 2.1G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 402049 2.1G 2.0G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 402408 2.0G 2.0G 1.5G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr ------ [1/10] chunk ran 2 tests (total:275.29s - test:256.07s) [fail] test_alter_tiering.py::TestAlterTiering::test[many_tables] [default-linux-x86_64-release-asan] (190.83s) ydb/tests/olap/scenario/conftest.py:126: in test raise errors[0] ydb/tests/olap/scenario/conftest.py:85: in worker self._test_suffix(local_ctx, suffix, codes, idx) ydb/tests/olap/scenario/conftest.py:136: in _test_suffix ctx.executable(self, ctx) ydb/tests/olap/scenario/test_alter_tiering.py:362: in scenario_many_tables threads.start_and_wait_all() ydb/tests/olap/common/thread_helper.py:49: in start_and_wait_all self.join_all() ydb/tests/olap/common/thread_helper.py:45: in join_all thread.join(timeout=timeout) ydb/tests/olap/common/thread_helper.py:18: in join raise self.exc ydb/tests/olap/common/thread_helper.py:11: in run self.ret = self._target(*self._args, **self._kwargs) 
ydb/tests/olap/scenario/test_alter_tiering.py:248: in _loop_scan sth.execute_query( ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:507: in execute_query return self._run_with_expected_status( ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:366: in _run_with_expected_status pytest.fail(f'Unexpected status: must be in {repr(expected_status)}, but get {repr(error or status)}') E Failed: Unexpected status: must be in {}, but get GenericError('message: "Scan failed at tablet 72075186224037897, reason: task_error:Error reading blob range for columns: { Blob: DS:4294967295:[72075186224037897:1:1:255:1:4000:0] Offset: 376 Size: 224 }, error: cannot get blob: curlCode: 28, Timeout was reached, detailed error: , curlCode: 28, Timeout was reached, status: ERROR" issue_code: 2013 severity: 1 (server_code: 400080)') Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_alter_tiering.py.TestAlterTiering.test.many_tables.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff ------ [2/10] chunk ran 2 tests (total:434.11s - test:431.61s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 21.6G (22601576K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 388425 54.8M 54.4M 7.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 388606 40.8M 23.3M 10.6M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 388628 825M 823M 806M └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 392855 2.4G 2.3G 1.8G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 393347 2.3G 2.3G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 393931 2.3G 2.3G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 394823 2.4G 2.3G 1.7G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 395930 2.3G 2.1G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 397185 2.3G 2.2G 1.6G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 398348 2.3G 2.1G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 399544 2.3G 2.1G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 400687 2.3G 2.1G 1.5G └─ ydbd server --suppress-version-check 
--yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr [fail] test_insert.py::TestInsert::test_multi[read_data_during_bulk_upsert] [default-linux-x86_64-release-asan] (241.07s) ydb/tests/olap/scenario/conftest.py:117: in test_multi raise errors[0] ydb/tests/olap/scenario/conftest.py:85: in worker self._test_suffix(local_ctx, suffix, codes, idx) ydb/tests/olap/scenario/conftest.py:136: in _test_suffix ctx.executable(self, ctx) ydb/tests/olap/scenario/test_insert.py:106: in scenario_read_data_during_bulk_upsert thread2.join_all() ydb/tests/olap/common/thread_helper.py:45: in join_all thread.join(timeout=timeout) ydb/tests/olap/common/thread_helper.py:18: in join raise self.exc ydb/tests/olap/common/thread_helper.py:11: in run self.ret = self._target(*self._args, **self._kwargs) ydb/tests/olap/scenario/test_insert.py:56: in _loop_insert result = sth.execute_query( ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:507: in execute_query return self._run_with_expected_status( ydb/tests/olap/scenario/helpers/scenario_tests_helper.py:366: in _run_with_expected_status pytest.fail(f'Unexpected status: must be in {repr(expected_status)}, but get {repr(error or status)}') E Failed: Unexpected status: must be in {}, but get Undetermined('message: "State of operation is unknown. Failed to deviler message." issue_code: 2026 severity: 1 (server_code: 400170)') Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/test_insert.py.TestInsert.test_multi.read_data_during_bulk_upsert.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff ------ [3/10] chunk ran 2 tests (total:273.35s - test:268.97s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 16.5G (17303768K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 387920 54.8M 54.7M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 388014 40.2M 23.0M 10.0M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 388040 945M 943M 855M └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 393836 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 430124 1.6G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 394638 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 395662 1.7G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 397534 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 399164 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 430142 1.9G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 400461 1.7G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 401529 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 402418 1.7G 1.7G 1.1G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr ------ [4/10] chunk ran 2 tests (total:197.80s - setup:0.01s test:195.85s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 17.5G (18307044K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 388475 54.8M 54.8M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 388678 40.3M 23.1M 10.2M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 388714 832M 830M 739M └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 393187 1.8G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 393664 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 394533 1.8G 1.8G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 395741 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 397210 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 399329 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 400455 1.9G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 401865 1.8G 1.8G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 402636 1.8G 1.8G 1.4G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr ------ [5/10] chunk ran 8 tests (total:157.55s - test:156.62s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 17.3G (18152848K) used. 
This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 388109 54.8M 52.6M 7.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 388220 39.6M 22.2M 9.5M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 388223 756M 752M 660M └─ ydb-tests-olap-scenario --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doctes 393255 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 393646 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 394514 1.9G 1.9G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 396089 1.9G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 413423 1.9G 0b 0b │ └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/ 397893 1.8G 1.8G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 399465 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 400532 1.7G 1.6G 1.0G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 401527 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu 402256 1.8G 1.9G 1.3G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/chu Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/olap/scenario/test-results/py3test/testing_out_stuff/stderr ------ FAIL: 16 - GOOD, 2 - FAIL ydb/tests/olap/scenario ydb/core/tx/tx_proxy/ut_schemereq [size:medium] nchunks:10 ------ [0/10] chunk ran 30 tests (total:340.71s - test:340.40s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8 [default-linux-x86_64-release-asan] (11.82s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:17181 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-8.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53 [default-linux-x86_64-release-asan] (9.35s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:15732 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-53.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3 [default-linux-x86_64-release-asan] (10.53s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:19766 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-3.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49 [default-linux-x86_64-release-asan] (10.00s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:9381 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-49.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10 [default-linux-x86_64-release-asan] (11.43s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:25114 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-10.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15 [default-linux-x86_64-release-asan] (10.28s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:26059 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-15.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16 [default-linux-x86_64-release-asan] (9.60s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:4967 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-16.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4 [default-linux-x86_64-release-asan] (9.24s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:8230 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-4.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11 [default-linux-x86_64-release-asan] (10.26s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:3536 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-11.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54 [default-linux-x86_64-release-asan] (9.44s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:17541 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-54.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2 [default-linux-x86_64-release-asan] (10.67s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:22609 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-2.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13 [default-linux-x86_64-release-asan] (12.77s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:22895 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-13.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5 [default-linux-x86_64-release-asan] (9.56s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:12930 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-5.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50 [default-linux-x86_64-release-asan] (10.40s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:14042 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-50.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9 [default-linux-x86_64-release-asan] (11.22s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:21205 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-9.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52 [default-linux-x86_64-release-asan] (9.11s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:26979 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-52.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7 [default-linux-x86_64-release-asan] (12.44s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:31773 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-7.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18 [default-linux-x86_64-release-asan] (10.45s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:17166 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-18.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12 [default-linux-x86_64-release-asan] (10.04s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:62122 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-CreateUser-12.out ------ [1/10] chunk ran 30 tests (total:359.42s - test:359.06s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63 [default-linux-x86_64-release-asan] (10.23s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:18976 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-63.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26 [default-linux-x86_64-release-asan] (10.69s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:7997 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-26.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70 [default-linux-x86_64-release-asan] (8.43s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:2955 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-70.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32 [default-linux-x86_64-release-asan] (9.99s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:64503 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-32.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58 [default-linux-x86_64-release-asan] (11.91s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:20775 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-58.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59 [default-linux-x86_64-release-asan] (9.86s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:2975 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-59.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62 [default-linux-x86_64-release-asan] (10.00s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:20245 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-62.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61 [default-linux-x86_64-release-asan] (11.62s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:4297 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-61.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27 [default-linux-x86_64-release-asan] (8.55s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:3535 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-27.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55 [default-linux-x86_64-release-asan] (8.93s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:17981 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-55.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71 [default-linux-x86_64-release-asan] (10.34s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:32182 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-71.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57 [default-linux-x86_64-release-asan] (13.30s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:29921 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-57.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64 [default-linux-x86_64-release-asan] (10.47s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:1260 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-64.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72 [default-linux-x86_64-release-asan] (9.94s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:24811 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-72.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60 [default-linux-x86_64-release-asan] (10.18s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:22854 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-DropUser-60.out ------ [2/10] chunk ran 30 tests (total:358.62s - test:358.28s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13 [default-linux-x86_64-release-asan] (11.73s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:4377 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-13.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46 [default-linux-x86_64-release-asan] (11.77s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:29547 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-46.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22 [default-linux-x86_64-release-asan] (8.86s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:64997 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-22.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15 [default-linux-x86_64-release-asan] (11.22s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:25676 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-15.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2 [default-linux-x86_64-release-asan] (13.76s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:19250 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-2.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48 [default-linux-x86_64-release-asan] (8.78s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:16930 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-48.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42 [default-linux-x86_64-release-asan] (13.05s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:27567 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-42.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24 [default-linux-x86_64-release-asan] (12.90s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:19304 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-24.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12 [default-linux-x86_64-release-asan] (10.68s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:24280 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-12.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43 [default-linux-x86_64-release-asan] (11.04s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:26114 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-43.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45 [default-linux-x86_64-release-asan] (10.13s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:64459 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-BuiltinUser-ModifyUser-45.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14 [default-linux-x86_64-release-asan] (10.11s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:12410 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-14.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11 [default-linux-x86_64-release-asan] (10.17s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:20164 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-11.out ------ [3/10] chunk ran 30 tests (total:362.65s - setup:0.02s test:362.25s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60 [default-linux-x86_64-release-asan] (10.80s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:3485 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-60.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69 [default-linux-x86_64-release-asan] (8.71s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:30473 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-69.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50 [default-linux-x86_64-release-asan] (11.88s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:5139 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-50.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71 [default-linux-x86_64-release-asan] (11.02s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:4949 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-71.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58 [default-linux-x86_64-release-asan] (11.45s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:25630 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-58.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49 [default-linux-x86_64-release-asan] (13.08s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:65374 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-49.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59 [default-linux-x86_64-release-asan] (10.82s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:29950 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-59.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57 [default-linux-x86_64-release-asan] (14.26s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:22780 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-57.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63 [default-linux-x86_64-release-asan] (9.58s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:7709 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-63.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9 [default-linux-x86_64-release-asan] (13.68s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:15960 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-9.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61 [default-linux-x86_64-release-asan] (9.82s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:3165 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-61.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56 [default-linux-x86_64-release-asan] (14.42s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:5733 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-56.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55 [default-linux-x86_64-release-asan] (9.52s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:64773 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-55.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52 [default-linux-x86_64-release-asan] (10.28s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:62010 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-52.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51 [default-linux-x86_64-release-asan] (10.65s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:27698 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-51.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54 [default-linux-x86_64-release-asan] (11.73s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:5514 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-DropUser-54.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8 [default-linux-x86_64-release-asan] (9.61s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:20927 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-CreateUser-8.out ------ [4/10] chunk ran 30 tests (total:340.87s - test:340.54s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46 [default-linux-x86_64-release-asan] (8.80s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:23240 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-46.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25 [default-linux-x86_64-release-asan] (10.22s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:25735 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-25.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39 [default-linux-x86_64-release-asan] (9.03s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:29560 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-39.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36 [default-linux-x86_64-release-asan] (10.47s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:25281 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-36.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10 [default-linux-x86_64-release-asan] (9.74s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:5874 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-10.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40 [default-linux-x86_64-release-asan] (9.74s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:8919 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-40.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37 [default-linux-x86_64-release-asan] (10.86s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:27342 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-37.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48 [default-linux-x86_64-release-asan] (9.84s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:9738 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-48.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33 [default-linux-x86_64-release-asan] (9.67s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:3942 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-33.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47 [default-linux-x86_64-release-asan] (9.43s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:62593 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-Auth-LocalUser-ModifyUser-47.out
------ [5/10] chunk ran 30 tests (total:360.06s - test:359.67s)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15 [default-linux-x86_64-release-asan] (9.07s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:5756 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-15.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22 [default-linux-x86_64-release-asan] (13.06s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:7668 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-22.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51 [default-linux-x86_64-release-asan] (11.49s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:27074 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-51.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16 [default-linux-x86_64-release-asan] (9.07s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:26563 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-16.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24 [default-linux-x86_64-release-asan] (11.55s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:17321 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-24.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50 [default-linux-x86_64-release-asan] (10.18s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:4755 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-50.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3 [default-linux-x86_64-release-asan] (11.12s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:27135 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-3.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6 [default-linux-x86_64-release-asan] (11.02s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:25810 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-6.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5 [default-linux-x86_64-release-asan] (13.02s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:7113 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-5.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23 [default-linux-x86_64-release-asan] (10.94s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:63011 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-23.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49 [default-linux-x86_64-release-asan] (9.45s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:64752 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-49.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52 [default-linux-x86_64-release-asan] (9.12s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:26833 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-52.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21 [default-linux-x86_64-release-asan] (11.95s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:14160 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-21.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58 [default-linux-x86_64-release-asan] (9.22s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:27041 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-58.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17 [default-linux-x86_64-release-asan] (8.32s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:564, void NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext &, const TAlterLoginTestCase): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:7374 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-CreateUser-17.out
------ [6/10] chunk ran 30 tests (total:362.51s - setup:0.01s test:362.15s)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64 [default-linux-x86_64-release-asan] (12.74s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:22346 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-64.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40 [default-linux-x86_64-release-asan] (11.86s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:2454 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-40.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63 [default-linux-x86_64-release-asan] (9.87s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:24287 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-63.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65 [default-linux-x86_64-release-asan] (10.02s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:21832 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-65.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61 [default-linux-x86_64-release-asan] (14.79s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:10028 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-61.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39 [default-linux-x86_64-release-asan] (11.99s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:18903 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-39.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42 [default-linux-x86_64-release-asan] (8.76s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:16716 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-42.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62 [default-linux-x86_64-release-asan] (11.05s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:11834 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-62.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66 [default-linux-x86_64-release-asan] (9.78s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:31167 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-DropUser-66.out
------ [7/10] chunk ran 30 tests (total:345.27s - test:344.95s)
[fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48 [default-linux-x86_64-release-asan] (8.85s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:15601 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-48.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13 [default-linux-x86_64-release-asan] (11.13s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:23760 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-13.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7 [default-linux-x86_64-release-asan] (10.15s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:30612 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-7.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5 [default-linux-x86_64-release-asan] (10.63s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:14627 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-5.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3 [default-linux-x86_64-release-asan] (11.88s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:12283 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-3.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10 [default-linux-x86_64-release-asan] (12.03s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:28269 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-10.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4 [default-linux-x86_64-release-asan] (10.03s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:11549 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-4.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24 [default-linux-x86_64-release-asan] (10.20s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:29500 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-24.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22 [default-linux-x86_64-release-asan] (9.32s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:17965 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-22.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2 [default-linux-x86_64-release-asan] (9.74s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:30174 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-2.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6 [default-linux-x86_64-release-asan] (12.20s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:26998 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-6.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46 [default-linux-x86_64-release-asan] (12.44s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:13257 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-46.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11 [default-linux-x86_64-release-asan] (10.07s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:8413 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-11.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45 [default-linux-x86_64-release-asan] (9.65s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:18212 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-BuiltinUser-ModifyUser-45.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14 [default-linux-x86_64-release-asan] (8.89s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:16580 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-CreateUser-14.out ------ [8/10] chunk ran 30 tests (total:362.13s - test:361.79s) [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57 [default-linux-x86_64-release-asan] (12.65s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:13137 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-57.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72 [default-linux-x86_64-release-asan] (10.26s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:22416 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-72.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63 [default-linux-x86_64-release-asan] (9.67s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:1137 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-63.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71 [default-linux-x86_64-release-asan] (9.35s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:17848 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-71.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54 [default-linux-x86_64-release-asan] (13.15s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:27049 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-54.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70 [default-linux-x86_64-release-asan] (8.61s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:8713 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-70.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58 [default-linux-x86_64-release-asan] (11.88s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:8152 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-58.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60 [default-linux-x86_64-release-asan] (11.10s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:62842 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-60.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53 [default-linux-x86_64-release-asan] (13.46s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:4433 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-53.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61 [default-linux-x86_64-release-asan] (9.90s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:27195 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-61.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62 [default-linux-x86_64-release-asan] (10.14s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:2009 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-62.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56 [default-linux-x86_64-release-asan] (9.94s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:9855 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-56.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55 [default-linux-x86_64-release-asan] (13.87s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:13937 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-DropUser-55.out ------ [9/10] chunk ran 30 tests (total:352.75s - test:351.48s) [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly [default-linux-x86_64-release-asan] (13.16s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:10910 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-DomainLoginOnly.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44 [default-linux-x86_64-release-asan] (13.02s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:30341 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-44.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47 [default-linux-x86_64-release-asan] (9.61s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:27740 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-47.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32 [default-linux-x86_64-release-asan] (8.45s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:26497 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-32.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43 [default-linux-x86_64-release-asan] (11.84s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:23401 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-43.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck [default-linux-x86_64-release-asan] (16.12s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:26689 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnEmptyTenant-DomainLoginOnly-StrictAclCheck.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck [default-linux-x86_64-release-asan] (14.48s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:8058 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant-StrictAclCheck.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAdministerTenant-StrictAclCheck [default-linux-x86_64-release-asan] (13.30s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:5456 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAdministerTenant-StrictAclCheck.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAdministerTenant-StrictAclCheck.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42 [default-linux-x86_64-release-asan] (11.28s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:7024 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-42.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45 [default-linux-x86_64-release-asan] (13.81s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:27883 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-45.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46 [default-linux-x86_64-release-asan] (9.32s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:16149 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5 __invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-46.out [fail] SchemeReqAdminAccessInTenant::ClusterAdminCanAuthOnNonEmptyTenant [default-linux-x86_64-release-asan] (13.18s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:19378 TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9 GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12 UnRef at /-S/util/generic/ptr.h:637:13 UnRef at /-S/util/generic/ptr.h:637:13 operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1 TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13 ~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16 UnRef at /-S/util/generic/ptr.h:637:13 NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19 NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44 ?? at ??:0:0 ?? at ??:0:0 _start at ??:0:0 Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant.err Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAdminAccessInTenant.ClusterAdminCanAuthOnNonEmptyTenant.out [fail] SchemeReqAccess::AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48 [default-linux-x86_64-release-asan] (11.93s) assertion failed at ydb/core/tx/tx_proxy/schemereq_ut.cpp:256, void NKikimr::NTxProxyUT::CreateLocalUser(const TTestEnv &, const TString &, const TString &, const TString &): (sessionResult.IsSuccess())
: Error: GRpc error: (4): Deadline Exceeded
: Error: Grpc error response on endpoint localhost:15642
TBackTrace::Capture() at /-S/util/system/backtrace.cpp:284:9
GetCurrentTest at /-S/library/cpp/testing/unittest/registar.cpp:70:12
UnRef at /-S/util/generic/ptr.h:637:13
NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::AlterLoginProtect_RootDB(NUnitTest::TTestContext&, NKikimr::NTxProxyUT::NTestSuiteSchemeReqAccess::TAlterLoginTestCase) at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:5
__invoke at /-S/contrib/libs/cxxsupp/libcxx/include/__type_traits/invoke.h:149:18
operator() at /-S/ydb/core/tx/tx_proxy/schemereq_ut.cpp:0:1
TColoredProcessor::Run(std::__y1::function, TBasicString> const&, char const*, bool) at /-S/library/cpp/testing/unittest/utmain.cpp:526:13
~__value_func at /-S/contrib/libs/cxxsupp/libcxx/include/__functional/function.h:398:16
UnRef at /-S/util/generic/ptr.h:637:13
NUnitTest::TTestFactory::Execute() at /-S/library/cpp/testing/unittest/registar.cpp:0:19
NUnitTest::RunMain(int, char**) at /-S/library/cpp/testing/unittest/utmain.cpp:0:44
?? at ??:0:0
?? at ??:0:0
_start at ??:0:0
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/tx_proxy/ut_schemereq/test-results/unittest/testing_out_stuff/SchemeReqAccess.AlterLoginProtect-RootDB-NoAuth-LocalUser-ModifyUser-48.out
------ FAIL: 161 - GOOD, 139 - FAIL ydb/core/tx/tx_proxy/ut_schemereq
ydb/public/sdk/cpp/tests/integration/sessions_pool [size:medium] nchunks:10
------ [2/10] chunk ran 1 test (total:26.83s - recipes:11.77s test:11.36s recipes:3.66s)
[fail] YdbSdkSessionsPool::StressTestSync/0 [default-linux-x86_64-release-asan] (10.15s)
Value param: 1
ydb/public/sdk/cpp/tests/integration/sessions_pool/main.cpp:269: Expected equality of these values:
  Client->GetCurrentPoolSize()
    Which is: 0
  activeSessionsLimit
    Which is: 1
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/testing_out_stuff/YdbSdkSessionsPool.StressTestSync.0.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/testing_out_stuff/YdbSdkSessionsPool.StressTestSync.0.out
------ [3/10] chunk ran 1 test (total:27.55s - recipes:12.21s test:11.39s recipes:3.91s)
[fail] YdbSdkSessionsPool::StressTestSync/1 [default-linux-x86_64-release-asan] (10.24s)
Value param: 10
ydb/public/sdk/cpp/tests/integration/sessions_pool/main.cpp:269: Expected equality of these values:
  Client->GetCurrentPoolSize()
    Which is: 0
  activeSessionsLimit
    Which is: 10
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/testing_out_stuff/YdbSdkSessionsPool.StressTestSync.1.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/public/sdk/cpp/tests/integration/sessions_pool/test-results/gtest/testing_out_stuff/YdbSdkSessionsPool.StressTestSync.1.out
------ FAIL: 10 - GOOD, 2 - FAIL ydb/public/sdk/cpp/tests/integration/sessions_pool
ydb/core/tx/datashard/ut_order [size:medium] nchunks:40
------ [28/40] chunk ran 1 test (total:613.52s - test:600.05s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch: DataShardTxOrder::RandomPointsAndRanges (timeout) duration: 610.23s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_order/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_order/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_order/test-results/unittest/testing_out_stuff/stderr
[timeout] DataShardTxOrder::RandomPointsAndRanges [default-linux-x86_64-release-asan] (610.23s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_order/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_order/test-results/unittest/testing_out_stuff/DataShardTxOrder.RandomPointsAndRanges.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/tx/datashard/ut_order/test-results/unittest/testing_out_stuff/DataShardTxOrder.RandomPointsAndRanges.out
------ TIMEOUT: 53 - GOOD, 1 - TIMEOUT ydb/core/tx/datashard/ut_order
------ [test_disk.py 0/10] chunk ran 1 test (total:48.90s - test:48.82s)
Info: Test run has exceeded 8.0G (8388608K) memory limit with 15.9G (16646132K) used. This may lead to test failure on the Autocheck/CI
You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make
pid rss ref pdirt
876713 54.9M 54.9M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
876891 40.3M 23.0M 10.1M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args
876894 756M 759M 675M └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc
878073 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
878222 1.7G 1.7G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
878507 1.6G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
878766 1.6G 1.7G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
879096 1.6G 1.7G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
879313 1.6G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
879539 1.4G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/
879751 1.6G 1.8G 1.3G ├─ ydbd server --suppress-version-check
--yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 879977 1.4G 1.6G 1.1G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr ------ [test_tablet.py 0/10] chunk ran 1 test (total:60.16s - test:60.04s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 15.0G (15730120K) used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 878318 54.8M 54.9M 7.3M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 878530 40.1M 22.9M 10.0M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 878569 771M 775M 690M └─ ydb-tests-tools-nemesis-ut --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --doc 879972 1.6G 1.6G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 880173 1.8G 1.9G 1.4G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 880492 1.9G 1.9G 1.5G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 880773 1.6G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 881037 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 881221 1.8G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 881487 1.8G 1.7G 1.2G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ 881729 1.8G 1.9G 1.4G └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/ Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/nemesis/ut/test-results/py3test/testing_out_stuff/stderr ydb/tests/functional/dstool [size:medium] ------ sole chunk ran 4 tests (total:177.44s - test:177.32s canon:0.03s) Info: Test run has exceeded 8.0G (8388608K) memory limit with 13.6G (14255112K) 
used. This may lead to test failure on the Autocheck/CI You can increase test's ram requirement using REQUIREMENTS(ram:X) in the ya.make pid rss ref pdirt 1050108 54.8M 54.8M 7.2M test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1050222 40.8M 23.6M 10.6M └─ test_tool run_test @/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ya_command_file_0.args 1050225 763M 766M 685M └─ ydb-tests-functional-dstool --basetemp /home/runner/actions_runner/_work/ydb/ydb/tmp/out/tmp --capture no -c pkg:library.python.pytest:pytest.yatest.ini -p no:factor --d 1097169 1.6G 1.6G 1.1G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuf 1097744 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuf 1098267 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuf 1098895 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuf 1099481 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuf 1100121 1.8G 0b 0b ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuf 1100655 1.8G 1.8G 1.3G ├─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuf 1101229 1.8G 1.3G 872M └─ ydbd server --suppress-version-check --yaml-config=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuf Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/run_test.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff/stderr [fail] test_canonical_requests.py::Test::test_essential [default-linux-x86_64-release-asan] (88.95s) ydb/tests/functional/dstool/test_canonical_requests.py:152: in test_essential self._wait_pdisk_metrics_collected() ydb/tests/functional/dstool/test_canonical_requests.py:86: in _wait_pdisk_metrics_collected wait_for_and_assert(all_pdisk_metrics_collected, is_(True), message='All pdisk metrics collected') ydb/tests/library/common/wait_for.py:73: in wait_for_and_assert assert_that(result, matcher, message) E AssertionError: All pdisk metrics collected E Expected: E but: was Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff/test_canonical_requests.py.Test.test_essential.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/functional/dstool/test-results/py3test/testing_out_stuff ------ FAIL: 3 - GOOD, 1 - FAIL ydb/tests/functional/dstool ydb/tests/stress/olap_workload/tests [size:medium] ------ sole chunk ran 1 test (total:153.98s - test:153.66s) 
[fail] test_workload.py::TestYdbWorkload::test [default-linux-x86_64-release-asan] (145.86s) ydb/tests/stress/olap_workload/tests/test_workload.py:27: in test yatest.common.execute([ library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:422: in _finalise raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/olap_workload --endpoint grpc://localhost:13273 --database /Root --duration 120' has failed with code 1. E Errors: E ...c E for next_opt in opt_generator: E ^^^^^^^^^^^^^ E File "contrib/python/ydb/py3/ydb/retries.py", line 98, in retry_operation_impl E result = YdbRetryOperationFinalResult(callee(*args, **kwargs)) E ^^^^^^^^^^^^^^^^^^^^^^^ E File "contrib/python/ydb/py3/ydb/query/pool.py", line 250, in wrapped_callee E return [result_set for result_set in it] E ^^ E File "contrib/python/ydb/py3/ydb/_utilities.py", line 173, in __next__ E return self._next() E ^^^^^^^^^^^^ E File "contrib/python/ydb/py3/ydb/_utilities.py", line 164, in _next E res = self.wrapper(next(self.it)) E ^^^^^^^^^^^^^^^^^^^^^^^^^^^ E File "contrib/python/ydb/py3/ydb/query/session.py", line 390, in E lambda resp: base.wrap_execute_query_response( E ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ E File "contrib/python/ydb/py3/ydb/query/base.py", line 211, in decorator E return func(rpc_state, response_pb, session_state, *args, **kwargs) E ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ E File "contrib/python/ydb/py3/ydb/query/base.py", line 229, in wrap_execute_query_response E issues._process_response(response_pb) E File "contrib/python/ydb/py3/ydb/issues.py", line 237, in _process_response E raise exc_obj(_format_response(response_proto), response_proto.issues) E ydb.issues.Timeout: message: "Query compilation timed out." 
severity: 1 (server_code: 400090)
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff/test_workload.py.TestYdbWorkload.test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/stress/olap_workload/tests/test-results/py3test/testing_out_stuff
------ FAIL: 1 - FAIL ydb/tests/stress/olap_workload/tests
ydb/core/viewer/tests [size:medium]
------ sole chunk ran 51 tests (total:79.22s - test:78.96s canon:0.14s)
[fail] test.py::TestViewer::test_viewer_tenantinfo [default-linux-x86_64-release-asan] (0.03s)
Test results differ from canonical:
test_result['TenantInfo'][0]['GrpcRequestBytes']: extra key with value '441',
test_result['TenantInfo'][0]['GrpcResponseBytes']: extra key with value '95'
test_result['TenantInfo'][1]['Id']: - 6 + 42
test_result['TenantInfo'][2]['Id']: - 8 + 44
test_result['TenantInfo'][2]['ResourceId']: - 7 + 43
test_result['TenantInfo'][3]['Id']: - 7 + 43
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.TestViewer.test_viewer_tenantinfo.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff
[fail] test.py::TestViewer::test_viewer_tenantinfo_db [default-linux-x86_64-release-asan] (0.12s)
Test results differ from canonical:
test_result['/Root']['TenantInfo'][0]['GrpcRequestBytes']: extra key with value '441',
test_result['/Root']['TenantInfo'][0]['GrpcResponseBytes']: extra key with value '95'
test_result['/Root/dedicated_db']['TenantInfo'][0]['Id']: - 6 + 42
test_result['/Root/serverless_db']['TenantInfo'][0]['Id']: - 8 + 44
test_result['/Root/serverless_db']['TenantInfo'][0]['ResourceId']: - 7 + 43
test_result['/Root/shared_db']['TenantInfo'][0]['Id']: - 7 + 43
test_result['no-database']['TenantInfo'][0]['GrpcRequestBytes']: extra key with value '441',
test_result['no-database']['TenantInfo'][0]['GrpcResponseBytes']: extra key with value '95'
test_result['no-database']['TenantInfo'][1]['Id']: - 6 + 42
test_result['no-database']['TenantInfo'][2]['Id']: - 8 + 44
test_result['no-database']['TenantInfo'][2]['ResourceId']: - 7 + 43
test_result['no-database']['TenantInfo'][3]['Id']: - 7 + 43
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.TestViewer.test_viewer_tenantinfo_db.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff
[fail] test.py::TestViewer::test_scheme_directory [default-linux-x86_64-release-asan] (0.16s)
Test results differ from canonical:
test_result['1-get']['children'][3]: extra value {'created_at': {'plan_step': 'not-zero-number-text', 'tx_id':...
test_result['1-get']['self']['owner']: - root + user1
test_result['3-get']['children'][4]: extra value {'created_at': {'plan_step': 'not-zero-number-text', 'tx_id':...
test_result['3-get']['self']['owner']: - root + user1
test_result['5-get']['children'][3]: extra value {'created_at': {'plan_step': 'not-zero-number-text', 'tx_id':...
test_result['5-get']['self']['owner']: - root + user1
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff/test.py.TestViewer.test_scheme_directory.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/viewer/tests/test-results/py3test/testing_out_stuff
------ FAIL: 48 - GOOD, 3 - FAIL ydb/core/viewer/tests
ydb/core/statistics/aggregator/ut [size:medium] nchunks:60
------ [4/60] chunk ran 1 test (total:604.30s - test:600.06s)
Chunk exceeded 600s timeout and was killed
List of the tests involved in the launch: AnalyzeColumnshard::AnalyzeMultiOperationId (timeout) duration: 602.63s
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/run_test.log
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/stderr
[timeout] AnalyzeColumnshard::AnalyzeMultiOperationId [default-linux-x86_64-release-asan] (602.63s)
Killed by timeout (600 s)
Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff
Stderr: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeMultiOperationId.err
Stdout: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/core/statistics/aggregator/ut/test-results/unittest/testing_out_stuff/AnalyzeColumnshard.AnalyzeMultiOperationId.out
------ TIMEOUT: 31 - GOOD, 1 - TIMEOUT ydb/core/statistics/aggregator/ut
ydb/tests/fq/streaming_optimize [size:medium] nchunks:8
------ [test_sql_negative.py 0/4] chunk ran 1 test (total:33.85s - recipes:0.79s test:32.49s recipes:0.51s)
[fail] test_sql_negative.py::test[watermarks-bad_column-default.txt] [default-linux-x86_64-release-asan] (29.36s)
ydb/tests/fq/streaming_optimize/test_sql_negative.py:48: in test
    result = fq_run.yql_exec(check_error=False, action="explain")
ydb/tests/fq/tools/fqrun.py:80: in yql_exec
    proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir)
library/python/testing/yatest_common/yatest/common/process.py:656: in execute
    res.wait(check_exit_code, timeout, on_timeout)
library/python/testing/yatest_common/yatest/common/process.py:411: in wait
    self._finalise(check_exit_code)
library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise
    self.verify_sanitize_errors()
library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors
    raise ExecutionError(self)
E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hk8c9z4l/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hk8c9z4l/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hk8c9z4l/ast.txt
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hk8c9z4l/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hk8c9z4l/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hk8c9z4l/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hk8c9z4l/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hk8c9z4l/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hk8c9z4l/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_hk8c9z4l/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7bc7dcbe7960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7bc7dcb6ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7bc7dcc1740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7bc7dcb0fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7bc7dcb0fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7bc7dcb0fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7bc7dcc66cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7bc7dcc66cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7bc7dcbe6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7bc7dcbe6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7bc7dcb6ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7bc7dcb0f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7bc7dcb0f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_negative.py.test.watermarks-bad_column-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_negative.py 1/4] chunk ran 1 test (total:63.44s - recipes:1.32s test:59.23s recipes:2.67s) [fail] test_sql_negative.py::test[watermarks-bad_pushdown-default.txt] [default-linux-x86_64-release-asan] (51.17s) ydb/tests/fq/streaming_optimize/test_sql_negative.py:48: in test result = fq_run.yql_exec(check_error=False, action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ny43jiwr/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ny43jiwr/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ny43jiwr/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ny43jiwr/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ny43jiwr/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ny43jiwr/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ny43jiwr/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ny43jiwr/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ny43jiwr/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ny43jiwr/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7bc2fc787960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7bc2fc70ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7bc2fc7b740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7bc2fc6afe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7bc2fc6afe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7bc2fc6afd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7bc2fc806cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7bc2fc806cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7bc2fc786977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7bc2fc786977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7bc2fc70ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7bc2fc6af36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7bc2fc6af36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453594 byte(s) leaked in 8613 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_negative.py.test.watermarks-bad_pushdown-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 0/4] chunk ran 8 tests (total:157.19s - recipes:0.62s test:156.01s recipes:0.44s) [fail] test_sql_streaming.py::test[hop-GroupByHop-default.txt] [default-linux-x86_64-release-asan] (24.21s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gb45yh1_/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gb45yh1_/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gb45yh1_/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gb45yh1_/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gb45yh1_/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gb45yh1_/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gb45yh1_/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gb45yh1_/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gb45yh1_/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gb45yh1_/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7afe5f877960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7afe5f7fab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7afe5f8a740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7afe5f79fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7afe5f79fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7afe5f79fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7afe5f8f6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7afe5f8f6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7afe5f876977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7afe5f876977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7afe5f7fab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7afe5f79f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7afe5f79f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHop-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopByStringKey-default.txt] [default-linux-x86_64-release-asan] (19.30s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_76_uu6an/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_76_uu6an/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_76_uu6an/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_76_uu6an/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_76_uu6an/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_76_uu6an/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_76_uu6an/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_76_uu6an/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_76_uu6an/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_76_uu6an/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b240dfb7960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b240df3ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b240dfe740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b240dedfe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b240dedfe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b240dedfd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b240e036cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b240e036cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b240dfb6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b240dfb6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b240df3ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b240dedf36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b240dedf36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopExprKey-default.txt] [default-linux-x86_64-release-asan] (19.83s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ygm4s6w3/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ygm4s6w3/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ygm4s6w3/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ygm4s6w3/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ygm4s6w3/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ygm4s6w3/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ygm4s6w3/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ygm4s6w3/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ygm4s6w3/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ygm4s6w3/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b31481a7960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b314812ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b31481d740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b31480cfe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b31480cfe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b31480cfd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b3148226cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b3148226cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b31481a6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b31481a6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b314812ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b31480cf36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b31480cf36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453741 byte(s) leaked in 8616 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopExprKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopListKey-default.txt] [default-linux-x86_64-release-asan] (16.93s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d5uedu9p/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d5uedu9p/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d5uedu9p/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d5uedu9p/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d5uedu9p/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d5uedu9p/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d5uedu9p/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d5uedu9p/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d5uedu9p/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d5uedu9p/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b43e08a7960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b43e082ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b43e08d740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b43e07cfe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b43e07cfe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b43e07cfd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b43e0926cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b43e0926cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b43e08a6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b43e08a6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b43e082ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b43e07cf36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b43e07cf36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopListKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopNoKey-default.txt] [default-linux-x86_64-release-asan] (17.80s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x63ere40/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x63ere40/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x63ere40/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x63ere40/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x63ere40/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x63ere40/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x63ere40/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x63ere40/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x63ere40/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x63ere40/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b6b57787960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b6b5770ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b6b577b740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b6b576afe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b6b576afe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b6b576afd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b6b57806cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b6b57806cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b6b57786977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b6b57786977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b6b5770ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b6b576af36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b6b576af36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopNoKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopPercentile-default.txt] [default-linux-x86_64-release-asan] (19.30s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uxpvtxi/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uxpvtxi/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uxpvtxi/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uxpvtxi/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uxpvtxi/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uxpvtxi/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uxpvtxi/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uxpvtxi/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uxpvtxi/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_7uxpvtxi/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b8444657960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b84445dab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b844468740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b844457fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b844457fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b844457fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b84446d6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b84446d6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b8444656977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b8444656977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b84445dab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b844457f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b844457f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopPercentile-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (17.93s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6iwhaexm/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6iwhaexm/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6iwhaexm/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6iwhaexm/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6iwhaexm/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6iwhaexm/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6iwhaexm/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6iwhaexm/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6iwhaexm/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6iwhaexm/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7bad86fb7960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7bad86f3ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7bad86fe740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7bad86edfe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7bad86edfe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7bad86edfd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7bad87036cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7bad87036cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7bad86fb6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7bad86fb6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7bad86f3ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7bad86edf36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7bad86edf36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopTimeExtractorUnusedColumns-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hop-GroupByHopWithDataWatermarks-default.txt] [default-linux-x86_64-release-asan] (16.49s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35ro3bc1/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35ro3bc1/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35ro3bc1/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35ro3bc1/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35ro3bc1/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35ro3bc1/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35ro3bc1/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35ro3bc1/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35ro3bc1/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_35ro3bc1/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7bee3bc47960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7bee3bbcab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7bee3bc7740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7bee3bb6fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7bee3bb6fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7bee3bb6fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7bee3bcc6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7bee3bcc6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7bee3bc46977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7bee3bc46977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7bee3bbcab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7bee3bb6f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7bee3bb6f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453692 byte(s) leaked in 8615 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hop-GroupByHopWithDataWatermarks-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 1/4] chunk ran 8 tests (total:158.42s - recipes:0.64s test:157.15s recipes:0.48s) [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindow-default.txt] [default-linux-x86_64-release-asan] (27.56s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1hkvrv3/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1hkvrv3/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1hkvrv3/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1hkvrv3/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1hkvrv3/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1hkvrv3/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1hkvrv3/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1hkvrv3/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1hkvrv3/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_y1hkvrv3/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7bea94b57960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7bea94adab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7bea94b8740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7bea94a7fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7bea94a7fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7bea94a7fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7bea94bd6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7bea94bd6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7bea94b56977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7bea94b56977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7bea94adab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7bea94a7f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7bea94a7f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindow-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowByStringKey-default.txt] [default-linux-x86_64-release-asan] (19.33s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf9ty9jd/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf9ty9jd/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf9ty9jd/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf9ty9jd/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf9ty9jd/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf9ty9jd/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf9ty9jd/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf9ty9jd/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf9ty9jd/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xf9ty9jd/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b6c77fb7960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b6c77f3ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b6c77fe740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b6c77edfe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b6c77edfe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b6c77edfd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b6c78036cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b6c78036cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b6c77fb6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b6c77fb6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b6c77f3ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b6c77edf36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b6c77edf36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowByStringKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowExprKey-default.txt] [default-linux-x86_64-release-asan] (18.43s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__n1762o8/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__n1762o8/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__n1762o8/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__n1762o8/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__n1762o8/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__n1762o8/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__n1762o8/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__n1762o8/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__n1762o8/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun__n1762o8/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b04674e7960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b046746ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b046751740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b046740fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b046740fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b046740fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b0467566cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b0467566cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b04674e6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b04674e6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b046746ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b046740f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b046740f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowExprKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowListKey-default.txt] [default-linux-x86_64-release-asan] (17.29s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_npb3uwb7/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_npb3uwb7/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_npb3uwb7/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_npb3uwb7/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_npb3uwb7/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_npb3uwb7/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_npb3uwb7/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_npb3uwb7/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_npb3uwb7/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_npb3uwb7/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b16db687960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b16db60ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b16db6b740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b16db5afe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b16db5afe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b16db5afd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b16db706cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b16db706cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b16db686977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b16db686977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b16db60ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b16db5af36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b16db5af36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowListKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowNoKey-default.txt] [default-linux-x86_64-release-asan] (18.04s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jvuhrwfg/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jvuhrwfg/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jvuhrwfg/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jvuhrwfg/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jvuhrwfg/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jvuhrwfg/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jvuhrwfg/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jvuhrwfg/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jvuhrwfg/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jvuhrwfg/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7bd811ab7960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7bd811a3ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7bd811ae740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7bd8119dfe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7bd8119dfe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7bd8119dfd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7bd811b36cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7bd811b36cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7bd811ab6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7bd811ab6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7bd811a3ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7bd8119df36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7bd8119df36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowNoKey-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowPercentile-default.txt] [default-linux-x86_64-release-asan] (18.23s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gnkwugzp/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gnkwugzp/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gnkwugzp/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gnkwugzp/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gnkwugzp/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gnkwugzp/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gnkwugzp/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gnkwugzp/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gnkwugzp/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_gnkwugzp/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7bef82927960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7bef828aab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7bef8295740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7bef8284fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7bef8284fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7bef8284fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7bef829a6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7bef829a6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7bef82926977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7bef82926977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7bef828aab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7bef8284f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7bef8284f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowPercentile-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt] [default-linux-x86_64-release-asan] (18.76s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_64woorxb/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_64woorxb/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_64woorxb/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_64woorxb/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_64woorxb/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_64woorxb/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_64woorxb/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_64woorxb/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_64woorxb/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_64woorxb/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b375d7d7960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b375d75ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b375d80740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b375d6ffe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b375d6ffe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b375d6ffd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b375d856cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b375d856cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b375d7d6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b375d7d6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b375d75ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b375d6ff36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b375d6ff36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453103 byte(s) leaked in 8604 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.hopping_window-GroupByHoppingWindowTimeExtractorUnusedColumns-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopic-default.txt] [default-linux-x86_64-release-asan] (15.27s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_r1pm787n/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_r1pm787n/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_r1pm787n/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_r1pm787n/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_r1pm787n/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_r1pm787n/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_r1pm787n/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_r1pm787n/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_r1pm787n/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_r1pm787n/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b2b76ec7960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b2b76e4ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b2b76ef740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b2b76defe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b2b76defe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b2b76defd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b2b76f46cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b2b76f46cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b2b76ec6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b2b76ec6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b2b76e4ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b2b76def36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b2b76def36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 2/4] chunk ran 8 tests (total:159.17s - recipes:0.79s test:157.84s recipes:0.42s) [fail] test_sql_streaming.py::test[pq-ReadTopicWithMetadata-default.txt] [default-linux-x86_64-release-asan] (29.83s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bbrqncf5/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bbrqncf5/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bbrqncf5/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bbrqncf5/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bbrqncf5/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bbrqncf5/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bbrqncf5/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bbrqncf5/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bbrqncf5/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bbrqncf5/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7bcbad8d7960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7bcbad85ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7bcbad90740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7bcbad7ffe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7bcbad7ffe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7bcbad7ffd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7bcbad956cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7bcbad956cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7bcbad8d6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7bcbad8d6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7bcbad85ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7bcbad7ff36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7bcbad7ff36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithMetadata-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopicWithMetadataInsideFilter-default.txt] [default-linux-x86_64-release-asan] (18.94s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4rnp2825/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4rnp2825/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4rnp2825/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4rnp2825/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4rnp2825/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4rnp2825/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4rnp2825/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4rnp2825/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4rnp2825/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_4rnp2825/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b102ad17960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b102ac9ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b102ad4740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b102ac3fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b102ac3fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b102ac3fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b102ad96cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b102ad96cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b102ad16977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b102ad16977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b102ac9ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b102ac3f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b102ac3f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453791 byte(s) leaked in 8617 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithMetadataInsideFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopicWithMetadataNestedDeep-default.txt] [default-linux-x86_64-release-asan] (19.08s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pp_kdaip/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pp_kdaip/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pp_kdaip/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pp_kdaip/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pp_kdaip/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pp_kdaip/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pp_kdaip/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pp_kdaip/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pp_kdaip/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_pp_kdaip/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7bf0ceef7960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7bf0cee7ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7bf0cef2740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7bf0cee1fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7bf0cee1fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7bf0cee1fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7bf0cef76cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7bf0cef76cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7bf0ceef6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7bf0ceef6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7bf0cee7ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7bf0cee1f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7bf0cee1f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 446084 byte(s) leaked in 8472 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithMetadataNestedDeep-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopicWithMetadataWithFilter-default.txt] [default-linux-x86_64-release-asan] (16.65s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xb1_jdhv/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xb1_jdhv/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xb1_jdhv/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xb1_jdhv/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xb1_jdhv/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xb1_jdhv/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xb1_jdhv/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xb1_jdhv/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xb1_jdhv/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_xb1_jdhv/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b06cf187960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b06cf10ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b06cf1b740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b06cf0afe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b06cf0afe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b06cf0afd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b06cf206cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b06cf206cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b06cf186977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b06cf186977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b06cf10ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b06cf0af36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b06cf0af36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithMetadataWithFilter-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (16.98s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d1vrx6hb/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d1vrx6hb/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d1vrx6hb/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d1vrx6hb/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d1vrx6hb/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d1vrx6hb/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d1vrx6hb/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d1vrx6hb/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d1vrx6hb/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_d1vrx6hb/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7bded2057960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7bded1fdab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7bded208740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7bded1f7fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7bded1f7fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7bded1f7fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7bded20d6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7bded20d6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7bded2056977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7bded2056977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7bded1fdab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7bded1f7f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7bded1f7f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadTwoTopics-default.txt] [default-linux-x86_64-release-asan] (18.52s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ch8xy_aa/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ch8xy_aa/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ch8xy_aa/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ch8xy_aa/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ch8xy_aa/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ch8xy_aa/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ch8xy_aa/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ch8xy_aa/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ch8xy_aa/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_ch8xy_aa/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7bbe6e107960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7bbe6e08ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7bbe6e13740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7bbe6e02fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7bbe6e02fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7bbe6e02fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7bbe6e186cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7bbe6e186cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7bbe6e106977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7bbe6e106977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7bbe6e08ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7bbe6e02f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7bbe6e02f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadWriteSameTopic-default.txt] [default-linux-x86_64-release-asan] (17.96s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l54aa_8g/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l54aa_8g/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l54aa_8g/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l54aa_8g/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l54aa_8g/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l54aa_8g/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l54aa_8g/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l54aa_8g/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l54aa_8g/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_l54aa_8g/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b498df67960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b498deeab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b498df9740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b498de8fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b498de8fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b498de8fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b498dfe6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b498dfe6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b498df66977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b498df66977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b498deeab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b498de8f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b498de8f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadWriteSameTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-ReadWriteTopic-default.txt] [default-linux-x86_64-release-asan] (15.46s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6h50cbta/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6h50cbta/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6h50cbta/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6h50cbta/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6h50cbta/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6h50cbta/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6h50cbta/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6h50cbta/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6h50cbta/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_6h50cbta/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b698f217960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b698f19ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b698f24740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b698f13fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b698f13fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b698f13fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b698f296cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b698f296cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b698f216977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b698f216977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b698f19ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b698f13f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b698f13f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadWriteTopic-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ [test_sql_streaming.py 3/4] chunk ran 7 tests (total:142.48s - recipes:0.83s test:141.07s recipes:0.47s) [fail] test_sql_streaming.py::test[pq-ReadWriteTopicWithSchema-default.txt] [default-linux-x86_64-release-asan] (29.56s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vq9p_0eo/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vq9p_0eo/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vq9p_0eo/ast.txt 
--plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vq9p_0eo/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vq9p_0eo/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vq9p_0eo/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vq9p_0eo/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vq9p_0eo/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vq9p_0eo/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_vq9p_0eo/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b68aa807960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b68aa78ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b68aa83740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b68aa72fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b68aa72fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b68aa72fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b68aa886cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b68aa886cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b68aa806977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b68aa806977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b68aa78ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b68aa72f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b68aa72f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453785 byte(s) leaked in 8617 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-ReadWriteTopicWithSchema-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[pq-WriteTwoTopics-default.txt] [default-linux-x86_64-release-asan] (18.69s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bpvmt_n6/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bpvmt_n6/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bpvmt_n6/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bpvmt_n6/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bpvmt_n6/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bpvmt_n6/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bpvmt_n6/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bpvmt_n6/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bpvmt_n6/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_bpvmt_n6/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b9f936b7960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b9f9363ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b9f936e740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b9f935dfe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b9f935dfe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b9f935dfd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b9f93736cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b9f93736cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b9f936b6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b9f936b6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b9f9363ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b9f935df36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b9f935df36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.pq-WriteTwoTopics-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[solomon-ReadTopicGroupWriteToSolomon-default.txt] [default-linux-x86_64-release-asan] (18.67s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qnp5qdjr/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qnp5qdjr/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qnp5qdjr/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qnp5qdjr/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qnp5qdjr/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qnp5qdjr/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qnp5qdjr/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qnp5qdjr/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qnp5qdjr/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_qnp5qdjr/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7bf752347960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7bf7522cab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7bf75237740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7bf75226fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7bf75226fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7bf75226fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7bf7523c6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7bf7523c6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7bf752346977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7bf752346977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7bf7522cab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7bf75226f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7bf75226f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.solomon-ReadTopicGroupWriteToSolomon-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[watermarks-watermarks-default.txt] [default-linux-x86_64-release-asan] (16.31s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun___ke_d_o/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun___ke_d_o/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun___ke_d_o/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun___ke_d_o/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun___ke_d_o/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun___ke_d_o/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun___ke_d_o/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun___ke_d_o/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun___ke_d_o/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun___ke_d_o/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b4829267960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b48291eab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b482929740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b482918fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b482918fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b482918fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b48292e6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b48292e6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b4829266977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b4829266977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b48291eab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b482918f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b482918f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.watermarks-watermarks-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[watermarks-watermarks_adjust-default.txt] [default-linux-x86_64-release-asan] (16.80s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kosfx4zz/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kosfx4zz/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kosfx4zz/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kosfx4zz/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kosfx4zz/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kosfx4zz/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kosfx4zz/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kosfx4zz/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kosfx4zz/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_kosfx4zz/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b80e9b47960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b80e9acab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b80e9b7740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b80e9a6fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b80e9a6fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b80e9a6fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b80e9bc6cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b80e9bc6cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b80e9b46977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b80e9b46977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b80e9acab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b80e9a6f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b80e9a6f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). 
Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.watermarks-watermarks_adjust-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[watermarks-watermarks_as-default.txt] [default-linux-x86_64-release-asan] (18.19s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkluq98/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkluq98/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkluq98/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkluq98/plan.json --log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkluq98/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkluq98/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkluq98/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkluq98/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkluq98/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_jrkluq98/topic_3.txt' has failed with code 100. 
E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b40544e7960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b405446ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b405451740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b405440fe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b405440fe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b405440fd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b4054566cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b4054566cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b40544e6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b40544e6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b405446ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b405440f36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b405440f36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453840 byte(s) leaked in 8618 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.watermarks-watermarks_as-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff [fail] test_sql_streaming.py::test[watermarks-watermarks_drop-default.txt] [default-linux-x86_64-release-asan] (18.72s) ydb/tests/fq/streaming_optimize/test_sql_streaming.py:37: in test result = fq_run.yql_exec(action="explain") ydb/tests/fq/tools/fqrun.py:80: in yql_exec proc_result = yatest.common.process.execute(cmd.strip().split(), check_exit_code=False, cwd=self.res_dir) library/python/testing/yatest_common/yatest/common/process.py:656: in execute res.wait(check_exit_code, timeout, on_timeout) library/python/testing/yatest_common/yatest/common/process.py:411: in wait self._finalise(check_exit_code) library/python/testing/yatest_common/yatest/common/process.py:425: in _finalise self.verify_sanitize_errors() library/python/testing/yatest_common/yatest/common/process.py:454: in verify_sanitize_errors raise ExecutionError(self) E yatest.common.process.ExecutionError: Command '/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/tools/fqrun/fqrun --exclude-linked-udfs --action=explain --cfg=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x15oa8zw/fq_config.conf --result-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x15oa8zw/results.txt --ast-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x15oa8zw/ast.txt --plan-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x15oa8zw/plan.json 
--log-file=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x15oa8zw/log.txt --udfs-dir=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql_udfs --result-format=full-proto --canonical-output --query=/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x15oa8zw/query_0.sql --emulate-pq=test_topic_input@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x15oa8zw/topic_0.txt --emulate-pq=test_topic_input2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x15oa8zw/topic_1.txt --emulate-pq=test_topic_output@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x15oa8zw/topic_2.txt --emulate-pq=test_topic_output2@/home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/yql/test_fqrun_x15oa8zw/topic_3.txt' has failed with code 100. E Errors: E ...mpl /-S/contrib/tools/python3/Python/bltinmodule.c:1093:17 E #30 0x7b1b313d7960 in builtin_exec /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:543:20 E #31 0x7b1b3135ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #32 0x7b1b3140740d in _PyEval_EvalFrameDefault /tmp/Python/bytecodes.c:3263:26 E #33 0x7b1b312ffe9a in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #34 0x7b1b312ffe9a in object_vacall /-S/contrib/tools/python3/Objects/call.c:850:14 E #35 0x7b1b312ffd08 in PyObject_CallMethodObjArgs /-S/contrib/tools/python3/Objects/call.c:911:24 E #36 0x7b1b31456cec in import_find_and_load /-S/contrib/tools/python3/Python/import.c:2802:11 E #37 0x7b1b31456cec in PyImport_ImportModuleLevelObject /-S/contrib/tools/python3/Python/import.c:2885:15 E #38 0x7b1b313d6977 in builtin___import___impl /-S/contrib/tools/python3/Python/bltinmodule.c:276:12 E #39 0x7b1b313d6977 in builtin___import__ /-S/contrib/tools/python3/Python/clinic/bltinmodule.c.h:107:20 E #40 0x7b1b3135ab35 in cfunction_vectorcall_FASTCALL_KEYWORDS /-S/contrib/tools/python3/Objects/methodobject.c:438:24 E #41 0x7b1b312ff36c in _PyObject_VectorcallTstate /-S/contrib/tools/python3/Include/internal/pycore_call.h:92:11 E #42 0x7b1b312ff36c in _PyObject_CallFunctionVa /-S/contrib/tools/python3/Objects/call.c:562:18 E E SUMMARY: AddressSanitizer: 453733 byte(s) leaked in 8616 allocation(s). Log: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff/test_sql_streaming.py.test.watermarks-watermarks_drop-default.txt.log Logsdir: /home/runner/actions_runner/_work/ydb/ydb/tmp/out/ydb/tests/fq/streaming_optimize/test-results/py3test/testing_out_stuff ------ FAIL: 33 - FAIL ydb/tests/fq/streaming_optimize Total 337 suites: 315 - GOOD 16 - FAIL 6 - TIMEOUT Total 9367 tests: 9076 - GOOD 203 - FAIL 75 - NOT_LAUNCHED 6 - TIMEOUT 7 - SKIPPED Cache efficiency ratio is 89.15% (42548 of 47728). 
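Each of the 33 ydb/tests/fq/streaming_optimize failures above follows the same pattern: the test calls fq_run.yql_exec(action="explain"), which launches the fqrun binary through yatest.common.process.execute(..., check_exit_code=False); fqrun exits with code 100 and its output carries an AddressSanitizer leak report rooted in CPython's import machinery, so verify_sanitize_errors() raises ExecutionError even though the exit code itself is not checked. When triaging a log like this it can help to collapse the repetition into one line per test. The sketch below is a minimal way to do that, assuming the log is read as plain text; the file name asan_leak_summary.py and the function summarize_asan_leaks are hypothetical, and only the "[fail] ..." and "SUMMARY: AddressSanitizer: ..." formats are taken from the output above.

#!/usr/bin/env python3
# asan_leak_summary.py (hypothetical helper): collapse the repeated
# AddressSanitizer reports in a ya make test log into one line per test.
import re
import sys

# Both patterns mirror the literal strings visible in the log above.
FAIL_RE = re.compile(r"\[fail\] (?P<test>\S+::\S+?) \[")
LEAK_RE = re.compile(
    r"SUMMARY: AddressSanitizer: (?P<bytes>\d+) byte\(s\) leaked "
    r"in (?P<allocs>\d+) allocation\(s\)"
)

def summarize_asan_leaks(text):
    """Pair every '[fail] ...' header with the next ASan SUMMARY after it."""
    fails = [(m.start(), m.group("test")) for m in FAIL_RE.finditer(text)]
    leaks = [(m.start(), int(m.group("bytes")), int(m.group("allocs")))
             for m in LEAK_RE.finditer(text)]
    pairs = []
    for pos, test in fails:
        # Naive pairing: take the first SUMMARY that appears after this header;
        # if a test produced no leak report, the next test's report is picked up,
        # which is acceptable for a quick triage pass.
        nxt = next((l for l in leaks if l[0] > pos), None)
        if nxt is not None:
            pairs.append((test, nxt[1], nxt[2]))
    return pairs

if __name__ == "__main__":
    with open(sys.argv[1], errors="replace") as fh:
        log_text = fh.read()
    for test, nbytes, nallocs in summarize_asan_leaks(log_text):
        print("%s: %d byte(s) leaked in %d allocation(s)" % (test, nbytes, nallocs))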
Local: 0 (0.00%), dist: 9673 (20.27%), by dynamic uids: 0 (0.00%), avoided: 32875 (68.88%)
Dist cache download: count=4509, size=16.7 GiB, speed=165.55 MiB/s
Disk usage for tools/sdk at least 1.18 GiB
Additional disk space consumed for build cache 1.01 TiB
Critical path:
[ 27066 ms] [CC] [nvZxmKw38bmMmzXrxvH8AQ default-linux-x86_64 release asan]: $(SOURCE_ROOT)/ydb/core/persqueue/pqtablet/partition/mlp/mlp_consumer.cpp [started: 0 (1767493032335), finished: 27066 (1767493059401)]
[ 72 ms] [AR] [QEQ8l0CRWoUksuPVgbobdg default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/core/persqueue/pqtablet/partition/mlp/libpqtablet-partition-mlp.a [started: 27081 (1767493059416), finished: 27153 (1767493059488)]
[ 66417 ms] [LD] [XFVHM0SbGTAPby5d6W0CGA default-linux-x86_64 release asan]: $(BUILD_ROOT)/ydb/apps/ydbd/ydbd [started: 48501 (1767493080836), finished: 114918 (1767493147253)]
[650135 ms] [TM] [test-10328883828574438480 asan default-linux-x86_64 release]: ydb/tests/functional/serverless/py3test [started: 2891640 (1767495923975), finished: 3541775 (1767496574110)]
[ 36146 ms] [TA] [test-16292006792505099192]: $(BUILD_ROOT)/ydb/tests/functional/serverless/test-results/py3test/{meta.json ... results_accumulator.log} [started: 3758982 (1767496791317), finished: 3795128 (1767496827463)]
Time from start: 4093688 ms, time elapsed by graph 779836 ms, time diff 3313852 ms.
The longest 10 tasks:
[650135 ms] [TM] [test-10328883828574438480 asan default-linux-x86_64 release]: ydb/tests/functional/serverless/py3test [started: 1767495923975, finished: 1767496574110]
[633361 ms] [TM] [test-7992164649941825679 asan default-linux-x86_64 release]: ydb/tests/functional/serverless/py3test [started: 1767496157772, finished: 1767496791133]
[623994 ms] [TM] [test-11892826991650226155 asan default-linux-x86_64 release]: ydb/core/persqueue/pqtablet/partition/mlp/ut/unittest [started: 1767493079218, finished: 1767493703212]
[614274 ms] [TM] [test-5051824788335788387 asan default-linux-x86_64 release]: ydb/core/tx/datashard/ut_order/unittest [started: 1767493565862, finished: 1767494180136]
[607501 ms] [TM] [test-826678268478144616 asan default-linux-x86_64 release]: ydb/core/statistics/service/ut/unittest [started: 1767493936862, finished: 1767494544363]
[604834 ms] [TM] [test-17779712691024394813 asan default-linux-x86_64 release]: ydb/core/statistics/aggregator/ut/unittest [started: 1767494761389, finished: 1767495366223]
[602302 ms] [TM] [test-12636067794723398809 asan default-linux-x86_64 release]: ydb/tests/olap/column_family/compression/py3test [started: 1767493359752, finished: 1767493962054]
[599694 ms] [TM] [test-1088287544449587775 asan default-linux-x86_64 release]: ydb/tests/olap/scenario/py3test [started: 1767493361056, finished: 1767493960750]
[566188 ms] [TM] [test-6337827159224675283 asan default-linux-x86_64 release]: ydb/core/statistics/service/ut/unittest [started: 1767493938702, finished: 1767494504890]
[564730 ms] [TM] [test-474208297978039246 asan default-linux-x86_64 release]: ydb/core/cms/ut_sentinel_unstable/unittest [started: 1767495707438, finished: 1767496272168]
Total time by type:
[158421565 ms] [TM] [count: 4344, ave time 36469.05 msec]
[ 11408694 ms] [LD] [count: 416, ave time 27424.75 msec]
[ 9706341 ms] [prepare:get from dist cache] [count: 9673, ave time 1003.45 msec]
[ 1040602 ms] [TS] [count: 177, ave time 5879.11 msec]
[ 667355 ms] [prepare:put into local cache, clean build dir] [count: 9688, ave time 68.88 msec]
[ 648957 ms] [prepare:bazel-store] [count: 3, ave time 216319.00 msec]
[ 480010 ms] [prepare:put to dist cache] [count: 5098, ave time 94.16 msec]
[ 349874 ms] [TA] [count: 219, ave time 1597.60 msec]
[ 274122 ms] [prepare:tools] [count: 21, ave time 13053.43 msec]
[ 182606 ms] [prepare:AC] [count: 4, ave time 45651.50 msec]
[ 30753 ms] [CC] [count: 6, ave time 5125.50 msec]
[ 2792 ms] [AR] [count: 9, ave time 310.22 msec]
[ 1006 ms] [UN] [count: 2, ave time 503.00 msec]
[ 756 ms] [CP] [count: 2, ave time 378.00 msec]
[ 605 ms] [prepare:resources] [count: 1, ave time 605.00 msec]
[ 581 ms] [CF] [count: 2, ave time 290.50 msec]
[ 522 ms] [BI] [count: 1, ave time 522.00 msec]
[ 390 ms] [SB] [count: 1, ave time 390.00 msec]
[ 126 ms] [PK] [count: 1, ave time 126.00 msec]
[ 21 ms] [prepare:clean] [count: 3, ave time 7.00 msec]
Total tasks times:
Total failed tasks time - 0 ms (0.00%)
Total tests tasks time - 159812041 ms (93.32%)
Total run tasks time - 171257661 ms
Configure time - 4.9 s
Statistics overhead 2173 ms
Info: Dump results report to /home/runner/actions_runner/_work/ydb/ydb/tmp/results/try_1/report.json
Ok
+ echo 0
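The leaked allocations reported for every failing test sit in CPython's module-import path (import_find_and_load, PyImport_ImportModuleLevelObject), and the totals are nearly identical across tests (about 453.7-453.8 thousand bytes in 8,616-8,618 allocations), which suggests interpreter start-up allocations rather than a leak in the streaming_optimize code under test. If that diagnosis holds, the standard LeakSanitizer mechanism for known benign leaks is a suppressions file selected through LSAN_OPTIONS; whether ya make / yatest propagates that variable to the fqrun child process is an assumption that would need to be checked, and the patterns below only illustrate the suppression syntax, they are not a vetted suppression list.

# lsan.supp (illustrative only): suppress leaks whose stacks contain
# CPython's import machinery, as seen in the reports above.
leak:import_find_and_load
leak:PyImport_ImportModuleLevelObject

A file with such leak:<symbol> lines would be passed as LSAN_OPTIONS=suppressions=/path/to/lsan.supp in the environment of the sanitized process.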